The interfaces provided are listed below, along with usage samples.

======================= JobsV1Beta3Client =======================
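A minimal usage sketch for JobsV1Beta3Client, assuming the generated client and request types from com.google.dataflow.v1beta3 in this release; the project, location, and job id values are placeholders, and create() is assumed to pick up application default credentials:

import com.google.dataflow.v1beta3.GetJobRequest;
import com.google.dataflow.v1beta3.Job;
import com.google.dataflow.v1beta3.JobsV1Beta3Client;

public class GetJobSample {
  public static void main(String[] args) throws Exception {
    // create() uses application default credentials and the default Dataflow endpoint.
    try (JobsV1Beta3Client client = JobsV1Beta3Client.create()) {
      GetJobRequest request =
          GetJobRequest.newBuilder()
              .setProjectId("my-project")                  // placeholder project
              .setLocation("us-central1")                  // placeholder regional endpoint
              .setJobId("2022-01-01_00_00_00-1234567890")  // placeholder job id
              .build();
      Job job = client.getJob(request);
      System.out.println(job.getName() + " is " + job.getCurrentState());
    }
  }
}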
diff --git a/google-cloud-dataflow/src/main/java/com/google/dataflow/v1beta3/stub/GrpcFlexTemplatesServiceStub.java b/google-cloud-dataflow/src/main/java/com/google/dataflow/v1beta3/stub/GrpcFlexTemplatesServiceStub.java
index ddea2246..ec2f172d 100644
--- a/google-cloud-dataflow/src/main/java/com/google/dataflow/v1beta3/stub/GrpcFlexTemplatesServiceStub.java
+++ b/google-cloud-dataflow/src/main/java/com/google/dataflow/v1beta3/stub/GrpcFlexTemplatesServiceStub.java
@@ -23,6 +23,7 @@
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
+import com.google.common.collect.ImmutableMap;
import com.google.dataflow.v1beta3.LaunchFlexTemplateRequest;
import com.google.dataflow.v1beta3.LaunchFlexTemplateResponse;
import com.google.longrunning.stub.GrpcOperationsStub;
@@ -103,6 +104,13 @@ protected GrpcFlexTemplatesServiceStub(
launchFlexTemplateTransportSettings =
GrpcCallSettings.
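The hunk above is truncated here. The newly imported ImmutableMap is the kind of dependency GAPIC-generated gRPC stubs use for a request-params (routing header) extractor on the call settings. The following is a hedged sketch of that common pattern using the standard gax-grpc builder API, not the verbatim generated lines; launchFlexTemplateMethodDescriptor stands for the stub's existing gRPC method descriptor:

// Sketch only: an assumed reconstruction of the routing-header extractor pattern,
// not the exact contents of the truncated hunk.
GrpcCallSettings<LaunchFlexTemplateRequest, LaunchFlexTemplateResponse>
    launchFlexTemplateTransportSettings =
        GrpcCallSettings.<LaunchFlexTemplateRequest, LaunchFlexTemplateResponse>newBuilder()
            .setMethodDescriptor(launchFlexTemplateMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  // Routing headers let the service route the call by project and location.
                  ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                  params.put("project_id", String.valueOf(request.getProjectId()));
                  params.put("location", String.valueOf(request.getLocation()));
                  return params.build();
                })
            .build();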
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
@@ -591,8 +590,7 @@ public java.lang.String getWorkerPool() {
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
@@ -1628,8 +1626,7 @@ public com.google.protobuf.TimestampOrBuilder getTimeOrBuilder() {
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
@@ -1651,8 +1648,7 @@ public java.lang.String getWorkerPool() {
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
@@ -1674,8 +1670,7 @@ public com.google.protobuf.ByteString getWorkerPoolBytes() {
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
@@ -1696,8 +1691,7 @@ public Builder setWorkerPool(java.lang.String value) {
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
@@ -1714,8 +1708,7 @@ public Builder clearWorkerPool() {
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/AutoscalingEventOrBuilder.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/AutoscalingEventOrBuilder.java
index 8a3d0d3f..9bde11f0 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/AutoscalingEventOrBuilder.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/AutoscalingEventOrBuilder.java
@@ -157,8 +157,7 @@ public interface AutoscalingEventOrBuilder
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
@@ -170,8 +169,7 @@ public interface AutoscalingEventOrBuilder
*
*
*
- * A short and friendly name for the worker pool this event refers to,
- * populated from the value of PoolStageRelation::user_pool_name.
+ * A short and friendly name for the worker pool this event refers to.
*
*
* string worker_pool = 7;
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/BigTableIODetails.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/BigTableIODetails.java
index af67bd4e..a1b38b17 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/BigTableIODetails.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/BigTableIODetails.java
@@ -22,7 +22,7 @@
*
*
*
- * Metadata for a Cloud BigTable connector used by the job.
+ * Metadata for a Cloud Bigtable connector used by the job.
*
*
* Protobuf type {@code google.dataflow.v1beta3.BigTableIODetails}
@@ -455,7 +455,7 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build
*
*
*
- * Metadata for a Cloud BigTable connector used by the job.
+ * Metadata for a Cloud Bigtable connector used by the job.
*
*
* Protobuf type {@code google.dataflow.v1beta3.BigTableIODetails}
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironment.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironment.java
index 85a9e5ad..62079147 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironment.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironment.java
@@ -53,6 +53,9 @@ private FlexTemplateRuntimeEnvironment() {
flexrsGoal_ = 0;
stagingLocation_ = "";
sdkContainerImage_ = "";
+ autoscalingAlgorithm_ = 0;
+ saveHeapDumpsToGcsPath_ = "";
+ launcherMachineType_ = "";
}
@java.lang.Override
@@ -219,6 +222,37 @@ private FlexTemplateRuntimeEnvironment(
sdkContainerImage_ = s;
break;
}
+ case 160:
+ {
+ diskSizeGb_ = input.readInt32();
+ break;
+ }
+ case 168:
+ {
+ int rawValue = input.readEnum();
+
+ autoscalingAlgorithm_ = rawValue;
+ break;
+ }
+ case 176:
+ {
+ dumpHeapOnOom_ = input.readBool();
+ break;
+ }
+ case 186:
+ {
+ java.lang.String s = input.readStringRequireUtf8();
+
+ saveHeapDumpsToGcsPath_ = s;
+ break;
+ }
+ case 194:
+ {
+ java.lang.String s = input.readStringRequireUtf8();
+
+ launcherMachineType_ = s;
+ break;
+ }
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
@@ -1173,6 +1207,185 @@ public com.google.protobuf.ByteString getSdkContainerImageBytes() {
}
}
+ public static final int DISK_SIZE_GB_FIELD_NUMBER = 20;
+ private int diskSizeGb_;
+ /**
+ *
+ *
+ *
+ * Worker disk size, in gigabytes.
+ *
+ *
+ * int32 disk_size_gb = 20;
+ *
+ * @return The diskSizeGb.
+ */
+ @java.lang.Override
+ public int getDiskSizeGb() {
+ return diskSizeGb_;
+ }
+
+ public static final int AUTOSCALING_ALGORITHM_FIELD_NUMBER = 21;
+ private int autoscalingAlgorithm_;
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @return The enum numeric value on the wire for autoscalingAlgorithm.
+ */
+ @java.lang.Override
+ public int getAutoscalingAlgorithmValue() {
+ return autoscalingAlgorithm_;
+ }
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @return The autoscalingAlgorithm.
+ */
+ @java.lang.Override
+ public com.google.dataflow.v1beta3.AutoscalingAlgorithm getAutoscalingAlgorithm() {
+ @SuppressWarnings("deprecation")
+ com.google.dataflow.v1beta3.AutoscalingAlgorithm result =
+ com.google.dataflow.v1beta3.AutoscalingAlgorithm.valueOf(autoscalingAlgorithm_);
+ return result == null ? com.google.dataflow.v1beta3.AutoscalingAlgorithm.UNRECOGNIZED : result;
+ }
+
+ public static final int DUMP_HEAP_ON_OOM_FIELD_NUMBER = 22;
+ private boolean dumpHeapOnOom_;
+ /**
+ *
+ *
+ *
+ * If true, save a heap dump before killing a thread or process which is GC
+ * thrashing or out of memory. The location of the heap file will either be
+ * echoed back to the user, or the user will be given the opportunity to
+ * download the heap file.
+ *
+ *
+ * bool dump_heap_on_oom = 22;
+ *
+ * @return The dumpHeapOnOom.
+ */
+ @java.lang.Override
+ public boolean getDumpHeapOnOom() {
+ return dumpHeapOnOom_;
+ }
+
+ public static final int SAVE_HEAP_DUMPS_TO_GCS_PATH_FIELD_NUMBER = 23;
+ private volatile java.lang.Object saveHeapDumpsToGcsPath_;
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @return The saveHeapDumpsToGcsPath.
+ */
+ @java.lang.Override
+ public java.lang.String getSaveHeapDumpsToGcsPath() {
+ java.lang.Object ref = saveHeapDumpsToGcsPath_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ saveHeapDumpsToGcsPath_ = s;
+ return s;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @return The bytes for saveHeapDumpsToGcsPath.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getSaveHeapDumpsToGcsPathBytes() {
+ java.lang.Object ref = saveHeapDumpsToGcsPath_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ saveHeapDumpsToGcsPath_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int LAUNCHER_MACHINE_TYPE_FIELD_NUMBER = 24;
+ private volatile java.lang.Object launcherMachineType_;
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @return The launcherMachineType.
+ */
+ @java.lang.Override
+ public java.lang.String getLauncherMachineType() {
+ java.lang.Object ref = launcherMachineType_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ launcherMachineType_ = s;
+ return s;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @return The bytes for launcherMachineType.
+ */
+ @java.lang.Override
+ public com.google.protobuf.ByteString getLauncherMachineTypeBytes() {
+ java.lang.Object ref = launcherMachineType_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ launcherMachineType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
private byte memoizedIsInitialized = -1;
@java.lang.Override
@@ -1247,6 +1460,23 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sdkContainerImage_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 18, sdkContainerImage_);
}
+ if (diskSizeGb_ != 0) {
+ output.writeInt32(20, diskSizeGb_);
+ }
+ if (autoscalingAlgorithm_
+ != com.google.dataflow.v1beta3.AutoscalingAlgorithm.AUTOSCALING_ALGORITHM_UNKNOWN
+ .getNumber()) {
+ output.writeEnum(21, autoscalingAlgorithm_);
+ }
+ if (dumpHeapOnOom_ != false) {
+ output.writeBool(22, dumpHeapOnOom_);
+ }
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(saveHeapDumpsToGcsPath_)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 23, saveHeapDumpsToGcsPath_);
+ }
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(launcherMachineType_)) {
+ com.google.protobuf.GeneratedMessageV3.writeString(output, 24, launcherMachineType_);
+ }
unknownFields.writeTo(output);
}
@@ -1325,6 +1555,23 @@ public int getSerializedSize() {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sdkContainerImage_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(18, sdkContainerImage_);
}
+ if (diskSizeGb_ != 0) {
+ size += com.google.protobuf.CodedOutputStream.computeInt32Size(20, diskSizeGb_);
+ }
+ if (autoscalingAlgorithm_
+ != com.google.dataflow.v1beta3.AutoscalingAlgorithm.AUTOSCALING_ALGORITHM_UNKNOWN
+ .getNumber()) {
+ size += com.google.protobuf.CodedOutputStream.computeEnumSize(21, autoscalingAlgorithm_);
+ }
+ if (dumpHeapOnOom_ != false) {
+ size += com.google.protobuf.CodedOutputStream.computeBoolSize(22, dumpHeapOnOom_);
+ }
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(saveHeapDumpsToGcsPath_)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(23, saveHeapDumpsToGcsPath_);
+ }
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(launcherMachineType_)) {
+ size += com.google.protobuf.GeneratedMessageV3.computeStringSize(24, launcherMachineType_);
+ }
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
@@ -1360,6 +1607,11 @@ public boolean equals(final java.lang.Object obj) {
if (flexrsGoal_ != other.flexrsGoal_) return false;
if (!getStagingLocation().equals(other.getStagingLocation())) return false;
if (!getSdkContainerImage().equals(other.getSdkContainerImage())) return false;
+ if (getDiskSizeGb() != other.getDiskSizeGb()) return false;
+ if (autoscalingAlgorithm_ != other.autoscalingAlgorithm_) return false;
+ if (getDumpHeapOnOom() != other.getDumpHeapOnOom()) return false;
+ if (!getSaveHeapDumpsToGcsPath().equals(other.getSaveHeapDumpsToGcsPath())) return false;
+ if (!getLauncherMachineType().equals(other.getLauncherMachineType())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@@ -1411,6 +1663,16 @@ public int hashCode() {
hash = (53 * hash) + getStagingLocation().hashCode();
hash = (37 * hash) + SDK_CONTAINER_IMAGE_FIELD_NUMBER;
hash = (53 * hash) + getSdkContainerImage().hashCode();
+ hash = (37 * hash) + DISK_SIZE_GB_FIELD_NUMBER;
+ hash = (53 * hash) + getDiskSizeGb();
+ hash = (37 * hash) + AUTOSCALING_ALGORITHM_FIELD_NUMBER;
+ hash = (53 * hash) + autoscalingAlgorithm_;
+ hash = (37 * hash) + DUMP_HEAP_ON_OOM_FIELD_NUMBER;
+ hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getDumpHeapOnOom());
+ hash = (37 * hash) + SAVE_HEAP_DUMPS_TO_GCS_PATH_FIELD_NUMBER;
+ hash = (53 * hash) + getSaveHeapDumpsToGcsPath().hashCode();
+ hash = (37 * hash) + LAUNCHER_MACHINE_TYPE_FIELD_NUMBER;
+ hash = (53 * hash) + getLauncherMachineType().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
@@ -1612,6 +1874,16 @@ public Builder clear() {
sdkContainerImage_ = "";
+ diskSizeGb_ = 0;
+
+ autoscalingAlgorithm_ = 0;
+
+ dumpHeapOnOom_ = false;
+
+ saveHeapDumpsToGcsPath_ = "";
+
+ launcherMachineType_ = "";
+
return this;
}
@@ -1663,6 +1935,11 @@ public com.google.dataflow.v1beta3.FlexTemplateRuntimeEnvironment buildPartial()
result.flexrsGoal_ = flexrsGoal_;
result.stagingLocation_ = stagingLocation_;
result.sdkContainerImage_ = sdkContainerImage_;
+ result.diskSizeGb_ = diskSizeGb_;
+ result.autoscalingAlgorithm_ = autoscalingAlgorithm_;
+ result.dumpHeapOnOom_ = dumpHeapOnOom_;
+ result.saveHeapDumpsToGcsPath_ = saveHeapDumpsToGcsPath_;
+ result.launcherMachineType_ = launcherMachineType_;
onBuilt();
return result;
}
@@ -1783,6 +2060,23 @@ public Builder mergeFrom(com.google.dataflow.v1beta3.FlexTemplateRuntimeEnvironm
sdkContainerImage_ = other.sdkContainerImage_;
onChanged();
}
+ if (other.getDiskSizeGb() != 0) {
+ setDiskSizeGb(other.getDiskSizeGb());
+ }
+ if (other.autoscalingAlgorithm_ != 0) {
+ setAutoscalingAlgorithmValue(other.getAutoscalingAlgorithmValue());
+ }
+ if (other.getDumpHeapOnOom() != false) {
+ setDumpHeapOnOom(other.getDumpHeapOnOom());
+ }
+ if (!other.getSaveHeapDumpsToGcsPath().isEmpty()) {
+ saveHeapDumpsToGcsPath_ = other.saveHeapDumpsToGcsPath_;
+ onChanged();
+ }
+ if (!other.getLauncherMachineType().isEmpty()) {
+ launcherMachineType_ = other.launcherMachineType_;
+ onChanged();
+ }
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
@@ -3825,6 +4119,439 @@ public Builder setSdkContainerImageBytes(com.google.protobuf.ByteString value) {
return this;
}
+ private int diskSizeGb_;
+ /**
+ *
+ *
+ *
+ * Worker disk size, in gigabytes.
+ *
+ *
+ * int32 disk_size_gb = 20;
+ *
+ * @return The diskSizeGb.
+ */
+ @java.lang.Override
+ public int getDiskSizeGb() {
+ return diskSizeGb_;
+ }
+ /**
+ *
+ *
+ *
+ * Worker disk size, in gigabytes.
+ *
+ *
+ * int32 disk_size_gb = 20;
+ *
+ * @param value The diskSizeGb to set.
+ * @return This builder for chaining.
+ */
+ public Builder setDiskSizeGb(int value) {
+
+ diskSizeGb_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Worker disk size, in gigabytes.
+ *
+ *
+ * int32 disk_size_gb = 20;
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearDiskSizeGb() {
+
+ diskSizeGb_ = 0;
+ onChanged();
+ return this;
+ }
+
+ private int autoscalingAlgorithm_ = 0;
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @return The enum numeric value on the wire for autoscalingAlgorithm.
+ */
+ @java.lang.Override
+ public int getAutoscalingAlgorithmValue() {
+ return autoscalingAlgorithm_;
+ }
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @param value The enum numeric value on the wire for autoscalingAlgorithm to set.
+ * @return This builder for chaining.
+ */
+ public Builder setAutoscalingAlgorithmValue(int value) {
+
+ autoscalingAlgorithm_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @return The autoscalingAlgorithm.
+ */
+ @java.lang.Override
+ public com.google.dataflow.v1beta3.AutoscalingAlgorithm getAutoscalingAlgorithm() {
+ @SuppressWarnings("deprecation")
+ com.google.dataflow.v1beta3.AutoscalingAlgorithm result =
+ com.google.dataflow.v1beta3.AutoscalingAlgorithm.valueOf(autoscalingAlgorithm_);
+ return result == null
+ ? com.google.dataflow.v1beta3.AutoscalingAlgorithm.UNRECOGNIZED
+ : result;
+ }
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @param value The autoscalingAlgorithm to set.
+ * @return This builder for chaining.
+ */
+ public Builder setAutoscalingAlgorithm(com.google.dataflow.v1beta3.AutoscalingAlgorithm value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ autoscalingAlgorithm_ = value.getNumber();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearAutoscalingAlgorithm() {
+
+ autoscalingAlgorithm_ = 0;
+ onChanged();
+ return this;
+ }
+
+ private boolean dumpHeapOnOom_;
+ /**
+ *
+ *
+ *
+ * If true, save a heap dump before killing a thread or process which is GC
+ * thrashing or out of memory. The location of the heap file will either be
+ * echoed back to the user, or the user will be given the opportunity to
+ * download the heap file.
+ *
+ *
+ * bool dump_heap_on_oom = 22;
+ *
+ * @return The dumpHeapOnOom.
+ */
+ @java.lang.Override
+ public boolean getDumpHeapOnOom() {
+ return dumpHeapOnOom_;
+ }
+ /**
+ *
+ *
+ *
+ * If true, save a heap dump before killing a thread or process which is GC
+ * thrashing or out of memory. The location of the heap file will either be
+ * echoed back to the user, or the user will be given the opportunity to
+ * download the heap file.
+ *
+ *
+ * bool dump_heap_on_oom = 22;
+ *
+ * @param value The dumpHeapOnOom to set.
+ * @return This builder for chaining.
+ */
+ public Builder setDumpHeapOnOom(boolean value) {
+
+ dumpHeapOnOom_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * If true, save a heap dump before killing a thread or process which is GC
+ * thrashing or out of memory. The location of the heap file will either be
+ * echoed back to the user, or the user will be given the opportunity to
+ * download the heap file.
+ *
+ *
+ * bool dump_heap_on_oom = 22;
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearDumpHeapOnOom() {
+
+ dumpHeapOnOom_ = false;
+ onChanged();
+ return this;
+ }
+
+ private java.lang.Object saveHeapDumpsToGcsPath_ = "";
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @return The saveHeapDumpsToGcsPath.
+ */
+ public java.lang.String getSaveHeapDumpsToGcsPath() {
+ java.lang.Object ref = saveHeapDumpsToGcsPath_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ saveHeapDumpsToGcsPath_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @return The bytes for saveHeapDumpsToGcsPath.
+ */
+ public com.google.protobuf.ByteString getSaveHeapDumpsToGcsPathBytes() {
+ java.lang.Object ref = saveHeapDumpsToGcsPath_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ saveHeapDumpsToGcsPath_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @param value The saveHeapDumpsToGcsPath to set.
+ * @return This builder for chaining.
+ */
+ public Builder setSaveHeapDumpsToGcsPath(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ saveHeapDumpsToGcsPath_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearSaveHeapDumpsToGcsPath() {
+
+ saveHeapDumpsToGcsPath_ = getDefaultInstance().getSaveHeapDumpsToGcsPath();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @param value The bytes for saveHeapDumpsToGcsPath to set.
+ * @return This builder for chaining.
+ */
+ public Builder setSaveHeapDumpsToGcsPathBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ saveHeapDumpsToGcsPath_ = value;
+ onChanged();
+ return this;
+ }
+
+ private java.lang.Object launcherMachineType_ = "";
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @return The launcherMachineType.
+ */
+ public java.lang.String getLauncherMachineType() {
+ java.lang.Object ref = launcherMachineType_;
+ if (!(ref instanceof java.lang.String)) {
+ com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ launcherMachineType_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @return The bytes for launcherMachineType.
+ */
+ public com.google.protobuf.ByteString getLauncherMachineTypeBytes() {
+ java.lang.Object ref = launcherMachineType_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
+ launcherMachineType_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @param value The launcherMachineType to set.
+ * @return This builder for chaining.
+ */
+ public Builder setLauncherMachineType(java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ launcherMachineType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @return This builder for chaining.
+ */
+ public Builder clearLauncherMachineType() {
+
+ launcherMachineType_ = getDefaultInstance().getLauncherMachineType();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @param value The bytes for launcherMachineType to set.
+ * @return This builder for chaining.
+ */
+ public Builder setLauncherMachineTypeBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ launcherMachineType_ = value;
+ onChanged();
+ return this;
+ }
+
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
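Taken together, the fields added to FlexTemplateRuntimeEnvironment in this change can all be set through the generated builder. A minimal sketch using only the setters introduced above; the AutoscalingAlgorithm constant AUTOSCALING_ALGORITHM_BASIC is assumed from the existing enum, and the bucket path and machine type are placeholders:

import com.google.dataflow.v1beta3.AutoscalingAlgorithm;
import com.google.dataflow.v1beta3.FlexTemplateRuntimeEnvironment;

public class FlexTemplateEnvironmentSample {
  public static void main(String[] args) {
    FlexTemplateRuntimeEnvironment environment =
        FlexTemplateRuntimeEnvironment.newBuilder()
            // New in this change: worker disk size in gigabytes (field 20).
            .setDiskSizeGb(50)
            // New in this change: autoscaling algorithm (field 21).
            // AUTOSCALING_ALGORITHM_BASIC is assumed from the existing enum.
            .setAutoscalingAlgorithm(AutoscalingAlgorithm.AUTOSCALING_ALGORITHM_BASIC)
            // New in this change: heap-dump behavior (fields 22 and 23). Setting a dump
            // path implies dumps should be generated on OOM, so both are set together.
            .setDumpHeapOnOom(true)
            .setSaveHeapDumpsToGcsPath("gs://my-bucket/heap-dumps")  // placeholder bucket
            // New in this change: machine type for the launcher VM (field 24).
            .setLauncherMachineType("n1-standard-1")  // the documented default
            .build();
    System.out.println(environment);
  }
}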
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironmentOrBuilder.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironmentOrBuilder.java
index 4d301bb9..0d8edd91 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironmentOrBuilder.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/FlexTemplateRuntimeEnvironmentOrBuilder.java
@@ -570,4 +570,114 @@ java.lang.String getAdditionalUserLabelsOrDefault(
* @return The bytes for sdkContainerImage.
*/
com.google.protobuf.ByteString getSdkContainerImageBytes();
+
+ /**
+ *
+ *
+ *
+ * Worker disk size, in gigabytes.
+ *
+ *
+ * int32 disk_size_gb = 20;
+ *
+ * @return The diskSizeGb.
+ */
+ int getDiskSizeGb();
+
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @return The enum numeric value on the wire for autoscalingAlgorithm.
+ */
+ int getAutoscalingAlgorithmValue();
+ /**
+ *
+ *
+ *
+ * The algorithm to use for autoscaling
+ *
+ *
+ * .google.dataflow.v1beta3.AutoscalingAlgorithm autoscaling_algorithm = 21;
+ *
+ * @return The autoscalingAlgorithm.
+ */
+ com.google.dataflow.v1beta3.AutoscalingAlgorithm getAutoscalingAlgorithm();
+
+ /**
+ *
+ *
+ *
+ * If true, save a heap dump before killing a thread or process which is GC
+ * thrashing or out of memory. The location of the heap file will either be
+ * echoed back to the user, or the user will be given the opportunity to
+ * download the heap file.
+ *
+ *
+ * bool dump_heap_on_oom = 22;
+ *
+ * @return The dumpHeapOnOom.
+ */
+ boolean getDumpHeapOnOom();
+
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @return The saveHeapDumpsToGcsPath.
+ */
+ java.lang.String getSaveHeapDumpsToGcsPath();
+ /**
+ *
+ *
+ *
+ * Cloud Storage bucket (directory) to upload heap dumps to the given
+ * location. Enabling this implies that heap dumps should be generated on OOM
+ * (dump_heap_on_oom is set to true).
+ *
+ *
+ * string save_heap_dumps_to_gcs_path = 23;
+ *
+ * @return The bytes for saveHeapDumpsToGcsPath.
+ */
+ com.google.protobuf.ByteString getSaveHeapDumpsToGcsPathBytes();
+
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @return The launcherMachineType.
+ */
+ java.lang.String getLauncherMachineType();
+ /**
+ *
+ *
+ *
+ * The machine type to use for launching the job. The default is
+ * n1-standard-1.
+ *
+ *
+ * string launcher_machine_type = 24;
+ *
+ * @return The bytes for launcherMachineType.
+ */
+ com.google.protobuf.ByteString getLauncherMachineTypeBytes();
}
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadata.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadata.java
index e17cec01..ebf9a259 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadata.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadata.java
@@ -410,7 +410,7 @@ public com.google.dataflow.v1beta3.BigQueryIODetailsOrBuilder getBigqueryDetails
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -423,7 +423,7 @@ public java.util.List
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -437,7 +437,7 @@ public java.util.List
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -450,7 +450,7 @@ public int getBigTableDetailsCount() {
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -463,7 +463,7 @@ public com.google.dataflow.v1beta3.BigTableIODetails getBigTableDetails(int inde
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -480,7 +480,7 @@ public com.google.dataflow.v1beta3.BigTableIODetailsOrBuilder getBigTableDetails
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -493,7 +493,7 @@ public java.util.List
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -507,7 +507,7 @@ public java.util.List
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -520,7 +520,7 @@ public int getPubsubDetailsCount() {
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -533,7 +533,7 @@ public com.google.dataflow.v1beta3.PubSubIODetails getPubsubDetails(int index) {
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2264,7 +2264,7 @@ private void ensureBigTableDetailsIsMutable() {
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2280,7 +2280,7 @@ public java.util.List
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2296,7 +2296,7 @@ public int getBigTableDetailsCount() {
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2312,7 +2312,7 @@ public com.google.dataflow.v1beta3.BigTableIODetails getBigTableDetails(int inde
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2335,7 +2335,7 @@ public Builder setBigTableDetails(
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2355,7 +2355,7 @@ public Builder setBigTableDetails(
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2377,7 +2377,7 @@ public Builder addBigTableDetails(com.google.dataflow.v1beta3.BigTableIODetails
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2400,7 +2400,7 @@ public Builder addBigTableDetails(
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2420,7 +2420,7 @@ public Builder addBigTableDetails(
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2440,7 +2440,7 @@ public Builder addBigTableDetails(
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2460,7 +2460,7 @@ public Builder addAllBigTableDetails(
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2479,7 +2479,7 @@ public Builder clearBigTableDetails() {
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2498,7 +2498,7 @@ public Builder removeBigTableDetails(int index) {
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2511,7 +2511,7 @@ public com.google.dataflow.v1beta3.BigTableIODetails.Builder getBigTableDetailsB
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2528,7 +2528,7 @@ public com.google.dataflow.v1beta3.BigTableIODetailsOrBuilder getBigTableDetails
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2545,7 +2545,7 @@ public com.google.dataflow.v1beta3.BigTableIODetailsOrBuilder getBigTableDetails
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2558,7 +2558,7 @@ public com.google.dataflow.v1beta3.BigTableIODetails.Builder addBigTableDetailsB
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2572,7 +2572,7 @@ public com.google.dataflow.v1beta3.BigTableIODetails.Builder addBigTableDetailsB
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -2623,7 +2623,7 @@ private void ensurePubsubDetailsIsMutable() {
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2639,7 +2639,7 @@ public java.util.List
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2655,7 +2655,7 @@ public int getPubsubDetailsCount() {
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2671,7 +2671,7 @@ public com.google.dataflow.v1beta3.PubSubIODetails getPubsubDetails(int index) {
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2693,7 +2693,7 @@ public Builder setPubsubDetails(int index, com.google.dataflow.v1beta3.PubSubIOD
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2713,7 +2713,7 @@ public Builder setPubsubDetails(
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2735,7 +2735,7 @@ public Builder addPubsubDetails(com.google.dataflow.v1beta3.PubSubIODetails valu
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2757,7 +2757,7 @@ public Builder addPubsubDetails(int index, com.google.dataflow.v1beta3.PubSubIOD
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2777,7 +2777,7 @@ public Builder addPubsubDetails(
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2797,7 +2797,7 @@ public Builder addPubsubDetails(
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2817,7 +2817,7 @@ public Builder addAllPubsubDetails(
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2836,7 +2836,7 @@ public Builder clearPubsubDetails() {
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2855,7 +2855,7 @@ public Builder removePubsubDetails(int index) {
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2867,7 +2867,7 @@ public com.google.dataflow.v1beta3.PubSubIODetails.Builder getPubsubDetailsBuild
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2884,7 +2884,7 @@ public com.google.dataflow.v1beta3.PubSubIODetailsOrBuilder getPubsubDetailsOrBu
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2901,7 +2901,7 @@ public com.google.dataflow.v1beta3.PubSubIODetailsOrBuilder getPubsubDetailsOrBu
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2914,7 +2914,7 @@ public com.google.dataflow.v1beta3.PubSubIODetails.Builder addPubsubDetailsBuild
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -2927,7 +2927,7 @@ public com.google.dataflow.v1beta3.PubSubIODetails.Builder addPubsubDetailsBuild
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadataOrBuilder.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadataOrBuilder.java
index 2abd1447..8a0e6ca7 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadataOrBuilder.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobMetadataOrBuilder.java
@@ -166,7 +166,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -176,7 +176,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -186,7 +186,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -196,7 +196,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -207,7 +207,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a Cloud BigTable source used in the Dataflow job.
+ * Identification of a Cloud Bigtable source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.BigTableIODetails big_table_details = 4;
@@ -218,7 +218,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -228,7 +228,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -238,7 +238,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -248,7 +248,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
@@ -259,7 +259,7 @@ public interface JobMetadataOrBuilder
*
*
*
- * Identification of a PubSub source used in the Dataflow job.
+ * Identification of a Pub/Sub source used in the Dataflow job.
*
*
* repeated .google.dataflow.v1beta3.PubSubIODetails pubsub_details = 5;
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobsProto.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobsProto.java
index f00f36ec..1553d4fe 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobsProto.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/JobsProto.java
@@ -166,12 +166,12 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
java.lang.String[] descriptorData = {
"\n\"google/dataflow/v1beta3/jobs.proto\022\027go"
+ "ogle.dataflow.v1beta3\032\034google/api/annota"
- + "tions.proto\032)google/dataflow/v1beta3/env"
- + "ironment.proto\032\'google/dataflow/v1beta3/"
- + "snapshots.proto\032\036google/protobuf/duratio"
- + "n.proto\032\034google/protobuf/struct.proto\032\037g"
- + "oogle/protobuf/timestamp.proto\032\027google/a"
- + "pi/client.proto\"\256\t\n\003Job\022\n\n\002id\030\001 \001(\t\022\022\n\np"
+ + "tions.proto\032\027google/api/client.proto\032)go"
+ + "ogle/dataflow/v1beta3/environment.proto\032"
+ + "\'google/dataflow/v1beta3/snapshots.proto"
+ + "\032\036google/protobuf/duration.proto\032\034google"
+ + "/protobuf/struct.proto\032\037google/protobuf/"
+ + "timestamp.proto\"\256\t\n\003Job\022\n\n\002id\030\001 \001(\t\022\022\n\np"
+ "roject_id\030\002 \001(\t\022\014\n\004name\030\003 \001(\t\022.\n\004type\030\004 "
+ "\001(\0162 .google.dataflow.v1beta3.JobType\0229\n"
+ "\013environment\030\005 \001(\0132$.google.dataflow.v1b"
@@ -324,47 +324,63 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "\"\n\036JOB_STATE_RESOURCE_CLEANING_UP\020\014*a\n\007J"
+ "obView\022\024\n\020JOB_VIEW_UNKNOWN\020\000\022\024\n\020JOB_VIEW"
+ "_SUMMARY\020\001\022\020\n\014JOB_VIEW_ALL\020\002\022\030\n\024JOB_VIEW"
- + "_DESCRIPTION\020\0032\217\007\n\013JobsV1Beta3\022V\n\tCreate"
- + "Job\022).google.dataflow.v1beta3.CreateJobR"
- + "equest\032\034.google.dataflow.v1beta3.Job\"\000\022P"
- + "\n\006GetJob\022&.google.dataflow.v1beta3.GetJo"
- + "bRequest\032\034.google.dataflow.v1beta3.Job\"\000"
- + "\022V\n\tUpdateJob\022).google.dataflow.v1beta3."
- + "UpdateJobRequest\032\034.google.dataflow.v1bet"
- + "a3.Job\"\000\022a\n\010ListJobs\022(.google.dataflow.v"
- + "1beta3.ListJobsRequest\032).google.dataflow"
- + ".v1beta3.ListJobsResponse\"\000\022k\n\022Aggregate"
- + "dListJobs\022(.google.dataflow.v1beta3.List"
- + "JobsRequest\032).google.dataflow.v1beta3.Li"
- + "stJobsResponse\"\000\022v\n\017CheckActiveJobs\022/.go"
- + "ogle.dataflow.v1beta3.CheckActiveJobsReq"
- + "uest\0320.google.dataflow.v1beta3.CheckActi"
- + "veJobsResponse\"\000\022_\n\013SnapshotJob\022+.google"
- + ".dataflow.v1beta3.SnapshotJobRequest\032!.g"
- + "oogle.dataflow.v1beta3.Snapshot\"\000\032\324\001\312A\027d"
- + "ataflow.googleapis.com\322A\266\001https://www.go"
- + "ogleapis.com/auth/cloud-platform,https:/"
- + "/www.googleapis.com/auth/compute,https:/"
- + "/www.googleapis.com/auth/compute.readonl"
- + "y,https://www.googleapis.com/auth/userin"
- + "fo.emailB\316\001\n\033com.google.dataflow.v1beta3"
- + "B\tJobsProtoP\001Z?google.golang.org/genprot"
- + "o/googleapis/dataflow/v1beta3;dataflow\252\002"
- + "\035Google.Cloud.Dataflow.V1Beta3\312\002\035Google\\"
- + "Cloud\\Dataflow\\V1beta3\352\002 Google::Cloud::"
- + "Dataflow::V1beta3b\006proto3"
+ + "_DESCRIPTION\020\0032\221\014\n\013JobsV1Beta3\022\301\001\n\tCreat"
+ + "eJob\022).google.dataflow.v1beta3.CreateJob"
+ + "Request\032\034.google.dataflow.v1beta3.Job\"k\202"
+ + "\323\344\223\002e\" /v1b3/projects/{project_id}/jobs:"
+ + "\003jobZ<\"5/v1b3/projects/{project_id}/loca"
+ + "tions/{location}/jobs:\003job\022\303\001\n\006GetJob\022&."
+ + "google.dataflow.v1beta3.GetJobRequest\032\034."
+ + "google.dataflow.v1beta3.Job\"s\202\323\344\223\002m\022)/v1"
+ + "b3/projects/{project_id}/jobs/{job_id}Z@"
+ + "\022>/v1b3/projects/{project_id}/locations/"
+ + "{location}/jobs/{job_id}\022\323\001\n\tUpdateJob\022)"
+ + ".google.dataflow.v1beta3.UpdateJobReques"
+ + "t\032\034.google.dataflow.v1beta3.Job\"}\202\323\344\223\002w\032"
+ + ")/v1b3/projects/{project_id}/jobs/{job_i"
+ + "d}:\003jobZE\032>/v1b3/projects/{project_id}/l"
+ + "ocations/{location}/jobs/{job_id}:\003job\022\302"
+ + "\001\n\010ListJobs\022(.google.dataflow.v1beta3.Li"
+ + "stJobsRequest\032).google.dataflow.v1beta3."
+ + "ListJobsResponse\"a\202\323\344\223\002[\022 /v1b3/projects"
+ + "/{project_id}/jobsZ7\0225/v1b3/projects/{pr"
+ + "oject_id}/locations/{location}/jobs\022\236\001\n\022"
+ + "AggregatedListJobs\022(.google.dataflow.v1b"
+ + "eta3.ListJobsRequest\032).google.dataflow.v"
+ + "1beta3.ListJobsResponse\"3\202\323\344\223\002-\022+/v1b3/p"
+ + "rojects/{project_id}/jobs:aggregated\022v\n\017"
+ + "CheckActiveJobs\022/.google.dataflow.v1beta"
+ + "3.CheckActiveJobsRequest\0320.google.datafl"
+ + "ow.v1beta3.CheckActiveJobsResponse\"\000\022\354\001\n"
+ + "\013SnapshotJob\022+.google.dataflow.v1beta3.S"
+ + "napshotJobRequest\032!.google.dataflow.v1be"
+ + "ta3.Snapshot\"\214\001\202\323\344\223\002\205\001\"2/v1b3/projects/{"
+ + "project_id}/jobs/{job_id}:snapshot:\001*ZL\""
+ + "G/v1b3/projects/{project_id}/locations/{"
+ + "location}/jobs/{job_id}:snapshot:\001*\032\324\001\312A"
+ + "\027dataflow.googleapis.com\322A\266\001https://www."
+ + "googleapis.com/auth/cloud-platform,https"
+ + "://www.googleapis.com/auth/compute,https"
+ + "://www.googleapis.com/auth/compute.reado"
+ + "nly,https://www.googleapis.com/auth/user"
+ + "info.emailB\316\001\n\033com.google.dataflow.v1bet"
+ + "a3B\tJobsProtoP\001Z?google.golang.org/genpr"
+ + "oto/googleapis/dataflow/v1beta3;dataflow"
+ + "\252\002\035Google.Cloud.Dataflow.V1Beta3\312\002\035Googl"
+ + "e\\Cloud\\Dataflow\\V1beta3\352\002 Google::Cloud"
+ + "::Dataflow::V1beta3b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
+ com.google.api.ClientProto.getDescriptor(),
com.google.dataflow.v1beta3.EnvironmentProto.getDescriptor(),
com.google.dataflow.v1beta3.SnapshotsProto.getDescriptor(),
com.google.protobuf.DurationProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
- com.google.api.ClientProto.getDescriptor(),
});
internal_static_google_dataflow_v1beta3_Job_descriptor =
getDescriptor().getMessageTypes().get(0);
@@ -680,16 +696,17 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
+ registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.oauthScopes);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
+ com.google.api.ClientProto.getDescriptor();
com.google.dataflow.v1beta3.EnvironmentProto.getDescriptor();
com.google.dataflow.v1beta3.SnapshotsProto.getDescriptor();
com.google.protobuf.DurationProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
- com.google.api.ClientProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
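The descriptor change above also registers the google.api.http extension, which is what makes the new HTTP bindings readable from the generated descriptors at runtime. A small sketch of inspecting one of those bindings, assuming AnnotationsProto and HttpRule from proto-google-common-protos are on the classpath:

import com.google.api.AnnotationsProto;
import com.google.api.HttpRule;
import com.google.dataflow.v1beta3.JobsProto;
import com.google.protobuf.Descriptors;

public class InspectHttpBindings {
  public static void main(String[] args) {
    Descriptors.MethodDescriptor getJob =
        JobsProto.getDescriptor()
            .findServiceByName("JobsV1Beta3")
            .findMethodByName("GetJob");
    // Readable only because the generated file now adds AnnotationsProto.http to the
    // extension registry passed to internalUpdateFileDescriptor.
    HttpRule rule = getJob.getOptions().getExtension(AnnotationsProto.http);
    System.out.println(rule.getGet());                   // primary GET binding
    rule.getAdditionalBindingsList()
        .forEach(b -> System.out.println(b.getGet()));   // regional binding(s)
  }
}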
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MessagesProto.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MessagesProto.java
index dfa6c3ec..f1a65642 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MessagesProto.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MessagesProto.java
@@ -62,9 +62,9 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
java.lang.String[] descriptorData = {
"\n&google/dataflow/v1beta3/messages.proto"
+ "\022\027google.dataflow.v1beta3\032\034google/api/an"
- + "notations.proto\032\034google/protobuf/struct."
- + "proto\032\037google/protobuf/timestamp.proto\032\027"
- + "google/api/client.proto\"\243\001\n\nJobMessage\022\n"
+ + "notations.proto\032\027google/api/client.proto"
+ + "\032\034google/protobuf/struct.proto\032\037google/p"
+ + "rotobuf/timestamp.proto\"\243\001\n\nJobMessage\022\n"
+ "\n\002id\030\001 \001(\t\022(\n\004time\030\002 \001(\0132\032.google.protob"
+ "uf.Timestamp\022\024\n\014message_text\030\003 \001(\t\022I\n\022me"
+ "ssage_importance\030\004 \001(\0162-.google.dataflow"
@@ -101,30 +101,33 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "\020\000\022\025\n\021JOB_MESSAGE_DEBUG\020\001\022\030\n\024JOB_MESSAGE"
+ "_DETAILED\020\002\022\025\n\021JOB_MESSAGE_BASIC\020\005\022\027\n\023JO"
+ "B_MESSAGE_WARNING\020\003\022\025\n\021JOB_MESSAGE_ERROR"
- + "\020\0042\340\002\n\017MessagesV1Beta3\022v\n\017ListJobMessage"
- + "s\022/.google.dataflow.v1beta3.ListJobMessa"
- + "gesRequest\0320.google.dataflow.v1beta3.Lis"
- + "tJobMessagesResponse\"\000\032\324\001\312A\027dataflow.goo"
- + "gleapis.com\322A\266\001https://www.googleapis.co"
- + "m/auth/cloud-platform,https://www.google"
- + "apis.com/auth/compute,https://www.google"
- + "apis.com/auth/compute.readonly,https://w"
- + "ww.googleapis.com/auth/userinfo.emailB\322\001"
- + "\n\033com.google.dataflow.v1beta3B\rMessagesP"
- + "rotoP\001Z?google.golang.org/genproto/googl"
- + "eapis/dataflow/v1beta3;dataflow\252\002\035Google"
- + ".Cloud.Dataflow.V1Beta3\312\002\035Google\\Cloud\\D"
- + "ataflow\\V1beta3\352\002 Google::Cloud::Dataflo"
- + "w::V1beta3b\006proto3"
+ + "\020\0042\347\003\n\017MessagesV1Beta3\022\374\001\n\017ListJobMessag"
+ + "es\022/.google.dataflow.v1beta3.ListJobMess"
+ + "agesRequest\0320.google.dataflow.v1beta3.Li"
+ + "stJobMessagesResponse\"\205\001\202\323\344\223\002\177\0222/v1b3/pr"
+ + "ojects/{project_id}/jobs/{job_id}/messag"
+ + "esZI\022G/v1b3/projects/{project_id}/locati"
+ + "ons/{location}/jobs/{job_id}/messages\032\324\001"
+ + "\312A\027dataflow.googleapis.com\322A\266\001https://ww"
+ + "w.googleapis.com/auth/cloud-platform,htt"
+ + "ps://www.googleapis.com/auth/compute,htt"
+ + "ps://www.googleapis.com/auth/compute.rea"
+ + "donly,https://www.googleapis.com/auth/us"
+ + "erinfo.emailB\322\001\n\033com.google.dataflow.v1b"
+ + "eta3B\rMessagesProtoP\001Z?google.golang.org"
+ + "/genproto/googleapis/dataflow/v1beta3;da"
+ + "taflow\252\002\035Google.Cloud.Dataflow.V1Beta3\312\002"
+ + "\035Google\\Cloud\\Dataflow\\V1beta3\352\002 Google:"
+ + ":Cloud::Dataflow::V1beta3b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
+ com.google.api.ClientProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
- com.google.api.ClientProto.getDescriptor(),
});
internal_static_google_dataflow_v1beta3_JobMessage_descriptor =
getDescriptor().getMessageTypes().get(0);
@@ -191,13 +194,14 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
+ registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.oauthScopes);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
+ com.google.api.ClientProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
- com.google.api.ClientProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
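Note: registering com.google.api.AnnotationsProto.http in the extension registry is what makes the newly embedded google.api.http rules readable once the generated descriptor is parsed. A minimal sketch of inspecting those bindings, assuming only the generated MessagesProto class and the standard protobuf descriptor API; in the previous descriptor strings the method options were empty, so the extension would have returned the default (empty) HttpRule.

import com.google.api.AnnotationsProto;
import com.google.api.HttpRule;
import com.google.dataflow.v1beta3.MessagesProto;
import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Descriptors.ServiceDescriptor;

public class PrintMessageServiceBindings {
  public static void main(String[] args) {
    // Look up the MessagesV1Beta3 service in the file descriptor built above.
    ServiceDescriptor service =
        MessagesProto.getDescriptor().findServiceByName("MessagesV1Beta3");
    for (MethodDescriptor method : service.getMethods()) {
      // Populated only because the descriptor now embeds google.api.http
      // options and the registry registers AnnotationsProto.http.
      HttpRule rule = method.getOptions().getExtension(AnnotationsProto.http);
      System.out.println(method.getName() + " -> GET " + rule.getGet());
    }
  }
}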
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MetricsProto.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MetricsProto.java
index 0b197e76..bb5f2dc3 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MetricsProto.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/MetricsProto.java
@@ -94,9 +94,9 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
java.lang.String[] descriptorData = {
"\n%google/dataflow/v1beta3/metrics.proto\022"
+ "\027google.dataflow.v1beta3\032\034google/api/ann"
- + "otations.proto\032\034google/protobuf/struct.p"
- + "roto\032\037google/protobuf/timestamp.proto\032\027g"
- + "oogle/api/client.proto\"\261\001\n\024MetricStructu"
+ + "otations.proto\032\027google/api/client.proto\032"
+ + "\034google/protobuf/struct.proto\032\037google/pr"
+ + "otobuf/timestamp.proto\"\261\001\n\024MetricStructu"
+ "redName\022\016\n\006origin\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\022K\n"
+ "\007context\030\003 \003(\0132:.google.dataflow.v1beta3"
+ ".MetricStructuredName.ContextEntry\032.\n\014Co"
@@ -160,37 +160,45 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "\022\037\n\033EXECUTION_STATE_NOT_STARTED\020\001\022\033\n\027EXE"
+ "CUTION_STATE_RUNNING\020\002\022\035\n\031EXECUTION_STAT"
+ "E_SUCCEEDED\020\003\022\032\n\026EXECUTION_STATE_FAILED\020"
- + "\004\022\035\n\031EXECUTION_STATE_CANCELLED\020\0052\332\004\n\016Met"
- + "ricsV1Beta3\022e\n\rGetJobMetrics\022-.google.da"
- + "taflow.v1beta3.GetJobMetricsRequest\032#.go"
- + "ogle.dataflow.v1beta3.JobMetrics\"\000\022\200\001\n\026G"
- + "etJobExecutionDetails\0226.google.dataflow."
- + "v1beta3.GetJobExecutionDetailsRequest\032,."
- + "google.dataflow.v1beta3.JobExecutionDeta"
- + "ils\"\000\022\206\001\n\030GetStageExecutionDetails\0228.goo"
- + "gle.dataflow.v1beta3.GetStageExecutionDe"
- + "tailsRequest\032..google.dataflow.v1beta3.S"
- + "tageExecutionDetails\"\000\032\324\001\312A\027dataflow.goo"
- + "gleapis.com\322A\266\001https://www.googleapis.co"
- + "m/auth/cloud-platform,https://www.google"
- + "apis.com/auth/compute,https://www.google"
- + "apis.com/auth/compute.readonly,https://w"
- + "ww.googleapis.com/auth/userinfo.emailB\321\001"
- + "\n\033com.google.dataflow.v1beta3B\014MetricsPr"
- + "otoP\001Z?google.golang.org/genproto/google"
- + "apis/dataflow/v1beta3;dataflow\252\002\035Google."
- + "Cloud.Dataflow.V1Beta3\312\002\035Google\\Cloud\\Da"
- + "taflow\\V1beta3\352\002 Google::Cloud::Dataflow"
- + "::V1beta3b\006proto3"
+ + "\004\022\035\n\031EXECUTION_STATE_CANCELLED\020\0052\237\007\n\016Met"
+ + "ricsV1Beta3\022\351\001\n\rGetJobMetrics\022-.google.d"
+ + "ataflow.v1beta3.GetJobMetricsRequest\032#.g"
+ + "oogle.dataflow.v1beta3.JobMetrics\"\203\001\202\323\344\223"
+ + "\002}\0221/v1b3/projects/{project_id}/jobs/{jo"
+ + "b_id}/metricsZH\022F/v1b3/projects/{project"
+ + "_id}/locations/{location}/jobs/{job_id}/"
+ + "metrics\022\327\001\n\026GetJobExecutionDetails\0226.goo"
+ + "gle.dataflow.v1beta3.GetJobExecutionDeta"
+ + "ilsRequest\032,.google.dataflow.v1beta3.Job"
+ + "ExecutionDetails\"W\202\323\344\223\002Q\022O/v1b3/projects"
+ + "/{project_id}/locations/{location}/jobs/"
+ + "{job_id}/executionDetails\022\357\001\n\030GetStageEx"
+ + "ecutionDetails\0228.google.dataflow.v1beta3"
+ + ".GetStageExecutionDetailsRequest\032..googl"
+ + "e.dataflow.v1beta3.StageExecutionDetails"
+ + "\"i\202\323\344\223\002c\022a/v1b3/projects/{project_id}/lo"
+ + "cations/{location}/jobs/{job_id}/stages/"
+ + "{stage_id}/executionDetails\032\324\001\312A\027dataflo"
+ + "w.googleapis.com\322A\266\001https://www.googleap"
+ + "is.com/auth/cloud-platform,https://www.g"
+ + "oogleapis.com/auth/compute,https://www.g"
+ + "oogleapis.com/auth/compute.readonly,http"
+ + "s://www.googleapis.com/auth/userinfo.ema"
+ + "ilB\321\001\n\033com.google.dataflow.v1beta3B\014Metr"
+ + "icsProtoP\001Z?google.golang.org/genproto/g"
+ + "oogleapis/dataflow/v1beta3;dataflow\252\002\035Go"
+ + "ogle.Cloud.Dataflow.V1Beta3\312\002\035Google\\Clo"
+ + "ud\\Dataflow\\V1beta3\352\002 Google::Cloud::Dat"
+ + "aflow::V1beta3b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
+ com.google.api.ClientProto.getDescriptor(),
com.google.protobuf.StructProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
- com.google.api.ClientProto.getDescriptor(),
});
internal_static_google_dataflow_v1beta3_MetricStructuredName_descriptor =
getDescriptor().getMessageTypes().get(0);
@@ -328,13 +336,14 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
+ registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.oauthScopes);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
+ com.google.api.ClientProto.getDescriptor();
com.google.protobuf.StructProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
- com.google.api.ClientProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironment.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironment.java
index 37111a3f..8e302621 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironment.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironment.java
@@ -515,7 +515,8 @@ public com.google.protobuf.ByteString getMachineTypeBytes() {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -529,7 +530,8 @@ public com.google.protobuf.ProtocolStringList getAdditionalExperimentsList() {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -543,7 +545,8 @@ public int getAdditionalExperimentsCount() {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -558,7 +561,8 @@ public java.lang.String getAdditionalExperiments(int index) {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2258,7 +2262,8 @@ private void ensureAdditionalExperimentsIsMutable() {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2272,7 +2277,8 @@ public com.google.protobuf.ProtocolStringList getAdditionalExperimentsList() {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2286,7 +2292,8 @@ public int getAdditionalExperimentsCount() {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2301,7 +2308,8 @@ public java.lang.String getAdditionalExperiments(int index) {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2316,7 +2324,8 @@ public com.google.protobuf.ByteString getAdditionalExperimentsBytes(int index) {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2338,7 +2347,8 @@ public Builder setAdditionalExperiments(int index, java.lang.String value) {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2359,7 +2369,8 @@ public Builder addAdditionalExperiments(java.lang.String value) {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2377,7 +2388,8 @@ public Builder addAllAdditionalExperiments(java.lang.Iterable
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -2394,7 +2406,8 @@ public Builder clearAdditionalExperiments() {
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
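The updated field comment ties additional_experiments to the --experiments pipeline option. A minimal sketch of populating the repeated field through the generated builder, using the addAdditionalExperiments/addAllAdditionalExperiments methods shown above; the experiment strings are placeholders, not real flags.

import com.google.dataflow.v1beta3.RuntimeEnvironment;
import java.util.Arrays;

public class AdditionalExperimentsExample {
  public static void main(String[] args) {
    // Placeholder experiment names; real values are whatever would otherwise
    // be passed on the command line via --experiments.
    RuntimeEnvironment environment =
        RuntimeEnvironment.newBuilder()
            .addAdditionalExperiments("some_experiment_flag")
            .addAllAdditionalExperiments(Arrays.asList("another_flag", "yet_another_flag"))
            .build();
    System.out.println(environment.getAdditionalExperimentsList());
  }
}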
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironmentOrBuilder.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironmentOrBuilder.java
index cd871e7a..a81e6964 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironmentOrBuilder.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/RuntimeEnvironmentOrBuilder.java
@@ -178,7 +178,8 @@ public interface RuntimeEnvironmentOrBuilder
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -190,7 +191,8 @@ public interface RuntimeEnvironmentOrBuilder
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -202,7 +204,8 @@ public interface RuntimeEnvironmentOrBuilder
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
@@ -215,7 +218,8 @@ public interface RuntimeEnvironmentOrBuilder
*
*
*
- * Additional experiment flags for the job.
+ * Additional experiment flags for the job, specified with the
+ * `--experiments` option.
*
*
* repeated string additional_experiments = 7;
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/Snapshot.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/Snapshot.java
index 5686e72b..7d8087ba 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/Snapshot.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/Snapshot.java
@@ -486,7 +486,7 @@ public com.google.dataflow.v1beta3.SnapshotState getState() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -500,7 +500,7 @@ public com.google.dataflow.v1beta3.SnapshotState getState() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -514,7 +514,7 @@ public com.google.dataflow.v1beta3.SnapshotState getState() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -527,7 +527,7 @@ public int getPubsubMetadataCount() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -540,7 +540,7 @@ public com.google.dataflow.v1beta3.PubsubSnapshotMetadata getPubsubMetadata(int
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -1982,7 +1982,7 @@ private void ensurePubsubMetadataIsMutable() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -1999,7 +1999,7 @@ private void ensurePubsubMetadataIsMutable() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2015,7 +2015,7 @@ public int getPubsubMetadataCount() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2031,7 +2031,7 @@ public com.google.dataflow.v1beta3.PubsubSnapshotMetadata getPubsubMetadata(int
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2054,7 +2054,7 @@ public Builder setPubsubMetadata(
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2074,7 +2074,7 @@ public Builder setPubsubMetadata(
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2096,7 +2096,7 @@ public Builder addPubsubMetadata(com.google.dataflow.v1beta3.PubsubSnapshotMetad
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2119,7 +2119,7 @@ public Builder addPubsubMetadata(
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2139,7 +2139,7 @@ public Builder addPubsubMetadata(
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2159,7 +2159,7 @@ public Builder addPubsubMetadata(
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2179,7 +2179,7 @@ public Builder addAllPubsubMetadata(
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2198,7 +2198,7 @@ public Builder clearPubsubMetadata() {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2217,7 +2217,7 @@ public Builder removePubsubMetadata(int index) {
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2230,7 +2230,7 @@ public com.google.dataflow.v1beta3.PubsubSnapshotMetadata.Builder getPubsubMetad
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2247,7 +2247,7 @@ public com.google.dataflow.v1beta3.PubsubSnapshotMetadataOrBuilder getPubsubMeta
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2264,7 +2264,7 @@ public com.google.dataflow.v1beta3.PubsubSnapshotMetadataOrBuilder getPubsubMeta
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2277,7 +2277,7 @@ public com.google.dataflow.v1beta3.PubsubSnapshotMetadata.Builder addPubsubMetad
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -2292,7 +2292,7 @@ public com.google.dataflow.v1beta3.PubsubSnapshotMetadata.Builder addPubsubMetad
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
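The Pub/Sub wording fix is comment-only; the accessors for the repeated pubsub_metadata field are unchanged. A minimal sketch of reading that field, assuming a Snapshot instance obtained elsewhere (for example from a GetSnapshot call) and the standard generated getters for PubsubSnapshotMetadata.

import com.google.dataflow.v1beta3.PubsubSnapshotMetadata;
import com.google.dataflow.v1beta3.Snapshot;

public class PubsubMetadataExample {
  static void printPubsubMetadata(Snapshot snapshot) {
    // Iterate the repeated pubsub_metadata field (field number 7).
    for (int i = 0; i < snapshot.getPubsubMetadataCount(); i++) {
      PubsubSnapshotMetadata metadata = snapshot.getPubsubMetadata(i);
      System.out.println(metadata.getTopicName() + " -> " + metadata.getSnapshotName());
    }
  }
}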
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotOrBuilder.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotOrBuilder.java
index a44559d1..567a5fb2 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotOrBuilder.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotOrBuilder.java
@@ -197,7 +197,7 @@ public interface SnapshotOrBuilder
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -207,7 +207,7 @@ public interface SnapshotOrBuilder
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -217,7 +217,7 @@ public interface SnapshotOrBuilder
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -227,7 +227,7 @@ public interface SnapshotOrBuilder
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
@@ -238,7 +238,7 @@ public interface SnapshotOrBuilder
*
*
*
- * PubSub snapshot metadata.
+ * Pub/Sub snapshot metadata.
*
*
* repeated .google.dataflow.v1beta3.PubsubSnapshotMetadata pubsub_metadata = 7;
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotsProto.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotsProto.java
index 5eacac1e..205928d5 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotsProto.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/SnapshotsProto.java
@@ -66,9 +66,9 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
java.lang.String[] descriptorData = {
"\n\'google/dataflow/v1beta3/snapshots.prot"
+ "o\022\027google.dataflow.v1beta3\032\034google/api/a"
- + "nnotations.proto\032\036google/protobuf/durati"
- + "on.proto\032\037google/protobuf/timestamp.prot"
- + "o\032\027google/api/client.proto\"t\n\026PubsubSnap"
+ + "nnotations.proto\032\027google/api/client.prot"
+ + "o\032\036google/protobuf/duration.proto\032\037googl"
+ + "e/protobuf/timestamp.proto\"t\n\026PubsubSnap"
+ "shotMetadata\022\022\n\ntopic_name\030\001 \001(\t\022\025\n\rsnap"
+ "shot_name\030\002 \001(\t\022/\n\013expire_time\030\003 \001(\0132\032.g"
+ "oogle.protobuf.Timestamp\"\333\002\n\010Snapshot\022\n\n"
@@ -91,36 +91,47 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "s\030\001 \003(\0132!.google.dataflow.v1beta3.Snapsh"
+ "ot*i\n\rSnapshotState\022\032\n\026UNKNOWN_SNAPSHOT_"
+ "STATE\020\000\022\013\n\007PENDING\020\001\022\013\n\007RUNNING\020\002\022\t\n\005REA"
- + "DY\020\003\022\n\n\006FAILED\020\004\022\013\n\007DELETED\020\0052\261\004\n\020Snapsh"
- + "otsV1Beta3\022_\n\013GetSnapshot\022+.google.dataf"
- + "low.v1beta3.GetSnapshotRequest\032!.google."
- + "dataflow.v1beta3.Snapshot\"\000\022s\n\016DeleteSna"
- + "pshot\022..google.dataflow.v1beta3.DeleteSn"
- + "apshotRequest\032/.google.dataflow.v1beta3."
- + "DeleteSnapshotResponse\"\000\022p\n\rListSnapshot"
- + "s\022-.google.dataflow.v1beta3.ListSnapshot"
- + "sRequest\032..google.dataflow.v1beta3.ListS"
- + "napshotsResponse\"\000\032\324\001\312A\027dataflow.googlea"
- + "pis.com\322A\266\001https://www.googleapis.com/au"
- + "th/cloud-platform,https://www.googleapis"
- + ".com/auth/compute,https://www.googleapis"
- + ".com/auth/compute.readonly,https://www.g"
- + "oogleapis.com/auth/userinfo.emailB\323\001\n\033co"
- + "m.google.dataflow.v1beta3B\016SnapshotsProt"
- + "oP\001Z?google.golang.org/genproto/googleap"
- + "is/dataflow/v1beta3;dataflow\252\002\035Google.Cl"
- + "oud.Dataflow.V1Beta3\312\002\035Google\\Cloud\\Data"
- + "flow\\V1beta3\352\002 Google::Cloud::Dataflow::"
- + "V1beta3b\006proto3"
+ + "DY\020\003\022\n\n\006FAILED\020\004\022\013\n\007DELETED\020\0052\357\007\n\020Snapsh"
+ + "otsV1Beta3\022\350\001\n\013GetSnapshot\022+.google.data"
+ + "flow.v1beta3.GetSnapshotRequest\032!.google"
+ + ".dataflow.v1beta3.Snapshot\"\210\001\202\323\344\223\002\201\001\0223/v"
+ + "1b3/projects/{project_id}/snapshots/{sna"
+ + "pshot_id}ZJ\022H/v1b3/projects/{project_id}"
+ + "/locations/{location}/snapshots/{snapsho"
+ + "t_id}\022\354\001\n\016DeleteSnapshot\022..google.datafl"
+ + "ow.v1beta3.DeleteSnapshotRequest\032/.googl"
+ + "e.dataflow.v1beta3.DeleteSnapshotRespons"
+ + "e\"y\202\323\344\223\002s*%/v1b3/projects/{project_id}/s"
+ + "napshotsZJ*H/v1b3/projects/{project_id}/"
+ + "locations/{location}/snapshots/{snapshot"
+ + "_id}\022\251\002\n\rListSnapshots\022-.google.dataflow"
+ + ".v1beta3.ListSnapshotsRequest\032..google.d"
+ + "ataflow.v1beta3.ListSnapshotsResponse\"\270\001"
+ + "\202\323\344\223\002\261\001\022%/v1b3/projects/{project_id}/sna"
+ + "pshotsZ<\022:/v1b3/projects/{project_id}/lo"
+ + "cations/{location}/snapshotsZJ\022H/v1b3/pr"
+ + "ojects/{project_id}/locations/{location}"
+ + "/jobs/{job_id}/snapshots\032\324\001\312A\027dataflow.g"
+ + "oogleapis.com\322A\266\001https://www.googleapis."
+ + "com/auth/cloud-platform,https://www.goog"
+ + "leapis.com/auth/compute,https://www.goog"
+ + "leapis.com/auth/compute.readonly,https:/"
+ + "/www.googleapis.com/auth/userinfo.emailB"
+ + "\323\001\n\033com.google.dataflow.v1beta3B\016Snapsho"
+ + "tsProtoP\001Z?google.golang.org/genproto/go"
+ + "ogleapis/dataflow/v1beta3;dataflow\252\002\035Goo"
+ + "gle.Cloud.Dataflow.V1Beta3\312\002\035Google\\Clou"
+ + "d\\Dataflow\\V1beta3\352\002 Google::Cloud::Data"
+ + "flow::V1beta3b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
+ com.google.api.ClientProto.getDescriptor(),
com.google.protobuf.DurationProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
- com.google.api.ClientProto.getDescriptor(),
});
internal_static_google_dataflow_v1beta3_PubsubSnapshotMetadata_descriptor =
getDescriptor().getMessageTypes().get(0);
@@ -188,13 +199,14 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
+ registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.oauthScopes);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
+ com.google.api.ClientProto.getDescriptor();
com.google.protobuf.DurationProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
- com.google.api.ClientProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/TemplatesProto.java b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/TemplatesProto.java
index 448cddbc..9e65bf89 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/TemplatesProto.java
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/TemplatesProto.java
@@ -150,10 +150,10 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
java.lang.String[] descriptorData = {
"\n\'google/dataflow/v1beta3/templates.prot"
+ "o\022\027google.dataflow.v1beta3\032\034google/api/a"
- + "nnotations.proto\032)google/dataflow/v1beta"
- + "3/environment.proto\032\"google/dataflow/v1b"
- + "eta3/jobs.proto\032\027google/rpc/status.proto"
- + "\032\027google/api/client.proto\"G\n\032LaunchFlexT"
+ + "nnotations.proto\032\027google/api/client.prot"
+ + "o\032)google/dataflow/v1beta3/environment.p"
+ + "roto\032\"google/dataflow/v1beta3/jobs.proto"
+ + "\032\027google/rpc/status.proto\"G\n\032LaunchFlexT"
+ "emplateResponse\022)\n\003job\030\001 \001(\0132\034.google.da"
+ "taflow.v1beta3.Job\"\345\001\n\rContainerSpec\022\r\n\005"
+ "image\030\001 \001(\t\022;\n\010metadata\030\002 \001(\0132).google.d"
@@ -179,7 +179,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "\001(\t:\0028\001\0324\n\022LaunchOptionsEntry\022\013\n\003key\030\001 \001"
+ "(\t\022\r\n\005value\030\002 \001(\t:\0028\001\032<\n\032TransformNameMa"
+ "ppingsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:"
- + "\0028\001B\n\n\010template\"\316\005\n\036FlexTemplateRuntimeE"
+ + "\0028\001B\n\n\010template\"\220\007\n\036FlexTemplateRuntimeE"
+ "nvironment\022\023\n\013num_workers\030\001 \001(\005\022\023\n\013max_w"
+ "orkers\030\002 \001(\005\022\014\n\004zone\030\003 \001(\t\022\035\n\025service_ac"
+ "count_email\030\004 \001(\t\022\025\n\rtemp_location\030\005 \001(\t"
@@ -195,138 +195,154 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "ble_streaming_engine\030\017 \001(\010\022H\n\013flexrs_goa"
+ "l\030\020 \001(\01623.google.dataflow.v1beta3.FlexRe"
+ "sourceSchedulingGoal\022\030\n\020staging_location"
- + "\030\021 \001(\t\022\033\n\023sdk_container_image\030\022 \001(\t\032;\n\031A"
- + "dditionalUserLabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n"
- + "\005value\030\002 \001(\t:\0028\001\"\250\001\n\031LaunchFlexTemplateR"
- + "equest\022\022\n\nproject_id\030\001 \001(\t\022N\n\020launch_par"
- + "ameter\030\002 \001(\01324.google.dataflow.v1beta3.L"
- + "aunchFlexTemplateParameter\022\020\n\010location\030\003"
- + " \001(\t\022\025\n\rvalidate_only\030\004 \001(\010\"\331\004\n\022RuntimeE"
- + "nvironment\022\023\n\013num_workers\030\013 \001(\005\022\023\n\013max_w"
- + "orkers\030\001 \001(\005\022\014\n\004zone\030\002 \001(\t\022\035\n\025service_ac"
- + "count_email\030\003 \001(\t\022\025\n\rtemp_location\030\004 \001(\t"
- + "\022\"\n\032bypass_temp_dir_validation\030\005 \001(\010\022\024\n\014"
- + "machine_type\030\006 \001(\t\022\036\n\026additional_experim"
- + "ents\030\007 \003(\t\022\017\n\007network\030\010 \001(\t\022\022\n\nsubnetwor"
- + "k\030\t \001(\t\022e\n\026additional_user_labels\030\n \003(\0132"
- + "E.google.dataflow.v1beta3.RuntimeEnviron"
- + "ment.AdditionalUserLabelsEntry\022\024\n\014kms_ke"
- + "y_name\030\014 \001(\t\022O\n\020ip_configuration\030\016 \001(\01625"
- + ".google.dataflow.v1beta3.WorkerIPAddress"
- + "Configuration\022\025\n\rworker_region\030\017 \001(\t\022\023\n\013"
- + "worker_zone\030\020 \001(\t\022\037\n\027enable_streaming_en"
- + "gine\030\021 \001(\010\032;\n\031AdditionalUserLabelsEntry\022"
- + "\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\265\002\n\021Para"
- + "meterMetadata\022\014\n\004name\030\001 \001(\t\022\r\n\005label\030\002 \001"
- + "(\t\022\021\n\thelp_text\030\003 \001(\t\022\023\n\013is_optional\030\004 \001"
- + "(\010\022\017\n\007regexes\030\005 \003(\t\022:\n\nparam_type\030\006 \001(\0162"
- + "&.google.dataflow.v1beta3.ParameterType\022"
- + "W\n\017custom_metadata\030\007 \003(\0132>.google.datafl"
- + "ow.v1beta3.ParameterMetadata.CustomMetad"
- + "ataEntry\0325\n\023CustomMetadataEntry\022\013\n\003key\030\001"
- + " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"u\n\020TemplateMetad"
- + "ata\022\014\n\004name\030\001 \001(\t\022\023\n\013description\030\002 \001(\t\022>"
- + "\n\nparameters\030\003 \003(\0132*.google.dataflow.v1b"
- + "eta3.ParameterMetadata\"\206\001\n\007SDKInfo\022;\n\010la"
- + "nguage\030\001 \001(\0162).google.dataflow.v1beta3.S"
- + "DKInfo.Language\022\017\n\007version\030\002 \001(\t\"-\n\010Lang"
- + "uage\022\013\n\007UNKNOWN\020\000\022\010\n\004JAVA\020\001\022\n\n\006PYTHON\020\002\""
- + "\205\001\n\017RuntimeMetadata\0222\n\010sdk_info\030\001 \001(\0132 ."
- + "google.dataflow.v1beta3.SDKInfo\022>\n\nparam"
- + "eters\030\002 \003(\0132*.google.dataflow.v1beta3.Pa"
- + "rameterMetadata\"\306\002\n\034CreateJobFromTemplat"
- + "eRequest\022\022\n\nproject_id\030\001 \001(\t\022\020\n\010job_name"
- + "\030\004 \001(\t\022\022\n\010gcs_path\030\002 \001(\tH\000\022Y\n\nparameters"
- + "\030\003 \003(\0132E.google.dataflow.v1beta3.CreateJ"
- + "obFromTemplateRequest.ParametersEntry\022@\n"
- + "\013environment\030\005 \001(\0132+.google.dataflow.v1b"
- + "eta3.RuntimeEnvironment\022\020\n\010location\030\006 \001("
- + "\t\0321\n\017ParametersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005val"
- + "ue\030\002 \001(\t:\0028\001B\n\n\010template\"\305\001\n\022GetTemplate"
- + "Request\022\022\n\nproject_id\030\001 \001(\t\022\022\n\010gcs_path\030"
- + "\002 \001(\tH\000\022F\n\004view\030\003 \001(\01628.google.dataflow."
- + "v1beta3.GetTemplateRequest.TemplateView\022"
- + "\020\n\010location\030\004 \001(\t\"!\n\014TemplateView\022\021\n\rMET"
- + "ADATA_ONLY\020\000B\n\n\010template\"\277\002\n\023GetTemplate"
- + "Response\022\"\n\006status\030\001 \001(\0132\022.google.rpc.St"
- + "atus\022;\n\010metadata\030\002 \001(\0132).google.dataflow"
- + ".v1beta3.TemplateMetadata\022P\n\rtemplate_ty"
- + "pe\030\003 \001(\01629.google.dataflow.v1beta3.GetTe"
- + "mplateResponse.TemplateType\022B\n\020runtime_m"
- + "etadata\030\004 \001(\0132(.google.dataflow.v1beta3."
- + "RuntimeMetadata\"1\n\014TemplateType\022\013\n\007UNKNO"
- + "WN\020\000\022\n\n\006LEGACY\020\001\022\010\n\004FLEX\020\002\"\262\003\n\030LaunchTem"
- + "plateParameters\022\020\n\010job_name\030\001 \001(\t\022U\n\npar"
- + "ameters\030\002 \003(\0132A.google.dataflow.v1beta3."
- + "LaunchTemplateParameters.ParametersEntry"
- + "\022@\n\013environment\030\003 \001(\0132+.google.dataflow."
- + "v1beta3.RuntimeEnvironment\022\016\n\006update\030\004 \001"
- + "(\010\022k\n\026transform_name_mapping\030\005 \003(\0132K.goo"
- + "gle.dataflow.v1beta3.LaunchTemplateParam"
- + "eters.TransformNameMappingEntry\0321\n\017Param"
- + "etersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\002"
- + "8\001\032;\n\031TransformNameMappingEntry\022\013\n\003key\030\001"
- + " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\224\002\n\025LaunchTempla"
- + "teRequest\022\022\n\nproject_id\030\001 \001(\t\022\025\n\rvalidat"
- + "e_only\030\002 \001(\010\022\022\n\010gcs_path\030\003 \001(\tH\000\022P\n\020dyna"
- + "mic_template\030\006 \001(\01324.google.dataflow.v1b"
- + "eta3.DynamicTemplateLaunchParamsH\000\022L\n\021la"
- + "unch_parameters\030\004 \001(\01321.google.dataflow."
- + "v1beta3.LaunchTemplateParameters\022\020\n\010loca"
- + "tion\030\005 \001(\tB\n\n\010template\"C\n\026LaunchTemplate"
- + "Response\022)\n\003job\030\001 \001(\0132\034.google.dataflow."
- + "v1beta3.Job\"\276\001\n\031InvalidTemplateParameter"
- + "s\022c\n\024parameter_violations\030\001 \003(\0132E.google"
- + ".dataflow.v1beta3.InvalidTemplateParamet"
- + "ers.ParameterViolation\032<\n\022ParameterViola"
- + "tion\022\021\n\tparameter\030\001 \001(\t\022\023\n\013description\030\002"
- + " \001(\t\"I\n\033DynamicTemplateLaunchParams\022\020\n\010g"
- + "cs_path\030\001 \001(\t\022\030\n\020staging_location\030\002 \001(\t*"
- + "\316\001\n\rParameterType\022\013\n\007DEFAULT\020\000\022\010\n\004TEXT\020\001"
- + "\022\023\n\017GCS_READ_BUCKET\020\002\022\024\n\020GCS_WRITE_BUCKE"
- + "T\020\003\022\021\n\rGCS_READ_FILE\020\004\022\022\n\016GCS_WRITE_FILE"
- + "\020\005\022\023\n\017GCS_READ_FOLDER\020\006\022\024\n\020GCS_WRITE_FOL"
- + "DER\020\007\022\020\n\014PUBSUB_TOPIC\020\010\022\027\n\023PUBSUB_SUBSCR"
- + "IPTION\020\t2\272\004\n\020TemplatesService\022n\n\025CreateJ"
- + "obFromTemplate\0225.google.dataflow.v1beta3"
- + ".CreateJobFromTemplateRequest\032\034.google.d"
- + "ataflow.v1beta3.Job\"\000\022s\n\016LaunchTemplate\022"
- + "..google.dataflow.v1beta3.LaunchTemplate"
- + "Request\032/.google.dataflow.v1beta3.Launch"
- + "TemplateResponse\"\000\022j\n\013GetTemplate\022+.goog"
- + "le.dataflow.v1beta3.GetTemplateRequest\032,"
- + ".google.dataflow.v1beta3.GetTemplateResp"
- + "onse\"\000\032\324\001\312A\027dataflow.googleapis.com\322A\266\001h"
- + "ttps://www.googleapis.com/auth/cloud-pla"
- + "tform,https://www.googleapis.com/auth/co"
- + "mpute,https://www.googleapis.com/auth/co"
- + "mpute.readonly,https://www.googleapis.co"
- + "m/auth/userinfo.email2\356\002\n\024FlexTemplatesS"
- + "ervice\022\177\n\022LaunchFlexTemplate\0222.google.da"
- + "taflow.v1beta3.LaunchFlexTemplateRequest"
- + "\0323.google.dataflow.v1beta3.LaunchFlexTem"
- + "plateResponse\"\000\032\324\001\312A\027dataflow.googleapis"
- + ".com\322A\266\001https://www.googleapis.com/auth/"
- + "cloud-platform,https://www.googleapis.co"
- + "m/auth/compute,https://www.googleapis.co"
- + "m/auth/compute.readonly,https://www.goog"
- + "leapis.com/auth/userinfo.emailB\323\001\n\033com.g"
- + "oogle.dataflow.v1beta3B\016TemplatesProtoP\001"
- + "Z?google.golang.org/genproto/googleapis/"
- + "dataflow/v1beta3;dataflow\252\002\035Google.Cloud"
- + ".Dataflow.V1Beta3\312\002\035Google\\Cloud\\Dataflo"
- + "w\\V1beta3\352\002 Google::Cloud::Dataflow::V1b"
- + "eta3b\006proto3"
+ + "\030\021 \001(\t\022\033\n\023sdk_container_image\030\022 \001(\t\022\024\n\014d"
+ + "isk_size_gb\030\024 \001(\005\022L\n\025autoscaling_algorit"
+ + "hm\030\025 \001(\0162-.google.dataflow.v1beta3.Autos"
+ + "calingAlgorithm\022\030\n\020dump_heap_on_oom\030\026 \001("
+ + "\010\022#\n\033save_heap_dumps_to_gcs_path\030\027 \001(\t\022\035"
+ + "\n\025launcher_machine_type\030\030 \001(\t\032;\n\031Additio"
+ + "nalUserLabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value"
+ + "\030\002 \001(\t:\0028\001\"\250\001\n\031LaunchFlexTemplateRequest"
+ + "\022\022\n\nproject_id\030\001 \001(\t\022N\n\020launch_parameter"
+ + "\030\002 \001(\01324.google.dataflow.v1beta3.LaunchF"
+ + "lexTemplateParameter\022\020\n\010location\030\003 \001(\t\022\025"
+ + "\n\rvalidate_only\030\004 \001(\010\"\331\004\n\022RuntimeEnviron"
+ + "ment\022\023\n\013num_workers\030\013 \001(\005\022\023\n\013max_workers"
+ + "\030\001 \001(\005\022\014\n\004zone\030\002 \001(\t\022\035\n\025service_account_"
+ + "email\030\003 \001(\t\022\025\n\rtemp_location\030\004 \001(\t\022\"\n\032by"
+ + "pass_temp_dir_validation\030\005 \001(\010\022\024\n\014machin"
+ + "e_type\030\006 \001(\t\022\036\n\026additional_experiments\030\007"
+ + " \003(\t\022\017\n\007network\030\010 \001(\t\022\022\n\nsubnetwork\030\t \001("
+ + "\t\022e\n\026additional_user_labels\030\n \003(\0132E.goog"
+ + "le.dataflow.v1beta3.RuntimeEnvironment.A"
+ + "dditionalUserLabelsEntry\022\024\n\014kms_key_name"
+ + "\030\014 \001(\t\022O\n\020ip_configuration\030\016 \001(\01625.googl"
+ + "e.dataflow.v1beta3.WorkerIPAddressConfig"
+ + "uration\022\025\n\rworker_region\030\017 \001(\t\022\023\n\013worker"
+ + "_zone\030\020 \001(\t\022\037\n\027enable_streaming_engine\030\021"
+ + " \001(\010\032;\n\031AdditionalUserLabelsEntry\022\013\n\003key"
+ + "\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\265\002\n\021ParameterM"
+ + "etadata\022\014\n\004name\030\001 \001(\t\022\r\n\005label\030\002 \001(\t\022\021\n\t"
+ + "help_text\030\003 \001(\t\022\023\n\013is_optional\030\004 \001(\010\022\017\n\007"
+ + "regexes\030\005 \003(\t\022:\n\nparam_type\030\006 \001(\0162&.goog"
+ + "le.dataflow.v1beta3.ParameterType\022W\n\017cus"
+ + "tom_metadata\030\007 \003(\0132>.google.dataflow.v1b"
+ + "eta3.ParameterMetadata.CustomMetadataEnt"
+ + "ry\0325\n\023CustomMetadataEntry\022\013\n\003key\030\001 \001(\t\022\r"
+ + "\n\005value\030\002 \001(\t:\0028\001\"u\n\020TemplateMetadata\022\014\n"
+ + "\004name\030\001 \001(\t\022\023\n\013description\030\002 \001(\t\022>\n\npara"
+ + "meters\030\003 \003(\0132*.google.dataflow.v1beta3.P"
+ + "arameterMetadata\"\206\001\n\007SDKInfo\022;\n\010language"
+ + "\030\001 \001(\0162).google.dataflow.v1beta3.SDKInfo"
+ + ".Language\022\017\n\007version\030\002 \001(\t\"-\n\010Language\022\013"
+ + "\n\007UNKNOWN\020\000\022\010\n\004JAVA\020\001\022\n\n\006PYTHON\020\002\"\205\001\n\017Ru"
+ + "ntimeMetadata\0222\n\010sdk_info\030\001 \001(\0132 .google"
+ + ".dataflow.v1beta3.SDKInfo\022>\n\nparameters\030"
+ + "\002 \003(\0132*.google.dataflow.v1beta3.Paramete"
+ + "rMetadata\"\306\002\n\034CreateJobFromTemplateReque"
+ + "st\022\022\n\nproject_id\030\001 \001(\t\022\020\n\010job_name\030\004 \001(\t"
+ + "\022\022\n\010gcs_path\030\002 \001(\tH\000\022Y\n\nparameters\030\003 \003(\013"
+ + "2E.google.dataflow.v1beta3.CreateJobFrom"
+ + "TemplateRequest.ParametersEntry\022@\n\013envir"
+ + "onment\030\005 \001(\0132+.google.dataflow.v1beta3.R"
+ + "untimeEnvironment\022\020\n\010location\030\006 \001(\t\0321\n\017P"
+ + "arametersEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001"
+ + "(\t:\0028\001B\n\n\010template\"\305\001\n\022GetTemplateReques"
+ + "t\022\022\n\nproject_id\030\001 \001(\t\022\022\n\010gcs_path\030\002 \001(\tH"
+ + "\000\022F\n\004view\030\003 \001(\01628.google.dataflow.v1beta"
+ + "3.GetTemplateRequest.TemplateView\022\020\n\010loc"
+ + "ation\030\004 \001(\t\"!\n\014TemplateView\022\021\n\rMETADATA_"
+ + "ONLY\020\000B\n\n\010template\"\277\002\n\023GetTemplateRespon"
+ + "se\022\"\n\006status\030\001 \001(\0132\022.google.rpc.Status\022;"
+ + "\n\010metadata\030\002 \001(\0132).google.dataflow.v1bet"
+ + "a3.TemplateMetadata\022P\n\rtemplate_type\030\003 \001"
+ + "(\01629.google.dataflow.v1beta3.GetTemplate"
+ + "Response.TemplateType\022B\n\020runtime_metadat"
+ + "a\030\004 \001(\0132(.google.dataflow.v1beta3.Runtim"
+ + "eMetadata\"1\n\014TemplateType\022\013\n\007UNKNOWN\020\000\022\n"
+ + "\n\006LEGACY\020\001\022\010\n\004FLEX\020\002\"\262\003\n\030LaunchTemplateP"
+ + "arameters\022\020\n\010job_name\030\001 \001(\t\022U\n\nparameter"
+ + "s\030\002 \003(\0132A.google.dataflow.v1beta3.Launch"
+ + "TemplateParameters.ParametersEntry\022@\n\013en"
+ + "vironment\030\003 \001(\0132+.google.dataflow.v1beta"
+ + "3.RuntimeEnvironment\022\016\n\006update\030\004 \001(\010\022k\n\026"
+ + "transform_name_mapping\030\005 \003(\0132K.google.da"
+ + "taflow.v1beta3.LaunchTemplateParameters."
+ + "TransformNameMappingEntry\0321\n\017ParametersE"
+ + "ntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\032;\n\031"
+ + "TransformNameMappingEntry\022\013\n\003key\030\001 \001(\t\022\r"
+ + "\n\005value\030\002 \001(\t:\0028\001\"\224\002\n\025LaunchTemplateRequ"
+ + "est\022\022\n\nproject_id\030\001 \001(\t\022\025\n\rvalidate_only"
+ + "\030\002 \001(\010\022\022\n\010gcs_path\030\003 \001(\tH\000\022P\n\020dynamic_te"
+ + "mplate\030\006 \001(\01324.google.dataflow.v1beta3.D"
+ + "ynamicTemplateLaunchParamsH\000\022L\n\021launch_p"
+ + "arameters\030\004 \001(\01321.google.dataflow.v1beta"
+ + "3.LaunchTemplateParameters\022\020\n\010location\030\005"
+ + " \001(\tB\n\n\010template\"C\n\026LaunchTemplateRespon"
+ + "se\022)\n\003job\030\001 \001(\0132\034.google.dataflow.v1beta"
+ + "3.Job\"\276\001\n\031InvalidTemplateParameters\022c\n\024p"
+ + "arameter_violations\030\001 \003(\0132E.google.dataf"
+ + "low.v1beta3.InvalidTemplateParameters.Pa"
+ + "rameterViolation\032<\n\022ParameterViolation\022\021"
+ + "\n\tparameter\030\001 \001(\t\022\023\n\013description\030\002 \001(\t\"I"
+ + "\n\033DynamicTemplateLaunchParams\022\020\n\010gcs_pat"
+ + "h\030\001 \001(\t\022\030\n\020staging_location\030\002 \001(\t*\316\001\n\rPa"
+ + "rameterType\022\013\n\007DEFAULT\020\000\022\010\n\004TEXT\020\001\022\023\n\017GC"
+ + "S_READ_BUCKET\020\002\022\024\n\020GCS_WRITE_BUCKET\020\003\022\021\n"
+ + "\rGCS_READ_FILE\020\004\022\022\n\016GCS_WRITE_FILE\020\005\022\023\n\017"
+ + "GCS_READ_FOLDER\020\006\022\024\n\020GCS_WRITE_FOLDER\020\007\022"
+ + "\020\n\014PUBSUB_TOPIC\020\010\022\027\n\023PUBSUB_SUBSCRIPTION"
+ + "\020\t2\302\007\n\020TemplatesService\022\337\001\n\025CreateJobFro"
+ + "mTemplate\0225.google.dataflow.v1beta3.Crea"
+ + "teJobFromTemplateRequest\032\034.google.datafl"
+ + "ow.v1beta3.Job\"q\202\323\344\223\002k\"%/v1b3/projects/{"
+ + "project_id}/templates:\001*Z?\":/v1b3/projec"
+ + "ts/{project_id}/locations/{location}/tem"
+ + "plates:\001*\022\224\002\n\016LaunchTemplate\022..google.da"
+ + "taflow.v1beta3.LaunchTemplateRequest\032/.g"
+ + "oogle.dataflow.v1beta3.LaunchTemplateRes"
+ + "ponse\"\240\001\202\323\344\223\002\231\001\",/v1b3/projects/{project"
+ + "_id}/templates:launch:\021launch_parameters"
+ + "ZV\"A/v1b3/projects/{project_id}/location"
+ + "s/{location}/templates:launch:\021launch_pa"
+ + "rameters\022\335\001\n\013GetTemplate\022+.google.datafl"
+ + "ow.v1beta3.GetTemplateRequest\032,.google.d"
+ + "ataflow.v1beta3.GetTemplateResponse\"s\202\323\344"
+ + "\223\002m\022)/v1b3/projects/{project_id}/templat"
+ + "es:getZ@\022>/v1b3/projects/{project_id}/lo"
+ + "cations/{location}/templates:get\032\324\001\312A\027da"
+ + "taflow.googleapis.com\322A\266\001https://www.goo"
+ + "gleapis.com/auth/cloud-platform,https://"
+ + "www.googleapis.com/auth/compute,https://"
+ + "www.googleapis.com/auth/compute.readonly"
+ + ",https://www.googleapis.com/auth/userinf"
+ + "o.email2\277\003\n\024FlexTemplatesService\022\317\001\n\022Lau"
+ + "nchFlexTemplate\0222.google.dataflow.v1beta"
+ + "3.LaunchFlexTemplateRequest\0323.google.dat"
+ + "aflow.v1beta3.LaunchFlexTemplateResponse"
+ + "\"P\202\323\344\223\002J\"E/v1b3/projects/{project_id}/lo"
+ + "cations/{location}/flexTemplates:launch:"
+ + "\001*\032\324\001\312A\027dataflow.googleapis.com\322A\266\001https"
+ + "://www.googleapis.com/auth/cloud-platfor"
+ + "m,https://www.googleapis.com/auth/comput"
+ + "e,https://www.googleapis.com/auth/comput"
+ + "e.readonly,https://www.googleapis.com/au"
+ + "th/userinfo.emailB\323\001\n\033com.google.dataflo"
+ + "w.v1beta3B\016TemplatesProtoP\001Z?google.gola"
+ + "ng.org/genproto/googleapis/dataflow/v1be"
+ + "ta3;dataflow\252\002\035Google.Cloud.Dataflow.V1B"
+ + "eta3\312\002\035Google\\Cloud\\Dataflow\\V1beta3\352\002 G"
+ + "oogle::Cloud::Dataflow::V1beta3b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
+ com.google.api.ClientProto.getDescriptor(),
com.google.dataflow.v1beta3.EnvironmentProto.getDescriptor(),
com.google.dataflow.v1beta3.JobsProto.getDescriptor(),
com.google.rpc.StatusProto.getDescriptor(),
- com.google.api.ClientProto.getDescriptor(),
});
internal_static_google_dataflow_v1beta3_LaunchFlexTemplateResponse_descriptor =
getDescriptor().getMessageTypes().get(0);
@@ -414,6 +430,11 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"FlexrsGoal",
"StagingLocation",
"SdkContainerImage",
+ "DiskSizeGb",
+ "AutoscalingAlgorithm",
+ "DumpHeapOnOom",
+ "SaveHeapDumpsToGcsPath",
+ "LauncherMachineType",
});
internal_static_google_dataflow_v1beta3_FlexTemplateRuntimeEnvironment_AdditionalUserLabelsEntry_descriptor =
internal_static_google_dataflow_v1beta3_FlexTemplateRuntimeEnvironment_descriptor
@@ -627,14 +648,15 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
+ registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.oauthScopes);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
+ com.google.api.ClientProto.getDescriptor();
com.google.dataflow.v1beta3.EnvironmentProto.getDescriptor();
com.google.dataflow.v1beta3.JobsProto.getDescriptor();
com.google.rpc.StatusProto.getDescriptor();
- com.google.api.ClientProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
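Besides the reordered imports and the new HTTP bindings, the descriptor above adds five fields to FlexTemplateRuntimeEnvironment (DiskSizeGb, AutoscalingAlgorithm, DumpHeapOnOom, SaveHeapDumpsToGcsPath, LauncherMachineType). A minimal sketch of setting them through the generated builder; the setter names follow standard protobuf Java generation, and the concrete values, including the AUTOSCALING_ALGORITHM_BASIC enum constant, are illustrative assumptions.

import com.google.dataflow.v1beta3.AutoscalingAlgorithm;
import com.google.dataflow.v1beta3.FlexTemplateRuntimeEnvironment;

public class FlexEnvironmentExample {
  public static void main(String[] args) {
    // Placeholder values for the fields added in this change.
    FlexTemplateRuntimeEnvironment environment =
        FlexTemplateRuntimeEnvironment.newBuilder()
            .setDiskSizeGb(100)
            .setAutoscalingAlgorithm(AutoscalingAlgorithm.AUTOSCALING_ALGORITHM_BASIC)
            .setDumpHeapOnOom(true)
            .setSaveHeapDumpsToGcsPath("gs://example-bucket/heap-dumps")
            .setLauncherMachineType("n1-standard-1")
            .build();
    System.out.println(environment);
  }
}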
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/environment.proto b/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/environment.proto
index 914a87f0..26487a52 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/environment.proto
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/environment.proto
@@ -1,4 +1,4 @@
-// Copyright 2021 Google LLC
+// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
diff --git a/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/jobs.proto b/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/jobs.proto
index 8d6c49a9..17df58bf 100644
--- a/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/jobs.proto
+++ b/proto-google-cloud-dataflow-v1beta3/src/main/proto/google/dataflow/v1beta3/jobs.proto
@@ -1,4 +1,4 @@
-// Copyright 2021 Google LLC
+// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -17,12 +17,12 @@ syntax = "proto3";
package google.dataflow.v1beta3;
import "google/api/annotations.proto";
+import "google/api/client.proto";
import "google/dataflow/v1beta3/environment.proto";
import "google/dataflow/v1beta3/snapshots.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
-import "google/api/client.proto";
option csharp_namespace = "Google.Cloud.Dataflow.V1Beta3";
option go_package = "google.golang.org/genproto/googleapis/dataflow/v1beta3;dataflow";
@@ -50,6 +50,14 @@ service JobsV1Beta3 {
// `projects.jobs.create` is not recommended, as your job will always start
// in `us-central1`.
rpc CreateJob(CreateJobRequest) returns (Job) {
+ option (google.api.http) = {
+ post: "/v1b3/projects/{project_id}/jobs"
+ body: "job"
+ additional_bindings {
+ post: "/v1b3/projects/{project_id}/locations/{location}/jobs"
+ body: "job"
+ }
+ };
}
// Gets the state of the specified Cloud Dataflow job.
@@ -60,6 +68,12 @@ service JobsV1Beta3 {
// `projects.jobs.get` is not recommended, as you can only get the state of
// jobs that are running in `us-central1`.
rpc GetJob(GetJobRequest) returns (Job) {
+ option (google.api.http) = {
+ get: "/v1b3/projects/{project_id}/jobs/{job_id}"
+ additional_bindings {
+ get: "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}"
+ }
+ };
}
// Updates the state of an existing Cloud Dataflow job.
@@ -70,6 +84,14 @@ service JobsV1Beta3 {
// `projects.jobs.update` is not recommended, as you can only update the state
// of jobs that are running in `us-central1`.
rpc UpdateJob(UpdateJobRequest) returns (Job) {
+ option (google.api.http) = {
+ put: "/v1b3/projects/{project_id}/jobs/{job_id}"
+ body: "job"
+ additional_bindings {
+ put: "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}"
+ body: "job"
+ }
+ };
}
// List the jobs of a project.
@@ -81,10 +103,19 @@ service JobsV1Beta3 {
// `projects.jobs.list` is not recommended, as you can only get the list of
// jobs that are running in `us-central1`.
rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) {
+ option (google.api.http) = {
+ get: "/v1b3/projects/{project_id}/jobs"
+ additional_bindings {
+ get: "/v1b3/projects/{project_id}/locations/{location}/jobs"
+ }
+ };
}
// List the jobs of a project across all regions.
rpc AggregatedListJobs(ListJobsRequest) returns (ListJobsResponse) {
+ option (google.api.http) = {
+ get: "/v1b3/projects/{project_id}/jobs:aggregated"
+ };
}
// Check for existence of active jobs in the given project across all regions.
@@ -93,6 +124,14 @@ service JobsV1Beta3 {
// Snapshot the state of a streaming job.
rpc SnapshotJob(SnapshotJobRequest) returns (Snapshot) {
+ option (google.api.http) = {
+ post: "/v1b3/projects/{project_id}/jobs/{job_id}:snapshot"
+ body: "*"
+ additional_bindings {
+ post: "/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}:snapshot"
+ body: "*"
+ }
+ };
}
}
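Each RPC above now carries a google.api.http binding, so the request fields line up with the path variables in those URLs. A minimal sketch for SnapshotJob, assuming the SnapshotJobRequest setters mirror the {project_id}, {location} and {job_id} variables in the binding; all values are placeholders.

import com.google.dataflow.v1beta3.SnapshotJobRequest;

public class SnapshotRequestExample {
  public static void main(String[] args) {
    // Field values correspond to the path variables in
    // POST /v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}:snapshot
    SnapshotJobRequest request =
        SnapshotJobRequest.newBuilder()
            .setProjectId("my-project")
            .setLocation("us-central1")
            .setJobId("2022-01-01_00_00_00-1234567890")
            .build();
    System.out.println(request);
  }
}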
@@ -277,7 +316,7 @@ message FileIODetails {
string file_pattern = 1;
}
-// Metadata for a Cloud BigTable connector used by the job.
+// Metadata for a Cloud Bigtable connector used by the job.
message BigTableIODetails {
// ProjectId accessed in the connection.
string project_id = 1;
@@ -359,10 +398,10 @@ message JobMetadata {
// Identification of a BigQuery source used in the Dataflow job.
repeated BigQueryIODetails bigquery_details = 3;
- // Identification of a Cloud BigTable source used in the Dataflow job.
+ // Identification of a Cloud Bigtable source used in the Dataflow job.
repeated BigTableIODetails big_table_details = 4;
- // Identification of a PubSub source used in the Dataflow job.
+ // Identification of a Pub/Sub source used in the Dataflow job.
repeated PubSubIODetails pubsub_details = 5;
// Identification of a File source used in the Dataflow job.
@@ -398,27 +437,6 @@ message PipelineDescription {
repeated DisplayData display_data = 3;
}
-// Description of the type, names/ids, and input/outputs for a transform.
-message TransformSummary {
- // Type of transform.
- KindType kind = 1;
-
- // SDK generated id of this transform instance.
- string id = 2;
-
- // User provided name for this transform instance.
- string name = 3;
-
- // Transform-specific display data.
- repeated DisplayData display_data = 4;
-
- // User names for all collection outputs to this transform.
- repeated string output_collection_name = 5;
-
- // User names for all collection inputs to this transform.
- repeated string input_collection_name = 6;
-}
-
// Type of transform or stage operation.
enum KindType {
// Unrecognized transform type.
@@ -449,6 +467,27 @@ enum KindType {
SHUFFLE_KIND = 8;
}
+// Description of the type, names/ids, and input/outputs for a transform.
+message TransformSummary {
+ // Type of transform.
+ KindType kind = 1;
+
+ // SDK generated id of this transform instance.
+ string id = 2;
+
+ // User provided name for this transform instance.
+ string name = 3;
+
+ // Transform-specific display data.
+ repeated DisplayData display_data = 4;
+
+ // User names for all collection outputs to this transform.
+ repeated string output_collection_name = 5;
+
+ // User names for all collection inputs to this transform.
+ repeated string input_collection_name = 6;
+}
+
// Description of the composing transforms, names/ids, and input/outputs of a
// stage of execution. Some composing transforms and sources may have been
// generated by the Dataflow service during execution planning.
@@ -613,22 +652,6 @@ message Step {
google.protobuf.Struct properties = 3;
}
-// Additional information about how a Cloud Dataflow job will be executed that
-// isn't contained in the submitted job.
-message JobExecutionInfo {
- // A mapping from each stage to the information about that stage.
- map