diff --git a/generated/google-apis-cloudasset_v1/lib/google/apis/cloudasset_v1/classes.rb b/generated/google-apis-cloudasset_v1/lib/google/apis/cloudasset_v1/classes.rb
index 5e03683c7ea..25c609a7bff 100644
--- a/generated/google-apis-cloudasset_v1/lib/google/apis/cloudasset_v1/classes.rb
+++ b/generated/google-apis-cloudasset_v1/lib/google/apis/cloudasset_v1/classes.rb
@@ -1355,7 +1355,7 @@ def update!(**args)
       class GcsDestination
         include Google::Apis::Core::Hashable
 
-        # The URI of the Cloud Storage object. It's the same URI that is used by gsutil.
+        # The URI of the Cloud Storage object. It's the same URI that is used by gcloud storage.
         # Example: "gs://bucket_name/object_name". See [Viewing and Editing Object
         # Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata) for
         # more information. If the specified Cloud Storage object already exists and
diff --git a/generated/google-apis-cloudasset_v1beta1/lib/google/apis/cloudasset_v1beta1/classes.rb b/generated/google-apis-cloudasset_v1beta1/lib/google/apis/cloudasset_v1beta1/classes.rb
index ed0713e5378..d967f854b3d 100644
--- a/generated/google-apis-cloudasset_v1beta1/lib/google/apis/cloudasset_v1beta1/classes.rb
+++ b/generated/google-apis-cloudasset_v1beta1/lib/google/apis/cloudasset_v1beta1/classes.rb
@@ -464,7 +464,7 @@ def update!(**args)
       class GcsDestination
         include Google::Apis::Core::Hashable
 
-        # The URI of the Cloud Storage object. It's the same URI that is used by gsutil.
+        # The URI of the Cloud Storage object. It's the same URI that is used by gcloud storage.
         # For example: "gs://bucket_name/object_name". See [Viewing and Editing Object
         # Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata) for
         # more information.
diff --git a/generated/google-apis-genomics_v1alpha2/lib/google/apis/genomics_v1alpha2/classes.rb b/generated/google-apis-genomics_v1alpha2/lib/google/apis/genomics_v1alpha2/classes.rb
index 4f194b57ec5..f8a140cfb4c 100644
--- a/generated/google-apis-genomics_v1alpha2/lib/google/apis/genomics_v1alpha2/classes.rb
+++ b/generated/google-apis-genomics_v1alpha2/lib/google/apis/genomics_v1alpha2/classes.rb
@@ -787,18 +787,18 @@ def update!(**args)
       # otherwise. The pipeline runner should add a key/value pair to either the
       # inputs or outputs map. The indicated data copies will be carried out before/
       # after pipeline execution, just as if the corresponding arguments were provided
-      # to `gsutil cp`. For example: Given the following `PipelineParameter`,
+      # to `gcloud storage cp`. For example: Given the following `PipelineParameter`,
       # specified in the `inputParameters` list: ``` `name: "input_file", localCopy: `
       # path: "file.txt", disk: "pd1"`` ``` where `disk` is defined in the `
       # PipelineResources` object as: ``` `name: "pd1", mountPoint: "/mnt/disk/"` ```
       # We create a disk named `pd1`, mount it on the host VM, and map `/mnt/pd1` to `/
       # mnt/disk` in the docker container. At runtime, an entry for `input_file` would
       # be required in the inputs map, such as: ``` inputs["input_file"] = "gs://my-
-      # bucket/bar.txt" ``` This would generate the following gsutil call: ``` gsutil
+      # bucket/bar.txt" ``` This would generate the following gcloud storage call: ``` gcloud storage
       # cp gs://my-bucket/bar.txt /mnt/pd1/file.txt ``` The file `/mnt/pd1/file.txt`
       # maps to `/mnt/disk/file.txt` in the Docker container. Acceptable paths are:
       # Google Cloud storage pathLocal path file file glob directory For outputs, the
-      # direction of the copy is reversed: ``` gsutil cp /mnt/disk/file.txt gs://my-
+      # direction of the copy is reversed: ``` gcloud storage cp /mnt/disk/file.txt gs://my-
       # bucket/bar.txt ``` Acceptable paths are: Local pathGoogle Cloud Storage path
       # file file file directory - directory must already exist glob directory -
       # directory will be created if it doesn't exist One restriction due to docker