
Commit b215313

Merge pull request #3011 from googlyrahman/refactor-ai-migration-71846e7a93bb405fb1627f7831d4ba8c
Migrate gsutil usage to gcloud storage
2 parents 4b1947b + 7ba1b56 commit b215313
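
The change is a uniform command mapping applied across all ten files. As a minimal sketch (not part of the commit), the same rewrite could be scripted with sed; this covers only the exact patterns that appear in the diffs below, and the result should still be reviewed by hand:

migrate() {
  # Order matters: the more specific "mb -l US -c" pattern must be
  # rewritten before the plain "mb -l US" pattern can match it.
  sed -i \
    -e 's|gsutil mb -l US -c "COLDLINE"|gcloud storage buckets create --location=US --default-storage-class="COLDLINE"|g' \
    -e 's|gsutil mb -l US|gcloud storage buckets create --location=US|g' \
    -e 's|gsutil rm -rf|gcloud storage rm --recursive --continue-on-error|g' \
    -e 's|gsutil cp|gcloud storage cp|g' \
    -e 's|gsutil ls|gcloud storage ls|g' \
    "$@"
}
# Usage: migrate quests/dataflow/*.sh quests/dataflow_python/*.sh quests/sparktobq/*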

File tree

10 files changed, +26 -26 lines

quests/dataflow/create_batch_sinks.sh

Lines changed: 2 additions & 2 deletions

@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 #TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BiqQuery Dataset
 bq mk --location=US logs
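
The #TODO above is untouched by this migration. A minimal sketch (an assumption, not part of the commit) of how the first bucket creation could tolerate the bucket that Qwiklabs pre-creates:

# Hypothetical guard: create the bucket only if it does not already exist.
gcloud storage buckets describe gs://$PROJECT_ID >/dev/null 2>&1 || \
  gcloud storage buckets create --location=US gs://$PROJECT_ID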

quests/dataflow/create_streaming_sinks.sh

Lines changed: 2 additions & 2 deletions

@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 #TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BiqQuery Dataset
 bq mk --location=US logs

quests/dataflow/generate_batch_events.sh

Lines changed: 1 addition & 1 deletion

@@ -16,4 +16,4 @@ echo "Copying events to Cloud Storage"
 # Set BUCKET to the non-coldline Google Cloud Storage bucket
 export BUCKET=gs://$(gcloud config get-value project)/
 # Copy events.json into the bucket
-gsutil cp events.json ${BUCKET}
+gcloud storage cp events.json ${BUCKET}

quests/dataflow_python/create_batch_sinks.sh

Lines changed: 2 additions & 2 deletions

@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 #TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BiqQuery Dataset
 bq mk --location=US logs

quests/dataflow_python/create_streaming_sinks.sh

Lines changed: 2 additions & 2 deletions

@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 #TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BiqQuery Dataset
 bq mk --location=US logs

quests/dataflow_python/generate_batch_events.sh

Lines changed: 1 addition & 1 deletion

@@ -16,4 +16,4 @@ echo "Copying events to Cloud Storage"
 # Set BUCKET to the non-coldline Google Cloud Storage bucket
 export BUCKET=gs://$(gcloud config get-value project)/
 # Copy events.json into the bucket
-gsutil cp events.json ${BUCKET}
+gcloud storage cp events.json ${BUCKET}

quests/sparktobq/02_gcs.ipynb

Lines changed: 5 additions & 5 deletions

@@ -46,7 +46,7 @@
 "outputs": [],
 "source": [
 "BUCKET='cloud-training-demos-ml' # CHANGE\n",
-"!gsutil cp kdd* gs://$BUCKET/"
+"!gcloud storage cp kdd* gs://$BUCKET/"
 ]
 },
 {
@@ -55,7 +55,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"!gsutil ls gs://$BUCKET/kdd*"
+"!gcloud storage ls gs://$BUCKET/kdd*"
 ]
 },
 {
@@ -189,8 +189,8 @@
 "outputs": [],
 "source": [
 "ax[0].get_figure().savefig('report.png');\n",
-"!gsutil rm -rf gs://$BUCKET/sparktobq/\n",
-"!gsutil cp report.png gs://$BUCKET/sparktobq/"
+"!gcloud storage rm --recursive --continue-on-error gs://$BUCKET/sparktobq/\n",
+"!gcloud storage cp report.png gs://$BUCKET/sparktobq/"
 ]
 },
 {
@@ -208,7 +208,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"!gsutil ls gs://$BUCKET/sparktobq/**"
+"!gcloud storage ls gs://$BUCKET/sparktobq/**"
 ]
 },
 {

quests/sparktobq/03_automate.ipynb

Lines changed: 5 additions & 5 deletions

@@ -30,7 +30,7 @@
 "!wget http://kdd.ics.uci.edu/databases/kddcup99/kddcup.data_10_percent.gz\n",
 "BUCKET='cloud-training-demos-ml' # CHANGE\n",
 "!pip install google-compute-engine\n",
-"!gsutil cp kdd* gs://$BUCKET/"
+"!gcloud storage cp kdd* gs://$BUCKET/"
 ]
 },
 {
@@ -40,7 +40,7 @@
 "outputs": [],
 "source": [
 "BUCKET='cloud-training-demos-ml' # CHANGE\n",
-"!gsutil ls gs://$BUCKET/kdd*"
+"!gcloud storage ls gs://$BUCKET/kdd*"
 ]
 },
 {
@@ -193,8 +193,8 @@
 "%%writefile -a spark_analysis.py\n",
 "\n",
 "ax[0].get_figure().savefig('report.png');\n",
-"#!gsutil rm -rf gs://$BUCKET/sparktobq/\n",
-"#!gsutil cp report.png gs://$BUCKET/sparktobq/"
+"#!gcloud storage rm --recursive --continue-on-error gs://$BUCKET/sparktobq/\n",
+"#!gcloud storage cp report.png gs://$BUCKET/sparktobq/"
 ]
 },
 {
@@ -250,7 +250,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"!gsutil ls gs://$BUCKET/sparktobq/**"
+"!gcloud storage ls gs://$BUCKET/sparktobq/**"
 ]
 },
 {

quests/sparktobq/05_functions.ipynb

Lines changed: 4 additions & 4 deletions

@@ -30,7 +30,7 @@
 "wget http://kdd.ics.uci.edu/databases/kddcup99/kddcup.data_10_percent.gz\n",
 "gunzip kddcup.data_10_percent.gz\n",
 "BUCKET='cloud-training-demos-ml' # CHANGE\n",
-"gsutil cp kdd* gs://$BUCKET/\n",
+"gcloud storage cp kdd* gs://$BUCKET/\n",
 "bq mk sparktobq"
 ]
 },
@@ -234,8 +234,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"!gsutil rm -rf gs://$BUCKET/sparktobq\n",
-"!gsutil cp kddcup.data_10_percent gs://$BUCKET/"
+"!gcloud storage rm --recursive --continue-on-error gs://$BUCKET/sparktobq\n",
+"!gcloud storage cp kddcup.data_10_percent gs://$BUCKET/"
 ]
 },
 {
@@ -253,7 +253,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"!gsutil ls gs://$BUCKET/sparktobq"
+"!gcloud storage ls gs://$BUCKET/sparktobq"
 ]
 },
 {

quests/sparktobq/submit_workflow.sh

Lines changed: 2 additions & 2 deletions

@@ -11,15 +11,15 @@ MACHINE_TYPE=n1-standard-4
 CLUSTER=sparktobq
 BUCKET=$1
 
-gsutil cp spark_analysis.py gs://$BUCKET/
+gcloud storage cp spark_analysis.py gs://$BUCKET/
 
 gcloud dataproc --quiet workflow-templates delete $TEMPLATE
 gcloud dataproc --quiet workflow-templates create $TEMPLATE
 
 # the things we need pip-installed on the cluster
 STARTUP_SCRIPT=gs://${BUCKET}/sparktobq/startup_script.sh
 echo "pip install --upgrade --quiet google-compute-engine google-cloud-storage matplotlib" > /tmp/startup_script.sh
-gsutil cp /tmp/startup_script.sh $STARTUP_SCRIPT
+gcloud storage cp /tmp/startup_script.sh $STARTUP_SCRIPT
 
 # create new cluster for job
 gcloud dataproc workflow-templates set-managed-cluster $TEMPLATE \
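
A quick follow-up check (a suggestion, not part of the commit) to confirm no gsutil calls remain in the touched quests:

# Should produce no output once the migration is complete.
grep -rn "gsutil" quests/dataflow quests/dataflow_python quests/sparktobq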
