10 files changed: 26 additions and 26 deletions

File 1 of 10
@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 # TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BigQuery Dataset
 bq mk --location=US logs
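
A hedged sketch of one way to handle the TODO above: the first bucket creation can fail in a qwiklabs project (presumably because a bucket named after the project already exists), so an existence check before creating is one option. The use of gcloud storage buckets describe below is an assumption about the intended fix, not part of this change:

    # Sketch: create the default bucket only if it does not already exist
    # (assumed intent of the "try/catch" TODO; not part of this PR)
    if ! gcloud storage buckets describe "gs://${PROJECT_ID}" > /dev/null 2>&1; then
      gcloud storage buckets create --location=US "gs://${PROJECT_ID}"
    fi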

File 2 of 10
@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 # TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BigQuery Dataset
 bq mk --location=US logs

File 3 of 10
@@ -16,4 +16,4 @@ echo "Copying events to Cloud Storage"
 # Set BUCKET to the non-coldline Google Cloud Storage bucket
 export BUCKET=gs://$(gcloud config get-value project)/
 # Copy events.json into the bucket
-gsutil cp events.json ${BUCKET}
+gcloud storage cp events.json ${BUCKET}
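
If a quick sanity check is wanted after the copy, a minimal sketch (BUCKET above already ends in a trailing slash, so the object lands at the bucket root):

    # Sketch: confirm events.json is visible in the bucket
    gcloud storage ls "${BUCKET}events.json"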

File 4 of 10
@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 # TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BigQuery Dataset
 bq mk --location=US logs

File 5 of 10
@@ -5,8 +5,8 @@ PROJECT_ID=$(gcloud config get-value project)
 
 # GCS buckets
 # TODO: Add try/catch for the first bucket since qwiklabs
-gsutil mb -l US gs://$PROJECT_ID
-gsutil mb -l US -c "COLDLINE" gs://$PROJECT_ID-coldline
+gcloud storage buckets create --location=US gs://$PROJECT_ID
+gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://$PROJECT_ID-coldline
 
 # BigQuery Dataset
 bq mk --location=US logs

File 6 of 10
@@ -16,4 +16,4 @@ echo "Copying events to Cloud Storage"
 # Set BUCKET to the non-coldline Google Cloud Storage bucket
 export BUCKET=gs://$(gcloud config get-value project)/
 # Copy events.json into the bucket
-gsutil cp events.json ${BUCKET}
+gcloud storage cp events.json ${BUCKET}

File 7 of 10
@@ -46,7 +46,7 @@
    "outputs": [],
    "source": [
     "BUCKET='cloud-training-demos-ml' # CHANGE\n",
-    "!gsutil cp kdd* gs://$BUCKET/"
+    "!gcloud storage cp kdd* gs://$BUCKET/"
    ]
   },
   {
@@ -55,7 +55,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!gsutil ls gs://$BUCKET/kdd*"
+    "!gcloud storage ls gs://$BUCKET/kdd*"
    ]
   },
   {
@@ -189,8 +189,8 @@
    "outputs": [],
    "source": [
     "ax[0].get_figure().savefig('report.png');\n",
-    "!gsutil rm -rf gs://$BUCKET/sparktobq/\n",
-    "!gsutil cp report.png gs://$BUCKET/sparktobq/"
+    "!gcloud storage rm --recursive --continue-on-error gs://$BUCKET/sparktobq/\n",
+    "!gcloud storage cp report.png gs://$BUCKET/sparktobq/"
    ]
   },
   {
@@ -208,7 +208,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!gsutil ls gs://$BUCKET/sparktobq/**"
+    "!gcloud storage ls gs://$BUCKET/sparktobq/**"
    ]
   },
   {
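
For reference, a sketch of the gsutil-to-gcloud storage substitutions this change applies, taken from the removed and added lines (BUCKET and PREFIX are placeholders):

    # gsutil mb -l US -c "COLDLINE" gs://BUCKET  ->  gcloud storage buckets create --location=US --default-storage-class="COLDLINE" gs://BUCKET
    # gsutil cp SRC gs://BUCKET/                 ->  gcloud storage cp SRC gs://BUCKET/
    # gsutil ls gs://BUCKET/PREFIX*              ->  gcloud storage ls gs://BUCKET/PREFIX*
    # gsutil rm -rf gs://BUCKET/PREFIX/          ->  gcloud storage rm --recursive --continue-on-error gs://BUCKET/PREFIX/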

File 8 of 10
@@ -30,7 +30,7 @@
     "!wget http://kdd.ics.uci.edu/databases/kddcup99/kddcup.data_10_percent.gz\n",
     "BUCKET='cloud-training-demos-ml' # CHANGE\n",
     "!pip install google-compute-engine\n",
-    "!gsutil cp kdd* gs://$BUCKET/"
+    "!gcloud storage cp kdd* gs://$BUCKET/"
    ]
   },
   {
@@ -40,7 +40,7 @@
    "outputs": [],
    "source": [
     "BUCKET='cloud-training-demos-ml' # CHANGE\n",
-    "!gsutil ls gs://$BUCKET/kdd*"
+    "!gcloud storage ls gs://$BUCKET/kdd*"
    ]
   },
   {
@@ -193,8 +193,8 @@
     "%%writefile -a spark_analysis.py\n",
     "\n",
     "ax[0].get_figure().savefig('report.png');\n",
-    "#!gsutil rm -rf gs://$BUCKET/sparktobq/\n",
-    "#!gsutil cp report.png gs://$BUCKET/sparktobq/"
+    "#!gcloud storage rm --recursive --continue-on-error gs://$BUCKET/sparktobq/\n",
+    "#!gcloud storage cp report.png gs://$BUCKET/sparktobq/"
    ]
   },
   {
@@ -250,7 +250,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!gsutil ls gs://$BUCKET/sparktobq/**"
+    "!gcloud storage ls gs://$BUCKET/sparktobq/**"
    ]
   },
   {

File 9 of 10
@@ -30,7 +30,7 @@
     "wget http://kdd.ics.uci.edu/databases/kddcup99/kddcup.data_10_percent.gz\n",
     "gunzip kddcup.data_10_percent.gz\n",
     "BUCKET='cloud-training-demos-ml' # CHANGE\n",
-    "gsutil cp kdd* gs://$BUCKET/\n",
+    "gcloud storage cp kdd* gs://$BUCKET/\n",
     "bq mk sparktobq"
    ]
   },
@@ -234,8 +234,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!gsutil rm -rf gs://$BUCKET/sparktobq\n",
-    "!gsutil cp kddcup.data_10_percent gs://$BUCKET/"
+    "!gcloud storage rm --recursive --continue-on-error gs://$BUCKET/sparktobq\n",
+    "!gcloud storage cp kddcup.data_10_percent gs://$BUCKET/"
    ]
   },
   {
@@ -253,7 +253,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "!gsutil ls gs://$BUCKET/sparktobq"
+    "!gcloud storage ls gs://$BUCKET/sparktobq"
    ]
   },
   {

File 10 of 10
@@ -11,15 +11,15 @@ MACHINE_TYPE=n1-standard-4
 CLUSTER=sparktobq
 BUCKET=$1
 
-gsutil cp spark_analysis.py gs://$BUCKET/
+gcloud storage cp spark_analysis.py gs://$BUCKET/
 
 gcloud dataproc --quiet workflow-templates delete $TEMPLATE
 gcloud dataproc --quiet workflow-templates create $TEMPLATE
 
 # the things we need pip-installed on the cluster
 STARTUP_SCRIPT=gs://${BUCKET}/sparktobq/startup_script.sh
 echo "pip install --upgrade --quiet google-compute-engine google-cloud-storage matplotlib" > /tmp/startup_script.sh
-gsutil cp /tmp/startup_script.sh $STARTUP_SCRIPT
+gcloud storage cp /tmp/startup_script.sh $STARTUP_SCRIPT
 
 # create new cluster for job
 gcloud dataproc workflow-templates set-managed-cluster $TEMPLATE \
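
This script takes the staging bucket as its first positional argument (BUCKET=$1). A hypothetical invocation and follow-up check, assuming a script name of submit_workflow.sh (the real file name is not shown in this view):

    # Hypothetical: run the migrated script, then confirm the staged objects
    bash submit_workflow.sh my-staging-bucket
    gcloud storage ls gs://my-staging-bucket/spark_analysis.py
    gcloud storage ls gs://my-staging-bucket/sparktobq/startup_script.sh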