# Placeholder environment variables — fill in the values your lab instance
# assigns (dataset name, bucket name, region) before running anything below.
export BIGQUERY_DATASET_NAME=
export CLOUD_STORAGE_BUCKET_NAME=
export REGION=
- Set the default compute zone with the following command:
# BUG FIX: compute/zone expects a zone (e.g. us-central1-a), not a bare
# region name — append a zone suffix and quote the expansion.
gcloud config set compute/zone "${REGION}-a"
- Create the BigQuery dataset and the Cloud Storage bucket with the following commands:
# Create the BigQuery dataset and the Cloud Storage bucket.
# Expansions are quoted so an empty or space-containing value fails loudly
# instead of silently altering the command line.
bq mk "$BIGQUERY_DATASET_NAME"
gsutil mb "gs://$CLOUD_STORAGE_BUCKET_NAME"
- Set the default zone (if your lab assigns region us-east1, replace the -a suffix with -b):
# Point the default zone at the first zone of the assigned region, then
# create a single Dataproc cluster there. Expansions quoted (SC2086).
gcloud config set compute/zone "${REGION}-a"
gcloud dataproc clusters create sample-cluster --region "${REGION}"
- SSH into the Dataproc cluster's master node:
# Open an SSH session on the cluster's master node (sample-cluster-m).
gcloud compute ssh sample-cluster-m --zone=${REGION}-a
# NOTE: run this INSIDE the SSH session — it copies the input file from
# Cloud Storage into the cluster's HDFS root, where the Spark job reads it.
hdfs dfs -cp gs://cloud-training/gsp323/data.txt /data.txt
- exit the SSH terminal
- Submit the Spark Dataproc Job
# Submit the SparkPageRank example as a Dataproc job; everything after the
# bare `--` is passed through to the job itself (the HDFS input path).
# $REGION is quoted to avoid word-splitting (SC2086).
gcloud dataproc jobs submit spark --cluster sample-cluster \
  --class org.apache.spark.examples.SparkPageRank \
  --region "$REGION" \
  --jars file:///usr/lib/spark/examples/jars/spark-examples.jar -- /data.txt
- Initialize Dataprep
- Create a flow
- Import the dataset gs://cloud-training/gsp323/runs.csv from Cloud Storage
- Edit a recipe
- Execute the following commands:
# Create a service account for the Natural Language API and download a
# JSON key for it into ~/key.json. $DEVSHELL_PROJECT_ID quoted (SC2086).
gcloud iam service-accounts create my-natlang-sa \
  --display-name "my natural language service account"
gcloud iam service-accounts keys create ~/key.json \
  --iam-account "my-natlang-sa@${DEVSHELL_PROJECT_ID}.iam.gserviceaccount.com"
# Fetch the prepared Speech-to-Text request body.
wget https://raw.githubusercontent.com/guys-in-the-cloud/cloud-skill-boosts/main/Challenge-labs/Perform%20Foundational%20Data%2C%20ML%2C%20and%20AI%20Tasks%20in%20Google%20Cloud%3A%20Challenge%20Lab/speech-request.json
# Call the Speech-to-Text API and save the response.
# NOTE(review): assumes API_KEY was exported earlier in the lab — confirm.
curl -s -X POST -H "Content-Type: application/json" --data-binary @speech-request.json \
  "https://speech.googleapis.com/v1/speech:recognize?key=${API_KEY}" > speech.json
# Upload the result for grading. Replace <changefilename> (including the
# angle brackets) with the exact object name your lab specifies; the quotes
# keep the placeholder from being parsed as shell redirection meanwhile.
gsutil cp speech.json "gs://${DEVSHELL_PROJECT_ID}-marking/<changefilename>"
# Analyze entities in the sample sentence and save the JSON response.
gcloud ml language analyze-entities --content="Old Norse texts portray Odin as one-eyed and long-bearded, frequently wielding a spear named Gungnir and wearing a cloak and a broad hat." > language.json
# Upload for grading. Replace <changefilename> with the exact object name
# your lab specifies; quotes prevent the < > being parsed as redirection.
gsutil cp language.json "gs://${DEVSHELL_PROJECT_ID}-marking/<changefilename>"
- Download the Video Intelligence request file:
# BUG FIX: the original used a github.com/.../blob/... URL, which downloads
# the HTML viewer page, not the JSON file — use raw.githubusercontent.com
# (the same host the speech-request.json download already uses).
wget https://raw.githubusercontent.com/guys-in-the-cloud/cloud-skill-boosts/main/Challenge-labs/Perform%20Foundational%20Data%2C%20ML%2C%20and%20AI%20Tasks%20in%20Google%20Cloud:%20Challenge%20Lab/video-intelligence-request.json
# Call the Video Intelligence API (curl issues a POST whenever -d is given)
# and save the long-running-operation response.
# FIX: the original 'Bearer '$(...)'' left the command substitution
# unquoted — a single double-quoted header is the safe, idiomatic form.
curl -s -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $(gcloud auth print-access-token)" \
  'https://videointelligence.googleapis.com/v1/videos:annotate' \
  -d @video-intelligence-request.json > video.json
# Upload for grading. Replace <changefilename> with the exact object name
# your lab specifies; quotes prevent the < > being parsed as redirection.
gsutil cp video.json "gs://${DEVSHELL_PROJECT_ID}-marking/<changefilename>"