Google Cloud Platform Guide 2026: Cloud Run, BigQuery, Firebase, and GKE
Advertisement
GCP for Developers 2026: Google's Cloud Platform
GCP is underrated. Cloud Run (serverless containers), BigQuery (analytics), Firebase (real-time apps), and Vertex AI (ML) are best-in-class services.
- Cloud Run: Serverless Containers
- Firebase: Realtime Apps
- BigQuery: Analytics at Scale
- GKE: Managed Kubernetes
- Vertex AI: ML on GCP
- GCP vs AWS Comparison
Cloud Run: Serverless Containers
Cloud Run is the easiest way to deploy containers — no Kubernetes knowledge needed:
# Deploy a container to Cloud Run
# Fully managed, publicly reachable service (--allow-unauthenticated) that
# scales to zero when idle and caps at 100 instances; each instance gets
# 512Mi / 1 vCPU and serves up to 80 concurrent requests.
gcloud run deploy my-app \
--image gcr.io/my-project/my-app:latest \
--platform managed \
--region us-central1 \
--allow-unauthenticated \
--memory 512Mi \
--cpu 1 \
--min-instances 0 \
--max-instances 100 \
--concurrency 80
# Deploy from source (builds container automatically)
# NOTE(review): presumably builds via Cloud Build buildpacks, so no
# Dockerfile is needed for supported runtimes — confirm for your stack.
gcloud run deploy my-app \
--source . \
--region us-central1
# Set environment variables
# "${DATABASE_URL}" is expanded by YOUR local shell at invocation time,
# not by Cloud Run — make sure it is exported before running this.
gcloud run services update my-app \
--set-env-vars DATABASE_URL="${DATABASE_URL}",NODE_ENV=production
# .github/workflows/cloud-run.yml
# Deploy to Cloud Run on every push to main.
# Requires the GCP_SA_KEY repository secret to contain a service-account
# JSON key with permission to deploy Cloud Run services.
# (Fix: the original snippet had all YAML indentation stripped, which is
# invalid — structure restored below.)
name: Deploy to Cloud Run
on:
  push:
    branches: [main]
jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Authenticate to Google Cloud using the service-account key secret.
      - uses: google-github-actions/auth@v2
        with:
          credentials_json: ${{ secrets.GCP_SA_KEY }}
      # Deploy the image tagged with the commit SHA for traceability.
      - uses: google-github-actions/deploy-cloudrun@v2
        with:
          service: my-app
          image: gcr.io/my-project/my-app:${{ github.sha }}
          region: us-central1
Firebase: Realtime Apps
// Firebase Admin SDK for server-side
import { initializeApp, cert } from 'firebase-admin/app'
import { getFirestore, FieldValue } from 'firebase-admin/firestore'
import { getAuth } from 'firebase-admin/auth'
// Initialize the Admin SDK from a service-account key held in an env var.
// NOTE(review): the `!` only silences the compiler — JSON.parse throws at
// startup if FIREBASE_SERVICE_ACCOUNT is unset or malformed; confirm the
// deployment always provides it.
const app = initializeApp({
credential: cert(JSON.parse(process.env.FIREBASE_SERVICE_ACCOUNT!)),
})
// Shared server-side handles for Firestore and Firebase Auth.
const db = getFirestore(app)
const auth = getAuth(app)
// Firestore CRUD
/**
 * Persist a new post document and return its generated Firestore id.
 * A server-side creation timestamp and a zeroed view counter are stamped
 * onto the document at write time.
 */
async function createPost(post: Post) {
  const document = {
    ...post,
    createdAt: FieldValue.serverTimestamp(),
    views: 0,
  }
  const created = await db.collection('posts').add(document)
  return created.id
}
/**
 * Return up to 20 of the newest published posts carrying `tag`, each
 * merged with its Firestore document id.
 */
async function getPostsByTag(tag: string) {
  const postsQuery = db
    .collection('posts')
    .where('tags', 'array-contains', tag)
    .where('published', '==', true)
    .orderBy('createdAt', 'desc')
    .limit(20)
  const { docs } = await postsQuery.get()
  return docs.map((doc) => ({ id: doc.id, ...doc.data() }))
}
/**
 * Verify a Firebase ID token presented by an API caller and return the
 * caller's identity. Invalid/expired tokens make verifyIdToken reject,
 * and that error propagates to the caller.
 */
async function verifyFirebaseToken(token: string) {
  const { uid, email } = await auth.verifyIdToken(token)
  return { uid, email }
}
// Firebase Client SDK
import { initializeApp } from 'firebase/app'
import { getFirestore, onSnapshot, collection, query, where } from 'firebase/firestore'
// Browser-side Firebase app configured from NEXT_PUBLIC_* env vars.
// NOTE(review): NEXT_PUBLIC_ values are inlined into the client bundle by
// Next.js, so they must not hold secrets — presumably this is the public
// Firebase web config; verify nothing sensitive is routed through here.
const app = initializeApp({
apiKey: process.env.NEXT_PUBLIC_FIREBASE_API_KEY,
projectId: process.env.NEXT_PUBLIC_FIREBASE_PROJECT_ID,
// ...
})
// Client Firestore handle used by the real-time hooks below.
const db = getFirestore(app)
// Real-time listener
/**
 * React hook: keeps local state in sync with published posts carrying
 * `tag` via a Firestore real-time listener. Re-subscribes when `tag`
 * changes and detaches the listener on unmount.
 *
 * Fix: `useState([])` infers `never[]` under strict TypeScript, which
 * rejects the `setPosts` call below — the state is now typed explicitly.
 */
function useRealtimePosts(tag: string) {
  const [posts, setPosts] = useState<Array<{ id: string } & Record<string, unknown>>>([])
  useEffect(() => {
    const q = query(
      collection(db, 'posts'),
      where('tags', 'array-contains', tag),
      where('published', '==', true)
    )
    // onSnapshot fires once with the current result set, then again on
    // every server-side change.
    const unsubscribe = onSnapshot(q, (snapshot) => {
      setPosts(snapshot.docs.map(doc => ({ id: doc.id, ...doc.data() })))
    })
    return unsubscribe // Cleanup: detach listener on unmount / tag change
  }, [tag])
  return posts
}
BigQuery: Analytics at Scale
-- Query petabytes of data in seconds
-- Free tier: 1TB queries/month, 10GB storage
-- Analyze your app's events
-- Materialize raw_events into a partitioned, clustered table:
--   * PARTITION BY day of event_time lets queries that filter on
--     event_time prune partitions (BigQuery bills by bytes scanned).
--   * CLUSTER BY event_type, user_id co-locates rows commonly filtered
--     or grouped on those columns.
-- NOTE(review): TIMESTAMP_MILLIS assumes raw_events.timestamp is epoch
-- MILLISECONDS — seconds- or microseconds-based values would land in the
-- wrong era; confirm the source unit.
CREATE OR REPLACE TABLE myapp.events
PARTITION BY DATE(event_time)
CLUSTER BY event_type, user_id
AS SELECT
user_id,
event_type,
-- Pull the visited page out of the JSON metadata payload.
JSON_EXTRACT_SCALAR(metadata, '$.page') as page,
TIMESTAMP_MILLIS(timestamp) as event_time
FROM raw_events;
-- Daily active users and total event volume for the trailing 30 days,
-- newest day first. Filtering on event_time prunes partitions.
SELECT
  DATE(event_time) AS date,
  COUNT(DISTINCT user_id) AS dau,
  COUNT(*) AS events
FROM myapp.events
WHERE event_time >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 30 DAY)
GROUP BY 1
ORDER BY 1 DESC;
-- User funnel analysis: one row per user with a boolean per funnel step,
-- then aggregate step counts and the signup→post conversion rate.
WITH funnel AS (
SELECT user_id,
COUNTIF(event_type = 'signup') > 0 as signed_up,
COUNTIF(event_type = 'first_post') > 0 as created_post,
COUNTIF(event_type = 'subscription') > 0 as subscribed
FROM myapp.events
GROUP BY user_id
)
SELECT
COUNT(*) as total_users,
COUNTIF(signed_up) as step1_signup,
COUNTIF(created_post) as step2_post,
COUNTIF(subscribed) as step3_subscribe,
-- Fix: a signup→post rate must divide posters-who-signed-up by users who
-- signed up, not by ALL users (the original inflated the denominator and
-- counted posters who never signed up). NULLIF avoids division by zero
-- when no user has signed up.
ROUND(100 * COUNTIF(signed_up AND created_post) / NULLIF(COUNTIF(signed_up), 0), 1) as signup_to_post_rate
FROM funnel;
GKE: Managed Kubernetes
# Create GKE cluster
# Zonal cluster of 3 e2-medium nodes with node autoscaling between 1 and
# 10, plus automatic node repair and upgrades.
gcloud container clusters create my-cluster \
--num-nodes=3 \
--zone=us-central1-a \
--machine-type=e2-medium \
--enable-autoscaling \
--min-nodes=1 \
--max-nodes=10 \
--enable-autorepair \
--enable-autoupgrade
# Get credentials
# Writes cluster credentials into your kubeconfig so kubectl can connect.
gcloud container clusters get-credentials my-cluster --zone=us-central1-a
# Deploy
# Applies every manifest found in the k8s/ directory.
kubectl apply -f k8s/
Vertex AI: ML on GCP
# Vertex AI quickstart: generate text with Gemini, then compute text
# embeddings. (Restores the for-loop indentation lost in the original.)
import vertexai
from vertexai.generative_models import GenerativeModel
from vertexai.language_models import TextEmbeddingModel

# Bind the SDK to a project and region before any model call.
vertexai.init(project="my-project", location="us-central1")

# Text generation
gemini = GenerativeModel("gemini-1.5-pro")
reply = gemini.generate_content("Explain quantum computing")
print(reply.text)

# Embeddings
embedder = TextEmbeddingModel.from_pretrained("text-embedding-004")
for emb in embedder.get_embeddings(["Hello world", "Machine learning"]):
    print(f"Embedding dimension: {len(emb.values)}")
GCP vs AWS Comparison
| Service | AWS | GCP |
|---|---|---|
| Containers | ECS/Fargate | Cloud Run |
| Kubernetes | EKS | GKE |
| Serverless | Lambda | Cloud Functions |
| Object Storage | S3 | Cloud Storage |
| Database | RDS | Cloud SQL |
| Analytics | Redshift | BigQuery |
| CDN | CloudFront | Cloud CDN |
| ML | SageMaker | Vertex AI |
GCP's strengths: BigQuery (best analytics), Cloud Run (simplest containers), Vertex AI (Google's models), Kubernetes (GKE is the most polished managed K8s).
Advertisement