pentium10 / build.py
Created October 31, 2025 06:20
ADK cold start improvement
import ast
import compileall
import importlib
import json
import os
import shutil
# --- Configuration ---
# List of packages to process - add more packages here
PACKAGES_TO_PROCESS = [
    # ... (package list truncated in the gist preview)
]
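The full script isn't shown in this preview; the core idea is to precompile package sources to bytecode at build time so cold starts skip compilation. A minimal, hedged sketch of that step (the example package name and options are assumptions, not the gist's actual values):

import compileall
import importlib.util

PACKAGES_TO_PROCESS = ["google.adk"]  # assumed example; the real list is in the gist

for pkg in PACKAGES_TO_PROCESS:
    spec = importlib.util.find_spec(pkg)
    if spec and spec.submodule_search_locations:
        for path in spec.submodule_search_locations:
            # quiet=1 suppresses per-file logs; workers=0 uses all available CPUs
            compileall.compile_dir(path, quiet=1, workers=0)
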
pentium10 / bigquery_advanced_mode_options_table.csv
Last active September 27, 2025 10:33
bigquery_advanced_mode_options_table
Feature,Before Advanced Runtime,After Advanced Runtime
Query Speed,Standard performance.,Significantly faster. Benchmarks show ~30% faster runtimes; one case showed a 21x speedup.
Developer Effort,N/A,Zero. A single SQL command enables it for the whole project.
Code Changes,N/A,"None. No need to rewrite SQL, change schemas, or alter BI tools."
Cost (On-Demand),Based on bytes processed,"Identical. You're still scanning the same data, so the cost is the same."
Value Proposition,Stable performance.,"Massive increase in performance-per-dollar. Faster dashboards, more productive analysts."
Resource Efficiency,Good.,"Exceptional. Slot consumption is reduced by ~32%, meaning less internal competition for resources."
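
The "single SQL command" in the table is an ALTER PROJECT statement; a hedged example of the documented form (project ID and region are placeholders):

-- Enable the advanced runtime for all queries in the project
ALTER PROJECT `prj_id`
SET OPTIONS (`region-us.default_query_optimizer_options` = 'advanced_runtime=on');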

main:
  params: [event]
  steps:
    - trainingPipelineCheck:
        switch:
          - condition: ${not("google.cloud.aiplatform.ui.PipelineService.CreateTrainingPipeline" == event.data.protoPayload.methodName)}
            next: end
    # proceed further only when we have the CreateTrainingPipeline event
    - run:
        call: googleapis.workflowexecutions.v1.projects.locations.workflows.executions.run

main:
  params: [args]
  steps:
    - initialize:
        assign:
          - location: ${default(map.get(args,"location"),"us-central1")}
          - trainingPipelineId: ${default(map.get(args,"trainingPipelineId"),"3590189825883373568")}
          - destination_dataset: "vertexai_model_history"
    - getTrainingPipeline:
        call: VertexAI_GetTrainingPipeline
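
The preview ends before the VertexAI_GetTrainingPipeline subworkflow is defined. A minimal sketch of one way to implement it, assuming a plain REST call to the Vertex AI trainingPipelines.get endpoint (project lookup and response handling are assumptions):

VertexAI_GetTrainingPipeline:
  params: [location, trainingPipelineId]
  steps:
    # Call the Vertex AI REST API; OAuth2 authenticates as the workflow's service account.
    - get:
        call: http.get
        args:
          url: ${"https://" + location + "-aiplatform.googleapis.com/v1/projects/" + sys.get_env("GOOGLE_CLOUD_PROJECT_ID") + "/locations/" + location + "/trainingPipelines/" + trainingPipelineId}
          auth:
            type: OAuth2
        result: resp
    - done:
        return: ${resp.body}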

SELECT creation_time, job_id, bi_engine_statistics
FROM `prj_id.region-us`.INFORMATION_SCHEMA.JOBS_BY_PROJECT
WHERE creation_time BETWEEN TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) AND CURRENT_TIMESTAMP()
  AND job_type = "QUERY"
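
A hedged variant of the same query that keeps only accelerated jobs, assuming the documented bi_engine_statistics struct fields (bi_engine_mode is FULL, PARTIAL, or DISABLED):

SELECT creation_time, job_id, bi_engine_statistics.bi_engine_mode
FROM `prj_id.region-us`.INFORMATION_SCHEMA.JOBS_BY_PROJECT
WHERE creation_time BETWEEN TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 1 DAY) AND CURRENT_TIMESTAMP()
  AND job_type = "QUERY"
  AND bi_engine_statistics.bi_engine_mode != "DISABLED"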

DECLARE var_day STRING DEFAULT '2025-09-01';

WITH t AS (
  SELECT creation_time,
    ROUND(6.5 * (total_bytes_processed / POWER(2, 40)), 2) AS processedBytesCostProjection,
    ROUND(6.5 * (total_bytes_billed / POWER(2, 40)), 2) AS billedBytesCostInUSD
  FROM `prj_id.region-us`.INFORMATION_SCHEMA.JOBS_BY_PROJECT
  WHERE creation_time BETWEEN TIMESTAMP(var_day) AND TIMESTAMP_ADD(TIMESTAMP(var_day), INTERVAL 1 DAY)
    AND job_type = "QUERY"
)
SELECT *  -- minimal completion: the final SELECT is truncated in the gist preview
FROM t

DECLARE var_day STRING DEFAULT '2025-09-01';

SELECT creation_time,
  ROUND(6.5 * (total_bytes_processed / POWER(2, 40)), 2) AS processedBytesCostProjection,
  ROUND(6.5 * (total_bytes_billed / POWER(2, 40)), 2) AS billedBytesCostInUSD
FROM `prj_id.region-us`.INFORMATION_SCHEMA.JOBS_BY_PROJECT
WHERE creation_time BETWEEN TIMESTAMP(var_day) AND TIMESTAMP_ADD(TIMESTAMP(var_day), INTERVAL 1 DAY)
  AND job_type = "QUERY"
ORDER BY processedBytesCostProjection DESC

substitutions:
  _IMAGE_NAME: "gcr.io/${PROJECT_ID}/myimage"
options:
  dynamic_substitutions: true
steps:
  # Step 0 - Build the container image
  - name: "gcr.io/cloud-builders/docker"
    args: ["build", "-f", "Dockerfile", "-t", "${_IMAGE_NAME}", "."]
  # Step 1 - Minify the container with docker-slim
  - name: "gcr.io/cloud-builders/docker"
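    # The args below are a hedged sketch, not the gist's actual values: docker-slim
    # is commonly run as a container with the Docker socket mounted, minifying the
    # image built in Step 0 into a ".slim" tag.
    args:
      [
        "run", "--rm",
        "-v", "/var/run/docker.sock:/var/run/docker.sock",
        "dslim/docker-slim", "build",
        "--http-probe=false",
        "--tag", "${_IMAGE_NAME}.slim",
        "${_IMAGE_NAME}",
      ]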

wrk -t5 -c200 -d300s https://fat-apache-container/php_info.php
Running 5m test @ https://fat-apache-container/php_info.php
  5 threads and 200 connections
  Thread Stats   Avg      Stdev     Max   +/- Stdev
    Latency   546.14ms  380.60ms     2.00s    76.86%
    Req/Sec      69.65     43.33    350.00     71.01%
  103361 requests in 5.00m, 7.86GB read
  Socket errors: connect 0, read 1, write 0, timeout 2200
Requests/sec:    344.49
Transfer/sec:     26.83MB

DECLARE var_day STRING DEFAULT '2022-01-09';

WITH t AS (
  SELECT
    protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.createTime,
    5 * (protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.totalProcessedBytes / POWER(2, 40)) AS processedBytesCostProjection,
    5 * (protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.totalBilledBytes / POWER(2, 40)) AS billedBytesCostInUSD
  FROM
    `<dataset_auditlogs>.cloudaudit_googleapis_com_data_access_*`
  WHERE
    -- note: audit-log shard suffixes are YYYYMMDD; the createTime predicate does the precise filtering
    _TABLE_SUFFIX >= var_day
    AND protopayload_auditlog.servicedata_v1_bigquery.jobCompletedEvent.job.jobStatistics.createTime >= TIMESTAMP(var_day)
)
SELECT *  -- minimal completion: the rest of the query is truncated in the gist preview
FROM t