#!/bin/bash --posix
docker logs snowflake-proxy 2>&1 | grep --color=auto 'Traffic Relayed' | awk '
{
# Extract the download and upload values
down[1] = $14
down[2] = $15
gsub(/[^a-zA-Z]/, "", down[2])
up[1] = $17
up[2] = $18
gsub(/[^a-zA-Z]/, "", up[2])
# Convert to bytes
if (down[2] == "B") down_total += down[1];
else if (down[2] == "KB") down_total += down[1] * 1024;
else if (down[2] == "MB") down_total += down[1] * 1024 * 1024;
else if (down[2] == "GB") down_total += down[1] * 1024 * 1024 * 1024;
if (up[2] == "B") up_total += up[1];
else if (up[2] == "KB") up_total += up[1] * 1024;
else if (up[2] == "MB") up_total += up[1] * 1024 * 1024;
else if (up[2] == "GB") up_total += up[1] * 1024 * 1024 * 1024;
count++;
}
END {
print "Sum of down traffic on Snowflake: " down_total / 1024 / 1024 / 1024 " GB";
print "Sum of up traffic on Snowflake: " up_total / 1024 / 1024 / 1024 " GB";
print "Total connections established: " count
}'
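For reference, the field numbers above assume a relay log line of roughly this shape (the exact wording here is my assumption; verify against your own docker logs snowflake-proxy output):

2024/01/01 12:00:00 In the last 1h0m0s, there were 5 connections. Traffic Relayed ↓ 1.5 MB, ↑ 0.5 MB.

With awk's default whitespace splitting, the download value and unit land in $14 and $15 and the upload value and unit in $17 and $18; any extra word earlier in the line shifts every later index by one.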
@aquila0101 For your use-case, you'll need to change
# Extract the download and upload values
down[1] = $14
down[2] = $15
gsub(/[^a-zA-Z]/, "", down[2])
up[1] = $17
up[2] = $18
gsub(/[^a-zA-Z]/, "", up[2])
to
# Extract the download and upload values
down[1] = $15
down[2] = $16
gsub(/[^a-zA-Z]/, "", down[2])
up[1] = $18
up[2] = $19
gsub(/[^a-zA-Z]/, "", up[2])
I'll have to check on my server whether the log format just changed or whether for some reason yours is different than mine.
As far as I can see, my logs do not include the word "completed". I may have an older version of snowflake installed or something of that sort.
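If the field positions differ again in a future release, a quick way to check is to print one matching line with every field numbered (plain awk, nothing Snowflake-specific):

# print each whitespace-separated field of the newest matching line with its index
docker logs snowflake-proxy 2>&1 | grep 'Traffic Relayed' | tail -n 1 | awk '{ for (i = 1; i <= NF; i++) printf "%d=%s ", i, $i; print "" }'

Whichever indices hold the value/unit pairs in that output are the ones to plug into down[1]/down[2] and up[1]/up[2].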
Now it's correct:
root@DietPi:~# ./snowstats.sh
Sum of down traffic on Snowflake: 69.8545 GB
Sum of up traffic on Snowflake: 3.73765 GB
Total connections established: 100
I'm using this to run snowflake:
https://gitlab.torproject.org/tpo/anti-censorship/docker-snowflake-proxy
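For anyone setting the proxy up from scratch, something along these lines should work; the image name and flags here are from memory, so treat it as a sketch and check the repo's README for the supported way to run it:

# sketch: run the proxy under the container name the stats scripts above expect
docker run -d --name snowflake-proxy --restart unless-stopped --network host thetorproject/snowflake-proxy

The container name snowflake-proxy matters, since both scripts read it with docker logs snowflake-proxy.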
I updated this script to work with the newest version of the Snowflake Docker image. I also changed it to only show stats for the past 24 hours and cleaned up the output so it looks a bit better in the terminal (and, in my case, in a widget for the KDE Plasma 6 desktop too).
#!/bin/bash
TIME=24h
# Fetch logs from the last 24 hours and process with AWK
docker logs --since $TIME snowflake-proxy 2>&1 | grep 'Traffic Relayed' | awk '
{
# --- 1. DATA EXTRACTION ---
# Hour extraction
split($2, time_parts, ":")
hr = time_parts[1]
# Connection and Traffic values
conns_in_hour = $9
d_val = $16; d_unit = $17;
u_val = $21; u_unit = $22;
# --- 2. TRAFFIC TOTALS LOGIC ---
gsub(/[^a-zA-Z]/, "", d_unit)
gsub(/[^a-zA-Z]/, "", u_unit)
# Convert Down to bytes
if (d_unit == "KB") d_bytes = d_val * 1024;
else if (d_unit == "MB") d_bytes = d_val * 1024 * 1024;
else if (d_unit == "GB") d_bytes = d_val * 1024 * 1024 * 1024;
else d_bytes = d_val;
# Convert Up to bytes
if (u_unit == "KB") u_bytes = u_val * 1024;
else if (u_unit == "MB") u_bytes = u_val * 1024 * 1024;
else if (u_unit == "GB") u_bytes = u_val * 1024 * 1024 * 1024;
else u_bytes = u_val;
# Aggregate totals
down_total += d_bytes
up_total += u_bytes
total_conns += conns_in_hour
# Keep the connection count for the most recent hour (used in the summary below)
hourly_counts[hr] = conns_in_hour
}
END {
# --- 3. PRINT TRAFFIC SUMMARY ---
print "========================================="
print " SNOWFLAKE PROXY REPORT (LAST 24H) "
print "========================================="
printf "Total Download: %.4f GB\n", down_total / 1024^3
printf "Total Upload: %.4f GB\n", up_total / 1024^3
printf "1Hr Connections: %d connections\n", hourly_counts[hr]
printf "Total Connections: %d connections\n", total_conns
print "========================================="
}'
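Since the output is plain text, it drops straight into a watch loop or a command-output widget; for example, assuming you saved it as snowstats.sh like in the earlier comment:

# make it executable and refresh the report every 5 minutes
chmod +x snowstats.sh
watch -n 300 ./snowstats.sh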
My server is Debian 12 and these are the logs so far: