Created
January 31, 2026 23:12
-
-
Save camwest/66a71e59f2aa77535f3b05a5b81eb16f to your computer and use it in GitHub Desktop.
PR Cycle Time Analysis - Measures time from first Claude Code prompt to PR merge (Next-gen DORA metric)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# PR Cycle Time Analysis - Measures time from first Claude prompt to PR merge
# Groups by CALENDAR WEEK (Mon-Sun) when PR merged
# "Next gen DORA" - captures full ideation-to-ship cycle via Claude sessions
#
# Usage: pr_cycle_time.sh [WEEKS_AGO] [REPO_FILTER]
# Requires: gh (authenticated), jq; works with BSD/macOS or GNU date.
set -euo pipefail
WEEKS_AGO="${1:-0}" # 0 = current week, 1 = last week, etc.
REPO_FILTER="${2:-}" # optional substring matched against "owner/repo"
CLAUDE_DIR="$HOME/.claude" # root of local Claude Code session logs
# Calculate calendar week boundaries (Monday-Sunday)
#
# Print the date (YYYY-MM-DD) of the Monday starting the week
# $1 weeks before the current week (0 = this week's Monday).
# Tries BSD/macOS date (-v) first, then falls back to GNU date (-d).
get_monday() {
  local weeks_back=$1
  # Split declaration from assignment so a date(1) failure is not
  # masked by `local` always returning 0 (SC2155).
  local today_dow
  today_dow=$(date +%u) # ISO weekday: 1=Mon, 7=Sun
  local days_since_monday=$((today_dow - 1))
  local days_back=$((days_since_monday + (weeks_back * 7)))
  date -v-"${days_back}"d +%Y-%m-%d 2>/dev/null || date -d "-${days_back} days" +%Y-%m-%d
}
# Get the Sunday of the target week (Monday + 6 days)
#
# $1 - Monday date string (YYYY-MM-DD); prints YYYY-MM-DD of that Sunday.
get_sunday() {
  local monday=$1
  # macOS/BSD path: parse the date to epoch seconds, add 6 days, render.
  # Declaration is separate from assignment so the date(1) exit status
  # is not swallowed by `local` (SC2155).
  local monday_epoch
  monday_epoch=$(date -j -f "%Y-%m-%d" "$monday" +%s 2>/dev/null) || monday_epoch=""
  if [ -n "$monday_epoch" ]; then
    local sunday_epoch=$((monday_epoch + 6*86400))
    date -r "$sunday_epoch" +%Y-%m-%d
  else
    # GNU date fallback (the -j form fails on GNU systems).
    date -d "$monday + 6 days" +%Y-%m-%d
  fi
}
# Print the ISO 8601 week number (%V) for a YYYY-MM-DD date string.
# BSD/macOS date form is attempted first; GNU date is the fallback.
get_week_number() {
  local day=$1
  date -j -f '%Y-%m-%d' "$day" +%V 2>/dev/null && return
  date -d "$day" +%V
}
# Resolve the target calendar week once; everything below filters on it.
WEEK_MONDAY=$(get_monday "$WEEKS_AGO")
WEEK_SUNDAY=$(get_sunday "$WEEK_MONDAY")
WEEK_NUM=$(get_week_number "$WEEK_MONDAY")
# Format for display (e.g. "Feb 02"); BSD date first, GNU fallback.
WEEK_START_DISPLAY=$(date -j -f "%Y-%m-%d" "$WEEK_MONDAY" "+%b %d" 2>/dev/null || date -d "$WEEK_MONDAY" "+%b %d")
WEEK_END_DISPLAY=$(date -j -f "%Y-%m-%d" "$WEEK_SUNDAY" "+%b %d" 2>/dev/null || date -d "$WEEK_SUNDAY" "+%b %d")
# Unix-timestamp window [Monday 00:00:00, Sunday 23:59:59] for merge-date filtering.
CUTOFF_TS=$(date -j -f "%Y-%m-%d" "$WEEK_MONDAY" +%s 2>/dev/null || date -d "$WEEK_MONDAY" +%s)
END_TS=$(date -j -f "%Y-%m-%dT%H:%M:%S" "${WEEK_SUNDAY}T23:59:59" +%s 2>/dev/null || date -d "${WEEK_SUNDAY} 23:59:59" +%s)
# Temp workspace, removed on any exit path. Single quotes defer expansion
# to trap time, and ${TEMP_DIR:?} aborts rather than run `rm -rf` on an
# empty path (the original trap expanded immediately and was unquoted,
# SC2064).
TEMP_DIR=$(mktemp -d)
trap 'rm -rf -- "${TEMP_DIR:?}"' EXIT
PR_DATA="$TEMP_DIR/pr_data.tsv"       # one row per PR: repo, number, first session ts, unix ts
MERGED_DATA="$TEMP_DIR/merged_data.tsv" # merged-this-week rows: repo, num, cycle mins, mergedAt, title
echo "Scanning Claude sessions for PRs..." >&2
# Find all session JSONL files with PR URLs (no date filtering - we filter by merge date later)
# Emits one TSV row per (PR URL found, session file): repo, PR number,
# first-message timestamp, and its unix form. Runs in a pipeline subshell,
# so no variables set here survive the loop — output flows to $PR_DATA.
find "$CLAUDE_DIR/projects" -name "*.jsonl" -type f 2>/dev/null | while read -r session_file; do
  # Check if file contains GitHub PR URLs
  if grep -q 'github\.com/[^/"[:space:]]*/[^/"[:space:]]*/pull/[0-9]' "$session_file" 2>/dev/null; then
    # Get first message timestamp from session (skip non-message entries like file-history-snapshot)
    # NOTE(review): takes the first user/assistant line in file order —
    # assumes session JSONL is appended chronologically; confirm.
    first_ts=$(grep -m1 '"type":"user"\|"type":"assistant"' "$session_file" 2>/dev/null | jq -r '.timestamp // empty' 2>/dev/null)
    if [ -z "$first_ts" ]; then
      continue
    fi
    # Convert to unix timestamp (strip fractional seconds via %%.*);
    # BSD date first, GNU date second, 0 if neither can parse it.
    first_unix=$(date -j -f "%Y-%m-%dT%H:%M:%S" "${first_ts%%.*}" +%s 2>/dev/null || \
      date -d "${first_ts%%.*}" +%s 2>/dev/null || echo "0")
    # Extract PR URLs from this session (deduped within the file by sort -u)
    grep -oh 'https://github\.com/[^/"[:space:]]*/[^/"[:space:]]*/pull/[0-9]*' "$session_file" 2>/dev/null | \
      sort -u | while read -r pr_url; do
      # Parse owner/repo/number from URL via bash regex captures
      if [[ "$pr_url" =~ github\.com/([^/]+)/([^/]+)/pull/([0-9]+) ]]; then
        owner="${BASH_REMATCH[1]}"
        repo="${BASH_REMATCH[2]}"
        pr_num="${BASH_REMATCH[3]}"
        # Apply repo filter if specified (substring match on owner/repo)
        if [ -n "$REPO_FILTER" ] && [[ "$owner/$repo" != *"$REPO_FILTER"* ]]; then
          continue
        fi
        echo -e "${owner}/${repo}\t${pr_num}\t${first_ts}\t${first_unix}"
      fi
    done
  fi
done | sort -t$'\t' -k1,2 -k4,4n | awk -F'\t' '!seen[$1"\t"$2]++' > "$PR_DATA"
# ^ Sort by repo/PR then by timestamp, keep earliest session per PR
# (awk prints only the first row it sees for each repo+number key).
# Bail out early with a minimal report when no PRs were discovered.
PR_COUNT=$(wc -l < "$PR_DATA" | tr -d ' ')
if (( PR_COUNT == 0 )); then
  printf '%s\n' "## PR Cycle Time - CW$WEEK_NUM ($WEEK_START_DISPLAY - $WEEK_END_DISPLAY)" "" "No PRs found in Claude sessions."
  exit 0
fi
printf '%s\n' "Found $PR_COUNT unique PRs, querying GitHub..." >&2
# Query GitHub for merge status and filter by merge date.
# Counters persist after the loop because input comes from a redirection
# (`done < file`), not a pipeline subshell.
MERGED_COUNT=0
OPEN_COUNT=0
TOTAL_CHECKED=0
while IFS=$'\t' read -r repo pr_num session_ts session_unix; do
  TOTAL_CHECKED=$((TOTAL_CHECKED + 1))
  # Progress indicator every 20 PRs (stderr, so it never pollutes the report)
  if [ $((TOTAL_CHECKED % 20)) -eq 0 ]; then
    echo " Checked $TOTAL_CHECKED / $PR_COUNT PRs..." >&2
  fi
  # Query GitHub; on any gh failure (deleted repo, no auth, rate limit)
  # substitute a sentinel JSON object so jq below still works.
  gh_result=$(gh pr view "$pr_num" --repo "$repo" --json mergedAt,state,number,title 2>/dev/null || echo '{"state":"error"}')
  state=$(echo "$gh_result" | jq -r '.state // "error"')
  merged_at=$(echo "$gh_result" | jq -r '.mergedAt // empty')
  title=$(echo "$gh_result" | jq -r '.title // "Unknown"' | cut -c1-50)
  if [ "$state" = "MERGED" ] && [ -n "$merged_at" ]; then
    # Convert merge timestamp (GitHub returns UTC "...Z" ISO-8601);
    # BSD date first, GNU date second, 0 if unparseable.
    merged_unix=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$merged_at" +%s 2>/dev/null || \
      date -d "$merged_at" +%s 2>/dev/null || echo "0")
    # Filter by MERGE DATE being within calendar week
    if [ "$merged_unix" -ge "$CUTOFF_TS" ] && [ "$merged_unix" -le "$END_TS" ]; then
      # Guard against clock skew / parse failures producing negative cycles
      if [ "$merged_unix" -gt "$session_unix" ]; then
        cycle_secs=$((merged_unix - session_unix))
        cycle_mins=$((cycle_secs / 60))
        echo -e "${repo}\t${pr_num}\t${cycle_mins}\t${merged_at}\t${title}" >> "$MERGED_DATA"
        MERGED_COUNT=$((MERGED_COUNT + 1))
      fi
    fi
  elif [ "$state" = "OPEN" ]; then
    OPEN_COUNT=$((OPEN_COUNT + 1))
  fi
done < "$PR_DATA"
echo "Done." >&2
# Emit a short report and stop when nothing merged during the target week.
# (`! -s` is also true for a missing file, covering both original checks.)
if [ ! -s "$MERGED_DATA" ]; then
  cat <<EOF
## PR Cycle Time - CW$WEEK_NUM ($WEEK_START_DISPLAY - $WEEK_END_DISPLAY)

_Measures: first Claude prompt → PR merge (Claude-assisted PRs only)_

| Metric | Value |
|--------|-------|
| PRs in Claude sessions | $PR_COUNT |
| Merged this week | 0 |
| Currently open | $OPEN_COUNT |

No PRs merged during this calendar week.
EOF
  exit 0
fi
# Per-repo merged-PR counts, most active repo first.
REPO_BREAKDOWN="$TEMP_DIR/repo_breakdown.txt"
cut -f1 "$MERGED_DATA" | sort | uniq -c | sort -rn > "$REPO_BREAKDOWN"
# Cycle times (minutes) in ascending order, for median/min/max.
SORTED_TIMES="$TEMP_DIR/sorted_times.txt"
cut -f3 "$MERGED_DATA" | sort -n > "$SORTED_TIMES"
# One awk pass: count, truncated mean, min, max. min/max are seeded from
# the first record instead of a magic sentinel — the original's
# `min=999999` would report a wrong minimum for any cycle time above
# ~694 days. `+0` keeps the empty-input case numeric.
STATS=$(awk '
  NR == 1 { min = $1; max = $1 }
  {
    sum += $1
    if ($1 < min) min = $1
    if ($1 > max) max = $1
  }
  END {
    avg = (NR > 0) ? int(sum / NR) : 0
    printf "%d\t%d\t%d\t%d\n", NR, avg, min + 0, max + 0
  }
' "$SORTED_TIMES")
# Median = middle line of the sorted file (lower middle for even counts).
TOTAL_LINES=$(wc -l < "$SORTED_TIMES" | tr -d ' ')
if [ "$TOTAL_LINES" -gt 0 ]; then
  MEDIAN_LINE=$(( (TOTAL_LINES + 1) / 2 ))
  MEDIAN=$(sed -n "${MEDIAN_LINE}p" "$SORTED_TIMES")
else
  MEDIAN=0
fi
COUNT=$(echo "$STATS" | cut -f1)
AVG=$(echo "$STATS" | cut -f2)
MIN=$(echo "$STATS" | cut -f3)
MAX=$(echo "$STATS" | cut -f4)
# Render a minute count as a compact human-readable duration:
#   under 1 hour  -> "Nm"
#   under 1 day   -> "Nh" or "Nh Mm"
#   1 day or more -> "Nd Mh"
format_time() {
  local total=$1
  if (( total >= 1440 )); then
    printf '%dd %dh\n' $(( total / 1440 )) $(( (total % 1440) / 60 ))
  elif (( total >= 60 )); then
    local h=$(( total / 60 ))
    local m=$(( total % 60 ))
    if (( m > 0 )); then
      printf '%dh %dm\n' "$h" "$m"
    else
      printf '%dh\n' "$h"
    fi
  else
    printf '%dm\n' "$total"
  fi
}
# Bucket cycle times (column 3, minutes) into five duration bands.
DIST=$(awk -F'\t' '
  {
    t = $3
    if      (t < 15)  b1++
    else if (t < 30)  b2++
    else if (t < 60)  b3++
    else if (t < 120) b4++
    else              b5++
  }
  END { printf "%d\t%d\t%d\t%d\t%d\n", b1+0, b2+0, b3+0, b4+0, b5+0 }
' "$MERGED_DATA")
# Split the tab-separated counts into the five bucket variables in one read.
IFS=$'\t' read -r B1 B2 B3 B4 B5 <<< "$DIST"
# Print a proportional histogram bar of █ characters (20 cells max):
# $1 = this bucket's count, $2 = the largest bucket's count.
make_bar() {
  local n=$1
  local peak=$2
  local -r scale=20
  # An all-zero histogram gets an empty bar (avoids division by zero).
  if (( peak == 0 )); then
    echo ""
    return
  fi
  local cells=$(( n * scale / peak ))
  local pad
  printf -v pad '%*s' "$cells" ''
  printf '%s' "${pad// /█}"
}
# Largest bucket count — every histogram bar is scaled against it.
MAX_BUCKET=$(printf '%s\n' "$B1" "$B2" "$B3" "$B4" "$B5" | sort -rn | head -1)
# Output report: summary table first.
cat <<EOF
## PR Cycle Time - CW$WEEK_NUM ($WEEK_START_DISPLAY - $WEEK_END_DISPLAY)

| Metric | Value |
|--------|-------|
| Merged (with Claude sessions) | $MERGED_COUNT |
| Avg cycle time | $(format_time "$AVG") |
| Median cycle time | $(format_time "$MEDIAN") |
| Fastest | $(format_time "$MIN") |
| Slowest | $(format_time "$MAX") |
EOF
printf '\n%s\n\n' "## By Repo"
# uniq -c rows look like "<count> <owner/repo>"; drop the owner prefix
# for display via parameter expansion instead of an echo|cut subshell.
while read -r hits full_repo; do
  echo "- ${full_repo#*/}: $hits"
done < "$REPO_BREAKDOWN"
echo ""
echo "## Distribution"
echo ""
# Parallel arrays: one label and one count per duration band, each line
# rendered as "<label> <bar> <count>" with the bar scaled to MAX_BUCKET.
dist_labels=("< 15 mins" "15-30 mins" "30-60 mins" "1-2 hours" "2+ hours")
dist_counts=("$B1" "$B2" "$B3" "$B4" "$B5")
for i in "${!dist_labels[@]}"; do
  printf '%s %s %d\n' "${dist_labels[$i]}" "$(make_bar "${dist_counts[$i]}" "$MAX_BUCKET")" "${dist_counts[$i]}"
done
echo ""
echo "## Recent PRs"
echo ""
# Ten most recently merged PRs. Field 4 is the ISO-8601 merge timestamp,
# so a reverse lexical sort is newest-first. The pipeline subshell is fine:
# nothing set in the loop is needed afterwards.
sort -t$'\t' -k4 -r "$MERGED_DATA" | head -10 | while IFS=$'\t' read -r repo pr_num cycle_mins merged_at title; do
  # "Mon DD" merge date: BSD date, then GNU date, then a safe placeholder.
  if ! merge_date=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$merged_at" "+%b %d" 2>/dev/null); then
    merge_date=$(date -d "$merged_at" "+%b %d" 2>/dev/null) || merge_date="recent"
  fi
  echo "- ${repo#*/}#${pr_num}: $(format_time "$cycle_mins") (merged $merge_date)"
done
echo ""
echo "---"
echo "_Cycle time = first Claude prompt → PR merge. Only includes PRs found in local ~/.claude session logs._"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment