Skip to content

Instantly share code, notes, and snippets.

@amon-ra
Created February 13, 2026 18:59
Show Gist options
  • Select an option

  • Save amon-ra/b519b521b0aed27dbb051ccccafa6c4a to your computer and use it in GitHub Desktop.

Select an option

Save amon-ra/b519b521b0aed27dbb051ccccafa6c4a to your computer and use it in GitHub Desktop.
Synchronize OCA repositories to another organization using the GitHub CLI (gh)
#!/bin/bash
# Odoo Repository Migration Script
# Forks OCA repositories whose README.md contains "Available addons" into the
# odoo-saas-addons organization, using only the GitHub CLI (gh) and API.
# (NOTE(review): an earlier header claimed filtering on __manifest__.py version
# '18.0'; the actual filter is the README marker — see search_manifest_in_repo.)
# Enhanced with archived repository checking and skipping
set -euo pipefail
set -o errtrace # make the ERR trap fire inside functions and subshells too
# Configuration
ORG_SOURCE="OCA" # organization to fork from
ORG_TARGET="odoo-saas-addons" # organization to fork into
VERSION="18.0" # Odoo series; used as the target default branch name
WORK_DIR="/tmp/odoomigration" # scratch and log directory
LOG_FILE="${WORK_DIR}/odoomigration_$(date +%Y%m%d_%H%M%S).log"
ERROR_LOG_FILE="${WORK_DIR}/odoomigration_errors_$(date +%Y%m%d_%H%M%S).log"
REPOS_FILE="${WORK_DIR}/repositories_to_check.txt" # all candidate repos
FILTERED_FILE="${WORK_DIR}/repositories_to_migrate.txt" # repos passing the README filter
DRY_RUN=false # --dry-run: simulate only, no changes
SKIP_CONFIRMATION=true # --interactive sets this to false
ECHO_MODE=false # --echo/--xtrace: enable set -x tracing
RECENT_DAYS=365 # only consider repos pushed within this many days
SKIP_CLEANUP=false # --skip-cleanup: keep temp files on exit
TEST_MODE=false # --test: only run the workflow-creation self-test
TEST_REPO="" # optional repo name override for the self-test
# Colors for output (interpreted by echo -e in log())
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Ensure log directory exists early so logging in parse_args or other early code won't fail.
# BUGFIX: the old form redirected stderr into $ERROR_LOG_FILE, a file inside the
# very directory being created; when that directory did not exist yet the
# redirect itself failed and — because redirections are set up before the
# command runs — the mkdir never executed. Send diagnostics to /dev/null.
mkdir -p "$(dirname "$LOG_FILE")" 2>/dev/null || true
# Enhanced PS4 for better xtrace output (timestamp, file, line, function)
export PS4='+[$(date "+%Y-%m-%d %H:%M:%S")] ${BASH_SOURCE}:${LINENO}:${FUNCNAME[0]}: '
# Error handler: print failing command, exit code, stack trace and tail of log
error_report() {
# ERR-trap handler: dump the failing command, its exit code, the call stack,
# and the tail of the main log to stderr for post-mortem debugging.
local exit_code=${1:-$?}
local last_cmd="${BASH_COMMAND:-unknown}"
echo >&2 "============================================================"
echo >&2 "ERROR: Command '${last_cmd}' failed with exit code ${exit_code}"
echo >&2 "Location: ${BASH_SOURCE[1]:-unknown}:${BASH_LINENO[0]:-unknown}"
echo >&2 "Stack trace:"
local i=0
# `caller N` prints "line function file" per frame until it runs out of frames
while caller $i; do
i=$((i+1))
done >&2
echo >&2 "----- Last lines of log (${LOG_FILE:-/dev/null}) -----"
tail -n 50 "${LOG_FILE:-/dev/null}" 2>>"$ERROR_LOG_FILE" || true
echo >&2 "============================================================"
}
# Fire the handler on any command failure (errtrace extends this to functions)
trap 'error_report $?' ERR
# Logging function
# Append a timestamped, level-tagged message to both stdout and $LOG_FILE.
# echo -e is required: the color variables hold literal '\033[…' sequences.
log() {
  local level="$1"
  shift
  local stamp
  stamp=$(date '+%Y-%m-%d %H:%M:%S')
  echo -e "[$stamp] [$level] $*" | tee -a "$LOG_FILE"
}
# Helper functions
# Thin wrappers that tag log messages with a colored severity label.
log_info()    { log "${BLUE}INFO${NC}"      "$@"; }
log_success() { log "${GREEN}SUCCESS${NC}"  "$@"; }
log_warning() { log "${YELLOW}WARNING${NC}" "$@"; }
log_error()   { log "${RED}ERROR${NC}"      "$@"; }
# Function to display help
show_help() {
# Print usage text to stdout. Unquoted heredoc: only $0 is expanded here.
cat << EOF
Odoo Repository Migration Script
Usage: $0 [OPTIONS]
Options:
--dry-run Simulate migration without making changes
--interactive Ask for confirmation before each action
--skip-cleanup Do not perform cleanup of temporary files on exit
--echo, --xtrace Enable active bash echo mode (set -x) to print executed commands
--test Test create_github_actions_workflow function (does not modify actual repositories)
--help, -h Show this help message
Description:
This script migrates OCA repositories containing "Available addons"
in README.md to the odoo-saas-addons organization using ONLY
GitHub CLI (gh) - no git commands are used.
Enhanced with archived repository checking:
- Skips migration if source repository is archived
- Skips migration if target repository exists but is archived
- Automatically deletes non-fork target repositories and recreates them
- Transfers all open pull requests during forking
The process includes:
1. Discovering all OCA repositories (using GitHub API)
2. Filtering for repositories with "Available addons" in README.md (using GitHub API)
3. Checking for archived repositories and skipping them (using GitHub CLI)
4. Forking source repositories to target organization (using GitHub CLI)
5. Deleting non-fork repositories and recreating them as forks
6. Transferring all open pull requests (using GitHub API)
7. Setting up GitHub Actions for upstream sync (using GitHub API)
8. Generating migration report
Prerequisites:
- GitHub CLI (gh) installed and authenticated
- jq installed (for JSON parsing)
Examples:
$0 # Run with default settings (autonomous mode)
$0 --dry-run # Simulate migration
$0 --interactive # Run with confirmation prompts
$0 --echo # Run with active bash echo (set -x)
$0 --test # Test create_github_actions_workflow function
EOF
}
# Parse command line arguments
# Consume command-line flags; mutates the global option variables.
# Exits 0 after --help, exits 1 on an unknown option.
parse_args() {
  while (( $# > 0 )); do
    case "$1" in
      --dry-run)
        DRY_RUN=true
        log_info "Dry run mode enabled"
        ;;
      --interactive)
        SKIP_CONFIRMATION=false
        log_info "Interactive mode enabled"
        ;;
      --skip-cleanup)
        SKIP_CLEANUP=true
        log_info "Cleanup will be skipped on exit"
        ;;
      --echo|--xtrace)
        ECHO_MODE=true
        # Add timestamp to xtrace prefix for better tracing
        export PS4='+[$(date "+%Y-%m-%d %H:%M:%S")] '
        set -x
        log_info "Active bash echo mode enabled (set -x)"
        ;;
      --test)
        TEST_MODE=true
        log_info "Test mode enabled - will run create_github_actions_workflow test"
        ;;
      --help|-h)
        show_help
        exit 0
        ;;
      *)
        log_error "Unknown option: $1"
        show_help
        exit 1
        ;;
    esac
    shift
  done
}
# Function to verify prerequisites
# Abort (exit 1) unless gh and jq are installed and gh is authenticated.
verify_prerequisites() {
  log_info "Verifying prerequisites..."
  if ! command -v gh &> /dev/null; then
    log_error "GitHub CLI (gh) is not installed"
    exit 1
  fi
  if ! command -v jq &> /dev/null; then
    log_error "jq is not installed (required for parsing repository data)"
    exit 1
  fi
  if ! gh auth status &> /dev/null; then
    log_error "GitHub CLI is not authenticated. Run 'gh auth login' first."
    exit 1
  fi
  log_success "Prerequisites verified"
}
# Function to setup working directory
# Create the scratch directory, enter it, and pre-create the error log file.
setup_work_dir() {
  mkdir -p "$WORK_DIR"
  cd "$WORK_DIR"
  # Pre-create so later `2>>"$ERROR_LOG_FILE"` redirects always succeed.
  touch "$ERROR_LOG_FILE"
  log_success "Working directory created: $WORK_DIR"
  log_success "Error log file created: $ERROR_LOG_FILE"
}
# Function to check if repository is archived
# Return 0 if $1/$2 is archived on GitHub, 1 otherwise.
# In dry-run mode always reports "not archived". On any API error it falls
# back to "not archived" so a transient failure does not block migration.
is_repository_archived() {
  local org=$1
  local repo_name=$2
  if [[ "$DRY_RUN" == true ]]; then
    log_warning "[DRY RUN] Would check if $org/$repo_name is archived"
    return 1 # Assume not archived in dry run
  fi
  # Split declaration from assignment so the command's status is not masked;
  # quote the redirect target so the path never word-splits or globs.
  local repo_info
  repo_info=$(gh repo view "$org/$repo_name" --json "archived" 2>>"$ERROR_LOG_FILE" || echo '{"archived": false}')
  local is_archived
  is_archived=$(echo "$repo_info" | jq -r '.archived // false')
  [[ "$is_archived" == "true" ]]
}
# Function to get all OCA repositories
# Populate $REPOS_FILE with names of $ORG_SOURCE repositories pushed within
# the last $RECENT_DAYS days, sorted case-insensitively. Returns 0 even when
# no repositories match (the file is left empty in that case).
get_oca_repositories() {
  log_info "Fetching OCA repositories pushed in the last ${RECENT_DAYS} day(s)..."
  mkdir -p "$(dirname "$REPOS_FILE")"
  : > "$REPOS_FILE"
  # Compute cutoff in seconds (now - RECENT_DAYS)
  local cutoff_seconds=$((RECENT_DAYS * 24 * 3600))
  # jq filter: keep repos whose pushed_at timestamp falls within the cutoff
  local jq_expr=".[] | select(.pushed_at != null and (.pushed_at | fromdateiso8601) >= (now - ${cutoff_seconds})) | .name"
  # Quoted redirect (was unquoted): the path must never word-split or glob.
  local repos
  repos=$(gh api "orgs/${ORG_SOURCE}/repos" --paginate --jq "${jq_expr}" 2>>"$ERROR_LOG_FILE" || true)
  if [[ -z "$repos" ]]; then
    log_warning "No repositories updated in the last ${RECENT_DAYS} day(s) were found for ${ORG_SOURCE}"
    return 0
  fi
  echo "$repos" | sort -f >> "$REPOS_FILE"
  local total_repos
  total_repos=$(echo "$repos" | wc -l)
  log_success "Total OCA repositories found updated within ${RECENT_DAYS} day(s): $total_repos"
}
# Function to search for "Available addons" text in README.md of repository
# Return 0 when the repo's README.md contains "Available addons"
# (case-insensitive), 1 otherwise or when the README cannot be fetched.
# Always returns 1 in dry-run mode.
search_manifest_in_repo() {
  local repo_name=$1
  log_info "Searching for 'Available addons' in README.md of $repo_name..."
  if [[ "$DRY_RUN" == true ]]; then
    log_warning "[DRY RUN] Would search for 'Available addons' in $repo_name"
    return 1
  fi
  # The GitHub readme endpoint returns the body base64-encoded in .content.
  local encoded
  if ! encoded=$(gh api "repos/$ORG_SOURCE/$repo_name/readme" --jq '.content' 2>>$ERROR_LOG_FILE); then
    log_info "README.md not found in $repo_name"
    return 1
  fi
  # Decode: prefer the base64 binary, fall back to python3 when absent.
  local plain
  if command -v base64 &> /dev/null; then
    plain=$(echo "$encoded" | base64 -d 2>>$ERROR_LOG_FILE)
  else
    plain=$(python3 -c "import base64, sys; print(base64.b64decode(sys.stdin.read()).decode())" <<< "$encoded" 2>>$ERROR_LOG_FILE)
  fi
  if [[ -z "$plain" ]]; then
    log_warning "Failed to decode README.md content for $repo_name"
    return 1
  fi
  if echo "$plain" | grep -qi "Available addons"; then
    log_success "Found 'Available addons' in $repo_name/README.md"
    return 0
  fi
  log_info "✗ $repo_name does not contain 'Available addons' in README.md"
  return 1
}
# Function to filter repositories with "Available addons" in README.md
# Filter $REPOS_FILE down to the repositories whose README mentions
# "Available addons"; matching names are written to $FILTERED_FILE.
filter_repositories() {
  log_info "Filtering repositories with 'Available addons' in README.md..."
  # BUGFIX: truncate with `:` — the old `echo "" > file` seeded a blank line
  # that inflated every later `wc -l` count and produced a spurious empty
  # migration entry.
  : > "$FILTERED_FILE"
  local -i total_repos
  total_repos=$(wc -l < "$REPOS_FILE")
  local -i processed=0
  local -i matched=0
  while IFS= read -r repo_name; do
    # BUGFIX: increment before logging so the first repo shows 1/N, not 0/N.
    processed=$((processed+1))
    log_info "Processing $repo_name ($processed/$total_repos)..."
    if search_manifest_in_repo "$repo_name"; then
      echo "$repo_name" >> "$FILTERED_FILE"
      matched=$((matched+1))
      log_success "✓ $repo_name has 'Available addons' in README.md"
    else
      log_info "✗ $repo_name does not have 'Available addons' in README.md"
    fi
    # Periodic progress indicator
    if (( processed % 10 == 0 )); then
      log_info "Progress: $processed/$total_repos processed, $matched matched"
    fi
  done < "$REPOS_FILE"
  log_success "Found $matched repositories with 'Available addons' in README.md out of $total_repos total"
}
# Function to create target repository by forking source
# Fork $ORG_SOURCE/<repo> into $ORG_TARGET.
# Returns: 0 on success (or when a proper fork already exists), 1 on failure,
#          2 when the source repo is archived, 3 when the target is archived.
create_target_repository() {
local repo_name=$1
log_info "Creating target repository by forking: $repo_name"
if [[ "$DRY_RUN" == true ]]; then
log_warning "[DRY RUN] Would fork $repo_name from $ORG_SOURCE to $ORG_TARGET"
return 0
fi
# Check if source repository is archived
if is_repository_archived "$ORG_SOURCE" "$repo_name"; then
log_warning "Source repository $ORG_SOURCE/$repo_name is archived - skipping migration"
return 2 # Special return code for archived source
fi
# Check if repository already exists and is a fork of source
if gh repo view "$ORG_TARGET/$repo_name" &> /dev/null; then
# Check if existing target repository is archived
if is_repository_archived "$ORG_TARGET" "$repo_name"; then
log_warning "Target repository $ORG_TARGET/$repo_name already exists but is archived - skipping migration"
return 3 # Special return code for archived target
fi
# Verify it's a fork of the source repository
local parent_info
parent_info=$(gh repo view "$ORG_TARGET/$repo_name" --json "parent" --jq '.parent.full_name' 2>>$ERROR_LOG_FILE || echo "")
if [[ "$parent_info" == "$ORG_SOURCE/$repo_name" ]]; then
log_success "Repository $ORG_TARGET/$repo_name already exists as a fork of $ORG_SOURCE/$repo_name"
return 0
fi
# NOTE(review): a non-fork target falls through to the fork call below, which
# will fail while the name is taken; migrate_repository is expected to have
# deleted such repositories before calling this function.
fi
# Fork the repository to the target organization
# Use GitHub API to fork directly to the target organization
if gh api "repos/$ORG_SOURCE/$repo_name/forks" --method POST -f "organization=$ORG_TARGET" &> /dev/null; then
# Forking is asynchronous on GitHub's side: poll (up to max_wait seconds)
# until the new repository becomes visible.
local max_wait=30
local waited=0
while [[ $waited -lt $max_wait ]]; do
if gh repo view "$ORG_TARGET/$repo_name" &> /dev/null; then
log_success "Successfully forked repository: $ORG_SOURCE/$repo_name -> $ORG_TARGET/$repo_name"
return 0
fi
sleep 1
waited=$((waited + 1))
done
log_error "Fork operation timed out for $repo_name"
return 1
else
# Fallback: try using gh repo fork command
if gh repo fork "$ORG_SOURCE/$repo_name" --org "$ORG_TARGET" --clone=false 2>>$ERROR_LOG_FILE; then
log_success "Successfully forked repository: $ORG_SOURCE/$repo_name -> $ORG_TARGET/$repo_name"
return 0
else
log_error "Failed to fork repository: $ORG_SOURCE/$repo_name -> $ORG_TARGET/$repo_name"
return 1
fi
fi
}
# Function to transfer pull requests from source to target
# Recreate every open PR from the source repo in the target fork, skipping
# PRs whose title+branches (or branch pair) already exist in the target.
# Always returns 0; individual PR failures are only logged.
transfer_pull_requests() {
  local repo_name=$1
  log_info "Transferring pull requests for $repo_name..."
  if [[ "$DRY_RUN" == true ]]; then
    log_warning "[DRY RUN] Would transfer pull requests for $repo_name"
    return 0
  fi
  # Get all open PRs from source repository
  local prs
  prs=$(gh api "repos/$ORG_SOURCE/$repo_name/pulls?state=open&per_page=100" --paginate --jq '.[] | {number, title, body, head: {ref, repo: {full_name}}, base: {ref, repo: {full_name}}}' 2>>"$ERROR_LOG_FILE" || true)
  if [[ -z "$prs" ]]; then
    log_info "No open pull requests found in $ORG_SOURCE/$repo_name"
    return 0
  fi
  # Get existing PRs in target to avoid duplicates ("title|head|base" lines)
  local existing_prs
  existing_prs=$(gh api "repos/$ORG_TARGET/$repo_name/pulls?state=open&per_page=100" --paginate --jq '.[] | "\(.title)|\(.head.ref)|\(.base.ref)"' 2>>"$ERROR_LOG_FILE" || true)
  local pr_count=0
  local created=0
  local skipped=0
  local failed=0
  # BUGFIX: feed the loop via process substitution instead of a pipeline.
  # A pipeline runs the while-body in a subshell, so the counters above were
  # lost and the summary line below always reported "0 created, 0 skipped".
  while IFS= read -r pr; do
    if [[ -z "$pr" ]]; then
      continue
    fi
    pr_count=$((pr_count + 1))
    local pr_number pr_title pr_body head_branch head_repo base_branch
    pr_number=$(echo "$pr" | jq -r '.number')
    pr_title=$(echo "$pr" | jq -r '.title' | sed 's/"//g')
    pr_body=$(echo "$pr" | jq -r '.body // ""' | sed 's/"/\\"/g')
    head_branch=$(echo "$pr" | jq -r '.head.ref')
    head_repo=$(echo "$pr" | jq -r '.head.repo.full_name') # kept for a disabled external-head check
    base_branch=$(echo "$pr" | jq -r '.base.ref')
    log_info "Processing PR #$pr_number: $pr_title"
    # Skip if an identical PR (title + branches) already exists in the target.
    local pr_key="${pr_title}|${head_branch}|${base_branch}"
    if echo "$existing_prs" | grep -qF "$pr_key"; then
      log_info "Skipping PR #$pr_number - already exists in target (title: $pr_title, head: $head_branch, base: $base_branch)"
      skipped=$((skipped + 1))
      continue
    fi
    # Also skip if any PR already uses the same head/base branch pair.
    local branch_key="${head_branch}|${base_branch}"
    if echo "$existing_prs" | grep -qF "$branch_key"; then
      log_info "Skipping PR #$pr_number - PR with same branches already exists in target (head: $head_branch, base: $base_branch)"
      skipped=$((skipped + 1))
      continue
    fi
    # Create PR in target using the same branches
    if gh pr create --repo "$ORG_TARGET/$repo_name" --base "$base_branch" --head "$head_branch" --title "$pr_title" --body "$pr_body" 2>>"$ERROR_LOG_FILE"; then
      log_success "Created PR for $repo_name: #$pr_number - $pr_title"
      created=$((created + 1))
    else
      log_warning "Failed to create PR for $repo_name: #$pr_number - $pr_title (branch may not exist in target)"
      failed=$((failed + 1))
    fi
  done < <(echo "$prs" | jq -c '.' 2>>"$ERROR_LOG_FILE")
  log_success "Pull request transfer completed for $repo_name: $created created, $skipped skipped (already exists or external), $failed failed"
}
# Function to mirror repository (fork is done in create_target_repository)
# Post-fork mirroring step. The fork itself is performed earlier by
# create_target_repository; this only carries the open pull requests across.
mirror_repository() {
  local target_repo=$1
  log_info "Mirroring repository: $target_repo"
  if [[ "$DRY_RUN" != true ]]; then
    transfer_pull_requests "$target_repo"
    log_success "Successfully mirrored $target_repo to $ORG_TARGET"
    return 0
  fi
  log_warning "[DRY RUN] Would mirror $target_repo from $ORG_SOURCE to $ORG_TARGET"
  return 0
}
# Function to set default branch
# Make the $VERSION series branch the default branch of the target fork.
# A missing branch or a failed PATCH only logs a warning (returns 0).
set_default_branch() {
  local repo=$1
  local wanted="$VERSION"
  log_info "Setting default branch for $repo to $wanted"
  if [[ "$DRY_RUN" == true ]]; then
    log_warning "[DRY RUN] Would set default branch to $wanted for $repo"
    return 0
  fi
  # Nothing to do when the series branch does not exist in the fork.
  if ! gh api "repos/$ORG_TARGET/$repo/branches/$wanted" &> /dev/null; then
    log_warning "Branch $wanted not found in $repo, keeping current default"
    return 0
  fi
  if gh api "repos/$ORG_TARGET/$repo" --method PATCH --field "default_branch=$wanted" 2>>$ERROR_LOG_FILE; then
    log_success "Set default branch to $wanted for $repo"
  else
    log_warning "Failed to set default branch for $repo"
  fi
}
# Function to create GitHub Actions workflow using gh CLI
# Install an "Upstream Sync" GitHub Actions workflow in the target fork.
# The template below is taken literally (quoted heredoc, so ${{ … }} survives),
# then ORG_NAME / REPO_NAME / my-branch placeholders are substituted with sed
# and the file is pushed to .github/workflows/sync-upstream3.yml via the API.
# NOTE(review): the YAML below appears to have lost its indentation when this
# script was pasted; as written it is not valid workflow YAML. Restore proper
# nesting before relying on it in production.
create_github_actions_workflow() {
local repo_name=$1
local default_branch="$VERSION"
log_info "Creating GitHub Actions workflow for $repo_name"
if [[ "$DRY_RUN" == true ]]; then
log_warning "[DRY RUN] Would create GitHub Actions workflow for $repo_name"
return 0
fi
# Create workflow content
local workflow_content
workflow_content=$(cat << 'EOF'
name: 'Upstream Sync'
on:
schedule:
- cron: '0 7 * * 1,4'
# scheduled at 07:00 every Monday and Thursday
workflow_dispatch: # click the button on Github repo!
inputs:
sync_test_mode: # Adds a boolean option that appears during manual workflow run for easy test mode config
description: 'Fork Sync Test Mode'
type: boolean
default: false
jobs:
sync_latest_from_upstream:
runs-on: ubuntu-latest
name: Sync latest commits from upstream repo
steps:
# REQUIRED step
# Step 1: run a standard checkout action, provided by github
- name: Checkout target repo
uses: actions/checkout@v3
with:
# optional: set the branch to checkout,
# sync action checks out your 'target_sync_branch' anyway
ref: "my-branch"
# REQUIRED if your upstream repo is private (see wiki)
persist-credentials: false
# REQUIRED step
# Step 2: run the sync action
- name: Sync upstream changes
id: sync
uses: aormsby/Fork-Sync-With-Upstream-action@v3.4.1
with:
target_sync_branch: "my-branch"
# REQUIRED 'target_repo_token' exactly like this!
target_repo_token: ${{ secrets.GITHUB_TOKEN }}
upstream_sync_branch: "my-branch"
upstream_sync_repo: ORG_NAME/REPO_NAME
upstream_repo_access_token: ${{ secrets.UPSTREAM_REPO_SECRET }}
# Set test_mode true during manual dispatch to run tests instead of the true action!!
test_mode: ${{ inputs.sync_test_mode }}
# Step 3: Display a sample message based on the sync output var 'has_new_commits'
- name: New commits found
if: steps.sync.outputs.has_new_commits == 'true'
run: echo "New commits were found to sync."
- name: No new commits
if: steps.sync.outputs.has_new_commits == 'false'
run: echo "There were no new commits."
- name: Show value of 'has_new_commits'
run: echo ${{ steps.sync.outputs.has_new_commits }}
EOF
)
# Substitute placeholders (REPO_NAME before ORG_NAME is safe: disjoint tokens)
workflow_content=$(echo "$workflow_content" | sed "s/REPO_NAME/${repo_name}/g")
workflow_content=$(echo "$workflow_content" | sed "s/ORG_NAME/${ORG_SOURCE}/g")
workflow_content=$(echo "$workflow_content" | sed "s/my-branch/${default_branch}/g")
# Create the workflow file using GitHub API
# First, check if we need to get the current default branch
# local default_branch
# default_branch=$(gh repo view "$ORG_TARGET/$repo_name" --json defaultBranchRef --jq '.defaultBranchRef.name' 2>>$ERROR_LOG_FILE || echo "main")
# Encode content to base64 for the API (the contents endpoint requires base64)
local encoded_content
if command -v base64 &> /dev/null; then
encoded_content=$(echo "$workflow_content" | base64 -w 0 2>>$ERROR_LOG_FILE)
else
encoded_content=$(python3 -c "import base64, sys; print(base64.b64encode(sys.stdin.read().encode()).decode())" <<< "$workflow_content" 2>>$ERROR_LOG_FILE)
fi
# Use gh api to create the file
if gh api "/repos/$ORG_TARGET/$repo_name/contents/.github/workflows/sync-upstream3.yml" --method PUT \
-f "message=Add upstream sync workflow" \
-f "content=$encoded_content" \
-f "branch=$default_branch" 2>>$ERROR_LOG_FILE; then
log_success "Created GitHub Actions workflow for $repo_name"
else
log_warning "Failed to create GitHub Actions workflow for $repo_name (may already exist)"
fi
}
# Function to migrate single repository
# Migrate one repository end to end: ensure a proper fork exists, transfer
# open PRs, set the default branch, and install the sync workflow.
# Returns 0 on success or intentional skip, 1 on failure.
migrate_repository() {
  local repo_name=$1
  if [[ -z "$repo_name" ]]; then
    return 0
  fi
  log_info "Starting migration of $repo_name..."
  # Confirmation if interactive mode
  if [[ "$SKIP_CONFIRMATION" == false ]]; then
    echo -n "Proceed with migration of $repo_name? (y/N): "
    read -r response
    if [[ ! "$response" =~ ^[Yy]$ ]]; then
      log_info "Skipping $repo_name"
      return 0
    fi
  fi
  # If the target already exists but is not a fork of the source, delete it
  # so create_target_repository can recreate it as a proper fork.
  if gh repo view "$ORG_TARGET/$repo_name" &> /dev/null; then
    log_info "Target repository $ORG_TARGET/$repo_name already exists"
    local parent_info
    parent_info=$(gh repo view "$ORG_TARGET/$repo_name" --json "parent" --jq '.parent.full_name' 2>>"$ERROR_LOG_FILE" || echo "")
    if [[ "$parent_info" != "$ORG_SOURCE/$repo_name" ]]; then
      log_warning "Target repository $ORG_TARGET/$repo_name exists but is not a fork of $ORG_SOURCE/$repo_name (parent: $parent_info)"
      if [[ "$DRY_RUN" == true ]]; then
        log_warning "[DRY RUN] Would delete and recreate repository: $ORG_TARGET/$repo_name"
      else
        log_info "Deleting existing repository $ORG_TARGET/$repo_name..."
        if gh repo delete "$ORG_TARGET/$repo_name" --yes 2>>"$ERROR_LOG_FILE"; then
          log_success "Deleted repository: $ORG_TARGET/$repo_name"
        else
          log_error "Failed to delete repository: $ORG_TARGET/$repo_name"
          return 1
        fi
      fi
    else
      log_success "Target repository $ORG_TARGET/$repo_name is already a fork of $ORG_SOURCE/$repo_name"
    fi
  fi
  # BUGFIX: the old code ran create_target_repository inside a command
  # substitution. Under `set -e` any non-zero return — including the intended
  # "archived, skip" codes 2 and 3 — aborted the entire script before the
  # case below could handle it, and the function's log output (tee to stdout)
  # was swallowed by the substitution. Call it directly and capture $?.
  local exit_code=0
  create_target_repository "$repo_name" || exit_code=$?
  case $exit_code in
    0)
      log_success "Target repository setup completed for $repo_name"
      ;;
    2)
      log_info "Skipping migration of $repo_name due to archived source repository"
      return 0
      ;;
    3)
      log_info "Skipping migration of $repo_name due to archived target repository"
      return 0
      ;;
    *)
      log_error "Failed to create target repository for $repo_name (exit code: $exit_code)"
      return 1
      ;;
  esac
  # Mirror repository (PR transfer)
  if ! mirror_repository "$repo_name"; then
    log_error "Failed to mirror $repo_name"
    return 1
  fi
  # Set default branch (non-fatal)
  if ! set_default_branch "$repo_name"; then
    log_warning "Failed to set default branch for $repo_name"
  fi
  # Create GitHub Actions workflow (non-fatal)
  if ! create_github_actions_workflow "$repo_name"; then
    log_warning "Failed to create GitHub Actions workflow for $repo_name"
  fi
  log_success "Migration completed for $repo_name"
  return 0
}
# Function to migrate all repositories
# Iterate over $FILTERED_FILE, migrating each repository and tallying results.
migrate_all_repositories() {
  log_info "Starting migration of all repositories..."
  if [[ ! -f "$FILTERED_FILE" ]]; then
    log_error "Filtered repositories file not found. Run discovery first."
    exit 1
  fi
  # wc -l < file avoids both useless-cat and masking the status in `local v=$(…)`
  local total_repos
  total_repos=$(wc -l < "$FILTERED_FILE")
  local processed=0
  local success=0
  local failed=0
  local skipped_archived=0
  while IFS= read -r repo_name; do
    processed=$((processed+1))
    log_info "Migrating $repo_name ($processed/$total_repos)..."
    if migrate_repository "$repo_name"; then
      success=$((success+1))
    else
      # Heuristic: classify the failure as an "archived" skip by inspecting
      # recent log lines. NOTE(review): archived skips actually return 0 from
      # migrate_repository (counted as success), so this branch mostly counts
      # real failures; kept for backward compatibility — TODO confirm intent.
      if tail -5 "$LOG_FILE" | grep -q "archived.*skipping migration"; then
        skipped_archived=$((skipped_archived+1))
      else
        failed=$((failed+1))
      fi
    fi
  done < "$FILTERED_FILE"
  log_info "Migration completed: $success successful, $failed failed, $skipped_archived skipped (archived) out of $total_repos total"
}
# Function to generate migration report
# Write a human-readable migration report to $WORK_DIR/migration_report.txt
# and echo a short summary to stdout.
generate_report() {
  log_info "Generating migration report..."
  local report_file="${WORK_DIR}/migration_report.txt"
  local timestamp
  timestamp=$(date '+%Y-%m-%d %H:%M:%S')
  cat > "$report_file" << EOF
Odoo Repository Migration Report
Generated: $timestamp
Source Organization: $ORG_SOURCE
Target Organization: $ORG_TARGET
=================================
SUMMARY
=================================
Dry Run Mode: $DRY_RUN
Interactive Mode: $([ "$SKIP_CONFIRMATION" == false ] && echo "Enabled" || echo "Disabled")
Archived Repository Checking: Enabled
Pull Request Transfer: Enabled
=================================
REPOSITORIES PROCESSED
=================================
EOF
  if [[ -f "$FILTERED_FILE" ]]; then
    local total_to_migrate
    total_to_migrate=$(wc -l < "$FILTERED_FILE")
    echo "Total repositories with 'Available addons' in README.md: $total_to_migrate" >> "$report_file"
    echo "" >> "$report_file"
    if [[ "$total_to_migrate" -gt 0 ]]; then
      echo "Repositories to migrate:" >> "$report_file"
      while IFS= read -r repo_name; do
        echo " - $repo_name" >> "$report_file"
      done < "$FILTERED_FILE"
    fi
  fi
  # BUGFIX (text): the report used to claim "daily sync at 2 AM UTC", but the
  # installed workflow cron is '0 7 * * 1,4' (07:00 UTC Monday and Thursday).
  cat >> "$report_file" << EOF
=================================
FILES GENERATED
=================================
- Log file: $LOG_FILE
- Repository list: $REPOS_FILE
- Filtered list: $FILTERED_FILE
- Migration report: $report_file
=================================
NEXT STEPS
=================================
1. Review the log file for any errors
2. Verify that all repositories were forked correctly
3. Check that all target repositories are proper forks of source
4. Check that GitHub Actions workflows are running
5. Monitor the scheduled sync process
6. Verify pull requests were transferred
7. Update any documentation as needed
=================================
NOTES
=================================
- All repositories are forked using GitHub CLI (gh repo fork)
- No git commands were used - only GitHub CLI and API
- Non-fork target repositories are automatically deleted and recreated as forks
- All pull requests from source were transferred to target
- GitHub Actions workflows sync at 07:00 UTC on Mondays and Thursdays (cron '0 7 * * 1,4')
- Archived repositories are automatically skipped
- All operations are logged for audit purposes
EOF
  log_success "Migration report generated: $report_file"
  # Display summary
  echo ""
  echo "=== MIGRATION SUMMARY ==="
  if [[ -f "$FILTERED_FILE" ]]; then
    local total
    total=$(wc -l < "$FILTERED_FILE")
    echo "Repositories with 'Available addons' in README.md: $total"
  fi
  echo "Log file: $LOG_FILE"
  echo "Report file: $report_file"
  echo ""
  if [[ "$DRY_RUN" == true ]]; then
    log_warning "This was a DRY RUN - no actual changes were made"
  else
    log_success "Migration completed successfully!"
  fi
}
# Function to cleanup
# EXIT-trap handler: remove temporary directories created during operations.
# Honors --skip-cleanup and disables xtrace for a tidy exit under --echo.
cleanup() {
  if [[ "$SKIP_CLEANUP" == true ]]; then
    log_info "Skipping cleanup as requested (--skip-cleanup)"
    # If echo mode was enabled, disable xtrace even when skipping heavy cleanup
    if [[ "$ECHO_MODE" == true ]]; then
      set +x
    fi
    return 0
  fi
  log_info "Cleaning up temporary files..."
  if [[ "$ECHO_MODE" == true ]]; then
    set +x
  fi
  # Quoted redirect targets (were unquoted: would word-split/glob on odd paths)
  find "$WORK_DIR" -maxdepth 1 -type d -name "fork_temp_*" -exec rm -rf {} + 2>>"$ERROR_LOG_FILE" || true
  find "$WORK_DIR" -maxdepth 1 -type d -name "temp_*" -exec rm -rf {} + 2>>"$ERROR_LOG_FILE" || true
  log_success "Cleanup completed"
}
# Main execution
main() {
# Orchestrates the pipeline. Order matters: the working directory must exist
# before any log() call, so setup_work_dir runs even before argument parsing.
setup_work_dir
log_info "Starting Odoo repository migration searching for 'Available addons' in README.md..."
# Parse command line arguments
parse_args "$@"
# Verify prerequisites
verify_prerequisites
# --test short-circuits everything else and only exercises workflow creation
if [[ "$TEST_MODE" == true ]]; then
test_create_github_actions_workflow
exit $?
fi
# Get all OCA repositories
get_oca_repositories
# Filter repositories containing 'Available addons' in README.md
filter_repositories
# Migrate repositories
migrate_all_repositories
# Generate report
generate_report
# Cleanup is deliberately left to the EXIT trap (see bottom of file)
# cleanup
log_info "Migration process completed"
}
# Function to test create_github_actions_workflow
# Self-test for --test mode: exercise create_github_actions_workflow against a
# sample repo ($TEST_REPO or "account-analytic") without the other migration
# steps. Always returns 0; a failure is only logged as a warning.
test_create_github_actions_workflow() {
  local repo_name=${TEST_REPO:-"account-analytic"}
  create_github_actions_workflow "$repo_name" \
    || log_warning "Failed to create GitHub Actions workflow for $repo_name"
  return 0
}
# Handle script interruption
trap cleanup EXIT # cleanup runs on every exit path: normal end, error, or interrupt
# Run main function with all arguments
main "$@"
## gh auth refresh -h github.com -s delete_repo,workflow
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment