Created
December 3, 2025 22:15
-
-
Save nullity00/29da37f3295e10e8971ae1374f7e1c57 to your computer and use it in GitHub Desktop.
Migrate repositories across organizations
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| const https = require('https'); | |
| const fs = require('fs'); | |
// Migration settings. Tokens must be personal access tokens with the
// org-migration scopes on their respective organizations (the values
// here are placeholders).
const CONFIG = {
  SOURCE_ORG: 'org_a',                 // organization to migrate repos FROM
  SOURCE_TOKEN: 'ghp_org_a',           // PAT for the source org
  DESTINATION_ORG: 'org_b',            // organization to migrate repos INTO
  DESTINATION_TOKEN: 'ghp_org_b',      // PAT for the destination org
  INCLUDE_FORKS: false,                // when false, forked repos are skipped
  INCLUDE_ARCHIVED: false,             // when false, archived repos are skipped
  INCLUDE_PRIVATE: true,               // include private repos in the migration
  INCLUDE_PUBLIC: false,               // include public repos in the migration
  JSON_FILE_PATH: 'repositories.json', // manifest file written and re-read by this script
  REPOSITORY_SOURCE: 'json',           // 'json' -> read manifest file; anything else -> use REPOSITORIES below
  REPOSITORIES: [
    { sourceName: 'repo1', targetName: 'repo1', visibility: 'private' }
  ],
  MAX_CONCURRENT: 5,                   // migrations started per batch
  STATUS_CHECK_INTERVAL: 30000,        // ms between migration status polls
};
// GraphQL query: one page (up to 100) of an organization's repositories,
// with the metadata needed for filtering and the JSON manifest.
// `$cursor` drives pagination via pageInfo.endCursor.
const QUERY_REPOSITORIES = `
query($org: String!, $cursor: String) {
organization(login: $org) {
repositories(first: 100, after: $cursor) {
pageInfo { hasNextPage endCursor }
nodes { name isPrivate isFork isArchived visibility createdAt updatedAt diskUsage defaultBranchRef { name } }
}
}
}
`;
/**
 * Execute a single GraphQL request against the GitHub API.
 *
 * @param {string} query     GraphQL document
 * @param {object} variables variables for the document
 * @param {string} token     bearer token used for Authorization
 * @returns {Promise<object>} resolves with `data` from the response;
 *   rejects on transport errors, HTTP >= 400, GraphQL `errors`, or
 *   an unparseable body.
 */
function makeGraphQLRequest(query, variables, token) {
  return new Promise((resolve, reject) => {
    const data = JSON.stringify({ query: query, variables: variables });
    const options = {
      hostname: 'api.github.com',
      port: 443,
      path: '/graphql',
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${token}`,
        'Content-Type': 'application/json',
        // BUG FIX: Content-Length must be the byte length of the payload.
        // String#length counts UTF-16 code units, which differs from the
        // byte count whenever repo names/variables contain non-ASCII text.
        'Content-Length': Buffer.byteLength(data),
        'User-Agent': 'migration-script'
      }
    };
    const req = https.request(options, (res) => {
      let responseData = '';
      res.on('data', (chunk) => { responseData += chunk; });
      res.on('end', () => {
        // Surface HTTP-level failures (bad token, rate limit) explicitly
        // instead of trying to JSON-parse an error page.
        if (res.statusCode && res.statusCode >= 400) {
          reject(new Error(`HTTP ${res.statusCode}: ${responseData}`));
          return;
        }
        try {
          const parsed = JSON.parse(responseData);
          if (parsed.errors) reject(new Error(JSON.stringify(parsed.errors)));
          else resolve(parsed.data);
        } catch (e) {
          reject(e);
        }
      });
    });
    req.on('error', (e) => reject(e));
    req.write(data);
    req.end();
  });
}
/**
 * Page through every repository in `org` using the GraphQL API.
 * Throws when the organization is missing or the token lacks access.
 */
async function fetchAllRepositories(org, token) {
  const collected = [];
  let cursor = null;
  for (;;) {
    const data = await makeGraphQLRequest(QUERY_REPOSITORIES, { org, cursor }, token);
    if (!data.organization) {
      throw new Error(`Organization '${org}' not found or access denied`);
    }
    const page = data.organization.repositories;
    collected.push(...page.nodes);
    if (!page.pageInfo.hasNextPage) break;
    cursor = page.pageInfo.endCursor;
  }
  return collected;
}
/**
 * Apply the CONFIG include/exclude flags to the raw repository list.
 */
function filterRepositories(repositories) {
  const wanted = (repo) => {
    const excludedAsFork = repo.isFork && !CONFIG.INCLUDE_FORKS;
    const excludedAsArchived = repo.isArchived && !CONFIG.INCLUDE_ARCHIVED;
    const excludedByVisibility = repo.isPrivate
      ? !CONFIG.INCLUDE_PRIVATE
      : !CONFIG.INCLUDE_PUBLIC;
    return !(excludedAsFork || excludedAsArchived || excludedByVisibility);
  };
  return repositories.filter(wanted);
}
/**
 * Return the repositories sorted alphabetically by name.
 *
 * BUG FIX: Array.prototype.sort sorts in place, so the original mutated
 * its argument as a side effect. Sorting a shallow copy keeps the
 * caller's array intact while the return value is unchanged.
 */
function sortRepositories(repositories) {
  return [...repositories].sort((a, b) => a.name.localeCompare(b.name));
}
/**
 * Build the minimal migration descriptors (source name, target name,
 * visibility) consumed by the migration phase. Target name defaults to
 * the source name.
 */
function convertToMigrationFormat(repositories) {
  const descriptors = [];
  for (const repo of repositories) {
    descriptors.push({
      sourceName: repo.name,
      targetName: repo.name,
      visibility: repo.isPrivate ? 'private' : 'public'
    });
  }
  return descriptors;
}
/**
 * Persist the discovered repositories (plus per-repo detail records) to
 * CONFIG.JSON_FILE_PATH so the migration phase can re-read them later.
 */
function saveRepositoriesJson(migrationRepos, sortedRepositories) {
  const describe = (repo) => ({
    name: repo.name,
    visibility: repo.isPrivate ? 'private' : 'public',
    isFork: repo.isFork,
    isArchived: repo.isArchived,
    diskUsage: repo.diskUsage || 0,
    defaultBranch: repo.defaultBranchRef ? repo.defaultBranchRef.name : null,
    createdAt: repo.createdAt,
    updatedAt: repo.updatedAt
  });
  const payload = {
    sourceOrg: CONFIG.SOURCE_ORG,
    generatedAt: new Date().toISOString(),
    totalCount: sortedRepositories.length,
    repositories: migrationRepos,
    details: sortedRepositories.map(describe)
  };
  fs.writeFileSync(CONFIG.JSON_FILE_PATH, JSON.stringify(payload, null, 2));
}
// GraphQL documents for GitHub's repository-migration (importer) API.
const QUERIES = {
  // Resolve an organization login to its GraphQL node id.
  getOrgId: `query($login: String!) { organization(login: $login) { login id name databaseId } }`,
  // Register github.com as a GITHUB_ARCHIVE migration source owned by the destination org.
  createMigrationSource: `mutation createMigrationSource($name: String!, $ownerId: ID!) { createMigrationSource(input: { name: $name, url: "https://github.com", ownerId: $ownerId, type: GITHUB_ARCHIVE }) { migrationSource { id name url type } } }`,
  // Start migrating one repository; returns the migration node id used for polling.
  startRepositoryMigration: `mutation startRepositoryMigration($sourceId: ID!, $ownerId: ID!, $sourceRepositoryUrl: URI!, $repositoryName: String!, $continueOnError: Boolean!, $accessToken: String!, $githubPat: String!, $targetRepoVisibility: String!) { startRepositoryMigration(input: { sourceId: $sourceId, ownerId: $ownerId, repositoryName: $repositoryName, continueOnError: $continueOnError, accessToken: $accessToken, githubPat: $githubPat, targetRepoVisibility: $targetRepoVisibility, sourceRepositoryUrl: $sourceRepositoryUrl }) { repositoryMigration { id migrationSource { id name type } sourceUrl } } }`,
  // Poll a migration node for its state and failureReason.
  getMigrationStatus: `query($id: ID!) { node(id: $id) { ... on Migration { id sourceUrl migrationSource { name } state failureReason } } }`,
};
/** Resolve after `ms` milliseconds (promisified setTimeout). */
function delay(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
/**
 * Look up the GraphQL node id of the destination organization.
 * Throws when the org is not visible to the destination token.
 */
async function getDestinationOrgId() {
  const result = await makeGraphQLRequest(
    QUERIES.getOrgId,
    { login: CONFIG.DESTINATION_ORG },
    CONFIG.DESTINATION_TOKEN
  );
  if (!result.organization) {
    throw new Error('Destination organization not found');
  }
  return result.organization.id;
}
/**
 * Create a GITHUB_ARCHIVE migration source owned by the destination org
 * and return its node id.
 */
async function createMigrationSource(ownerId) {
  const variables = { name: `${CONFIG.SOURCE_ORG} Source`, ownerId };
  const result = await makeGraphQLRequest(QUERIES.createMigrationSource, variables, CONFIG.DESTINATION_TOKEN);
  return result.createMigrationSource.migrationSource.id;
}
/**
 * Kick off the migration of a single repository and return a handle
 * ({ migrationId, repoName, sourceUrl }) for status polling.
 */
async function startRepositoryMigration(sourceId, ownerId, repo) {
  const response = await makeGraphQLRequest(
    QUERIES.startRepositoryMigration,
    {
      sourceId: sourceId,
      ownerId: ownerId,
      sourceRepositoryUrl: `https://github.com/${CONFIG.SOURCE_ORG}/${repo.sourceName}`,
      repositoryName: repo.targetName,
      continueOnError: true,
      accessToken: CONFIG.SOURCE_TOKEN,
      githubPat: CONFIG.DESTINATION_TOKEN,
      targetRepoVisibility: repo.visibility
    },
    CONFIG.DESTINATION_TOKEN
  );
  const migration = response.startRepositoryMigration.repositoryMigration;
  return {
    migrationId: migration.id,
    repoName: repo.targetName,
    sourceUrl: migration.sourceUrl
  };
}
/**
 * Fetch the current state of one migration by node id.
 * Returns the migration node (or null/undefined if not found).
 */
async function checkMigrationStatus(migrationId) {
  const variables = { id: migrationId };
  const response = await makeGraphQLRequest(QUERIES.getMigrationStatus, variables, CONFIG.DESTINATION_TOKEN);
  return response.node;
}
/**
 * Poll until every started migration reaches a terminal state.
 *
 * @param {Array<{migrationId: string, repoName: string}>} migrations
 * @returns {Promise<{completed: object[], failed: object[]}>}
 *   migrations grouped by SUCCEEDED / FAILED outcome.
 */
async function monitorMigrations(migrations) {
  const pending = new Map(migrations.map(m => [m.migrationId, m]));
  const completed = [];
  const failed = [];
  while (pending.size > 0) {
    // Snapshot entries so we can delete from the map while iterating.
    for (const [migrationId, migration] of Array.from(pending.entries())) {
      try {
        const status = await checkMigrationStatus(migrationId);
        if (status && status.state === 'SUCCEEDED') {
          completed.push({ ...migration, status });
          pending.delete(migrationId);
        } else if (status && status.state === 'FAILED') {
          failed.push({ ...migration, status });
          pending.delete(migrationId);
        }
        // Any other state (QUEUED, IN_PROGRESS, ...) stays pending.
      } catch (e) {
        // Transient API errors are tolerated (we retry on the next poll),
        // but log them instead of swallowing silently so a permanently
        // broken status check is visible to the operator.
        process.stderr.write(`status check failed for ${migration.repoName}: ${e && e.message ? e.message : e}\n`);
      }
    }
    if (pending.size > 0) await delay(CONFIG.STATUS_CHECK_INTERVAL);
  }
  return { completed, failed };
}
/**
 * Migrate repositories in batches of CONFIG.MAX_CONCURRENT: start each
 * migration in the batch, then wait for the whole batch to finish before
 * starting the next one.
 *
 * @returns {Promise<object[]>} all terminal migration records (completed and failed)
 */
async function processMigrationBatches(sourceId, ownerId, repositories) {
  const allMigrations = [];
  // Chunk the repo list so at most MAX_CONCURRENT migrations run at once.
  const batches = [];
  for (let i = 0; i < repositories.length; i += CONFIG.MAX_CONCURRENT) {
    batches.push(repositories.slice(i, i + CONFIG.MAX_CONCURRENT));
  }
  for (const batch of batches) {
    const migrations = [];
    for (const repo of batch) {
      try {
        const migration = await startRepositoryMigration(sourceId, ownerId, repo);
        migrations.push(migration);
        await delay(1000); // small gap between kick-offs
      } catch (e) {
        // BUG FIX: a failed kick-off previously dropped the repo silently;
        // report it so the operator knows which repos never migrated.
        process.stderr.write(`failed to start migration for ${repo.sourceName}: ${e && e.message ? e.message : e}\n`);
      }
    }
    if (migrations.length > 0) {
      const results = await monitorMigrations(migrations);
      allMigrations.push(...results.completed, ...results.failed);
    }
  }
  return allMigrations;
}
/**
 * Resolve the list of repositories to migrate.
 *
 * When REPOSITORY_SOURCE is 'json' and the manifest file exists, its
 * `repositories` array is used; otherwise (or on any read/parse error)
 * we fall back to the hard-coded CONFIG.REPOSITORIES list.
 */
function loadRepositories() {
  try {
    if (CONFIG.REPOSITORY_SOURCE === 'json' && fs.existsSync(CONFIG.JSON_FILE_PATH)) {
      const fileContent = fs.readFileSync(CONFIG.JSON_FILE_PATH, 'utf8');
      const data = JSON.parse(fileContent);
      if (data && data.repositories) return data.repositories;
    }
  } catch (e) {
    // The fallback is intentional, but a corrupt manifest should not be
    // invisible — warn before using CONFIG.REPOSITORIES instead.
    process.stderr.write(`could not read ${CONFIG.JSON_FILE_PATH}, using CONFIG.REPOSITORIES: ${e && e.message ? e.message : e}\n`);
  }
  return CONFIG.REPOSITORIES;
}
/**
 * Entry point. Phase 1 discovers repositories in the source org and
 * writes the JSON manifest; phase 2 migrates the selected repositories
 * into the destination org in bounded batches.
 * Exits 0 on success and 1 on any fatal error.
 */
async function main() {
  try {
    // Both tokens are mandatory — bail out early if either is missing.
    if (!CONFIG.SOURCE_TOKEN || !CONFIG.DESTINATION_TOKEN) {
      process.stderr.write('missing tokens\n');
      process.exit(1);
    }
    // Phase 1: enumerate, filter, sort, and persist the manifest.
    const discovered = await fetchAllRepositories(CONFIG.SOURCE_ORG, CONFIG.SOURCE_TOKEN);
    const eligible = filterRepositories(discovered);
    const ordered = sortRepositories(eligible);
    const manifest = convertToMigrationFormat(ordered);
    saveRepositoriesJson(manifest, ordered);
    // Phase 2: the migration list may come from the JSON file or CONFIG.
    const targets = loadRepositories();
    if (!targets || targets.length === 0) {
      process.stderr.write('no repositories to migrate\n');
      process.exit(1);
    }
    const ownerId = await getDestinationOrgId();
    await delay(1000);
    const sourceId = await createMigrationSource(ownerId);
    await delay(1000);
    await processMigrationBatches(sourceId, ownerId, targets);
    process.exit(0);
  } catch (e) {
    try { process.stderr.write(String(e.stack || e.message || e) + '\n'); } catch (e2) {}
    process.exit(1);
  }
}
if (require.main === module) main();
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment