|
| 1 | +import expect from 'expect.js'; |
| 2 | +import { createScmAdaptersForTests, globalWithFetch } from './utils.js'; |
// Test doubles for the GitHub (gh) and GitLab (gl) SCM adapters under test.
const { gh, gl } = createScmAdaptersForTests();
| 4 | + |
| 5 | +function mockGithubPullFetch(fileCount = 120) { |
| 6 | + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { |
| 7 | + const url = typeof input === 'string' ? input : input.url; |
| 8 | + if ( |
| 9 | + url.startsWith('https://api.github.com/repos/foo/bar/pulls/1') && |
| 10 | + !url.includes('/files') |
| 11 | + ) { |
| 12 | + return Promise.resolve( |
| 13 | + new Response( |
| 14 | + JSON.stringify({ |
| 15 | + base: { sha: 'baseSha' }, |
| 16 | + head: { sha: 'headSha' }, |
| 17 | + }), |
| 18 | + { status: 200 }, |
| 19 | + ), |
| 20 | + ); |
| 21 | + } |
| 22 | + if (url.includes('/repos/foo/bar/pulls/1/files')) { |
| 23 | + const page = Number(new URL(url).searchParams.get('page')) || 1; |
| 24 | + if (fileCount <= 100) { |
| 25 | + // Only one page |
| 26 | + const batch = Array(fileCount).fill({ |
| 27 | + filename: `f1.pkg`, |
| 28 | + previous_filename: `f1.pkg`, |
| 29 | + additions: 1, |
| 30 | + deletions: 0, |
| 31 | + status: 'modified', |
| 32 | + sha: 'h', |
| 33 | + blob_url: '', |
| 34 | + raw_url: '', |
| 35 | + content_url: '', |
| 36 | + }); |
| 37 | + return Promise.resolve( |
| 38 | + new Response(JSON.stringify(batch), { status: 200 }), |
| 39 | + ); |
| 40 | + } else { |
| 41 | + // Pagination: first page 100, second page fileCount-100 |
| 42 | + const count = page === 1 ? 100 : fileCount - 100; |
| 43 | + const batch = Array(count).fill({ |
| 44 | + filename: `f${page}.pkg`, |
| 45 | + previous_filename: `f${page}.pkg`, |
| 46 | + additions: 1, |
| 47 | + deletions: 0, |
| 48 | + status: 'modified', |
| 49 | + sha: 'h', |
| 50 | + blob_url: '', |
| 51 | + raw_url: '', |
| 52 | + content_url: '', |
| 53 | + }); |
| 54 | + return Promise.resolve( |
| 55 | + new Response(JSON.stringify(batch), { status: 200 }), |
| 56 | + ); |
| 57 | + } |
| 58 | + } |
| 59 | + return Promise.reject(new Error('Unexpected fetch: ' + url)); |
| 60 | + }; |
| 61 | +} |
| 62 | + |
| 63 | +const GITLAB_FILE_BASE = { |
| 64 | + diff: '+a\n-b', |
| 65 | + |
| 66 | + new_file: false, |
| 67 | + renamed_file: false, |
| 68 | + deleted_file: false, |
| 69 | + generated_file: null, |
| 70 | +}; |
| 71 | + |
| 72 | +function mockGitlabCommitFetch(fileCount = 120) { |
| 73 | + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { |
| 74 | + const url = typeof input === 'string' ? input : input.url; |
| 75 | + |
| 76 | + if ( |
| 77 | + url === |
| 78 | + 'https://gitlab.com/api/v4/projects/foo%2Fbar/repository/commits/123abc' |
| 79 | + ) { |
| 80 | + return Promise.resolve( |
| 81 | + new Response(JSON.stringify({ parent_ids: ['p1'] }), { |
| 82 | + status: 200, |
| 83 | + }), |
| 84 | + ); |
| 85 | + } |
| 86 | + if ( |
| 87 | + url.startsWith( |
| 88 | + 'https://gitlab.com/api/v4/projects/foo%2Fbar/repository/commits/123abc/diff', |
| 89 | + ) |
| 90 | + ) { |
| 91 | + const page = Number(new URL(url).searchParams.get('page')) || 1; |
| 92 | + const gitlabFile = { |
| 93 | + ...GITLAB_FILE_BASE, |
| 94 | + new_path: `${page}.pkg`, |
| 95 | + old_path: `${page}.pkg`, |
| 96 | + }; |
| 97 | + if (fileCount <= 100) { |
| 98 | + const batch = Array(fileCount).fill(gitlabFile); |
| 99 | + const headers = new Headers({ 'x-total-pages': '1' }); |
| 100 | + return Promise.resolve( |
| 101 | + new Response(JSON.stringify(batch), { status: 200, headers }), |
| 102 | + ); |
| 103 | + } else { |
| 104 | + const count = page === 1 ? 100 : fileCount - 100; |
| 105 | + const batch = Array(count).fill(gitlabFile); |
| 106 | + const headers = new Headers({ 'x-total-pages': '2' }); |
| 107 | + return Promise.resolve( |
| 108 | + new Response(JSON.stringify(batch), { status: 200, headers }), |
| 109 | + ); |
| 110 | + } |
| 111 | + } |
| 112 | + return Promise.reject(new Error('Unexpected fetch: ' + url)); |
| 113 | + }; |
| 114 | +} |
| 115 | + |
| 116 | +function mockGitlabPullFetch(fileCount = 120) { |
| 117 | + globalWithFetch.fetch = (input: RequestInfo | { url: string }) => { |
| 118 | + const url = typeof input === 'string' ? input : input.url; |
| 119 | + const u = new URL(url); |
| 120 | + |
| 121 | + if ( |
| 122 | + u.pathname.toLocaleLowerCase() === |
| 123 | + '/api/v4/projects/foo%2fbar/merge_requests/1' && |
| 124 | + !u.searchParams.has('page') |
| 125 | + ) { |
| 126 | + return Promise.resolve( |
| 127 | + new Response( |
| 128 | + JSON.stringify({ |
| 129 | + diff_refs: { base_sha: 'baseSha', head_sha: 'headSha' }, |
| 130 | + }), |
| 131 | + { status: 200, headers: new Headers() }, |
| 132 | + ), |
| 133 | + ); |
| 134 | + } |
| 135 | + if ( |
| 136 | + u.pathname.toLocaleLowerCase() === |
| 137 | + '/api/v4/projects/foo%2fbar/merge_requests/1/diffs' && |
| 138 | + u.searchParams.has('page') |
| 139 | + ) { |
| 140 | + const page = Number(u.searchParams.get('page')) || 1; |
| 141 | + const gitlabFile = { |
| 142 | + ...GITLAB_FILE_BASE, |
| 143 | + new_path: `${page}.pkg`, |
| 144 | + old_path: `${page}.pkg`, |
| 145 | + }; |
| 146 | + if (fileCount <= 100) { |
| 147 | + const batch = Array(fileCount).fill(gitlabFile); |
| 148 | + const headers = new Headers({ 'x-total-pages': '1' }); |
| 149 | + return Promise.resolve( |
| 150 | + new Response(JSON.stringify(batch), { status: 200, headers }), |
| 151 | + ); |
| 152 | + } else { |
| 153 | + const count = page === 1 ? 100 : fileCount - 100; |
| 154 | + const batch = Array(count).fill(gitlabFile); |
| 155 | + const headers = new Headers({ 'x-total-pages': '2' }); |
| 156 | + return Promise.resolve( |
| 157 | + new Response(JSON.stringify(batch), { status: 200, headers }), |
| 158 | + ); |
| 159 | + } |
| 160 | + } |
| 161 | + return Promise.reject(new Error('Unexpected fetch: ' + url)); |
| 162 | + }; |
| 163 | +} |
| 164 | + |
// Removes the mocked fetch entirely (not just sets it to undefined) so
// later tests see the environment's original global fetch, if any.
function clearFetchMock() {
  delete globalWithFetch.fetch;
}
| 168 | + |
// Pagination tests: each case installs one of the fetch mocks above and
// asserts the adapter aggregates files from every page into a single list.
describe('Commit and Pull Details', () => {
  describe('GitHub Adapter', () => {
    describe('getPullDetails()', () => {
      const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' };

      // Restore the real fetch so mocks never leak between tests.
      afterEach(clearFetchMock);

      it('collects all files across pull request pages (pagination)', async () => {
        // 120 files -> 100 on page 1 ("f1.pkg") + 20 on page 2 ("f2.pkg").
        mockGithubPullFetch(120);
        const prData = await gh.getPullDetails(fakePullInfo, 'token');
        expect(prData.info.base.sha).to.equal('baseSha');
        expect(prData.info.head.sha).to.equal('headSha');
        expect(prData.files).to.have.length(120);
        expect(prData.files[0].filename).to.equal('f1.pkg');
        // Last entry comes from the second page.
        expect(prData.files[119].filename).to.equal('f2.pkg');
      });

      it('collects all files when only one page is returned (no pagination)', async () => {
        mockGithubPullFetch(5);
        const prData = await gh.getPullDetails(fakePullInfo, 'token');
        expect(prData.info.base.sha).to.equal('baseSha');
        expect(prData.info.head.sha).to.equal('headSha');
        expect(prData.files).to.have.length(5);
        expect(prData.files[0].filename).to.equal('f1.pkg');
        expect(prData.files[4].filename).to.equal('f1.pkg');
      });
    });
  });

  describe('GitLab Adapter', () => {
    describe('getCommitDetails()', () => {
      const fakeCommitInfo = {
        owner: 'foo',
        repo: 'bar',
        commitHash: '123abc',
      };

      // Restore the real fetch so mocks never leak between tests.
      afterEach(clearFetchMock);

      it('collects all files across commit pages (pagination)', async () => {
        // 120 files -> two pages per the x-total-pages header in the mock.
        mockGitlabCommitFetch(120);
        const commitData = await gl.getCommitDetails(fakeCommitInfo, 'token');
        expect(commitData.sha).to.equal('123abc');
        expect(commitData.parents[0].sha).to.equal('p1');
        expect(commitData.files).to.have.length(120);
      });

      it('collects all files when only one page is returned (no pagination)', async () => {
        mockGitlabCommitFetch(5);
        const commitData = await gl.getCommitDetails(fakeCommitInfo, 'token');
        expect(commitData.sha).to.equal('123abc');
        expect(commitData.parents[0].sha).to.equal('p1');
        expect(commitData.files).to.have.length(5);
      });
    });

    describe('getPullDetails()', () => {
      const fakePullInfo = { owner: 'foo', repo: 'bar', pullNumber: '1' };

      // Restore the real fetch so mocks never leak between tests.
      afterEach(clearFetchMock);

      it('collects all files across merge request pages (pagination)', async () => {
        // 120 files -> 100 named "1.pkg" (page 1) + 20 named "2.pkg" (page 2).
        mockGitlabPullFetch(120);
        const mrData = await gl.getPullDetails(fakePullInfo, 'token');
        expect(mrData.info.base.sha).to.equal('baseSha');
        expect(mrData.info.head.sha).to.equal('headSha');
        expect(mrData.files).to.have.length(120);
        expect(mrData.files[0].filename).to.equal('1.pkg');
        expect(mrData.files[119].filename).to.equal('2.pkg');
      });

      it('collects all files when only one page is returned (no pagination)', async () => {
        mockGitlabPullFetch(5);
        const mrData = await gl.getPullDetails(fakePullInfo, 'token');
        expect(mrData.info.base.sha).to.equal('baseSha');
        expect(mrData.info.head.sha).to.equal('headSha');
        expect(mrData.files).to.have.length(5);
        expect(mrData.files[0].filename).to.equal('1.pkg');
        expect(mrData.files[4].filename).to.equal('1.pkg');
      });
    });
  });
});
0 commit comments