| b69ab31 | | | 1 | /** |
| b69ab31 | | | 2 | * Copyright (c) Meta Platforms, Inc. and affiliates. |
| b69ab31 | | | 3 | * |
| b69ab31 | | | 4 | * This source code is licensed under the MIT license found in the |
| b69ab31 | | | 5 | * LICENSE file in the root directory of this source tree. |
| b69ab31 | | | 6 | */ |
| b69ab31 | | | 7 | |
| b69ab31 | | | 8 | import type { |
| b69ab31 | | | 9 | ChangedFile, |
| b69ab31 | | | 10 | CodeReviewSystem, |
| b69ab31 | | | 11 | CommitInfo, |
| b69ab31 | | | 12 | CommitPhaseType, |
| b69ab31 | | | 13 | Hash, |
| b69ab31 | | | 14 | RepoRelativePath, |
| b69ab31 | | | 15 | ShelvedChange, |
| b69ab31 | | | 16 | SmartlogCommits, |
| b69ab31 | | | 17 | StableCommitFetchConfig, |
| b69ab31 | | | 18 | StableInfo, |
| b69ab31 | | | 19 | SuccessorInfo, |
| b69ab31 | | | 20 | } from 'isl/src/types'; |
| b69ab31 | | | 21 | import type {Logger} from './logger'; |
| b69ab31 | | | 22 | |
| b69ab31 | | | 23 | import path from 'path'; |
| b69ab31 | | | 24 | import {Internal} from './Internal'; |
| b69ab31 | | | 25 | import {MAX_FETCHED_FILES_PER_COMMIT} from './commands'; |
| b69ab31 | | | 26 | import {fromEntries} from './utils'; |
| b69ab31 | | | 27 | |
// Sentinel rendered after each commit's fields so the combined `sl log`
// output can be split back into per-commit chunks (see parseCommitInfoOutput).
export const COMMIT_END_MARK = '<<COMMIT_END_MARK>>';
// NUL byte separating repeated values (files, bookmarks, parents, …) within a
// single rendered field line.
export const NULL_CHAR = '\0';
// Escaped form of NULL_CHAR for embedding inside template strings; the
// rendered output is then split on NULL_CHAR when parsing.
export const ESCAPED_NULL_CHAR = '\\0';
// Marker emitted by the `isDot` template when a commit is the working
// directory parent (revset '.').
export const WDIR_PARENT_MARKER = '@';
| b69ab31 | | | 32 | |
| b69ab31 | | | 33 | ///// Main commits fetch ///// |
| b69ab31 | | | 34 | |
/**
 * Field name -> `sl log` template snippet for the main smartlog fetch.
 *
 * The *key order* of this object is load-bearing: each field is rendered on
 * its own line, and parseCommitInfoOutput maps field names to line indexes by
 * position. Multi-value fields are NUL-separated (ESCAPED_NULL_CHAR) so they
 * fit on one line. `description` must stay last because it is the only
 * multi-line field.
 */
export const mainFetchTemplateFields = (codeReviewSystem: CodeReviewSystem) => ({
  hash: '{node}',
  title: '{desc|firstline}',
  author: '{author}',
  // We prefer committerdate over authordate as authordate sometimes makes
  // amended or rebased commits look stale
  date: '{committerdate|isodatesec}',
  phase: '{phase}',
  bookmarks: `{bookmarks % '{bookmark}${ESCAPED_NULL_CHAR}'}`,
  remoteBookmarks: `{remotenames % '{remotename}${ESCAPED_NULL_CHAR}'}`,
  parents: `{parents % "{node}${ESCAPED_NULL_CHAR}"}`,
  grandparents: `{grandparents % "{node}${ESCAPED_NULL_CHAR}"}`,
  // Renders WDIR_PARENT_MARKER when this commit is the working-directory parent.
  isDot: `{ifcontains(rev, revset('.'), '${WDIR_PARENT_MARKER}')}`,
  // We don't need files for public commits, and public commits are sometimes gigantic codemods without you realizing.
  // No need to fetch if not draft.
  files: `{ifeq(phase, 'draft', join(files,'${ESCAPED_NULL_CHAR}'), '')}`,
  totalFileCount: '{files|count}', // We skip getting files for public commits, but we still want to know how many files there would be
  // Rendered as "operation:hash," entries; only the first is used (see parseSuccessorData).
  successorInfo: '{mutations % "{operation}:{successors % "{node}"},"}',
  closestPredecessors: '{predecessors % "{node},"}',
  // This would be more elegant as a new built-in template
  diffId:
    codeReviewSystem.type === 'phabricator'
      ? '{phabdiff}'
      : codeReviewSystem.type === 'github'
        ? '{github_pull_request_number}'
        : '',
  isFollower: '{sapling_pr_follower|json}',
  stableCommitMetadata: Internal.stableCommitConfig?.template ?? '',
  // Description must be last
  description: '{desc}',
});
| b69ab31 | | | 66 | |
| b69ab31 | | | 67 | export function getMainFetchTemplate(codeReviewSystem: CodeReviewSystem): string { |
| b69ab31 | | | 68 | return [...Object.values(mainFetchTemplateFields(codeReviewSystem)), COMMIT_END_MARK].join('\n'); |
| b69ab31 | | | 69 | } |
| b69ab31 | | | 70 | |
| b69ab31 | | | 71 | /** |
| b69ab31 | | | 72 | * Extract CommitInfos from log calls that use FETCH_TEMPLATE. |
| b69ab31 | | | 73 | */ |
/**
 * Extract CommitInfos from log calls that use FETCH_TEMPLATE.
 *
 * `output` is the raw `sl log` text: per-commit chunks separated by
 * COMMIT_END_MARK, with one line per template field (see
 * mainFetchTemplateFields). Chunks that fail to parse are logged and skipped
 * rather than failing the whole fetch.
 */
export function parseCommitInfoOutput(
  logger: Logger,
  output: string,
  reviewSystem: CodeReviewSystem,
  stableCommitConfig = Internal.stableCommitConfig as StableCommitFetchConfig | null,
): SmartlogCommits {
  const fields = mainFetchTemplateFields(reviewSystem);
  // Map each field name to its line index within a chunk (key order of
  // `fields` matches the rendered line order).
  const index = fromEntries(Object.keys(fields).map((key, i) => [key, i])) as {
    [key in Required<keyof typeof fields>]: number;
  };

  const revisions = output.split(COMMIT_END_MARK);
  const commitInfos: Array<CommitInfo> = [];
  for (const chunk of revisions) {
    try {
      const lines = chunk.trimStart().split('\n');
      if (lines.length < Object.keys(fields).length) {
        // Empty or trailing chunk (e.g. text after the final COMMIT_END_MARK).
        continue;
      }
      // NUL-separated file list; empty for public commits (see `files` template).
      const files = lines[index.files].split(NULL_CHAR).filter(e => e.length > 0);

      // Find if the commit is entirely within the cwd and therefore more relevant to the user.
      // Note: this must be done on the server using the full list of files, not just the sample that the client gets.
      // TODO: should we cache this by commit hash to avoid iterating all files on the same commits every time?
      const maxCommonPathPrefix = findMaxCommonPathPrefix(files);

      commitInfos.push({
        hash: lines[index.hash],
        title: lines[index.title],
        author: lines[index.author],
        date: new Date(lines[index.date]),
        parents: splitLine(lines[index.parents]) as string[],
        grandparents: splitLine(lines[index.grandparents]) as string[],
        phase: lines[index.phase] as CommitPhaseType,
        bookmarks: splitLine(lines[index.bookmarks]),
        remoteBookmarks: splitLine(lines[index.remoteBookmarks]),
        isDot: lines[index.isDot] === WDIR_PARENT_MARKER,
        // Only a sample of files is sent to the client; totalFileCount has the real count.
        filePathsSample: files.slice(0, MAX_FETCHED_FILES_PER_COMMIT),
        totalFileCount: parseInt(lines[index.totalFileCount], 10),
        successorInfo: parseSuccessorData(lines[index.successorInfo]),
        closestPredecessors: splitLine(lines[index.closestPredecessors], ','),
        // description is the last field and spans the remaining lines.
        description: lines
          .slice(index.description + 1 /* first field of description is title; skip it */)
          .join('\n')
          .trim(),
        // The diffId template renders '' when no diff/PR is attached.
        diffId: lines[index.diffId] != '' ? lines[index.diffId] : undefined,
        // '{sapling_pr_follower|json}' renders a JSON boolean when present.
        isFollower: lines[index.isFollower] !== '' ? (JSON.parse(lines[index.isFollower]) as boolean) : false,
        stableCommitMetadata:
          lines[index.stableCommitMetadata] != ''
            ? stableCommitConfig?.parse(lines[index.stableCommitMetadata])
            : undefined,
        maxCommonPathPrefix,
      });
    } catch (err) {
      logger.error('failed to parse commit', err);
    }
  }
  return commitInfos;
}
| b69ab31 | | | 133 | |
| b69ab31 | | | 134 | /** |
| b69ab31 | | | 135 | * Given a set of changed files, find the longest common path prefix. |
| b69ab31 | | | 136 | * See {@link CommitInfo}.maxCommonPathPrefix |
| b69ab31 | | | 137 | * TODO: This could be cached by commit hash |
| b69ab31 | | | 138 | */ |
| b69ab31 | | | 139 | export function findMaxCommonPathPrefix(filePaths: Array<RepoRelativePath>): RepoRelativePath { |
| b69ab31 | | | 140 | let max: null | Array<string> = null; |
| b69ab31 | | | 141 | let maxLength = 0; |
| b69ab31 | | | 142 | |
| b69ab31 | | | 143 | // Path module separator should match what `sl` gives us |
| b69ab31 | | | 144 | const sep = path.sep; |
| b69ab31 | | | 145 | |
| b69ab31 | | | 146 | for (const path of filePaths) { |
| b69ab31 | | | 147 | if (max == null) { |
| b69ab31 | | | 148 | max = path.split(sep); |
| b69ab31 | | | 149 | max.pop(); // ignore file part, only care about directory |
| b69ab31 | | | 150 | maxLength = max.reduce((acc, part) => acc + part.length + 1, 0); // +1 for slash |
| b69ab31 | | | 151 | continue; |
| b69ab31 | | | 152 | } |
| b69ab31 | | | 153 | // small optimization: we only need to look as long as the max so far, max common path will always be shorter |
| b69ab31 | | | 154 | const parts = path.slice(0, maxLength).split(sep); |
| b69ab31 | | | 155 | for (const [i, part] of parts.entries()) { |
| b69ab31 | | | 156 | if (part !== max[i]) { |
| b69ab31 | | | 157 | max = max.slice(0, i); |
| b69ab31 | | | 158 | maxLength = max.reduce((acc, part) => acc + part.length + 1, 0); // +1 for slash |
| b69ab31 | | | 159 | break; |
| b69ab31 | | | 160 | } |
| b69ab31 | | | 161 | } |
| b69ab31 | | | 162 | if (max.length === 0) { |
| b69ab31 | | | 163 | return ''; // we'll never get *more* specific, early exit |
| b69ab31 | | | 164 | } |
| b69ab31 | | | 165 | } |
| b69ab31 | | | 166 | |
| b69ab31 | | | 167 | const result = (max ?? []).join(sep); |
| b69ab31 | | | 168 | if (result == '') { |
| b69ab31 | | | 169 | return result; |
| b69ab31 | | | 170 | } |
| b69ab31 | | | 171 | return result + sep; |
| b69ab31 | | | 172 | } |
| b69ab31 | | | 173 | |
| b69ab31 | | | 174 | /** |
| b69ab31 | | | 175 | * Additional stable locations in the commit fetch will not automatically |
| b69ab31 | | | 176 | * include "stableCommitMetadata". Insert this data onto the commits. |
| b69ab31 | | | 177 | */ |
| b69ab31 | | | 178 | export function attachStableLocations(commits: Array<CommitInfo>, locations: Array<StableInfo>) { |
| b69ab31 | | | 179 | const map: Record<Hash, Array<StableInfo>> = {}; |
| b69ab31 | | | 180 | for (const location of locations) { |
| b69ab31 | | | 181 | const existing = map[location.hash] ?? []; |
| b69ab31 | | | 182 | map[location.hash] = [...existing, location]; |
| b69ab31 | | | 183 | } |
| b69ab31 | | | 184 | |
| b69ab31 | | | 185 | for (const commit of commits) { |
| b69ab31 | | | 186 | if (commit.hash in map) { |
| b69ab31 | | | 187 | commit.stableCommitMetadata = [ |
| b69ab31 | | | 188 | ...(commit.stableCommitMetadata ?? []), |
| b69ab31 | | | 189 | ...map[commit.hash].map(location => ({ |
| b69ab31 | | | 190 | value: location.name, |
| b69ab31 | | | 191 | description: location.info ?? '', |
| b69ab31 | | | 192 | })), |
| b69ab31 | | | 193 | ]; |
| b69ab31 | | | 194 | } |
| b69ab31 | | | 195 | } |
| b69ab31 | | | 196 | } |
| b69ab31 | | | 197 | |
| b69ab31 | | | 198 | ///// Shelve ///// |
| b69ab31 | | | 199 | |
// Field name -> `sl log` template snippet for fetching shelved changes.
// Key order is load-bearing: it defines the line index of each field within
// a COMMIT_END_MARK-delimited chunk (see SHELVE_FIELD_INDEX). `description`
// must stay last because it is the only multi-line field.
export const SHELVE_FIELDS = {
  hash: '{node}',
  name: '{shelvename}',
  author: '{author}',
  date: '{date|isodatesec}',
  filesAdded: '{file_adds|json}',
  filesModified: '{file_mods|json}',
  filesRemoved: '{file_dels|json}',
  description: '{desc}',
};
// Map each shelve field name to its line index within a chunk.
export const SHELVE_FIELD_INDEX = fromEntries(
  Object.keys(SHELVE_FIELDS).map((key, i) => [key, i]),
) as {
  [key in Required<keyof typeof SHELVE_FIELDS>]: number;
};
// One template line per field, terminated by the end-of-commit marker.
export const SHELVE_FETCH_TEMPLATE = [...Object.values(SHELVE_FIELDS), COMMIT_END_MARK].join('\n');
| b69ab31 | | | 216 | |
| b69ab31 | | | 217 | export function parseShelvedCommitsOutput(logger: Logger, output: string): Array<ShelvedChange> { |
| b69ab31 | | | 218 | const shelves = output.split(COMMIT_END_MARK); |
| b69ab31 | | | 219 | const commitInfos: Array<ShelvedChange> = []; |
| b69ab31 | | | 220 | for (const chunk of shelves) { |
| b69ab31 | | | 221 | try { |
| b69ab31 | | | 222 | const lines = chunk.trim().split('\n'); |
| b69ab31 | | | 223 | if (lines.length < Object.keys(SHELVE_FIELDS).length) { |
| b69ab31 | | | 224 | continue; |
| b69ab31 | | | 225 | } |
| b69ab31 | | | 226 | const files: Array<ChangedFile> = [ |
| b69ab31 | | | 227 | ...(JSON.parse(lines[SHELVE_FIELD_INDEX.filesModified]) as Array<string>).map(path => ({ |
| b69ab31 | | | 228 | path, |
| b69ab31 | | | 229 | status: 'M' as const, |
| b69ab31 | | | 230 | })), |
| b69ab31 | | | 231 | ...(JSON.parse(lines[SHELVE_FIELD_INDEX.filesAdded]) as Array<string>).map(path => ({ |
| b69ab31 | | | 232 | path, |
| b69ab31 | | | 233 | status: 'A' as const, |
| b69ab31 | | | 234 | })), |
| b69ab31 | | | 235 | ...(JSON.parse(lines[SHELVE_FIELD_INDEX.filesRemoved]) as Array<string>).map(path => ({ |
| b69ab31 | | | 236 | path, |
| b69ab31 | | | 237 | status: 'R' as const, |
| b69ab31 | | | 238 | })), |
| b69ab31 | | | 239 | ]; |
| b69ab31 | | | 240 | commitInfos.push({ |
| b69ab31 | | | 241 | hash: lines[SHELVE_FIELD_INDEX.hash], |
| b69ab31 | | | 242 | name: lines[SHELVE_FIELD_INDEX.name], |
| b69ab31 | | | 243 | date: new Date(lines[SHELVE_FIELD_INDEX.date]), |
| b69ab31 | | | 244 | filesSample: files.slice(0, MAX_FETCHED_FILES_PER_COMMIT), |
| b69ab31 | | | 245 | totalFileCount: files.length, |
| b69ab31 | | | 246 | description: lines.slice(SHELVE_FIELD_INDEX.description).join('\n'), |
| b69ab31 | | | 247 | }); |
| b69ab31 | | | 248 | } catch (err) { |
| b69ab31 | | | 249 | logger.error('failed to parse shelved change'); |
| b69ab31 | | | 250 | } |
| b69ab31 | | | 251 | } |
| b69ab31 | | | 252 | return commitInfos; |
| b69ab31 | | | 253 | } |
| b69ab31 | | | 254 | |
| b69ab31 | | | 255 | ///// Changed Files ///// |
| b69ab31 | | | 256 | |
// Field name -> `sl log` template snippet for fetching a commit's changed
// files. Key order is load-bearing: it defines each field's line index
// within a COMMIT_END_MARK-delimited chunk (see CHANGED_FILES_INDEX).
export const CHANGED_FILES_FIELDS = {
  hash: '{node}',
  filesAdded: '{file_adds|json}',
  filesModified: '{file_mods|json}',
  filesRemoved: '{file_dels|json}',
};
// Map each changed-files field name to its line index within a chunk.
export const CHANGED_FILES_INDEX = fromEntries(
  Object.keys(CHANGED_FILES_FIELDS).map((key, i) => [key, i]),
) as {
  [key in Required<keyof typeof CHANGED_FILES_FIELDS>]: number;
};
// One template line per field, terminated by the end-of-commit marker.
export const CHANGED_FILES_TEMPLATE = [
  ...Object.values(CHANGED_FILES_FIELDS),
  COMMIT_END_MARK,
].join('\n');
| b69ab31 | | | 272 | |
| b69ab31 | | | 273 | ///// Helpers ///// |
| b69ab31 | | | 274 | |
| b69ab31 | | | 275 | function parseSuccessorData(successorData: string): SuccessorInfo | undefined { |
| b69ab31 | | | 276 | const [successorString] = successorData.split(',', 1); // we're only interested in the first available mutation |
| b69ab31 | | | 277 | if (!successorString) { |
| b69ab31 | | | 278 | return undefined; |
| b69ab31 | | | 279 | } |
| b69ab31 | | | 280 | const successor = successorString.split(':'); |
| b69ab31 | | | 281 | return { |
| b69ab31 | | | 282 | hash: successor[1], |
| b69ab31 | | | 283 | type: successor[0], |
| b69ab31 | | | 284 | }; |
| b69ab31 | | | 285 | } |
| b69ab31 | | | 286 | function splitLine(line: string, separator = NULL_CHAR): Array<string> { |
| b69ab31 | | | 287 | return line.split(separator).filter(e => e.length > 0); |
| b69ab31 | | | 288 | } |