1/**
2 * Copyright (c) Meta Platforms, Inc. and affiliates.
3 *
4 * This source code is licensed under the MIT license found in the
5 * LICENSE file in the root directory of this source tree.
6 */
7
8import type {
9 ChangedFile,
10 CodeReviewSystem,
11 CommitInfo,
12 CommitPhaseType,
13 Hash,
14 RepoRelativePath,
15 ShelvedChange,
16 SmartlogCommits,
17 StableCommitFetchConfig,
18 StableInfo,
19 SuccessorInfo,
20} from 'isl/src/types';
21import type {Logger} from './logger';
22
23import path from 'path';
24import {Internal} from './Internal';
25import {MAX_FETCHED_FILES_PER_COMMIT} from './commands';
26import {fromEntries} from './utils';
27
// Sentinel emitted after each commit's fields so the combined `sl log`
// output can be split back into per-commit chunks.
export const COMMIT_END_MARK = '<<COMMIT_END_MARK>>';
// In-field separator for list-valued template fields (bookmarks, parents, files).
export const NULL_CHAR = '\0';
// NULL_CHAR as it must be spelled inside an `sl` template string.
export const ESCAPED_NULL_CHAR = '\\0';
// Marker produced by the `isDot` template field for the working directory parent ('.').
export const WDIR_PARENT_MARKER = '@';
32
33///// Main commits fetch /////
34
/**
 * `sl log` template fragments for each commit field we fetch.
 * The key order here is a contract: each field is emitted on its own line
 * within a chunk, and `parseCommitInfoOutput` indexes lines by this order.
 * `description` is multi-line and must therefore remain the last field.
 * List-valued fields use NULL_CHAR as their internal separator.
 */
export const mainFetchTemplateFields = (codeReviewSystem: CodeReviewSystem) => ({
  hash: '{node}',
  title: '{desc|firstline}',
  author: '{author}',
  // We prefer committerdate over authordate as authordate sometimes makes
  // amended or rebased commits look stale
  date: '{committerdate|isodatesec}',
  phase: '{phase}',
  bookmarks: `{bookmarks % '{bookmark}${ESCAPED_NULL_CHAR}'}`,
  remoteBookmarks: `{remotenames % '{remotename}${ESCAPED_NULL_CHAR}'}`,
  parents: `{parents % "{node}${ESCAPED_NULL_CHAR}"}`,
  grandparents: `{grandparents % "{node}${ESCAPED_NULL_CHAR}"}`,
  // Emits WDIR_PARENT_MARKER ('@') when this commit is the working directory parent.
  isDot: `{ifcontains(rev, revset('.'), '${WDIR_PARENT_MARKER}')}`,
  // We don't need files for public commits, and public commits are sometimes gigantic codemods without you realizing.
  // No need to fetch if not draft.
  files: `{ifeq(phase, 'draft', join(files,'${ESCAPED_NULL_CHAR}'), '')}`,
  totalFileCount: '{files|count}', // We skip getting files for public commits, but we still want to know how many files there would be
  successorInfo: '{mutations % "{operation}:{successors % "{node}"},"}',
  closestPredecessors: '{predecessors % "{node},"}',
  // This would be more elegant as a new built-in template
  diffId:
    codeReviewSystem.type === 'phabricator'
      ? '{phabdiff}'
      : codeReviewSystem.type === 'github'
      ? '{github_pull_request_number}'
      : '',
  isFollower: '{sapling_pr_follower|json}',
  stableCommitMetadata: Internal.stableCommitConfig?.template ?? '',
  // Description must be last
  description: '{desc}',
});
66
67export function getMainFetchTemplate(codeReviewSystem: CodeReviewSystem): string {
68 return [...Object.values(mainFetchTemplateFields(codeReviewSystem)), COMMIT_END_MARK].join('\n');
69}
70
71/**
72 * Extract CommitInfos from log calls that use FETCH_TEMPLATE.
73 */
/**
 * Extract CommitInfos from log calls that use FETCH_TEMPLATE.
 *
 * The raw output is a series of chunks separated by COMMIT_END_MARK.
 * Within a chunk, each field occupies one line, in the key order of
 * `mainFetchTemplateFields` — except `description`, which is multi-line
 * and therefore last. Chunks that fail to parse are logged and skipped.
 */
export function parseCommitInfoOutput(
  logger: Logger,
  output: string,
  reviewSystem: CodeReviewSystem,
  stableCommitConfig = Internal.stableCommitConfig as StableCommitFetchConfig | null,
): SmartlogCommits {
  const fields = mainFetchTemplateFields(reviewSystem);
  // Map each field name to the line offset it occupies within a chunk.
  const index = fromEntries(Object.keys(fields).map((key, i) => [key, i])) as {
    [key in Required<keyof typeof fields>]: number;
  };

  const revisions = output.split(COMMIT_END_MARK);
  const commitInfos: Array<CommitInfo> = [];
  for (const chunk of revisions) {
    try {
      const lines = chunk.trimStart().split('\n');
      if (lines.length < Object.keys(fields).length) {
        // Incomplete chunk, e.g. the trailing text after the final end mark.
        continue;
      }
      // The files template yields '' for non-draft commits, so this is empty for public commits.
      const files = lines[index.files].split(NULL_CHAR).filter(e => e.length > 0);

      // Find if the commit is entirely within the cwd and therefore more relevant to the user.
      // Note: this must be done on the server using the full list of files, not just the sample that the client gets.
      // TODO: should we cache this by commit hash to avoid iterating all files on the same commits every time?
      const maxCommonPathPrefix = findMaxCommonPathPrefix(files);

      commitInfos.push({
        hash: lines[index.hash],
        title: lines[index.title],
        author: lines[index.author],
        date: new Date(lines[index.date]),
        parents: splitLine(lines[index.parents]) as string[],
        grandparents: splitLine(lines[index.grandparents]) as string[],
        phase: lines[index.phase] as CommitPhaseType,
        bookmarks: splitLine(lines[index.bookmarks]),
        remoteBookmarks: splitLine(lines[index.remoteBookmarks]),
        isDot: lines[index.isDot] === WDIR_PARENT_MARKER,
        // Client only receives a bounded sample of file paths.
        filePathsSample: files.slice(0, MAX_FETCHED_FILES_PER_COMMIT),
        totalFileCount: parseInt(lines[index.totalFileCount], 10),
        successorInfo: parseSuccessorData(lines[index.successorInfo]),
        closestPredecessors: splitLine(lines[index.closestPredecessors], ','),
        description: lines
          .slice(index.description + 1 /* first field of description is title; skip it */)
          .join('\n')
          .trim(),
        diffId: lines[index.diffId] != '' ? lines[index.diffId] : undefined,
        isFollower: lines[index.isFollower] !== '' ? (JSON.parse(lines[index.isFollower]) as boolean) : false,
        stableCommitMetadata:
          lines[index.stableCommitMetadata] != ''
            ? stableCommitConfig?.parse(lines[index.stableCommitMetadata])
            : undefined,
        maxCommonPathPrefix,
      });
    } catch (err) {
      logger.error('failed to parse commit', err);
    }
  }
  return commitInfos;
}
133
134/**
135 * Given a set of changed files, find the longest common path prefix.
136 * See {@link CommitInfo}.maxCommonPathPrefix
137 * TODO: This could be cached by commit hash
138 */
139export function findMaxCommonPathPrefix(filePaths: Array<RepoRelativePath>): RepoRelativePath {
140 let max: null | Array<string> = null;
141 let maxLength = 0;
142
143 // Path module separator should match what `sl` gives us
144 const sep = path.sep;
145
146 for (const path of filePaths) {
147 if (max == null) {
148 max = path.split(sep);
149 max.pop(); // ignore file part, only care about directory
150 maxLength = max.reduce((acc, part) => acc + part.length + 1, 0); // +1 for slash
151 continue;
152 }
153 // small optimization: we only need to look as long as the max so far, max common path will always be shorter
154 const parts = path.slice(0, maxLength).split(sep);
155 for (const [i, part] of parts.entries()) {
156 if (part !== max[i]) {
157 max = max.slice(0, i);
158 maxLength = max.reduce((acc, part) => acc + part.length + 1, 0); // +1 for slash
159 break;
160 }
161 }
162 if (max.length === 0) {
163 return ''; // we'll never get *more* specific, early exit
164 }
165 }
166
167 const result = (max ?? []).join(sep);
168 if (result == '') {
169 return result;
170 }
171 return result + sep;
172}
173
174/**
175 * Additional stable locations in the commit fetch will not automatically
176 * include "stableCommitMetadata". Insert this data onto the commits.
177 */
178export function attachStableLocations(commits: Array<CommitInfo>, locations: Array<StableInfo>) {
179 const map: Record<Hash, Array<StableInfo>> = {};
180 for (const location of locations) {
181 const existing = map[location.hash] ?? [];
182 map[location.hash] = [...existing, location];
183 }
184
185 for (const commit of commits) {
186 if (commit.hash in map) {
187 commit.stableCommitMetadata = [
188 ...(commit.stableCommitMetadata ?? []),
189 ...map[commit.hash].map(location => ({
190 value: location.name,
191 description: location.info ?? '',
192 })),
193 ];
194 }
195 }
196}
197
198///// Shelve /////
199
// Template fields for listing shelved changes; one output line per field,
// in this key order, with `description` (multi-line) last.
export const SHELVE_FIELDS = {
  hash: '{node}',
  name: '{shelvename}',
  author: '{author}',
  date: '{date|isodatesec}',
  filesAdded: '{file_adds|json}',
  filesModified: '{file_mods|json}',
  filesRemoved: '{file_dels|json}',
  description: '{desc}',
};
// Line offset of each field within a shelve output chunk.
export const SHELVE_FIELD_INDEX = fromEntries(
  Object.keys(SHELVE_FIELDS).map((key, i) => [key, i]),
) as {
  [key in Required<keyof typeof SHELVE_FIELDS>]: number;
};
// Full template: one line per field, terminated by COMMIT_END_MARK.
export const SHELVE_FETCH_TEMPLATE = [...Object.values(SHELVE_FIELDS), COMMIT_END_MARK].join('\n');
216
217export function parseShelvedCommitsOutput(logger: Logger, output: string): Array<ShelvedChange> {
218 const shelves = output.split(COMMIT_END_MARK);
219 const commitInfos: Array<ShelvedChange> = [];
220 for (const chunk of shelves) {
221 try {
222 const lines = chunk.trim().split('\n');
223 if (lines.length < Object.keys(SHELVE_FIELDS).length) {
224 continue;
225 }
226 const files: Array<ChangedFile> = [
227 ...(JSON.parse(lines[SHELVE_FIELD_INDEX.filesModified]) as Array<string>).map(path => ({
228 path,
229 status: 'M' as const,
230 })),
231 ...(JSON.parse(lines[SHELVE_FIELD_INDEX.filesAdded]) as Array<string>).map(path => ({
232 path,
233 status: 'A' as const,
234 })),
235 ...(JSON.parse(lines[SHELVE_FIELD_INDEX.filesRemoved]) as Array<string>).map(path => ({
236 path,
237 status: 'R' as const,
238 })),
239 ];
240 commitInfos.push({
241 hash: lines[SHELVE_FIELD_INDEX.hash],
242 name: lines[SHELVE_FIELD_INDEX.name],
243 date: new Date(lines[SHELVE_FIELD_INDEX.date]),
244 filesSample: files.slice(0, MAX_FETCHED_FILES_PER_COMMIT),
245 totalFileCount: files.length,
246 description: lines.slice(SHELVE_FIELD_INDEX.description).join('\n'),
247 });
248 } catch (err) {
249 logger.error('failed to parse shelved change');
250 }
251 }
252 return commitInfos;
253}
254
255///// Changed Files /////
256
// Template fields used to fetch the changed files of specific commits.
export const CHANGED_FILES_FIELDS = {
  hash: '{node}',
  filesAdded: '{file_adds|json}',
  filesModified: '{file_mods|json}',
  filesRemoved: '{file_dels|json}',
};
// Line offset of each field within an output chunk.
export const CHANGED_FILES_INDEX = fromEntries(
  Object.keys(CHANGED_FILES_FIELDS).map((key, i) => [key, i]),
) as {
  [key in Required<keyof typeof CHANGED_FILES_FIELDS>]: number;
};
// Full template: one line per field, terminated by COMMIT_END_MARK.
export const CHANGED_FILES_TEMPLATE = [
  ...Object.values(CHANGED_FILES_FIELDS),
  COMMIT_END_MARK,
].join('\n');
272
273///// Helpers /////
274
275function parseSuccessorData(successorData: string): SuccessorInfo | undefined {
276 const [successorString] = successorData.split(',', 1); // we're only interested in the first available mutation
277 if (!successorString) {
278 return undefined;
279 }
280 const successor = successorString.split(':');
281 return {
282 hash: successor[1],
283 type: successor[0],
284 };
285}
286function splitLine(line: string, separator = NULL_CHAR): Array<string> {
287 return line.split(separator).filter(e => e.length > 0);
288}
289