addons/isl-server/src/Repository.ts — blame view
View source
b69ab311/**
b69ab312 * Copyright (c) Meta Platforms, Inc. and affiliates.
b69ab313 *
b69ab314 * This source code is licensed under the MIT license found in the
b69ab315 * LICENSE file in the root directory of this source tree.
b69ab316 */
b69ab317
b69ab318import type {
b69ab319 AbsolutePath,
b69ab3110 Alert,
b69ab3111 ChangedFile,
b69ab3112 CodeReviewSystem,
b69ab3113 CommitCloudSyncState,
b69ab3114 CommitInfo,
b69ab3115 ConfigName,
b69ab3116 CwdInfo,
b69ab3117 DiffId,
b69ab3118 Disposable,
b69ab3119 FetchedCommits,
b69ab3120 FetchedUncommittedChanges,
b69ab3121 Hash,
b69ab3122 MergeConflicts,
b69ab3123 OperationCommandProgressReporter,
b69ab3124 OperationProgress,
b69ab3125 PageVisibility,
b69ab3126 PreferredSubmitCommand,
b69ab3127 RepoInfo,
b69ab3128 RepoRelativePath,
b69ab3129 Revset,
b69ab3130 RunnableOperation,
b69ab3131 SettableConfigName,
b69ab3132 ShelvedChange,
b69ab3133 StableInfo,
b69ab3134 Submodule,
b69ab3135 SubmodulesByRoot,
b69ab3136 UncommittedChanges,
b69ab3137 ValidatedRepoInfo,
b69ab3138} from 'isl/src/types';
b69ab3139import type {Comparison} from 'shared/Comparison';
b69ab3140import type {EjecaChildProcess, EjecaOptions} from 'shared/ejeca';
b69ab3141import type {CodeReviewProvider} from './CodeReviewProvider';
b69ab3142import type {KindOfChange, PollKind} from './WatchForChanges';
b69ab3143import type {TrackEventName} from './analytics/eventNames';
b69ab3144import type {ConfigLevel, ResolveCommandConflictOutput} from './commands';
b69ab3145import type {RepositoryContext} from './serverTypes';
b69ab3146
b69ab3147import {Set as ImSet} from 'immutable';
b69ab3148import {
b69ab3149 CommandRunner,
b69ab3150 CommitCloudBackupStatus,
b69ab3151 allConfigNames,
b69ab3152 settableConfigNames,
b69ab3153} from 'isl/src/types';
b69ab3154import fs from 'node:fs';
b69ab3155import path from 'node:path';
b69ab3156import {revsetArgsForComparison} from 'shared/Comparison';
b69ab3157import {LRU} from 'shared/LRU';
b69ab3158import {RateLimiter} from 'shared/RateLimiter';
b69ab3159import {TypedEventEmitter} from 'shared/TypedEventEmitter';
b69ab3160import {ejeca, simplifyEjecaError} from 'shared/ejeca';
b69ab3161import {exists} from 'shared/fs';
b69ab3162import {removeLeadingPathSep} from 'shared/pathUtils';
b69ab3163import {notEmpty, nullthrows, randomId} from 'shared/utils';
b69ab3164import {Internal} from './Internal';
b69ab3165import {OperationQueue} from './OperationQueue';
b69ab3166import {PageFocusTracker} from './PageFocusTracker';
b69ab3167import {WatchForChanges} from './WatchForChanges';
b69ab3168import {parseAlerts} from './alerts';
b69ab3169import {
b69ab3170 MAX_SIMULTANEOUS_CAT_CALLS,
b69ab3171 READ_COMMAND_TIMEOUT_MS,
b69ab3172 computeNewConflicts,
b69ab3173 extractRepoInfoFromUrl,
b69ab3174 findDotDir,
b69ab3175 findRoot,
b69ab3176 findRoots,
b69ab3177 getConfigs,
b69ab3178 getExecParams,
b69ab3179 runCommand,
b69ab3180 setConfig,
b69ab3181} from './commands';
b69ab3182import {DEFAULT_DAYS_OF_COMMITS_TO_LOAD, ErrorShortMessages} from './constants';
b69ab3183import {GitHubCodeReviewProvider} from './github/githubCodeReviewProvider';
b69ab3184import {isGithubEnterprise} from './github/queryGraphQL';
fb66e1e85import {GroveCodeReviewProvider} from './grove/groveCodeReviewProvider';
b69ab3186import {
b69ab3187 CHANGED_FILES_FIELDS,
b69ab3188 CHANGED_FILES_INDEX,
b69ab3189 CHANGED_FILES_TEMPLATE,
b69ab3190 COMMIT_END_MARK,
b69ab3191 SHELVE_FETCH_TEMPLATE,
b69ab3192 attachStableLocations,
b69ab3193 getMainFetchTemplate,
b69ab3194 parseCommitInfoOutput,
b69ab3195 parseShelvedCommitsOutput,
b69ab3196} from './templates';
b69ab3197import {
b69ab3198 findPublicAncestor,
b69ab3199 handleAbortSignalOnProcess,
b69ab31100 isEjecaError,
b69ab31101 serializeAsyncCall,
b69ab31102} from './utils';
b69ab31103
b69ab31104/**
b69ab31105 * This class is responsible for providing information about the working copy
b69ab31106 * for a Sapling repository.
b69ab31107 *
b69ab31108 * A Repository may be reused by multiple connections, not just one ISL window.
b69ab31109 * This is so we don't duplicate watchman subscriptions and calls to status/log.
b69ab31110 * A Repository does not have a pre-defined `cwd`, so it may be reused across cwds.
b69ab31111 *
b69ab31112 * Prefer using `RepositoryCache.getOrCreate()` to access and dispose `Repository`s.
b69ab31113 */
b69ab31114export class Repository {
b69ab31115 public IGNORE_COMMIT_MESSAGE_LINES_REGEX = /^((?:HG|SL):.*)\n?/gm;
b69ab31116
b69ab31117 private mergeConflicts: MergeConflicts | undefined = undefined;
b69ab31118 private uncommittedChanges: FetchedUncommittedChanges | null = null;
b69ab31119 private smartlogCommits: FetchedCommits | null = null;
b69ab31120 private submodulesByRoot: SubmodulesByRoot | undefined = undefined;
b69ab31121 private submodulePathCache: ImSet<RepoRelativePath> | undefined = undefined;
b69ab31122
b69ab31123 private mergeConflictsEmitter = new TypedEventEmitter<'change', MergeConflicts | undefined>();
b69ab31124 private uncommittedChangesEmitter = new TypedEventEmitter<'change', FetchedUncommittedChanges>();
b69ab31125 private smartlogCommitsChangesEmitter = new TypedEventEmitter<'change', FetchedCommits>();
b69ab31126 private submodulesChangesEmitter = new TypedEventEmitter<'change', SubmodulesByRoot>();
b69ab31127
b69ab31128 private smartlogCommitsBeginFetchingEmitter = new TypedEventEmitter<'start', undefined>();
b69ab31129 private uncommittedChangesBeginFetchingEmitter = new TypedEventEmitter<'start', undefined>();
b69ab31130
b69ab31131 private disposables: Array<() => void> = [
b69ab31132 () => this.mergeConflictsEmitter.removeAllListeners(),
b69ab31133 () => this.uncommittedChangesEmitter.removeAllListeners(),
b69ab31134 () => this.smartlogCommitsChangesEmitter.removeAllListeners(),
b69ab31135 () => this.smartlogCommitsBeginFetchingEmitter.removeAllListeners(),
b69ab31136 () => this.uncommittedChangesBeginFetchingEmitter.removeAllListeners(),
b69ab31137 ];
b69ab31138 public onDidDispose(callback: () => unknown): void {
b69ab31139 this.disposables.push(callback);
b69ab31140 }
b69ab31141
b69ab31142 private operationQueue: OperationQueue;
4fe1f34143 public watchForChanges: WatchForChanges;
b69ab31144 private pageFocusTracker = new PageFocusTracker();
b69ab31145 public codeReviewProvider?: CodeReviewProvider;
b69ab31146
b69ab31147 /**
b69ab31148 * Config: milliseconds to hold off log/status refresh during the start of a command.
b69ab31149 * This is to avoid showing messy indeterminate states (like millions of files changed
b69ab31150 * during a long distance checkout, or commit graph changed but '.' is out of sync).
b69ab31151 *
b69ab31152 * Default: 10 seconds. Can be set by the `isl.hold-off-refresh-ms` setting.
b69ab31153 */
b69ab31154 public configHoldOffRefreshMs = 10000;
b69ab31155
b69ab31156 private configRateLimiter = new RateLimiter(1);
b69ab31157
b69ab31158 private currentVisibleCommitRangeIndex = 0;
b69ab31159 private visibleCommitRanges: Array<number | undefined> = [
b69ab31160 DEFAULT_DAYS_OF_COMMITS_TO_LOAD,
b69ab31161 60,
b69ab31162 undefined,
b69ab31163 ];
b69ab31164
b69ab31165 /**
b69ab31166 * Additional commits to include in batched `log` fetch,
b69ab31167 * used for additional remote bookmarks / known stable commit hashes.
b69ab31168 * After fetching commits, stable names will be added to commits in "stableCommitMetadata"
b69ab31169 */
b69ab31170 public stableLocations: Array<StableInfo> = [];
b69ab31171
b69ab31172 /**
b69ab31173 * Recommended remote bookmarks to be include in batched `log` fetch.
b69ab31174 * If a bookmark is not in the subscriptions list yet, then it will be pulled explicitly.
b69ab31175 * Undefined means not yet fetched.
b69ab31176 */
b69ab31177 public recommendedBookmarks: Array<string> | undefined;
b69ab31178
b69ab31179 /**
b69ab31180 * The context used when the repository was created.
b69ab31181 * This is needed for subscriptions to have access to ANY logger, etc.
b69ab31182 * Avoid using this, and prefer using the correct context for a given connection.
b69ab31183 */
b69ab31184 public initialConnectionContext: RepositoryContext;
b69ab31185
b69ab31186 public fullRepoBranchModule = Internal.RepositoryFullRepoBranchModule?.create(
b69ab31187 this,
b69ab31188 this.smartlogCommitsChangesEmitter,
b69ab31189 );
b69ab31190
  /** Prefer using `RepositoryCache.getOrCreate()` to access and dispose `Repository`s. */
  constructor(
    public info: ValidatedRepoInfo,
    ctx: RepositoryContext,
  ) {
    this.initialConnectionContext = ctx;

    // Choose a code review provider from the detected remote type.
    // NOTE(review): `remote.type` can only match one branch at a time, so these
    // sequential `if`s behave like an if/else chain.
    const remote = info.codeReviewSystem;
    if (remote.type === 'github') {
      this.codeReviewProvider = new GitHubCodeReviewProvider(remote, ctx.logger);
    }

    if (remote.type === 'grove') {
      // Grove auth token comes from local grove config, not repo config.
      const groveConfig = readGroveConfig(ctx.logger);
      this.codeReviewProvider = new GroveCodeReviewProvider(remote, ctx.logger, groveConfig.token);
    }

    if (remote.type === 'phabricator' && Internal?.PhabricatorCodeReviewProvider != null) {
      this.codeReviewProvider = new Internal.PhabricatorCodeReviewProvider(
        remote,
        this.initialConnectionContext,
        this.info.dotdir,
      );
    }

    // Suppress auto-refresh while a mutating command has been running for less
    // than `configHoldOffRefreshMs` — rely on optimistic state instead.
    const shouldWait = (): boolean => {
      const startTime = this.operationQueue.getRunningOperationStartTime();
      if (startTime == null) {
        return false;
      }
      // Prevent auto-refresh during the first 10 seconds of a running command.
      // When a command is running, the intermediate state can be messy:
      // - status errors out (edenfs), is noisy (long distance goto)
      // - commit graph and the `.` are updated separately and hard to predict
      // Let's just rely on optimistic state to provide the "clean" outcome.
      // In case the command takes a long time to run, allow refresh after
      // the time period.
      // Fundamentally, the intermediate states have no choice but have to
      // be messy because filesystems are not transactional (and reading in
      // `sl` is designed to be lock-free).
      const elapsedMs = Date.now() - startTime.valueOf();
      const result = elapsedMs < this.configHoldOffRefreshMs;
      return result;
    };
    // Dispatch filesystem/watchman change notifications to the matching fetchers.
    const callback = (kind: KindOfChange, pollKind?: PollKind) => {
      if (pollKind !== 'force' && shouldWait()) {
        // Do nothing. This is fine because after the operation
        // there will be a refresh.
        ctx.logger.info('polling prevented from shouldWait');
        return;
      }
      if (kind === 'uncommitted changes') {
        this.fetchUncommittedChanges();
      } else if (kind === 'commits') {
        this.fetchSmartlogCommits();
      } else if (kind === 'merge conflicts') {
        this.checkForMergeConflicts();
      } else if (kind === 'everything') {
        this.fetchUncommittedChanges();
        this.fetchSmartlogCommits();
        this.checkForMergeConflicts();

        this.codeReviewProvider?.triggerDiffSummariesFetch(
          // We could choose to only fetch the diffs that changed (`newDiffs`) rather than all diffs,
          // but our UI doesn't cache old values, thus all other diffs would appear empty
          this.getAllDiffIds(),
        );
        this.codeReviewProvider?.triggerCanopySignalsFetch?.();
        this.initialConnectionContext.tracker.track('DiffFetchSource', {
          extras: {source: 'watch_for_changes', kind, pollKind},
        });
      }
    };
    this.watchForChanges = new WatchForChanges(info, this.pageFocusTracker, callback, ctx);

    // The queue decides *when* to run; this callback decides *how* each runner executes.
    this.operationQueue = new OperationQueue(
      (
        ctx: RepositoryContext,
        operation: RunnableOperation,
        handleCommandProgress,
        signal: AbortSignal,
      ): Promise<unknown> => {
        const {cwd} = ctx;
        if (operation.runner === CommandRunner.Sapling) {
          return this.runOperation(ctx, operation, handleCommandProgress, signal);
        } else if (operation.runner === CommandRunner.CodeReviewProvider) {
          if (this.codeReviewProvider?.runExternalCommand == null) {
            return Promise.reject(
              Error('CodeReviewProvider does not support running external commands'),
            );
          }

          // TODO: support stdin
          return (
            this.codeReviewProvider?.runExternalCommand(
              cwd,
              operation.args,
              handleCommandProgress,
              signal,
            ) ?? Promise.resolve()
          );
        } else if (operation.runner === CommandRunner.Conf) {
          const {args: normalizedArgs} = this.normalizeOperationArgs(cwd, operation);
          if (this.codeReviewProvider?.runConfCommand == null) {
            return Promise.reject(
              Error('CodeReviewProvider does not support running conf commands'),
            );
          }

          return (
            this.codeReviewProvider?.runConfCommand(
              cwd,
              normalizedArgs,
              handleCommandProgress,
              signal,
            ) ?? Promise.resolve()
          );
        } else if (operation.runner === CommandRunner.InternalArcanist) {
          // TODO: support stdin
          const {args: normalizedArgs} = this.normalizeOperationArgs(cwd, operation);
          if (Internal.runArcanistCommand == null) {
            return Promise.reject(Error('InternalArcanist runner is not supported'));
          }
          ctx.logger.info('running arcanist command:', normalizedArgs);
          return Internal.runArcanistCommand(cwd, normalizedArgs, handleCommandProgress, signal);
        }
        // Unknown runner: resolve silently rather than fail the queue.
        return Promise.resolve();
      },
    );

    // refetch summaries whenever we see new diffIds
    const seenDiffs = new Set();
    const subscription = this.subscribeToSmartlogCommitsChanges(fetched => {
      if (fetched.commits.value) {
        const newDiffs = [];
        const diffIds = fetched.commits.value
          .filter(commit => commit.diffId != null)
          .map(commit => commit.diffId);
        for (const diffId of diffIds) {
          if (!seenDiffs.has(diffId)) {
            newDiffs.push(diffId);
            seenDiffs.add(diffId);
          }
        }
        if (newDiffs.length > 0) {
          this.codeReviewProvider?.triggerDiffSummariesFetch(
            // We could choose to only fetch the diffs that changed (`newDiffs`) rather than all diffs,
            // but our UI doesn't cache old values, thus all other diffs would appear empty
            this.getAllDiffIds(),
          );
          this.initialConnectionContext.tracker.track('DiffFetchSource', {
            extras: {source: 'saw_new_diffs'},
          });
        }
      }
    });

    // the repo may already be in a conflict state on startup
    this.checkForMergeConflicts();

    this.disposables.push(() => subscription.dispose());

    // Fire-and-forget: applies ISL-recommended configs without blocking construction.
    this.applyConfigInBackground(ctx);

    // Track head-commit movement for analytics (hash + nearest public ancestor).
    const headTracker = this.subscribeToHeadCommit(head => {
      const allCommits = this.getSmartlogCommits();
      const ancestor = findPublicAncestor(allCommits?.commits.value, head);
      this.initialConnectionContext.tracker.track('HeadCommitChanged', {
        extras: {
          hash: head.hash,
          public: ancestor?.hash,
          bookmarks: ancestor?.remoteBookmarks,
        },
      });
    });
    this.disposables.push(headTracker.dispose);

    if (this.fullRepoBranchModule != null) {
      this.disposables.push(() => this.fullRepoBranchModule?.dispose());
    }
  }
b69ab31372
b69ab31373 public nextVisibleCommitRangeInDays(): number | undefined {
b69ab31374 if (this.currentVisibleCommitRangeIndex + 1 < this.visibleCommitRanges.length) {
b69ab31375 this.currentVisibleCommitRangeIndex++;
b69ab31376 }
b69ab31377 return this.visibleCommitRanges[this.currentVisibleCommitRangeIndex];
b69ab31378 }
b69ab31379
b69ab31380 public isPathInsideRepo(p: AbsolutePath): boolean {
b69ab31381 return path.normalize(p).startsWith(this.info.repoRoot);
b69ab31382 }
b69ab31383
b69ab31384 /**
b69ab31385 * Typically, disposing is handled by `RepositoryCache` and not used directly.
b69ab31386 */
b69ab31387 public dispose() {
b69ab31388 this.disposables.forEach(dispose => dispose());
b69ab31389 this.codeReviewProvider?.dispose();
b69ab31390 this.watchForChanges.dispose();
b69ab31391 }
b69ab31392
b69ab31393 public onChangeConflictState(
b69ab31394 callback: (conflicts: MergeConflicts | undefined) => unknown,
b69ab31395 ): Disposable {
b69ab31396 this.mergeConflictsEmitter.on('change', callback);
b69ab31397
b69ab31398 if (this.mergeConflicts) {
b69ab31399 // if we're already in merge conflicts, let the client know right away
b69ab31400 callback(this.mergeConflicts);
b69ab31401 }
b69ab31402
b69ab31403 return {dispose: () => this.mergeConflictsEmitter.off('change', callback)};
b69ab31404 }
b69ab31405
  /**
   * Re-derive the merge-conflict state and notify subscribers via
   * `mergeConflictsEmitter`. Serialized so concurrent triggers coalesce.
   * Emits an intermediate `{state: 'loading'}` when conflicts are first seen.
   */
  public checkForMergeConflicts = serializeAsyncCall(async () => {
    this.initialConnectionContext.logger.info('checking for merge conflicts');
    // Fast path: check if .sl/merge dir changed
    const wasAlreadyInConflicts = this.mergeConflicts != null;
    if (!wasAlreadyInConflicts) {
      const mergeDirExists = await exists(path.join(this.info.dotdir, 'merge'));
      if (!mergeDirExists) {
        // Not in a conflict
        this.initialConnectionContext.logger.info(
          `conflict state still the same (${
            wasAlreadyInConflicts ? 'IN merge conflict' : 'NOT in conflict'
          })`,
        );
        return;
      }
    }

    if (this.mergeConflicts == null) {
      // notify UI that merge conflicts were detected and full details are loading
      this.mergeConflicts = {state: 'loading'};
      this.mergeConflictsEmitter.emit('change', this.mergeConflicts);
    }

    // More expensive full check for conflicts. Necessary if we see .sl/merge change, or if
    // we're already in a conflict and need to re-check if a conflict was resolved.

    let output: ResolveCommandConflictOutput;
    const fetchStartTimestamp = Date.now();
    try {
      // TODO: is this command fast on large files? it includes full conflicting file contents!
      // `sl resolve --list --all` does not seem to give any way to disambiguate (all conflicts resolved) and (not in merge)
      const proc = await this.runCommand(
        ['resolve', '--tool', 'internal:dumpjson', '--all'],
        'GetConflictsCommand',
        this.initialConnectionContext,
      );
      output = JSON.parse(proc.stdout) as ResolveCommandConflictOutput;
    } catch (err) {
      this.initialConnectionContext.logger.error(`failed to check for merge conflicts: ${err}`);
      // To avoid being stuck in "loading" state forever, let's pretend there's no conflicts.
      this.mergeConflicts = undefined;
      this.mergeConflictsEmitter.emit('change', this.mergeConflicts);
      return;
    }

    // `fetchStartTimestamp` lets computeNewConflicts discard results that raced
    // with a newer fetch.
    this.mergeConflicts = computeNewConflicts(this.mergeConflicts, output, fetchStartTimestamp);
    this.initialConnectionContext.logger.info(
      `repo ${this.mergeConflicts ? 'IS' : 'IS NOT'} in merge conflicts`,
    );
    if (this.mergeConflicts) {
      // Log a bounded sample of unresolved ('U') files for debugging.
      const maxConflictsToLog = 20;
      const remainingConflicts = (this.mergeConflicts.files ?? [])
        .filter(conflict => conflict.status === 'U')
        .map(conflict => conflict.path)
        .slice(0, maxConflictsToLog);
      this.initialConnectionContext.logger.info(
        'remaining files with conflicts: ',
        remainingConflicts,
      );
    }
    this.mergeConflictsEmitter.emit('change', this.mergeConflicts);

    // Track only the transitions into/out of the conflict state.
    if (!wasAlreadyInConflicts && this.mergeConflicts) {
      this.initialConnectionContext.tracker.track('EnterMergeConflicts', {
        extras: {numConflicts: this.mergeConflicts.files?.length ?? 0},
      });
    } else if (wasAlreadyInConflicts && !this.mergeConflicts) {
      this.initialConnectionContext.tracker.track('ExitMergeConflicts', {extras: {}});
    }
  });
b69ab31476
b69ab31477 public getMergeConflicts(): MergeConflicts | undefined {
b69ab31478 return this.mergeConflicts;
b69ab31479 }
b69ab31480
b69ab31481 public async getMergeTool(ctx: RepositoryContext): Promise<string | null> {
b69ab31482 // treat undefined as "not cached", and null as "not configured"/invalid
b69ab31483 if (ctx.cachedMergeTool !== undefined) {
b69ab31484 return ctx.cachedMergeTool;
b69ab31485 }
b69ab31486 const tool = ctx.knownConfigs?.get('ui.merge') ?? 'internal:merge';
b69ab31487 let usesCustomMerge = tool !== 'internal:merge';
b69ab31488
b69ab31489 if (usesCustomMerge) {
b69ab31490 // TODO: we could also check merge-tools.${tool}.disabled here
b69ab31491 const customToolUsesGui =
b69ab31492 (
b69ab31493 await this.forceGetConfig(ctx, `merge-tools.${tool}.gui`).catch(() => undefined)
b69ab31494 )?.toLowerCase() === 'true';
b69ab31495 if (!customToolUsesGui) {
b69ab31496 ctx.logger.warn(
b69ab31497 `configured custom merge tool '${tool}' is not a GUI tool, using :merge3 instead`,
b69ab31498 );
b69ab31499 usesCustomMerge = false;
b69ab31500 } else {
b69ab31501 ctx.logger.info(`using configured custom GUI merge tool ${tool}`);
b69ab31502 }
b69ab31503 ctx.tracker.track('UsingExternalMergeTool', {
b69ab31504 extras: {
b69ab31505 tool,
b69ab31506 isValid: usesCustomMerge,
b69ab31507 },
b69ab31508 });
b69ab31509 } else {
b69ab31510 ctx.logger.info(`using default :merge3 merge tool`);
b69ab31511 }
b69ab31512
b69ab31513 const mergeTool = usesCustomMerge ? tool : null;
b69ab31514 ctx.cachedMergeTool = mergeTool;
b69ab31515 return mergeTool;
b69ab31516 }
b69ab31517
  /**
   * Determine basic repo info including the root and important config values.
   * Resulting RepoInfo may have null fields if cwd is not a valid repo root.
   * Throws if `command` is not found.
   */
  static async getRepoInfo(ctx: RepositoryContext): Promise<RepoInfo> {
    const {cmd, cwd, logger} = ctx;
    // Run the root/dotdir lookups and the config batch in parallel; only
    // findRoot's failure is captured (as an Error value) to classify below.
    const [repoRoot, repoRoots, dotdir, configs] = await Promise.all([
      findRoot(ctx).catch((err: Error) => err),
      findRoots(ctx),
      findDotDir(ctx),
      // TODO: This should actually use expanded paths, since the config won't handle custom schemes.
      // However, `sl debugexpandpaths` is currently too slow and impacts startup time.
      getConfigs(ctx, [
        'paths.default',
        'github.pull_request_domain',
        'github.preferred_submit_command',
        'phrevset.callsign',
        'remotefilelog.reponame',
        'ui.username',
        'grove.owner',
        'grove.api_url',
      ]),
    ]);
    const pathsDefault = configs.get('paths.default') ?? '';
    const preferredSubmitCommand = configs.get('github.preferred_submit_command');

    if (repoRoot instanceof Error) {
      // first check that the cwd exists
      const cwdExists = await exists(cwd);
      if (!cwdExists) {
        return {type: 'cwdDoesNotExist', cwd};
      }

      // cwd exists but `sl root` failed: the sl command itself is likely broken/missing.
      return {
        type: 'invalidCommand',
        command: cmd,
        path: process.env.PATH,
      };
    }
    if (repoRoot == null || dotdir == null) {
      // A seemingly invalid repo may just be from EdenFS not running properly
      if (await isUnhealthyEdenFs(cwd)) {
        return {type: 'edenFsUnhealthy', cwd};
      }
      return {type: 'cwdNotARepository', cwd};
    }

    const isEdenFs = await isEdenFsRepo(repoRoot as AbsolutePath);

    // Classify the remote (paths.default) into a code review system.
    let codeReviewSystem: CodeReviewSystem;
    let pullRequestDomain;
    if (Internal.isMononokePath?.(pathsDefault)) {
      // TODO: where should we be getting this from? arcconfig instead? do we need this?
      const repo = pathsDefault.slice(pathsDefault.lastIndexOf('/') + 1);
      codeReviewSystem = {type: 'phabricator', repo, callsign: configs.get('phrevset.callsign')};
    } else if (/^mononoke:\/\/grove\.host/.test(pathsDefault) || /^slapi:/.test(pathsDefault)) {
      // Grove-hosted Mononoke remote — use Grove code review
      const repo = configs.get('remotefilelog.reponame') ?? pathsDefault.slice(pathsDefault.lastIndexOf('/') + 1);
      const groveConfig = readGroveConfig(logger);
      // API URL precedence: repo config > local grove hub config > public default.
      const apiUrl = configs.get('grove.api_url') ?? (groveConfig.hub ? `${groveConfig.hub}/api` : 'https://grove.host/api');
      // owner: explicit config > look up from API by repo name > fall back to username
      let owner = configs.get('grove.owner') ?? '';
      if (!owner) {
        owner = await resolveGroveRepoOwner(apiUrl, repo, groveConfig.token, logger) ?? groveConfig.username ?? '';
      }
      const repoSettings = await fetchGroveRepoSettings(apiUrl, owner, repo, groveConfig.token, logger);
      codeReviewSystem = {type: 'grove', apiUrl, owner, repo, requireDiffs: repoSettings.requireDiffs};
    } else if (/^mononoke:\/\//.test(pathsDefault) || /\/edenapi\/?/.test(pathsDefault)) {
      // Mononoke/EdenAPI remote — no code review provider
      codeReviewSystem = {type: 'none'};
    } else if (pathsDefault === '') {
      // No remote configured at all.
      codeReviewSystem = {type: 'none'};
    } else {
      const repoInfo = extractRepoInfoFromUrl(pathsDefault);
      if (
        repoInfo != null &&
        (repoInfo.hostname === 'github.com' || (await isGithubEnterprise(repoInfo.hostname)))
      ) {
        const {owner, repo, hostname} = repoInfo;
        codeReviewSystem = {
          type: 'github',
          owner,
          repo,
          hostname,
        };
      } else {
        codeReviewSystem = {type: 'unknown', path: pathsDefault};
      }
      pullRequestDomain = configs.get('github.pull_request_domain');
    }

    const result: RepoInfo = {
      type: 'success',
      command: cmd,
      dotdir,
      repoRoot,
      repoRoots,
      codeReviewSystem,
      pullRequestDomain,
      preferredSubmitCommand: preferredSubmitCommand as PreferredSubmitCommand | undefined,
      isEdenFs,
    };
    logger.info('repo info: ', result);
    return result;
  }
b69ab31624
b69ab31625 /**
b69ab31626 * Determine basic information about a cwd, without fetching the full RepositoryInfo.
b69ab31627 * Useful to determine if a cwd is valid and find the repo root without constructing a Repository.
b69ab31628 */
f096176629
b69ab31630 static async getCwdInfo(ctx: RepositoryContext): Promise<CwdInfo> {
b69ab31631 const root = await findRoot(ctx).catch((err: Error) => err);
b69ab31632
b69ab31633 if (root instanceof Error || root == null) {
b69ab31634 return {cwd: ctx.cwd};
b69ab31635 }
b69ab31636
b69ab31637 const [realCwd, realRoot] = await Promise.all([
b69ab31638 fs.promises.realpath(ctx.cwd),
b69ab31639 fs.promises.realpath(root),
b69ab31640 ]);
b69ab31641 // Since we found `root` for this particular `cwd`, we expect realpath(root) is a prefix of realpath(cwd).
b69ab31642 // That is, the relative path does not contain any ".." components.
b69ab31643 const repoRelativeCwd = path.relative(realRoot, realCwd);
b69ab31644 return {
b69ab31645 cwd: ctx.cwd,
b69ab31646 repoRoot: realRoot,
b69ab31647 repoRelativeCwdLabel: path.normalize(path.join(path.basename(realRoot), repoRelativeCwd)),
b69ab31648 };
b69ab31649 }
b69ab31650
  /**
   * Run long-lived command which mutates the repository state.
   * Progress is streamed back as it comes in.
   * Operations are run immediately. For queueing, see OperationQueue.
   * This promise resolves when the operation exits.
   */
  async runOrQueueOperation(
    ctx: RepositoryContext,
    operation: RunnableOperation,
    onProgress: (progress: OperationProgress) => void,
  ): Promise<void> {
    // Capture the exit code from the progress stream so the post-operation
    // hook below can tell success from failure.
    let exitCode: number | undefined;
    const result = await this.operationQueue.runOrQueueOperation(ctx, operation, progress => {
      if (progress.kind === 'exit') {
        exitCode = progress.exitCode;
      }
      onProgress(progress);
    });

    if (result !== 'skipped') {
      // After any operation finishes, make sure we poll right away,
      // so the UI is guaranteed to get the latest data.
      this.watchForChanges.poll('force');

      // Let the code review provider react to the completed operation (e.g. create a diff after push)
      if (this.codeReviewProvider?.onPostOperation != null && exitCode != null) {
        // NUL separator: safe because commit fields cannot contain NUL bytes.
        const SEP = '\x00';
        // The provider is handed a lazy resolver that fetches commit info for a revset.
        this.codeReviewProvider.onPostOperation(operation, exitCode, async (rev: string) => {
          try {
            const output = await this.runCommand(
              ['log', '--rev', rev, '--template', `{node}${SEP}{desc|firstline}${SEP}{desc}${SEP}{p1node}`],
              'PostOperationCommitInfoCommand',
              ctx,
            );
            const parts = output.stdout.split(SEP);
            if (parts.length < 4) {
              // Unexpected template output; report "no info" rather than throwing.
              return undefined;
            }
            const [hash, title, fullDesc, parentHash] = parts;
            // Description is the full commit message minus the title (first line)
            const descLines = fullDesc.split('\n');
            const description = descLines.slice(1).join('\n').trim();
            return {title, description, hash, parentHash};
          } catch (err) {
            ctx.logger.error('[grove] failed to fetch commit info for post-operation hook', err);
            return undefined;
          }
        });
      }
    }
  }
b69ab31702
b69ab31703 /**
b69ab31704 * Abort the running operation if it matches the given id.
b69ab31705 */
b69ab31706 abortRunningOperation(operationId: string) {
b69ab31707 this.operationQueue.abortRunningOperation(operationId);
b69ab31708 }
b69ab31709
b69ab31710 /** The currently running operation tracked by the server. */
b69ab31711 getRunningOperation() {
b69ab31712 return this.operationQueue.getRunningOperation();
b69ab31713 }
b69ab31714
  /**
   * Expand an operation's structured args into plain CLI arguments.
   * Structured args (configs, repo-relative files, revsets) are validated and
   * rewritten; plain string args are checked against a deny-list so callers
   * cannot smuggle in flags like --config or --cwd.
   * Returns the final argv plus optional stdin (used for long file lists).
   */
  private normalizeOperationArgs(
    cwd: string,
    operation: RunnableOperation,
  ): {args: Array<string>; stdin?: string | undefined} {
    const repoRoot = nullthrows(this.info.repoRoot);
    // Flags that would let a client escape the repo or inject config.
    const illegalArgs = new Set(['--cwd', '--config', '--insecure', '--repository', '-R']);
    let stdin = operation.stdin;
    const args = [];
    for (const arg of operation.args) {
      if (typeof arg === 'object') {
        switch (arg.type) {
          case 'config':
            // Only allow-listed config names may be set via --config.
            if (!(settableConfigNames as ReadonlyArray<string>).includes(arg.key)) {
              throw new Error(`config ${arg.key} not allowed`);
            }
            args.push('--config', `${arg.key}=${arg.value}`);
            continue;
          case 'repo-relative-file':
            // Rewrite repo-relative paths to be relative to the command's cwd.
            args.push(path.normalize(path.relative(cwd, path.join(repoRoot, arg.path))));
            continue;
          case 'repo-relative-file-list':
            // pass long lists of files as stdin via fileset patterns
            // this is passed as an arg instead of directly in stdin so that we can do path normalization
            args.push('listfile0:-');
            if (stdin != null) {
              throw new Error('stdin already set when using repo-relative-file-list');
            }
            stdin = arg.paths
              .map(p => path.normalize(path.relative(cwd, path.join(repoRoot, p))))
              .join('\0');
            continue;
          case 'exact-revset':
            if (arg.revset.startsWith('-')) {
              // don't allow revsets to be used as flags
              throw new Error('invalid revset');
            }
            args.push(arg.revset);
            continue;
          case 'succeedable-revset':
            // Follow the commit through rewrites (amend/rebase) to its latest successor.
            args.push(`max(successors(${arg.revset}))`);
            continue;
          case 'optimistic-revset':
            // NOTE(review): handled identically to 'succeedable-revset' here —
            // presumably the distinction only matters client-side; confirm.
            args.push(`max(successors(${arg.revset}))`);
            continue;
        }
      }
      if (illegalArgs.has(arg)) {
        throw new Error(`argument '${arg}' is not allowed`);
      }
      args.push(arg);
    }
    return {args, stdin};
  }
b69ab31768
b69ab31769 private async operationIPC(
b69ab31770 ctx: RepositoryContext,
b69ab31771 onProgress: OperationCommandProgressReporter,
b69ab31772 child: EjecaChildProcess,
b69ab31773 options: EjecaOptions,
b69ab31774 ): Promise<void> {
b69ab31775 if (!options.ipc) {
b69ab31776 return;
b69ab31777 }
b69ab31778
b69ab31779 interface IpcProgressBar {
b69ab31780 id: number;
b69ab31781 topic: string;
b69ab31782 unit: string;
b69ab31783 total: number;
b69ab31784 position: number;
b69ab31785 parent_id?: number;
b69ab31786 }
b69ab31787
b69ab31788 while (true) {
b69ab31789 try {
b69ab31790 // eslint-disable-next-line no-await-in-loop
b69ab31791 const message = await child.getOneMessage();
b69ab31792 if (message === null || typeof message !== 'object') {
b69ab31793 break;
b69ab31794 }
b69ab31795 if ('progress_bar_update' in message) {
b69ab31796 const bars = message.progress_bar_update as IpcProgressBar[];
b69ab31797 const blen = bars.length;
b69ab31798 if (blen > 0) {
b69ab31799 const msg = bars[blen - 1];
b69ab31800 onProgress('progress', {
b69ab31801 message: msg.topic,
b69ab31802 progress: msg.position,
b69ab31803 progressTotal: msg.total,
b69ab31804 unit: msg.unit,
b69ab31805 });
b69ab31806 }
b69ab31807 } else if ('warning' in message) {
b69ab31808 onProgress('warning', message.warning as string);
b69ab31809 } else {
b69ab31810 break;
b69ab31811 }
b69ab31812 } catch (err) {
b69ab31813 break;
b69ab31814 }
b69ab31815 }
b69ab31816 }
b69ab31817
b69ab31818 /**
b69ab31819 * Called by this.operationQueue in response to runOrQueueOperation when an operation is ready to actually run.
b69ab31820 */
b69ab31821 private async runOperation(
b69ab31822 ctx: RepositoryContext,
b69ab31823 operation: RunnableOperation,
b69ab31824 onProgress: OperationCommandProgressReporter,
b69ab31825 signal: AbortSignal,
b69ab31826 ): Promise<void> {
b69ab31827 const {cwd} = ctx;
b69ab31828 const {args: cwdRelativeArgs, stdin} = this.normalizeOperationArgs(cwd, operation);
b69ab31829
b69ab31830 const env = await Promise.all([
b69ab31831 Internal.additionalEnvForCommand?.(operation),
b69ab31832 this.getMergeToolEnvVars(ctx),
b69ab31833 ]);
b69ab31834
b69ab31835 const ipc = (ctx.knownConfigs?.get('isl.sl-progress-enabled') ?? 'false') === 'true';
b69ab31836 const fullArgs = [...cwdRelativeArgs];
b69ab31837 if (ctx.debug) {
b69ab31838 fullArgs.unshift('--debug');
b69ab31839 }
b69ab31840 if (ctx.verbose) {
b69ab31841 fullArgs.unshift('--verbose');
b69ab31842 }
b69ab31843 const {command, args, options} = getExecParams(
b69ab31844 this.info.command,
b69ab31845 fullArgs,
b69ab31846 cwd,
b69ab31847 stdin ? {input: stdin, ipc} : {ipc},
b69ab31848 {
b69ab31849 ...env[0],
b69ab31850 ...env[1],
b69ab31851 },
b69ab31852 );
b69ab31853
b69ab31854 ctx.logger.log('run operation: ', command, fullArgs.join(' '));
b69ab31855
b69ab31856 const commandBlocklist = new Set(['debugshell', 'dbsh', 'debugsh']);
b69ab31857 if (args.some(arg => commandBlocklist.has(arg))) {
b69ab31858 throw new Error(`command "${args.join(' ')}" is not allowed`);
b69ab31859 }
b69ab31860
b69ab31861 const execution = ejeca(command, args, options);
b69ab31862 // It would be more appropriate to call this in response to execution.on('spawn'), but
b69ab31863 // this seems to be inconsistent about firing in all versions of node.
b69ab31864 // Just send spawn immediately. Errors during spawn like ENOENT will still be reported by `exit`.
b69ab31865 onProgress('spawn');
b69ab31866 execution.stdout?.on('data', data => {
b69ab31867 onProgress('stdout', data.toString());
b69ab31868 });
b69ab31869 execution.stderr?.on('data', data => {
b69ab31870 onProgress('stderr', data.toString());
b69ab31871 });
b69ab31872 signal.addEventListener('abort', () => {
b69ab31873 ctx.logger.log('kill operation: ', command, fullArgs.join(' '));
b69ab31874 });
b69ab31875 handleAbortSignalOnProcess(execution, signal);
b69ab31876 try {
b69ab31877 this.operationIPC(ctx, onProgress, execution, options);
b69ab31878 const result = await execution;
b69ab31879 onProgress('exit', result.exitCode || 0);
b69ab31880 } catch (err) {
b69ab31881 onProgress('exit', isEjecaError(err) ? err.exitCode : -1);
b69ab31882 throw err;
b69ab31883 }
b69ab31884 }
b69ab31885
b69ab31886 /**
b69ab31887 * Get environment variables to set up which merge tool to use during an operation.
b69ab31888 * If you're using the default merge tool, use :merge3 instead for slightly better merge information.
b69ab31889 * If you've configured a custom merge tool, make sure we don't overwrite it...
b69ab31890 * ...unless the custom merge tool is *not* a GUI tool, like vimdiff, which would not be interactable in ISL.
b69ab31891 */
b69ab31892 async getMergeToolEnvVars(ctx: RepositoryContext): Promise<Record<string, string> | undefined> {
b69ab31893 const tool = await this.getMergeTool(ctx);
b69ab31894 return tool != null
b69ab31895 ? // allow sl to use the already configured merge tool
b69ab31896 {}
b69ab31897 : // otherwise, use 3-way merge
b69ab31898 {
b69ab31899 HGMERGE: ':merge3',
b69ab31900 SL_MERGE: ':merge3',
b69ab31901 };
b69ab31902 }
b69ab31903
  /** Record a page's focus/visibility state and track the change for analytics. */
  setPageFocus(page: string, state: PageVisibility) {
    this.pageFocusTracker.setState(page, state);
    this.initialConnectionContext.tracker.track('FocusChanged', {extras: {state}});
  }
b69ab31908
b69ab31909 private refcount = 0;
b69ab31910 ref() {
b69ab31911 this.refcount++;
b69ab31912 if (this.refcount === 1) {
b69ab31913 this.watchForChanges.setupSubscriptions(this.initialConnectionContext);
b69ab31914 }
b69ab31915 }
b69ab31916 unref() {
b69ab31917 this.refcount--;
b69ab31918 if (this.refcount === 0) {
b69ab31919 this.watchForChanges.disposeWatchmanSubscriptions();
b69ab31920 }
b69ab31921 }
b69ab31922
  /** Return the latest fetched value for UncommittedChanges, or null if nothing has been fetched yet. */
  getUncommittedChanges(): FetchedUncommittedChanges | null {
    return this.uncommittedChanges;
  }
b69ab31927
b69ab31928 subscribeToUncommittedChanges(
b69ab31929 callback: (result: FetchedUncommittedChanges) => unknown,
b69ab31930 ): Disposable {
b69ab31931 this.uncommittedChangesEmitter.on('change', callback);
b69ab31932 return {
b69ab31933 dispose: () => {
b69ab31934 this.uncommittedChangesEmitter.off('change', callback);
b69ab31935 },
b69ab31936 };
b69ab31937 }
b69ab31938
  // Serialized so overlapping filesystem events don't run concurrent `status` calls.
  fetchUncommittedChanges = serializeAsyncCall(async () => {
    const fetchStartTimestamp = Date.now();
    try {
      this.uncommittedChangesBeginFetchingEmitter.emit('start');
      // Note: paths from `status -Tjson` run under PLAIN are repo-relative
      const proc = await this.runCommand(
        ['status', '-Tjson', '--copies'],
        'StatusCommand',
        this.initialConnectionContext,
      );
      const files = (JSON.parse(proc.stdout) as UncommittedChanges).map(change => ({
        ...change,
        // Normalize away any leading path separator so paths are uniform.
        path: removeLeadingPathSep(change.path),
      }));

      this.uncommittedChanges = {
        fetchStartTimestamp,
        fetchCompletedTimestamp: Date.now(),
        files: {value: files},
      };
      this.uncommittedChangesEmitter.emit('change', this.uncommittedChanges);
    } catch (err) {
      let error = err;
      if (isEjecaError(error)) {
        // An in-progress checkout makes `status` fail transiently; skip this
        // fetch instead of surfacing a spurious error to the UI.
        if (error.stderr.includes('checkout is currently in progress')) {
          this.initialConnectionContext.logger.info(
            'Ignoring `sl status` error caused by in-progress checkout',
          );
          return;
        }
      }

      this.initialConnectionContext.logger.error('Error fetching files: ', error);
      if (isEjecaError(error)) {
        error = simplifyEjecaError(error);
      }

      // emit an error, but don't save it to this.uncommittedChanges
      this.uncommittedChangesEmitter.emit('change', {
        fetchStartTimestamp,
        fetchCompletedTimestamp: Date.now(),
        files: {error: error instanceof Error ? error : new Error(error as string)},
      });
    }
  });
b69ab31984
  /** Return the latest fetched value for SmartlogCommits, or null if no fetch has completed yet. */
  getSmartlogCommits(): FetchedCommits | null {
    return this.smartlogCommits;
  }
b69ab31989
b69ab31990 subscribeToSmartlogCommitsChanges(callback: (result: FetchedCommits) => unknown) {
b69ab31991 this.smartlogCommitsChangesEmitter.on('change', callback);
b69ab31992 return {
b69ab31993 dispose: () => {
b69ab31994 this.smartlogCommitsChangesEmitter.off('change', callback);
b69ab31995 },
b69ab31996 };
b69ab31997 }
b69ab31998
b69ab31999 subscribeToSmartlogCommitsBeginFetching(callback: (isFetching: boolean) => unknown) {
b69ab311000 const onStart = () => callback(true);
b69ab311001 this.smartlogCommitsBeginFetchingEmitter.on('start', onStart);
b69ab311002 return {
b69ab311003 dispose: () => {
b69ab311004 this.smartlogCommitsBeginFetchingEmitter.off('start', onStart);
b69ab311005 },
b69ab311006 };
b69ab311007 }
b69ab311008
b69ab311009 subscribeToUncommittedChangesBeginFetching(callback: (isFetching: boolean) => unknown) {
b69ab311010 const onStart = () => callback(true);
b69ab311011 this.uncommittedChangesBeginFetchingEmitter.on('start', onStart);
b69ab311012 return {
b69ab311013 dispose: () => {
b69ab311014 this.uncommittedChangesBeginFetchingEmitter.off('start', onStart);
b69ab311015 },
b69ab311016 };
b69ab311017 }
b69ab311018
b69ab311019 subscribeToSubmodulesChanges(callback: (result: SubmodulesByRoot) => unknown) {
b69ab311020 this.submodulesChangesEmitter.on('change', callback);
b69ab311021 return {
b69ab311022 dispose: () => {
b69ab311023 this.submodulesChangesEmitter.off('change', callback);
b69ab311024 },
b69ab311025 };
b69ab311026 }
b69ab311027
b69ab311028 fetchSmartlogCommits = serializeAsyncCall(async () => {
b69ab311029 const fetchStartTimestamp = Date.now();
b69ab311030 try {
b69ab311031 this.smartlogCommitsBeginFetchingEmitter.emit('start');
b69ab311032
b69ab311033 const visibleCommitDayRange = this.visibleCommitRanges[this.currentVisibleCommitRangeIndex];
b69ab311034
ab83ad31035 const primaryRevset = '(interestingbookmarks() + heads(draft()) + ancestors(., 50))';
b69ab311036
b69ab311037 // Revset to fetch for commits, e.g.:
b69ab311038 // smartlog(interestingbookmarks() + heads(draft()) + .)
b69ab311039 // smartlog((interestingbookmarks() + heads(draft()) & date(-14)) + .)
b69ab311040 // smartlog((interestingbookmarks() + heads(draft()) & date(-14)) + . + present(a1b2c3d4))
b69ab311041 const revset = `smartlog(${[
b69ab311042 !visibleCommitDayRange
b69ab311043 ? primaryRevset
b69ab311044 : // filter default smartlog query by date range
b69ab311045 `(${primaryRevset} & date(-${visibleCommitDayRange}))`,
b69ab311046 '.', // always include wdir parent
b69ab311047 // stable locations hashes may be newer than the repo has, wrap in `present()` to only include if available.
b69ab311048 ...this.stableLocations.map(location => `present(${location.hash})`),
b69ab311049 ...(this.recommendedBookmarks ?? []).map(bookmark => `present(${bookmark})`),
b69ab311050 ...(this.fullRepoBranchModule?.genRevset() ?? []),
b69ab311051 ]
b69ab311052 .filter(notEmpty)
b69ab311053 .join(' + ')})`;
b69ab311054
b69ab311055 const template = getMainFetchTemplate(this.info.codeReviewSystem);
b69ab311056
ab83ad31057 this.initialConnectionContext.logger.info('[grove] fetchSmartlogCommits revset:', revset);
ab83ad31058 this.initialConnectionContext.logger.info('[grove] codeReviewSystem:', JSON.stringify(this.info.codeReviewSystem));
ab83ad31059
b69ab311060 const proc = await this.runCommand(
b69ab311061 ['log', '--template', template, '--rev', revset],
b69ab311062 'LogCommand',
b69ab311063 this.initialConnectionContext,
b69ab311064 );
ab83ad31065
ab83ad31066 this.initialConnectionContext.logger.info('[grove] sl log stdout length:', proc.stdout.length);
ab83ad31067 this.initialConnectionContext.logger.info('[grove] sl log stderr:', proc.stderr?.trim() || '(empty)');
ab83ad31068
b69ab311069 const commits = parseCommitInfoOutput(
b69ab311070 this.initialConnectionContext.logger,
b69ab311071 proc.stdout.trim(),
b69ab311072 this.info.codeReviewSystem,
b69ab311073 );
ab83ad31074
ab83ad31075 this.initialConnectionContext.logger.info('[grove] parsed commits count:', commits.length);
ab83ad31076
b69ab311077 if (commits.length === 0) {
b69ab311078 throw new Error(ErrorShortMessages.NoCommitsFetched);
b69ab311079 }
b69ab311080 attachStableLocations(commits, this.stableLocations);
b69ab311081
b69ab311082 if (this.fullRepoBranchModule) {
b69ab311083 this.fullRepoBranchModule.populateSmartlogCommits(commits);
b69ab311084 }
b69ab311085
b69ab311086 this.smartlogCommits = {
b69ab311087 fetchStartTimestamp,
b69ab311088 fetchCompletedTimestamp: Date.now(),
b69ab311089 commits: {value: commits},
b69ab311090 };
b69ab311091 this.smartlogCommitsChangesEmitter.emit('change', this.smartlogCommits);
b69ab311092 } catch (err) {
b69ab311093 let error = err;
b69ab311094 const internalError = Internal.checkInternalError?.(err);
b69ab311095 if (internalError) {
b69ab311096 error = internalError;
b69ab311097 }
b69ab311098 if (isEjecaError(error) && error.stderr.includes('Please check your internet connection')) {
b69ab311099 error = Error('Network request failed. Please check your internet connection.');
b69ab311100 }
b69ab311101
b69ab311102 this.initialConnectionContext.logger.error('Error fetching commits: ', error);
b69ab311103 if (isEjecaError(error)) {
b69ab311104 error = simplifyEjecaError(error);
b69ab311105 }
b69ab311106
b69ab311107 this.smartlogCommitsChangesEmitter.emit('change', {
b69ab311108 fetchStartTimestamp,
b69ab311109 fetchCompletedTimestamp: Date.now(),
b69ab311110 commits: {error: error instanceof Error ? error : new Error(error as string)},
b69ab311111 });
b69ab311112 }
b69ab311113 });
b69ab311114
b69ab311115 public async fetchAndSetRecommendedBookmarks(onFetched?: (bookmarks: Array<string>) => void) {
b69ab311116 if (!Internal.getRecommendedBookmarks) {
b69ab311117 return;
b69ab311118 }
b69ab311119
b69ab311120 try {
b69ab311121 const bookmarks = await Internal.getRecommendedBookmarks(this.initialConnectionContext);
b69ab311122 onFetched?.((this.recommendedBookmarks = bookmarks.map((b: string) => `remote/${b}`)));
b69ab311123 void this.pullRecommendedBookmarks(this.initialConnectionContext);
b69ab311124 } catch (err) {
b69ab311125 this.initialConnectionContext.logger.error('Error fetching recommended bookmarks:', err);
b69ab311126 onFetched?.([]);
b69ab311127 }
b69ab311128 }
b69ab311129
b69ab311130 public async fetchAndSetHiddenMasterConfig(
b69ab311131 onFetched?: (config: Record<string, Array<string>> | null, odType: string | null) => void,
b69ab311132 ) {
b69ab311133 if (!Internal.fetchHiddenMasterBranchConfig) {
b69ab311134 return;
b69ab311135 }
b69ab311136
b69ab311137 try {
b69ab311138 const [config, odType] = await Promise.all([
b69ab311139 Internal.fetchHiddenMasterBranchConfig(this.initialConnectionContext).catch(
b69ab311140 (err: unknown) => {
b69ab311141 this.initialConnectionContext.logger.warn(
b69ab311142 'Failed to fetch hidden master branch config:',
b69ab311143 err,
b69ab311144 );
b69ab311145 return null;
b69ab311146 },
b69ab311147 ),
b69ab311148 Internal.getDevEnvType?.().catch((err: unknown) => {
b69ab311149 this.initialConnectionContext.logger.warn('Failed to fetch OD type:', err);
b69ab311150 return null;
b69ab311151 }),
b69ab311152 ]);
b69ab311153
b69ab311154 onFetched?.(config ?? {}, odType ?? '');
b69ab311155 } catch (err) {
b69ab311156 this.initialConnectionContext.logger.error(
b69ab311157 'Error fetching hidden master branch config:',
b69ab311158 err,
b69ab311159 );
b69ab311160 onFetched?.({}, '');
b69ab311161 }
b69ab311162 }
b69ab311163
b69ab311164 async pullRecommendedBookmarks(ctx: RepositoryContext): Promise<void> {
b69ab311165 if (!this.recommendedBookmarks || !this.recommendedBookmarks.length) {
b69ab311166 return;
b69ab311167 }
b69ab311168
b69ab311169 try {
b69ab311170 const result = await this.runCommand(
b69ab311171 ['bookmarks', '--list-subscriptions'],
b69ab311172 'BookmarksCommand',
b69ab311173 ctx,
b69ab311174 );
b69ab311175 const subscribed = this.parseSubscribedBookmarks(result.stdout);
b69ab311176 const missingBookmarks = this.recommendedBookmarks.filter(
b69ab311177 bookmark => !subscribed.has(bookmark),
b69ab311178 );
b69ab311179
b69ab311180 if (missingBookmarks.length > 0) {
b69ab311181 // We need to strip to pull the remote names
b69ab311182 const missingRemoteNames = missingBookmarks.map(bookmark =>
b69ab311183 bookmark.replace(/^remote\//, ''),
b69ab311184 );
b69ab311185
b69ab311186 const pullBookmarkOperation = this.createPullBookmarksOperation(missingRemoteNames);
b69ab311187 await this.runOrQueueOperation(ctx, pullBookmarkOperation, () => {});
b69ab311188 ctx.logger.info(`Ran pull on new recommended bookmarks: ${missingRemoteNames.join(', ')}`);
b69ab311189 } else {
b69ab311190 // Fetch again as recommended bookmarks likely would not have been set before the startup fetch
b69ab311191 // If bookmarks were pulled, this is automatically called
b69ab311192 this.fetchSmartlogCommits();
b69ab311193 }
b69ab311194 } catch (err) {
b69ab311195 let error = err;
b69ab311196 if (isEjecaError(error)) {
b69ab311197 error = simplifyEjecaError(error);
b69ab311198 }
b69ab311199
b69ab311200 ctx.logger.error('Unable to pull new recommended bookmark(s): ', error);
b69ab311201 }
b69ab311202 }
b69ab311203
  /** Get the current head commit (the commit marked `isDot`) if commits are loaded */
  getHeadCommit(): CommitInfo | undefined {
    return this.smartlogCommits?.commits.value?.find(commit => commit.isDot);
  }
b69ab311208
b69ab311209 /** Watch for changes to the head commit, e.g. from checking out a new commit */
b69ab311210 subscribeToHeadCommit(callback: (head: CommitInfo) => unknown) {
b69ab311211 let headCommit = this.getHeadCommit();
b69ab311212 if (headCommit != null) {
b69ab311213 callback(headCommit);
b69ab311214 }
b69ab311215 const onData = (data: FetchedCommits) => {
b69ab311216 const newHead = data?.commits.value?.find(commit => commit.isDot);
b69ab311217 if (newHead != null && newHead.hash !== headCommit?.hash) {
b69ab311218 callback(newHead);
b69ab311219 headCommit = newHead;
b69ab311220 }
b69ab311221 };
b69ab311222 this.smartlogCommitsChangesEmitter.on('change', onData);
b69ab311223 return {
b69ab311224 dispose: () => {
b69ab311225 this.smartlogCommitsChangesEmitter.off('change', onData);
b69ab311226 },
b69ab311227 };
b69ab311228 }
b69ab311229
  /** Return the most recently fetched submodule map (keyed by repo root), if any. */
  getSubmoduleMap(): SubmodulesByRoot | undefined {
    return this.submodulesByRoot;
  }
b69ab311233
b69ab311234 getSubmodulePathCache(): ImSet<RepoRelativePath> | undefined {
b69ab311235 if (this.submodulePathCache === undefined) {
b69ab311236 const paths = this.submodulesByRoot?.get(this.info.repoRoot)?.value?.map(m => m.path);
b69ab311237 this.submodulePathCache = paths ? ImSet(paths) : undefined;
b69ab311238 }
b69ab311239 return this.submodulePathCache;
b69ab311240 }
b69ab311241
b69ab311242 async fetchSubmoduleMap(): Promise<void> {
b69ab311243 if (this.info.repoRoots == null) {
b69ab311244 return;
b69ab311245 }
b69ab311246 const submoduleMap = new Map();
b69ab311247 await Promise.all(
b69ab311248 this.info.repoRoots?.map(async root => {
b69ab311249 try {
b69ab311250 const proc = await this.runCommand(
b69ab311251 ['debuggitmodules', '--json', '--repo', root],
b69ab311252 'LogCommand',
b69ab311253 this.initialConnectionContext,
b69ab311254 );
b69ab311255 const submodules = JSON.parse(proc.stdout) as Submodule[];
b69ab311256 submoduleMap.set(root, {value: submodules?.length === 0 ? undefined : submodules});
b69ab311257 } catch (err) {
b69ab311258 let error = err;
b69ab311259 if (isEjecaError(error)) {
b69ab311260 // debuggitmodules may not be supported by older versions of Sapling
b69ab311261 error = error.stderr.includes('unknown command')
b69ab311262 ? Error('debuggitmodules command is not supported by your sapling version.')
b69ab311263 : simplifyEjecaError(error);
b69ab311264 }
b69ab311265 this.initialConnectionContext.logger.error('Error fetching submodules: ', error);
b69ab311266
b69ab311267 submoduleMap.set(root, {error: new Error(err as string)});
b69ab311268 }
b69ab311269 }),
b69ab311270 );
b69ab311271
b69ab311272 this.submodulesByRoot = submoduleMap;
b69ab311273 this.submodulePathCache = undefined; // Invalidate path cache
b69ab311274 this.submodulesChangesEmitter.emit('change', submoduleMap);
b69ab311275 }
b69ab311276
b69ab311277 private catLimiter = new RateLimiter(MAX_SIMULTANEOUS_CAT_CALLS, s =>
b69ab311278 this.initialConnectionContext.logger.info('[cat]', s),
b69ab311279 );
b69ab311280 /** Return file content at a given revset, e.g. hash or `.` */
b69ab311281 public cat(ctx: RepositoryContext, file: AbsolutePath, rev: Revset): Promise<string> {
b69ab311282 return this.catLimiter.enqueueRun(async () => {
b69ab311283 // For `sl cat`, we want the output of the command verbatim.
b69ab311284 const options = {stripFinalNewline: false};
b69ab311285 return (await this.runCommand(['cat', file, '--rev', rev], 'CatCommand', ctx, options))
b69ab311286 .stdout;
b69ab311287 });
b69ab311288 }
b69ab311289
  /**
   * Returns line-by-line blame information for a file at a given commit.
   * Returns the line content and commit info.
   * Note: each returned line includes its trailing newline.
   */
  public async blame(
    ctx: RepositoryContext,
    filePath: string,
    hash: string,
  ): Promise<Array<[line: string, info: CommitInfo | undefined]>> {
    const t1 = Date.now();
    // No timeout: blame can be slow on large files.
    const output = await this.runCommand(
      ['blame', filePath, '-Tjson', '--change', '--rev', hash],
      'BlameCommand',
      ctx,
      undefined,
      /* don't timeout */ 0,
    );
    const blame = JSON.parse(output.stdout) as Array<{lines: Array<{line: string; node: string}>}>;
    const t2 = Date.now();

    if (blame.length === 0) {
      // no blame for file, perhaps it was added or untracked
      return [];
    }

    // Collect the distinct commit hashes referenced by the blame output.
    const hashes = new Set<string>();
    for (const line of blame[0].lines) {
      hashes.add(line.node);
    }
    // We don't get all the info we need from the blame command, so we run `sl log` on the hashes.
    // TODO: we could make the blame command return this directly, which is probably faster.
    // TODO: We don't actually need all the fields in FETCH_TEMPLATE for blame. Reducing this template may speed it up as well.
    const commits = await this.lookupCommits(ctx, [...hashes]);
    const t3 = Date.now();
    ctx.logger.info(
      `Fetched ${commits.size} commits for blame. Blame took ${(t2 - t1) / 1000}s, commits took ${
        (t3 - t2) / 1000
      }s`,
    );
    // Pair each blamed line with its commit info (undefined if lookup missed).
    return blame[0].lines.map(({node, line}) => [line, commits.get(node)]);
  }
b69ab311332
  /**
   * Aggregate Commit Cloud state: per-commit backup statuses plus the current
   * workspace info. Failures from the underlying commands are surfaced as
   * syncError/workspaceError fields rather than thrown.
   */
  public async getCommitCloudState(ctx: RepositoryContext): Promise<CommitCloudSyncState> {
    const lastChecked = new Date();

    // Run all three queries in parallel; allSettled so one failure doesn't hide the others.
    const [extension, backupStatuses, cloudStatus] = await Promise.allSettled([
      this.forceGetConfig(ctx, 'extensions.commitcloud'),
      this.fetchCommitCloudBackupStatuses(ctx),
      this.fetchCommitCloudStatus(ctx),
    ]);
    // NOTE(review): any non-empty `extensions.commitcloud` value is treated as
    // "disabled" — this assumes the enabled state is an empty config value
    // (and e.g. `!` means disabled). Confirm this matches how the extension is
    // actually configured.
    if (extension.status === 'fulfilled' && extension.value !== '') {
      return {
        lastChecked,
        isDisabled: true,
      };
    }

    if (backupStatuses.status === 'rejected') {
      return {
        lastChecked,
        syncError: backupStatuses.reason,
      };
    } else if (cloudStatus.status === 'rejected') {
      return {
        lastChecked,
        workspaceError: cloudStatus.reason,
      };
    }

    return {
      lastChecked,
      ...cloudStatus.value,
      commitStatuses: backupStatuses.value,
    };
  }
b69ab311366
  /**
   * Find draft commits not yet backed up to Commit Cloud and classify each as
   * InProgress, Pending (recently committed), or Failed (stale).
   */
  private async fetchCommitCloudBackupStatuses(
    ctx: RepositoryContext,
  ): Promise<Map<Hash, CommitCloudBackupStatus>> {
    // Draft commits the backup system hasn't recorded yet.
    const revset = 'draft() - backedup()';
    const commitCloudBackupStatusTemplate = `{dict(
      hash="{node}",
      backingup="{backingup}",
      date="{date|isodatesec}"
      )|json}\n`;

    const output = await this.runCommand(
      ['log', '--rev', revset, '--template', commitCloudBackupStatusTemplate],
      'CommitCloudSyncBackupStatusCommand',
      ctx,
    );

    // One JSON object per output line; lines that fail to parse are dropped.
    const rawObjects = output.stdout.trim().split('\n');
    const parsedObjects = rawObjects
      .map(rawObject => {
        try {
          return JSON.parse(rawObject) as {hash: Hash; backingup: 'True' | 'False'; date: string};
        } catch (err) {
          return null;
        }
      })
      .filter(notEmpty);

    const now = new Date();
    const TEN_MIN = 10 * 60 * 1000;
    // Heuristic: a commit actively "backingup" is InProgress; otherwise,
    // commits younger than 10 minutes are Pending and older ones Failed.
    const statuses = new Map<Hash, CommitCloudBackupStatus>(
      parsedObjects.map(obj => [
        obj.hash,
        obj.backingup === 'True'
          ? CommitCloudBackupStatus.InProgress
          : now.valueOf() - new Date(obj.date).valueOf() < TEN_MIN
            ? CommitCloudBackupStatus.Pending
            : CommitCloudBackupStatus.Failed,
      ]),
    );
    return statuses;
  }
b69ab311408
b69ab311409 private async fetchCommitCloudStatus(ctx: RepositoryContext): Promise<{
b69ab311410 lastBackup: Date | undefined;
b69ab311411 currentWorkspace: string;
b69ab311412 workspaceChoices: Array<string>;
b69ab311413 }> {
b69ab311414 const [cloudStatusOutput, cloudListOutput] = await Promise.all([
b69ab311415 this.runCommand(['cloud', 'status'], 'CommitCloudStatusCommand', ctx),
b69ab311416 this.runCommand(['cloud', 'list'], 'CommitCloudListCommand', ctx),
b69ab311417 ]);
b69ab311418
b69ab311419 const currentWorkspace =
b69ab311420 /Workspace: ([a-zA-Z/0-9._-]+)/.exec(cloudStatusOutput.stdout)?.[1] ?? 'default';
b69ab311421 const lastSyncTimeStr = /Last Sync Time: (.*)/.exec(cloudStatusOutput.stdout)?.[1];
b69ab311422 const lastBackup = lastSyncTimeStr != null ? new Date(lastSyncTimeStr) : undefined;
b69ab311423 const workspaceChoices = cloudListOutput.stdout
b69ab311424 .split('\n')
b69ab311425 .map(line => /^ {8}([a-zA-Z/0-9._-]+)(?: \(connected\))?/.exec(line)?.[1] as string)
b69ab311426 .filter(l => l != null);
b69ab311427
b69ab311428 return {
b69ab311429 lastBackup,
b69ab311430 currentWorkspace,
b69ab311431 workspaceChoices,
b69ab311432 };
b69ab311433 }
b69ab311434
b69ab311435 private commitCache = new LRU<string, CommitInfo>(100); // TODO: normal commits fetched from smartlog() aren't put in this cache---this is mostly for blame right now.
b69ab311436 public async lookupCommits(
b69ab311437 ctx: RepositoryContext,
b69ab311438 hashes: Array<string>,
b69ab311439 ): Promise<Map<string, CommitInfo>> {
b69ab311440 const hashesToFetch = hashes.filter(hash => this.commitCache.get(hash) == undefined);
b69ab311441
b69ab311442 const commits =
b69ab311443 hashesToFetch.length === 0
b69ab311444 ? [] // don't bother running log
b69ab311445 : await this.runCommand(
b69ab311446 [
b69ab311447 'log',
b69ab311448 '--template',
b69ab311449 getMainFetchTemplate(this.info.codeReviewSystem),
b69ab311450 '--rev',
b69ab311451 hashesToFetch.join('+'),
b69ab311452 ],
b69ab311453 'LookupCommitsCommand',
b69ab311454 ctx,
b69ab311455 ).then(output => {
b69ab311456 return parseCommitInfoOutput(
b69ab311457 ctx.logger,
b69ab311458 output.stdout.trim(),
b69ab311459 this.info.codeReviewSystem,
b69ab311460 );
b69ab311461 });
b69ab311462
b69ab311463 const result = new Map();
b69ab311464 for (const hash of hashes) {
b69ab311465 const found = this.commitCache.get(hash);
b69ab311466 if (found != undefined) {
b69ab311467 result.set(hash, found);
b69ab311468 }
b69ab311469 }
b69ab311470
b69ab311471 for (const commit of commits) {
b69ab311472 if (commit) {
b69ab311473 this.commitCache.set(commit.hash, commit);
b69ab311474 result.set(commit.hash, commit);
b69ab311475 }
b69ab311476 }
b69ab311477
b69ab311478 return result;
b69ab311479 }
b69ab311480
b69ab311481 public async fetchSignificantLinesOfCode(
b69ab311482 ctx: RepositoryContext,
b69ab311483 hash: Hash,
b69ab311484 excludedFiles: string[],
b69ab311485 ): Promise<number | undefined> {
b69ab311486 const exclusions = excludedFiles.flatMap(file => [
b69ab311487 '-X',
b69ab311488 absolutePathForFileInRepo(file, this) ?? file,
b69ab311489 ]);
b69ab311490
b69ab311491 const output = (
b69ab311492 await this.runCommand(
b69ab311493 ['diff', '--stat', '-B', '-X', '**__generated__**', ...exclusions, '-c', hash],
b69ab311494 'SlocCommand',
b69ab311495 ctx,
b69ab311496 )
b69ab311497 ).stdout;
b69ab311498
b69ab311499 const sloc = this.parseSlocFrom(output);
b69ab311500
b69ab311501 ctx.logger.info('Fetched SLOC for commit:', hash, output, `SLOC: ${sloc}`);
b69ab311502 return sloc;
b69ab311503 }
b69ab311504
b69ab311505 public async fetchPendingAmendSignificantLinesOfCode(
b69ab311506 ctx: RepositoryContext,
b69ab311507 hash: Hash,
b69ab311508 includedFiles: string[],
b69ab311509 ): Promise<number | undefined> {
b69ab311510 if (includedFiles.length === 0) {
b69ab311511 return undefined;
b69ab311512 }
b69ab311513 const inclusions = includedFiles.flatMap(file => [
b69ab311514 '-I',
b69ab311515 absolutePathForFileInRepo(file, this) ?? file,
b69ab311516 ]);
b69ab311517
b69ab311518 const output = (
b69ab311519 await this.runCommand(
b69ab311520 ['diff', '--stat', '-B', '-X', '**__generated__**', ...inclusions, '-r', '.^'],
b69ab311521 'PendingSlocCommand',
b69ab311522 ctx,
b69ab311523 )
b69ab311524 ).stdout;
b69ab311525
b69ab311526 if (output.trim() === '') {
b69ab311527 return undefined;
b69ab311528 }
b69ab311529
b69ab311530 const sloc = this.parseSlocFrom(output);
b69ab311531
b69ab311532 ctx.logger.info('Fetched Pending AMEND SLOC for commit:', hash, output, `SLOC: ${sloc}`);
b69ab311533 return sloc;
b69ab311534 }
b69ab311535
b69ab311536 public async fetchPendingSignificantLinesOfCode(
b69ab311537 ctx: RepositoryContext,
b69ab311538 hash: Hash,
b69ab311539 includedFiles: string[],
b69ab311540 ): Promise<number | undefined> {
b69ab311541 if (includedFiles.length === 0) {
b69ab311542 return undefined; // don't bother running sl diff if there are no files to include
b69ab311543 }
b69ab311544 const inclusions = includedFiles.flatMap(file => [
b69ab311545 '-I',
b69ab311546 absolutePathForFileInRepo(file, this) ?? file,
b69ab311547 ]);
b69ab311548
b69ab311549 const output = (
b69ab311550 await this.runCommand(
b69ab311551 ['diff', '--stat', '-B', '-X', '**__generated__**', ...inclusions],
b69ab311552 'PendingSlocCommand',
b69ab311553 ctx,
b69ab311554 )
b69ab311555 ).stdout;
b69ab311556
b69ab311557 const sloc = this.parseSlocFrom(output);
b69ab311558
b69ab311559 ctx.logger.info('Fetched Pending SLOC for commit:', hash, output, `SLOC: ${sloc}`);
b69ab311560 return sloc;
b69ab311561 }
b69ab311562
b69ab311563 private parseSlocFrom(output: string) {
b69ab311564 const lines = output.trim().split('\n');
b69ab311565 const changes = lines[lines.length - 1];
b69ab311566 const diffStatRe = /\d+ files changed, (\d+) insertions\(\+\), (\d+) deletions\(-\)/;
b69ab311567 const diffStatMatch = changes.match(diffStatRe);
b69ab311568 const insertions = parseInt(diffStatMatch?.[1] ?? '0', 10);
b69ab311569 const deletions = parseInt(diffStatMatch?.[2] ?? '0', 10);
b69ab311570 const sloc = insertions + deletions;
b69ab311571 return sloc;
b69ab311572 }
b69ab311573
b69ab311574 private parseSubscribedBookmarks(output: string): Set<string> {
b69ab311575 return new Set(
b69ab311576 output
b69ab311577 .split('\n')
b69ab311578 .filter(line => line.trim())
b69ab311579 .map(line => line.trim().split(/\s+/)[0]),
b69ab311580 );
b69ab311581 }
b69ab311582
b69ab311583 /**
b69ab311584 * Create a runnable operation for pulling bookmarks.
b69ab311585 */
b69ab311586 private createPullBookmarksOperation(bookmarks: Array<string>): RunnableOperation {
b69ab311587 const args = ['pull'];
b69ab311588 for (const bookmark of bookmarks) {
b69ab311589 args.push('-B', bookmark);
b69ab311590 }
b69ab311591
b69ab311592 return {
b69ab311593 args,
b69ab311594 id: randomId(),
b69ab311595 runner: CommandRunner.Sapling,
b69ab311596 trackEventName: 'PullOperation',
b69ab311597 };
b69ab311598 }
b69ab311599
b69ab311600 public async getAllChangedFiles(ctx: RepositoryContext, hash: Hash): Promise<Array<ChangedFile>> {
b69ab311601 const output = (
b69ab311602 await this.runCommand(
b69ab311603 ['log', '--template', CHANGED_FILES_TEMPLATE, '--rev', hash],
b69ab311604 'LookupAllCommitChangedFilesCommand',
b69ab311605 ctx,
b69ab311606 )
b69ab311607 ).stdout;
b69ab311608
b69ab311609 const [chunk] = output.split(COMMIT_END_MARK, 1);
b69ab311610
b69ab311611 const lines = chunk.trim().split('\n');
b69ab311612 if (lines.length < Object.keys(CHANGED_FILES_FIELDS).length) {
b69ab311613 return [];
b69ab311614 }
b69ab311615
b69ab311616 const files: Array<ChangedFile> = [
b69ab311617 ...(JSON.parse(lines[CHANGED_FILES_INDEX.filesModified]) as Array<string>).map(path => ({
b69ab311618 path,
b69ab311619 status: 'M' as const,
b69ab311620 })),
b69ab311621 ...(JSON.parse(lines[CHANGED_FILES_INDEX.filesAdded]) as Array<string>).map(path => ({
b69ab311622 path,
b69ab311623 status: 'A' as const,
b69ab311624 })),
b69ab311625 ...(JSON.parse(lines[CHANGED_FILES_INDEX.filesRemoved]) as Array<string>).map(path => ({
b69ab311626 path,
b69ab311627 status: 'R' as const,
b69ab311628 })),
b69ab311629 ];
b69ab311630
b69ab311631 return files;
b69ab311632 }
b69ab311633
b69ab311634 public async getShelvedChanges(ctx: RepositoryContext): Promise<Array<ShelvedChange>> {
b69ab311635 const output = (
b69ab311636 await this.runCommand(
b69ab311637 ['log', '--rev', 'shelved()', '--template', SHELVE_FETCH_TEMPLATE],
b69ab311638 'GetShelvesCommand',
b69ab311639 ctx,
b69ab311640 )
b69ab311641 ).stdout;
b69ab311642
b69ab311643 const shelves = parseShelvedCommitsOutput(ctx.logger, output.trim());
b69ab311644 // sort by date ascending
b69ab311645 shelves.sort((a, b) => b.date.getTime() - a.date.getTime());
b69ab311646 return shelves;
b69ab311647 }
b69ab311648
b69ab311649 public getAllDiffIds(): Array<DiffId> {
b69ab311650 return (
b69ab311651 this.getSmartlogCommits()
b69ab311652 ?.commits.value?.map(commit => commit.diffId)
b69ab311653 .filter(notEmpty) ?? []
b69ab311654 );
b69ab311655 }
b69ab311656
b69ab311657 public async getActiveAlerts(ctx: RepositoryContext): Promise<Array<Alert>> {
b69ab311658 const result = await this.runCommand(['config', '-Tjson', 'alerts'], 'GetAlertsCommand', ctx, {
b69ab311659 reject: false,
b69ab311660 });
b69ab311661 if (result.exitCode !== 0 || !result.stdout) {
b69ab311662 return [];
b69ab311663 }
b69ab311664 try {
b69ab311665 const configs = JSON.parse(result.stdout) as [{name: string; value: unknown}];
b69ab311666 const alerts = parseAlerts(configs);
b69ab311667 ctx.logger.info('Found active alerts:', alerts);
b69ab311668 return alerts;
b69ab311669 } catch (e) {
b69ab311670 return [];
b69ab311671 }
b69ab311672 }
b69ab311673
b69ab311674 public async getRagePaste(ctx: RepositoryContext): Promise<string> {
b69ab311675 const output = await this.runCommand(['rage'], 'RageCommand', ctx, undefined, 90_000);
b69ab311676 const match = /P\d{9,}/.exec(output.stdout);
b69ab311677 if (match) {
b69ab311678 return match[0];
b69ab311679 }
b69ab311680 throw new Error('No paste found in rage output: ' + output.stdout);
b69ab311681 }
b69ab311682
b69ab311683 public async runDiff(
b69ab311684 ctx: RepositoryContext,
b69ab311685 comparison: Comparison,
b69ab311686 contextLines = 4,
b69ab311687 ): Promise<string> {
b69ab311688 const output = await this.runCommand(
b69ab311689 [
b69ab311690 'diff',
b69ab311691 ...revsetArgsForComparison(comparison),
b69ab311692 // don't include a/ and b/ prefixes on files
b69ab311693 '--noprefix',
b69ab311694 '--no-binary',
b69ab311695 '--nodate',
b69ab311696 '--unified',
b69ab311697 String(contextLines),
b69ab311698 ],
b69ab311699 'DiffCommand',
b69ab311700 ctx,
b69ab311701 );
b69ab311702 return output.stdout;
b69ab311703 }
b69ab311704
b69ab311705 public runCommand(
b69ab311706 args: Array<string>,
b69ab311707 /** Which event name to track for this command. If undefined, generic 'RunCommand' is used. */
b69ab311708 eventName: TrackEventName | undefined,
b69ab311709 ctx: RepositoryContext,
b69ab311710 options?: EjecaOptions,
b69ab311711 timeout?: number,
b69ab311712 ) {
b69ab311713 const id = randomId();
b69ab311714 return ctx.tracker.operation(
b69ab311715 eventName ?? 'RunCommand',
b69ab311716 'RunCommandError',
b69ab311717 {
b69ab311718 // if we don't specify a specific eventName, provide the command arguments in logging
b69ab311719 extras: eventName == null ? {args} : undefined,
b69ab311720 operationId: `isl:${id}`,
b69ab311721 },
b69ab311722 async () =>
b69ab311723 runCommand(
b69ab311724 ctx,
b69ab311725 args,
b69ab311726 {
b69ab311727 ...options,
b69ab311728 env: {
b69ab311729 ...options?.env,
b69ab311730 ...((await Internal.additionalEnvForCommand?.(id)) ?? {}),
b69ab311731 } as NodeJS.ProcessEnv,
b69ab311732 },
b69ab311733 timeout ?? READ_COMMAND_TIMEOUT_MS,
b69ab311734 ),
b69ab311735 );
b69ab311736 }
b69ab311737
b69ab311738 /** Read a config. The config name must be part of `allConfigNames`. */
b69ab311739 public async getConfig(
b69ab311740 ctx: RepositoryContext,
b69ab311741 configName: ConfigName,
b69ab311742 ): Promise<string | undefined> {
b69ab311743 return (await this.getKnownConfigs(ctx)).get(configName);
b69ab311744 }
b69ab311745
b69ab311746 /**
b69ab311747 * Read a single config, forcing a new dedicated call to `sl config`.
b69ab311748 * Prefer `getConfig` to batch fetches when possible.
b69ab311749 */
b69ab311750 public async forceGetConfig(
b69ab311751 ctx: RepositoryContext,
b69ab311752 configName: string,
b69ab311753 ): Promise<string | undefined> {
b69ab311754 const result = (await runCommand(ctx, ['config', configName])).stdout;
b69ab311755 this.initialConnectionContext.logger.info(
b69ab311756 `loaded configs from ${ctx.cwd}: ${configName} => ${result}`,
b69ab311757 );
b69ab311758 return result;
b69ab311759 }
b69ab311760
b69ab311761 /** Load all "known" configs. Cached on `this`. */
b69ab311762 public getKnownConfigs(
b69ab311763 ctx: RepositoryContext,
b69ab311764 ): Promise<ReadonlyMap<ConfigName, string | undefined>> {
b69ab311765 if (ctx.knownConfigs != null) {
b69ab311766 return Promise.resolve(ctx.knownConfigs);
b69ab311767 }
b69ab311768 return this.configRateLimiter.enqueueRun(async () => {
b69ab311769 if (ctx.knownConfigs == null) {
b69ab311770 // Fetch all configs using one command.
b69ab311771 const knownConfig = new Map<ConfigName, string>(
b69ab311772 await getConfigs<ConfigName>(ctx, allConfigNames),
b69ab311773 );
b69ab311774 ctx.knownConfigs = knownConfig;
b69ab311775 }
b69ab311776 return ctx.knownConfigs;
b69ab311777 });
b69ab311778 }
b69ab311779
b69ab311780 public setConfig(
b69ab311781 ctx: RepositoryContext,
b69ab311782 level: ConfigLevel,
b69ab311783 configName: SettableConfigName,
b69ab311784 configValue: string,
b69ab311785 ): Promise<void> {
b69ab311786 if (!settableConfigNames.includes(configName)) {
b69ab311787 return Promise.reject(
b69ab311788 new Error(`config ${configName} not in allowlist for settable configs`),
b69ab311789 );
b69ab311790 }
b69ab311791 // Attempt to avoid racy config read/write.
b69ab311792 return this.configRateLimiter.enqueueRun(() => setConfig(ctx, level, configName, configValue));
b69ab311793 }
b69ab311794
b69ab311795 /** Load and apply configs to `this` in background. */
b69ab311796 private applyConfigInBackground(ctx: RepositoryContext) {
b69ab311797 this.getConfig(ctx, 'isl.hold-off-refresh-ms').then(configValue => {
b69ab311798 if (configValue != null) {
b69ab311799 const numberValue = parseInt(configValue, 10);
b69ab311800 if (numberValue >= 0) {
b69ab311801 this.configHoldOffRefreshMs = numberValue;
b69ab311802 }
b69ab311803 }
b69ab311804 });
b69ab311805 }
b69ab311806}
b69ab311807
b69ab311808export function repoRelativePathForAbsolutePath(
b69ab311809 absolutePath: AbsolutePath,
b69ab311810 repo: Repository,
b69ab311811 pathMod = path,
b69ab311812): RepoRelativePath {
b69ab311813 return pathMod.relative(repo.info.repoRoot, absolutePath);
b69ab311814}
b69ab311815
b69ab311816/**
b69ab311817 * Returns absolute path for a repo-relative file path.
b69ab311818 * If the path "escapes" the repository's root dir, returns null
b69ab311819 * Used to validate that a file path does not "escape" the repo, and the file can safely be modified on the filesystem.
b69ab311820 * absolutePathForFileInRepo("foo/bar/file.txt", repo) -> /path/to/repo/foo/bar/file.txt
b69ab311821 * absolutePathForFileInRepo("../file.txt", repo) -> null
b69ab311822 */
b69ab311823export function absolutePathForFileInRepo(
b69ab311824 filePath: RepoRelativePath,
b69ab311825 repo: Repository,
b69ab311826 pathMod = path,
b69ab311827): AbsolutePath | null {
b69ab311828 // Note that resolve() is contractually obligated to return an absolute path.
b69ab311829 const fullPath = pathMod.resolve(repo.info.repoRoot, filePath);
b69ab311830 // Prefix checks on paths can be footguns on Windows for C:\\ vs c:\\, but since
b69ab311831 // we use the same exact path check here and in the resolve, there should be
b69ab311832 // no incompatibility here.
b69ab311833 if (fullPath.startsWith(repo.info.repoRoot + pathMod.sep)) {
b69ab311834 return fullPath;
b69ab311835 } else {
b69ab311836 return null;
b69ab311837 }
b69ab311838}
b69ab311839
b69ab311840function isUnhealthyEdenFs(cwd: string): Promise<boolean> {
b69ab311841 return exists(path.join(cwd, 'README_EDEN.txt'));
b69ab311842}
b69ab311843
6c9fcae1844export type GroveConfig = {hub?: string; token?: string; username?: string};
f0961761845
f0961761846/** Read ~/.grove/config.json and decode the username from the JWT payload */
6c9fcae1847export function readGroveConfig(logger: {error: (...args: unknown[]) => void}): GroveConfig {
f0961761848 try {
f0961761849 const homedir = process.env.HOME ?? process.env.USERPROFILE ?? '';
f0961761850 const raw = fs.readFileSync(path.join(homedir, '.grove', 'config.json'), 'utf8');
f0961761851 const config = JSON.parse(raw) as {hub?: string; token?: string};
f0961761852 let username: string | undefined;
f0961761853 if (config.token) {
f0961761854 const payload = config.token.split('.')[1];
f0961761855 if (payload) {
f0961761856 const decoded = JSON.parse(Buffer.from(payload, 'base64').toString());
f0961761857 username = decoded.username;
f0961761858 }
f0961761859 }
f0961761860 return {hub: config.hub, token: config.token, username};
f0961761861 } catch (e) {
d8030e41862 // Not an error — user may not have logged in yet
f0961761863 return {};
f0961761864 }
f0961761865}
f0961761866
7e7176b1867/** Query the Grove API to find which owner owns a given repo name */
7e7176b1868async function resolveGroveRepoOwner(
7e7176b1869 apiUrl: string,
7e7176b1870 repoName: string,
7e7176b1871 token: string | undefined,
7e7176b1872 logger: {info: (...args: unknown[]) => void; error: (...args: unknown[]) => void},
7e7176b1873): Promise<string | undefined> {
7e7176b1874 try {
7e7176b1875 const headers: Record<string, string> = {};
7e7176b1876 if (token) {
7e7176b1877 headers['Authorization'] = `Bearer ${token}`;
7e7176b1878 }
7e7176b1879 const res = await fetch(`${apiUrl}/repos`, {headers});
7e7176b1880 if (!res.ok) {
7e7176b1881 return undefined;
7e7176b1882 }
7e7176b1883 const data = (await res.json()) as {repos: Array<{name: string; owner_name: string}>};
7e7176b1884 const match = data.repos.find(r => r.name === repoName);
7e7176b1885 if (match) {
7e7176b1886 logger.info(`[grove] resolved repo "${repoName}" owner to "${match.owner_name}"`);
7e7176b1887 return match.owner_name;
7e7176b1888 }
7e7176b1889 } catch (e) {
d8030e41890 // Expected to fail if user hasn't logged in yet
7e7176b1891 }
7e7176b1892 return undefined;
7e7176b1893}
7e7176b1894
23ab7211895/** Fetch repo-level settings from the Grove API */
23ab7211896async function fetchGroveRepoSettings(
23ab7211897 apiUrl: string,
23ab7211898 owner: string,
23ab7211899 repoName: string,
23ab7211900 token: string | undefined,
23ab7211901 logger: {info: (...args: unknown[]) => void; error: (...args: unknown[]) => void},
23ab7211902): Promise<{requireDiffs: boolean}> {
23ab7211903 try {
23ab7211904 const headers: Record<string, string> = {};
23ab7211905 if (token) {
23ab7211906 headers['Authorization'] = `Bearer ${token}`;
23ab7211907 }
23ab7211908 const res = await fetch(`${apiUrl}/repos/${owner}/${repoName}`, {headers});
23ab7211909 if (!res.ok) {
23ab7211910 return {requireDiffs: false};
23ab7211911 }
23ab7211912 const data = (await res.json()) as {repo: {require_diffs?: number}};
23ab7211913 return {requireDiffs: data.repo.require_diffs === 1};
23ab7211914 } catch (e) {
23ab7211915 logger.error('[grove] failed to fetch repo settings', e);
23ab7211916 return {requireDiffs: false};
23ab7211917 }
23ab7211918}
23ab7211919
b69ab311920async function isEdenFsRepo(repoRoot: AbsolutePath): Promise<boolean> {
b69ab311921 try {
b69ab311922 await fs.promises.access(path.join(repoRoot, '.eden'));
b69ab311923 return true;
b69ab311924 } catch {}
b69ab311925 return false;
b69ab311926}