Skip to content
This repository was archived by the owner on Aug 6, 2025. It is now read-only.

Commit 417c7fb

Browse files
caesarbell (Caesar Bell)
and co-authors authored
📝 DOP-4098 updates comment (#931)
* 📝 DOP-4098 updates comment * 📝 DOP-4098 updates comment * 🏷️ DOP-4098 updates types to reflect change between repos_branches and docsets --------- Co-authored-by: Caesar Bell <[email protected]>
1 parent 64535ea commit 417c7fb

File tree

5 files changed

+31
-28
lines changed

5 files changed

+31
-28
lines changed

api/controllers/v1/github.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,13 @@ import { ConsoleLogger } from '../../../src/services/logger';
55
import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository';
66
import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github';
77
import { DocsetsRepository } from '../../../src/repositories/docsetsRepository';
8-
import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/repos_branches';
8+
import { ReposBranchesDocsetsDocument } from '../../../modules/persistence/src/services/metadata/repos_branches';
99

1010
async function prepGithubPushPayload(
1111
githubEvent: any,
1212
repoBranchesRepository: RepoBranchesRepository,
1313
prefix: string,
14-
repoInfo: ReposBranchesDocument
14+
repoInfo: ReposBranchesDocsetsDocument
1515
) {
1616
const branch_name = githubEvent.ref.split('/')[2];
1717
const branch_info = await repoBranchesRepository.getRepoBranchAliases(

api/controllers/v2/github.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,14 +11,14 @@ import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handle
1111
import { DocsetsRepository } from '../../../src/repositories/docsetsRepository';
1212
import { getMonorepoPaths } from '../../../src/monorepo';
1313
import { getUpdatedFilePaths } from '../../../src/monorepo/utils/path-utils';
14-
import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/associated_products';
14+
import { ReposBranchesDocsetsDocument } from '../../../modules/persistence/src/services/metadata/repos_branches';
1515
import { MONOREPO_NAME } from '../../../src/monorepo/utils/monorepo-constants';
1616

1717
async function prepGithubPushPayload(
1818
githubEvent: PushEvent,
1919
repoBranchesRepository: RepoBranchesRepository,
2020
prefix: string,
21-
repoInfo: ReposBranchesDocument
21+
repoInfo: ReposBranchesDocsetsDocument
2222
): Promise<Omit<EnhancedJob, '_id'>> {
2323
const branch_name = githubEvent.ref.split('/')[2];
2424
const branch_info = await repoBranchesRepository.getRepoBranchAliases(

modules/persistence/src/services/metadata/ToC/utils/prefixFromEnvironment.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
import { ReposBranchesDocument } from '../../associated_products';
1+
import { DocsetsDocument } from '../../repos_branches';
22

3-
export const prefixFromEnvironment = (repoBranchEntry: ReposBranchesDocument) => {
3+
export const prefixFromEnvironment = (repoBranchEntry: DocsetsDocument) => {
44
const env = process.env.SNOOTY_ENV ?? 'dotcomprd';
55
return {
66
url: repoBranchEntry.url[env],

modules/persistence/src/services/metadata/associated_products/index.ts

Lines changed: 10 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,13 @@
11
import { AggregationCursor } from 'mongodb';
22
import { Metadata } from '..';
33
import { db } from '../../connector';
4-
import { getAllAssociatedRepoBranchesEntries, getRepoBranchesEntry } from '../repos_branches';
4+
import {
5+
ReposBranchesDocsetsDocument,
6+
ReposBranchesDocument,
7+
DocsetsDocument,
8+
getAllAssociatedRepoBranchesEntries,
9+
getRepoBranchesEntry,
10+
} from '../repos_branches';
511
import { ToCInsertions, TocOrderInsertions, traverseAndMerge, copyToCTree, project } from '../ToC';
612
import { prefixFromEnvironment } from '../ToC/utils/prefixFromEnvironment';
713

@@ -33,16 +39,7 @@ export interface BranchEntry {
3339
[key: string]: any;
3440
}
3541

36-
export interface ReposBranchesDocument {
37-
repoName: string;
38-
project: string;
39-
branches: BranchEntry[];
40-
url: EnvKeyedObject;
41-
prefix: EnvKeyedObject;
42-
[key: string]: any;
43-
}
44-
45-
const mapRepoBranches = (repoBranches: ReposBranchesDocument[]) =>
42+
const mapRepoBranches = (repoBranches: ReposBranchesDocsetsDocument[]) =>
4643
Object.fromEntries(
4744
repoBranches.map((entry) => {
4845
const { url, prefix } = entry;
@@ -94,7 +91,7 @@ const umbrellaMetadataEntry = async (project: string): Promise<Metadata> => {
9491
// Convert our cursor from the associated metadata aggregation query into a series of ToC objects and their parent metadata entries
9592
const shapeToCsCursor = async (
9693
tocCursor: AggregationCursor,
97-
repoBranchesMap: { [k: string]: ReposBranchesDocument }
94+
repoBranchesMap: { [k: string]: ReposBranchesDocsetsDocument }
9895
): Promise<{
9996
tocInsertions: ToCInsertions;
10097
tocOrderInsertions: TocOrderInsertions;
@@ -192,7 +189,7 @@ export const mergeAssociatedToCs = async (metadata: Metadata) => {
192189
);
193190

194191
// We need to have copies of the main umbrella product's ToC here, to handle multiple metadata entry support
195-
const umbrellaPrefixes = prefixFromEnvironment(umbrellaRepoBranchesEntry as any as ReposBranchesDocument);
192+
const umbrellaPrefixes = prefixFromEnvironment(umbrellaRepoBranchesEntry as any as DocsetsDocument);
196193
const umbrellaToCs = {
197194
original: copyToCTree(umbrellaMetadata.toctree),
198195
urlified: copyToCTree(umbrellaMetadata.toctree, umbrellaPrefixes.prefix, umbrellaPrefixes.url),

modules/persistence/src/services/metadata/repos_branches/index.ts

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -16,17 +16,23 @@ export interface BranchEntry {
1616
[key: string]: any;
1717
}
1818

19+
export interface DocsetsDocument extends WithId<Document> {
20+
url: EnvKeyedObject;
21+
prefix: EnvKeyedObject;
22+
bucket: EnvKeyedObject;
23+
}
24+
1925
export interface ReposBranchesDocument extends WithId<Document> {
2026
repoName: string;
2127
project: string;
2228
branches: BranchEntry[];
23-
url: EnvKeyedObject;
24-
prefix: EnvKeyedObject;
2529
internalOnly: boolean;
2630
[key: string]: any;
2731
}
2832

29-
const internals: { [key: project]: ReposBranchesDocument } = {};
33+
export type ReposBranchesDocsetsDocument = ReposBranchesDocument & DocsetsDocument;
34+
35+
const internals: { [key: project]: ReposBranchesDocsetsDocument } = {};
3036

3137
const getAggregationPipeline = (matchCondition: any) => {
3238
return [
@@ -67,7 +73,7 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) =>
6773
const { associated_products = [] } = metadata;
6874
if (!associated_products.length) return [];
6975

70-
const res: ReposBranchesDocument[] = [],
76+
const res: ReposBranchesDocsetsDocument[] = [],
7177
fetch: project[] = [];
7278
associated_products.forEach((ap) => {
7379
if (internals[ap.name]) {
@@ -85,8 +91,8 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) =>
8591
const db = await pool();
8692
const aggregationPipeline = getAggregationPipeline({ project: { $in: fetch }, internalOnly: false });
8793
const cursor = db.collection('docsets').aggregate(aggregationPipeline);
88-
const docsets = (await cursor.toArray()) as ReposBranchesDocument[];
89-
docsets.forEach((doc: ReposBranchesDocument) => {
94+
const docsets = (await cursor.toArray()) as DocsetsDocument[];
95+
docsets.forEach((doc: ReposBranchesDocsetsDocument) => {
9096
// TODO: store in cache
9197
internals[doc['project']] = doc;
9298
res.push(doc);
@@ -98,7 +104,7 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) =>
98104
}
99105
};
100106

101-
// Queries pool*.repos_branches for any entries for the given project and branch from a metadata entry.
107+
// Queries pool*.repos_branches and pool*.docsets for any entries for the given project and branch from a metadata entry.
102108
export const getRepoBranchesEntry = async (project: project, branch = ''): Promise<ReposBranchesDocument> => {
103109
const cachedDoc = internals[project];
104110
// return cached repo doc if exists
@@ -126,7 +132,7 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi
126132
const aggregationPipeline = getAggregationPipeline(matchCondition);
127133

128134
const cursor = db.collection('docsets').aggregate(aggregationPipeline);
129-
const res = (await cursor.toArray()) as unknown as ReposBranchesDocument[];
135+
const res = (await cursor.toArray()) as unknown as ReposBranchesDocsetsDocument[];
130136
const returnedEntry = res[0];
131137

132138
if (res.length > 1) {
@@ -135,7 +141,7 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi
135141
);
136142
}
137143

138-
// if not already set, set cache value for repo_branches
144+
// if not already set, set cache value for docsets
139145
if (!internals[project]) {
140146
internals[project] = returnedEntry;
141147
}

0 commit comments

Comments (0)