feat(@huggingface/hub): adding scanCacheDir
#908
Merged

coyotte508 merged 15 commits into huggingface:main from axel7083:feature/support-scan-cache on Sep 26, 2024.

Note: this view shows the changes from the first 7 of the 15 commits.

Commits
- 4eb3c85 feat(hub): support scan cache (axel7083)
- 8e4e46e fix: formatting and linter (axel7083)
- e003bd6 fix: tests (axel7083)
- 629e8c5 fix: naming convention (axel7083)
- fa170b9 fix: adding cache-management to index.ts (axel7083)
- fde3dee fix(browser): exclude cache management (axel7083)
- 7a53407 revert: useless changes (axel7083)
- 77f2198 fix: avoid homedir call on import (axel7083)
- 53c81ff fix: apply @coyotte508 code suggestion (axel7083)
- 0f33684 fix: blob property type (axel7083)
- 25b4c8a fix: missing @coyotte508 comment (axel7083)
- f264a81 fix: refactor CachedFileInfo (axel7083)
- 76a8ec7 Apply suggestions from @Wauplin (axel7083)
- 2080c0d fix: unit tests (axel7083)
- 70afe80 Merge branch 'main' into feature/support-scan-cache (Wauplin)
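In short, the PR adds a Node-only cache-management module modeled on the Python client's `scan_cache_dir` (commit fde3dee excludes it from the browser build, and fa170b9 wires it into index.ts). A minimal usage sketch, assuming the `scanCacheDir` export surfaces from the `@huggingface/hub` package entry point:

```ts
import { scanCacheDir } from "@huggingface/hub";

const info = await scanCacheDir(); // defaults to HF_HUB_CACHE
console.log(`${info.repos.size} repo(s), ${info.sizeOnDisk} bytes on disk`);
for (const repo of info.repos) {
	console.log(`${repo.repoType} ${repo.repoId}: ${repo.nbFiles} file(s), ${repo.revisions.size} revision(s)`);
}
for (const warning of info.warnings) {
	console.warn(`warning: ${warning.message}`);
}
```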
Files changed

cache-management.spec.ts (new file, 137 lines):

```ts
import { describe, test, expect, vi, beforeEach } from "vitest";
import {
	scanCacheDir,
	scanCachedRepo,
	REPO_TYPE_T,
	scanSnapshotDir,
	parseRepoType,
	getBlobStat,
	type CachedFileInfo,
} from "./cache-management";
import { stat, readdir, realpath, lstat } from "node:fs/promises";
import type { Dirent, Stats } from "node:fs";
import { join } from "node:path";

// Mocks
vi.mock("node:fs/promises");

beforeEach(() => {
	vi.resetAllMocks();
	vi.restoreAllMocks();
});

describe("scanCacheDir", () => {
	test("should throw an error if cacheDir is not a directory", async () => {
		vi.mocked(stat).mockResolvedValueOnce({
			isDirectory: () => false,
		} as Stats);

		await expect(scanCacheDir("/fake/dir")).rejects.toThrow("Scan cache expects a directory");
	});

	test("empty directory should return an empty set of repositories and no warnings", async () => {
		vi.mocked(stat).mockResolvedValueOnce({
			isDirectory: () => true,
		} as Stats);

		// mock empty cache folder
		vi.mocked(readdir).mockResolvedValue([]);

		const result = await scanCacheDir("/fake/dir");

		// cacheDir must have been read
		expect(readdir).toHaveBeenCalledWith("/fake/dir");

		expect(result.warnings.length).toBe(0);
		expect(result.repos.size).toBe(0);
		expect(result.sizeOnDisk).toBe(0);
	});
});

describe("scanCachedRepo", () => {
	test("should throw an error for invalid repo path", async () => {
		await expect(() => {
			return scanCachedRepo("/fake/repo_path");
		}).rejects.toThrow("Repo path is not a valid HuggingFace cache directory");
	});

	test("should throw an error if the snapshot folder does not exist", async () => {
		vi.mocked(readdir).mockResolvedValue([]);
		vi.mocked(stat).mockResolvedValue({
			isDirectory: () => false,
		} as Stats);

		await expect(() => {
			return scanCachedRepo("/fake/cacheDir/models--hello-world--name");
		}).rejects.toThrow("Snapshots dir doesn't exist in cached repo");
	});

	test("should properly parse the repository name", async () => {
		const repoPath = "/fake/cacheDir/models--hello-world--name";
		vi.mocked(readdir).mockResolvedValue([]);
		vi.mocked(stat).mockResolvedValue({
			isDirectory: () => true,
		} as Stats);

		const result = await scanCachedRepo(repoPath);
		expect(readdir).toHaveBeenCalledWith(join(repoPath, "refs"), {
			withFileTypes: true,
		});

		expect(result.repoId).toBe("hello-world/name");
	});
});

describe("scanSnapshotDir", () => {
	test("should scan a valid snapshot directory", async () => {
		const cachedFiles = new Set<CachedFileInfo>();
		const blobStats = new Map<string, Stats>();
		vi.mocked(readdir).mockResolvedValueOnce([{ name: "file1", isDirectory: () => false } as Dirent]);

		vi.mocked(realpath).mockResolvedValueOnce("/fake/realpath");
		vi.mocked(lstat).mockResolvedValueOnce({ size: 1024, atimeMs: Date.now(), mtimeMs: Date.now() } as Stats);

		await scanSnapshotDir("/fake/revision", cachedFiles, blobStats);

		expect(cachedFiles.size).toBe(1);
		expect(blobStats.size).toBe(1);
	});
});

describe("getBlobStat", () => {
	test("should retrieve blob stat if already cached", async () => {
		const blobStats = new Map<string, Stats>([["/fake/blob", { size: 1024 } as Stats]]);
		const result = await getBlobStat("/fake/blob", blobStats);

		expect(lstat).not.toHaveBeenCalled();
		expect(result.size).toBe(1024);
	});

	test("should fetch and cache blob stat if not cached", async () => {
		const blobStats = new Map();
		vi.mocked(lstat).mockResolvedValueOnce({ size: 2048 } as Stats);

		const result = await getBlobStat("/fake/blob", blobStats);

		expect(result.size).toBe(2048);
		expect(blobStats.size).toBe(1);
	});
});

describe("parseRepoType", () => {
	test("should parse models repo type", () => {
		expect(parseRepoType("models")).toBe(REPO_TYPE_T.MODEL);
	});

	test("should parse model repo type", () => {
		expect(parseRepoType("model")).toBe(REPO_TYPE_T.MODEL);
	});

	test("should parse dataset repo type", () => {
		expect(parseRepoType("dataset")).toBe(REPO_TYPE_T.DATASET);
	});

	test("should throw an error for invalid repo type", () => {
		expect(() => parseRepoType("invalid")).toThrow();
	});
});
```
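`scanRefsDir` is only exercised indirectly above; a direct test in the same mocking style could look like the sketch below. It assumes `scanRefsDir` is additionally imported from `./cache-management` and `readFile` from `node:fs/promises`; the casts mirror the `as Stats` / `as Dirent` casts already used in the suite.

```ts
import { readFile } from "node:fs/promises";
import { scanRefsDir } from "./cache-management";

describe("scanRefsDir", () => {
	test("should map ref files to the commit hashes they contain", async () => {
		const refsByHash = new Map<string, Set<string>>();
		vi.mocked(readdir).mockResolvedValueOnce([{ name: "main", isDirectory: () => false } as Dirent]);
		// cast needed because vi.mocked resolves readFile's Buffer overload
		vi.mocked(readFile).mockResolvedValueOnce("abcdef1234567890" as unknown as Buffer);

		await scanRefsDir("/fake/refs", refsByHash);

		expect(readFile).toHaveBeenCalledWith(join("/fake/refs", "main"), "utf-8");
		expect(refsByHash.get("abcdef1234567890")?.has("main")).toBe(true);
	});
});
```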
cache-management.ts (new file, 252 lines):

```ts
import { homedir } from "node:os";
import { join, basename } from "node:path";
import { stat, readdir, readFile, realpath, lstat } from "node:fs/promises";
import type { Stats } from "node:fs";
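
// Cache location resolution (same convention as the huggingface_hub Python
// client): HF_HUB_CACHE, else the legacy HUGGINGFACE_HUB_CACHE, else
// `$HF_HOME/hub`, where HF_HOME falls back to `$XDG_CACHE_HOME/huggingface`
// and finally `~/.cache/huggingface`.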
const default_home = join(homedir(), ".cache");
export const HF_HOME: string =
	process.env["HF_HOME"] ?? join(process.env["XDG_CACHE_HOME"] ?? default_home, "huggingface");

const default_cache_path = join(HF_HOME, "hub");

// Legacy env variable
export const HUGGINGFACE_HUB_CACHE = process.env["HUGGINGFACE_HUB_CACHE"] ?? default_cache_path;
// New env variable
export const HF_HUB_CACHE = process.env["HF_HUB_CACHE"] ?? HUGGINGFACE_HUB_CACHE;

const FILES_TO_IGNORE: string[] = [".DS_Store"];

export enum REPO_TYPE_T {
	MODEL = "model",
	DATASET = "dataset",
	SPACE = "space",
}
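
// One file inside a snapshot. Snapshot entries are symlinks into the shared
// blobs/ folder, so size and timestamps are taken from the resolved blob,
// not from the symlink itself.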
export interface CachedFileInfo {
	filename: string;
	filePath: string;
	blobPath: string;
	sizeOnDisk: number;

	blobLastAccessed: number;
	blobLastModified: number;
}

export interface CachedRevisionInfo {
	commitHash: string;
	snapshotPath: string;
	sizeOnDisk: number;

	readonly files: Set<CachedFileInfo>;
	readonly refs: Set<string>;

	lastModified: number;
}
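
// Aggregate view of a single cached repository, across all of its locally
// available revisions.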
export interface CachedRepoInfo {
	repoId: string;
	repoType: REPO_TYPE_T;
	repoPath: string;
	sizeOnDisk: number;
	nbFiles: number;

	readonly revisions: Set<CachedRevisionInfo>;

	lastAccessed: number;
	lastModified: number;
}

export interface HFCacheInfo {
	sizeOnDisk: number;
	readonly repos: Set<CachedRepoInfo>;
	warnings: Error[];
}
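
// Scan the whole cache directory (defaults to HF_HUB_CACHE). A repo folder
// that cannot be scanned does not abort the scan: its error is collected in
// `warnings` instead.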
export async function scanCacheDir(cacheDir: string | undefined = undefined): Promise<HFCacheInfo> {
	if (!cacheDir) cacheDir = HF_HUB_CACHE;

	const s = await stat(cacheDir);
	if (!s.isDirectory()) {
		throw new Error(
			`Scan cache expects a directory but found a file: ${cacheDir}. Please use \`cacheDir\` argument or set \`HF_HUB_CACHE\` environment variable.`
		);
	}

	const repos = new Set<CachedRepoInfo>();
	const warnings: Error[] = [];

	const directories = await readdir(cacheDir);
	for (const repo of directories) {
		// skip .locks folder
		if (repo === ".locks") continue;

		// get the absolute path of the repo
		const absolute = join(cacheDir, repo);

		// ignore non-directory element
		const s = await stat(absolute);
		if (!s.isDirectory()) {
			continue;
		}

		try {
			const cached = await scanCachedRepo(absolute);
			repos.add(cached);
		} catch (err: unknown) {
			warnings.push(err as Error);
		}
	}

	return {
		repos: repos,
		sizeOnDisk: [...repos.values()].reduce((sum, repo) => sum + repo.sizeOnDisk, 0),
		warnings: warnings,
	};
}
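
// A cached repo folder is named `<repoType>s--<owner>--<name>` and contains a
// `refs/` directory (one file per ref, holding a commit hash) plus a
// `snapshots/<commitHash>/` tree per locally available revision.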
export async function scanCachedRepo(repoPath: string): Promise<CachedRepoInfo> {
	// get the directory name
	const name = basename(repoPath);
	if (!name.includes("--")) {
		throw new Error(`Repo path is not a valid HuggingFace cache directory: ${name}`);
	}

	// parse the repoId from directory name
	const [type, ...remaining] = name.split("--");
	const repoType = parseRepoType(type);
	const repoId = remaining.join("/");

	const snapshotsPath = join(repoPath, "snapshots");
	const refsPath = join(repoPath, "refs");

	const snapshotStat = await stat(snapshotsPath);
	if (!snapshotStat.isDirectory()) {
		throw new Error(`Snapshots dir doesn't exist in cached repo ${snapshotsPath}`);
	}

	// Check if the refs directory exists and scan it
	const refsByHash: Map<string, Set<string>> = new Map();
	const refsStat = await stat(refsPath);
	if (refsStat.isDirectory()) {
		await scanRefsDir(refsPath, refsByHash);
	}

	// Scan snapshots directory and collect cached revision information
	const cachedRevisions: Set<CachedRevisionInfo> = new Set();
	const blobStats: Map<string, Stats> = new Map(); // Store blob stats

	const snapshotDirs = await readdir(snapshotsPath);
	for (const dir of snapshotDirs) {
		if (FILES_TO_IGNORE.includes(dir)) continue; // Ignore unwanted files

		const revisionPath = join(snapshotsPath, dir);
		const revisionStat = await stat(revisionPath);
		if (!revisionStat.isDirectory()) {
			throw new Error(`Snapshots folder corrupted. Found a file: ${revisionPath}`);
		}

		const cachedFiles: Set<CachedFileInfo> = new Set();
		await scanSnapshotDir(revisionPath, cachedFiles, blobStats);

		const revisionLastModified =
			cachedFiles.size > 0 ? Math.max(...[...cachedFiles].map((file) => file.blobLastModified)) : revisionStat.mtimeMs;

		cachedRevisions.add({
			commitHash: dir,
			files: cachedFiles,
			refs: refsByHash.get(dir) || new Set(),
			sizeOnDisk: [...cachedFiles].reduce((sum, file) => sum + file.sizeOnDisk, 0),
			snapshotPath: revisionPath,
			lastModified: revisionLastModified,
		});

		refsByHash.delete(dir);
	}

	// Verify that all refs refer to a valid revision
	if (refsByHash.size > 0) {
		throw new Error(
			`Reference(s) refer to missing commit hashes: ${JSON.stringify(Object.fromEntries(refsByHash))} (${repoPath})`
		);
	}

	const repoStats = await stat(repoPath);
	const repoLastAccessed =
		blobStats.size > 0 ? Math.max(...[...blobStats.values()].map((stat) => stat.atimeMs)) : repoStats.atimeMs;

	const repoLastModified =
		blobStats.size > 0 ? Math.max(...[...blobStats.values()].map((stat) => stat.mtimeMs)) : repoStats.mtimeMs;

	// Return the constructed CachedRepoInfo object
	return {
		repoId: repoId,
		repoType: repoType,
		repoPath: repoPath,
		nbFiles: blobStats.size,
		revisions: cachedRevisions,
		sizeOnDisk: [...blobStats.values()].reduce((sum, stat) => sum + stat.size, 0),
		lastAccessed: repoLastAccessed,
		lastModified: repoLastModified,
	};
}
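
// Read every file under refs/ and group ref names by the commit hash they
// point to.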
export async function scanRefsDir(refsPath: string, refsByHash: Map<string, Set<string>>): Promise<void> {
	const refFiles = await readdir(refsPath, { withFileTypes: true });
	for (const refFile of refFiles) {
		const refFilePath = join(refsPath, refFile.name);
		if (refFile.isDirectory()) continue; // Skip directories

		const commitHash = await readFile(refFilePath, "utf-8");
		const refName = refFile.name;
		if (!refsByHash.has(commitHash)) {
			refsByHash.set(commitHash, new Set());
		}
		refsByHash.get(commitHash)?.add(refName);
	}
}
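
// Collect every file of one snapshot, resolving each symlink to its blob and
// reusing blob stats across files via the shared `blobStats` map.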
export async function scanSnapshotDir(
	revisionPath: string,
	cachedFiles: Set<CachedFileInfo>,
	blobStats: Map<string, Stats>
): Promise<void> {
	const files = await readdir(revisionPath, { withFileTypes: true });
	for (const file of files) {
		if (file.isDirectory()) continue; // Skip directories

		const filePath = join(revisionPath, file.name);
		const blobPath = await realpath(filePath);
		const blobStat = await getBlobStat(blobPath, blobStats);

		cachedFiles.add({
			filename: file.name,
			filePath: filePath,
			blobPath: blobPath,
			sizeOnDisk: blobStat.size,
			blobLastAccessed: blobStat.atimeMs,
			blobLastModified: blobStat.mtimeMs,
		});
	}
}
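
// lstat a blob at most once: blobs shared by several revisions are stat-ed
// and counted a single time.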
export async function getBlobStat(blobPath: string, blobStats: Map<string, Stats>): Promise<Stats> {
	const blob = blobStats.get(blobPath);
	if (!blob) {
		const statResult = await lstat(blobPath);
		blobStats.set(blobPath, statResult);
		return statResult;
	}
	return blob;
}
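
// Map a cache folder prefix to a repo type.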
export function parseRepoType(type: string): REPO_TYPE_T {
	switch (type) {
		case "models":
		case "model":
			return REPO_TYPE_T.MODEL;
		// on disk, cache folders use the plural prefixes `datasets--` and
		// `spaces--`, so accept both the plural and the enum's singular value
		case "datasets":
		case REPO_TYPE_T.DATASET:
			return REPO_TYPE_T.DATASET;
		case "spaces":
		case REPO_TYPE_T.SPACE:
			return REPO_TYPE_T.SPACE;
		default:
			throw new Error(`Invalid repo type: ${type}`);
	}
}
```
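Since each `CachedRevisionInfo` carries its set of refs, a caller can, for example, list snapshot revisions that no ref points to anymore. A sketch of such a hypothetical cleanup report, built only on the returned data:

```ts
import { scanCacheDir } from "./cache-management";

const { repos } = await scanCacheDir();
for (const repo of repos) {
	for (const revision of repo.revisions) {
		if (revision.refs.size === 0) {
			// snapshot kept on disk but no longer reachable from any ref
			console.log(`unreferenced: ${repo.repoId} @ ${revision.commitHash.slice(0, 8)} (${revision.snapshotPath})`);
		}
	}
}
```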