Skip to content

Commit 49860a0

Browse files
committed
Refactor environment variable access to use 'env' for consistency across the application
1 parent: 201d413 · commit: 49860a0

File tree

15 files changed: 41 additions (+41) and 28 deletions (−28).

actions/actions.tsx

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,11 @@
33
import { generateObject } from "ai";
44
import { z } from "zod";
55
import { createOpenAI } from "@ai-sdk/openai";
6+
import { env } from "process";
67

78
const openai = createOpenAI({
8-
baseURL: process.env.OPENAI_API_URL,
9-
apiKey: process.env.OPENAI_API_KEY,
9+
baseURL: env.OPENAI_API_URL,
10+
apiKey: env.OPENAI_API_KEY,
1011
});
1112

1213
export async function generateFollowUpQuestions(
@@ -16,8 +17,8 @@ export async function generateFollowUpQuestions(
1617
try {
1718
const { object } = await generateObject({
1819
model: openai.chat(
19-
process.env.OPENAI_API_MODEL_SUGGESTION ||
20-
process.env.OPENAI_API_MODEL ||
20+
env.OPENAI_API_MODEL_SUGGESTION ||
21+
env.OPENAI_API_MODEL ||
2122
"gpt-3.5-turbo"
2223
),
2324
schema: z.object({

app/api/chat/route.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,16 @@ import { createOpenAI } from "@ai-sdk/openai";
22
import { streamText, tool, convertToModelMessages } from "ai";
33
import { z } from "zod";
44
import { fetchSlurmData } from "@/lib/slurm-api";
5+
import { env } from "process";
56

67
export const maxDuration = 30;
78

89
export async function POST(req: Request) {
910
const { messages } = await req.json();
1011

1112
const openai = createOpenAI({
12-
baseURL: process.env.OPENAI_API_URL,
13-
apiKey: process.env.OPENAI_API_KEY,
13+
baseURL: env.OPENAI_API_URL,
14+
apiKey: env.OPENAI_API_KEY,
1415
fetch: async (url, options) => {
1516
console.log("OpenAI Fetch URL:", url);
1617
// console.log("OpenAI Fetch Options:", JSON.stringify(options, null, 2)); // Don't log full options to avoid leaking keys in logs if possible, or just log headers keys
@@ -21,7 +22,7 @@ export async function POST(req: Request) {
2122
});
2223

2324
const result = await streamText({
24-
model: openai.chat(process.env.OPENAI_API_MODEL || "gpt-3.5-turbo"),
25+
model: openai.chat(env.OPENAI_API_MODEL || "gpt-3.5-turbo"),
2526
messages: convertToModelMessages(messages),
2627
onError: (error) => {
2728
console.error("StreamText Error:", error);

app/api/prometheus/dcgm/route.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
11
import { NextResponse } from "next/server";
22
import { PrometheusDriver } from "prometheus-query";
3+
import { env } from "process";
34

4-
const PROMETHEUS_URL = process.env.PROMETHEUS_URL;
5+
const PROMETHEUS_URL = env.PROMETHEUS_URL;
56

67
let prom: PrometheusDriver | null = null;
78

app/api/prometheus/ipmi/route.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
import { PrometheusQueryResponse } from "@/types/types";
22
import { NextResponse } from "next/server";
33
import { PrometheusDriver } from "prometheus-query";
4+
import { env } from "process";
45

56
export const revalidate = 0;
6-
const PROMETHEUS_URL = process.env.PROMETHEUS_URL;
7+
const PROMETHEUS_URL = env.PROMETHEUS_URL;
78
const MAX_DATA_POINTS = 200;
89
const CACHE_TTL = 5 * 60 * 1000; // 5 minutes cache for node list
910

@@ -38,7 +39,7 @@ async function getClusterNodes(): Promise<string[]> {
3839

3940
try {
4041
// Fetch node information from Slurm API
41-
const baseURL = process.env.NEXT_PUBLIC_BASE_URL || "http://localhost:3000";
42+
const baseURL = env.NEXT_PUBLIC_BASE_URL || "http://localhost:3000";
4243
const response = await fetch(`${baseURL}/api/slurm/nodes`);
4344

4445
if (!response.ok) {

app/api/prometheus/route.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
import { PrometheusQueryResponse } from "@/types/types";
22
import { NextResponse } from "next/server";
33
import { PrometheusDriver } from "prometheus-query";
4+
import { env } from "process";
45

5-
const PROMETHEUS_URL = process.env.PROMETHEUS_URL;
6+
const PROMETHEUS_URL = env.PROMETHEUS_URL;
67

78
let prom: PrometheusDriver | null = null;
89

app/api/prometheus/utilization/route.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import { NextResponse } from "next/server";
22
import { PrometheusDriver } from "prometheus-query";
3+
import { env } from "process";
34

45
type PrometheusMetric = {
56
labels: Record<string, string>;
@@ -23,7 +24,7 @@ type UtilizationResponse = {
2324
error?: string;
2425
};
2526

26-
const PROMETHEUS_URL = process.env.PROMETHEUS_URL!;
27+
const PROMETHEUS_URL = env.PROMETHEUS_URL!;
2728
const HOURS_TO_ANALYZE = 24;
2829
const STEP_INTERVAL = 900; // 15 minutes in seconds
2930

app/api/reporting/gpu/route.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
// app/api/reporting/gpu/route.ts
22
import { NextResponse } from "next/server";
33
import { PrometheusDriver } from "prometheus-query";
4+
import { env } from "process";
45

5-
const PROMETHEUS_URL = process.env.PROMETHEUS_URL;
6+
const PROMETHEUS_URL = env.PROMETHEUS_URL;
67
const STALE_JOB_THRESHOLD_SECONDS = 30; // Consider a job stale if no metrics in the last 30 seconds
78

89
let prom: PrometheusDriver | null = null;
@@ -118,7 +119,7 @@ const checkJobFreshness = async (jobId: string): Promise<boolean> => {
118119
// Get the actual running jobs from Slurm
119120
const getRunningJobsFromSlurm = async (): Promise<Set<string>> => {
120121
try {
121-
const baseURL = process.env.NEXT_PUBLIC_BASE_URL || "http://localhost:3000";
122+
const baseURL = env.NEXT_PUBLIC_BASE_URL || "http://localhost:3000";
122123
const response = await fetch(`${baseURL}/api/slurm/jobs`);
123124

124125
if (!response.ok) {

app/api/rewind/route.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import fs from "fs/promises";
33
import path from "path";
44
import zlib from "zlib";
55
import { promisify } from "util";
6+
import { env } from "process";
67

78
const gunzip = promisify(zlib.gunzip);
89

@@ -45,7 +46,7 @@ export async function GET(request: Request) {
4546
}
4647

4748
const dataDir =
48-
process.env.HISTORICAL_DATA_DIR || path.join(process.cwd(), "data");
49+
env.HISTORICAL_DATA_DIR || path.join(process.cwd(), "data");
4950
const files = await fs.readdir(dataDir);
5051
const [hour] = time.split(":");
5152
const targetStartTime = new Date(`${date}T${hour}:00:00.000Z`);

app/api/slurm/reservations/route.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
export const dynamic = 'force-dynamic';
2+
import { env } from "process";
23

34
import { NextResponse } from "next/server";
45
import { fetchSlurmData } from "@/lib/slurm-api";
56

67
export async function GET() {
78
const isEnabled =
8-
String(process.env.MAINT_NOTIFICATIONS_ENABLED ?? "true").toLowerCase() === "true";
9+
String(env.MAINT_NOTIFICATIONS_ENABLED ?? "true").toLowerCase() === "true";
910

1011
if (!isEnabled) {
1112
return NextResponse.json({ meta: { enabled: false }, reservations: [] });

app/layout.tsx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,12 @@ import type { Metadata } from "next";
22
import { Inter } from "next/font/google";
33
import "./globals.css";
44
import { ThemeProvider } from "@/components/theme-provider";
5+
import { env } from "process";
56

67
const inter = Inter({ subsets: ["latin"] });
78

89
export const metadata: Metadata = {
9-
title: `${process.env.CLUSTER_NAME} Supercomputer`,
10+
title: `${env.CLUSTER_NAME} Supercomputer`,
1011
description: "A Slurm supercomputer dashboard.",
1112
icons: {
1213
icon: "/favicon.ico",

0 commit comments

Comments (0)