/**
 * @fileoverview Script to make a social post about the latest release.
 * @author Nicholas C. Zakas
 */

/* global fetch */

//-----------------------------------------------------------------------------
// Imports
//-----------------------------------------------------------------------------

import fsp from "node:fs/promises";
import { fileURLToPath } from "node:url";
import { dirname, join } from "node:path";

//-----------------------------------------------------------------------------
// Type Definitions
//-----------------------------------------------------------------------------

/** @typedef {import("./types.js").ReleaseInfo} ReleaseInfo */
/** @typedef {import("./types.js").GptMessage} GptMessage */
/** @typedef {import("./types.js").GptChatCompletionResponse} GptChatCompletionResponse */
//-----------------------------------------------------------------------------
// Constants
//-----------------------------------------------------------------------------

// Maximum post length in characters (URLs counted at their fixed width).
const MAX_CHARACTERS = 280;
// Number of generation attempts allowed when a post exceeds MAX_CHARACTERS.
const MAX_RETRIES = 3;
const URL_LENGTH = 27; // Bluesky counts URLs as 27 characters
// Default OpenAI endpoint; overridable via the generator's `baseUrl` option.
const API_BASE_URL = "https://api.openai.com/v1/";
// Model used when the caller doesn't configure one.
const DEFAULT_MODEL = "gpt-4o-mini";
| 33 | + |
//-----------------------------------------------------------------------------
// Helpers
//-----------------------------------------------------------------------------
| 37 | + |
/**
 * Loads the AI prompt text from the `prompt.txt` file that sits alongside
 * this module on disk.
 * @returns {Promise<string>} The prompt text.
 * @throws {Error} If the file cannot be read.
 */
export async function readPrompt() {
	const moduleDir = dirname(fileURLToPath(import.meta.url));
	const promptPath = join(moduleDir, "prompt.txt");

	return await fsp.readFile(promptPath, "utf8");
}
| 47 | + |
const gptRoles = new Set(["user", "system", "developer"]);

/**
 * Ensures that a GPT message role is one of the values the API accepts.
 * @param {string} role The role to validate.
 * @returns {void}
 * @throws {Error} If the role is not a recognized non-empty string.
 */
function validateGptRole(role) {
	const isNonEmptyString = typeof role === "string" && role.trim() !== "";

	if (!isNonEmptyString) {
		throw new Error("Invalid role");
	}

	if (!gptRoles.has(role)) {
		throw new Error(`Invalid role: ${role}`);
	}
}
| 65 | + |
/**
 * Computes the effective length of a social media post, applying Bluesky's
 * rule that every URL contributes a fixed number of characters.
 * @param {string} text The text to measure.
 * @returns {number} The length in characters.
 */
function getPostLength(text) {
	// Substitute a fixed-width placeholder for each URL before counting.
	const urlPlaceholder = "x".repeat(URL_LENGTH);

	return text.replace(/https?:\/\/\S+/g, urlPlaceholder).length;
}
| 76 | + |
/**
 * Removes leading and trailing quotation marks from a string.
 *
 * The input is trimmed before the quote check so that quotes preceded or
 * followed by whitespace are still stripped. (Previously the regex was
 * anchored to the raw string ends while `trim()` ran afterwards, so a
 * response like `  "text"  ` kept its quotes.)
 * @param {string} text The text to clean.
 * @returns {string} The text without surrounding quotes or whitespace.
 */
function removeQuotes(text) {
	return text.trim().replace(/^["']|["']$/g, "").trim();
}
| 85 | + |
//-----------------------------------------------------------------------------
// Exports
//-----------------------------------------------------------------------------
| 89 | + |
/**
 * Generates a social media post using OpenAI.
 */
export class ChatCompletionPostGenerator {
	/**
	 * The OpenAI API token.
	 * @type {string}
	 */
	#token;

	/**
	 * The AI prompt. An empty string means "load the default prompt from
	 * disk via readPrompt() on first use".
	 * @type {string}
	 */
	#prompt;

	/**
	 * The API base URL.
	 * @type {string}
	 */
	#baseUrl = API_BASE_URL;

	/**
	 * The model to use.
	 * @type {string}
	 */
	#model = DEFAULT_MODEL;

	/**
	 * Creates a new ChatCompletionPostGenerator instance.
	 * @param {string|undefined} token The OpenAI API token.
	 * @param {Object} [options] The options for the generator.
	 * @param {string} [options.prompt] The AI prompt.
	 * @param {string} [options.baseUrl] The API base URL. Requires `model`.
	 * @param {string} [options.model] The model to use.
	 * @throws {Error} If the token is missing or not a string, or if a
	 * custom base URL is supplied without a model.
	 */
	constructor(token, { prompt = "", baseUrl, model } = {}) {
		if (!token) {
			throw new Error("Missing API token");
		}

		if (typeof token !== "string") {
			throw new Error("API token isn't a string");
		}

		if (baseUrl) {
			// A custom endpoint may not support the default model name,
			// so the caller must name a model explicitly.
			if (!model) {
				throw new Error(
					"Model is required when using a custom base URL",
				);
			}

			this.#baseUrl = baseUrl;
			this.#model = model;

			// Ensure the base URL ends with a slash so that relative URL
			// resolution in #fetchCompletion preserves the full path.
			if (!this.#baseUrl.endsWith("/")) {
				this.#baseUrl += "/";
			}
		}

		this.#token = token;
		this.#prompt = prompt;
	}

	/**
	 * Fetches a completion from OpenAI.
	 * @param {Object} options The options for the completion.
	 * @param {string} options.model The model to use.
	 * @param {Array<GptMessage>} options.messages The messages to send.
	 * @returns {Promise<GptChatCompletionResponse>} The completion response.
	 * @throws {Error} If a message role is invalid or the response is not ok.
	 */
	async #fetchCompletion({ model, messages }) {
		// Fail fast on bad roles before spending a network round trip.
		messages.forEach(message => {
			validateGptRole(message.role);
		});

		const url = new URL("chat/completions", this.#baseUrl);
		const response = await fetch(url, {
			method: "POST",
			headers: {
				"Content-Type": "application/json",
				Authorization: `Bearer ${this.#token}`,
			},
			body: JSON.stringify({
				model,
				messages,
				temperature: 0.7,
			}),
		});

		if (!response.ok) {
			throw new Error(
				`${response.status} ${response.statusText}: Chat completion failed`,
			);
		}

		return await response.json();
	}

	/**
	 * Generates a tweet summary using OpenAI with retry logic for length.
	 * @param {string} projectName The name of the project.
	 * @param {ReleaseInfo} release The release information.
	 * @returns {Promise<string>} The generated tweet.
	 * @throws {Error} If unable to generate a valid post within retries.
	 */
	async generateSocialPost(projectName, release) {
		// Use the configured prompt if present; otherwise load the default
		// prompt file from disk.
		const systemPrompt = this.#prompt || (await readPrompt());
		const { details, url, version } = release;

		let attempts = 0;

		while (attempts < MAX_RETRIES) {
			const completion = await this.#fetchCompletion({
				model: this.#model,
				messages: [
					{
						role: "system",
						content:
							// After a failed attempt, nudge the model
							// toward a shorter answer.
							attempts > 0
								? `${systemPrompt}\n\nPREVIOUS ATTEMPT WAS TOO LONG. Make it shorter!`
								: systemPrompt,
					},
					{
						role: "user",
						content: `Create a post summarizing this release for ${projectName} ${version}: ${details}\n\nURL is ${url}`,
					},
				],
			});

			const post = completion.choices[0]?.message?.content;
			if (!post) {
				throw new Error("No content received from OpenAI");
			}

			// Strip any quotation marks the model wrapped around the post,
			// then accept it only if it fits the Bluesky length limit.
			const cleanPost = removeQuotes(post);
			if (getPostLength(cleanPost) <= MAX_CHARACTERS) {
				return cleanPost;
			}

			attempts++;
		}

		throw new Error(
			`Failed to generate post within ${MAX_CHARACTERS} characters after ${MAX_RETRIES} attempts`,
		);
	}
}
0 commit comments