initial commit: wwcompanion v1
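Adds the extension's background service worker: it seeds default task and settings values in chrome.storage.local on install and streams OpenAI chat completion output back to the UI over a long-lived "analysis" port.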
189
background.js
Normal file
@@ -0,0 +1,189 @@
const DEFAULT_TASKS = [
  {
    id: "task-fit-summary",
    name: "Fit Summary",
    text:
      "Summarize the role, highlight key requirements, and assess my fit using the resume. Note any gaps and what to emphasize."
  }
];

const DEFAULT_SETTINGS = {
  apiKey: "",
  model: "gpt-4o-mini",
  systemPrompt:
    "You are a precise, honest assistant. Be concise, highlight uncertainties, and avoid inventing details.",
  resume: "",
  tasks: DEFAULT_TASKS
};

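// Seed chrome.storage.local with defaults on install/update, writing only the
// keys that are missing so existing user settings are preserved.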
chrome.runtime.onInstalled.addListener(async () => {
  const stored = await chrome.storage.local.get(Object.keys(DEFAULT_SETTINGS));
  const updates = {};

  for (const [key, value] of Object.entries(DEFAULT_SETTINGS)) {
    const existing = stored[key];
    const missing =
      existing === undefined ||
      existing === null ||
      (key === "tasks" && !Array.isArray(existing));

    if (missing) updates[key] = value;
  }

  if (Object.keys(updates).length) {
    await chrome.storage.local.set(updates);
  }
});

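// Long-lived "analysis" port opened by the extension UI. Each START_ANALYSIS
// request gets its own AbortController so the in-flight stream can be cancelled
// by an ABORT_ANALYSIS message or by the port disconnecting.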
chrome.runtime.onConnect.addListener((port) => {
  if (port.name !== "analysis") return;

  let abortController = null;

  const resetAbort = () => {
    if (abortController) abortController.abort();
    abortController = null;
  };

  port.onMessage.addListener((message) => {
    if (message?.type === "START_ANALYSIS") {
      resetAbort();
      abortController = new AbortController();
      void handleAnalysisRequest(port, message.payload, abortController.signal).catch(
        (error) => {
          if (error?.name === "AbortError") {
            port.postMessage({ type: "ABORTED" });
            return;
          }
          port.postMessage({
            type: "ERROR",
            message: error?.message || "Unknown error during analysis."
          });
        }
      );
      return;
    }

    if (message?.type === "ABORT_ANALYSIS") {
      resetAbort();
    }
  });

  port.onDisconnect.addListener(() => {
    resetAbort();
  });
});

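// Example (sketch, not part of this commit): the extension UI is assumed to
// drive this worker roughly as follows:
//   const port = chrome.runtime.connect({ name: "analysis" });
//   port.postMessage({ type: "START_ANALYSIS", payload: { apiKey, model, systemPrompt, resume, taskText, postingText } });
//   port.onMessage.addListener((msg) => { /* handle DELTA, DONE, ABORTED, ERROR */ });

// Builds a single plain-text prompt with clearly delimited resume, task, and
// job posting sections.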
function buildUserMessage(resume, task, posting) {
  return [
    "=== RESUME ===",
    resume || "",
    "",
    "=== TASK ===",
    task || "",
    "",
    "=== JOB POSTING ===",
    posting || ""
  ].join("\n");
}

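// Validates the payload, then streams model output back over the port as DELTA
// messages and finishes with DONE; thrown errors are handled by the caller.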
async function handleAnalysisRequest(port, payload, signal) {
  const { apiKey, model, systemPrompt, resume, taskText, postingText } = payload || {};

  if (!apiKey) {
    port.postMessage({ type: "ERROR", message: "Missing OpenAI API key." });
    return;
  }

  if (!model) {
    port.postMessage({ type: "ERROR", message: "Missing model name." });
    return;
  }

  if (!postingText) {
    port.postMessage({ type: "ERROR", message: "No job posting text provided." });
    return;
  }

  if (!taskText) {
    port.postMessage({ type: "ERROR", message: "No task prompt selected." });
    return;
  }

  const userMessage = buildUserMessage(resume, taskText, postingText);

  await streamChatCompletion({
    apiKey,
    model,
    systemPrompt: systemPrompt || "",
    userMessage,
    signal,
    onDelta: (text) => port.postMessage({ type: "DELTA", text })
  });

  port.postMessage({ type: "DONE" });
}

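// Calls the OpenAI Chat Completions endpoint with stream: true and invokes
// onDelta for each content fragment parsed from the SSE response.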
async function streamChatCompletion({
  apiKey,
  model,
  systemPrompt,
  userMessage,
  signal,
  onDelta
}) {
  const response = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`
    },
    body: JSON.stringify({
      model,
      stream: true,
      messages: [
        { role: "system", content: systemPrompt },
        { role: "user", content: userMessage }
      ]
    }),
    signal
  });

  if (!response.ok) {
    const errorText = await response.text();
    throw new Error(`OpenAI API error ${response.status}: ${errorText}`);
  }

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";

  // OpenAI streams Server-Sent Events; parse incremental deltas from data lines.
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;

    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split("\n");
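    // A chunk may end mid-line; keep the trailing partial line in the buffer
    // until the next read completes it.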
    buffer = lines.pop() || "";

    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed.startsWith("data:")) continue;

      const data = trimmed.slice(5).trim();
      if (!data) continue;
      if (data === "[DONE]") return;

      let parsed;
      try {
        parsed = JSON.parse(data);
      } catch {
        continue;
      }

      const delta = parsed?.choices?.[0]?.delta?.content;
      if (delta) onDelta(delta);
    }
  }
}