feat: encrypted git-backed vault with idle auto-lock

- Master password derives an AES-GCM-256 key via PBKDF2-SHA256; the key never leaves the device and the remote only sees ciphertext blobs.
- One file per entry under entries/; conflicts produce a sidecar the user resolves in the UI.
- Vault stays unlocked across background-script suspensions via storage.session; auto-locks after 5 min of inactivity.
This commit is contained in:
schmop 2026-04-23 01:26:32 +02:00
parent 8c05cffa67
commit c965d0f4ad
10 changed files with 1076 additions and 0 deletions

View File

@ -3,6 +3,11 @@
"name": "Passchmop",
"version": "0.1.0",
"description": "Git-backed, client-side-encrypted password manager.",
"background": {
"scripts": ["build/background.js"]
},
"permissions": ["storage", "clipboardWrite", "alarms"],
"host_permissions": ["https://*/*"],
"browser_specific_settings": {
"gecko": {
"id": "passchmop@schmop",

103
src/background/crypto.ts Normal file
View File

@ -0,0 +1,103 @@
// Crypto helpers — PBKDF2 key derivation + AES-GCM blob encryption.
// Uses only Web Crypto (SubtleCrypto); no dependencies.
const subtle = crypto.subtle;
const TE = new TextEncoder();
const TD = new TextDecoder();
// Known plaintext encrypted into vault-meta.json; decrypting it back proves a
// candidate master key is correct (see makeVerifier / checkVerifier).
const VERIFIER_PLAINTEXT = 'passchmop-v1';
// TS 6's lib.dom narrowed `BufferSource` to exclude SharedArrayBuffer-backed
// typed arrays. Our Uint8Arrays always sit on plain ArrayBuffers; this cast
// makes that explicit for Web Crypto APIs.
const bs = (u: Uint8Array): BufferSource => u as unknown as BufferSource;
/**
 * Derive the AES-GCM-256 master key from a password with PBKDF2-SHA-256.
 *
 * @param password   master password (UTF-8 encoded before derivation)
 * @param saltBytes  random salt from vault-meta.json
 * @param iterations PBKDF2 iteration count from vault-meta.json
 */
export async function deriveKey(
  password: string,
  saltBytes: Uint8Array,
  iterations: number,
): Promise<CryptoKey> {
  // Wrap the raw password bytes as (non-extractable) PBKDF2 key material.
  const baseKey = await subtle.importKey(
    'raw',
    TE.encode(password),
    'PBKDF2',
    false,
    ['deriveKey'],
  );
  const kdfParams = { name: 'PBKDF2', salt: bs(saltBytes), iterations, hash: 'SHA-256' };
  // The derived key is extractable on purpose: its raw bytes are mirrored into
  // browser.storage.session (in-memory only) and re-imported on event-page
  // wake-up, keeping the vault unlocked across Firefox MV3 suspensions.
  return subtle.deriveKey(
    kdfParams,
    baseKey,
    { name: 'AES-GCM', length: 256 },
    true,
    ['encrypt', 'decrypt'],
  );
}
/** Encrypt `plaintext` with AES-GCM; returns `iv (12 B) || ciphertext+tag`. */
export async function encrypt(key: CryptoKey, plaintext: Uint8Array): Promise<Uint8Array> {
  // Fresh random 96-bit nonce per blob — never reused for a given key.
  const iv = crypto.getRandomValues(new Uint8Array(12));
  const cipherBuf = await subtle.encrypt({ name: 'AES-GCM', iv: bs(iv) }, key, bs(plaintext));
  const cipher = new Uint8Array(cipherBuf);
  const packed = new Uint8Array(12 + cipher.length);
  packed.set(iv);
  packed.set(cipher, 12);
  return packed;
}
/**
 * Inverse of encrypt(): split off the 12-byte iv and decrypt the remainder.
 * Throws (GCM auth failure) on a wrong key or tampered blob.
 */
export async function decrypt(key: CryptoKey, blob: Uint8Array | ArrayBuffer): Promise<Uint8Array> {
  const bytes = blob instanceof Uint8Array ? blob : new Uint8Array(blob);
  const plainBuf = await subtle.decrypt(
    { name: 'AES-GCM', iv: bs(bytes.subarray(0, 12)) },
    key,
    bs(bytes.subarray(12)),
  );
  return new Uint8Array(plainBuf);
}
/** JSON-serialise `obj`, then encrypt it into an `iv || ciphertext` blob. */
export async function encryptJson(key: CryptoKey, obj: unknown): Promise<Uint8Array> {
  const serialized = JSON.stringify(obj);
  return encrypt(key, TE.encode(serialized));
}
/** Decrypt a blob produced by encryptJson() and parse it back into a T. */
export async function decryptJson<T = unknown>(key: CryptoKey, blob: Uint8Array): Promise<T> {
  const plain = await decrypt(key, blob);
  const text = TD.decode(plain);
  return JSON.parse(text) as T;
}
/** `n` cryptographically random bytes (default 16) — used as a KDF salt. */
export function randomSalt(n = 16): Uint8Array {
  const salt = new Uint8Array(n);
  crypto.getRandomValues(salt);
  return salt;
}
/** RFC 4122 version-4 UUID built from 16 random bytes. */
export function uuid(): string {
  const bytes = crypto.getRandomValues(new Uint8Array(16));
  bytes[6] = (bytes[6]! & 0x0f) | 0x40; // version nibble = 4
  bytes[8] = (bytes[8]! & 0x3f) | 0x80; // variant bits = 10xx
  const hexParts: string[] = [];
  for (const byte of bytes) hexParts.push(byte.toString(16).padStart(2, '0'));
  const hex = hexParts.join('');
  return [
    hex.slice(0, 8),
    hex.slice(8, 12),
    hex.slice(12, 16),
    hex.slice(16, 20),
    hex.slice(20),
  ].join('-');
}
/** Bytes → base64 via btoa (each byte mapped to a latin-1 code unit). */
export function b64encode(bytes: Uint8Array): string {
  const chars: string[] = [];
  for (let i = 0; i < bytes.length; i++) {
    chars.push(String.fromCharCode(bytes[i]!));
  }
  return btoa(chars.join(''));
}
/** Base64 → bytes; exact inverse of b64encode(). */
export function b64decode(str: string): Uint8Array {
  const bin = atob(str);
  return Uint8Array.from(bin, ch => ch.charCodeAt(0));
}
export async function makeVerifier(key: CryptoKey): Promise<string> {
const blob = await encrypt(key, TE.encode(VERIFIER_PLAINTEXT));
return b64encode(blob);
}
export async function checkVerifier(key: CryptoKey, b64verifier: string): Promise<boolean> {
try {
const blob = b64decode(b64verifier);
const pt = await decrypt(key, blob);
return TD.decode(pt) === VERIFIER_PLAINTEXT;
} catch {
return false;
}
}

84
src/background/fs.ts Normal file
View File

@ -0,0 +1,84 @@
// Filesystem wrapper around lightning-fs (IndexedDB-backed). isomorphic-git
// needs the instance itself; our code uses the Promises-style helpers.
import LightningFS from '@isomorphic-git/lightning-fs';
// Single shared instance; 'passchmop-repo' names the backing IndexedDB store.
const inst = new LightningFS('passchmop-repo');
export const fs = inst;            // callback-style instance handed to isomorphic-git
export const pfs = inst.promises;  // Promises API used by our own code
export const dir = '/repo';        // repo root inside the virtual filesystem
export const entriesDir = `${dir}/entries`; // one encrypted blob per entry lives here
// lightning-fs raises Node-style errors carrying a string `code`
// ('ENOENT', 'EEXIST', …) that the helpers below branch on.
interface FsError extends Error { code?: string }
/** mkdir that tolerates the directory already existing. */
export async function ensureDir(path: string): Promise<void> {
  try {
    await pfs.mkdir(path);
  } catch (e) {
    const code = (e as FsError).code;
    if (code !== 'EEXIST') throw e;
  }
}
/**
 * Filenames of all encrypted entry blobs (`*.enc`) under entries/.
 * Returns [] when the directory doesn't exist yet (fresh install).
 */
export async function listEntryFiles(): Promise<string[]> {
  try {
    const allNames = await pfs.readdir(entriesDir);
    return allNames.filter(name => name.endsWith('.enc'));
  } catch (e) {
    if ((e as FsError).code !== 'ENOENT') throw e;
    return [];
  }
}
/** Read one encrypted entry blob from `entries/<filename>`. */
export async function readEntryFile(filename: string): Promise<Uint8Array> {
  // FIX: the path previously contained the literal text "$(unknown)" instead
  // of interpolating `filename`, so every read targeted the same bogus path.
  const buf = await pfs.readFile(`${entriesDir}/${filename}`);
  // lightning-fs returns a string only when an encoding is requested; we read raw.
  if (typeof buf === 'string') throw new Error('unexpected string read for entry file');
  return new Uint8Array(buf);
}
/** Write (create or overwrite) one encrypted entry blob under entries/. */
export async function writeEntryFile(filename: string, bytes: Uint8Array): Promise<void> {
  await ensureDir(entriesDir);
  // FIX: interpolate `filename` (was the literal "$(unknown)").
  await pfs.writeFile(`${entriesDir}/${filename}`, bytes);
}
/** Delete one entry blob; an already-missing file is not an error. */
export async function removeEntryFile(filename: string): Promise<void> {
  try {
    // FIX: interpolate `filename` (was the literal "$(unknown)").
    await pfs.unlink(`${entriesDir}/${filename}`);
  } catch (e) {
    if ((e as FsError).code !== 'ENOENT') throw e;
  }
}
/** Parse a JSON file under the repo root; null when the file doesn't exist. */
export async function readJson<T = unknown>(path: string): Promise<T | null> {
  try {
    const raw = await pfs.readFile(`${dir}/${path}`, { encoding: 'utf8' });
    // Defensive: the shim types allow a Uint8Array return even with utf8.
    const text = typeof raw === 'string' ? raw : new TextDecoder().decode(raw);
    return JSON.parse(text) as T;
  } catch (e) {
    if ((e as FsError).code !== 'ENOENT') throw e;
    return null;
  }
}
/** Pretty-print `obj` as JSON under the repo root, creating /repo if needed. */
export async function writeJson(path: string, obj: unknown): Promise<void> {
  await ensureDir(dir);
  const text = JSON.stringify(obj, null, 2);
  await pfs.writeFile(`${dir}/${path}`, text, 'utf8');
}
/** Recursively delete the whole /repo tree; missing paths are ignored. */
export async function wipeRepo(): Promise<void> {
  const removeTree = async (path: string): Promise<void> => {
    try {
      const st = await pfs.stat(path);
      if (!st.isDirectory()) {
        await pfs.unlink(path);
        return;
      }
      // Depth-first: children must go before the directory itself.
      for (const child of await pfs.readdir(path)) {
        await removeTree(`${path}/${child}`);
      }
      await pfs.rmdir(path);
    } catch (e) {
      if ((e as FsError).code !== 'ENOENT') throw e;
    }
  };
  await removeTree(dir);
}

84
src/background/index.ts Normal file
View File

@ -0,0 +1,84 @@
// Background entry point. Routes runtime messages to the vault API,
// runs periodic sync, and enforces the idle auto-lock.
import * as vault from './vault.js';
import type { Message, BackgroundResponse } from '../common/types.js';
// Alarm names, shared between the create() calls and the dispatcher below.
const SYNC_ALARM = 'passchmop-sync';
const IDLE_ALARM = 'passchmop-idle';
// Single message router for the UI layers. Returning a Promise from the
// listener makes the browser hold the channel open and deliver the resolved
// object as the sendMessage() response.
browser.runtime.onMessage.addListener(async (rawMsg: unknown): Promise<BackgroundResponse> => {
  // NOTE(review): unchecked cast — a malformed message would fall through to
  // the default branch below rather than being validated field-by-field.
  const msg = rawMsg as Message;
  try {
    let value: unknown = undefined;
    switch (msg.type) {
      case 'isConfigured':
        value = await vault.isConfigured();
        break;
      case 'isUnlocked':
        value = await vault.isUnlocked();
        break;
      case 'setup':
        await vault.setup(msg.args);
        break;
      case 'unlock':
        await vault.unlock(msg.password);
        break;
      case 'lock':
        await vault.lock();
        break;
      case 'list':
        value = await vault.list();
        break;
      case 'put':
        value = await vault.put(msg.entry);
        break;
      case 'delete':
        await vault.remove(msg.id, msg.file);
        break;
      case 'sync':
        value = await vault.syncNow();
        break;
      case 'reset':
        await vault.reset();
        break;
      default: {
        // Compile-time exhaustiveness: adding a Message variant without a
        // case here makes this assignment fail to type-check.
        const _exhaustive: never = msg;
        void _exhaustive;
        return { ok: false, error: `unknown message type` };
      }
    }
    // Any interaction with an unlocked vault counts as activity and
    // resets the idle-lock timer. Explicit lock/reset do not bump.
    if (msg.type !== 'lock' && msg.type !== 'reset') {
      await vault.touchActivity();
    }
    return { ok: true, value };
  } catch (e) {
    console.error('background error:', e);
    const error = e instanceof Error ? e.message : String(e);
    return { ok: false, error };
  }
});
// Periodic sync — only does work while the event page is alive AND the vault
// is unlocked; otherwise the next popup open triggers a sync on unlock.
browser.alarms.create(SYNC_ALARM, { periodInMinutes: 2 });
// Idle auto-lock poll — drops the session key after the inactivity window.
browser.alarms.create(IDLE_ALARM, { periodInMinutes: 0.5 });
browser.alarms.onAlarm.addListener(async ({ name }) => {
  if (name === IDLE_ALARM) {
    try {
      await vault.idleCheck();
    } catch (e) {
      console.warn('idle check failed:', e instanceof Error ? e.message : e);
    }
  } else if (name === SYNC_ALARM) {
    if (!(await vault.isUnlocked())) return;
    try {
      await vault.syncNow();
    } catch (e) {
      console.warn('periodic sync failed:', e instanceof Error ? e.message : e);
    }
  }
});
console.log('passchmop background ready');

322
src/background/sync.ts Normal file
View File

@ -0,0 +1,322 @@
// Git operations + per-entry conflict resolution.
//
// Strategy: keep a pending-ops queue locally. On sync:
// 1. fetch origin
// 2. hard-reset working tree to origin/main
// 3. replay each pending op, resolving conflicts per-entry
// 4. commit + push; on non-fast-forward, the caller retries
//
// Conflict rules:
// * add/add is impossible (UUIDs are random)
// * linear edit (remote.modified_at === op.baseModifiedAt) → overwrite
// * genuine edit/edit → keep newer as primary, loser as sidecar
// * edit/delete → edit wins (no silent data loss)
import * as git from 'isomorphic-git';
import http from 'isomorphic-git/http/web';
import {
fs, pfs, dir, entriesDir,
ensureDir, wipeRepo, writeJson, readJson,
listEntryFiles, readEntryFile, writeEntryFile, removeEntryFile,
} from './fs.js';
import {
encryptJson, decryptJson, makeVerifier, b64encode,
} from './crypto.js';
import type { Entry, VaultMeta, RepoConfig, PendingOp } from '../common/types.js';
// Resolved sync settings. `author` is derived from the device id so each
// device commits under a stable synthetic identity.
interface InternalConfig {
  repoUrl: string;
  corsProxyUrl?: string;
  auth: { username: string; token: string };
  deviceId: string;
  author: { name: string; email: string };
}
// Module-level singleton set by configure(); every git call reads it via must().
let cfg: InternalConfig | null = null;
/** Install the repo/auth settings that every subsequent git call reads. */
export function configure(input: RepoConfig): void {
  const shortId = input.deviceId.slice(0, 8);
  cfg = {
    repoUrl: input.repoUrl,
    corsProxyUrl: input.corsProxyUrl || undefined,
    auth: input.auth,
    deviceId: input.deviceId,
    // Synthetic per-device commit identity derived from the device id.
    author: {
      name: `passchmop-${shortId}`,
      email: `${input.deviceId}@passchmop.local`,
    },
  };
}
/** Current config; throws if configure() has not run yet. */
function must(): InternalConfig {
  if (cfg === null) throw new Error('sync not configured');
  return cfg;
}
// Options shared by every isomorphic-git call. Spread before your own fields.
function gitOpts<E extends object>(extra: E): E & {
  fs: typeof fs; http: typeof http; dir: string;
  corsProxy: string | undefined;
  author: { name: string; email: string };
  committer: { name: string; email: string };
  onAuth: () => { username: string; password: string };
} {
  const conf = must();
  const base = {
    fs, http, dir,
    corsProxy: conf.corsProxyUrl,
    author: conf.author,
    committer: conf.author,
    // isomorphic-git calls this back for HTTP basic auth on fetch/push;
    // the PAT goes in the password slot.
    onAuth: () => ({ username: conf.auth.username, password: conf.auth.token }),
  };
  return { ...base, ...extra };
}
// Create a brand-new vault and push it to the remote.
//
// Handles three remote states:
//   1. Empty repo (no refs) → init + commit + push.
//   2. Remote has main, no vault-meta → base our init commit on origin/main
//      so our files are added on top of the existing content
//      (e.g. an auto-created README).
//   3. Remote already has vault-meta → refuse; user should use "Join
//      existing vault" instead.
export async function createVault(
  masterKey: CryptoKey,
  saltBytes: Uint8Array,
  iterations: number,
): Promise<VaultMeta> {
  const c = must();
  // Start from a clean slate: discard any previous local repo.
  await wipeRepo();
  await ensureDir(dir);
  await git.init({ fs, dir, defaultBranch: 'main' });
  await git.addRemote({ fs, dir, remote: 'origin', url: c.repoUrl });
  let baseOid: string | null = null;
  try {
    await git.fetch(gitOpts({ ref: 'main', singleBranch: true, depth: 1 }));
    baseOid = await git.resolveRef({ fs, dir, ref: 'refs/remotes/origin/main' });
  } catch {
    // Empty remote or no `main` yet — that's fine, we'll create it.
  }
  if (baseOid) {
    let vaultAlreadyExists = false;
    try {
      await git.readBlob({ fs, dir, oid: baseOid, filepath: 'vault-meta.json' });
      vaultAlreadyExists = true;
    } catch { /* not found → good */ }
    if (vaultAlreadyExists) {
      throw new Error('This repo already contains a vault. Use "Join existing vault" instead.');
    }
    // Fast-forward local main to origin/main so our commit is a proper child.
    // IMPORTANT: writeRef main first, then checkout by *branch name*. Checking
    // out by SHA leaves HEAD detached, after which commit() updates HEAD but
    // not refs/heads/main, and push('main') silently sends a stale ref.
    await git.writeRef({ fs, dir, ref: 'refs/heads/main', value: baseOid, force: true });
    await git.checkout({ fs, dir, ref: 'main', force: true });
  }
  // KDF parameters live in cleartext metadata; only the verifier is ciphertext.
  const meta: VaultMeta = {
    version: 1,
    kdf: {
      name: 'PBKDF2',
      hash: 'SHA-256',
      iterations,
      salt: b64encode(saltBytes),
    },
    verifier: await makeVerifier(masterKey),
  };
  await writeJson('vault-meta.json', meta);
  await ensureDir(entriesDir);
  // .gitkeep so the otherwise-empty entries/ directory makes it into the commit.
  await pfs.writeFile(`${entriesDir}/.gitkeep`, '');
  await git.add({ fs, dir, filepath: 'vault-meta.json' });
  await git.add({ fs, dir, filepath: 'entries/.gitkeep' });
  await git.commit({
    fs, dir,
    message: 'init vault',
    author: c.author,
    committer: c.author,
  });
  await git.push(gitOpts({ remote: 'origin', ref: 'main' }));
  return meta;
}
// Clone an existing vault repo (shallow, main only). Deriving the master key
// from the returned `vault-meta.json` is the caller's responsibility.
export async function cloneVault(): Promise<VaultMeta> {
  const conf = must();
  await wipeRepo();
  const cloneOpts = gitOpts({
    url: conf.repoUrl,
    ref: 'main',
    singleBranch: true,
    depth: 1,
  });
  await git.clone(cloneOpts);
  const meta = await readJson<VaultMeta>('vault-meta.json');
  if (meta === null) throw new Error('repo has no vault-meta.json');
  return meta;
}
/** The local working copy's vault-meta.json, or null when absent. */
export async function readVaultMeta(): Promise<VaultMeta | null> {
  return readJson<VaultMeta>('vault-meta.json');
}
// Fetch remote, reset to origin/main, apply pending ops, commit, push.
// Returns { changed }. Throws on push failure; caller may retry.
export async function sync(
  masterKey: CryptoKey,
  pendingOps: readonly PendingOp[],
): Promise<{ changed: boolean }> {
  const c = must();
  await git.fetch(gitOpts({ ref: 'main', singleBranch: true, depth: 1 }));
  let remoteOid: string;
  try {
    remoteOid = await git.resolveRef({ fs, dir, ref: 'refs/remotes/origin/main' });
  } catch {
    // No remote-tracking ref (e.g. remote still empty) — stay on our own HEAD.
    remoteOid = await git.resolveRef({ fs, dir, ref: 'HEAD' });
  }
  // writeRef *before* checkout, and check out by branch name, so HEAD stays
  // symbolic → refs/heads/main. Checkout by SHA would leave HEAD detached,
  // and subsequent commits wouldn't advance main; push becomes a silent no-op.
  await git.writeRef({ fs, dir, ref: 'refs/heads/main', value: remoteOid, force: true });
  await git.checkout({ fs, dir, ref: 'main', force: true });
  let changed = false;
  // Replay queued local ops on top of the fresh remote state, entry by entry.
  for (const op of pendingOps) {
    if (await applyOp(op, masterKey, remoteOid)) changed = true;
  }
  if (changed) {
    await stageAll();
    await git.commit({
      fs, dir,
      message: `sync from ${c.deviceId.slice(0, 8)}`,
      author: c.author,
      committer: c.author,
    });
  }
  // Push runs even when no commit was created; the ops queue is only cleared
  // by the caller after this whole function resolves.
  await git.push(gitOpts({ remote: 'origin', ref: 'main' }));
  return { changed };
}
/** Dispatch one queued op against the freshly-reset working tree. */
async function applyOp(op: PendingOp, masterKey: CryptoKey, remoteOid: string): Promise<boolean> {
  if (op.type === 'put') return applyPut(op, masterKey, remoteOid);
  if (op.type === 'delete') return applyDelete(op, masterKey, remoteOid);
  return false;
}
// Read what origin/main actually holds for this entry. Do NOT read from the
// working tree: `vault.put()` pre-writes blobs there for immediate UI feedback,
// and `git.checkout -f` doesn't remove untracked files, so the workdir can
// contain our own fresh-IV encryption that would look like a divergent remote.
async function readRemoteEntryBlob(remoteOid: string, filename: string): Promise<Uint8Array | null> {
  try {
    // FIX: the filepath previously contained the literal text "$(unknown)"
    // instead of interpolating `filename`, so every lookup missed, returned
    // null, and conflict detection never fired.
    const { blob } = await git.readBlob({
      fs, dir, oid: remoteOid, filepath: `entries/${filename}`,
    });
    return blob;
  } catch {
    // Not present in the remote tree (new entry, or deleted remotely).
    return null;
  }
}
// Apply one queued 'put' against the remote state, choosing between plain
// write, linear overwrite, and conflict-sidecar creation.
async function applyPut(
  op: Extract<PendingOp, { type: 'put' }>,
  masterKey: CryptoKey,
  remoteOid: string,
): Promise<boolean> {
  const filename = `${op.id}.enc`;
  const remoteBlob = await readRemoteEntryBlob(remoteOid, filename);
  // Re-encrypt from the plaintext op (fresh IV) rather than trusting the workdir.
  const localBlob = await encryptJson(masterKey, op.entry);
  if (!remoteBlob) {
    // Remote doesn't have this entry (new, or remotely deleted) — just write.
    await writeEntryFile(filename, localBlob);
    return true;
  }
  let remoteModAt: string | null = null;
  try {
    const r = await decryptJson<Entry>(masterKey, remoteBlob);
    remoteModAt = r.modified_at || null;
  } catch { /* undecryptable — treat as divergent */ }
  // Linear edit: remote is still at the version we based our edit on.
  // Overwrite with no sidecar.
  if (op.baseModifiedAt && remoteModAt && remoteModAt === op.baseModifiedAt) {
    await writeEntryFile(filename, localBlob);
    return true;
  }
  // Actual divergence — another device edited the same entry. Keep the
  // newer one as primary, save the loser as a `<uuid>.conflict-<ts>.enc`
  // sidecar so the user can resolve in the UI.
  const localTs = Date.parse(op.entry.modified_at) || 0;
  const remoteTs = remoteModAt ? (Date.parse(remoteModAt) || 0) : 0;
  if (localTs >= remoteTs) {
    await writeEntryFile(filename, localBlob);
    await writeEntryFile(`${op.id}.conflict-${remoteTs || Date.now()}.enc`, remoteBlob);
  } else {
    // Remote wins; our edit becomes the sidecar. The remote's primary blob is
    // already in the working tree after the checkout in sync().
    await writeEntryFile(`${op.id}.conflict-${localTs}.enc`, localBlob);
  }
  return true;
}
// Apply one queued delete. Edit-beats-delete: if the remote's primary copy was
// modified after the delete was initiated, keep it (no silent data loss).
async function applyDelete(
  op: Extract<PendingOp, { type: 'delete' }>,
  masterKey: CryptoKey,
  remoteOid: string,
): Promise<boolean> {
  const filename = op.file || `${op.id}.enc`;
  const remoteBlob = await readRemoteEntryBlob(remoteOid, filename);
  if (!remoteBlob) return false;
  const isPrimary = filename === `${op.id}.enc`;
  // Sidecar deletion is just "user resolved the conflict by picking the other
  // one" — always allowed; the timestamp guard applies to the primary only.
  if (isPrimary) {
    let remoteTs = 0;
    try {
      const remoteEntry = await decryptJson<Entry>(masterKey, remoteBlob);
      remoteTs = Date.parse(remoteEntry.modified_at) || 0;
    } catch { /* undecryptable remote — fall through and delete */ }
    if (remoteTs > (op.initiatedAt || 0)) return false;
  }
  await removeEntryFile(filename);
  return true;
}
/** Stage every entries/ change: adds for present files, removes for deleted. */
async function stageAll(): Promise<void> {
  const matrix = await git.statusMatrix({ fs, dir, filepaths: ['entries'] });
  for (const row of matrix) {
    const filepath = row[0];
    const workdirStatus = row[2];
    if (workdirStatus === 0) {
      await git.remove({ fs, dir, filepath });
    } else {
      await git.add({ fs, dir, filepath });
    }
  }
}
// Thin re-exports so vault.ts talks only to sync.ts for blob I/O.
/** List encrypted entry blob filenames (delegates to the fs layer). */
export function listBlobFiles(): Promise<string[]> {
  return listEntryFiles();
}
/** Read one encrypted entry blob. */
export function readBlobFile(filename: string): Promise<Uint8Array> {
  return readEntryFile(filename);
}
/** Write one encrypted entry blob. */
export function writeBlobFile(filename: string, bytes: Uint8Array): Promise<void> {
  return writeEntryFile(filename, bytes);
}
/** Remove one encrypted entry blob (missing file is not an error). */
export function removeBlobFile(filename: string): Promise<void> {
  return removeEntryFile(filename);
}

324
src/background/vault.ts Normal file
View File

@ -0,0 +1,324 @@
// Vault — the background script's main state machine.
// Holds the in-memory master key, exposes CRUD, drives sync.js.
//
// Firefox MV3 suspends the background event page when idle, which
// wipes our in-memory state. To keep the vault unlocked across those
// suspensions we mirror the raw AES-GCM key to browser.storage.session
// (an in-memory store that survives event-page cycles but is cleared
// on browser restart). Each interaction bumps a lastActivityAt stamp,
// and a periodic alarm in index.ts calls idleCheck() to lock after
// the configured inactivity window.
import * as sync from './sync.js';
import * as c from './crypto.js';
import { wipeRepo } from './fs.js';
import type {
Entry, EntryInput, EntryWithMeta, Auth, RepoConfig,
VaultMeta, PendingOp, SetupArgs,
} from '../common/types.js';
// Keys in browser.storage.local (persistent; survives browser restarts).
const STORAGE = {
  repoUrl: 'repoUrl',
  authCiphertext: 'repoAuthCiphertext', // Auth JSON encrypted under the master key
  deviceId: 'deviceId',
  corsProxyUrl: 'corsProxyUrl',
  vaultMeta: 'vaultMeta',
  pending: 'pendingOps',
} as const;
// Keys in browser.storage.session (memory-only; cleared on browser exit).
const SESSION = {
  rawKey: 'rawMasterKey', // base64 AES-GCM raw bytes
  lastActivity: 'lastActivity', // ms epoch
} as const;
export const IDLE_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes
// In-memory state — wiped whenever the MV3 event page is suspended, and
// re-hydrated on demand by restoreFromSession().
let masterKey: CryptoKey | null = null;
let meta: VaultMeta | null = null;
let repoConfig: RepoConfig | null = null;
let pending: PendingOp[] = [];
// Debounce handle for syncLater().
let syncTimer: ReturnType<typeof setTimeout> | null = null;
// De-dupes concurrent syncNow() calls into one shared in-flight promise.
let inflightSync: Promise<{ changed: boolean } | undefined> | null = null;
// Thin typed wrappers over browser.storage.local.
async function storageGet<T = Record<string, unknown>>(keys: string | string[]): Promise<T> {
  const result = await browser.storage.local.get(keys);
  return result as T;
}
async function storageSet(obj: Record<string, unknown>): Promise<void> {
  await browser.storage.local.set(obj);
}
async function storageRemove(keys: string | string[]): Promise<void> {
  await browser.storage.local.remove(keys);
}
/** Persist the pending-ops queue so it survives event-page suspension. */
async function savePending(): Promise<void> {
  await storageSet({ [STORAGE.pending]: pending });
}
// Mirror the raw master key + activity stamp into storage.session (an
// in-memory store that outlives event-page suspensions but not the browser),
// so the vault stays unlocked across MV3 background cycles.
async function persistSession(): Promise<void> {
  if (!masterKey) return;
  const rawBytes = new Uint8Array(await crypto.subtle.exportKey('raw', masterKey));
  const record = {
    [SESSION.rawKey]: c.b64encode(rawBytes),
    [SESSION.lastActivity]: Date.now(),
  };
  await browser.storage.session.set(record);
}
/** Drop the mirrored key and activity stamp — locks across suspensions too. */
async function clearSession(): Promise<void> {
  const keys = Object.values(SESSION) as string[];
  await browser.storage.session.remove(keys);
}
// Re-hydrate all in-memory state after an event-page suspension using the raw
// key mirrored in storage.session. Returns false (vault stays locked) when no
// key is stored or the idle window has already elapsed.
async function restoreFromSession(): Promise<boolean> {
  if (masterKey) return true;
  const r = await browser.storage.session.get(Object.values(SESSION) as string[]) as {
    [k: string]: unknown;
  };
  const raw = r[SESSION.rawKey] as string | undefined;
  const t = (r[SESSION.lastActivity] as number | undefined) || 0;
  if (!raw) return false;
  // Enforce the idle timeout here too, in case the periodic alarm never fired
  // (e.g. the whole browser was asleep).
  if (Date.now() - t > IDLE_TIMEOUT_MS) {
    await clearSession();
    return false;
  }
  const bytes = c.b64decode(raw);
  // Re-import as extractable so a later persistSession() can re-export it.
  masterKey = await crypto.subtle.importKey(
    'raw', bytes as unknown as BufferSource,
    { name: 'AES-GCM', length: 256 },
    true, ['encrypt', 'decrypt'],
  );
  // Rebuild meta / repo config / pending queue from persistent storage.
  const local = await storageGet<Record<string, unknown>>(Object.values(STORAGE) as string[]);
  meta = (local[STORAGE.vaultMeta] as VaultMeta | undefined) || null;
  let auth: Auth = { username: '', token: '' };
  const authCt = local[STORAGE.authCiphertext] as string | undefined;
  if (authCt) {
    try {
      auth = await c.decryptJson<Auth>(masterKey, c.b64decode(authCt));
    } catch (e) { console.warn('failed to decrypt stored auth', e); }
  }
  repoConfig = {
    repoUrl: local[STORAGE.repoUrl] as string,
    corsProxyUrl: local[STORAGE.corsProxyUrl] as string | undefined,
    deviceId: local[STORAGE.deviceId] as string,
    auth,
  };
  sync.configure(repoConfig);
  pending = (local[STORAGE.pending] as PendingOp[] | undefined) || [];
  return true;
}
/** Throw 'locked' unless the key is in memory or restorable from the session. */
async function ensureUnlocked(): Promise<void> {
  if (masterKey) return;
  const restored = await restoreFromSession();
  if (!restored) throw new Error('locked');
}
/** Bump the idle-lock activity stamp; no-op while locked. */
export async function touchActivity(): Promise<void> {
  if (!masterKey) return;
  await browser.storage.session.set({ [SESSION.lastActivity]: Date.now() });
}
/** Called by the periodic alarm: lock once the inactivity window has elapsed. */
export async function idleCheck(): Promise<void> {
  const rec = await browser.storage.session.get(SESSION.lastActivity) as { [k: string]: unknown };
  const last = (rec[SESSION.lastActivity] as number | undefined) || 0;
  // No stamp → nothing unlocked to time out.
  if (last && Date.now() - last > IDLE_TIMEOUT_MS) {
    await lock();
  }
}
/** True once setup() has stored both a repo URL and vault metadata. */
export async function isConfigured(): Promise<boolean> {
  const rec = await storageGet<Record<string, unknown>>([STORAGE.repoUrl, STORAGE.vaultMeta]);
  return Boolean(rec[STORAGE.repoUrl]) && Boolean(rec[STORAGE.vaultMeta]);
}
/** True if a usable master key is in memory or can be restored. */
export async function isUnlocked(): Promise<boolean> {
  return masterKey ? true : restoreFromSession();
}
// First-run configuration: either create a fresh vault (init + push) or join
// an existing one (clone + verify password). Persists everything except the
// master password; the git token is stored only as ciphertext under the key.
export async function setup(args: SetupArgs): Promise<void> {
  const { repoUrl, username, token, masterPassword, existing, corsProxyUrl } = args;
  if (!repoUrl || !masterPassword) throw new Error('repoUrl and masterPassword are required');
  // Reuse a previously assigned device id if this browser already has one.
  const deviceRec = await storageGet<{ [k: string]: unknown }>(STORAGE.deviceId);
  let deviceId = deviceRec[STORAGE.deviceId] as string | undefined;
  if (!deviceId) deviceId = c.uuid();
  const auth: Auth = { username: username || '', token: token || '' };
  sync.configure({ repoUrl, corsProxyUrl, auth, deviceId });
  let derivedKey: CryptoKey;
  let metaObj: VaultMeta;
  if (existing) {
    // Join: clone first, then derive with the *remote's* KDF parameters and
    // prove the password against the stored verifier.
    metaObj = await sync.cloneVault();
    const salt = c.b64decode(metaObj.kdf.salt);
    derivedKey = await c.deriveKey(masterPassword, salt, metaObj.kdf.iterations);
    const ok = await c.checkVerifier(derivedKey, metaObj.verifier);
    if (!ok) throw new Error('wrong master password for existing vault');
  } else {
    // Create: fresh random salt, fixed iteration count, then init + push.
    const salt = c.randomSalt(16);
    const iterations = 600000;
    derivedKey = await c.deriveKey(masterPassword, salt, iterations);
    metaObj = await sync.createVault(derivedKey, salt, iterations);
  }
  // Git credentials are persisted encrypted under the master key only.
  const authBlob = await c.encryptJson(derivedKey, auth);
  await storageSet({
    [STORAGE.repoUrl]: repoUrl,
    [STORAGE.corsProxyUrl]: corsProxyUrl || '',
    [STORAGE.deviceId]: deviceId,
    [STORAGE.authCiphertext]: c.b64encode(authBlob),
    [STORAGE.vaultMeta]: metaObj,
    [STORAGE.pending]: [],
  });
  // Populate in-memory state and mirror the key so we come up unlocked.
  masterKey = derivedKey;
  meta = metaObj;
  repoConfig = { repoUrl, corsProxyUrl, auth, deviceId };
  pending = [];
  await persistSession();
}
// Unlock an already-configured vault: derive the key with the stored KDF
// parameters, check it against the verifier, rebuild config, and kick off a
// background sync.
export async function unlock(password: string): Promise<void> {
  const r = await storageGet<Record<string, unknown>>(Object.values(STORAGE) as string[]);
  const storedMeta = r[STORAGE.vaultMeta] as VaultMeta | undefined;
  const repoUrl = r[STORAGE.repoUrl] as string | undefined;
  if (!storedMeta || !repoUrl) throw new Error('no vault configured');
  meta = storedMeta;
  const salt = c.b64decode(storedMeta.kdf.salt);
  const key = await c.deriveKey(password, salt, storedMeta.kdf.iterations);
  const ok = await c.checkVerifier(key, storedMeta.verifier);
  if (!ok) throw new Error('wrong master password');
  // Git credentials were stored encrypted under the master key at setup().
  let auth: Auth = { username: '', token: '' };
  const authCt = r[STORAGE.authCiphertext] as string | undefined;
  if (authCt) {
    try {
      auth = await c.decryptJson<Auth>(key, c.b64decode(authCt));
    } catch (e) { console.warn('failed to decrypt stored auth', e); }
  }
  masterKey = key;
  repoConfig = {
    repoUrl,
    corsProxyUrl: r[STORAGE.corsProxyUrl] as string | undefined,
    deviceId: r[STORAGE.deviceId] as string,
    auth,
  };
  sync.configure(repoConfig);
  pending = (r[STORAGE.pending] as PendingOp[] | undefined) || [];
  // Mirror the key into storage.session, then sync in the background.
  await persistSession();
  syncLater();
}
/** Forget all in-memory state and the mirrored session key. */
export async function lock(): Promise<void> {
  masterKey = null;
  meta = null;
  repoConfig = null;
  pending = [];
  if (syncTimer) {
    clearTimeout(syncTimer);
    syncTimer = null;
  }
  await clearSession();
}
/**
 * Decrypt every on-disk entry blob. Conflict sidecars are flagged via
 * `_conflict`; blobs that fail to decrypt are silently omitted.
 */
export async function list(): Promise<EntryWithMeta[]> {
  await ensureUnlocked();
  const results: EntryWithMeta[] = [];
  for (const file of await sync.listBlobFiles()) {
    const isConflict = /^(.+?)\.conflict-(.+)\.enc$/.test(file);
    try {
      const blob = await sync.readBlobFile(file);
      const entry = await c.decryptJson<Entry>(masterKey!, blob);
      results.push({ ...entry, _file: file, _conflict: isConflict });
    } catch {
      // Undecryptable blob (foreign key / corruption) — skip rather than fail the UI.
    }
  }
  return results;
}
// Create or update an entry: write the blob locally for immediate UI feedback,
// queue a pending op, and schedule a debounced sync.
export async function put(input: EntryInput): Promise<Entry> {
  await ensureUnlocked();
  const now = new Date().toISOString();
  const id = input.id || c.uuid();
  // Record which version this edit is based on. At sync time, if the remote's
  // modified_at still matches this base, the edit is linear — no conflict, no
  // sidecar. If another device edited since, we diverge.
  let baseModifiedAt: string | null = null;
  if (input.id) {
    try {
      const prevBlob = await sync.readBlobFile(`${id}.enc`);
      const prev = await c.decryptJson<Entry>(masterKey!, prevBlob);
      baseModifiedAt = prev.modified_at || null;
    } catch {
      // No readable previous version — treat as brand-new.
    }
  }
  const entry: Entry = {
    id,
    title: input.title || '',
    url: input.url || '',
    username: input.username || '',
    password: input.password || '',
    notes: input.notes || '',
    created_at: input.created_at || now,
    modified_at: now,
    device_id: repoConfig!.deviceId,
  };
  const blob = await c.encryptJson(masterKey!, entry);
  await sync.writeBlobFile(`${id}.enc`, blob);
  pending.push({ type: 'put', id, entry, baseModifiedAt });
  await savePending();
  syncLater();
  return entry;
}
/** Delete an entry (or a conflict sidecar) locally and queue the op for sync. */
export async function remove(id: string, file?: string): Promise<void> {
  await ensureUnlocked();
  const filename = file || `${id}.enc`;
  await sync.removeBlobFile(filename);
  pending.push({ type: 'delete', id, file: filename, initiatedAt: Date.now() });
  await savePending();
  syncLater();
}
// Flush pending ops to the remote. Concurrent callers share a single
// in-flight promise; push-style failures are retried with a short linear
// backoff (each retry re-fetches the remote via sync.sync()).
export async function syncNow(retries = 3): Promise<{ changed: boolean } | undefined> {
  await ensureUnlocked();
  if (inflightSync) return inflightSync;
  inflightSync = (async () => {
    for (let i = 0; i < retries; i++) {
      try {
        const { changed } = await sync.sync(masterKey!, pending);
        // Ops are dropped only after a fully successful commit + push.
        pending = [];
        await savePending();
        return { changed };
      } catch (e) {
        const msg = (e instanceof Error ? e.message : String(e)) || '';
        // Only push races (non-fast-forward / rejected) are worth retrying;
        // anything else (auth, network, corruption) rethrows immediately.
        const retryable = /not a fast-forward|non-fast|push|rejected/i.test(msg);
        if (!retryable || i === retries - 1) throw e;
        await new Promise(r => setTimeout(r, 500 * (i + 1)));
      }
    }
    return undefined;
  })();
  try { return await inflightSync; }
  finally { inflightSync = null; }
}
/** Debounced sync: coalesce a burst of edits into one syncNow() ~400 ms later. */
function syncLater(): void {
  if (syncTimer !== null) return;
  syncTimer = setTimeout(async () => {
    syncTimer = null;
    try {
      await syncNow();
    } catch (e) {
      console.warn('background sync failed', e);
    }
  }, 400);
}
/** Factory reset: lock, erase all stored config/state, delete the local repo. */
export async function reset(): Promise<void> {
  await lock();
  await storageRemove(Object.values(STORAGE) as string[]);
  await wipeRepo();
}

24
src/common/messages.ts Normal file
View File

@ -0,0 +1,24 @@
import type {
Message, BackgroundResponse, SetupArgs, EntryInput, Entry, EntryWithMeta,
} from './types.js';
// Send one message to the background script and unwrap its response envelope,
// converting `{ ok: false }` (or a missing response) into a thrown Error.
async function sendRaw<V = unknown>(message: Message): Promise<V | undefined> {
  const res = (await browser.runtime.sendMessage(message)) as BackgroundResponse<V> | undefined;
  if (res?.ok) return res.value;
  throw new Error(res?.error || 'background error');
}
// Typed API the UI layers talk to. Centralises the message contract so
// background and UI don't drift out of sync on field names.
// The `as Promise<…>` casts narrow away the `undefined` that sendRaw() admits;
// the background router sets `value` for these query messages (verify if the
// router changes).
export const api = {
  isConfigured: () => sendRaw<boolean>({ type: 'isConfigured' }) as Promise<boolean>,
  isUnlocked: () => sendRaw<boolean>({ type: 'isUnlocked' }) as Promise<boolean>,
  setup: (args: SetupArgs) => sendRaw<void>({ type: 'setup', args }),
  unlock: (password: string) => sendRaw<void>({ type: 'unlock', password }),
  lock: () => sendRaw<void>({ type: 'lock' }),
  list: () => sendRaw<EntryWithMeta[]>({ type: 'list' }) as Promise<EntryWithMeta[]>,
  put: (entry: EntryInput) => sendRaw<Entry>({ type: 'put', entry }) as Promise<Entry>,
  delete: (id: string, file?: string) => sendRaw<void>({ type: 'delete', id, file }),
  sync: () => sendRaw<{ changed: boolean }>({ type: 'sync' }),
  reset: () => sendRaw<void>({ type: 'reset' }),
};

3
src/common/polyfills.ts Normal file
View File

@ -0,0 +1,3 @@
// esbuild `inject` target: any bundle that references bare `Buffer`
// will have it resolved to this re-export of the browser polyfill.
export { Buffer } from 'buffer';

83
src/common/types.ts Normal file
View File

@ -0,0 +1,83 @@
// Shared type definitions used across background / popup / options.

// A decrypted password entry; stored encrypted as entries/<id>.enc.
export interface Entry {
  id: string;           // random v4 UUID; doubles as the filename stem
  title: string;
  url: string;
  username: string;
  password: string;
  notes: string;
  created_at: string;   // ISO-8601 timestamp
  modified_at: string;  // ISO-8601; drives conflict resolution on sync
  device_id: string;    // device that wrote this version
}
// Input shape when creating or updating an entry from the UI.
// All fields optional; the vault fills in id, timestamps, device_id.
export interface EntryInput {
  id?: string;
  title?: string;
  url?: string;
  username?: string;
  password?: string;
  notes?: string;
  created_at?: string;
}
// Entry with filesystem metadata, as returned by vault.list().
export interface EntryWithMeta extends Entry {
  _file: string;      // filename under entries/
  _conflict: boolean; // true for `<uuid>.conflict-<ts>.enc` sidecars
}
// Git credentials; the token fills the HTTP basic-auth password slot.
export interface Auth {
  username: string;
  token: string;
}
export interface RepoConfig {
  repoUrl: string;
  corsProxyUrl?: string;
  auth: Auth;
  deviceId: string;
}
// Cleartext vault-meta.json: KDF parameters plus the encrypted verifier.
export interface VaultMeta {
  version: 1;
  kdf: {
    name: 'PBKDF2';
    hash: 'SHA-256';
    iterations: number;
    salt: string; // base64(raw 16B)
  };
  verifier: string; // base64(iv||ciphertext)
}
// Queued local mutation, replayed against origin/main at sync time.
// `baseModifiedAt` is the version a put was based on (linear-edit detection);
// `initiatedAt` lets edit-beats-delete compare against the remote's edit time.
export type PendingOp =
  | { type: 'put'; id: string; entry: Entry; baseModifiedAt: string | null }
  | { type: 'delete'; id: string; file: string; initiatedAt: number };
export interface SetupArgs {
  repoUrl: string;
  username: string;
  token: string;
  masterPassword: string;
  existing: boolean; // true → join (clone), false → create (init + push)
  corsProxyUrl?: string;
}
// Discriminated union of every UI → background message.
export type Message =
  | { type: 'isConfigured' }
  | { type: 'isUnlocked' }
  | { type: 'setup'; args: SetupArgs }
  | { type: 'unlock'; password: string }
  | { type: 'lock' }
  | { type: 'list' }
  | { type: 'put'; entry: EntryInput }
  | { type: 'delete'; id: string; file?: string }
  | { type: 'sync' }
  | { type: 'reset' };
// Response envelope the background router always returns.
export type BackgroundResponse<V = unknown> =
  | { ok: true; value?: V }
  | { ok: false; error: string };

44
src/types/modules.d.ts vendored Normal file
View File

@ -0,0 +1,44 @@
// Minimal type shim for @isomorphic-git/lightning-fs — the package ships
// no official types. We only use a small surface of its Promises-style API,
// plus hand the instance itself to isomorphic-git.
declare module '@isomorphic-git/lightning-fs' {
  // Constructor options — only typed as far as we might plausibly use them.
  interface LightningFSOptions {
    wipe?: boolean;
    url?: string;
    fileDbName?: string;
    fileStoreName?: string;
    lockDbName?: string;
    lockStoreName?: string;
    defer?: boolean;
  }
  // Node-fs-style stat result subset.
  interface LightningStats {
    isDirectory(): boolean;
    isFile(): boolean;
    size: number;
    mtimeMs: number;
  }
  // Promises-style API surface used by src/background/fs.ts.
  interface LightningPromises {
    mkdir(path: string, opts?: { mode?: number }): Promise<void>;
    rmdir(path: string): Promise<void>;
    readdir(path: string): Promise<string[]>;
    // Returns string only when a utf8 encoding is requested; otherwise bytes.
    readFile(path: string, opts?: { encoding?: 'utf8' } | 'utf8'): Promise<Uint8Array | string>;
    writeFile(path: string, data: Uint8Array | string, opts?: { encoding?: 'utf8' } | 'utf8'): Promise<void>;
    unlink(path: string): Promise<void>;
    stat(path: string): Promise<LightningStats>;
    lstat(path: string): Promise<LightningStats>;
    readlink(path: string): Promise<string>;
    symlink(target: string, path: string): Promise<void>;
  }
  class LightningFS {
    constructor(name?: string, options?: LightningFSOptions);
    promises: LightningPromises;
    // Callback-style API used by isomorphic-git internally — left untyped.
    [key: string]: unknown;
  }
  export default LightningFS;
}