added more support for supabase-js

This commit is contained in:
2026-03-12 10:18:52 +02:00
parent c0792f2e1d
commit 6708cf28a7
62 changed files with 6563 additions and 526 deletions

View File

@@ -39,4 +39,52 @@ describe('Authentication', () => {
expect(error).toBeDefined();
expect(data.session).toBeNull();
});
it('should persist session (getUser)', async () => {
  // A fresh sign-in guarantees a session exists before we read the user back.
  await client.auth.signInWithPassword({ email, password });
  const result = await client.auth.getUser();
  expect(result.error).toBeNull();
  expect(result.data.user).toBeDefined();
  expect(result.data.user?.email).toBe(email);
});
it('should refresh session', async () => {
  // Sign in first so there is a session to refresh.
  const { data: loginData } = await client.auth.signInWithPassword({ email, password });
  expect(loginData.session).toBeDefined();
  // Fix: dropped the unused `oldAccessToken` capture — refresh-token rotation
  // is the behavior this test asserts.
  const oldRefreshToken = loginData.session?.refresh_token;
  // Refresh the session; the server must return a rotated refresh token and
  // the associated user.
  const { data, error } = await client.auth.refreshSession();
  expect(error).toBeNull();
  expect(data.session).toBeDefined();
  expect(data.session?.refresh_token).not.toBe(oldRefreshToken);
  expect(data.user).toBeDefined();
});
it('should request password reset', async () => {
  // Only verifies the request is accepted; mail delivery is out of scope.
  const result = await client.auth.resetPasswordForEmail(email);
  expect(result.error).toBeNull();
  expect(result.data).toBeDefined();
});
it('should update user metadata', async () => {
  const login = await client.auth.signInWithPassword({ email, password });
  expect(login.data.session).toBeDefined();
  const update = await client.auth.updateUser({
    data: { hello: 'world' },
  });
  expect(update.error).toBeNull();
  expect(update.data.user).toBeDefined();
  // Debug output
  // console.log('Updated user:', JSON.stringify(update.data.user, null, 2));
  // The backend may surface the metadata as user_metadata or as
  // raw_user_meta_data; accept either location.
  const metadata =
    update.data.user?.user_metadata || (update.data.user as any).raw_user_meta_data;
  expect(metadata).toEqual({ hello: 'world' });
});
});

View File

@@ -0,0 +1,423 @@
import { describe, it, expect } from 'vitest';
import { createMockedFunction } from './test-utils';
describe('Edge Functions', () => {
// Timestamp suffix keeps the deployed function name unique per test run.
const functionName = `hello-world-${Date.now()}`;
// Simple WASI module that prints "Hello from WASM!" to stdout
// NOTE(review): the template literal below is WebAssembly Text (WAT) source
// embedded verbatim — its data segment at offset 8 holds the 16-byte message
// and fd_write(fd=1, one iovec) streams it to stdout.
const wat = `
(module
(import "wasi_snapshot_preview1" "fd_write" (func $fd_write (param i32 i32 i32 i32) (result i32)))
(memory 1)
(export "memory" (memory 0))
(data (i32.const 8) "Hello from WASM!")
(func $main (export "_start")
(i32.store (i32.const 0) (i32.const 8)) ;; iov.iov_base
(i32.store (i32.const 4) (i32.const 16)) ;; iov.iov_len
(call $fd_write
(i32.const 1) ;; stdout
(i32.const 0) ;; iovs ptr
(i32.const 1) ;; iovs len
(i32.const 20) ;; nwritten ptr
)
drop
)
)
`;
it('should deploy a function', async () => {
  // Push the WAT module through the admin deploy endpoint (service role).
  const payload = {
    name: functionName,
    code_base64: Buffer.from(wat).toString('base64'),
  };
  const res = await fetch(`${process.env.MADBASE_URL}/functions/v1`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${process.env.MADBASE_SERVICE_ROLE_KEY}`,
    },
    body: JSON.stringify(payload),
  });
  if (res.status !== 200) {
    console.error('Deploy failed:', await res.text());
  }
  expect(res.status).toBe(200);
});
it('should invoke a function', async () => {
  // Call the WASM function deployed by the previous test as the anon role.
  const res = await fetch(`${process.env.MADBASE_URL}/functions/v1/${functionName}`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${process.env.MADBASE_ANON_KEY}`,
    },
    body: JSON.stringify({ payload: { name: 'World' } }),
  });
  if (res.status !== 200) {
    console.error('Invoke failed:', await res.text());
  }
  expect(res.status).toBe(200);
  const data = await res.json();
  console.log('Invoke response:', data);
  expect(data.result).toContain('Hello from WASM!');
});
it('should deploy and invoke a Deno function', async () => {
  const name = `deno-hello-${Date.now()}`;
  // Simple Deno function that uses Deno.serve shim
  const code = `
Deno.serve(async (req) => {
const body = await req.json();
return new Response("Hello " + body.name + " from Deno!");
});
`;
  const base = process.env.MADBASE_URL;
  // Deploy under the service role, tagging the payload with the deno runtime.
  const deployRes = await fetch(`${base}/functions/v1`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${process.env.MADBASE_SERVICE_ROLE_KEY}`,
    },
    body: JSON.stringify({
      name,
      code_base64: Buffer.from(code).toString('base64'),
      runtime: 'deno',
    }),
  });
  if (deployRes.status !== 200) {
    console.error('Deno Deploy failed:', await deployRes.text());
  }
  expect(deployRes.status).toBe(200);
  // Invoke as anon and check the echoed greeting.
  const invokeRes = await fetch(`${base}/functions/v1/${name}`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${process.env.MADBASE_ANON_KEY}`,
    },
    body: JSON.stringify({ payload: { name: 'World' } }),
  });
  if (invokeRes.status !== 200) {
    console.error('Deno Invoke failed:', await invokeRes.text());
  }
  expect(invokeRes.status).toBe(200);
  const data = await invokeRes.json();
  console.log('Deno Invoke response:', data);
  expect(data.result).toBe('Hello World from Deno!');
});
describe('Unit Tests (Component Logic)', () => {
// Each test deploys a freshly generated mocked function (see
// createMockedFunction) and asserts on the envelope the runtime returns:
// `result` is the handler's response body, `status` its HTTP status.
it('should handle missing environment variables', async () => {
const name = `env-check-${Date.now()}`;
const code = createMockedFunction(`
Deno.serve(async (req) => {
const key = Deno.env.get("MY_SECRET_KEY");
if (!key) {
return new Response("Missing Key", { status: 500 });
}
return new Response("Found Key: " + key);
});
`, { env: {} }); // Empty env
// Deploy
// NOTE(review): the deploy response is not checked here; a failed deploy
// surfaces later as an invoke failure instead.
await fetch(`${process.env.MADBASE_URL}/functions/v1`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_SERVICE_ROLE_KEY}`
},
body: JSON.stringify({
name,
code_base64: Buffer.from(code).toString('base64'),
runtime: 'deno'
})
});
// Invoke
const res = await fetch(`${process.env.MADBASE_URL}/functions/v1/${name}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_ANON_KEY}`
},
body: JSON.stringify({ payload: {} })
});
const data = await res.json();
// With an empty env mock, Deno.env.get returns nothing and the handler's
// 500 branch is taken.
expect(data.result).toBe("Missing Key");
expect(data.status).toBe(500);
});
it('should validate request body', async () => {
const name = `body-check-${Date.now()}`;
const code = createMockedFunction(`
Deno.serve(async (req) => {
const body = await req.json();
if (!body.requiredField) {
return new Response("Missing Field", { status: 400 });
}
return new Response("OK");
});
`);
// Deploy
await fetch(`${process.env.MADBASE_URL}/functions/v1`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_SERVICE_ROLE_KEY}`
},
body: JSON.stringify({
name,
code_base64: Buffer.from(code).toString('base64'),
runtime: 'deno'
})
});
// Invoke (Missing Field)
const res = await fetch(`${process.env.MADBASE_URL}/functions/v1/${name}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_ANON_KEY}`
},
body: JSON.stringify({ payload: {} })
});
const data = await res.json();
// Empty payload lacks `requiredField`, so the 400 branch must fire.
expect(data.result).toBe("Missing Field");
expect(data.status).toBe(400);
});
});
describe('Integration Tests (System Interactions)', () => {
it('should handle CORS preflight requests', async () => {
const name = `cors-check-${Date.now()}`;
const code = createMockedFunction(`
const corsHeaders = {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "POST, OPTIONS",
};
Deno.serve(async (req) => {
if (req.method === "OPTIONS") {
return new Response("ok", { headers: corsHeaders });
}
return new Response("ok", { headers: corsHeaders });
});
`);
// NOTE(review): the deploy response is not checked; a failed deploy would
// surface below as a missing header on the invoke response.
await fetch(`${process.env.MADBASE_URL}/functions/v1`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_SERVICE_ROLE_KEY}`
},
body: JSON.stringify({
name,
code_base64: Buffer.from(code).toString('base64'),
runtime: 'deno'
})
});
// Invoke with OPTIONS (Note: The Gateway might handle this or pass it through.
// Our Deno runtime shim creates a request with POST method by default for invocations,
// so testing OPTIONS strictly via invocation endpoint might need support in the handler/shim.
// For now, we test that the function *can* set headers in response.)
const res = await fetch(`${process.env.MADBASE_URL}/functions/v1/${name}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_ANON_KEY}`
},
body: JSON.stringify({ payload: {} })
});
const data = await res.json();
// Check if headers are returned (requires handler update to return headers, which we did)
// NOTE(review): this indexes `data.headers` without a guard — if the invoke
// fails, the test dies with a TypeError rather than a clean assertion.
expect(data.headers['access-control-allow-origin']).toBe('*');
});
});
describe('E2E Workflows (User Flows)', () => {
// Exercises a multi-step flow (DB insert + outbound email call) entirely
// against the mock shims configured through createMockedFunction.
it('should execute invite staff workflow', async () => {
const name = `invite-staff-${Date.now()}`;
const code = createMockedFunction(`
Deno.serve(async (req) => {
const { email } = await req.json();
// 1. Insert into DB (mocked)
const supabase = createClient();
const { error } = await supabase.from('invitations').insert({ email });
if (error) return new Response("DB Error", { status: 500 });
// 2. Send Email (mocked fetch)
const res = await fetch("https://api.resend.com/emails", {
method: "POST",
body: JSON.stringify({ to: email })
});
if (!res.ok) return new Response("Email Error", { status: 502 });
return new Response("Invite Sent");
});
`, {
fetch: [{ urlPattern: "api.resend.com", status: 200, response: { id: "email_123" } }],
supabase: { insertResult: { id: "invite_123" } }
});
// Deploy (response intentionally unchecked; failures surface on invoke).
await fetch(`${process.env.MADBASE_URL}/functions/v1`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_SERVICE_ROLE_KEY}`
},
body: JSON.stringify({
name,
code_base64: Buffer.from(code).toString('base64'),
runtime: 'deno'
})
});
// Invoke with a payload; both mocked steps succeed, so the happy path runs.
const res = await fetch(`${process.env.MADBASE_URL}/functions/v1/${name}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_ANON_KEY}`
},
body: JSON.stringify({ payload: { email: "newuser@example.com" } })
});
const data = await res.json();
expect(data.result).toBe("Invite Sent");
expect(data.status).toBe(200);
});
});
// Largest scenario: a realistic checkout edge function with CORS handling,
// env-var lookup, bearer-token auth via the mocked supabase client, and an
// outbound call to the mocked Polar API.
it('should deploy and invoke a complex Polar Checkout-like function', async () => {
const name = `polar-checkout-${Date.now()}`;
const code = createMockedFunction(`
const corsHeaders = {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Headers": "authorization, x-client-info, apikey, content-type",
};
Deno.serve(async (req) => {
if (req.method === "OPTIONS") {
return new Response(null, { headers: corsHeaders });
}
try {
const POLAR_API_KEY = Deno.env.get("POLAR_API_KEY");
if (!POLAR_API_KEY) throw new Error("POLAR_API_KEY is not configured");
// Authenticate user
const authHeader = req.headers.get("Authorization");
if (!authHeader || !authHeader.startsWith("Bearer ")) {
return new Response(JSON.stringify({ error: "Unauthorized: Missing or invalid token" }), {
status: 401,
headers: { ...corsHeaders, "Content-Type": "application/json" },
});
}
const supabase = createClient(
Deno.env.get("SUPABASE_URL"),
Deno.env.get("SUPABASE_ANON_KEY"),
{ global: { headers: { Authorization: authHeader } } }
);
const token = authHeader.replace("Bearer ", "");
const { data: claimsData, error: claimsError } = await supabase.auth.getClaims(token);
if (claimsError || !claimsData?.claims) {
return new Response(JSON.stringify({ error: "Unauthorized: Invalid claims" }), { status: 401 });
}
const { productId, successUrl } = await req.json();
// Create Polar checkout session
const polarRes = await fetch("https://sandbox-api.polar.sh/v1/checkouts/", {
method: "POST",
headers: {
Authorization: "Bearer " + POLAR_API_KEY,
"Content-Type": "application/json",
},
body: JSON.stringify({
products: [productId],
success_url: successUrl,
metadata: { user_id: claimsData.claims.sub }
}),
});
const polarData = await polarRes.json();
if (!polarRes.ok) {
throw new Error("Polar API error");
}
return new Response(
JSON.stringify({ url: polarData.url, id: polarData.id }),
{ status: 200, headers: { ...corsHeaders, "Content-Type": "application/json" } }
);
} catch (error) {
return new Response(JSON.stringify({ error: String(error) }), {
status: 500,
headers: { ...corsHeaders, "Content-Type": "application/json" },
});
}
});
`, {
// Mock configuration consumed by createMockedFunction: env vars read via
// Deno.env.get, claims returned by the stub supabase client, and a canned
// response for the Polar checkout endpoint.
env: {
"POLAR_API_KEY": "mock_polar_key",
"SUPABASE_URL": "http://mock-supabase",
"SUPABASE_ANON_KEY": "mock_anon_key",
"SUPABASE_SERVICE_ROLE_KEY": "mock_service_key"
},
supabase: {
claims: { sub: "user_123", email: "test@example.com" }
},
fetch: [{
urlPattern: "sandbox-api.polar.sh/v1/checkouts/",
status: 200,
response: { url: "https://sandbox.polar.sh/checkout/123", id: "checkout_123" }
}]
});
// Deploy
const deployRes = await fetch(`${process.env.MADBASE_URL}/functions/v1`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_SERVICE_ROLE_KEY}`
},
body: JSON.stringify({
name,
code_base64: Buffer.from(code).toString('base64'),
runtime: 'deno'
})
});
expect(deployRes.status).toBe(200);
// Invoke
const invokeRes = await fetch(`${process.env.MADBASE_URL}/functions/v1/${name}`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${process.env.MADBASE_ANON_KEY}`
},
body: JSON.stringify({ payload: { productId: "prod_123", successUrl: "http://example.com" } })
});
expect(invokeRes.status).toBe(200);
const data = await invokeRes.json();
console.log('Polar Invoke response:', data);
// The handler returns a JSON string as its body, so `result` needs parsing.
const result = JSON.parse(data.result);
expect(result.url).toBe("https://sandbox.polar.sh/checkout/123");
});
});

View File

@@ -0,0 +1,13 @@
import { describe, it } from 'vitest';
import jwt from 'jsonwebtoken';
describe('Generate Keys', () => {
  it('should generate keys', () => {
    // Utility "test": mints HS256-signed anon and service_role JWTs with the
    // shared test secret and prints them for copy/paste into the environment.
    const secret = 'testsecret';
    const signFor = (role: string) =>
      jwt.sign({ role, iss: 'madbase' }, secret, { algorithm: 'HS256' });
    const anon = signFor('anon');
    const service = signFor('service_role');
    console.log(`ANON_KEY=${anon}`);
    console.log(`SERVICE_KEY=${service}`);
  });
});

View File

@@ -11,6 +11,7 @@
"dependencies": {
"@supabase/supabase-js": "^2.49.1",
"dotenv": "^16.4.7",
"jsonwebtoken": "^9.0.3",
"vitest": "^3.0.7"
}
},
@@ -1004,6 +1005,12 @@
"node": ">=12"
}
},
"node_modules/buffer-equal-constant-time": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==",
"license": "BSD-3-Clause"
},
"node_modules/cac": {
"version": "6.7.14",
"resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz",
@@ -1076,6 +1083,15 @@
"url": "https://dotenvx.com"
}
},
"node_modules/ecdsa-sig-formatter": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz",
"integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==",
"license": "Apache-2.0",
"dependencies": {
"safe-buffer": "^5.0.1"
}
},
"node_modules/es-module-lexer": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz",
@@ -1187,6 +1203,91 @@
"integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==",
"license": "MIT"
},
"node_modules/jsonwebtoken": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.3.tgz",
"integrity": "sha512-MT/xP0CrubFRNLNKvxJ2BYfy53Zkm++5bX9dtuPbqAeQpTVe0MQTFhao8+Cp//EmJp244xt6Drw/GVEGCUj40g==",
"license": "MIT",
"dependencies": {
"jws": "^4.0.1",
"lodash.includes": "^4.3.0",
"lodash.isboolean": "^3.0.3",
"lodash.isinteger": "^4.0.4",
"lodash.isnumber": "^3.0.3",
"lodash.isplainobject": "^4.0.6",
"lodash.isstring": "^4.0.1",
"lodash.once": "^4.0.0",
"ms": "^2.1.1",
"semver": "^7.5.4"
},
"engines": {
"node": ">=12",
"npm": ">=6"
}
},
"node_modules/jwa": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz",
"integrity": "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==",
"license": "MIT",
"dependencies": {
"buffer-equal-constant-time": "^1.0.1",
"ecdsa-sig-formatter": "1.0.11",
"safe-buffer": "^5.0.1"
}
},
"node_modules/jws": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/jws/-/jws-4.0.1.tgz",
"integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==",
"license": "MIT",
"dependencies": {
"jwa": "^2.0.1",
"safe-buffer": "^5.0.1"
}
},
"node_modules/lodash.includes": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz",
"integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==",
"license": "MIT"
},
"node_modules/lodash.isboolean": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz",
"integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==",
"license": "MIT"
},
"node_modules/lodash.isinteger": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz",
"integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==",
"license": "MIT"
},
"node_modules/lodash.isnumber": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz",
"integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==",
"license": "MIT"
},
"node_modules/lodash.isplainobject": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
"integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
"license": "MIT"
},
"node_modules/lodash.isstring": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz",
"integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==",
"license": "MIT"
},
"node_modules/lodash.once": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
"integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==",
"license": "MIT"
},
"node_modules/loupe": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz",
@@ -1331,6 +1432,38 @@
"fsevents": "~2.3.2"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/semver": {
"version": "7.7.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz",
"integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==",
"license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/siginfo": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",

View File

@@ -13,6 +13,7 @@
"dependencies": {
"@supabase/supabase-js": "^2.49.1",
"dotenv": "^16.4.7",
"jsonwebtoken": "^9.0.3",
"vitest": "^3.0.7"
}
}

View File

@@ -4,35 +4,48 @@ import { createAnonClient } from './setup.ts';
// NOTE(review): this span is a unified-diff rendering that interleaves the
// removed test ("should receive insert events") with the added one ("should
// resume subscription from last_event_id") — e.g. two `it(` headers, two
// `.channel(` calls and two `.subscribe(` callbacks appear back to back. It is
// not valid TypeScript as shown; reconstruct from version control before
// editing. Comments below annotate the apparent post-change intent.
const client = createAnonClient();
describe('Realtime', () => {
it('should receive insert events', async () => {
it('should resume subscription from last_event_id', async () => {
// 1. Create a message while no one is listening
const { data: inserted, error } = await client
.from('todos')
.insert({ title: 'Missed Event', completed: false })
.select()
.single();
expect(error).toBeNull();
// We need to know the ID of this event in realtime history.
// Ideally we query `madbase_realtime.messages` but client can't.
// So we just assume ID > 0.
// Wait, we need to pass `last_event_id` < actual_id.
// Let's assume we want everything after ID=0.
return new Promise<void>((resolve, reject) => {
// 2. Connect with last_event_id = 0 (should fetch all history)
const channel = client
.channel('public:todos')
.channel('public:todos', { config: { last_event_id: 0 } as any })
.on(
'postgres_changes',
{ event: 'INSERT', schema: 'public', table: 'todos' },
(payload) => {
console.log('Received INSERT event:', payload);
expect(payload.new).toBeDefined();
expect(payload.new.title).toBe('Realtime Test');
client.removeChannel(channel).then(() => resolve());
console.log('Received missed event:', payload);
if (payload.new && payload.new.title === 'Missed Event') {
expect(payload.new.id).toBe(inserted.id);
client.removeChannel(channel).then(() => resolve());
}
}
)
.subscribe(async (status) => {
if (status === 'SUBSCRIBED') {
// Trigger an insert
const { error } = await client
.from('todos')
.insert({ title: 'Realtime Test', completed: false });
if (error) reject(error);
}
.subscribe((status, err) => {
if (status === 'SUBSCRIBED') {
console.log('Subscribed with resume');
}
if (status === 'CHANNEL_ERROR') {
reject(err);
}
});
// Timeout if no event received
// NOTE(review): the timeout is never cleared; after a successful resolve the
// late reject is a no-op on the settled promise, but the timer still fires.
setTimeout(() => {
reject(new Error('Timeout waiting for Realtime event'));
}, 10000);
reject(new Error('Timeout waiting for missed event'));
}, 5000);
});
}, 10000);
});
});

View File

@@ -37,17 +37,19 @@ FOR EACH ROW EXECUTE FUNCTION madbase_realtime.broadcast_changes();
-- NOTE(review): this span is a unified-diff rendering that interleaves
-- removed and added lines (two SELECT ... policyname lines, two CREATE POLICY
-- names, two WITH CHECK clauses). The post-change version keeps only the
-- "public buckets" variants. Not valid SQL as shown; reconstruct from VCS.
-- Storage Setup
INSERT INTO storage.buckets (id, name, public) VALUES ('test-bucket', 'test-bucket', true) ON CONFLICT DO NOTHING;
INSERT INTO storage.buckets (id, name, public) VALUES ('public-bucket', 'public-bucket', true) ON CONFLICT DO NOTHING;
INSERT INTO storage.buckets (id, name, public) VALUES ('private-bucket', 'private-bucket', false) ON CONFLICT DO NOTHING;
-- Allow anon to upload to test-bucket
-- Allow anon to upload to test-bucket and public-bucket
DO $$
BEGIN
-- Policy creation is guarded so re-running the migration is idempotent.
IF NOT EXISTS (
SELECT FROM pg_policies WHERE tablename = 'objects' AND policyname = 'Anon can insert into test-bucket'
SELECT FROM pg_policies WHERE tablename = 'objects' AND policyname = 'Anon can insert into public buckets'
) THEN
CREATE POLICY "Anon can insert into test-bucket"
CREATE POLICY "Anon can insert into public buckets"
ON storage.objects FOR INSERT
TO anon
WITH CHECK ( bucket_id = 'test-bucket' );
WITH CHECK ( bucket_id IN ('test-bucket', 'public-bucket') );
END IF;
END
$$;

View File

@@ -1,39 +1,143 @@
import { describe, it, expect } from 'vitest';
import { describe, it, expect, beforeAll } from 'vitest';
import { createAnonClient, createServiceRoleClient } from './setup.ts';
// NOTE(review): lines below are a unified-diff rendering mixing the removed
// tests (upload/list/download against `bucket = 'test-bucket'`) with the added
// Public/Private bucket suites — e.g. an old `it(` body is interrupted by a
// new `describe(` header. Not valid TypeScript as shown; reconstruct from
// version control before editing.
const client = createAnonClient();
const admin = createServiceRoleClient();
const bucket = 'test-bucket';
const PUBLIC_BUCKET = 'public-bucket';
const PRIVATE_BUCKET = 'private-bucket';
describe('Storage', () => {
it('should upload a file', async () => {
// Use Buffer for Node environment reliability
const file = Buffer.from('Hello, MadBase!');
// Use admin to bypass RLS/Permission issues for now to verify S3 connectivity
const { data, error } = await admin.storage
.from(bucket)
.upload('hello.txt', file, { upsert: true });
const fileName = `hello-${Date.now()}.txt`;
const fileContent = Buffer.from('Hello, MadBase!');
if (error) console.error('Upload error:', error);
it('should list buckets', async () => {
const { data, error } = await client.storage.listBuckets();
expect(error).toBeNull();
expect(data).toBeDefined();
expect(data?.path).toBe('hello.txt');
expect(data?.some((b) => b.name === PUBLIC_BUCKET)).toBe(true);
// Private buckets might be visible in list depending on RLS, usually they are if user has access.
// But anon might only see public ones if we restricted list policy?
// Our migration says: "Public Buckets are viewable by everyone" using (public=true).
// So anon should NOT see private bucket.
expect(data?.some((b) => b.name === PRIVATE_BUCKET)).toBe(false);
});
it('should list files', async () => {
const { data, error } = await client.storage.from(bucket).list();
describe('Public Bucket', () => {
it('should allow anon to list files', async () => {
const { error } = await client.storage.from(PUBLIC_BUCKET).list();
expect(error).toBeNull();
});
expect(error).toBeNull();
expect(data).toBeDefined();
expect(data?.some((f) => f.name === 'hello.txt')).toBe(true);
it('should allow upload (via policy)', async () => {
const { data, error } = await client.storage
.from(PUBLIC_BUCKET)
.upload(fileName, fileContent);
expect(error).toBeNull();
expect(data?.path).toBe(fileName);
});
it('should allow download', async () => {
const { data, error } = await client.storage
.from(PUBLIC_BUCKET)
.download(fileName);
expect(error).toBeNull();
const text = await data?.text();
expect(text).toBe('Hello, MadBase!');
});
});
it('should download a file', async () => {
const { data, error } = await client.storage.from(bucket).download('hello.txt');
describe('Private Bucket', () => {
const privateFile = `secret-${Date.now()}.txt`;
expect(error).toBeNull();
expect(data).toBeDefined();
const text = await data?.text();
expect(text).toBe('Hello, MadBase!');
it('should NOT allow anon to list files', async () => {
// Policy: "Users can view their own buckets" OR "Public Buckets".
// Anon is not owner (owner is usually null or specific user).
// If bucket is not public, anon shouldn't see it or its objects.
// List objects checks: bucket_id IN (SELECT id FROM buckets WHERE public=true) OR owner = sub.
const { data, error } = await client.storage.from(PRIVATE_BUCKET).list();
// It might return empty list or error depending on implementation
// Supabase storage usually returns empty list if no access to objects, or error if bucket not found/accessible.
// Our handler checks bucket existence first.
// Bucket exists, but RLS on buckets table filters it out for anon?
// `list_objects` handler does:
// `SELECT id FROM storage.buckets WHERE id = $1`
// If RLS hides it, it returns None -> "Bucket not found" or just "Not Found" if axum returns 404.
expect(error).toBeDefined();
expect(error?.message).toContain('Not Found');
});
it('should allow admin (service role) to upload', async () => {
const { data, error } = await admin.storage
.from(PRIVATE_BUCKET)
.upload(privateFile, fileContent);
expect(error).toBeNull();
expect(data?.path).toBe(privateFile);
});
it('should NOT allow anon to download', async () => {
const { data, error } = await client.storage
.from(PRIVATE_BUCKET)
.download(privateFile);
expect(error).toBeDefined();
expect(data).toBeNull();
});
it('should allow admin to download', async () => {
const { data, error } = await admin.storage
.from(PRIVATE_BUCKET)
.download(privateFile);
expect(error).toBeNull();
const text = await data?.text();
expect(text).toBe('Hello, MadBase!');
});
});
describe('Signed URLs', () => {
  const privateFile = `signed-secret-${Date.now()}.txt`;
  const fileContent = Buffer.from('Hello, MadBase!');
  beforeAll(async () => {
    // Seed a private file as admin so there is something to sign.
    const { error } = await admin.storage
      .from(PRIVATE_BUCKET)
      .upload(privateFile, fileContent);
    expect(error).toBeNull();
  });
  it('should generate and use a signed URL', async () => {
    // 1. Generate a signed URL as admin (who has access to the bucket).
    const { data, error } = await admin.storage
      .from(PRIVATE_BUCKET)
      .createSignedUrl(privateFile, 60);
    expect(error).toBeNull();
    expect(data?.signedUrl).toBeDefined();
    console.log('Signed URL:', data?.signedUrl);
    // 2. Fetch the file through the signed URL without auth headers.
    // The backend may return an absolute URL or a relative
    // /storage/v1/object/sign/... path; prepend the API URL for the latter.
    // Fix: narrow to a plain string first — previously the possibly-undefined
    // `data?.signedUrl` flowed straight into `fetch(url)`.
    const signedUrl = data?.signedUrl ?? '';
    const url = signedUrl.startsWith('http')
      ? signedUrl
      : `${process.env.MADBASE_URL}${signedUrl}`;
    const res = await fetch(url);
    expect(res.status).toBe(200);
    const text = await res.text();
    expect(text).toBe('Hello, MadBase!');
  });
  it('should fail with invalid token', async () => {
    // A bogus token must be rejected with 403, not served.
    const url = `${process.env.MADBASE_URL}/storage/v1/object/sign/${PRIVATE_BUCKET}/${privateFile}?token=invalid-token`;
    const res = await fetch(url);
    expect(res.status).toBe(403);
  });
});
});

View File

@@ -0,0 +1,79 @@
/**
 * Configuration for the mock shims injected by {@link createMockedFunction}.
 */
export interface MockOptions {
  /** Environment variables exposed to the function (read via the env shim). */
  env?: Record<string, string>;
  /** Behaviour of the mocked supabase client returned by `createClient()`. */
  supabase?: {
    /** Claims returned by `auth.getClaims` for any token other than "invalid". */
    claims?: Record<string, any>;
    /** Row returned by select-style query chains. */
    dbResults?: Record<string, any>; // simplified for now
    /** Value returned as `data` from `.insert(...)`. */
    insertResult?: any;
  };
  /** Canned responses for the mocked global `fetch`, matched by substring. */
  fetch?: {
    urlPattern: string;
    response: any;
    status?: number;
  }[];
}

/**
 * Wraps edge-function source with mock shims (env map, supabase client, fetch)
 * so it can be deployed and exercised without real external dependencies.
 *
 * The returned string is source code: the mocks are emitted first, then `code`
 * verbatim, so the function under test picks up `globalThis._env`,
 * `globalThis.createClient` and `globalThis.fetch`.
 *
 * @param code  Function source to run after the mocks are installed.
 * @param mocks Optional mock configuration; omitted sections fall back to
 *              permissive defaults (empty env, stub supabase client, and no
 *              fetch interception).
 * @returns Combined source string, ready for base64 encoding and deploy.
 */
export function createMockedFunction(code: string, mocks: MockOptions = {}): string {
  // Always define globalThis._env so the runtime's env shim has a map to read.
  const envMock = mocks.env ? `
globalThis._env = ${JSON.stringify(mocks.env)};
` : 'globalThis._env = {};';
  const supabaseMock = mocks.supabase ? `
const mockSupabase = {
auth: {
getClaims: async (token) => {
if (token && token !== "invalid") {
return { data: { claims: ${JSON.stringify(mocks.supabase?.claims || {})} }, error: null };
}
return { data: null, error: "Invalid token" };
}
},
from: (table) => {
return {
select: (cols) => ({
eq: (col, val) => ({
limit: (n) => ({
maybeSingle: async () => {
// Simple mock: return configured result or null
return { data: ${JSON.stringify(mocks.supabase?.dbResults || null)} };
}
}),
single: async () => {
return { data: ${JSON.stringify(mocks.supabase?.dbResults || null)} };
}
})
}),
insert: async (data) => ({ data: ${JSON.stringify(mocks.supabase?.insertResult || {})}, error: null })
};
}
};
globalThis.createClient = (url, key, options) => mockSupabase;
` : `
globalThis.createClient = () => ({
auth: { getClaims: async () => ({ data: { claims: {} }, error: null }) },
from: () => ({ select: () => ({ eq: () => ({ limit: () => ({ maybeSingle: async () => ({ data: null }) }) }) }) })
});
`;
  // Fetch mock. Fixes over the previous version:
  // - urlPattern is injected with JSON.stringify so quotes/backslashes in a
  //   pattern cannot break (or inject into) the generated source;
  // - the url argument is coerced with String(...) because callers may pass a
  //   URL or Request object, which has no .includes;
  // - `ok` and `status` defaults use ?? and are computed at generation time.
  const fetchMock = mocks.fetch ? `
globalThis.fetch = async (url, options) => {
const target = String(url);
${mocks.fetch.map(mock => `
if (target.includes(${JSON.stringify(mock.urlPattern)})) {
return {
ok: ${(mock.status ?? 200) >= 200 && (mock.status ?? 200) < 300},
status: ${mock.status ?? 200},
json: async () => (${JSON.stringify(mock.response)}),
text: async () => JSON.stringify(${JSON.stringify(mock.response)})
};
}
`).join('\n')}
return { ok: false, status: 404, text: async () => "Not Found" };
};
` : '';
  // Mocks first, user code last, so the code sees the shimmed globals.
  return `
${envMock}
${supabaseMock}
${fetchMock}
${code}
`;
}