wip: milestone 0 fixes
Some checks failed
CI/CD Pipeline / unit-tests (push) Failing after 1m16s
CI/CD Pipeline / integration-tests (push) Failing after 2m32s
CI/CD Pipeline / lint (push) Successful in 5m22s
CI/CD Pipeline / e2e-tests (push) Has been skipped
CI/CD Pipeline / build (push) Has been skipped

This commit is contained in:
2026-03-15 12:35:42 +02:00
parent 6708cf28a7
commit cffdf8af86
61266 changed files with 4511646 additions and 1938 deletions

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,62 @@
{
"name": "@supabase/storage-js",
"version": "2.99.1",
"description": "Isomorphic storage client for Supabase.",
"keywords": [
"javascript",
"typescript",
"supabase"
],
"homepage": "https://github.com/supabase/supabase-js/tree/master/packages/core/storage-js",
"bugs": "https://github.com/supabase/supabase-js/issues",
"license": "MIT",
"author": "Supabase",
"files": [
"dist",
"src"
],
"main": "dist/index.cjs",
"module": "dist/index.mjs",
"types": "dist/index.d.cts",
"exports": {
".": {
"import": {
"types": "./dist/index.d.mts",
"default": "./dist/index.mjs"
},
"require": {
"types": "./dist/index.d.cts",
"default": "./dist/index.cjs"
}
},
"./dist/*": "./dist/*",
"./package.json": "./package.json"
},
"sideEffects": false,
"repository": {
"type": "git",
"url": "https://github.com/supabase/supabase-js.git",
"directory": "packages/core/storage-js"
},
"scripts": {
"build": "tsdown",
"build:watch": "tsdown --watch",
"docs": "typedoc --entryPoints src/index.ts --out docs/v2 --entryPoints src/packages/* --excludePrivate --excludeProtected",
"docs:json": "typedoc --json docs/v2/spec.json --entryPoints src/index.ts --entryPoints src/packages/* --excludePrivate --excludeExternals --excludeProtected"
},
"dependencies": {
"iceberg-js": "^0.8.1",
"tslib": "2.8.1"
},
"devDependencies": {
"form-data": "^4.0.0"
},
"jsdelivr": "dist/umd/supabase.js",
"unpkg": "dist/umd/supabase.js",
"publishConfig": {
"access": "public"
},
"engines": {
"node": ">=20.0.0"
}
}

View File

@@ -0,0 +1,82 @@
import StorageFileApi from './packages/StorageFileApi'
import StorageBucketApi from './packages/StorageBucketApi'
import StorageAnalyticsClient from './packages/StorageAnalyticsClient'
import { Fetch } from './lib/common/fetch'
import { StorageVectorsClient } from './packages/StorageVectorsClient'
/**
 * Options accepted by the StorageClient constructor and forwarded to StorageBucketApi.
 */
export interface StorageClientOptions {
  // When true, requests are resolved against the new storage hostname.
  // NOTE(review): exact resolution behavior lives in StorageBucketApi — confirm there.
  useNewHostname?: boolean
}
export class StorageClient extends StorageBucketApi {
  /**
   * Creates a client for Storage buckets, files, analytics, and vectors.
   *
   * @category File Buckets
   * @example
   * ```ts
   * import { StorageClient } from '@supabase/storage-js'
   *
   * const storage = new StorageClient('https://xyzcompany.supabase.co/storage/v1', {
   *   apikey: 'public-anon-key',
   * })
   * const avatars = storage.from('avatars')
   * ```
   */
  constructor(
    url: string,
    headers: { [key: string]: string } = {},
    fetch?: Fetch,
    opts?: StorageClientOptions
  ) {
    // All shared configuration (url, headers, fetch, options) is held by the base class.
    super(url, headers, fetch, opts)
  }
  /**
   * Perform file operation in a bucket.
   *
   * @category File Buckets
   * @param id The bucket id to operate on.
   *
   * @example
   * ```typescript
   * const avatars = supabase.storage.from('avatars')
   * ```
   */
  from(id: string): StorageFileApi {
    // Each call returns a fresh file API bound to the given bucket.
    return new StorageFileApi(this.url, this.headers, id, this.fetch)
  }
  /**
   *
   * @alpha
   *
   * Access vector storage operations.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @returns A StorageVectorsClient instance configured with the current storage settings.
   */
  get vectors(): StorageVectorsClient {
    // Vector operations are served from the '/vector' sub-path of the storage URL.
    return new StorageVectorsClient(`${this.url}/vector`, {
      fetch: this.fetch,
      headers: this.headers,
    })
  }
  /**
   *
   * @alpha
   *
   * Access analytics storage operations using Iceberg tables.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Analytics Buckets
   * @returns A StorageAnalyticsClient instance configured with the current storage settings.
   */
  get analytics(): StorageAnalyticsClient {
    // Analytics (Iceberg) operations are served from the '/iceberg' sub-path.
    return new StorageAnalyticsClient(`${this.url}/iceberg`, this.headers, this.fetch)
  }
}

View File

@@ -0,0 +1,19 @@
// Core storage client and its constructor options
export { StorageClient } from './StorageClient'
export type { StorageClientOptions } from './StorageClient'
// Analytics (Iceberg) storage
export { default as StorageAnalyticsClient } from './packages/StorageAnalyticsClient'
// Vector Storage
export {
  StorageVectorsClient,
  VectorBucketScope,
  VectorIndexScope,
} from './packages/StorageVectorsClient'
export type { StorageVectorsClientOptions } from './packages/StorageVectorsClient'
export { default as VectorBucketApi } from './packages/VectorBucketApi'
export { default as VectorDataApi } from './packages/VectorDataApi'
export { default as VectorIndexApi } from './packages/VectorIndexApi'
export type { CreateIndexOptions } from './packages/VectorIndexApi'
// Types and Errors
export * from './lib/types'
export * from './lib/common/errors'

View File

@@ -0,0 +1,103 @@
import { ErrorNamespace, isStorageError, StorageError } from './errors'
import { Fetch } from './fetch'
import { resolveFetch } from './helpers'
/**
 * @ignore
 * Base API client class for all Storage API classes.
 * Centralizes shared configuration (URL, headers, fetch implementation,
 * error namespace) and the `{ data, error }` result-handling convention.
 *
 * @typeParam TError - The error type (StorageError or subclass)
 */
export default abstract class BaseApiClient<TError extends StorageError = StorageError> {
  protected url: string
  protected headers: { [key: string]: string }
  protected fetch: Fetch
  protected shouldThrowOnError = false
  protected namespace: ErrorNamespace
  /**
   * Creates a new BaseApiClient instance.
   * @param url - Base URL for API requests
   * @param headers - Default headers for API requests
   * @param fetch - Optional custom fetch implementation
   * @param namespace - Error namespace ('storage' or 'vectors')
   */
  constructor(
    url: string,
    headers: { [key: string]: string } = {},
    fetch?: Fetch,
    namespace: ErrorNamespace = 'storage'
  ) {
    this.url = url
    this.namespace = namespace
    this.headers = headers
    // Fall back to the platform fetch when no custom implementation is given.
    this.fetch = resolveFetch(fetch)
  }
  /**
   * Switches the client to throw errors instead of returning them
   * in the `{ data, error }` tuple.
   *
   * @returns this - For method chaining
   */
  public throwOnError(): this {
    this.shouldThrowOnError = true
    return this
  }
  /**
   * Sets an HTTP header for subsequent requests.
   * The header map is copied so shared state is never mutated.
   *
   * @param name - Header name
   * @param value - Header value
   * @returns this - For method chaining
   */
  public setHeader(name: string, value: string): this {
    const next = { ...this.headers }
    next[name] = value
    this.headers = next
    return this
  }
  /**
   * Runs an API operation with the standardized error-handling contract:
   *
   * 1. Resolves `{ data, error: null }` on success.
   * 2. Resolves `{ data: null, error }` for StorageError failures
   *    (when shouldThrowOnError is false).
   * 3. Rethrows the failure when shouldThrowOnError is true, and rethrows
   *    any non-StorageError failure unconditionally.
   *
   * @typeParam T - The expected data type from the operation
   * @param operation - Async function that performs the API call
   * @returns Promise with { data, error } tuple
   *
   * @example
   * ```typescript
   * async listBuckets() {
   *   return this.handleOperation(async () => {
   *     return await get(this.fetch, `${this.url}/bucket`, {
   *       headers: this.headers,
   *     })
   *   })
   * }
   * ```
   */
  protected async handleOperation<T>(
    operation: () => Promise<T>
  ): Promise<{ data: T; error: null } | { data: null; error: TError }> {
    let data: T
    try {
      data = await operation()
    } catch (error) {
      // Unknown error shapes always propagate; known ones only when requested.
      if (this.shouldThrowOnError || !isStorageError(error)) {
        throw error
      }
      return { data: null, error: error as TError }
    }
    return { data, error: null }
  }
}

View File

@@ -0,0 +1,144 @@
/**
 * Namespace type for error classes
 * Determines the error class names and type guards
 */
export type ErrorNamespace = 'storage' | 'vectors'
/**
 * Base error class for all Storage errors.
 * A single class serves both the 'storage' and 'vectors' namespaces;
 * the namespace only changes the reported error name.
 */
export class StorageError extends Error {
  // Brand property used by the isStorageError type guard (checked via `in`).
  protected __isStorageError = true
  protected namespace: ErrorNamespace
  status?: number
  statusCode?: string
  constructor(
    message: string,
    namespace: ErrorNamespace = 'storage',
    status?: number,
    statusCode?: string
  ) {
    super(message)
    this.namespace = namespace
    this.name = namespace === 'storage' ? 'StorageError' : 'StorageVectorsError'
    this.status = status
    this.statusCode = statusCode
  }
}
/**
 * Type guard to check if an error is a StorageError.
 * Relies on the `__isStorageError` brand rather than instanceof,
 * so it also works across realms/bundles.
 * @param error - The error to check
 * @returns True if the error is a StorageError
 */
export function isStorageError(error: unknown): error is StorageError {
  if (error === null || typeof error !== 'object') {
    return false
  }
  return '__isStorageError' in error
}
/**
 * API error returned from the Storage service.
 * Carries the HTTP status and the service-specific error code.
 */
export class StorageApiError extends StorageError {
  override status: number
  override statusCode: string
  /**
   * @param message - Human-readable error message
   * @param status - HTTP status code
   * @param statusCode - Service-specific error code
   * @param namespace - Error namespace ('storage' or 'vectors')
   */
  constructor(
    message: string,
    status: number,
    statusCode: string,
    namespace: ErrorNamespace = 'storage'
  ) {
    super(message, namespace, status, statusCode)
    this.status = status
    this.statusCode = statusCode
    // Override the base-class name with the API-error variant for this namespace.
    this.name = namespace === 'storage' ? 'StorageApiError' : 'StorageVectorsApiError'
  }
  /** Serializes the error for logging / JSON transport. */
  toJSON() {
    const { name, message, status, statusCode } = this
    return { name, message, status, statusCode }
  }
}
/**
 * Wrapper for failures that don't match the expected error patterns.
 * Keeps the original error around for debugging.
 */
export class StorageUnknownError extends StorageError {
  originalError: unknown
  constructor(message: string, originalError: unknown, namespace: ErrorNamespace = 'storage') {
    super(message, namespace)
    this.originalError = originalError
    this.name = namespace === 'storage' ? 'StorageUnknownError' : 'StorageVectorsUnknownError'
  }
}
// ============================================================================
// Backward Compatibility Exports for Vectors
// ============================================================================
/**
 * @deprecated Use StorageError with namespace='vectors' instead
 * Subclass kept for backward compatibility with existing vector storage code.
 * Instances report name 'StorageVectorsError' via the base-class constructor.
 */
export class StorageVectorsError extends StorageError {
  constructor(message: string) {
    super(message, 'vectors')
  }
}
/**
 * Type guard to check if an error is a vectors-namespace StorageError.
 * Note: matches ANY StorageError carrying namespace='vectors', not only
 * instances of the StorageVectorsError subclass.
 * @param error - The error to check
 * @returns True if the error is a StorageVectorsError
 */
export function isStorageVectorsError(error: unknown): error is StorageVectorsError {
  return isStorageError(error) && (error as StorageError)['namespace'] === 'vectors'
}
/**
 * @deprecated Use StorageApiError with namespace='vectors' instead
 * Subclass kept for backward compatibility with existing vector storage code.
 */
export class StorageVectorsApiError extends StorageApiError {
  constructor(message: string, status: number, statusCode: string) {
    super(message, status, statusCode, 'vectors')
  }
}
/**
 * @deprecated Use StorageUnknownError with namespace='vectors' instead
 * Subclass kept for backward compatibility with existing vector storage code.
 */
export class StorageVectorsUnknownError extends StorageUnknownError {
  constructor(message: string, originalError: unknown) {
    super(message, originalError, 'vectors')
  }
}
/**
 * Error codes specific to S3 Vectors API
 * Maps AWS service errors to application-friendly error codes.
 * The string values are compared against the `statusCode` field of
 * StorageApiError, so they must match the codes the service returns.
 */
export enum StorageVectorsErrorCode {
  /** Internal server fault (HTTP 500) */
  InternalError = 'InternalError',
  /** Resource already exists / conflict (HTTP 409) */
  S3VectorConflictException = 'S3VectorConflictException',
  /** Resource not found (HTTP 404) */
  S3VectorNotFoundException = 'S3VectorNotFoundException',
  /** Delete bucket while not empty (HTTP 400) */
  S3VectorBucketNotEmpty = 'S3VectorBucketNotEmpty',
  /** Exceeds bucket quota/limit (HTTP 400) */
  S3VectorMaxBucketsExceeded = 'S3VectorMaxBucketsExceeded',
  /** Exceeds index quota/limit (HTTP 400) */
  S3VectorMaxIndexesExceeded = 'S3VectorMaxIndexesExceeded',
}

View File

@@ -0,0 +1,294 @@
import { StorageApiError, StorageUnknownError, ErrorNamespace } from './errors'
import { isPlainObject, resolveResponse } from './helpers'
import { FetchParameters } from '../types'
// Alias for the platform fetch signature; custom implementations must match it.
export type Fetch = typeof fetch
/**
 * Options for fetch requests
 */
export interface FetchOptions {
  /** Extra request headers; merged over the computed defaults (e.g. Content-Type) */
  headers?: {
    [key: string]: string
  }
  /** Forwarded as the fetch `duplex` option when a request body is sent */
  duplex?: string
  /** When true, resolve with the raw Response instead of parsing the JSON body */
  noResolveJson?: boolean
}
/**
 * HTTP methods supported by the API
 */
export type RequestMethodType = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'HEAD'
/**
 * Extracts a human-readable message from the various error payload shapes
 * the API can return, falling back to the serialized payload.
 * Checks fields in priority order and stops at the first truthy one.
 * @param err - Error object from API
 * @returns Human-readable error message
 */
const _getErrorMessage = (err: any): string => {
  if (err.msg) return err.msg
  if (err.message) return err.message
  if (err.error_description) return err.error_description
  const inner = typeof err.error === 'string' ? err.error : err.error?.message
  return inner || JSON.stringify(err)
}
/**
 * Handles fetch errors and converts them to Storage error types.
 *
 * Response-like failures (thrown `Response` objects) become StorageApiError,
 * using the JSON body for the message/code when it parses, or the HTTP status
 * text otherwise. Anything else becomes StorageUnknownError.
 *
 * Fix: the previous JSON-parse `.catch()` contained a byte-identical
 * if/else for the 'vectors' and 'storage' namespaces — the duplicate
 * branches are collapsed into a single fallback.
 *
 * @param error - The error caught from fetch
 * @param reject - Promise rejection function
 * @param options - Fetch options (currently unused; kept for call-site compatibility)
 * @param namespace - Error namespace ('storage' or 'vectors')
 */
const handleError = async (
  error: unknown,
  reject: (reason?: any) => void,
  options: FetchOptions | undefined,
  namespace: ErrorNamespace
) => {
  // Check if error is a Response-like object (has status and ok properties).
  // This is more reliable than instanceof, which can fail across realms.
  const isResponseLike =
    error &&
    typeof error === 'object' &&
    'status' in error &&
    'ok' in error &&
    typeof (error as any).status === 'number'
  if (!isResponseLike) {
    reject(new StorageUnknownError(_getErrorMessage(error), error, namespace))
    return
  }
  const responseError = error as any
  const status = responseError.status || 500
  // Fallback used whenever a JSON body is unavailable or fails to parse:
  // build the ApiError from the HTTP status alone (same for both namespaces).
  const rejectWithHttpStatus = () => {
    const statusCode = status + ''
    const message = responseError.statusText || `HTTP ${status} error`
    reject(new StorageApiError(message, status, statusCode, namespace))
  }
  if (typeof responseError.json !== 'function') {
    // No json() method available, create error from status
    rejectWithHttpStatus()
    return
  }
  responseError
    .json()
    .then((err: any) => {
      const statusCode = err?.statusCode || err?.code || status + ''
      reject(new StorageApiError(_getErrorMessage(err), status, statusCode, namespace))
    })
    .catch(rejectWithHttpStatus)
}
/**
 * Builds request parameters for fetch calls.
 * GET/HEAD requests (and any request without a body) carry no payload;
 * plain-object bodies are JSON-stringified with a JSON Content-Type,
 * any other body (Blob, FormData, stream, ...) is passed through as-is.
 * @param method - HTTP method
 * @param options - Custom fetch options
 * @param parameters - Additional fetch parameters like AbortSignal
 * @param body - Request body (will be JSON stringified if plain object)
 * @returns Complete fetch request parameters
 */
const _getRequestParams = (
  method: RequestMethodType,
  options?: FetchOptions,
  parameters?: FetchParameters,
  body?: object
) => {
  const params: { [k: string]: any } = { method, headers: options?.headers || {} }
  const hasBody = !(method === 'GET' || method === 'HEAD' || !body)
  if (hasBody) {
    if (isPlainObject(body)) {
      // Caller-supplied headers win over the default JSON Content-Type.
      params.headers = { 'Content-Type': 'application/json', ...options?.headers }
      params.body = JSON.stringify(body)
    } else {
      params.body = body
    }
    if (options?.duplex) {
      params.duplex = options.duplex
    }
  }
  return { ...params, ...parameters }
}
/**
 * Internal request handler that wraps fetch with error handling.
 * Non-2xx responses are thrown as the Response object itself and then
 * mapped to Storage errors by handleError (which is callback-style,
 * hence the explicit Promise wrapper below).
 * @param fetcher - Fetch function to use
 * @param method - HTTP method
 * @param url - Request URL
 * @param options - Custom fetch options
 * @param parameters - Additional fetch parameters
 * @param body - Request body
 * @param namespace - Error namespace ('storage' or 'vectors')
 * @returns Promise with parsed response or error
 */
async function _handleRequest(
  fetcher: Fetch,
  method: RequestMethodType,
  url: string,
  options: FetchOptions | undefined,
  parameters: FetchParameters | undefined,
  body: object | undefined,
  namespace: ErrorNamespace
): Promise<any> {
  // new Promise is needed here because handleError reports failures via a
  // reject callback rather than returning/throwing.
  return new Promise((resolve, reject) => {
    fetcher(url, _getRequestParams(method, options, parameters, body))
      .then((result) => {
        // Treat any non-ok response as an error; handleError inspects it below.
        if (!result.ok) throw result
        // Caller asked for the raw Response (e.g. HEAD requests).
        if (options?.noResolveJson) return result
        // AWS S3 Vectors API returns 200 OK with content-length: 0 for successful mutations
        // (putVectors, deleteVectors) instead of 204 or JSON response. This is AWS's design choice
        // for performance optimization of bulk operations (up to 500 vectors per request).
        // We handle this to prevent "Unexpected end of JSON input" errors when calling result.json()
        if (namespace === 'vectors') {
          const contentType = result.headers.get('content-type')
          const contentLength = result.headers.get('content-length')
          // Return empty object for explicitly empty responses
          if (contentLength === '0' || result.status === 204) {
            return {}
          }
          // Return empty object if no JSON content type
          if (!contentType || !contentType.includes('application/json')) {
            return {}
          }
        }
        return result.json()
      })
      .then((data) => resolve(data))
      .catch((error) => handleError(error, reject, options, namespace))
  })
}
/**
 * Creates a fetch API bound to the specified error namespace.
 * All helpers delegate to _handleRequest, which performs the request and
 * converts failures into namespace-appropriate Storage errors.
 * @param namespace - Error namespace ('storage' or 'vectors')
 * @returns Object with HTTP method functions
 */
export function createFetchApi(namespace: ErrorNamespace = 'storage') {
  return {
    /**
     * Performs a GET request.
     * @param fetcher - Fetch function to use
     * @param url - Request URL
     * @param options - Custom fetch options
     * @param parameters - Additional fetch parameters
     * @returns Promise with parsed response
     */
    get: (
      fetcher: Fetch,
      url: string,
      options?: FetchOptions,
      parameters?: FetchParameters
    ): Promise<any> =>
      _handleRequest(fetcher, 'GET', url, options, parameters, undefined, namespace),
    /**
     * Performs a POST request.
     * @param fetcher - Fetch function to use
     * @param url - Request URL
     * @param body - Request body to be JSON stringified
     * @param options - Custom fetch options
     * @param parameters - Additional fetch parameters
     * @returns Promise with parsed response
     */
    post: (
      fetcher: Fetch,
      url: string,
      body: object,
      options?: FetchOptions,
      parameters?: FetchParameters
    ): Promise<any> =>
      _handleRequest(fetcher, 'POST', url, options, parameters, body, namespace),
    /**
     * Performs a PUT request.
     * @param fetcher - Fetch function to use
     * @param url - Request URL
     * @param body - Request body to be JSON stringified
     * @param options - Custom fetch options
     * @param parameters - Additional fetch parameters
     * @returns Promise with parsed response
     */
    put: (
      fetcher: Fetch,
      url: string,
      body: object,
      options?: FetchOptions,
      parameters?: FetchParameters
    ): Promise<any> =>
      _handleRequest(fetcher, 'PUT', url, options, parameters, body, namespace),
    /**
     * Performs a HEAD request.
     * The response body is never JSON-parsed (there is none).
     * @param fetcher - Fetch function to use
     * @param url - Request URL
     * @param options - Custom fetch options
     * @param parameters - Additional fetch parameters
     * @returns Promise with Response object (not JSON parsed)
     */
    head: (
      fetcher: Fetch,
      url: string,
      options?: FetchOptions,
      parameters?: FetchParameters
    ): Promise<any> =>
      _handleRequest(
        fetcher,
        'HEAD',
        url,
        { ...options, noResolveJson: true },
        parameters,
        undefined,
        namespace
      ),
    /**
     * Performs a DELETE request.
     * @param fetcher - Fetch function to use
     * @param url - Request URL
     * @param body - Request body to be JSON stringified
     * @param options - Custom fetch options
     * @param parameters - Additional fetch parameters
     * @returns Promise with parsed response
     */
    remove: (
      fetcher: Fetch,
      url: string,
      body: object,
      options?: FetchOptions,
      parameters?: FetchParameters
    ): Promise<any> =>
      _handleRequest(fetcher, 'DELETE', url, options, parameters, body, namespace),
  }
}
// Default helpers bound to the 'storage' namespace (backward compatible surface).
const defaultApi = createFetchApi('storage')
export const get = defaultApi.get
export const post = defaultApi.post
export const put = defaultApi.put
export const head = defaultApi.head
export const remove = defaultApi.remove
// Dedicated helpers bound to the 'vectors' namespace so vector errors are classified correctly.
export const vectorsApi = createFetchApi('vectors')

View File

@@ -0,0 +1,146 @@
type Fetch = typeof fetch
/**
 * Resolves the fetch implementation to use.
 * Prefers the caller-supplied fetch; otherwise defers to the global fetch,
 * which is looked up at call time (so later monkey-patches are honored).
 *
 * @param customFetch - Optional custom fetch implementation
 * @returns Resolved fetch function
 */
export const resolveFetch = (customFetch?: Fetch): Fetch =>
  customFetch
    ? (...args) => customFetch(...args)
    : (...args) => fetch(...args)
/**
 * Resolves the Response constructor to use.
 * Always returns the native (global) Response constructor.
 *
 * @returns Response constructor
 */
export const resolveResponse = (): typeof Response => Response
/**
 * Determine if input is a plain object.
 * An object is plain if it's created by {}, new Object(), or Object.create(null)
 * (including a one-level prototype chain ending in null), and it carries
 * neither Symbol.toStringTag nor Symbol.iterator.
 *
 * @param value - Value to check
 * @returns True if value is a plain object
 * @source https://github.com/sindresorhus/is-plain-obj
 */
export const isPlainObject = (value: object): boolean => {
  if (value === null || typeof value !== 'object') {
    return false
  }
  // Tagged or iterable objects (Map, Set, arrays, generators, ...) are not plain.
  if (Symbol.toStringTag in value || Symbol.iterator in value) {
    return false
  }
  const proto = Object.getPrototypeOf(value)
  if (proto === null || proto === Object.prototype) {
    return true
  }
  return Object.getPrototypeOf(proto) === null
}
/**
 * Recursively converts object keys from snake_case (or kebab-case) to camelCase.
 * Arrays are mapped element-wise; primitives and functions pass through untouched.
 * Used for normalizing API responses.
 *
 * @param item - Object to convert
 * @returns Converted object with camelCase keys
 */
export const recursiveToCamel = (item: Record<string, any>): unknown => {
  if (Array.isArray(item)) {
    return item.map((entry) => recursiveToCamel(entry))
  }
  if (typeof item === 'function' || item !== Object(item)) {
    return item
  }
  const converted: Record<string, any> = {}
  for (const [key, value] of Object.entries(item)) {
    // '_x' / '-x' becomes 'X'; the separator is dropped.
    const camelKey = key.replace(/([-_][a-z])/gi, (c) => c.toUpperCase().replace(/[-_]/g, ''))
    converted[camelKey] = recursiveToCamel(value)
  }
  return converted
}
/**
 * Validates if a given bucket name is valid according to Supabase Storage API rules
 * Mirrors backend validation from: storage/src/storage/limits.ts:isValidBucketName()
 *
 * Rules:
 * - Length: 1-100 characters
 * - Allowed characters: alphanumeric (a-z, A-Z, 0-9), underscore (_), and safe special characters
 * - Safe special characters: ! - . * ' ( ) space & $ @ = ; : + , ?
 * - Forbidden: path separators (/, \), leading/trailing whitespace
 * - Note: consecutive periods (..) ARE allowed by the backend — the AWS restriction
 *   on relative paths applies to object keys, not bucket names
 *
 * AWS S3 Reference: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
 *
 * @param bucketName - The bucket name to validate
 * @returns true if valid, false otherwise
 */
export const isValidBucketName = (bucketName: string): boolean => {
  // Must be a non-empty string (guards runtime callers passing non-strings).
  if (typeof bucketName !== 'string' || !bucketName) {
    return false
  }
  // Length 1-100 with no leading/trailing whitespace.
  if (bucketName.length > 100 || bucketName.trim() !== bucketName) {
    return false
  }
  // Path separators are rejected explicitly for safety.
  if (bucketName.includes('/') || bucketName.includes('\\')) {
    return false
  }
  // Allowed character set; matches backend regex
  // /^(\w|!|-|\.|\*|'|\(|\)| |&|\$|@|=|;|:|\+|,|\?)*$/
  return /^[\w!.\*'() &$@=;:+,?-]+$/.test(bucketName)
}
/**
 * Normalizes a number array to float32 format.
 * Round-tripping through Float32Array clamps every value to 32-bit
 * float precision, matching what the vector store holds.
 *
 * @param values - Array of numbers to normalize
 * @returns Normalized float32 array
 */
export const normalizeToFloat32 = (values: number[]): number[] => [...new Float32Array(values)]
/**
 * Validates that a vector's dimension matches the expected dimension.
 * A missing expectedDimension disables the check entirely.
 *
 * @param vector - Vector data to validate
 * @param expectedDimension - Expected vector dimension
 * @throws Error if dimensions don't match
 */
export const validateVectorDimension = (
  vector: { float32: number[] },
  expectedDimension?: number
): void => {
  if (expectedDimension === undefined) {
    return
  }
  const actual = vector.float32.length
  if (actual !== expectedDimension) {
    throw new Error(`Vector dimension mismatch: expected ${expectedDimension}, got ${actual}`)
  }
}

View File

@@ -0,0 +1,4 @@
import { version } from './version'
// Default headers attached to every Storage API request.
// X-Client-Info lets the backend attribute traffic to this client and version.
export const DEFAULT_HEADERS = {
  'X-Client-Info': `storage-js/${version}`,
}

View File

@@ -0,0 +1,654 @@
import { StorageError } from './common/errors'
/**
 * Type of storage bucket
 * - STANDARD: Regular file storage buckets
 * - ANALYTICS: Iceberg table-based buckets for analytical workloads
 */
export type BucketType = 'STANDARD' | 'ANALYTICS'
/**
 * A file storage bucket as returned by the bucket management API.
 */
export interface Bucket {
  /** Unique bucket identifier */
  id: string
  /** Bucket type — presumably absent in legacy responses; TODO confirm against API */
  type?: BucketType
  /** Bucket name */
  name: string
  /** Owner identifier */
  owner: string
  /** Maximum allowed file size, if restricted (presumably bytes — confirm with API docs) */
  file_size_limit?: number
  /** Allowed MIME types for uploads, if restricted */
  allowed_mime_types?: string[]
  /** Creation timestamp */
  created_at: string
  /** Last-update timestamp */
  updated_at: string
  /** Whether the bucket is publicly accessible */
  public: boolean
}
/**
 * Options for listing buckets (pagination, sorting, filtering).
 */
export interface ListBucketOptions {
  /** Maximum number of buckets to return */
  limit?: number
  /** Number of buckets to skip (pagination offset) */
  offset?: number
  /** Column to sort the results by */
  sortColumn?: 'id' | 'name' | 'created_at' | 'updated_at'
  /** Sort direction */
  sortOrder?: 'asc' | 'desc'
  /** Search string to filter the results by */
  search?: string
}
/**
 * Represents an Analytics Bucket using Apache Iceberg table format.
 * Analytics buckets are optimized for analytical queries and data processing.
 */
export interface AnalyticBucket {
  /** Bucket name, which serves as its unique identifier */
  name: string
  /** Bucket type - always 'ANALYTICS' for analytics buckets */
  type: 'ANALYTICS'
  /** Storage format used (e.g., 'iceberg') */
  format: string
  /** ISO 8601 timestamp of bucket creation */
  created_at: string
  /** ISO 8601 timestamp of last update */
  updated_at: string
}
/**
* Metadata object returned by the Storage API for files
* Contains information about file size, type, caching, and HTTP response details
*/
export interface FileMetadata {
/** Entity tag for caching and conditional requests */
eTag: string
/** File size in bytes */
size: number
/** MIME type of the file */
mimetype: string
/** Cache control directive (e.g., "max-age=3600") */
cacheControl: string
/** Last modification timestamp (ISO 8601) */
lastModified: string
/** Content length in bytes (usually same as size) */
contentLength: number
/** HTTP status code from the storage backend */
httpStatusCode: number
/** Any additional custom metadata stored with the file */
[key: string]: any
}
/**
* File object returned by the List V1 API (list() method)
* Note: Folder entries will have null values for most fields except name
*
* Warning: Some fields may not be present in all API responses. Fields like
* bucket_id, owner, and buckets are not returned by list() operations.
*/
export interface FileObject {
/** File or folder name (relative to the prefix) - always present */
name: string
/** Unique identifier for the file (null for folders) */
id: string | null
/** Last update timestamp (null for folders) */
updated_at: string | null
/** Creation timestamp (null for folders) */
created_at: string | null
/** @deprecated Last access timestamp (null for folders) */
last_accessed_at: string | null
/** File metadata including size, mimetype, etc. (null for folders) */
metadata: FileMetadata | null
/**
* @deprecated Bucket identifier - NOT returned by list() operations.
* May be present in remove() responses. Do not rely on this field.
*/
bucket_id?: string
/**
* @deprecated Owner identifier - NOT returned by list() or remove() operations.
* This field should not be relied upon.
*/
owner?: string
/**
* @deprecated Bucket object - NOT returned by list() or remove() operations.
* This field should not be relied upon.
*/
buckets?: Bucket
}
/**
* File object returned by the Info endpoint (info() method)
* Contains detailed metadata for a specific file
*/
export interface FileObjectV2 {
/** Unique identifier for the file */
id: string
/** File version identifier */
version: string
/** File name */
name: string
/** Bucket identifier */
bucket_id: string
/** Creation timestamp */
created_at: string
/** File size in bytes */
size?: number
/** Cache control header value */
cache_control?: string
/** MIME content type */
content_type?: string
/** Entity tag for caching */
etag?: string
/** Last modification timestamp (replaces updated_at) */
last_modified?: string
/** Custom file metadata */
metadata?: FileMetadata
/**
* @deprecated The API returns last_modified instead.
* This field may not be present in responses.
*/
updated_at?: string
}
/**
 * Sort specification for list() (V1 API). See SortByV2 for the stricter V2 variant.
 */
export interface SortBy {
  /** Column to sort by — can be any column inside a FileObject */
  column?: string
  /** Sort direction — presumably 'asc' | 'desc'; left as a plain string for V1 compatibility */
  order?: string
}
export interface FileOptions {
/**
* The number of seconds the asset is cached in the browser and in the Supabase CDN. This is set in the `Cache-Control: max-age=<seconds>` header. Defaults to 3600 seconds.
*/
cacheControl?: string
/**
* the `Content-Type` header value. Should be specified if using a `fileBody` that is neither `Blob` nor `File` nor `FormData`, otherwise will default to `text/plain;charset=UTF-8`.
*/
contentType?: string
/**
* When upsert is set to true, the file is overwritten if it exists. When set to false, an error is thrown if the object already exists. Defaults to false.
*/
upsert?: boolean
/**
* The duplex option is a string parameter that enables or disables duplex streaming, allowing for both reading and writing data in the same stream. It can be passed as an option to the fetch() method.
*/
duplex?: string
/**
* The metadata option is an object that allows you to store additional information about the file. This information can be used to filter and search for files. The metadata object can contain any key-value pairs you want to store.
*/
metadata?: Record<string, any>
/**
* Optionally add extra headers
*/
headers?: Record<string, string>
}
/**
 * Options for copy/move operations that may target a different bucket.
 */
export interface DestinationOptions {
  /** Bucket to place the object in — presumably defaults to the source bucket when omitted; confirm with API */
  destinationBucket?: string
}
export interface SearchOptions {
/**
* The number of files you want to be returned.
* @default 100
*/
limit?: number
/**
* The starting position.
*/
offset?: number
/**
* The column to sort by. Can be any column inside a FileObject.
*/
sortBy?: SortBy
/**
* The search string to filter files by.
*/
search?: string
}
/**
 * Sort specification for listV2(); restricts columns and order to known values.
 */
export interface SortByV2 {
  /** Column to sort by */
  column: 'name' | 'updated_at' | 'created_at'
  /** Sort direction; per SearchV2Options the overall default is 'name asc' */
  order?: 'asc' | 'desc'
}
export interface SearchV2Options {
/**
* The number of files you want to be returned.
* @default 1000
*/
limit?: number
/**
* The prefix search string to filter files by.
*/
prefix?: string
/**
* The cursor used for pagination. Pass the value received from nextCursor of the previous request.
*/
cursor?: string
/**
* Whether to emulate a hierarchical listing of objects using delimiters.
*
* - When `false` (default), all objects are listed as flat key/value pairs.
* - When `true`, the response groups objects by delimiter, making it appear
* like a file/folder hierarchy.
*
* @default false
*/
with_delimiter?: boolean
/**
* The column and order to sort by
* @default 'name asc'
*/
sortBy?: SortByV2
}
/**
 * File object returned by the List V2 API (listV2() method)
 * Objects and folders are returned in separate arrays - this type represents
 * actual files only. Use SearchV2Folder for folder entries.
 */
export interface SearchV2Object {
  /** File name */
  name: string
  /** Full object key/path */
  key?: string
  /** Unique identifier for the file */
  id: string
  /** Last update timestamp */
  updated_at: string
  /** Creation timestamp */
  created_at: string
  /** File metadata including size, mimetype, etc. (null if not yet set) */
  metadata: FileMetadata | null
  /** @deprecated Last access timestamp */
  last_accessed_at: string
}
/**
 * Folder entry returned by the List V2 API (listV2() method) when using with_delimiter: true
 */
export interface SearchV2Folder {
  /** Folder name/prefix */
  name: string
  /** Full folder key/path */
  key?: string
}
/**
 * Result page returned by the List V2 API (listV2() method).
 */
export interface SearchV2Result {
  /** True when more results are available beyond this page. */
  hasNext: boolean
  /** Folder entries (populated when using with_delimiter: true). */
  folders: SearchV2Folder[]
  /** File entries. */
  objects: SearchV2Object[]
  /** Cursor to pass as SearchV2Options.cursor to fetch the next page, if any. */
  nextCursor?: string
}
/**
 * Extra fetch-level parameters for download/request methods.
 */
export interface FetchParameters {
  /**
   * Pass in an AbortController's signal to cancel the request.
   */
  signal?: AbortSignal
  /**
   * Controls how the request interacts with the browser's HTTP cache.
   * - 'default': Use standard cache behavior
   * - 'no-store': Bypass cache entirely (useful in Edge Functions)
   * - 'reload': Bypass cache but update it with response
   * - 'no-cache': Validate with server before using cached response
   * - 'force-cache': Use cache even if stale
   * - 'only-if-cached': Only use cache, fail if not cached
   */
  cache?: 'default' | 'no-store' | 'reload' | 'no-cache' | 'force-cache' | 'only-if-cached'
}
// TODO: need to check for metadata props. The API swagger doesn't have them.
export interface Metadata {
  /** Object name. */
  name: string
}
/**
 * Image transformation options applied when downloading or generating URLs.
 */
export interface TransformOptions {
  /**
   * The width of the image in pixels.
   */
  width?: number
  /**
   * The height of the image in pixels.
   */
  height?: number
  /**
   * The resize mode can be cover, contain or fill. Defaults to cover.
   * Cover resizes the image to maintain its aspect ratio while filling the entire width and height.
   * Contain resizes the image to maintain its aspect ratio while fitting the entire image within the width and height.
   * Fill resizes the image to fill the entire width and height. If the object's aspect ratio does not match the width and height, the image will be stretched to fit.
   */
  resize?: 'cover' | 'contain' | 'fill'
  /**
   * Set the quality of the returned image.
   * A number from 20 to 100, with 100 being the highest quality.
   * Defaults to 80
   */
  quality?: number
  /**
   * Specify the format of the image requested.
   *
   * When using 'origin' we force the format to be the same as the original image.
   * When this option is not passed in, images are optimized to modern image formats like Webp.
   */
  format?: 'origin'
}
/**
 * Converts a snake_case string literal type to camelCase at the type level
 * (e.g. 'created_at' -> 'createdAt').
 */
type CamelCase<S extends string> = S extends `${infer P1}_${infer P2}${infer P3}`
  ? `${Lowercase<P1>}${Uppercase<P2>}${CamelCase<P3>}`
  : S
/**
 * Maps every snake_case key of T to its camelCase equivalent, keeping value types.
 */
export type Camelize<T> = {
  [K in keyof T as CamelCase<Extract<K, string>>]: T[K]
}
/**
 * Result union for download operations: on success `data` holds the payload and
 * `error` is null; on failure `data` is null and `error` holds a StorageError.
 */
export type DownloadResult<T> =
  | {
      data: T
      error: null
    }
  | {
      data: null
      error: StorageError
    }
// ============================================================================
// VECTOR STORAGE TYPES
// ============================================================================
/**
 * Configuration for encryption at rest
 * @property kmsKeyArn - ARN of the KMS key used for encryption
 * @property sseType - Server-side encryption type (e.g., 'KMS')
 */
export interface EncryptionConfiguration {
  /** ARN of the KMS key used for encryption */
  kmsKeyArn?: string
  /** Server-side encryption type (e.g., 'KMS') */
  sseType?: string
}
/**
 * Vector bucket metadata
 * @property vectorBucketName - Unique name of the vector bucket
 * @property creationTime - Unix timestamp of when the bucket was created
 * @property encryptionConfiguration - Optional encryption settings
 */
export interface VectorBucket {
  /** Unique name of the vector bucket */
  vectorBucketName: string
  /** Unix timestamp of when the bucket was created */
  creationTime?: number
  /** Optional encryption settings */
  encryptionConfiguration?: EncryptionConfiguration
}
/**
 * Metadata configuration for vector index
 * Defines which metadata keys should not be indexed for filtering
 * @property nonFilterableMetadataKeys - Array of metadata keys that cannot be used in filters
 */
export interface MetadataConfiguration {
  /** Array of metadata keys that cannot be used in filters */
  nonFilterableMetadataKeys?: string[]
}
/**
 * Supported data types for vectors
 * Currently only float32 is supported
 */
export type VectorDataType = 'float32'
/**
 * Distance metrics for vector similarity search
 */
export type DistanceMetric = 'cosine' | 'euclidean' | 'dotproduct'
/**
 * Vector index configuration and metadata
 * @property indexName - Unique name of the index within the bucket
 * @property vectorBucketName - Name of the parent vector bucket
 * @property dataType - Data type of vector components (currently only 'float32')
 * @property dimension - Dimensionality of vectors (e.g., 384, 768, 1536)
 * @property distanceMetric - Similarity metric used for queries
 * @property metadataConfiguration - Configuration for metadata filtering
 * @property creationTime - Unix timestamp of when the index was created
 */
export interface VectorIndex {
  /** Unique name of the index within the bucket */
  indexName: string
  /** Name of the parent vector bucket */
  vectorBucketName: string
  /** Data type of vector components (currently only 'float32') */
  dataType: VectorDataType
  /** Dimensionality of vectors (e.g., 384, 768, 1536) */
  dimension: number
  /** Similarity metric used for queries */
  distanceMetric: DistanceMetric
  /** Configuration for metadata filtering */
  metadataConfiguration?: MetadataConfiguration
  /** Unix timestamp of when the index was created */
  creationTime?: number
}
/**
 * Vector data representation
 * Vectors must be float32 arrays with dimensions matching the index
 * @property float32 - Array of 32-bit floating point numbers
 */
export interface VectorData {
  /** Array of 32-bit floating point numbers */
  float32: number[]
}
/**
 * Arbitrary JSON metadata attached to vectors
 * Keys configured as non-filterable in the index can be stored but not queried
 */
export type VectorMetadata = Record<string, any>
/**
 * Single vector object for insertion/update
 * @property key - Unique identifier for the vector
 * @property data - Vector embedding data
 * @property metadata - Optional arbitrary metadata
 */
export interface VectorObject {
  /** Unique identifier for the vector */
  key: string
  /** Vector embedding data */
  data: VectorData
  /** Optional arbitrary metadata */
  metadata?: VectorMetadata
}
/**
 * Vector object returned from queries with optional distance
 * @property key - Unique identifier for the vector
 * @property data - Vector embedding data (if requested)
 * @property metadata - Arbitrary metadata (if requested)
 * @property distance - Similarity distance from query vector (if requested)
 */
export interface VectorMatch {
  /** Unique identifier for the vector */
  key: string
  /** Vector embedding data (if requested) */
  data?: VectorData
  /** Arbitrary metadata (if requested) */
  metadata?: VectorMetadata
  /** Similarity distance from query vector (if requested) */
  distance?: number
}
/**
 * Options for fetching vector buckets
 * @property prefix - Filter buckets by name prefix
 * @property maxResults - Maximum number of results to return (default: 100)
 * @property nextToken - Token for pagination from previous response
 */
export interface ListVectorBucketsOptions {
  /** Filter buckets by name prefix */
  prefix?: string
  /** Maximum number of results to return (default: 100) */
  maxResults?: number
  /** Token for pagination from previous response */
  nextToken?: string
}
/**
 * Response from listing vector buckets
 * @property vectorBuckets - Array of bucket names
 * @property nextToken - Token for fetching next page (if more results exist)
 */
export interface ListVectorBucketsResponse {
  /** Array of bucket name records */
  vectorBuckets: { vectorBucketName: string }[]
  /** Token for fetching next page (if more results exist) */
  nextToken?: string
}
/**
 * Options for listing indexes within a bucket
 * @property vectorBucketName - Name of the parent vector bucket
 * @property prefix - Filter indexes by name prefix
 * @property maxResults - Maximum number of results to return (default: 100)
 * @property nextToken - Token for pagination from previous response
 */
export interface ListIndexesOptions {
  /** Name of the parent vector bucket */
  vectorBucketName: string
  /** Filter indexes by name prefix */
  prefix?: string
  /** Maximum number of results to return (default: 100) */
  maxResults?: number
  /** Token for pagination from previous response */
  nextToken?: string
}
/**
 * Response from listing indexes
 * @property indexes - Array of index names
 * @property nextToken - Token for fetching next page (if more results exist)
 */
export interface ListIndexesResponse {
  /** Array of index name records */
  indexes: { indexName: string }[]
  /** Token for fetching next page (if more results exist) */
  nextToken?: string
}
/**
 * Options for batch reading vectors
 * @property vectorBucketName - Name of the vector bucket
 * @property indexName - Name of the index
 * @property keys - Array of vector keys to retrieve
 * @property returnData - Whether to include vector data in response
 * @property returnMetadata - Whether to include metadata in response
 */
export interface GetVectorsOptions {
  /** Name of the vector bucket */
  vectorBucketName: string
  /** Name of the index */
  indexName: string
  /** Array of vector keys to retrieve */
  keys: string[]
  /** Whether to include vector data in response */
  returnData?: boolean
  /** Whether to include metadata in response */
  returnMetadata?: boolean
}
/**
 * Response from getting vectors
 * @property vectors - Array of retrieved vector objects
 */
export interface GetVectorsResponse {
  /** Array of retrieved vector objects */
  vectors: VectorMatch[]
}
/**
 * Options for batch inserting/updating vectors
 * @property vectorBucketName - Name of the vector bucket
 * @property indexName - Name of the index
 * @property vectors - Array of vectors to insert/upsert (1-500 items)
 */
export interface PutVectorsOptions {
  /** Name of the vector bucket */
  vectorBucketName: string
  /** Name of the index */
  indexName: string
  /** Array of vectors to insert/upsert (1-500 items) */
  vectors: VectorObject[]
}
/**
 * Options for batch deleting vectors
 * @property vectorBucketName - Name of the vector bucket
 * @property indexName - Name of the index
 * @property keys - Array of vector keys to delete (1-500 items)
 */
export interface DeleteVectorsOptions {
  /** Name of the vector bucket */
  vectorBucketName: string
  /** Name of the index */
  indexName: string
  /** Array of vector keys to delete (1-500 items) */
  keys: string[]
}
/**
 * Options for listing/scanning vectors in an index
 * Supports parallel scanning via segment configuration
 * @property vectorBucketName - Name of the vector bucket
 * @property indexName - Name of the index
 * @property maxResults - Maximum number of results to return (default: 500, max: 1000)
 * @property nextToken - Token for pagination from previous response
 * @property returnData - Whether to include vector data in response
 * @property returnMetadata - Whether to include metadata in response
 * @property segmentCount - Total number of parallel segments (1-16)
 * @property segmentIndex - Zero-based index of this segment (0 to segmentCount-1)
 */
export interface ListVectorsOptions {
  /** Name of the vector bucket */
  vectorBucketName: string
  /** Name of the index */
  indexName: string
  /** Maximum number of results to return (default: 500, max: 1000) */
  maxResults?: number
  /** Token for pagination from previous response */
  nextToken?: string
  /** Whether to include vector data in response */
  returnData?: boolean
  /** Whether to include metadata in response */
  returnMetadata?: boolean
  /** Total number of parallel segments (1-16) */
  segmentCount?: number
  /** Zero-based index of this segment (0 to segmentCount-1) */
  segmentIndex?: number
}
/**
 * Response from listing vectors
 * @property vectors - Array of vector objects
 * @property nextToken - Token for fetching next page (if more results exist)
 */
export interface ListVectorsResponse {
  /** Array of vector objects */
  vectors: VectorMatch[]
  /** Token for fetching next page (if more results exist) */
  nextToken?: string
}
/**
 * JSON filter expression for metadata filtering
 * Format and syntax depend on the S3 Vectors service implementation
 */
export type VectorFilter = Record<string, any>
/**
 * Options for querying similar vectors (ANN search)
 * @property vectorBucketName - Name of the vector bucket
 * @property indexName - Name of the index
 * @property queryVector - Query vector to find similar vectors
 * @property topK - Number of nearest neighbors to return (default: 10)
 * @property filter - Optional JSON filter for metadata
 * @property returnDistance - Whether to include distance scores
 * @property returnMetadata - Whether to include metadata in results
 */
export interface QueryVectorsOptions {
  /** Name of the vector bucket */
  vectorBucketName: string
  /** Name of the index */
  indexName: string
  /** Query vector to find similar vectors */
  queryVector: VectorData
  /** Number of nearest neighbors to return (default: 10) */
  topK?: number
  /** Optional JSON filter for metadata */
  filter?: VectorFilter
  /** Whether to include distance scores */
  returnDistance?: boolean
  /** Whether to include metadata in results */
  returnMetadata?: boolean
}
/**
 * Response from vector similarity query
 * @property vectors - Array of similar vectors ordered by distance
 * @property distanceMetric - The distance metric used for the similarity search
 */
export interface QueryVectorsResponse {
  /** Array of similar vectors ordered by distance */
  vectors: VectorMatch[]
  /** The distance metric used for the similarity search */
  distanceMetric?: DistanceMetric
}
/**
 * Fetch-specific parameters like abort signals
 * @property signal - AbortSignal for cancelling requests
 */
export interface VectorFetchParameters {
  /** AbortSignal for cancelling requests */
  signal?: AbortSignal
}
/**
 * Standard response wrapper for successful operations
 * @property data - Response data of type T
 * @property error - Null on success
 */
export interface SuccessResponse<T> {
  /** Response data of type T */
  data: T
  /** Null on success */
  error: null
}
/**
 * Standard response wrapper for failed operations
 * @property data - Null on error
 * @property error - StorageError with details (named StorageVectorsError for vector operations)
 */
export interface ErrorResponse {
  /** Null on error */
  data: null
  /** StorageError with details (named StorageVectorsError for vector operations) */
  error: StorageError
}
/**
 * Union type for all API responses
 * Follows the pattern: { data: T, error: null } | { data: null, error: Error }
 */
export type ApiResponse<T> = SuccessResponse<T> | ErrorResponse

View File

@@ -0,0 +1,7 @@
// Generated automatically during releases by scripts/update-version-files.ts
// Do not edit by hand. This file provides runtime access to the package version for:
// - HTTP request headers (e.g., X-Client-Info header for API requests)
// - Debugging and support (identifying which version is running)
// - Telemetry and logging (version reporting in errors/analytics)
// - Ensuring build artifacts match the published package version
export const version = '2.99.1'

View File

@@ -0,0 +1,62 @@
import { isStorageError } from '../lib/common/errors'
import { DownloadResult } from '../lib/types'
import StreamDownloadBuilder from './StreamDownloadBuilder'
/**
 * Lazily-executed, thenable download builder that buffers the response body
 * into a Blob. The underlying request is issued at most once, on the first
 * `await`/`then`/`catch`/`finally`, and the result is memoized thereafter.
 */
export default class BlobDownloadBuilder implements Promise<DownloadResult<Blob>> {
  readonly [Symbol.toStringTag]: string = 'BlobDownloadBuilder'
  // Memoized result promise; null until the download has been triggered.
  private promise: Promise<DownloadResult<Blob>> | null = null
  constructor(
    private downloadFn: () => Promise<Response>,
    private shouldThrowOnError: boolean
  ) {}
  /**
   * Switch to a streaming download instead of buffering the body into a Blob.
   */
  asStream(): StreamDownloadBuilder {
    return new StreamDownloadBuilder(this.downloadFn, this.shouldThrowOnError)
  }
  /** Promise-protocol: trigger (or reuse) the download and chain handlers. */
  then<TResult1 = DownloadResult<Blob>, TResult2 = never>(
    onfulfilled?: ((value: DownloadResult<Blob>) => TResult1 | PromiseLike<TResult1>) | null,
    onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null
  ): Promise<TResult1 | TResult2> {
    const pending = this.getPromise()
    return pending.then(onfulfilled, onrejected)
  }
  /** Promise-protocol: attach a rejection handler. */
  catch<TResult = never>(
    onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null
  ): Promise<DownloadResult<Blob> | TResult> {
    const pending = this.getPromise()
    return pending.catch(onrejected)
  }
  /** Promise-protocol: attach a settlement handler. */
  finally(onfinally?: (() => void) | null): Promise<DownloadResult<Blob>> {
    const pending = this.getPromise()
    return pending.finally(onfinally)
  }
  /** Start the download on first use and cache the resulting promise. */
  private getPromise(): Promise<DownloadResult<Blob>> {
    this.promise ??= this.execute()
    return this.promise
  }
  /**
   * Perform the download and convert the body to a Blob.
   * StorageErrors are returned as `{ data: null, error }` unless
   * `shouldThrowOnError` is set; any other failure is always rethrown.
   */
  private async execute(): Promise<DownloadResult<Blob>> {
    try {
      const response = await this.downloadFn()
      const blob = await response.blob()
      return { data: blob, error: null }
    } catch (error) {
      if (!this.shouldThrowOnError && isStorageError(error)) {
        return { data: null, error }
      }
      throw error
    }
  }
}

View File

@@ -0,0 +1,394 @@
import { IcebergRestCatalog, IcebergError } from 'iceberg-js'
import { DEFAULT_HEADERS } from '../lib/constants'
import { StorageError } from '../lib/common/errors'
import { Fetch, get, post, remove } from '../lib/common/fetch'
import { isValidBucketName } from '../lib/common/helpers'
import BaseApiClient from '../lib/common/BaseApiClient'
import { AnalyticBucket } from '../lib/types'
/**
 * Rewrites an async method type `(...args) => Promise<R>` into one returning the
 * Supabase result union `{ data: R; error: null } | { data: null; error: IcebergError }`.
 * Non-function and non-async members pass through unchanged.
 */
type WrapAsyncMethod<T> = T extends (...args: infer A) => Promise<infer R>
  ? (...args: A) => Promise<{ data: R; error: null } | { data: null; error: IcebergError }>
  : T
/**
 * IcebergRestCatalog with every async method wrapped in the { data, error } pattern.
 */
export type WrappedIcebergRestCatalog = {
  [K in keyof IcebergRestCatalog]: WrapAsyncMethod<IcebergRestCatalog[K]>
}
/**
 * Client class for managing Analytics Buckets using Iceberg tables
 * Provides methods for creating, listing, and deleting analytics buckets
 */
export default class StorageAnalyticsClient extends BaseApiClient<StorageError> {
  /**
   * @alpha
   *
   * Creates a new StorageAnalyticsClient instance
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Analytics Buckets
   * @param url - The base URL for the storage API
   * @param headers - HTTP headers to include in requests
   * @param fetch - Optional custom fetch implementation
   *
   * @example
   * ```typescript
   * const client = new StorageAnalyticsClient(url, headers)
   * ```
   */
  constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {
    // Normalize: strip a single trailing slash and merge default headers.
    const finalUrl = url.replace(/\/$/, '')
    const finalHeaders = { ...DEFAULT_HEADERS, ...headers }
    super(finalUrl, finalHeaders, fetch, 'storage')
  }
  /**
   * @alpha
   *
   * Creates a new analytics bucket using Iceberg tables
   * Analytics buckets are optimized for analytical queries and data processing
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Analytics Buckets
   * @param name A unique name for the bucket you are creating
   * @returns Promise with response containing newly created analytics bucket or error
   *
   * @example Create analytics bucket
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .analytics
   *   .createBucket('analytics-data')
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": {
   *     "name": "analytics-data",
   *     "type": "ANALYTICS",
   *     "format": "iceberg",
   *     "created_at": "2024-05-22T22:26:05.100Z",
   *     "updated_at": "2024-05-22T22:26:05.100Z"
   *   },
   *   "error": null
   * }
   * ```
   */
  async createBucket(name: string): Promise<
    | {
        data: AnalyticBucket
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      return await post(this.fetch, `${this.url}/bucket`, { name }, { headers: this.headers })
    })
  }
  /**
   * @alpha
   *
   * Retrieves the details of all Analytics Storage buckets within an existing project
   * Only returns buckets of type 'ANALYTICS'
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Analytics Buckets
   * @param options Query parameters for listing buckets
   * @param options.limit Maximum number of buckets to return
   * @param options.offset Number of buckets to skip
   * @param options.sortColumn Column to sort by ('name', 'created_at', 'updated_at')
   * @param options.sortOrder Sort order ('asc' or 'desc')
   * @param options.search Search term to filter bucket names
   * @returns Promise with response containing array of analytics buckets or error
   *
   * @example List analytics buckets
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .analytics
   *   .listBuckets({
   *     limit: 10,
   *     offset: 0,
   *     sortColumn: 'created_at',
   *     sortOrder: 'desc'
   *   })
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": [
   *     {
   *       "name": "analytics-data",
   *       "type": "ANALYTICS",
   *       "format": "iceberg",
   *       "created_at": "2024-05-22T22:26:05.100Z",
   *       "updated_at": "2024-05-22T22:26:05.100Z"
   *     }
   *   ],
   *   "error": null
   * }
   * ```
   */
  async listBuckets(options?: {
    limit?: number
    offset?: number
    sortColumn?: 'name' | 'created_at' | 'updated_at'
    sortOrder?: 'asc' | 'desc'
    search?: string
  }): Promise<
    | {
        data: AnalyticBucket[]
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      // Build query string from options; numeric options are checked against
      // undefined (not truthiness) so that limit/offset of 0 are still sent.
      const queryParams = new URLSearchParams()
      if (options?.limit !== undefined) queryParams.set('limit', options.limit.toString())
      if (options?.offset !== undefined) queryParams.set('offset', options.offset.toString())
      if (options?.sortColumn) queryParams.set('sortColumn', options.sortColumn)
      if (options?.sortOrder) queryParams.set('sortOrder', options.sortOrder)
      if (options?.search) queryParams.set('search', options.search)
      const queryString = queryParams.toString()
      const url = queryString ? `${this.url}/bucket?${queryString}` : `${this.url}/bucket`
      return await get(this.fetch, url, { headers: this.headers })
    })
  }
  /**
   * @alpha
   *
   * Deletes an existing analytics bucket
   * A bucket can't be deleted with existing objects inside it
   * You must first empty the bucket before deletion
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Analytics Buckets
   * @param bucketName The unique identifier of the bucket you would like to delete
   * @returns Promise with response containing success message or error
   *
   * @example Delete analytics bucket
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .analytics
   *   .deleteBucket('analytics-data')
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": {
   *     "message": "Successfully deleted"
   *   },
   *   "error": null
   * }
   * ```
   */
  async deleteBucket(bucketName: string): Promise<
    | {
        data: { message: string }
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      return await remove(
        this.fetch,
        `${this.url}/bucket/${bucketName}`,
        {},
        { headers: this.headers }
      )
    })
  }
  /**
   * @alpha
   *
   * Get an Iceberg REST Catalog client configured for a specific analytics bucket
   * Use this to perform advanced table and namespace operations within the bucket
   * The returned client provides full access to the Apache Iceberg REST Catalog API
   * with the Supabase `{ data, error }` pattern for consistent error handling on all operations.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Analytics Buckets
   * @param bucketName - The name of the analytics bucket (warehouse) to connect to
   * @returns The wrapped Iceberg catalog client
   * @throws {StorageError} If the bucket name is invalid
   *
   * @example Get catalog and create table
   * ```js
   * // First, create an analytics bucket
   * const { data: bucket, error: bucketError } = await supabase
   *   .storage
   *   .analytics
   *   .createBucket('analytics-data')
   *
   * // Get the Iceberg catalog for that bucket
   * const catalog = supabase.storage.analytics.from('analytics-data')
   *
   * // Create a namespace
   * const { error: nsError } = await catalog.createNamespace({ namespace: ['default'] })
   *
   * // Create a table with schema
   * const { data: tableMetadata, error: tableError } = await catalog.createTable(
   *   { namespace: ['default'] },
   *   {
   *     name: 'events',
   *     schema: {
   *       type: 'struct',
   *       fields: [
   *         { id: 1, name: 'id', type: 'long', required: true },
   *         { id: 2, name: 'timestamp', type: 'timestamp', required: true },
   *         { id: 3, name: 'user_id', type: 'string', required: false }
   *       ],
   *       'schema-id': 0,
   *       'identifier-field-ids': [1]
   *     },
   *     'partition-spec': {
   *       'spec-id': 0,
   *       fields: []
   *     },
   *     'write-order': {
   *       'order-id': 0,
   *       fields: []
   *     },
   *     properties: {
   *       'write.format.default': 'parquet'
   *     }
   *   }
   * )
   * ```
   *
   * @example List tables in namespace
   * ```js
   * const catalog = supabase.storage.analytics.from('analytics-data')
   *
   * // List all tables in the default namespace
   * const { data: tables, error: listError } = await catalog.listTables({ namespace: ['default'] })
   * if (listError) {
   *   if (listError.isNotFound()) {
   *     console.log('Namespace not found')
   *   }
   *   return
   * }
   * console.log(tables) // [{ namespace: ['default'], name: 'events' }]
   * ```
   *
   * @example Working with namespaces
   * ```js
   * const catalog = supabase.storage.analytics.from('analytics-data')
   *
   * // List all namespaces
   * const { data: namespaces } = await catalog.listNamespaces()
   *
   * // Create namespace with properties
   * await catalog.createNamespace(
   *   { namespace: ['production'] },
   *   { properties: { owner: 'data-team', env: 'prod' } }
   * )
   * ```
   *
   * @example Cleanup operations
   * ```js
   * const catalog = supabase.storage.analytics.from('analytics-data')
   *
   * // Drop table with purge option (removes all data)
   * const { error: dropError } = await catalog.dropTable(
   *   { namespace: ['default'], name: 'events' },
   *   { purge: true }
   * )
   *
   * if (dropError?.isNotFound()) {
   *   console.log('Table does not exist')
   * }
   *
   * // Drop namespace (must be empty)
   * await catalog.dropNamespace({ namespace: ['default'] })
   * ```
   *
   * @remarks
   * This method provides a bridge between Supabase's bucket management and the standard
   * Apache Iceberg REST Catalog API. The bucket name maps to the Iceberg warehouse parameter.
   * All authentication and configuration is handled automatically using your Supabase credentials.
   *
   * **Error Handling**: Invalid bucket names throw immediately. All catalog
   * operations return `{ data, error }` where errors are `IcebergError` instances from iceberg-js.
   * Use helper methods like `error.isNotFound()` or check `error.status` for specific error handling.
   * Use `.throwOnError()` on the analytics client if you prefer exceptions for catalog operations.
   *
   * **Cleanup Operations**: When using `dropTable`, the `purge: true` option permanently
   * deletes all table data. Without it, the table is marked as deleted but data remains.
   *
   * **Library Dependency**: The returned catalog wraps `IcebergRestCatalog` from iceberg-js.
   * For complete API documentation and advanced usage, refer to the
   * [iceberg-js documentation](https://supabase.github.io/iceberg-js/).
   */
  from(bucketName: string): WrappedIcebergRestCatalog {
    // Validate bucket name using same rules as Supabase Storage API backend
    if (!isValidBucketName(bucketName)) {
      throw new StorageError(
        'Invalid bucket name: File, folder, and bucket names must follow AWS object key naming guidelines ' +
          'and should avoid the use of any other characters.'
      )
    }
    // Construct the Iceberg REST Catalog URL
    // The base URL is /storage/v1/iceberg
    // Note: IcebergRestCatalog from iceberg-js automatically adds /v1/ prefix to API paths
    // so we should NOT append /v1 here (it would cause double /v1/v1/ in the URL)
    const catalog = new IcebergRestCatalog({
      baseUrl: this.url,
      catalogName: bucketName, // Maps to the warehouse parameter in Supabase's implementation
      auth: {
        type: 'custom',
        // Headers are resolved lazily per request so later header updates are picked up.
        getHeaders: async () => this.headers,
      },
      fetch: this.fetch,
    })
    // Capture the flag now: the Proxy handler below must not rely on `this`.
    const shouldThrowOnError = this.shouldThrowOnError
    const wrappedCatalog = new Proxy(catalog, {
      get(target, prop: keyof IcebergRestCatalog) {
        const value = target[prop]
        // Non-function properties (fields, getters) pass through untouched.
        if (typeof value !== 'function') {
          return value
        }
        // Wrap each method so failures surface as { data: null, error }
        // instead of a rejected promise, unless throwOnError is enabled.
        return async (...args: unknown[]) => {
          try {
            const data = await (value as Function).apply(target, args)
            return { data, error: null }
          } catch (error) {
            if (shouldThrowOnError) {
              throw error
            }
            return { data: null, error: error as IcebergError }
          }
        }
      },
    }) as unknown as WrappedIcebergRestCatalog
    return wrappedCatalog
  }
}

View File

@@ -0,0 +1,372 @@
import { DEFAULT_HEADERS } from '../lib/constants'
import { StorageError } from '../lib/common/errors'
import { Fetch, get, post, put, remove } from '../lib/common/fetch'
import BaseApiClient from '../lib/common/BaseApiClient'
import { Bucket, BucketType, ListBucketOptions } from '../lib/types'
import { StorageClientOptions } from '../StorageClient'
/**
 * API client for managing Storage buckets: list, get, create, update,
 * empty, and delete operations against the `/bucket` endpoints.
 */
export default class StorageBucketApi extends BaseApiClient<StorageError> {
  constructor(
    url: string,
    headers: { [key: string]: string } = {},
    fetch?: Fetch,
    opts?: StorageClientOptions
  ) {
    const baseUrl = new URL(url)
    // if legacy uri is used, replace with new storage host (disables request buffering to allow > 50GB uploads)
    // "project-ref.supabase.co" becomes "project-ref.storage.supabase.co"
    if (opts?.useNewHostname) {
      const isSupabaseHost = /supabase\.(co|in|red)$/.test(baseUrl.hostname)
      if (isSupabaseHost && !baseUrl.hostname.includes('storage.supabase.')) {
        baseUrl.hostname = baseUrl.hostname.replace('supabase.', 'storage.supabase.')
      }
    }
    const finalUrl = baseUrl.href.replace(/\/$/, '')
    const finalHeaders = { ...DEFAULT_HEADERS, ...headers }
    super(finalUrl, finalHeaders, fetch, 'storage')
  }
  /**
   * Retrieves the details of all Storage buckets within an existing project.
   *
   * @category File Buckets
   * @param options Query parameters for listing buckets
   * @param options.limit Maximum number of buckets to return
   * @param options.offset Number of buckets to skip
   * @param options.sortColumn Column to sort by ('id', 'name', 'created_at', 'updated_at')
   * @param options.sortOrder Sort order ('asc' or 'desc')
   * @param options.search Search term to filter bucket names
   * @returns Promise with response containing array of buckets or error
   *
   * @example List buckets
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .listBuckets()
   * ```
   *
   * @example List buckets with options
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .listBuckets({
   *     limit: 10,
   *     offset: 0,
   *     sortColumn: 'created_at',
   *     sortOrder: 'desc',
   *     search: 'prod'
   *   })
   * ```
   */
  async listBuckets(options?: ListBucketOptions): Promise<
    | {
        data: Bucket[]
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      const queryString = this.listBucketOptionsToQueryString(options)
      return await get(this.fetch, `${this.url}/bucket${queryString}`, {
        headers: this.headers,
      })
    })
  }
  /**
   * Retrieves the details of an existing Storage bucket.
   *
   * @category File Buckets
   * @param id The unique identifier of the bucket you would like to retrieve.
   * @returns Promise with response containing bucket details or error
   *
   * @example Get bucket
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .getBucket('avatars')
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": {
   *     "id": "avatars",
   *     "name": "avatars",
   *     "owner": "",
   *     "public": false,
   *     "file_size_limit": 1024,
   *     "allowed_mime_types": [
   *       "image/png"
   *     ],
   *     "created_at": "2024-05-22T22:26:05.100Z",
   *     "updated_at": "2024-05-22T22:26:05.100Z"
   *   },
   *   "error": null
   * }
   * ```
   */
  async getBucket(id: string): Promise<
    | {
        data: Bucket
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      return await get(this.fetch, `${this.url}/bucket/${id}`, { headers: this.headers })
    })
  }
  /**
   * Creates a new Storage bucket
   *
   * @category File Buckets
   * @param id A unique identifier for the bucket you are creating.
   * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations. By default, buckets are private.
   * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.
   * The global file size limit takes precedence over this value.
   * The default value is null, which doesn't set a per bucket file size limit.
   * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.
   * The default value is null, which allows files with all mime types to be uploaded.
   * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.
   * @param options.type (private-beta) specifies the bucket type. see `BucketType` for more details.
   *   - default bucket type is `STANDARD`
   * @returns Promise with response containing newly created bucket name or error
   *
   * @example Create bucket
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .createBucket('avatars', {
   *     public: false,
   *     allowedMimeTypes: ['image/png'],
   *     fileSizeLimit: 1024
   *   })
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": {
   *     "name": "avatars"
   *   },
   *   "error": null
   * }
   * ```
   */
  async createBucket(
    id: string,
    options: {
      public: boolean
      fileSizeLimit?: number | string | null
      allowedMimeTypes?: string[] | null
      type?: BucketType
    } = {
      public: false,
    }
  ): Promise<
    | {
        data: Pick<Bucket, 'name'>
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      return await post(
        this.fetch,
        `${this.url}/bucket`,
        {
          id,
          name: id,
          type: options.type,
          public: options.public,
          file_size_limit: options.fileSizeLimit,
          allowed_mime_types: options.allowedMimeTypes,
        },
        { headers: this.headers }
      )
    })
  }
  /**
   * Updates a Storage bucket
   *
   * @category File Buckets
   * @param id A unique identifier for the bucket you are updating.
   * @param options.public The visibility of the bucket. Public buckets don't require an authorization token to download objects, but still require a valid token for all other operations.
   * @param options.fileSizeLimit specifies the max file size in bytes that can be uploaded to this bucket.
   * The global file size limit takes precedence over this value.
   * The default value is null, which doesn't set a per bucket file size limit.
   * @param options.allowedMimeTypes specifies the allowed mime types that this bucket can accept during upload.
   * The default value is null, which allows files with all mime types to be uploaded.
   * Each mime type specified can be a wildcard, e.g. image/*, or a specific mime type, e.g. image/png.
   * @returns Promise with response containing success message or error
   *
   * @example Update bucket
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .updateBucket('avatars', {
   *     public: false,
   *     allowedMimeTypes: ['image/png'],
   *     fileSizeLimit: 1024
   *   })
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": {
   *     "message": "Successfully updated"
   *   },
   *   "error": null
   * }
   * ```
   */
  async updateBucket(
    id: string,
    options: {
      public: boolean
      fileSizeLimit?: number | string | null
      allowedMimeTypes?: string[] | null
    }
  ): Promise<
    | {
        data: { message: string }
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      return await put(
        this.fetch,
        `${this.url}/bucket/${id}`,
        {
          id,
          name: id,
          public: options.public,
          file_size_limit: options.fileSizeLimit,
          allowed_mime_types: options.allowedMimeTypes,
        },
        { headers: this.headers }
      )
    })
  }
  /**
   * Removes all objects inside a single bucket.
   *
   * @category File Buckets
   * @param id The unique identifier of the bucket you would like to empty.
   * @returns Promise with success message or error
   *
   * @example Empty bucket
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .emptyBucket('avatars')
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": {
   *     "message": "Successfully emptied"
   *   },
   *   "error": null
   * }
   * ```
   */
  async emptyBucket(id: string): Promise<
    | {
        data: { message: string }
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      return await post(this.fetch, `${this.url}/bucket/${id}/empty`, {}, { headers: this.headers })
    })
  }
  /**
   * Deletes an existing bucket. A bucket can't be deleted with existing objects inside it.
   * You must first `empty()` the bucket.
   *
   * @category File Buckets
   * @param id The unique identifier of the bucket you would like to delete.
   * @returns Promise with success message or error
   *
   * @example Delete bucket
   * ```js
   * const { data, error } = await supabase
   *   .storage
   *   .deleteBucket('avatars')
   * ```
   *
   * Response:
   * ```json
   * {
   *   "data": {
   *     "message": "Successfully deleted"
   *   },
   *   "error": null
   * }
   * ```
   */
  async deleteBucket(id: string): Promise<
    | {
        data: { message: string }
        error: null
      }
    | {
        data: null
        error: StorageError
      }
  > {
    return this.handleOperation(async () => {
      return await remove(this.fetch, `${this.url}/bucket/${id}`, {}, { headers: this.headers })
    })
  }
  /**
   * Serializes ListBucketOptions into a `?key=value` query string
   * (empty string when no options are set).
   */
  private listBucketOptionsToQueryString(options?: ListBucketOptions): string {
    const params: Record<string, string> = {}
    if (options) {
      // Check against undefined rather than key presence: `'limit' in options`
      // would serialize `{ limit: undefined }` as the literal string "undefined".
      // An explicit !== undefined check also keeps 0 as a valid value, and
      // matches the option handling in StorageAnalyticsClient.listBuckets.
      if (options.limit !== undefined) {
        params.limit = String(options.limit)
      }
      if (options.offset !== undefined) {
        params.offset = String(options.offset)
      }
      if (options.search) {
        params.search = options.search
      }
      if (options.sortColumn) {
        params.sortColumn = options.sortColumn
      }
      if (options.sortOrder) {
        params.sortOrder = options.sortOrder
      }
    }
    return Object.keys(params).length > 0 ? '?' + new URLSearchParams(params).toString() : ''
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,616 @@
import VectorIndexApi, { CreateIndexOptions } from './VectorIndexApi'
import VectorDataApi from './VectorDataApi'
import { Fetch } from '../lib/common/fetch'
import VectorBucketApi from './VectorBucketApi'
import {
ApiResponse,
DeleteVectorsOptions,
GetVectorsOptions,
ListIndexesOptions,
ListVectorsOptions,
ListVectorBucketsOptions,
ListVectorBucketsResponse,
PutVectorsOptions,
QueryVectorsOptions,
VectorBucket,
} from '../lib/types'
/**
 *
 * @alpha
 *
 * Configuration options for the Storage Vectors client
 *
 * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
 */
export interface StorageVectorsClientOptions {
  /**
   * Custom headers to include in all requests
   * (e.g. `Authorization`); merged into every outgoing request.
   */
  headers?: { [key: string]: string }
  /**
   * Custom fetch implementation (optional)
   * Useful for testing or custom request handling
   * in non-browser runtimes.
   */
  fetch?: Fetch
}
/**
*
* @alpha
*
* Main client for interacting with S3 Vectors API
* Provides access to bucket, index, and vector data operations
*
* **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
*
* **Usage Patterns:**
*
* ```typescript
* const { data, error } = await supabase
* .storage
* .vectors
* .createBucket('embeddings-prod')
*
* // Access index operations via buckets
* const bucket = supabase.storage.vectors.from('embeddings-prod')
* await bucket.createIndex({
* indexName: 'documents',
* dataType: 'float32',
* dimension: 1536,
* distanceMetric: 'cosine'
* })
*
* // Access vector operations via index
* const index = bucket.index('documents')
* await index.putVectors({
* vectors: [
* { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }
* ]
* })
*
* // Query similar vectors
* const { data } = await index.queryVectors({
* queryVector: { float32: [...] },
* topK: 5,
* returnDistance: true
* })
* ```
*/
export class StorageVectorsClient extends VectorBucketApi {
/**
* @alpha
*
* Creates a StorageVectorsClient that can manage buckets, indexes, and vectors.
*
* **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
*
* @category Vector Buckets
* @param url - Base URL of the Storage Vectors REST API.
* @param options.headers - Optional headers (for example `Authorization`) applied to every request.
* @param options.fetch - Optional custom `fetch` implementation for non-browser runtimes.
*
* @example
* ```typescript
* const client = new StorageVectorsClient(url, options)
* ```
*/
constructor(url: string, options: StorageVectorsClientOptions = {}) {
super(url, options.headers || {}, options.fetch)
}
/**
*
* @alpha
*
* Access operations for a specific vector bucket
* Returns a scoped client for index and vector operations within the bucket
*
* **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
*
* @category Vector Buckets
* @param vectorBucketName - Name of the vector bucket
* @returns Bucket-scoped client with index and vector operations
*
* @example
* ```typescript
* const bucket = supabase.storage.vectors.from('embeddings-prod')
* ```
*/
from(vectorBucketName: string): VectorBucketScope {
return new VectorBucketScope(this.url, this.headers, vectorBucketName, this.fetch)
}
/**
*
* @alpha
*
* Creates a new vector bucket
* Vector buckets are containers for vector indexes and their data
*
* **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
*
* @category Vector Buckets
* @param vectorBucketName - Unique name for the vector bucket
* @returns Promise with empty response on success or error
*
* @example
* ```typescript
* const { data, error } = await supabase
* .storage
* .vectors
* .createBucket('embeddings-prod')
* ```
*/
async createBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {
return super.createBucket(vectorBucketName)
}
/**
*
* @alpha
*
* Retrieves metadata for a specific vector bucket
*
* **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
*
* @category Vector Buckets
* @param vectorBucketName - Name of the vector bucket
* @returns Promise with bucket metadata or error
*
* @example
* ```typescript
* const { data, error } = await supabase
* .storage
* .vectors
* .getBucket('embeddings-prod')
*
* console.log('Bucket created:', data?.vectorBucket.creationTime)
* ```
*/
async getBucket(vectorBucketName: string): Promise<ApiResponse<{ vectorBucket: VectorBucket }>> {
return super.getBucket(vectorBucketName)
}
/**
*
* @alpha
*
* Lists all vector buckets with optional filtering and pagination
*
* **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
*
* @category Vector Buckets
* @param options - Optional filters (prefix, maxResults, nextToken)
* @returns Promise with list of buckets or error
*
* @example
* ```typescript
* const { data, error } = await supabase
* .storage
* .vectors
* .listBuckets({ prefix: 'embeddings-' })
*
* data?.vectorBuckets.forEach(bucket => {
* console.log(bucket.vectorBucketName)
* })
* ```
*/
async listBuckets(
options: ListVectorBucketsOptions = {}
): Promise<ApiResponse<ListVectorBucketsResponse>> {
return super.listBuckets(options)
}
/**
*
* @alpha
*
* Deletes a vector bucket (bucket must be empty)
* All indexes must be deleted before deleting the bucket
*
* **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
*
* @category Vector Buckets
* @param vectorBucketName - Name of the vector bucket to delete
* @returns Promise with empty response on success or error
*
* @example
* ```typescript
* const { data, error } = await supabase
* .storage
* .vectors
* .deleteBucket('embeddings-old')
* ```
*/
async deleteBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {
return super.deleteBucket(vectorBucketName)
}
}
/**
 *
 * @alpha
 *
 * Bucket-scoped client: every index operation issued through this class is
 * automatically bound to a single vector bucket. Also hands out
 * index-scoped clients for vector data operations.
 *
 * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
 */
export class VectorBucketScope extends VectorIndexApi {
  /**
   * @alpha
   *
   * Builds a helper bound to `vectorBucketName`; normally obtained via
   * `supabase.storage.vectors.from('bucket')` rather than constructed directly.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @example
   * ```typescript
   * const bucket = supabase.storage.vectors.from('embeddings-prod')
   * ```
   */
  constructor(
    url: string,
    headers: { [key: string]: string },
    private readonly vectorBucketName: string,
    fetch?: Fetch
  ) {
    super(url, headers, fetch)
  }
  /**
   *
   * @alpha
   *
   * Creates a new vector index in this bucket; the bucket name is filled in
   * automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param options - Index configuration (vectorBucketName is automatically set)
   * @returns Promise with empty response on success or error
   *
   * @example
   * ```typescript
   * const bucket = supabase.storage.vectors.from('embeddings-prod')
   * await bucket.createIndex({
   *   indexName: 'documents-openai',
   *   dataType: 'float32',
   *   dimension: 1536,
   *   distanceMetric: 'cosine',
   *   metadataConfiguration: {
   *     nonFilterableMetadataKeys: ['raw_text']
   *   }
   * })
   * ```
   */
  override async createIndex(options: Omit<CreateIndexOptions, 'vectorBucketName'>) {
    const scopedOptions: CreateIndexOptions = {
      ...options,
      vectorBucketName: this.vectorBucketName,
    }
    return super.createIndex(scopedOptions)
  }
  /**
   *
   * @alpha
   *
   * Lists indexes in this bucket; the bucket name is filled in automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param options - Listing options (vectorBucketName is automatically set)
   * @returns Promise with response containing indexes array and pagination token or error
   *
   * @example
   * ```typescript
   * const bucket = supabase.storage.vectors.from('embeddings-prod')
   * const { data } = await bucket.listIndexes({ prefix: 'documents-' })
   * ```
   */
  override async listIndexes(options: Omit<ListIndexesOptions, 'vectorBucketName'> = {}) {
    const scopedOptions: ListIndexesOptions = {
      ...options,
      vectorBucketName: this.vectorBucketName,
    }
    return super.listIndexes(scopedOptions)
  }
  /**
   *
   * @alpha
   *
   * Retrieves metadata for a specific index in this bucket; the bucket name is
   * filled in automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param indexName - Name of the index to retrieve
   * @returns Promise with index metadata or error
   *
   * @example
   * ```typescript
   * const bucket = supabase.storage.vectors.from('embeddings-prod')
   * const { data } = await bucket.getIndex('documents-openai')
   * console.log('Dimension:', data?.index.dimension)
   * ```
   */
  override async getIndex(indexName: string) {
    return super.getIndex(this.vectorBucketName, indexName)
  }
  /**
   *
   * @alpha
   *
   * Deletes an index from this bucket; the bucket name is filled in
   * automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param indexName - Name of the index to delete
   * @returns Promise with empty response on success or error
   *
   * @example
   * ```typescript
   * const bucket = supabase.storage.vectors.from('embeddings-prod')
   * await bucket.deleteIndex('old-index')
   * ```
   */
  override async deleteIndex(indexName: string) {
    return super.deleteIndex(this.vectorBucketName, indexName)
  }
  /**
   *
   * @alpha
   *
   * Access operations for a specific index within this bucket.
   * Returns a scoped client for vector data operations (put, get, list,
   * query, delete).
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param indexName - Name of the index
   * @returns Index-scoped client with vector data operations
   *
   * @example
   * ```typescript
   * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')
   *
   * // Insert vectors
   * await index.putVectors({
   *   vectors: [
   *     { key: 'doc-1', data: { float32: [...] }, metadata: { title: 'Intro' } }
   *   ]
   * })
   *
   * // Query similar vectors
   * const { data } = await index.queryVectors({
   *   queryVector: { float32: [...] },
   *   topK: 5
   * })
   * ```
   */
  index(indexName: string): VectorIndexScope {
    return new VectorIndexScope(
      this.url,
      this.headers,
      this.vectorBucketName,
      indexName,
      this.fetch
    )
  }
}
/**
 *
 * @alpha
 *
 * Index-scoped client: every vector data operation (put, get, list, query,
 * delete) issued through this class is automatically bound to one bucket and
 * one index.
 *
 * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
 */
export class VectorIndexScope extends VectorDataApi {
  /**
   *
   * @alpha
   *
   * Builds a helper bound to the given bucket/index pair; normally obtained via
   * `supabase.storage.vectors.from('bucket').index('idx')` rather than
   * constructed directly.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @example
   * ```typescript
   * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')
   * ```
   */
  constructor(
    url: string,
    headers: { [key: string]: string },
    private readonly vectorBucketName: string,
    private readonly indexName: string,
    fetch?: Fetch
  ) {
    super(url, headers, fetch)
  }
  /** Merges the bound bucket/index names into a caller-supplied options object. */
  private scoped<T extends object>(
    options: T
  ): T & { vectorBucketName: string; indexName: string } {
    return {
      ...options,
      vectorBucketName: this.vectorBucketName,
      indexName: this.indexName,
    }
  }
  /**
   *
   * @alpha
   *
   * Inserts or updates vectors in this index; bucket and index names are
   * filled in automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param options - Vector insertion options (bucket and index names automatically set)
   * @returns Promise with empty response on success or error
   *
   * @example
   * ```typescript
   * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')
   * await index.putVectors({
   *   vectors: [
   *     {
   *       key: 'doc-1',
   *       data: { float32: [0.1, 0.2, ...] },
   *       metadata: { title: 'Introduction', page: 1 }
   *     }
   *   ]
   * })
   * ```
   */
  override async putVectors(options: Omit<PutVectorsOptions, 'vectorBucketName' | 'indexName'>) {
    return super.putVectors(this.scoped(options))
  }
  /**
   *
   * @alpha
   *
   * Retrieves vectors by keys from this index; bucket and index names are
   * filled in automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param options - Vector retrieval options (bucket and index names automatically set)
   * @returns Promise with response containing vectors array or error
   *
   * @example
   * ```typescript
   * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')
   * const { data } = await index.getVectors({
   *   keys: ['doc-1', 'doc-2'],
   *   returnMetadata: true
   * })
   * ```
   */
  override async getVectors(options: Omit<GetVectorsOptions, 'vectorBucketName' | 'indexName'>) {
    return super.getVectors(this.scoped(options))
  }
  /**
   *
   * @alpha
   *
   * Lists vectors in this index with pagination; bucket and index names are
   * filled in automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param options - Listing options (bucket and index names automatically set)
   * @returns Promise with response containing vectors array and pagination token or error
   *
   * @example
   * ```typescript
   * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')
   * const { data } = await index.listVectors({
   *   maxResults: 500,
   *   returnMetadata: true
   * })
   * ```
   */
  override async listVectors(
    options: Omit<ListVectorsOptions, 'vectorBucketName' | 'indexName'> = {}
  ) {
    return super.listVectors(this.scoped(options))
  }
  /**
   *
   * @alpha
   *
   * Queries for similar vectors in this index; bucket and index names are
   * filled in automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param options - Query options (bucket and index names automatically set)
   * @returns Promise with response containing matches array of similar vectors ordered by distance or error
   *
   * @example
   * ```typescript
   * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')
   * const { data } = await index.queryVectors({
   *   queryVector: { float32: [0.1, 0.2, ...] },
   *   topK: 5,
   *   filter: { category: 'technical' },
   *   returnDistance: true,
   *   returnMetadata: true
   * })
   * ```
   */
  override async queryVectors(
    options: Omit<QueryVectorsOptions, 'vectorBucketName' | 'indexName'>
  ) {
    return super.queryVectors(this.scoped(options))
  }
  /**
   *
   * @alpha
   *
   * Deletes vectors by keys from this index; bucket and index names are
   * filled in automatically.
   *
   * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
   *
   * @category Vector Buckets
   * @param options - Deletion options (bucket and index names automatically set)
   * @returns Promise with empty response on success or error
   *
   * @example
   * ```typescript
   * const index = supabase.storage.vectors.from('embeddings-prod').index('documents-openai')
   * await index.deleteVectors({
   *   keys: ['doc-1', 'doc-2', 'doc-3']
   * })
   * ```
   */
  override async deleteVectors(
    options: Omit<DeleteVectorsOptions, 'vectorBucketName' | 'indexName'>
  ) {
    return super.deleteVectors(this.scoped(options))
  }
}

View File

@@ -0,0 +1,39 @@
import { isStorageError } from '../lib/common/errors'
import { DownloadResult } from '../lib/types'
/**
 * Thenable builder that performs a deferred download and exposes the raw
 * response body as a `ReadableStream`, wrapped in the `{ data, error }`
 * result shape. Because it implements `PromiseLike`, callers can simply
 * `await` the builder.
 */
export default class StreamDownloadBuilder implements PromiseLike<DownloadResult<ReadableStream>> {
  constructor(
    // Deferred download; not invoked until the builder is awaited/then'd.
    private downloadFn: () => Promise<Response>,
    // When true, failures are re-thrown instead of returned as `{ error }`.
    private shouldThrowOnError: boolean
  ) {}
  // PromiseLike hook: triggers the download when awaited.
  // NOTE(review): every `then` call invokes `execute()` afresh, so awaiting the
  // same builder twice re-runs the download — confirm this is intended.
  then<TResult1 = DownloadResult<ReadableStream>, TResult2 = never>(
    onfulfilled?:
      | ((value: DownloadResult<ReadableStream>) => TResult1 | PromiseLike<TResult1>)
      | null,
    onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | null
  ): Promise<TResult1 | TResult2> {
    return this.execute().then(onfulfilled, onrejected)
  }
  /**
   * Runs the download and maps the outcome to `{ data, error }`:
   * storage errors become `{ data: null, error }` (unless configured to
   * throw); any other failure is re-thrown unchanged.
   */
  private async execute(): Promise<DownloadResult<ReadableStream>> {
    try {
      const result = await this.downloadFn()
      return {
        // NOTE(review): `Response.body` can be null (e.g. bodiless responses);
        // the cast would then yield `data: null` alongside `error: null` —
        // confirm callers tolerate that.
        data: result.body as ReadableStream,
        error: null,
      }
    } catch (error) {
      if (this.shouldThrowOnError) {
        throw error
      }
      if (isStorageError(error)) {
        return { data: null, error }
      }
      // Non-storage errors always propagate, regardless of configuration.
      throw error
    }
  }
}

View File

@@ -0,0 +1,73 @@
import { DEFAULT_HEADERS } from '../lib/constants'
import { StorageError } from '../lib/common/errors'
import { Fetch, vectorsApi } from '../lib/common/fetch'
import BaseApiClient from '../lib/common/BaseApiClient'
import {
ApiResponse,
VectorBucket,
ListVectorBucketsOptions,
ListVectorBucketsResponse,
} from '../lib/types'
/**
 * @hidden
 * Low-level client for vector bucket CRUD against the S3 Vectors API.
 * Use {@link StorageVectorsClient} via `supabase.storage.vectors` instead.
 */
export default class VectorBucketApi extends BaseApiClient<StorageError> {
  /** Builds the client with a normalized base URL and JSON default headers. */
  constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {
    super(
      url.replace(/\/$/, ''),
      { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers },
      fetch,
      'vectors'
    )
  }
  /** Creates a new vector bucket */
  async createBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {
    return this.handleOperation(async () => {
      const response = await vectorsApi.post(
        this.fetch,
        `${this.url}/CreateVectorBucket`,
        { vectorBucketName },
        { headers: this.headers }
      )
      // The endpoint may return no body on success.
      return response || {}
    })
  }
  /** Retrieves metadata for a specific vector bucket */
  async getBucket(vectorBucketName: string): Promise<ApiResponse<{ vectorBucket: VectorBucket }>> {
    return this.handleOperation(() =>
      vectorsApi.post(
        this.fetch,
        `${this.url}/GetVectorBucket`,
        { vectorBucketName },
        { headers: this.headers }
      )
    )
  }
  /** Lists vector buckets with optional filtering and pagination */
  async listBuckets(
    options: ListVectorBucketsOptions = {}
  ): Promise<ApiResponse<ListVectorBucketsResponse>> {
    return this.handleOperation(() =>
      vectorsApi.post(this.fetch, `${this.url}/ListVectorBuckets`, options, {
        headers: this.headers,
      })
    )
  }
  /** Deletes a vector bucket (must be empty first) */
  async deleteBucket(vectorBucketName: string): Promise<ApiResponse<undefined>> {
    return this.handleOperation(async () => {
      const response = await vectorsApi.post(
        this.fetch,
        `${this.url}/DeleteVectorBucket`,
        { vectorBucketName },
        { headers: this.headers }
      )
      // The endpoint may return no body on success.
      return response || {}
    })
  }
}

View File

@@ -0,0 +1,98 @@
import { DEFAULT_HEADERS } from '../lib/constants'
import { StorageError } from '../lib/common/errors'
import { Fetch, vectorsApi } from '../lib/common/fetch'
import BaseApiClient from '../lib/common/BaseApiClient'
import {
ApiResponse,
PutVectorsOptions,
GetVectorsOptions,
GetVectorsResponse,
DeleteVectorsOptions,
ListVectorsOptions,
ListVectorsResponse,
QueryVectorsOptions,
QueryVectorsResponse,
} from '../lib/types'
/**
 * @hidden
 * Low-level client for vector data operations (put/get/list/query/delete).
 * Use {@link VectorIndexScope} via `supabase.storage.vectors.from('bucket').index('idx')` instead.
 */
export default class VectorDataApi extends BaseApiClient<StorageError> {
  /** Builds the client with a normalized base URL and JSON default headers. */
  constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {
    super(
      url.replace(/\/$/, ''),
      { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers },
      fetch,
      'vectors'
    )
  }
  /** Inserts or updates vectors in batch (1-500 per request) */
  async putVectors(options: PutVectorsOptions): Promise<ApiResponse<undefined>> {
    // Client-side guard mirroring the server's batch limits.
    const batchSize = options.vectors.length
    if (batchSize < 1 || batchSize > 500) {
      throw new Error('Vector batch size must be between 1 and 500 items')
    }
    return this.handleOperation(async () => {
      const response = await vectorsApi.post(this.fetch, `${this.url}/PutVectors`, options, {
        headers: this.headers,
      })
      // The endpoint may return no body on success.
      return response || {}
    })
  }
  /** Retrieves vectors by their keys in batch */
  async getVectors(options: GetVectorsOptions): Promise<ApiResponse<GetVectorsResponse>> {
    return this.handleOperation(() =>
      vectorsApi.post(this.fetch, `${this.url}/GetVectors`, options, {
        headers: this.headers,
      })
    )
  }
  /** Lists vectors in an index with pagination */
  async listVectors(options: ListVectorsOptions): Promise<ApiResponse<ListVectorsResponse>> {
    // Validate parallel-scan segment configuration before hitting the API.
    const { segmentCount, segmentIndex } = options
    if (segmentCount !== undefined) {
      if (segmentCount < 1 || segmentCount > 16) {
        throw new Error('segmentCount must be between 1 and 16')
      }
      if (segmentIndex !== undefined && (segmentIndex < 0 || segmentIndex >= segmentCount)) {
        throw new Error(`segmentIndex must be between 0 and ${segmentCount - 1}`)
      }
    }
    return this.handleOperation(() =>
      vectorsApi.post(this.fetch, `${this.url}/ListVectors`, options, {
        headers: this.headers,
      })
    )
  }
  /** Queries for similar vectors using approximate nearest neighbor search */
  async queryVectors(options: QueryVectorsOptions): Promise<ApiResponse<QueryVectorsResponse>> {
    return this.handleOperation(() =>
      vectorsApi.post(this.fetch, `${this.url}/QueryVectors`, options, {
        headers: this.headers,
      })
    )
  }
  /** Deletes vectors by their keys in batch (1-500 per request) */
  async deleteVectors(options: DeleteVectorsOptions): Promise<ApiResponse<undefined>> {
    // Client-side guard mirroring the server's batch limits.
    const batchSize = options.keys.length
    if (batchSize < 1 || batchSize > 500) {
      throw new Error('Keys batch size must be between 1 and 500 items')
    }
    return this.handleOperation(async () => {
      const response = await vectorsApi.post(this.fetch, `${this.url}/DeleteVectors`, options, {
        headers: this.headers,
      })
      // The endpoint may return no body on success.
      return response || {}
    })
  }
}

View File

@@ -0,0 +1,90 @@
import { DEFAULT_HEADERS } from '../lib/constants'
import { StorageError } from '../lib/common/errors'
import { Fetch, vectorsApi } from '../lib/common/fetch'
import BaseApiClient from '../lib/common/BaseApiClient'
import {
ApiResponse,
VectorIndex,
ListIndexesOptions,
ListIndexesResponse,
VectorDataType,
DistanceMetric,
MetadataConfiguration,
} from '../lib/types'
/**
 * @alpha
 *
 * Options for creating a vector index
 *
 * **Public alpha:** This API is part of a public alpha release and may not be available to your account type.
 */
export interface CreateIndexOptions {
  /** Name of the vector bucket that will contain the index */
  vectorBucketName: string
  /** Unique name for the new index within the bucket */
  indexName: string
  /** Element type of stored vectors (e.g. float32) */
  dataType: VectorDataType
  /** Number of components per vector; fixed for the lifetime of the index */
  dimension: number
  /** Similarity metric used for queries (e.g. cosine) */
  distanceMetric: DistanceMetric
  /** Optional metadata settings, e.g. keys excluded from filtering */
  metadataConfiguration?: MetadataConfiguration
}
/**
 * @hidden
 * Low-level client for vector index CRUD within a bucket.
 * Use {@link VectorBucketScope} via `supabase.storage.vectors.from('bucket')` instead.
 */
export default class VectorIndexApi extends BaseApiClient<StorageError> {
  /** Builds the client with a normalized base URL and JSON default headers. */
  constructor(url: string, headers: { [key: string]: string } = {}, fetch?: Fetch) {
    super(
      url.replace(/\/$/, ''),
      { ...DEFAULT_HEADERS, 'Content-Type': 'application/json', ...headers },
      fetch,
      'vectors'
    )
  }
  /** Creates a new vector index within a bucket */
  async createIndex(options: CreateIndexOptions): Promise<ApiResponse<undefined>> {
    return this.handleOperation(async () => {
      const response = await vectorsApi.post(this.fetch, `${this.url}/CreateIndex`, options, {
        headers: this.headers,
      })
      // The endpoint may return no body on success.
      return response || {}
    })
  }
  /** Retrieves metadata for a specific vector index */
  async getIndex(
    vectorBucketName: string,
    indexName: string
  ): Promise<ApiResponse<{ index: VectorIndex }>> {
    return this.handleOperation(() =>
      vectorsApi.post(
        this.fetch,
        `${this.url}/GetIndex`,
        { vectorBucketName, indexName },
        { headers: this.headers }
      )
    )
  }
  /** Lists vector indexes within a bucket with optional filtering and pagination */
  async listIndexes(options: ListIndexesOptions): Promise<ApiResponse<ListIndexesResponse>> {
    return this.handleOperation(() =>
      vectorsApi.post(this.fetch, `${this.url}/ListIndexes`, options, {
        headers: this.headers,
      })
    )
  }
  /** Deletes a vector index and all its data */
  async deleteIndex(vectorBucketName: string, indexName: string): Promise<ApiResponse<undefined>> {
    return this.handleOperation(async () => {
      const response = await vectorsApi.post(
        this.fetch,
        `${this.url}/DeleteIndex`,
        { vectorBucketName, indexName },
        { headers: this.headers }
      )
      // The endpoint may return no body on success.
      return response || {}
    })
  }
}