Quickstart
Prerequisites
- Go 1.24+ to build the daemon and use the Go library
- Make to run build and test targets
- protoc >= 3.20 only if you plan to regenerate gRPC stubs
- Node.js 20+ for the TypeScript SDK
- Python 3.10+ for the Python SDK
Run Membrane
- Go library (embedded)
- Daemon (gRPC server)
Use Membrane inside your Go process when you do not need a separate daemon.
Clone the repository
git clone https://github.com/BennettSchwartz/membrane.git
cd membrane
Add the dependency
go get github.com/BennettSchwartz/membrane
Write your first memory loop
package main
import (
"context"
"fmt"
"log"
"github.com/BennettSchwartz/membrane/pkg/ingestion"
"github.com/BennettSchwartz/membrane/pkg/membrane"
"github.com/BennettSchwartz/membrane/pkg/retrieval"
"github.com/BennettSchwartz/membrane/pkg/schema"
)
// main demonstrates the embedded memory loop: start Membrane, capture
// two memories, then retrieve a graph of records related to a task.
func main() {
// Start from defaults and override only the SQLite database path.
cfg := membrane.DefaultConfig()
cfg.DBPath = "my-agent.db"
m, err := membrane.New(cfg)
if err != nil {
log.Fatal(err)
}
// Stop shuts down the background schedulers launched by Start below.
defer m.Stop()
ctx := context.Background()
// Start launches the background decay and consolidation schedulers.
if err := m.Start(ctx); err != nil {
log.Fatal(err)
}
// Capture a tool-output memory, scoped to "project-auth" so retrieval
// can be restricted by trust scope later.
capture, err := m.CaptureMemory(ctx, ingestion.CaptureMemoryRequest{
Source: "build-agent",
SourceKind: "tool_output",
Content: map[string]any{
"tool": "go test",
"args": []string{"./..."},
"result": "auth package passed",
},
Context: map[string]any{"thread_id": "session-001"},
ReasonToRemember: "Keep successful test context for future auth work",
Summary: "Auth package tests passed",
Tags: []string{"auth", "tests"},
Scope: "project-auth",
Sensitivity: schema.SensitivityLow,
},)
if err != nil {
log.Fatal(err)
}
fmt.Printf("Captured primary record: %s\n", capture.PrimaryRecord.ID)
// Capture a second memory: a subject/predicate/object observation.
// The error is deliberately ignored here to keep the example short;
// real code should check it.
_, _ = m.CaptureMemory(ctx, ingestion.CaptureMemoryRequest{
Source: "build-agent",
SourceKind: "observation",
Content: map[string]any{
"subject": "user",
"predicate": "prefers_language",
"object": "Go",
},
ReasonToRemember: "Remember user language preference",
Summary: "User prefers Go",
Tags: []string{"preference"},
Sensitivity: schema.SensitivityLow,
})
// Retrieve a graph of memories relevant to a task descriptor. The
// trust context caps sensitivity at medium and grants the
// "project-auth" scope so the first capture is visible.
graph, err := m.RetrieveGraph(ctx, &retrieval.RetrieveGraphRequest{
TaskDescriptor: "fix auth build error",
Trust: retrieval.NewTrustContext(
schema.SensitivityMedium,
true,
"build-agent",
[]string{"project-auth"},
),
MemoryTypes: []schema.MemoryType{
schema.MemoryTypeEntity,
schema.MemoryTypeSemantic,
schema.MemoryTypeCompetence,
schema.MemoryTypeEpisodic,
},
// Limits bound the size of the returned graph; MaxHops controls
// how far traversal expands from the root records.
RootLimit: 10,
NodeLimit: 25,
EdgeLimit: 100,
MaxHops: 1,
})
if err != nil {
log.Fatal(err)
}
// Print each node: its record ID and type, whether it was a root
// match, and how many hops from a root it was reached.
for _, node := range graph.Nodes {
fmt.Printf("Found: %s (type=%s, root=%t, hop=%d)\n",
node.Record.ID,
node.Record.Type,
node.Root,
node.Hop,
)
}
}
Run it
go run main.go
Membrane creates my-agent.db on first run. Background decay and consolidation schedulers start with m.Start(ctx) and stop when m.Stop() is called.
Run membraned as a standalone gRPC server for TypeScript, Python, or any gRPC client.
Clone and build
git clone https://github.com/BennettSchwartz/membrane.git
cd membrane
make build
The binary is written to bin/membraned.
Start the daemon
- SQLite (default)
- Postgres + pgvector
- Custom config file
./bin/membraned
Starts with membrane.db in the current directory and listens on :9090.
docker compose up -d
./bin/membraned --postgres-dsn "postgres://membrane:membrane@localhost:5432/membrane_test?sslmode=disable"
./bin/membraned --config /path/to/config.yaml
See Configuration for the full YAML schema.
Verify startup
The daemon logs membraned: listening on :9090 when it starts successfully. Send SIGINT or SIGTERM to trigger graceful shutdown.
If the API key is not set in the config file, the daemon reads it from the MEMBRANE_API_KEY environment variable. Leaving both unset disables authentication, which is suitable only for local development.
Connect A Client
Once the daemon is running, connect from TypeScript or Python.
import { MembraneClient, Sensitivity, SourceKind } from "@bennettschwartz/membrane";
// Connect to the daemon; the apiKey must match the daemon's configured key.
const client = new MembraneClient("localhost:9090", { apiKey: "your-key" });
// Capture a memory: first argument is the content payload, second is metadata.
const capture = await client.captureMemory(
{
ref: "thread-1:turn-7",
text: "Refactored auth middleware and verified package tests",
file: "src/auth.ts",
},
{
sourceKind: SourceKind.EVENT,
reasonToRemember: "Keep the auth refactor available for future debugging",
summary: "Refactored auth middleware",
tags: ["auth", "typescript"],
sensitivity: Sensitivity.LOW,
}
);
// Retrieve a graph of memories relevant to the task descriptor.
// NOTE(review): scopes is empty here, unlike the Go example's
// ["project-auth"] — verify which scopes your records require.
const graph = await client.retrieveGraph("debug auth", {
trust: {
max_sensitivity: Sensitivity.MEDIUM,
authenticated: true,
actor_id: "ts-agent",
scopes: [],
},
memoryTypes: ["entity", "semantic", "competence", "episodic"],
rootLimit: 5,
nodeLimit: 20,
maxHops: 1,
});
console.log(capture.primary_record.id, graph.nodes.length);
// Close the underlying gRPC channel when done.
client.close();
from membrane import MembraneClient, Sensitivity, SourceKind, TrustContext
# Connect to the daemon; api_key must match the daemon's configured key.
client = MembraneClient("localhost:9090", api_key="your-key")
# Capture a memory: the dict is the content payload, the keyword
# arguments supply ingestion metadata.
capture = client.capture_memory(
{
"ref": "thread-1:turn-7",
"text": "Refactored auth middleware and verified package tests",
"file": "src/auth.py",
},
source_kind=SourceKind.EVENT,
reason_to_remember="Keep the auth refactor available for future debugging",
summary="Refactored auth middleware",
tags=["auth", "python"],
sensitivity=Sensitivity.LOW,
)
# Retrieve a graph of memories relevant to the task descriptor; the
# trust context caps visible sensitivity at medium.
graph = client.retrieve_graph(
"debug auth",
trust=TrustContext(
max_sensitivity=Sensitivity.MEDIUM,
authenticated=True,
actor_id="py-agent",
),
memory_types=["entity", "semantic", "competence", "episodic"],
root_limit=5,
node_limit=20,
max_hops=1,
)
print(capture.primary_record.id, len(graph.nodes))
# Close the underlying gRPC channel when done.
client.close()
Install The SDKs
npm install @bennettschwartz/membrane
pip install -e clients/python
Configuration Reference
Override defaults using a YAML config file or CLI flags. Secrets should come from environment variables.
# Storage backend: "sqlite" (default) or Postgres (see the Postgres tab above).
backend: "sqlite"
# SQLite database file path (used by the sqlite backend).
db_path: "membrane.db"
# gRPC listen address for the daemon.
listen_addr: ":9090"
# How often the background decay scheduler runs.
decay_interval: "1h"
# How often the background consolidation scheduler runs.
consolidation_interval: "6h"
default_sensitivity: "low"
selection_confidence_threshold: 0.7
# Graph retrieval defaults
graph_default_root_limit: 10
graph_default_node_limit: 25
graph_default_edge_limit: 100
graph_default_max_hops: 1
# Optional embedding-backed retrieval (Postgres only)
# embedding_endpoint: "https://api.openai.com/v1/embeddings"
# embedding_model: "text-embedding-3-small"
# embedding_dimensions: 1536
# embedding_api_key: "" # or set MEMBRANE_EMBEDDING_API_KEY
# Optional LLM-backed consolidation (Postgres only)
# llm_endpoint: "https://api.openai.com/v1/chat/completions"
# llm_model: "gpt-5-mini"
# llm_api_key: "" # or set MEMBRANE_LLM_API_KEY
# Optional ingest-side interpretation for CaptureMemory
# ingest_llm_enabled: true
# ingest_llm_endpoint: "https://api.openai.com/v1/chat/completions"
# ingest_llm_model: "gpt-5-mini"
# ingest_llm_api_key: "" # or set MEMBRANE_INGEST_LLM_API_KEY
# Security
# encryption_key: "" # or set MEMBRANE_ENCRYPTION_KEY
# api_key: "" # or set MEMBRANE_API_KEY
# tls_cert_file: ""
# tls_key_file: ""