Database Schema
Goblin uses SQLite for persistence. The schema is defined in goblin/storage/schema.sql.
Tables
linear_orgs
Stores Linear organization credentials.
-- One row per Linear organization the daemon can authenticate against.
-- All *_at columns are TEXT: SQLite has no native datetime type;
-- presumably ISO-8601 strings -- TODO confirm against the writer code.
CREATE TABLE linear_orgs (
name TEXT PRIMARY KEY,  -- org identifier; referenced by projects.linear_org
token TEXT NOT NULL,    -- Linear API token, stored in plaintext -- NOTE(review): consider encrypting at rest
created_at TEXT NOT NULL,
updated_at TEXT NOT NULL
);
projects
Maps repositories to Linear teams.
-- Binds a local repository checkout to a Linear team inside a registered org.
CREATE TABLE projects (
id TEXT PRIMARY KEY,
name TEXT NOT NULL,
repo_path TEXT NOT NULL,       -- filesystem path to the repository
linear_org TEXT NOT NULL,      -- -> linear_orgs.name
linear_team_id TEXT NOT NULL,
linear_team_key TEXT NOT NULL, -- presumably the short team prefix (e.g. "ENG") -- TODO confirm
created_at TEXT NOT NULL,
-- NOTE(review): no ON DELETE action declared, and FKs are only enforced
-- when the connection sets PRAGMA foreign_keys = ON.
FOREIGN KEY (linear_org) REFERENCES linear_orgs(name)
);
agents
Tracks agent instances.
-- One row per agent instance managed by the daemon.
CREATE TABLE agents (
id TEXT PRIMARY KEY,
project_id TEXT NOT NULL,
status TEXT NOT NULL DEFAULT 'idle',  -- lifecycle state; 'idle' default, 'working' queried in the example below
linear_issue_id TEXT,   -- NULL when the agent is not bound to an issue
session_file TEXT,
pid INTEGER,            -- OS process id while running; NULL otherwise, presumably -- TODO confirm
created_at TEXT NOT NULL,
last_seen_at TEXT NOT NULL,  -- looks like a heartbeat timestamp -- verify against the updater
FOREIGN KEY (project_id) REFERENCES projects(id)
);
pipelines
Tracks issue lifecycle.
-- One row per Linear issue moving through the pipeline; role columns show
-- the stages involved: scoper, builder, reviewer, tester.
CREATE TABLE pipelines (
id TEXT PRIMARY KEY,
project_id TEXT NOT NULL,
linear_issue_id TEXT NOT NULL,
stage TEXT NOT NULL DEFAULT 'scope',  -- current stage; new pipelines start at 'scope'
branch_name TEXT,      -- NULL until work produces a branch
worktree_path TEXT,
base_commit TEXT,
pr_url TEXT,
-- Per-role agent assignments. NOTE(review): these reference agents(id) by
-- convention only -- no FOREIGN KEY constraints are declared for them.
scoper_agent_id TEXT,
builder_agent_id TEXT,
reviewer_agent_id TEXT,
tester_agent_id TEXT,
-- Stage outputs, stored as TEXT; presumably serialized JSON -- TODO confirm.
scope_result TEXT,
review_result TEXT,
test_result TEXT,
test_report TEXT,
qa_steps TEXT,
assignee_user_id TEXT,  -- presumably the Linear user assigned to the issue -- TODO confirm
created_at TEXT NOT NULL,
updated_at TEXT NOT NULL,
FOREIGN KEY (project_id) REFERENCES projects(id)
);
events
Audit log for debugging.
-- Append-only audit log for debugging.
CREATE TABLE events (
id INTEGER PRIMARY KEY AUTOINCREMENT,  -- AUTOINCREMENT: ids are never reused, at a small insert cost
event_type TEXT NOT NULL,
agent_id TEXT,         -- not FK-constrained; possibly deliberate so log rows outlive agents -- TODO confirm intent
linear_issue_id TEXT,
data TEXT,             -- free-form payload; presumably JSON -- TODO confirm
created_at TEXT NOT NULL
);
project_config
Per-project settings.
-- Per-project key/value settings.
CREATE TABLE project_config (
project_id TEXT NOT NULL,
key TEXT NOT NULL,
value TEXT NOT NULL,
updated_at TEXT NOT NULL,
PRIMARY KEY (project_id, key),  -- at most one value per (project, key)
FOREIGN KEY (project_id) REFERENCES projects(id)
);
pipeline_stage_history
Stage transition tracking.
-- Append-only record of a pipeline's stage transitions.
CREATE TABLE pipeline_stage_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
pipeline_id TEXT NOT NULL,
stage TEXT NOT NULL,
status TEXT NOT NULL,
agent_id TEXT,      -- agent that worked this stage, if any (not FK-constrained)
summary TEXT,
result TEXT,
started_at TEXT NOT NULL,
completed_at TEXT,  -- NULL while the stage is still in progress
FOREIGN KEY (pipeline_id) REFERENCES pipelines(id)
);
preview_environments
Preview environments and their lifecycle state (running or stopped).
-- Preview environments for a pipeline. Rows are retained after shutdown
-- (status / stopped_at), so this is not strictly "active" environments.
CREATE TABLE preview_environments (
id TEXT PRIMARY KEY,
pipeline_id TEXT NOT NULL,
project_id TEXT NOT NULL,       -- presumably a denormalized copy of pipelines.project_id -- TODO confirm
linear_issue_id TEXT NOT NULL,  -- likewise duplicated here for direct lookup (see index below)
docker_project_name TEXT NOT NULL,  -- docker compose project name, presumably
port_offset INTEGER NOT NULL DEFAULT 0,  -- NOTE(review): looks like a per-env port shift to avoid collisions -- confirm
service_urls TEXT,   -- presumably a serialized map of service -> URL -- TODO confirm format
status TEXT NOT NULL DEFAULT 'running',
-- The next four columns appear to apply to cloud-hosted previews only
-- (NULL otherwise) -- TODO confirm.
provider TEXT,
instance_id TEXT,
public_ip TEXT,
public_url TEXT,
created_at TEXT NOT NULL,
stopped_at TEXT,  -- NULL while the environment is running
FOREIGN KEY (pipeline_id) REFERENCES pipelines(id),
FOREIGN KEY (project_id) REFERENCES projects(id)
);
daemon_deployments
Cloud daemon instances.
-- Cloud-hosted daemon instances, reached over SSH.
CREATE TABLE daemon_deployments (
id TEXT PRIMARY KEY,
name TEXT NOT NULL UNIQUE,  -- human-facing handle; uniqueness enforced by the schema
provider TEXT NOT NULL,
instance_id TEXT,           -- nullable provider instance handle -- TODO confirm when this is absent
public_ip TEXT NOT NULL,
region TEXT,
ssh_key_path TEXT NOT NULL, -- local path to the private key used to reach the host
ssh_user TEXT NOT NULL DEFAULT 'ubuntu',
status TEXT NOT NULL DEFAULT 'running',
created_at TEXT NOT NULL,
updated_at TEXT NOT NULL
);
Indexes
-- Agents: looked up by owning project and by status (see Querying below).
CREATE INDEX idx_agents_project ON agents(project_id);
CREATE INDEX idx_agents_status ON agents(status);
-- Events: filtered by emitting agent and by event type.
CREATE INDEX idx_events_agent ON events(agent_id);
CREATE INDEX idx_events_type ON events(event_type);
-- Pipelines: by project, by current stage, and by Linear issue.
CREATE INDEX idx_pipelines_project ON pipelines(project_id);
CREATE INDEX idx_pipelines_stage ON pipelines(stage);
CREATE INDEX idx_pipelines_issue ON pipelines(linear_issue_id);
CREATE INDEX idx_stage_history_pipeline ON pipeline_stage_history(pipeline_id);
CREATE INDEX idx_preview_environments_status ON preview_environments(status);
CREATE INDEX idx_preview_environments_issue ON preview_environments(linear_issue_id);
CREATE INDEX idx_daemon_deployments_status ON daemon_deployments(status);
-- NOTE(review): preview_environments.pipeline_id has no index; cheap to add
-- if previews are ever fetched by pipeline. (project_config needs none: its
-- composite PRIMARY KEY already leads with project_id.)
Querying
# Minimal query API exposed by the project-local Database wrapper.
from goblin.storage.db import Database
# Opens (or creates, presumably) the SQLite file at this path -- TODO confirm.
db = Database(".goblin/goblin.db")
# Single row -- returns one row for the bound id; None on miss, presumably.
# Values are bound via ? placeholders, never interpolated into the SQL string.
row = db.execute_one("SELECT * FROM pipelines WHERE id = ?", (id,))
# Multiple rows -- all agents currently in the 'working' status.
rows = db.execute_many("SELECT * FROM agents WHERE status = ?", ("working",))
# Execute -- write statements (the SQL here is illustrative, not runnable).
db.execute("INSERT INTO events ...", params)