Agent workspaces: per-contract overlays with instant handoff

- WorkspaceManager: create/get/list workspaces with daily/weekly/monthly/pinned tiers
- Saved searches: agent stores SQL queries in workspace context
- Shortlist: tag candidates/records to a workspace with notes
- Activity log: track calls, emails, updates per workspace per agent
- Instant handoff: transfer workspace ownership with full history
  Zero data copy — just a pointer swap, receiving agent sees everything
- Persistence: workspaces stored as JSON in object storage, rebuilt on startup
- Endpoints: /workspaces/create, /{id}, /{id}/handoff, /{id}/search,
  /{id}/shortlist, /{id}/activity
- Tested: Sarah creates workspace, saves searches, shortlists 3 candidates,
  logs activity, hands off to Mike who continues seamlessly

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
root 2026-03-27 08:44:45 -05:00
parent 6df904a03c
commit 0b9da45647
20 changed files with 695 additions and 108 deletions

View File

@ -30,10 +30,16 @@ async fn main() {
tracing::warn!("catalog rebuild failed (empty store?): {e}");
}
// Query engine with 16GB memory cache (configurable)
let cache = queryd::cache::MemCache::new(16 * 1024 * 1024 * 1024); // 16GB
// Query engine with 16GB memory cache
let cache = queryd::cache::MemCache::new(16 * 1024 * 1024 * 1024);
let engine = queryd::context::QueryEngine::new(registry.clone(), store.clone(), cache);
// Workspace manager for agent-specific overlays
let workspace_mgr = queryd::workspace::WorkspaceManager::new(store.clone());
if let Err(e) = workspace_mgr.rebuild().await {
tracing::warn!("workspace rebuild: {e}");
}
// AI sidecar client
let ai_client = aibridge::client::AiClient::new(&config.sidecar.url);
@ -51,7 +57,8 @@ async fn main() {
.nest("/vectors", vectord::service::router(vectord::service::VectorState {
store: store.clone(),
ai_client: ai_client.clone(),
}));
}))
.nest("/workspaces", queryd::workspace_service::router(workspace_mgr));
// Auth middleware (if enabled)
if config.auth.enabled {

View File

@ -2,3 +2,5 @@ pub mod cache;
pub mod context;
pub mod delta;
pub mod service;
pub mod workspace;
pub mod workspace_service;

View File

@ -0,0 +1,272 @@
/// Agent workspaces — named overlays for contract/search-specific work.
/// Each workspace tracks an agent's activity on a specific contract or search,
/// with daily/weekly/monthly tiers and instant handoff capability.
use arrow::array::{ArrayRef, RecordBatch, StringArray, Int64Array};
use arrow::datatypes::{DataType, Field, Schema};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use crate::delta;
use object_store::ObjectStore;
/// Retention tier for workspace data.
///
/// Determines when a workspace's contents expire. Serialized in lowercase
/// (`"daily"`, `"weekly"`, `"monthly"`, `"pinned"`) for JSON persistence
/// and query parameters.
// Fieldless enum: derive Copy/Eq/Hash so it can be passed by value and
// used as a map key without cloning.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Tier {
    /// Expires at end of day — active search scratch.
    Daily,
    /// Expires at end of week — active contract work.
    Weekly,
    /// Expires at end of month — contract lifecycle.
    Monthly,
    /// Never expires — manually managed.
    Pinned,
}
/// A saved query/filter within a workspace.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SavedSearch {
    /// Human-readable label the agent chose for this search.
    pub name: String,
    /// The SQL text to re-run; stored verbatim, not validated here.
    pub sql: String,
    /// When the search was saved (UTC).
    pub created_at: DateTime<Utc>,
}
/// A shortlisted candidate or record.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShortlistEntry {
    /// Dataset the record comes from (e.g. "candidates").
    pub dataset: String,
    /// Identifier of the record within that dataset.
    pub record_id: String,
    /// Free-form agent notes about why this record was shortlisted.
    pub notes: String,
    /// When the entry was added (UTC).
    pub added_at: DateTime<Utc>,
    /// Agent who added the entry.
    pub added_by: String,
}
/// Activity log entry.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ActivityEntry {
    // Known values observed in this file: "search", "shortlist", "call",
    // "email", "update", "ingest", "handoff" — stored as a free string,
    // not an enum, so callers may log arbitrary actions.
    pub action: String,
    /// Human-readable description of what happened.
    pub detail: String,
    /// When the action occurred (UTC).
    pub timestamp: DateTime<Utc>,
    /// Agent who performed the action.
    pub agent: String,
}
/// A workspace — an agent's working context for a contract or search.
///
/// Persisted as pretty-printed JSON under `workspaces/{id}.json` and held
/// in memory by [`WorkspaceManager`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Workspace {
    /// Unique id, generated at creation time.
    pub id: String,
    /// Display name, e.g. "Apex Corp .NET Developers - Chicago".
    pub name: String,
    pub description: String,
    /// Retention tier; see [`Tier`].
    pub tier: Tier,
    /// Current owning agent; changes on handoff.
    pub owner: String,
    /// Full handoff history, oldest first.
    pub previous_owners: Vec<HandoffRecord>,
    pub created_at: DateTime<Utc>,
    /// Bumped on every mutation (search, shortlist, activity, handoff).
    pub updated_at: DateTime<Utc>,
    // Work content
    pub saved_searches: Vec<SavedSearch>,
    pub shortlist: Vec<ShortlistEntry>,
    pub activity: Vec<ActivityEntry>,
    /// Datasets this workspace created.
    pub ingested_datasets: Vec<String>,
    /// Delta files specific to this workspace.
    pub delta_keys: Vec<String>,
    pub tags: Vec<String>,
}
/// Record of a handoff between agents.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HandoffRecord {
    /// Agent who owned the workspace before the handoff.
    pub from_agent: String,
    /// Agent who received the workspace.
    pub to_agent: String,
    /// Free-form reason supplied by the caller (e.g. coverage, PTO).
    pub reason: String,
    /// When the handoff happened (UTC).
    pub timestamp: DateTime<Utc>,
}
/// Workspace manager — in-memory registry with persistence.
///
/// Cheap to clone: both fields are `Arc`s, so clones share the same
/// registry and object store.
#[derive(Clone)]
pub struct WorkspaceManager {
    /// All known workspaces, keyed by workspace id. Guarded by an async
    /// RwLock; writes are held only while mutating, never across persist.
    workspaces: Arc<RwLock<HashMap<String, Workspace>>>,
    /// Backing object store; workspaces live under `workspaces/{id}.json`.
    store: Arc<dyn ObjectStore>,
}
impl WorkspaceManager {
    /// Create a manager with an empty registry backed by `store`.
    pub fn new(store: Arc<dyn ObjectStore>) -> Self {
        Self {
            workspaces: Arc::new(RwLock::new(HashMap::new())),
            store,
        }
    }

    /// Rebuild from persisted workspace files on startup.
    ///
    /// Skips (with a warning) any object that fails to fetch or parse, so a
    /// single corrupt file cannot abort the whole rebuild. Returns the number
    /// of workspaces loaded.
    pub async fn rebuild(&self) -> Result<usize, String> {
        let keys = storaged::ops::list(&self.store, Some("workspaces/")).await?;
        let mut ws_map = self.workspaces.write().await;
        ws_map.clear();
        for key in keys.iter().filter(|k| k.ends_with(".json")) {
            // Tolerate per-object fetch failures, consistent with how parse
            // failures are handled below — warn and keep going.
            let data = match storaged::ops::get(&self.store, key).await {
                Ok(d) => d,
                Err(e) => {
                    tracing::warn!("failed to fetch workspace {key}: {e}");
                    continue;
                }
            };
            match serde_json::from_slice::<Workspace>(&data) {
                Ok(ws) => { ws_map.insert(ws.id.clone(), ws); }
                Err(e) => tracing::warn!("failed to load workspace {key}: {e}"),
            }
        }
        let count = ws_map.len();
        if count > 0 {
            tracing::info!("loaded {count} workspaces");
        }
        Ok(count)
    }

    /// Create a new workspace, persist it, then register it in memory.
    ///
    /// Registration happens only after a successful persist, so the in-memory
    /// map never contains a workspace that isn't on disk.
    pub async fn create(&self, name: String, description: String, owner: String, tier: Tier) -> Result<Workspace, String> {
        use std::sync::atomic::{AtomicU64, Ordering};
        // A millisecond timestamp alone can collide when two workspaces are
        // created within the same millisecond; a process-wide counter makes
        // the id unique per process run.
        static SEQ: AtomicU64 = AtomicU64::new(0);
        let now = Utc::now();
        let id = format!(
            "ws-{}-{}",
            now.timestamp_millis(),
            SEQ.fetch_add(1, Ordering::Relaxed)
        );
        let ws = Workspace {
            id: id.clone(),
            name,
            description,
            tier,
            owner,
            previous_owners: vec![],
            created_at: now,
            updated_at: now,
            saved_searches: vec![],
            shortlist: vec![],
            activity: vec![],
            ingested_datasets: vec![],
            delta_keys: vec![],
            tags: vec![],
        };
        self.persist(&ws).await?;
        self.workspaces.write().await.insert(id.clone(), ws.clone());
        tracing::info!("created workspace: {} ({})", ws.name, ws.id);
        Ok(ws)
    }

    /// Handoff workspace to another agent. Instant — no data copy, just an
    /// owner swap plus a history entry; the receiving agent sees everything.
    pub async fn handoff(&self, workspace_id: &str, to_agent: &str, reason: &str) -> Result<Workspace, String> {
        let ws = self
            .mutate(workspace_id, |ws| {
                let record = HandoffRecord {
                    from_agent: ws.owner.clone(),
                    to_agent: to_agent.to_string(),
                    reason: reason.to_string(),
                    timestamp: Utc::now(),
                };
                ws.previous_owners.push(record);
                ws.owner = to_agent.to_string();
                ws.activity.push(ActivityEntry {
                    action: "handoff".to_string(),
                    detail: format!("handed off to {} — {}", to_agent, reason),
                    timestamp: Utc::now(),
                    agent: to_agent.to_string(),
                });
            })
            .await?;
        tracing::info!("workspace '{}' handed off to {}", ws.name, to_agent);
        Ok(ws)
    }

    /// Add a saved search to a workspace and log the action.
    pub async fn add_search(&self, workspace_id: &str, name: String, sql: String, agent: &str) -> Result<(), String> {
        self.mutate(workspace_id, |ws| {
            ws.saved_searches.push(SavedSearch {
                name: name.clone(),
                sql,
                created_at: Utc::now(),
            });
            ws.activity.push(ActivityEntry {
                action: "search".into(),
                detail: format!("saved search: {name}"),
                timestamp: Utc::now(),
                agent: agent.to_string(),
            });
        })
        .await
        .map(|_| ())
    }

    /// Add a candidate/record to the shortlist and log the action.
    pub async fn add_to_shortlist(&self, workspace_id: &str, dataset: String, record_id: String, notes: String, agent: &str) -> Result<(), String> {
        self.mutate(workspace_id, |ws| {
            ws.shortlist.push(ShortlistEntry {
                dataset: dataset.clone(),
                record_id: record_id.clone(),
                notes,
                added_at: Utc::now(),
                added_by: agent.to_string(),
            });
            ws.activity.push(ActivityEntry {
                action: "shortlist".into(),
                detail: format!("added {record_id} from {dataset}"),
                timestamp: Utc::now(),
                agent: agent.to_string(),
            });
        })
        .await
        .map(|_| ())
    }

    /// Log an arbitrary activity (call, email, update, ...) on a workspace.
    pub async fn log_activity(&self, workspace_id: &str, action: String, detail: String, agent: &str) -> Result<(), String> {
        self.mutate(workspace_id, |ws| {
            ws.activity.push(ActivityEntry {
                action,
                detail,
                timestamp: Utc::now(),
                agent: agent.to_string(),
            });
        })
        .await
        .map(|_| ())
    }

    /// Get a workspace by id, if it exists.
    pub async fn get(&self, workspace_id: &str) -> Option<Workspace> {
        self.workspaces.read().await.get(workspace_id).cloned()
    }

    /// List all workspaces, optionally filtered by owner or tier.
    /// `None` filters match everything.
    pub async fn list(&self, owner: Option<&str>, tier: Option<&Tier>) -> Vec<Workspace> {
        let ws_map = self.workspaces.read().await;
        ws_map.values()
            .filter(|ws| {
                owner.map_or(true, |o| ws.owner == o) &&
                tier.map_or(true, |t| ws.tier == *t)
            })
            .cloned()
            .collect()
    }

    /// Apply `f` to the named workspace under the write lock, bump
    /// `updated_at`, then persist a snapshot *after* releasing the lock
    /// (so slow object-store writes never block readers).
    async fn mutate<F>(&self, workspace_id: &str, f: F) -> Result<Workspace, String>
    where
        F: FnOnce(&mut Workspace),
    {
        let snapshot = {
            let mut ws_map = self.workspaces.write().await;
            let ws = ws_map.get_mut(workspace_id)
                .ok_or_else(|| format!("workspace not found: {workspace_id}"))?;
            f(ws);
            ws.updated_at = Utc::now();
            ws.clone()
        };
        self.persist(&snapshot).await?;
        Ok(snapshot)
    }

    /// Persist workspace to object storage as pretty-printed JSON under
    /// `workspaces/{id}.json`.
    async fn persist(&self, ws: &Workspace) -> Result<(), String> {
        let key = format!("workspaces/{}.json", ws.id);
        let json = serde_json::to_vec_pretty(ws).map_err(|e| e.to_string())?;
        storaged::ops::put(&self.store, &key, json.into()).await
    }
}

View File

@ -0,0 +1,136 @@
use axum::{
Json, Router,
extract::{Path, Query, State},
http::StatusCode,
response::IntoResponse,
routing::{get, post},
};
use serde::Deserialize;
use crate::workspace::{Tier, WorkspaceManager};
/// Build the `/workspaces` router, with `manager` as shared state.
pub fn router(manager: WorkspaceManager) -> Router {
    // Read endpoints.
    let r = Router::new()
        .route("/", get(list_workspaces))
        .route("/{id}", get(get_workspace));
    // Mutating endpoints.
    let r = r
        .route("/create", post(create_workspace))
        .route("/{id}/handoff", post(handoff))
        .route("/{id}/search", post(add_search))
        .route("/{id}/shortlist", post(add_to_shortlist))
        .route("/{id}/activity", post(log_activity));
    r.with_state(manager)
}
/// Optional query-string filters for listing workspaces.
#[derive(Deserialize)]
struct ListQuery {
    owner: Option<String>,
    tier: Option<Tier>,
}

/// GET `/` — list workspaces, optionally filtered by owner and/or tier.
async fn list_workspaces(
    State(mgr): State<WorkspaceManager>,
    Query(q): Query<ListQuery>,
) -> impl IntoResponse {
    Json(mgr.list(q.owner.as_deref(), q.tier.as_ref()).await)
}
/// Body for POST `/create`.
#[derive(Deserialize)]
struct CreateRequest {
    name: String,
    description: String,
    owner: String,
    tier: Tier,
}

/// POST `/create` — create a workspace; 201 with the workspace on success.
async fn create_workspace(
    State(mgr): State<WorkspaceManager>,
    Json(req): Json<CreateRequest>,
) -> impl IntoResponse {
    let CreateRequest { name, description, owner, tier } = req;
    match mgr.create(name, description, owner, tier).await {
        Ok(ws) => Ok((StatusCode::CREATED, Json(ws))),
        Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e)),
    }
}
/// GET `/{id}` — fetch one workspace; 404 when the id is unknown.
async fn get_workspace(
    State(mgr): State<WorkspaceManager>,
    Path(id): Path<String>,
) -> impl IntoResponse {
    let Some(ws) = mgr.get(&id).await else {
        return Err((StatusCode::NOT_FOUND, format!("workspace not found: {id}")));
    };
    Ok(Json(ws))
}
/// Body for POST `/{id}/handoff`.
#[derive(Deserialize)]
struct HandoffRequest {
    to_agent: String,
    reason: String,
}

/// POST `/{id}/handoff` — transfer workspace ownership.
///
/// Returns the updated workspace; 404 when the workspace does not exist
/// (previously misreported as 500), 500 on persistence failures.
async fn handoff(
    State(mgr): State<WorkspaceManager>,
    Path(id): Path<String>,
    Json(req): Json<HandoffRequest>,
) -> impl IntoResponse {
    match mgr.handoff(&id, &req.to_agent, &req.reason).await {
        Ok(ws) => Ok(Json(ws)),
        // Manager errors are plain strings; distinguish a missing workspace
        // from a real server-side failure so clients see a proper 404.
        Err(e) if e.starts_with("workspace not found") => Err((StatusCode::NOT_FOUND, e)),
        Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e)),
    }
}
/// Body for POST `/{id}/search`.
#[derive(Deserialize)]
struct SearchRequest {
    name: String,
    sql: String,
    agent: String,
}

/// POST `/{id}/search` — save a search in the workspace.
///
/// 404 when the workspace does not exist (previously misreported as 500),
/// 500 on persistence failures.
async fn add_search(
    State(mgr): State<WorkspaceManager>,
    Path(id): Path<String>,
    Json(req): Json<SearchRequest>,
) -> impl IntoResponse {
    match mgr.add_search(&id, req.name, req.sql, &req.agent).await {
        Ok(()) => Ok((StatusCode::OK, "search saved")),
        // Missing workspace is a client error, not a server error.
        Err(e) if e.starts_with("workspace not found") => Err((StatusCode::NOT_FOUND, e)),
        Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e)),
    }
}
/// Body for POST `/{id}/shortlist`.
#[derive(Deserialize)]
struct ShortlistRequest {
    dataset: String,
    record_id: String,
    notes: String,
    agent: String,
}

/// POST `/{id}/shortlist` — tag a record to the workspace shortlist.
///
/// 404 when the workspace does not exist (previously misreported as 500),
/// 500 on persistence failures.
async fn add_to_shortlist(
    State(mgr): State<WorkspaceManager>,
    Path(id): Path<String>,
    Json(req): Json<ShortlistRequest>,
) -> impl IntoResponse {
    match mgr.add_to_shortlist(&id, req.dataset, req.record_id, req.notes, &req.agent).await {
        Ok(()) => Ok((StatusCode::OK, "added to shortlist")),
        // Missing workspace is a client error, not a server error.
        Err(e) if e.starts_with("workspace not found") => Err((StatusCode::NOT_FOUND, e)),
        Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e)),
    }
}
/// Body for POST `/{id}/activity`.
#[derive(Deserialize)]
struct ActivityRequest {
    action: String,
    detail: String,
    agent: String,
}

/// POST `/{id}/activity` — append an activity entry (call, email, update, ...).
///
/// 404 when the workspace does not exist (previously misreported as 500),
/// 500 on persistence failures.
async fn log_activity(
    State(mgr): State<WorkspaceManager>,
    Path(id): Path<String>,
    Json(req): Json<ActivityRequest>,
) -> impl IntoResponse {
    match mgr.log_activity(&id, req.action, req.detail, &req.agent).await {
        Ok(()) => Ok((StatusCode::OK, "activity logged")),
        // Missing workspace is a client error, not a server error.
        Err(e) if e.starts_with("workspace not found") => Err((StatusCode::NOT_FOUND, e)),
        Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e)),
    }
}

View File

@ -0,0 +1,15 @@
{
"id": "142c4090-fd14-4065-8c06-d9721c14ec87",
"name": "candidates",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/candidates.parquet",
"size_bytes": 10592165,
"created_at": "2026-03-27T13:43:21.924470705Z"
}
],
"created_at": "2026-03-27T13:43:21.924477421Z",
"updated_at": "2026-03-27T13:43:21.924477421Z"
}

View File

@ -1,15 +0,0 @@
{
"id": "18d22cdd-24b3-4a65-bdcb-6624753e5ab7",
"name": "job_orders",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/job_orders.parquet",
"size_bytes": 905534,
"created_at": "2026-03-27T13:36:42.130140103Z"
}
],
"created_at": "2026-03-27T13:36:42.130146127Z",
"updated_at": "2026-03-27T13:36:42.130146127Z"
}

View File

@ -0,0 +1,15 @@
{
"id": "1e7a1b8d-6211-46b5-b030-02ac76f92564",
"name": "email_log",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/email_log.parquet",
"size_bytes": 16768671,
"created_at": "2026-03-27T13:43:32.341429856Z"
}
],
"created_at": "2026-03-27T13:43:32.341435388Z",
"updated_at": "2026-03-27T13:43:32.341435388Z"
}

View File

@ -0,0 +1,15 @@
{
"id": "29c177bd-3728-428a-ab0f-95169aae1106",
"name": "timesheets",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/timesheets.parquet",
"size_bytes": 17539932,
"created_at": "2026-03-27T13:43:26.951181242Z"
}
],
"created_at": "2026-03-27T13:43:26.951188331Z",
"updated_at": "2026-03-27T13:43:26.951188331Z"
}

View File

@ -0,0 +1,15 @@
{
"id": "812e7d9a-0f50-49c0-b121-4cf758c304d9",
"name": "placements",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/placements.parquet",
"size_bytes": 1213820,
"created_at": "2026-03-27T13:43:22.173146233Z"
}
],
"created_at": "2026-03-27T13:43:22.173152301Z",
"updated_at": "2026-03-27T13:43:22.173152301Z"
}

View File

@ -1,15 +0,0 @@
{
"id": "8fa7cb8b-ab6b-4e64-9384-d2480e79dd7c",
"name": "clients",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/clients.parquet",
"size_bytes": 21971,
"created_at": "2026-03-27T13:36:42.025701092Z"
}
],
"created_at": "2026-03-27T13:36:42.025707574Z",
"updated_at": "2026-03-27T13:36:42.025707574Z"
}

View File

@ -0,0 +1,15 @@
{
"id": "91413428-b4b1-44b3-bb8d-5cb326019879",
"name": "job_orders",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/job_orders.parquet",
"size_bytes": 905534,
"created_at": "2026-03-27T13:43:22.036039453Z"
}
],
"created_at": "2026-03-27T13:43:22.036045131Z",
"updated_at": "2026-03-27T13:43:22.036045131Z"
}

View File

@ -0,0 +1,15 @@
{
"id": "9bb57bf9-2c19-42ed-84f4-83fd3c52b94a",
"name": "clients",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/clients.parquet",
"size_bytes": 21971,
"created_at": "2026-03-27T13:43:21.933347525Z"
}
],
"created_at": "2026-03-27T13:43:21.933351887Z",
"updated_at": "2026-03-27T13:43:21.933351887Z"
}

View File

@ -1,15 +0,0 @@
{
"id": "b5d49316-9c9b-4a57-8221-13b6dcda551a",
"name": "placements",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/placements.parquet",
"size_bytes": 1213820,
"created_at": "2026-03-27T13:36:42.237756183Z"
}
],
"created_at": "2026-03-27T13:36:42.237762120Z",
"updated_at": "2026-03-27T13:36:42.237762120Z"
}

View File

@ -1,15 +0,0 @@
{
"id": "c00465bd-c562-419a-b40e-c557ba9054bf",
"name": "candidates",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/candidates.parquet",
"size_bytes": 10592165,
"created_at": "2026-03-27T13:36:42.018896280Z"
}
],
"created_at": "2026-03-27T13:36:42.018904245Z",
"updated_at": "2026-03-27T13:36:42.018904245Z"
}

View File

@ -1,15 +0,0 @@
{
"id": "dce14141-f679-481b-9b48-13438cbfe057",
"name": "email_log",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/email_log.parquet",
"size_bytes": 16768671,
"created_at": "2026-03-27T13:36:52.383853471Z"
}
],
"created_at": "2026-03-27T13:36:52.383859356Z",
"updated_at": "2026-03-27T13:36:52.383859356Z"
}

View File

@ -1,15 +0,0 @@
{
"id": "e0bcb8de-a2c1-4706-bf2d-73c1b989a70d",
"name": "timesheets",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/timesheets.parquet",
"size_bytes": 17539932,
"created_at": "2026-03-27T13:36:46.998375016Z"
}
],
"created_at": "2026-03-27T13:36:46.998383728Z",
"updated_at": "2026-03-27T13:36:46.998383728Z"
}

View File

@ -0,0 +1,15 @@
{
"id": "e1607b56-a826-4826-845a-76918127c6bf",
"name": "call_log",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/call_log.parquet",
"size_bytes": 35951077,
"created_at": "2026-03-27T13:43:30.485776088Z"
}
],
"created_at": "2026-03-27T13:43:30.485783579Z",
"updated_at": "2026-03-27T13:43:30.485783579Z"
}

View File

@ -1,15 +0,0 @@
{
"id": "e47b637f-31c6-4209-ab3c-557f8c67c812",
"name": "call_log",
"schema_fingerprint": "auto",
"objects": [
{
"bucket": "data",
"key": "datasets/call_log.parquet",
"size_bytes": 35951077,
"created_at": "2026-03-27T13:36:50.546706609Z"
}
],
"created_at": "2026-03-27T13:36:50.546712358Z",
"updated_at": "2026-03-27T13:36:50.546712358Z"
}

View File

@ -0,0 +1,40 @@
{
"id": "ws-1774619041730",
"name": "Apex Corp - .NET Developers Chicago",
"description": "Fill 5 .NET developer positions for Apex Corp, downtown Chicago, $65-85/hr bill rate",
"tier": "weekly",
"owner": "Sarah",
"previous_owners": [],
"created_at": "2026-03-27T13:44:01.730143708Z",
"updated_at": "2026-03-27T13:44:08.530268827Z",
"saved_searches": [
{
"name": "Chicago .NET active candidates",
"sql": "SELECT candidate_id, first_name, last_name, phone, email, years_experience FROM candidates WHERE city = 'Chicago' AND skills LIKE '%.NET%' AND status = 'active' ORDER BY years_experience DESC",
"created_at": "2026-03-27T13:44:01.731891844Z"
},
{
"name": "test",
"sql": "SELECT 1",
"created_at": "2026-03-27T13:44:08.530262069Z"
}
],
"shortlist": [],
"activity": [
{
"action": "search",
"detail": "saved search: Chicago .NET active candidates",
"timestamp": "2026-03-27T13:44:01.731898474Z",
"agent": "Sarah"
},
{
"action": "search",
"detail": "saved search: test",
"timestamp": "2026-03-27T13:44:08.530268200Z",
"agent": "Sarah"
}
],
"ingested_datasets": [],
"delta_keys": [],
"tags": []
}

View File

@ -0,0 +1,130 @@
{
"id": "ws-1774619071313",
"name": "Apex Corp - .NET Developers Chicago",
"description": "Fill 5 .NET developer positions, downtown Chicago, $65-85/hr",
"tier": "weekly",
"owner": "Mike",
"previous_owners": [
{
"from_agent": "Sarah",
"to_agent": "Mike",
"reason": "Sarah on PTO, Mike covering Apex account",
"timestamp": "2026-03-27T13:44:31.531544562Z"
}
],
"created_at": "2026-03-27T13:44:31.313179900Z",
"updated_at": "2026-03-27T13:44:31.534554639Z",
"saved_searches": [
{
"name": "Chicago .NET active",
"sql": "SELECT candidate_id, first_name, last_name, phone, years_experience FROM candidates WHERE city = 'Chicago' AND skills LIKE '%.NET%' AND status = 'active' ORDER BY years_experience DESC",
"created_at": "2026-03-27T13:44:31.314740279Z"
},
{
"name": "High-bill .NET history",
"sql": "SELECT p.candidate_id, c.first_name, c.last_name, p.bill_rate FROM placements p JOIN candidates c ON p.candidate_id = c.candidate_id JOIN job_orders j ON p.job_order_id = j.job_order_id WHERE j.title LIKE '%.NET%' AND p.bill_rate > 60 ORDER BY p.bill_rate DESC LIMIT 20",
"created_at": "2026-03-27T13:44:31.315923201Z"
}
],
"shortlist": [
{
"dataset": "candidates",
"record_id": "CAND-006645",
"notes": "Joseph Hill — 30yr .NET exp",
"added_at": "2026-03-27T13:44:31.524757463Z",
"added_by": "Sarah"
},
{
"dataset": "candidates",
"record_id": "CAND-020078",
"notes": "Jessica Jones — 30yr .NET exp",
"added_at": "2026-03-27T13:44:31.525965891Z",
"added_by": "Sarah"
},
{
"dataset": "candidates",
"record_id": "CAND-015656",
"notes": "Barbara Wright — 30yr .NET exp",
"added_at": "2026-03-27T13:44:31.527152483Z",
"added_by": "Sarah"
},
{
"dataset": "candidates",
"record_id": "CAND-00099",
"notes": "Mike found additional candidate via LinkedIn",
"added_at": "2026-03-27T13:44:31.534551709Z",
"added_by": "Mike"
}
],
"activity": [
{
"action": "search",
"detail": "saved search: Chicago .NET active",
"timestamp": "2026-03-27T13:44:31.314743876Z",
"agent": "Sarah"
},
{
"action": "search",
"detail": "saved search: High-bill .NET history",
"timestamp": "2026-03-27T13:44:31.315925687Z",
"agent": "Sarah"
},
{
"action": "shortlist",
"detail": "added CAND-006645 from candidates",
"timestamp": "2026-03-27T13:44:31.524762385Z",
"agent": "Sarah"
},
{
"action": "shortlist",
"detail": "added CAND-020078 from candidates",
"timestamp": "2026-03-27T13:44:31.525968748Z",
"agent": "Sarah"
},
{
"action": "shortlist",
"detail": "added CAND-015656 from candidates",
"timestamp": "2026-03-27T13:44:31.527155126Z",
"agent": "Sarah"
},
{
"action": "call",
"detail": "Called top 3 candidates, 2 interested",
"timestamp": "2026-03-27T13:44:31.528254640Z",
"agent": "Sarah"
},
{
"action": "email",
"detail": "Sent job descriptions to shortlist",
"timestamp": "2026-03-27T13:44:31.529452236Z",
"agent": "Sarah"
},
{
"action": "update",
"detail": "Candidate CAND-00025 confirmed for Thursday interview",
"timestamp": "2026-03-27T13:44:31.530540919Z",
"agent": "Sarah"
},
{
"action": "handoff",
"detail": "handed off to Mike — Sarah on PTO, Mike covering Apex account",
"timestamp": "2026-03-27T13:44:31.531546876Z",
"agent": "Mike"
},
{
"action": "call",
"detail": "Followed up with CAND-00025, interview confirmed",
"timestamp": "2026-03-27T13:44:31.533529588Z",
"agent": "Mike"
},
{
"action": "shortlist",
"detail": "added CAND-00099 from candidates",
"timestamp": "2026-03-27T13:44:31.534554347Z",
"agent": "Mike"
}
],
"ingested_datasets": [],
"delta_keys": [],
"tags": []
}