Add Ollama backend + Qwen3 local inference support
- Extractor now supports two backends: ollama (local) and anthropic (cloud)
- Default is ollama with qwen3:14b (fits 16GB VRAM)
- Set num_ctx to 32768 for full-script processing
- Added --backend and --ollama-url CLI flags
- Added The Last Backup test script
- Tested: 12/12 scenes valid on dialogue_heavy, 12/13 on the_last_backup

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
87d0af0748
commit
74870f7c0d
@@ -1,3 +1,4 @@
|
|||||||
anthropic>=0.40.0
|
anthropic>=0.40.0
|
||||||
pydantic>=2.0.0
|
pydantic>=2.0.0
|
||||||
python-dotenv>=1.0.0
|
python-dotenv>=1.0.0
|
||||||
|
requests>=2.31.0
|
||||||
|
|||||||
24
run.py
24
run.py
@@ -14,7 +14,9 @@ def main():
|
|||||||
parser = argparse.ArgumentParser(description="AI Movie Production Pipeline — Phase 1")
|
parser = argparse.ArgumentParser(description="AI Movie Production Pipeline — Phase 1")
|
||||||
parser.add_argument("--script", type=str, help="Path to .fountain script file")
|
parser.add_argument("--script", type=str, help="Path to .fountain script file")
|
||||||
parser.add_argument("--project", type=str, help="Project name (determines output directory)")
|
parser.add_argument("--project", type=str, help="Project name (determines output directory)")
|
||||||
parser.add_argument("--model", type=str, default="claude-sonnet-4-20250514", help="Model ID")
|
parser.add_argument("--model", type=str, default="qwen3:14b", help="Model ID (default: qwen3:14b)")
|
||||||
|
parser.add_argument("--backend", type=str, default="ollama", choices=["ollama", "anthropic"], help="AI backend (default: ollama)")
|
||||||
|
parser.add_argument("--ollama-url", type=str, default="http://localhost:11434", help="Ollama server URL")
|
||||||
parser.add_argument("--scene", type=int, default=None, help="Process only this scene number")
|
parser.add_argument("--scene", type=int, default=None, help="Process only this scene number")
|
||||||
parser.add_argument("--dry-run", action="store_true", help="Validate inputs only, no AI calls")
|
parser.add_argument("--dry-run", action="store_true", help="Validate inputs only, no AI calls")
|
||||||
parser.add_argument("--force", action="store_true", help="Ignore cache, re-run even if unchanged")
|
parser.add_argument("--force", action="store_true", help="Ignore cache, re-run even if unchanged")
|
||||||
@@ -24,14 +26,14 @@ def main():
|
|||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
if args.test:
|
if args.test:
|
||||||
run_tests(args.model, args.output_dir)
|
run_tests(args.model, args.backend, args.ollama_url, args.output_dir)
|
||||||
return
|
return
|
||||||
|
|
||||||
if not args.script or not args.project:
|
if not args.script or not args.project:
|
||||||
parser.error("--script and --project are required (unless using --test)")
|
parser.error("--script and --project are required (unless using --test)")
|
||||||
|
|
||||||
api_key = os.environ.get("ANTHROPIC_API_KEY")
|
api_key = os.environ.get("ANTHROPIC_API_KEY", "")
|
||||||
if not api_key and not args.dry_run:
|
if args.backend == "anthropic" and not api_key and not args.dry_run:
|
||||||
print("ERROR: ANTHROPIC_API_KEY not set. Set it in .env or environment.")
|
print("ERROR: ANTHROPIC_API_KEY not set. Set it in .env or environment.")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
@@ -44,8 +46,10 @@ def main():
|
|||||||
result = run_phase1(
|
result = run_phase1(
|
||||||
script_path=args.script,
|
script_path=args.script,
|
||||||
project_name=args.project,
|
project_name=args.project,
|
||||||
api_key=api_key or "",
|
api_key=api_key,
|
||||||
model=args.model,
|
model=args.model,
|
||||||
|
backend=args.backend,
|
||||||
|
ollama_url=args.ollama_url,
|
||||||
output_dir=args.output_dir,
|
output_dir=args.output_dir,
|
||||||
scene_filter=args.scene,
|
scene_filter=args.scene,
|
||||||
dry_run=args.dry_run,
|
dry_run=args.dry_run,
|
||||||
@@ -60,11 +64,11 @@ def main():
|
|||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
|
|
||||||
def run_tests(model: str, output_dir: str):
|
def run_tests(model: str, backend: str, ollama_url: str, output_dir: str):
|
||||||
"""Run test suite against all scripts in test_scripts/."""
|
"""Run test suite against all scripts in test_scripts/."""
|
||||||
api_key = os.environ.get("ANTHROPIC_API_KEY")
|
api_key = os.environ.get("ANTHROPIC_API_KEY", "")
|
||||||
if not api_key:
|
if backend == "anthropic" and not api_key:
|
||||||
print("ERROR: ANTHROPIC_API_KEY required for tests")
|
print("ERROR: ANTHROPIC_API_KEY required for anthropic backend tests")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
test_dir = os.path.join(os.path.dirname(__file__), "test_scripts")
|
test_dir = os.path.join(os.path.dirname(__file__), "test_scripts")
|
||||||
@@ -93,6 +97,8 @@ def run_tests(model: str, output_dir: str):
|
|||||||
project_name=project_name,
|
project_name=project_name,
|
||||||
api_key=api_key,
|
api_key=api_key,
|
||||||
model=model,
|
model=model,
|
||||||
|
backend=backend,
|
||||||
|
ollama_url=ollama_url,
|
||||||
output_dir=output_dir,
|
output_dir=output_dir,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -34,8 +34,10 @@ class PipelineResult:
|
|||||||
def run_phase1(
|
def run_phase1(
|
||||||
script_path: str,
|
script_path: str,
|
||||||
project_name: str,
|
project_name: str,
|
||||||
api_key: str,
|
api_key: str = "",
|
||||||
model: str = "claude-sonnet-4-20250514",
|
model: str = "qwen3:14b",
|
||||||
|
backend: str = "ollama",
|
||||||
|
ollama_url: str = "http://localhost:11434",
|
||||||
output_dir: str = "output",
|
output_dir: str = "output",
|
||||||
scene_filter: int | None = None,
|
scene_filter: int | None = None,
|
||||||
dry_run: bool = False,
|
dry_run: bool = False,
|
||||||
@@ -46,8 +48,10 @@ def run_phase1(
|
|||||||
Args:
|
Args:
|
||||||
script_path: Path to .fountain file.
|
script_path: Path to .fountain file.
|
||||||
project_name: Project name for output directory.
|
project_name: Project name for output directory.
|
||||||
api_key: Anthropic API key.
|
api_key: API key (required for anthropic backend).
|
||||||
model: Model ID.
|
model: Model ID.
|
||||||
|
backend: "ollama" or "anthropic".
|
||||||
|
ollama_url: Ollama server URL.
|
||||||
output_dir: Base output directory.
|
output_dir: Base output directory.
|
||||||
scene_filter: If set, only process this scene number in Layer 2.
|
scene_filter: If set, only process this scene number in Layer 2.
|
||||||
dry_run: If True, validate inputs only — no AI calls.
|
dry_run: If True, validate inputs only — no AI calls.
|
||||||
@@ -108,7 +112,7 @@ def run_phase1(
|
|||||||
for chunk_idx, chunk in enumerate(chunks):
|
for chunk_idx, chunk in enumerate(chunks):
|
||||||
# Extract scenes from this chunk
|
# Extract scenes from this chunk
|
||||||
def do_extract(data):
|
def do_extract(data):
|
||||||
return extract_scenes(data, contract_path, api_key, model)
|
return extract_scenes(data, contract_path, api_key, model, backend, ollama_url)
|
||||||
|
|
||||||
run_id = logger.start("L2", scene_id=chunk_idx)
|
run_id = logger.start("L2", scene_id=chunk_idx)
|
||||||
chunk_input_hash = f"sha256:{hashlib.sha256(json.dumps([e.model_dump() for e in chunk.elements]).encode()).hexdigest()}"
|
chunk_input_hash = f"sha256:{hashlib.sha256(json.dumps([e.model_dump() for e in chunk.elements]).encode()).hexdigest()}"
|
||||||
|
|||||||
@@ -1,8 +1,15 @@
|
|||||||
"""Layer 2 AI extractor — sends normalized script to Claude, receives scene JSON."""
|
"""Layer 2 AI extractor — sends normalized script to LLM, receives scene JSON.
|
||||||
|
|
||||||
|
Supports two backends:
|
||||||
|
- Ollama (local, via OpenAI-compatible API)
|
||||||
|
- Anthropic (cloud, via Anthropic SDK)
|
||||||
|
|
||||||
|
Backend is selected by the `backend` parameter or auto-detected from model name.
|
||||||
|
"""
|
||||||
|
|
||||||
import json
|
import json
|
||||||
|
import requests
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from anthropic import Anthropic
|
|
||||||
from src.schemas.normalized_script import NormalizedScript
|
from src.schemas.normalized_script import NormalizedScript
|
||||||
|
|
||||||
|
|
||||||
@@ -19,16 +26,20 @@ class ExtractionError(Exception):
|
|||||||
def extract_scenes(
|
def extract_scenes(
|
||||||
script: NormalizedScript,
|
script: NormalizedScript,
|
||||||
contract_path: str,
|
contract_path: str,
|
||||||
api_key: str,
|
api_key: str = "",
|
||||||
model: str = "claude-sonnet-4-20250514",
|
model: str = "qwen3:14b",
|
||||||
|
backend: str = "ollama",
|
||||||
|
ollama_url: str = "http://localhost:11434",
|
||||||
) -> ExtractionResult:
|
) -> ExtractionResult:
|
||||||
"""Extract structured scene data from a normalized script using AI.
|
"""Extract structured scene data from a normalized script using AI.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
script: Validated NormalizedScript from Layer 1.
|
script: Validated NormalizedScript from Layer 1.
|
||||||
contract_path: Path to the prompt contract JSON file.
|
contract_path: Path to the prompt contract JSON file.
|
||||||
api_key: Anthropic API key.
|
api_key: API key (required for anthropic backend, ignored for ollama).
|
||||||
model: Model ID to use.
|
model: Model ID.
|
||||||
|
backend: "ollama" or "anthropic".
|
||||||
|
ollama_url: Ollama server URL (default localhost:11434).
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
ExtractionResult with raw scene dicts and token usage.
|
ExtractionResult with raw scene dicts and token usage.
|
||||||
@@ -46,32 +57,31 @@ def extract_scenes(
|
|||||||
# Render user prompt
|
# Render user prompt
|
||||||
user_prompt = contract["user_prompt_template"].replace("{{script_text}}", script_text)
|
user_prompt = contract["user_prompt_template"].replace("{{script_text}}", script_text)
|
||||||
|
|
||||||
# Call Claude API
|
if backend == "ollama":
|
||||||
client = Anthropic(api_key=api_key)
|
response_text, token_usage = _call_ollama(
|
||||||
response = client.messages.create(
|
model, contract["system_prompt"], user_prompt,
|
||||||
model=model,
|
contract["max_output_tokens"], ollama_url,
|
||||||
max_tokens=contract["max_output_tokens"],
|
|
||||||
temperature=0,
|
|
||||||
system=contract["system_prompt"],
|
|
||||||
messages=[{"role": "user", "content": user_prompt}],
|
|
||||||
)
|
)
|
||||||
|
elif backend == "anthropic":
|
||||||
# Extract text content
|
response_text, token_usage = _call_anthropic(
|
||||||
response_text = response.content[0].text
|
model, contract["system_prompt"], user_prompt,
|
||||||
|
contract["max_output_tokens"], api_key,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ExtractionError(f"Unknown backend: {backend}")
|
||||||
|
|
||||||
# Parse JSON
|
# Parse JSON
|
||||||
try:
|
try:
|
||||||
parsed = json.loads(response_text)
|
parsed = json.loads(response_text)
|
||||||
except json.JSONDecodeError as e:
|
except json.JSONDecodeError as e:
|
||||||
# Try to extract JSON from response if wrapped in markdown
|
|
||||||
cleaned = _extract_json(response_text)
|
cleaned = _extract_json(response_text)
|
||||||
if cleaned:
|
if cleaned:
|
||||||
try:
|
try:
|
||||||
parsed = json.loads(cleaned)
|
parsed = json.loads(cleaned)
|
||||||
except json.JSONDecodeError:
|
except json.JSONDecodeError:
|
||||||
raise ExtractionError(f"AI response is not valid JSON: {e}") from e
|
raise ExtractionError(f"AI response is not valid JSON: {e}\nResponse:\n{response_text[:500]}") from e
|
||||||
else:
|
else:
|
||||||
raise ExtractionError(f"AI response is not valid JSON: {e}") from e
|
raise ExtractionError(f"AI response is not valid JSON: {e}\nResponse:\n{response_text[:500]}") from e
|
||||||
|
|
||||||
# Extract scenes array
|
# Extract scenes array
|
||||||
if isinstance(parsed, dict) and "scenes" in parsed:
|
if isinstance(parsed, dict) and "scenes" in parsed:
|
||||||
@@ -84,12 +94,73 @@ def extract_scenes(
|
|||||||
if not isinstance(scenes, list):
|
if not isinstance(scenes, list):
|
||||||
raise ExtractionError(f"'scenes' is not a list: {type(scenes)}")
|
raise ExtractionError(f"'scenes' is not a list: {type(scenes)}")
|
||||||
|
|
||||||
|
return ExtractionResult(raw_scenes=scenes, token_usage=token_usage)
|
||||||
|
|
||||||
|
|
||||||
|
def _call_ollama(
|
||||||
|
model: str, system_prompt: str, user_prompt: str,
|
||||||
|
max_tokens: int, ollama_url: str,
|
||||||
|
) -> tuple[str, dict]:
|
||||||
|
"""Call Ollama's chat API."""
|
||||||
|
payload = {
|
||||||
|
"model": model,
|
||||||
|
"messages": [
|
||||||
|
{"role": "system", "content": system_prompt},
|
||||||
|
{"role": "user", "content": user_prompt},
|
||||||
|
],
|
||||||
|
"stream": False,
|
||||||
|
"options": {
|
||||||
|
"temperature": 0,
|
||||||
|
"num_predict": max_tokens,
|
||||||
|
"num_ctx": 32768,
|
||||||
|
},
|
||||||
|
"format": "json",
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
resp = requests.post(f"{ollama_url}/api/chat", json=payload, timeout=600)
|
||||||
|
resp.raise_for_status()
|
||||||
|
except requests.RequestException as e:
|
||||||
|
raise ExtractionError(f"Ollama API call failed: {e}") from e
|
||||||
|
|
||||||
|
data = resp.json()
|
||||||
|
response_text = data.get("message", {}).get("content", "")
|
||||||
|
|
||||||
|
# Ollama provides token counts
|
||||||
|
token_usage = {
|
||||||
|
"input": data.get("prompt_eval_count", 0),
|
||||||
|
"output": data.get("eval_count", 0),
|
||||||
|
}
|
||||||
|
|
||||||
|
if not response_text:
|
||||||
|
raise ExtractionError("Ollama returned empty response")
|
||||||
|
|
||||||
|
return response_text, token_usage
|
||||||
|
|
||||||
|
|
||||||
|
def _call_anthropic(
|
||||||
|
model: str, system_prompt: str, user_prompt: str,
|
||||||
|
max_tokens: int, api_key: str,
|
||||||
|
) -> tuple[str, dict]:
|
||||||
|
"""Call Anthropic's messages API."""
|
||||||
|
from anthropic import Anthropic
|
||||||
|
|
||||||
|
client = Anthropic(api_key=api_key)
|
||||||
|
response = client.messages.create(
|
||||||
|
model=model,
|
||||||
|
max_tokens=max_tokens,
|
||||||
|
temperature=0,
|
||||||
|
system=system_prompt,
|
||||||
|
messages=[{"role": "user", "content": user_prompt}],
|
||||||
|
)
|
||||||
|
|
||||||
|
response_text = response.content[0].text
|
||||||
token_usage = {
|
token_usage = {
|
||||||
"input": response.usage.input_tokens,
|
"input": response.usage.input_tokens,
|
||||||
"output": response.usage.output_tokens,
|
"output": response.usage.output_tokens,
|
||||||
}
|
}
|
||||||
|
|
||||||
return ExtractionResult(raw_scenes=scenes, token_usage=token_usage)
|
return response_text, token_usage
|
||||||
|
|
||||||
|
|
||||||
def _elements_to_text(script: NormalizedScript) -> str:
|
def _elements_to_text(script: NormalizedScript) -> str:
|
||||||
|
|||||||
431
test_scripts/the_last_backup.fountain
Normal file
431
test_scripts/the_last_backup.fountain
Normal file
@@ -0,0 +1,431 @@
|
|||||||
|
Title: The Last Backup
|
||||||
|
Credit: Written for pipeline testing
|
||||||
|
Author: OpenAI
|
||||||
|
Draft date: 2026-04-06
|
||||||
|
|
||||||
|
INT. SERVER ROOM - NIGHT
|
||||||
|
|
||||||
|
Rows of aging servers hum in blue light. A red STATUS LED blinks on the far rack.
|
||||||
|
|
||||||
|
MARA REYES, 38, sharp, exhausted, still in work clothes, stands over an open terminal. She grips a paper notebook filled with handwritten commands.
|
||||||
|
|
||||||
|
On the screen: BACKUP FAILED.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
No. No, no, no.
|
||||||
|
|
||||||
|
She types fast, reruns the job, watches.
|
||||||
|
|
||||||
|
The screen flashes the same error.
|
||||||
|
|
||||||
|
A metal KEY on a red tag sits beside the keyboard. A half-empty coffee cup trembles from the vibration of the cooling fans.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Come on. Just hold together one more night.
|
||||||
|
|
||||||
|
Her phone buzzes. Caller ID: ELI.
|
||||||
|
|
||||||
|
She ignores it.
|
||||||
|
|
||||||
|
INT. APARTMENT KITCHEN - NIGHT
|
||||||
|
|
||||||
|
A small apartment. Functional, cluttered, dim.
|
||||||
|
|
||||||
|
ELI REYES, 16, hoodie, anxious energy, stands at the counter eating cereal from the box. His phone is on speaker.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
Mom?
|
||||||
|
|
||||||
|
INTERCUT PHONE CALL
|
||||||
|
|
||||||
|
MARA paces in the server room, still staring at the terminal.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I'm here.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
You said you'd be home before ten.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I know.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
You always say that when something's on fire.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Nothing's on fire.
|
||||||
|
|
||||||
|
A beat. A LOUD POP from the server room.
|
||||||
|
|
||||||
|
MARA closes her eyes.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Something may be overheating.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
That's worse.
|
||||||
|
|
||||||
|
INT. SERVER ROOM - LATER
|
||||||
|
|
||||||
|
The STATUS LED is now solid red.
|
||||||
|
|
||||||
|
MARA has removed her blazer. Sleeves rolled. Coffee gone cold.
|
||||||
|
|
||||||
|
She flips open the notebook. A page is marked: "MANUAL FAILOVER - LAST RESORT."
|
||||||
|
|
||||||
|
At the rack, she inserts the red-tagged KEY into a locked panel.
|
||||||
|
|
||||||
|
Inside: one small drive bay. Empty.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
You have got to be kidding me.
|
||||||
|
|
||||||
|
Her phone buzzes again. This time: VOICEMAIL RECEIVED.
|
||||||
|
|
||||||
|
She doesn't listen.
|
||||||
|
|
||||||
|
EXT. PARKING LOT - NIGHT
|
||||||
|
|
||||||
|
Rain slicks the pavement outside the low industrial building.
|
||||||
|
|
||||||
|
JONAH VALE, 40s, maintenance contractor, steps out of an old pickup truck carrying a dented toolbox and a flashlight.
|
||||||
|
|
||||||
|
He looks up at the building like it insulted his family.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Every emergency job is just somebody else's procrastination with weather.
|
||||||
|
|
||||||
|
He heads inside.
|
||||||
|
|
||||||
|
INT. SERVER ROOM - NIGHT
|
||||||
|
|
||||||
|
JONAH kneels by an exposed side panel while MARA hovers nearby.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
This system should've been retired five years ago.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
It was supposed to be replaced last quarter.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
And yet here we are, praying to obsolete metal.
|
||||||
|
|
||||||
|
He shines the flashlight into the rack.
|
||||||
|
|
||||||
|
JONAH (CONT'D)
|
||||||
|
You got a spare drive?
|
||||||
|
|
||||||
|
MARA says nothing.
|
||||||
|
|
||||||
|
JONAH looks at the empty bay. Then at her.
|
||||||
|
|
||||||
|
JONAH (CONT'D)
|
||||||
|
That's a no.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I had one.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Past tense is doing a lot of work there.
|
||||||
|
|
||||||
|
He stands, wipes his hands.
|
||||||
|
|
||||||
|
JONAH (CONT'D)
|
||||||
|
Best case, we stabilize it long enough to image what's left.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Worst case?
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
You already know the worst case or you wouldn't be shaking.
|
||||||
|
|
||||||
|
MARA looks down. She is shaking.
|
||||||
|
|
||||||
|
INT. APARTMENT KITCHEN - SAME NIGHT
|
||||||
|
|
||||||
|
ELI sits at the table now. The cereal is soggy. The overhead light flickers.
|
||||||
|
|
||||||
|
He opens his laptop. On screen: a school form titled EMERGENCY CONTACT UPDATE.
|
||||||
|
|
||||||
|
The cursor blinks next to "SECONDARY CONTACT."
|
||||||
|
|
||||||
|
He types: JONAH VALE
|
||||||
|
Then deletes it.
|
||||||
|
|
||||||
|
He types: NONE
|
||||||
|
Then stops.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
(to himself)
|
||||||
|
Great.
|
||||||
|
|
||||||
|
He closes the laptop.
|
||||||
|
|
||||||
|
INT. SERVER ROOM - LATER
|
||||||
|
|
||||||
|
A portable WORK LIGHT now casts harsh white shadows.
|
||||||
|
|
||||||
|
JONAH has rigged a temporary cooling fan with zip ties. MARA sits at the terminal.
|
||||||
|
|
||||||
|
The system begins a recovery scan.
|
||||||
|
|
||||||
|
ON SCREEN: 12%... 13%... 14%
|
||||||
|
|
||||||
|
MARA almost doesn't breathe.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Don't look at it like that.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Like what?
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Like fear improves machinery.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
It improves people.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Debatable.
|
||||||
|
|
||||||
|
Her phone buzzes again. She finally listens to the voicemail.
|
||||||
|
|
||||||
|
ELI (V.O.)
|
||||||
|
Hey. Sorry. I know you're working. Just... if you can call when you get a second, call, okay?
|
||||||
|
|
||||||
|
MARA stares at nothing.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I missed his concert.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Tonight?
|
||||||
|
|
||||||
|
MARA nods.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
That explains the face.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
What face?
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
The one that says the server isn't the only thing failing.
|
||||||
|
|
||||||
|
A long beat.
|
||||||
|
|
||||||
|
ON SCREEN: 33%
|
||||||
|
|
||||||
|
INT. APARTMENT BEDROOM - NIGHT
|
||||||
|
|
||||||
|
ELI lies on his bed fully dressed, headphones on, staring at the ceiling.
|
||||||
|
|
||||||
|
A trophy sits on the dresser beside a printed concert program.
|
||||||
|
|
||||||
|
He picks up the program, folds it once, then again.
|
||||||
|
|
||||||
|
His room is neat in the way control freaks keep it neat when other things aren't.
|
||||||
|
|
||||||
|
His phone lights up: MOM CALLING.
|
||||||
|
|
||||||
|
He lets it ring.
|
||||||
|
|
||||||
|
INT. SERVER ROOM - NIGHT
|
||||||
|
|
||||||
|
ON SCREEN: 67%
|
||||||
|
|
||||||
|
MARA leaves a voicemail.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Eli. I know you're awake. I know you don't want to hear from me right now, but I need you to hear this part. I am sorry. Not work-sorry. Not almost-sorry. Just sorry.
|
||||||
|
|
||||||
|
JONAH pretends not to listen.
|
||||||
|
|
||||||
|
MARA (CONT'D)
|
||||||
|
When this is done, I'm coming home.
|
||||||
|
|
||||||
|
She hangs up.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Good voicemail.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
You grading me now?
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
No. If I was grading you, I'd mention you still haven't asked the important question.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Which is?
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
If the recovered data has somewhere to go.
|
||||||
|
|
||||||
|
Silence.
|
||||||
|
|
||||||
|
MARA turns slowly toward the empty drive bay.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Right.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Please tell me you have external storage.
|
||||||
|
|
||||||
|
MARA reaches into her bag and pulls out a rugged BLACK PORTABLE SSD.
|
||||||
|
|
||||||
|
JONAH exhales.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
See? Miracles. Tiny, overpriced miracles.
|
||||||
|
|
||||||
|
INT. SERVER ROOM - LATER
|
||||||
|
|
||||||
|
The BLACK PORTABLE SSD is connected.
|
||||||
|
|
||||||
|
ON SCREEN: RECOVERY COMPLETE - PARTIAL IMAGE AVAILABLE.
|
||||||
|
|
||||||
|
MARA closes her eyes in relief.
|
||||||
|
|
||||||
|
Then another message appears.
|
||||||
|
|
||||||
|
CORRUPTION DETECTED IN ARCHIVE SEGMENT 03.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Of course.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
How bad?
|
||||||
|
|
||||||
|
MARA scans the report.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Not core infrastructure. Historical footage. Client archive.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Can it be rebuilt?
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Some of it.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Then tonight is not the apocalypse. Just a smaller religion.
|
||||||
|
|
||||||
|
MARA laughs despite herself. First time all night.
|
||||||
|
|
||||||
|
INT. APARTMENT KITCHEN - PRE-DAWN
|
||||||
|
|
||||||
|
Gray light leaks through the blinds.
|
||||||
|
|
||||||
|
ELI sits at the table again, half asleep. The folded concert program is beside him.
|
||||||
|
|
||||||
|
The front door unlocks.
|
||||||
|
|
||||||
|
MARA steps inside carrying her bag, soaked from the rain, wrecked but upright.
|
||||||
|
|
||||||
|
A long silence.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I saved most of it.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
Congratulations.
|
||||||
|
|
||||||
|
MARA sets the bag down.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I know that's not the point.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
No, it kind of never is.
|
||||||
|
|
||||||
|
She nods. Takes the hit.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I brought breakfast.
|
||||||
|
|
||||||
|
She lifts a paper bag. It's crushed and slightly wet.
|
||||||
|
|
||||||
|
ELI almost smiles.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
That bag has been through combat.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
So have I.
|
||||||
|
|
||||||
|
Another silence. Softer now.
|
||||||
|
|
||||||
|
ELI notices the BLACK PORTABLE SSD sticking out of her bag.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
Is that the thing that kept you there?
|
||||||
|
|
||||||
|
MARA
|
||||||
|
Part of it.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
You always say it's temporary.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
I know.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
And is it?
|
||||||
|
|
||||||
|
MARA looks at him. This time she answers carefully.
|
||||||
|
|
||||||
|
MARA
|
||||||
|
No. Not if I keep pretending it fixes itself.
|
||||||
|
|
||||||
|
He studies her, deciding whether to believe it.
|
||||||
|
|
||||||
|
ELI
|
||||||
|
Okay.
|
||||||
|
|
||||||
|
Not forgiveness. Not yet. But not nothing.
|
||||||
|
|
||||||
|
INT. SERVER ROOM - MORNING
|
||||||
|
|
||||||
|
Golden morning light cuts through a high window.
|
||||||
|
|
||||||
|
The red STATUS LED is off. The temporary cooling rig still hangs in place, ugly and effective.
|
||||||
|
|
||||||
|
A printed sign is taped to the rack:
|
||||||
|
|
||||||
|
OUT OF SERVICE - DO NOT RELY ON LEGACY NODE
|
||||||
|
|
||||||
|
Below it, the red-tagged KEY hangs from a hook.
|
||||||
|
|
||||||
|
The BLACK PORTABLE SSD is gone.
|
||||||
|
|
||||||
|
JONAH enters alone with coffee. He reads the sign.
|
||||||
|
|
||||||
|
JONAH
|
||||||
|
Look at that. Growth.
|
||||||
|
|
||||||
|
He places a new replacement drive on the shelf below the empty bay.
|
||||||
|
|
||||||
|
JONAH (CONT'D)
|
||||||
|
Now you show up.
|
||||||
|
|
||||||
|
He exits.
|
||||||
|
|
||||||
|
EXT. BUILDING ROOF - MORNING
|
||||||
|
|
||||||
|
MARA stands alone on the roof with her phone, city waking up around her.
|
||||||
|
|
||||||
|
She opens a new note and types:
|
||||||
|
|
||||||
|
1. Replace legacy node
|
||||||
|
2. Build real backup path
|
||||||
|
3. Go to Eli's next concert
|
||||||
|
|
||||||
|
She looks at the list.
|
||||||
|
|
||||||
|
Then adds:
|
||||||
|
|
||||||
|
4. Leave work before it becomes an emergency
|
||||||
|
|
||||||
|
She saves it.
|
||||||
|
|
||||||
|
FADE OUT.
|
||||||
Loading…
x
Reference in New Issue
Block a user