diff --git a/.devspark/VERSION b/.devspark/VERSION index b9443ccb..814cae45 100644 --- a/.devspark/VERSION +++ b/.devspark/VERSION @@ -1,4 +1,4 @@ -version: 2.1.0 -installed: 2026-04-30 +version: 2.2.0 +installed: 2026-05-01 method: claude-quickstart migrated-from: legacy-specify diff --git a/.devspark/defaults/commands/devspark.address-pr-review.md b/.devspark/defaults/commands/devspark.address-pr-review.md index b28fe9f7..f5e208bf 100644 --- a/.devspark/defaults/commands/devspark.address-pr-review.md +++ b/.devspark/defaults/commands/devspark.address-pr-review.md @@ -4,6 +4,9 @@ handoffs: - label: Re-Review Updated PR agent: devspark.pr-review prompt: Run /devspark.pr-review UPDATE for this PR after fixes are committed +scripts: + sh: pwsh -File .devspark/scripts/powershell/address-pr-review.ps1 -PrId $ARGUMENTS -Json + ps: .devspark/scripts/powershell/address-pr-review.ps1 -PrId $ARGUMENTS -Json --- ## User Input @@ -33,9 +36,9 @@ This command is the **author-side companion** to `/devspark.pr-review`. It helps ### Phase 0 — Load context -> **Script Resolution**: Before running `.devspark/scripts/powershell/address-pr-review.ps1 -PrId $ARGUMENTS -Json`, apply the 2-tier override check for PowerShell only — if `.documentation/scripts/powershell/address-pr-review.ps1` exists on disk, run that file instead, preserving all arguments. Team override in `.documentation/scripts/powershell/` takes priority over `.devspark/scripts/powershell/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check for PowerShell only — if `.documentation/scripts/powershell/address-pr-review.ps1` exists on disk, run that file instead, preserving all arguments. Team override in `.documentation/scripts/powershell/` takes priority over `.devspark/scripts/powershell/`. -1. Run `.devspark/scripts/powershell/address-pr-review.ps1 -PrId $ARGUMENTS -Json` with `-PrId {PR_ID} -Json`. +1. Run `{SCRIPT}` with `-PrId {PR_ID} -Json`. 2. 
Fail fast if `/.documentation/specs/pr-review/pr-{PR_ID}.md` is missing. 3. Parse open findings from checklist lines matching: - `- [ ] **C-##**` diff --git a/.devspark/defaults/commands/devspark.analyze.md b/.devspark/defaults/commands/devspark.analyze.md index b97cdf32..a8d4817a 100644 --- a/.devspark/defaults/commands/devspark.analyze.md +++ b/.devspark/defaults/commands/devspark.analyze.md @@ -7,6 +7,9 @@ handoffs: - label: Revise Plan agent: devspark.plan prompt: Revise plan to address analysis findings +scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks --- ## User Input @@ -35,9 +38,9 @@ Read the YAML frontmatter in `spec.md` before analyzing. Treat `classification`, ### 1. Initialize Analysis Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -Run `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks` once from repo root and parse JSON for FEATURE_DIR and AVAILABLE_DOCS. Derive absolute paths: +Run `{SCRIPT}` once from repo root and parse JSON for FEATURE_DIR and AVAILABLE_DOCS. 
Derive absolute paths: - SPEC = FEATURE_DIR/spec.md - PLAN = FEATURE_DIR/plan.md @@ -211,7 +214,7 @@ After producing the report: ## Context -$ARGUMENTS +{ARGS} ## Shared Review Resolution Contract Output diff --git a/.devspark/defaults/commands/devspark.archive.md b/.devspark/defaults/commands/devspark.archive.md index a39ec2db..00583221 100644 --- a/.devspark/defaults/commands/devspark.archive.md +++ b/.devspark/defaults/commands/devspark.archive.md @@ -7,6 +7,9 @@ handoffs: - label: Run Documentation Audit agent: devspark.site-audit prompt: Audit documentation quality and stale references before harvest +scripts: + sh: .devspark/scripts/bash/harvest.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/harvest.ps1 $ARGUMENTS -Json --- ## User Input @@ -36,9 +39,9 @@ Treat every invocation of `/devspark.archive` as an invocation of `/devspark.har ### 1. Gather Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/harvest.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -Run `.devspark/scripts/powershell/harvest.ps1 $ARGUMENTS -Json` from the repo root. Parse the JSON output: +Run `{SCRIPT}` from the repo root. 
Parse the JSON output: - `REPO_ROOT` — absolute path to the repository root - `ARCHIVE_DIR` — target folder for today's archive (e.g., `.archive/2026-03-07`) diff --git a/.devspark/defaults/commands/devspark.checklist.md b/.devspark/defaults/commands/devspark.checklist.md index 2f244caa..50cc1d3e 100644 --- a/.devspark/defaults/commands/devspark.checklist.md +++ b/.devspark/defaults/commands/devspark.checklist.md @@ -7,6 +7,9 @@ handoffs: - label: Run Analysis agent: devspark.analyze prompt: Analyze spec consistency after checklist review +scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json --- ## Overview: "Unit Tests for English" @@ -42,9 +45,9 @@ You **MUST** consider the user input before proceeding (if not empty). **Multi-app support**: If this repository uses multi-app mode (`.documentation/devspark.json` exists with `mode: "multi-app"`), check for `--app ` in the user input to scope this workflow to a specific application. When app context is provided, resolve artifacts from `{app.path}/.documentation/` instead of the repository root `.documentation/`. Print the resolved scope (app name, doc root) at the start of output. -> **Script Resolution**: Before running `.devspark/scripts/powershell/check-prerequisites.ps1 -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -1. 
**Setup**: Run `.devspark/scripts/powershell/check-prerequisites.ps1 -Json` from repo root and parse JSON for FEATURE_DIR and AVAILABLE_DOCS list. +1. **Setup**: Run `{SCRIPT}` from repo root and parse JSON for FEATURE_DIR and AVAILABLE_DOCS list. - All file paths must be absolute. - For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). - If `/.documentation/memory/constitution.md` exists, load it. Use constitution principles to inform checklist categories (e.g., if the constitution mandates accessibility, include accessibility items). diff --git a/.devspark/defaults/commands/devspark.clarify.md b/.devspark/defaults/commands/devspark.clarify.md index a4783ad7..3a8f3ad3 100644 --- a/.devspark/defaults/commands/devspark.clarify.md +++ b/.devspark/defaults/commands/devspark.clarify.md @@ -4,6 +4,9 @@ handoffs: - label: Build Technical Plan agent: devspark.plan prompt: Create a plan for the spec. I am building with... +scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json --paths-only + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json -PathsOnly --- ## User Input @@ -24,9 +27,9 @@ Note: This clarification workflow is expected to run (and be completed) BEFORE i Execution steps: -> **Script Resolution**: Before running `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -PathsOnly`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. 
Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -1. Run `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -PathsOnly` from repo root **once** (combined `--json --paths-only` mode / `-Json -PathsOnly`). Parse minimal JSON payload fields: +1. Run `{SCRIPT}` from repo root **once** (combined `--json --paths-only` mode / `-Json -PathsOnly`). Parse minimal JSON payload fields: - `FEATURE_DIR` - `FEATURE_SPEC` - (Optionally capture `IMPL_PLAN`, `TASKS` for future chained flows.) @@ -189,7 +192,7 @@ Behavior rules: - If no questions asked due to full coverage, output a compact coverage summary (all categories Clear) then suggest advancing. - If quota reached with unresolved high-impact categories remaining, explicitly flag them under Deferred with rationale. -Context for prioritization: $ARGUMENTS +Context for prioritization: {ARGS} ## Shared Review Resolution Contract Output diff --git a/.devspark/defaults/commands/devspark.commit-audit.md b/.devspark/defaults/commands/devspark.commit-audit.md index 95d17cd8..7ef9be28 100644 --- a/.devspark/defaults/commands/devspark.commit-audit.md +++ b/.devspark/defaults/commands/devspark.commit-audit.md @@ -7,6 +7,9 @@ handoffs: - label: View Harvest Report agent: devspark.harvest prompt: Review completed specs and stale documentation before archiving +scripts: + sh: .devspark/scripts/bash/commit-audit.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/commit-audit.ps1 $ARGUMENTS -Json --- ## User Input @@ -48,9 +51,9 @@ Multiple scope flags may be combined: `--scope=velocity,hygiene` ### 1. Initialize Audit Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/commit-audit.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. 
Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -Run `.devspark/scripts/powershell/commit-audit.ps1 $ARGUMENTS -Json` and parse its JSON output. +Run `{SCRIPT}` and parse its JSON output. Expected fields include: diff --git a/.devspark/defaults/commands/devspark.create-pr.md b/.devspark/defaults/commands/devspark.create-pr.md index 056c1d06..7f74c90a 100644 --- a/.devspark/defaults/commands/devspark.create-pr.md +++ b/.devspark/defaults/commands/devspark.create-pr.md @@ -4,6 +4,9 @@ handoffs: - label: Review Pull Request agent: devspark.pr-review prompt: Review the pull request for constitution compliance +scripts: + sh: .devspark/scripts/bash/create-pr.sh --mode preflight --json $ARGUMENTS + ps: .devspark/scripts/powershell/create-pr.ps1 -Mode Preflight -Json $ARGUMENTS --- ## User Input @@ -34,9 +37,9 @@ This command is advisory. Dirty trees, missing specs, incomplete tasks, unresolv ### 1. Run Preflight Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/create-pr.ps1 -Mode Preflight -Json $ARGUMENTS`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. 
Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -Run `.devspark/scripts/powershell/create-pr.ps1 -Mode Preflight -Json $ARGUMENTS` once from the repository root and parse the returned JSON. +Run `{SCRIPT}` once from the repository root and parse the returned JSON. Use the script output as the source of truth for: diff --git a/.devspark/defaults/commands/devspark.critic.md b/.devspark/defaults/commands/devspark.critic.md index 72af34bc..b054d23a 100644 --- a/.devspark/defaults/commands/devspark.critic.md +++ b/.devspark/defaults/commands/devspark.critic.md @@ -9,6 +9,9 @@ handoffs: agent: devspark.tasks prompt: Regenerate tasks with missing operational items send: true +scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks --- ## User Input @@ -44,9 +47,9 @@ Read the YAML frontmatter in `spec.md` before evaluating risk. Treat `classifica ### 1. Initialize Analysis Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -Run `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks` once from repo root and parse JSON for FEATURE_DIR and AVAILABLE_DOCS. 
Derive absolute paths: +Run `{SCRIPT}` once from repo root and parse JSON for FEATURE_DIR and AVAILABLE_DOCS. Derive absolute paths: - SPEC = FEATURE_DIR/spec.md - PLAN = FEATURE_DIR/plan.md @@ -491,7 +494,7 @@ This command produces a **"pre-mortem"** analysis - imagining the project has fa ## Context -$ARGUMENTS +{ARGS} ## Shared Review Resolution Contract Output diff --git a/.devspark/defaults/commands/devspark.discover-constitution.md b/.devspark/defaults/commands/devspark.discover-constitution.md index bc7f651d..30d645bd 100644 --- a/.devspark/defaults/commands/devspark.discover-constitution.md +++ b/.devspark/defaults/commands/devspark.discover-constitution.md @@ -379,4 +379,4 @@ If user says "done" before any questions: ## Context -$ARGUMENTS +{ARGS} diff --git a/.devspark/defaults/commands/devspark.evolve-constitution.md b/.devspark/defaults/commands/devspark.evolve-constitution.md index 76ed6956..6e1f3912 100644 --- a/.devspark/defaults/commands/devspark.evolve-constitution.md +++ b/.devspark/defaults/commands/devspark.evolve-constitution.md @@ -7,6 +7,9 @@ handoffs: - label: Review PRs agent: devspark.pr-review prompt: Review recent PRs to gather more data +scripts: + sh: .devspark/scripts/bash/evolution-context.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/evolution-context.ps1 $ARGUMENTS -Json --- ## User Input @@ -50,9 +53,9 @@ Parse `$ARGUMENTS` for action type: ### 1. Initialize Evolution Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/evolution-context.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
+> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -Run `.devspark/scripts/powershell/evolution-context.ps1 $ARGUMENTS -Json` to gather context and parse JSON output for: +Run `{SCRIPT}` to gather context and parse JSON output for: - `CONSTITUTION_PATH`: Path to current constitution - `CONSTITUTION_EXISTS`: Whether constitution exists diff --git a/.devspark/defaults/commands/devspark.harvest.md b/.devspark/defaults/commands/devspark.harvest.md index 3adce013..5ec734c1 100644 --- a/.devspark/defaults/commands/devspark.harvest.md +++ b/.devspark/defaults/commands/devspark.harvest.md @@ -7,6 +7,9 @@ handoffs: - label: Run Documentation Audit agent: devspark.site-audit prompt: Audit documentation quality and stale references before harvest +scripts: + sh: .devspark/scripts/bash/harvest.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/harvest.ps1 $ARGUMENTS -Json --- ## User Input @@ -60,9 +63,9 @@ Multiple scopes may be combined: `--scope=specs,comments` ### 1. Initialize Harvest Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/harvest.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. 
Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -Run `.devspark/scripts/powershell/harvest.ps1 $ARGUMENTS -Json` and parse its JSON output. +Run `{SCRIPT}` and parse its JSON output. Expected fields include: diff --git a/.devspark/defaults/commands/devspark.implement.md b/.devspark/defaults/commands/devspark.implement.md index a963c658..9b6e0e7a 100644 --- a/.devspark/defaults/commands/devspark.implement.md +++ b/.devspark/defaults/commands/devspark.implement.md @@ -8,6 +8,9 @@ handoffs: - label: Run Analysis agent: devspark.analyze prompt: Analyze spec consistency after implementation +scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks --- ## User Input @@ -22,9 +25,9 @@ You **MUST** consider the user input before proceeding (if not empty). **Multi-app support**: If this repository uses multi-app mode (`.documentation/devspark.json` exists with `mode: "multi-app"`), check for `--app ` in the user input to scope this workflow to a specific application. When app context is provided, resolve artifacts from `{app.path}/.documentation/` instead of the repository root `.documentation/`. Print the resolved scope (app name, doc root) at the start of output. -> **Script Resolution**: Before running `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
+> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -1. Run `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). +1. Run `{SCRIPT}` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). 2. **Check checklists status** (if FEATURE_DIR/checklists/ exists): - Scan all checklist files in the checklists/ directory @@ -146,6 +149,11 @@ You **MUST** consider the user input before proceeding (if not empty). - Suggest next steps if implementation cannot proceed - **IMPORTANT** For completed tasks, make sure to mark the task off as [X] in the tasks file. + Additional governance expectations for create-pr/pr-review transitions: + + - Delivery status must be met (`create_pr_ready=true` in latest harness result) + - Branch sync must pass (`HEAD` not behind `origin/main`) + 10. Completion validation: - Verify all required tasks are completed - Check that implemented features match the original specification diff --git a/.devspark/defaults/commands/devspark.personalize.md b/.devspark/defaults/commands/devspark.personalize.md index ba06f325..ba0fe58a 100644 --- a/.devspark/defaults/commands/devspark.personalize.md +++ b/.devspark/defaults/commands/devspark.personalize.md @@ -1,5 +1,8 @@ --- description: Create a personalized copy of any DevSpark command prompt for the current git user. 
+scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json --- ## User Input diff --git a/.devspark/defaults/commands/devspark.plan.md b/.devspark/defaults/commands/devspark.plan.md index 33a83b07..1b36e9fb 100644 --- a/.devspark/defaults/commands/devspark.plan.md +++ b/.devspark/defaults/commands/devspark.plan.md @@ -8,6 +8,12 @@ handoffs: - label: Create Checklist agent: devspark.checklist prompt: Create a checklist for the following domain... +scripts: + sh: .devspark/scripts/bash/setup-plan.sh --json + ps: .devspark/scripts/powershell/setup-plan.ps1 -Json +agent_scripts: + sh: .devspark/scripts/bash/update-agent-context.sh __AGENT__ + ps: .devspark/scripts/powershell/update-agent-context.ps1 -AgentType __AGENT__ --- ## User Input @@ -22,9 +28,9 @@ You **MUST** consider the user input before proceeding (if not empty). **Multi-app support**: If this repository uses multi-app mode (`.documentation/devspark.json` exists with `mode: "multi-app"`), check for `--app ` in the user input to scope this workflow to a specific application. When app context is provided, resolve artifacts from `{app.path}/.documentation/` instead of the repository root `.documentation/`. Print the resolved scope (app name, doc root) at the start of output. -> **Script Resolution**: Before running `.devspark/scripts/powershell/setup-plan.ps1 -Json` or `.devspark/scripts/powershell/update-agent-context.ps1 -AgentType __AGENT__`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
+> **Script Resolution**: Before running `{SCRIPT}` or `{AGENT_SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -1. **Setup**: Run `.devspark/scripts/powershell/setup-plan.ps1 -Json` from repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). +1. **Setup**: Run `{SCRIPT}` from repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). 2. **Load context**: Read FEATURE_SPEC and `/.documentation/memory/constitution.md`. Load IMPL_PLAN template (already copied). @@ -84,7 +90,7 @@ You **MUST** consider the user input before proceeding (if not empty). - Skip if project is purely internal (build scripts, one-off tools, etc.) 3. 
**Agent context update**: - - Run `.devspark/scripts/powershell/update-agent-context.ps1 -AgentType __AGENT__` + - Run `{AGENT_SCRIPT}` - These scripts detect which AI agent is in use - Update the appropriate agent-specific context file - Add only new technology from current plan diff --git a/.devspark/defaults/commands/devspark.pr-review.md b/.devspark/defaults/commands/devspark.pr-review.md index c82a7e12..e69ce05c 100644 --- a/.devspark/defaults/commands/devspark.pr-review.md +++ b/.devspark/defaults/commands/devspark.pr-review.md @@ -4,6 +4,9 @@ handoffs: - label: View Review History agent: devspark.pr-review prompt: Show me previous PR reviews in .documentation/specs/pr-review/ +scripts: + sh: .devspark/scripts/bash/get-pr-context.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/get-pr-context.ps1 $ARGUMENTS -Json --- ## User Input @@ -37,9 +40,9 @@ Reviews are advisory. The agent must explain constitution or lifecycle issues, r ### 1. Initialize Review Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/get-pr-context.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
-Run `.devspark/scripts/powershell/get-pr-context.ps1 $ARGUMENTS -Json` to extract PR context and parse JSON output for: +Run `{SCRIPT}` to extract PR context and parse JSON output for: - `PR_CONTEXT`: PR metadata (number, title, branches, commit SHA, files, diff) - `CONSTITUTION_PATH`: Path to constitution file diff --git a/.devspark/defaults/commands/devspark.quickfix.md b/.devspark/defaults/commands/devspark.quickfix.md index 425b81d3..6db224e8 100644 --- a/.devspark/defaults/commands/devspark.quickfix.md +++ b/.devspark/defaults/commands/devspark.quickfix.md @@ -7,6 +7,9 @@ handoffs: - label: Upgrade to Full Spec agent: devspark.specify prompt: Create a full specification for this change +scripts: + sh: .devspark/scripts/bash/quickfix-context.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/quickfix-context.ps1 $ARGUMENTS -Json --- ## User Input @@ -54,9 +57,9 @@ Parse `$ARGUMENTS` for action type: ### 1. Initialize Quickfix Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/quickfix-context.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
-Run `.devspark/scripts/powershell/quickfix-context.ps1 $ARGUMENTS -Json` to gather context and parse JSON output for: +Run `{SCRIPT}` to gather context and parse JSON output for: - `REPO_ROOT`: Repository root path - `CONSTITUTION_PATH`: Path to constitution file diff --git a/.devspark/defaults/commands/devspark.release.md b/.devspark/defaults/commands/devspark.release.md index aa958c71..ab397317 100644 --- a/.devspark/defaults/commands/devspark.release.md +++ b/.devspark/defaults/commands/devspark.release.md @@ -7,6 +7,9 @@ handoffs: - label: Run Final Audit agent: devspark.site-audit prompt: Run a final site audit before release +scripts: + sh: .devspark/scripts/bash/release-context.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/release-context.ps1 $ARGUMENTS -Json --- ## User Input @@ -51,9 +54,9 @@ Parse `$ARGUMENTS` for options: ### 1. Initialize Release Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/release-context.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
-Run `.devspark/scripts/powershell/release-context.ps1 $ARGUMENTS -Json` to gather context and parse JSON output for: +Run `{SCRIPT}` to gather context and parse JSON output for: - `REPO_ROOT`: Repository root path - `SPECS_DIR`: Path to specs directory diff --git a/.devspark/defaults/commands/devspark.repo-story.md b/.devspark/defaults/commands/devspark.repo-story.md index aa1a718e..55153354 100644 --- a/.devspark/defaults/commands/devspark.repo-story.md +++ b/.devspark/defaults/commands/devspark.repo-story.md @@ -4,6 +4,9 @@ handoffs: - label: View Past Stories agent: devspark.repo-story prompt: Show me previous repo stories in .documentation/repo-story/ +scripts: + sh: .devspark/scripts/bash/repo-story-context.sh $ARGUMENTS --stdout + ps: .devspark/scripts/powershell/repo-story-context.ps1 $ARGUMENTS -Stdout --- ## User Input @@ -47,9 +50,9 @@ If no scope specified, default to `--scope=full`. ### 1. Generate History Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/repo-story-context.ps1 $ARGUMENTS -Stdout`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
-Run `.devspark/scripts/powershell/repo-story-context.ps1 $ARGUMENTS -Stdout` to produce `history.json` and parse the JSON output for these top-level sections: +Run `{SCRIPT}` to produce `history.json` and parse the JSON output for these top-level sections: - `audit_parameters` — time window, scope, anonymization settings - `repo` — repository name, remote URL, default branch diff --git a/.devspark/defaults/commands/devspark.site-audit.md b/.devspark/defaults/commands/devspark.site-audit.md index f812a4e1..76bd9100 100644 --- a/.devspark/defaults/commands/devspark.site-audit.md +++ b/.devspark/defaults/commands/devspark.site-audit.md @@ -5,6 +5,9 @@ handoffs: - label: View Audit History agent: devspark.site-audit prompt: Show me previous audit reports in .documentation/copilot/audit/ +scripts: + sh: .devspark/scripts/bash/site-audit.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/site-audit.ps1 $ARGUMENTS -Json --- ## User Input @@ -49,9 +52,9 @@ If no scope specified, default to `--scope=full`. ### 1. Initialize Audit Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/site-audit.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
-Run `.devspark/scripts/powershell/site-audit.ps1 $ARGUMENTS -Json` to gather codebase data and parse JSON output for: +Run `{SCRIPT}` to gather codebase data and parse JSON output for: - `REPO_ROOT`: Repository root path - `CONSTITUTION_PATH`: Path to constitution file - `FILES`: Categorized file listings @@ -133,7 +136,7 @@ Read the most recent `## [X.Y.Z]` entry in `CHANGELOG.md` (repo root) to get |-----------|-----------|---------| | `.devspark/VERSION` absent and legacy stamp absent | VER1 | HIGH | | Installed version < latest version | VER2 | MEDIUM | -| Agent command files reference `.documentation/` or root `memory/`, `scripts/`, `templates/`, or `specs/` paths | VER3 | HIGH | +| Agent command files reference `.specify/` or root `memory/`, `scripts/`, `templates/`, or `specs/` paths | VER3 | HIGH | | Root-level `memory/`, `scripts/`, `templates/`, or `specs/` directories exist | VER4 | HIGH | | Old `devspark.*-old.md` files in agent folder | VER5 | LOW | diff --git a/.devspark/defaults/commands/devspark.specify.md b/.devspark/defaults/commands/devspark.specify.md index 450ebae5..28a6c7a7 100644 --- a/.devspark/defaults/commands/devspark.specify.md +++ b/.devspark/defaults/commands/devspark.specify.md @@ -8,6 +8,9 @@ handoffs: agent: devspark.clarify prompt: Clarify specification requirements send: true +scripts: + sh: .devspark/scripts/bash/create-new-feature.sh --json "{ARGS}" + ps: .devspark/scripts/powershell/create-new-feature.ps1 -Json "{ARGS}" --- ## User Input @@ -43,7 +46,7 @@ This workflow MUST also validate the document against the shared specification v ## Outline -The text the user typed after `/devspark.specify` in the triggering message **is** the feature description. Assume you always have it available in this conversation even if `$ARGUMENTS` appears literally below. Do not ask the user to repeat it unless they provided an empty command. 
+The text the user typed after `/devspark.specify` in the triggering message **is** the feature description. Assume you always have it available in this conversation even if `{ARGS}` appears literally below. Do not ask the user to repeat it unless they provided an empty command. **Multi-app support**: If this repository uses multi-app mode (`.documentation/devspark.json` exists with `mode: "multi-app"`), check for `--app ` in the user input to scope this workflow to a specific application. When app context is provided, resolve artifacts from `{app.path}/.documentation/` instead of the repository root `.documentation/`. Print the resolved scope (app name, doc root) at the start of output. @@ -87,12 +90,12 @@ Given that feature description, do this: - Find the highest number N - Use N+1 for the new branch number - > **Script Resolution**: Before running `.devspark/scripts/powershell/create-new-feature.ps1 -Json "$ARGUMENTS"`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. + > **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. - d. Run the script `.devspark/scripts/powershell/create-new-feature.ps1 -Json "$ARGUMENTS"` with the calculated number and short-name: + d. 
Run the script `{SCRIPT}` with the calculated number and short-name: - Pass `--number N+1` and `--short-name "your-short-name"` along with the feature description - - Bash example: `.devspark/scripts/powershell/create-new-feature.ps1 -Json "$ARGUMENTS" --json --number 5 --short-name "user-auth" "Add user authentication"` - - PowerShell example: `.devspark/scripts/powershell/create-new-feature.ps1 -Json "$ARGUMENTS" -Json -Number 5 -ShortName "user-auth" "Add user authentication"` + - Bash example: `{SCRIPT} --json --number 5 --short-name "user-auth" "Add user authentication"` + - PowerShell example: `{SCRIPT} -Json -Number 5 -ShortName "user-auth" "Add user authentication"` **IMPORTANT**: - Check all three sources (remote branches, local branches, specs directories) to find the highest number diff --git a/.devspark/defaults/commands/devspark.tasks.md b/.devspark/defaults/commands/devspark.tasks.md index 46e30148..ef53f82a 100644 --- a/.devspark/defaults/commands/devspark.tasks.md +++ b/.devspark/defaults/commands/devspark.tasks.md @@ -9,6 +9,9 @@ handoffs: agent: devspark.implement prompt: Start the implementation in phases send: true +scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json --- ## User Input @@ -23,9 +26,9 @@ You **MUST** consider the user input before proceeding (if not empty). **Multi-app support**: If this repository uses multi-app mode (`.documentation/devspark.json` exists with `mode: "multi-app"`), check for `--app ` in the user input to scope this workflow to a specific application. When app context is provided, resolve artifacts from `{app.path}/.documentation/` instead of the repository root `.documentation/`. Print the resolved scope (app name, doc root) at the start of output. 
-> **Script Resolution**: Before running `.devspark/scripts/powershell/check-prerequisites.ps1 -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -1. **Setup**: Run `.devspark/scripts/powershell/check-prerequisites.ps1 -Json` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). +1. **Setup**: Run `{SCRIPT}` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). 2. **Load design documents**: Read from FEATURE_DIR: - **Required**: plan.md (tech stack, libraries, structure), spec.md (user stories with priorities) @@ -76,7 +79,7 @@ You **MUST** consider the user input before proceeding (if not empty). - Suggested MVP scope (typically just User Story 1) - Format validation: Confirm ALL tasks follow the checklist format (checkbox, ID, labels, file paths) -Context for task generation: $ARGUMENTS +Context for task generation: {ARGS} The tasks.md should be immediately executable - each task must be specific enough that an LLM can complete it without additional context. 
diff --git a/.devspark/defaults/commands/devspark.taskstoissues.md b/.devspark/defaults/commands/devspark.taskstoissues.md index f8262dae..d15514d7 100644 --- a/.devspark/defaults/commands/devspark.taskstoissues.md +++ b/.devspark/defaults/commands/devspark.taskstoissues.md @@ -1,6 +1,9 @@ --- description: Convert existing tasks into actionable, dependency-ordered GitHub issues for the feature based on available design artifacts. tools: ['github/github-mcp-server/issue_write'] +scripts: + sh: .devspark/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks + ps: .devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks --- ## User Input @@ -13,9 +16,9 @@ You **MUST** consider the user input before proceeding (if not empty). ## Outline -> **Script Resolution**: Before running `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. -1. Run `.devspark/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). +1. Run `{SCRIPT}` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. 
For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot"). 1. From the executed script, extract the path to **tasks**. 1. Get the Git remote by running: diff --git a/.devspark/defaults/commands/devspark.update-pr.md b/.devspark/defaults/commands/devspark.update-pr.md index 61f67ca9..78fce537 100644 --- a/.devspark/defaults/commands/devspark.update-pr.md +++ b/.devspark/defaults/commands/devspark.update-pr.md @@ -7,6 +7,9 @@ handoffs: - label: Create New PR agent: devspark.create-pr prompt: Create a new pull request for this branch +scripts: + sh: .devspark/scripts/bash/get-pr-context.sh $ARGUMENTS --json + ps: .devspark/scripts/powershell/get-pr-context.ps1 $ARGUMENTS -Json --- ## User Input @@ -45,9 +48,9 @@ This command updates an **existing** pull request description based on the curre ### 1. Initialize PR Context -> **Script Resolution**: Before running `.devspark/scripts/powershell/get-pr-context.ps1 $ARGUMENTS -Json`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. +> **Script Resolution**: Before running `{SCRIPT}`, apply the 2-tier override check — if `.documentation/scripts/powershell/` (PowerShell) or `.documentation/scripts/bash/` (Bash) exists on disk, run that file instead, preserving all arguments. Team overrides in `.documentation/scripts/` always take priority over `.devspark/scripts/`. 
-Run `.devspark/scripts/powershell/get-pr-context.ps1 $ARGUMENTS -Json` and parse its JSON output for: +Run `{SCRIPT}` and parse its JSON output for: - `PR_CONTEXT`: Current PR metadata (number, title, branches, commit SHA, files, diff) - `CONSTITUTION_PATH`: Path to constitution file diff --git a/.devspark/defaults/commands/devspark.upgrade.md b/.devspark/defaults/commands/devspark.upgrade.md index 4f70899c..50b6c349 100644 --- a/.devspark/defaults/commands/devspark.upgrade.md +++ b/.devspark/defaults/commands/devspark.upgrade.md @@ -275,7 +275,7 @@ Offer to show diffs for any changed files so the team can decide what to merge. **Legacy migration collision guidance:** -- If legacy `.documentation/`, root `scripts/`, root `templates/`, or root `specs/` content is migrated and an equivalent file already exists under `.documentation/`, keep the existing `.documentation/` file. +- If legacy `.specify/`, root `scripts/`, root `templates/`, or root `specs/` content is migrated and an equivalent file already exists under `.documentation/`, keep the existing `.documentation/` file. - Report the skipped legacy file and preserve it in the corresponding `.old/` backup for manual review. - Never silently replace active `.documentation/` overrides with legacy content during upgrade. 
diff --git a/.devspark/scripts/bash/address-pr-review.sh b/.devspark/scripts/bash/address-pr-review.sh new file mode 100644 index 00000000..6104d788 --- /dev/null +++ b/.devspark/scripts/bash/address-pr-review.sh @@ -0,0 +1,155 @@ +#!/usr/bin/env bash +# Helper script for /devspark.address-pr-review +# --pr-id: parse review file and emit open findings +# --gate: enforce staged-path isolation for code-only or review-only commits + +set -euo pipefail + +SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/common.sh" + +PR_ID="" +GATE="" +JSON_OUTPUT=false + +# --------------------------------------------------------------------------- +# Argument parsing +# --------------------------------------------------------------------------- +while [[ $# -gt 0 ]]; do + case "$1" in + --pr-id) + PR_ID="$2" + shift 2 + ;; + --gate) + GATE="$2" + shift 2 + ;; + --json) + JSON_OUTPUT=true + shift + ;; + *) + printf 'Unknown option: %s\n' "$1" >&2 + exit 1 + ;; + esac +done + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- +get_staged_paths() { + git diff --cached --name-only 2>/dev/null || true +} + +is_review_path() { + [[ "$1" =~ ^\.documentation/specs/pr-review/pr-.*\.md$ ]] +} + +write_gate_failure() { + local message="$1" + shift + printf 'DevSpark: %s\n' "$message" >&2 + if [[ $# -gt 0 ]]; then + printf 'Offending staged paths:\n' >&2 + for path in "$@"; do + printf ' - %s\n' "$path" >&2 + done + fi + exit 1 +} + +# --------------------------------------------------------------------------- +# Gate mode +# --------------------------------------------------------------------------- +if [[ -n "$GATE" ]]; then + mapfile -t STAGED < <(get_staged_paths) + + if [[ "$GATE" == "code-only" ]]; then + review_staged=() + for p in "${STAGED[@]:-}"; do + is_review_path "$p" && review_staged+=("$p") || true + done + if [[ 
${#review_staged[@]} -gt 0 ]]; then + write_gate_failure \ + "Code commit gate failed. Review files must not be staged for code-only commits." \ + "${review_staged[@]}" + fi + + elif [[ "$GATE" == "review-only" ]]; then + non_review_staged=() + for p in "${STAGED[@]:-}"; do + is_review_path "$p" || non_review_staged+=("$p") + done + if [[ ${#non_review_staged[@]} -gt 0 ]]; then + write_gate_failure \ + "Review commit gate failed. Only PR review markdown files may be staged." \ + "${non_review_staged[@]}" + fi + + else + printf 'DevSpark: Invalid --gate value "%s". Expected code-only or review-only.\n' "$GATE" >&2 + exit 1 + fi + + "$JSON_OUTPUT" && printf '{"gate":"%s","passed":true}\n' "$GATE" || printf "Gate '%s' passed.\n" "$GATE" + exit 0 +fi + +# --------------------------------------------------------------------------- +# PR findings mode +# --------------------------------------------------------------------------- +if [[ -z "$PR_ID" ]]; then + printf 'DevSpark: Provide --pr-id or --gate .\n' >&2 + exit 1 +fi + +# Strip leading # +NORMALIZED="${PR_ID#\#}" +if ! [[ "$NORMALIZED" =~ ^[0-9]+$ ]]; then + printf 'DevSpark: Invalid PR id "%s". Expected a positive integer.\n' "$PR_ID" >&2 + exit 1 +fi + +REPO_ROOT="$(get_repo_root)" +REVIEW_FILE="$REPO_ROOT/.documentation/specs/pr-review/pr-${NORMALIZED}.md" + +if [[ ! 
-f "$REVIEW_FILE" ]]; then + printf 'DevSpark: Review file not found: %s\n' "$REVIEW_FILE" >&2 + exit 1 +fi + +# Parse open findings — lines matching: - [ ] **{C|H|M|L|CON}-NN** +findings_json="[" +first=true +line_num=0 + +while IFS= read -r line; do + line_num=$((line_num + 1)) + if [[ "$line" =~ ^[[:space:]]*-[[:space:]]\[[[:space:]]\][[:space:]]+\*\*((C|H|M|L|CON)-[0-9]{2})\*\* ]]; then + finding_id="${BASH_REMATCH[1]}" + severity="${BASH_REMATCH[2]}" + escaped_line="${line//\"/\\\"}" + "$first" || findings_json+="," + findings_json+="{\"id\":\"$finding_id\",\"severity\":\"$severity\",\"line_number\":$line_num,\"line\":\"$escaped_line\"}" + first=false + fi +done < "$REVIEW_FILE" + +findings_json+="]" + +open_count=$(printf '%s' "$findings_json" | grep -o '"id"' | wc -l) + +if "$JSON_OUTPUT"; then + printf '{"pr_id":%s,"review_file":"%s","open_findings":%s,"open_count":%s}\n' \ + "$NORMALIZED" "$REVIEW_FILE" "$findings_json" "$open_count" +else + printf 'Review file: %s\n' "$REVIEW_FILE" + printf 'Open findings: %s\n' "$open_count" + while IFS= read -r line; do + if [[ "$line" =~ ^[[:space:]]*-[[:space:]]\[[[:space:]]\][[:space:]]+\*\*((C|H|M|L|CON)-[0-9]{2})\*\* ]]; then + printf -- '- %s\n' "${BASH_REMATCH[1]}" + fi + done < "$REVIEW_FILE" +fi diff --git a/.devspark/scripts/bash/archive-context.sh b/.devspark/scripts/bash/archive-context.sh new file mode 100644 index 00000000..291d6829 --- /dev/null +++ b/.devspark/scripts/bash/archive-context.sh @@ -0,0 +1,191 @@ +#!/usr/bin/env bash +# Deprecated compatibility wrapper for the legacy archive-context pre-scan. +# /devspark.archive now routes through /devspark.harvest. This wrapper calls +# harvest with docs scope and reshapes the result into the old archive-context +# contract for one migration window. 
+ +set -euo pipefail + +SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/common.sh" + +PYTHON_CMD="" +if command -v python3 >/dev/null 2>&1; then + PYTHON_CMD="python3" +elif command -v python >/dev/null 2>&1; then + PYTHON_CMD="python" +else + echo "ERROR: python3/python is required to generate JSON output." >&2 + exit 1 +fi + +JSON_MODE=false +INCLUDE_FULL_INVENTORY=false +SAMPLE_LIMIT=50 + +while [[ $# -gt 0 ]]; do + case "$1" in + --json) + JSON_MODE=true + shift + ;; + --include-full-inventory) + INCLUDE_FULL_INVENTORY=true + shift + ;; + --sample-limit=*) + SAMPLE_LIMIT="${1#--sample-limit=}" + shift + ;; + *) + shift + ;; + esac +done + +if [[ "$SAMPLE_LIMIT" -lt 1 ]]; then + SAMPLE_LIMIT=50 +fi + +REPO_ROOT=$(get_repo_root) +ARCHIVE_BASE="$REPO_ROOT/.archive" +ARCHIVE_DIR=".archive/$(date +%Y-%m-%d)" +GUIDE_PATH=".documentation/Guide.md" +CHANGELOG_PATH="CHANGELOG.md" +GUIDE_EXISTS=false +CHANGELOG_EXISTS=false +ARCHIVE_EXISTS=false +[[ -f "$REPO_ROOT/$GUIDE_PATH" ]] && GUIDE_EXISTS=true +[[ -f "$REPO_ROOT/$CHANGELOG_PATH" ]] && CHANGELOG_EXISTS=true +[[ -d "$ARCHIVE_BASE" ]] && ARCHIVE_EXISTS=true + +EFFECTIVE_SAMPLE_LIMIT="$SAMPLE_LIMIT" +if [[ "$INCLUDE_FULL_INVENTORY" == true && "$EFFECTIVE_SAMPLE_LIMIT" -lt 10000 ]]; then + EFFECTIVE_SAMPLE_LIMIT=10000 +fi + +HARVEST_JSON=$("$SCRIPT_DIR/harvest.sh" --scope=docs --json --sample-limit="$EFFECTIVE_SAMPLE_LIMIT") + +tmp_dir=$(mktemp -d) +cleanup() { + rm -rf "$tmp_dir" +} +trap cleanup EXIT + +HARVEST_JSON_FILE="$tmp_dir/harvest.json" +ARCHIVES_FILE="$tmp_dir/existing_archives.txt" + +printf '%s' "$HARVEST_JSON" > "$HARVEST_JSON_FILE" + +if [[ -d "$ARCHIVE_BASE" ]]; then + find "$ARCHIVE_BASE" -maxdepth 1 -mindepth 1 -type d 2>/dev/null \ + | sed "s|$REPO_ROOT/||" \ + | sort > "$ARCHIVES_FILE" +else + : > "$ARCHIVES_FILE" +fi + +JSON_OUTPUT=$("$PYTHON_CMD" - "$HARVEST_JSON_FILE" "$ARCHIVES_FILE" "$GUIDE_PATH" "$CHANGELOG_PATH" "$ARCHIVE_DIR" "$ARCHIVE_EXISTS" 
"$GUIDE_EXISTS" "$CHANGELOG_EXISTS" "$SAMPLE_LIMIT" "$INCLUDE_FULL_INVENTORY" <<'PY' +import json +import sys + + +def read_lines(path: str) -> list[str]: + with open(path, encoding="utf-8") as handle: + return [line.strip() for line in handle if line.strip()] + + +harvest_path, archives_path, guide_path, changelog_path, archive_dir, archive_exists, guide_exists, changelog_exists, sample_limit, include_full_inventory = sys.argv[1:] + +with open(harvest_path, encoding="utf-8") as handle: + harvest = json.load(handle) + +existing_archives = read_lines(archives_path) +sample_limit_int = int(sample_limit) +include_full_inventory_bool = include_full_inventory.lower() == "true" + + +def sample_paths(entries: list[dict]) -> list[str]: + return [entry["path"] for entry in entries[:sample_limit_int]] + + +def all_paths(entries: list[dict]) -> list[str]: + return [entry["path"] for entry in entries] + + +result = { + "REPO_ROOT": harvest["repo_root"], + "TIMESTAMP": harvest["harvest_timestamp"], + "ARCHIVE_DIR": archive_dir, + "ARCHIVE_EXISTS": archive_exists.lower() == "true", + "EXISTING_ARCHIVES": existing_archives[:sample_limit_int], + "EXISTING_ARCHIVES_COUNT": len(existing_archives), + "GUIDE_PATH": guide_path, + "GUIDE_EXISTS": guide_exists.lower() == "true", + "CHANGELOG_PATH": changelog_path, + "CHANGELOG_EXISTS": changelog_exists.lower() == "true", + "SAMPLE_LIMIT": sample_limit_int, + "INCLUDE_FULL_INVENTORY": include_full_inventory_bool, + "CANDIDATE_COUNTS": { + "drafts": len(harvest["docs"]["stale_drafts"]), + "session_docs": len(harvest["docs"]["session_notes"]), + "implementation_plans": len(harvest["docs"]["impl_plans"]), + "release_docs": len(harvest["docs"]["release_docs"]), + "quickfix_records": len(harvest["docs"]["quickfix_records"]), + "pr_reviews": len(harvest["docs"]["completed_reviews"]), + }, + "CANDIDATES": { + "drafts": sample_paths(harvest["docs"]["stale_drafts"]), + "session_docs": sample_paths(harvest["docs"]["session_notes"]), + 
"implementation_plans": sample_paths(harvest["docs"]["impl_plans"]), + "release_docs": sample_paths(harvest["docs"]["release_docs"]), + "quickfix_records": sample_paths(harvest["docs"]["quickfix_records"]), + "pr_reviews": sample_paths(harvest["docs"]["completed_reviews"]), + }, + "CURRENT_DOCS": sample_paths(harvest["docs"]["living_reference"]), + "CURRENT_DOCS_COUNT": len(harvest["docs"]["living_reference"]), + "FULL_INVENTORY": None, +} + +if include_full_inventory_bool: + result["FULL_INVENTORY"] = { + "existing_archives": existing_archives, + "candidates": { + "drafts": all_paths(harvest["docs"]["stale_drafts"]), + "session_docs": all_paths(harvest["docs"]["session_notes"]), + "implementation_plans": all_paths(harvest["docs"]["impl_plans"]), + "release_docs": all_paths(harvest["docs"]["release_docs"]), + "quickfix_records": all_paths(harvest["docs"]["quickfix_records"]), + "pr_reviews": all_paths(harvest["docs"]["completed_reviews"]), + }, + "current_docs": all_paths(harvest["docs"]["living_reference"]), + } + +print(json.dumps(result)) +PY +) + +if [[ "$JSON_MODE" == true ]]; then + printf '%s\n' "$JSON_OUTPUT" +else + echo "Archive Context" + echo "===============" + REPO_ROOT_VALUE=$("$PYTHON_CMD" -c 'import json,sys; print(json.load(sys.stdin)["REPO_ROOT"])' <<<"$JSON_OUTPUT") + ARCHIVE_DIR_VALUE=$("$PYTHON_CMD" -c 'import json,sys; data=json.load(sys.stdin); print("{} (exists: {})".format(data["ARCHIVE_DIR"], data["ARCHIVE_EXISTS"]))' <<<"$JSON_OUTPUT") + GUIDE_VALUE=$("$PYTHON_CMD" -c 'import json,sys; data=json.load(sys.stdin); print("{} (exists: {})".format(data["GUIDE_PATH"], data["GUIDE_EXISTS"]))' <<<"$JSON_OUTPUT") + CHANGELOG_VALUE=$("$PYTHON_CMD" -c 'import json,sys; data=json.load(sys.stdin); print("{} (exists: {})".format(data["CHANGELOG_PATH"], data["CHANGELOG_EXISTS"]))' <<<"$JSON_OUTPUT") + COUNTS=$("$PYTHON_CMD" -c 'import json,sys; c=json.load(sys.stdin)["CANDIDATE_COUNTS"]; print("\n".join(str(c[k]) for k in 
("drafts","session_docs","implementation_plans","release_docs","quickfix_records","pr_reviews")))' <<<"$JSON_OUTPUT") + mapfile -t COUNT_LINES <<<"$COUNTS" + echo "Repository: $REPO_ROOT_VALUE" + echo "Archive dir: $ARCHIVE_DIR_VALUE" + echo "Guide.md: $GUIDE_VALUE" + echo "CHANGELOG.md: $CHANGELOG_VALUE" + echo "" + echo "Candidates:" + echo " Drafts: ${COUNT_LINES[0]:-0}" + echo " Session docs: ${COUNT_LINES[1]:-0}" + echo " Implementation plans: ${COUNT_LINES[2]:-0}" + echo " Release docs: ${COUNT_LINES[3]:-0}" + echo " Quickfix records: ${COUNT_LINES[4]:-0}" + echo " PR reviews: ${COUNT_LINES[5]:-0}" +fi diff --git a/.devspark/scripts/bash/check-prerequisites.sh b/.devspark/scripts/bash/check-prerequisites.sh new file mode 100644 index 00000000..4afce897 --- /dev/null +++ b/.devspark/scripts/bash/check-prerequisites.sh @@ -0,0 +1,209 @@ +#!/usr/bin/env bash + +# Consolidated prerequisite checking script +# +# This script provides unified prerequisite checking for Spec-Driven Development workflow. +# It replaces the functionality previously spread across multiple scripts. +# +# Usage: ./check-prerequisites.sh [OPTIONS] +# +# OPTIONS: +# --json Output in JSON format +# --require-tasks Require tasks.md to exist (for implementation phase) +# --include-tasks Include tasks.md in AVAILABLE_DOCS list +# --paths-only Only output path variables (no validation) +# --require-delivery-status Require latest harness run to be create-pr ready +# --timeout-seconds=N Timeout value reported in diagnostics +# --help, -h Show help message +# +# OUTPUTS: +# JSON mode: {"FEATURE_DIR":"...", "AVAILABLE_DOCS":["..."]} +# Text mode: FEATURE_DIR:... \n AVAILABLE_DOCS: \n ✓/✗ file.md +# Paths only: REPO_ROOT: ... \n BRANCH: ... \n FEATURE_DIR: ... etc. 
+ +set -e + +# Verify minimum bash version (3.2+) +if [[ "${BASH_VERSINFO[0]}" -lt 3 ]] || { [[ "${BASH_VERSINFO[0]}" -eq 3 ]] && [[ "${BASH_VERSINFO[1]}" -lt 2 ]]; }; then + echo "ERROR: bash 3.2 or newer is required (found bash ${BASH_VERSION})." >&2 + echo "On macOS, install a newer bash via Homebrew: brew install bash" >&2 + exit 1 +fi + +# Parse command line arguments +JSON_MODE=false +REQUIRE_TASKS=false +INCLUDE_TASKS=false +PATHS_ONLY=false +REQUIRE_DELIVERY_STATUS=false +TIMEOUT_SECONDS=300 + +for arg in "$@"; do + case "$arg" in + --json) + JSON_MODE=true + ;; + --require-tasks) + REQUIRE_TASKS=true + ;; + --include-tasks) + INCLUDE_TASKS=true + ;; + --paths-only) + PATHS_ONLY=true + ;; + --require-delivery-status) + REQUIRE_DELIVERY_STATUS=true + ;; + --timeout-seconds=*) + TIMEOUT_SECONDS="${arg#*=}" + ;; + --help|-h) + cat << 'EOF' +Usage: check-prerequisites.sh [OPTIONS] + +Consolidated prerequisite checking for Spec-Driven Development workflow. + +OPTIONS: + --json Output in JSON format + --require-tasks Require tasks.md to exist (for implementation phase) + --include-tasks Include tasks.md in AVAILABLE_DOCS list + --paths-only Only output path variables (no prerequisite validation) + --require-delivery-status Require latest harness run to be create-pr ready + --timeout-seconds=N Timeout value reported in diagnostics (default: 300) + --help, -h Show this help message + +EXAMPLES: + # Check task prerequisites (plan.md required) + ./check-prerequisites.sh --json + + # Check implementation prerequisites (plan.md + tasks.md required) + ./check-prerequisites.sh --json --require-tasks --include-tasks + + # Get feature paths only (no validation) + ./check-prerequisites.sh --paths-only + + # Enforce delivery gate for create-pr/pr-review transitions + ./check-prerequisites.sh --json --require-delivery-status --timeout-seconds=300 + +EOF + exit 0 + ;; + *) + echo "ERROR: Unknown option '$arg'. Use --help for usage information." 
>&2 + exit 1 + ;; + esac +done + +# Source common functions +SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/common.sh" + +# Multi-app support (T095e) +parse_app_context "$@" 2>/dev/null || true +if [[ -n "${DEVSPARK_APP_ID:-}" || "${DEVSPARK_REPO_SCOPE:-false}" == "true" ]]; then + resolve_app_scope 2>/dev/null || true + print_scope_summary >&2 +fi + +# Get feature paths and validate branch +eval "$(get_feature_paths)" +check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1 + +# If paths-only mode, output paths and exit (support JSON + paths-only combined) +if $PATHS_ONLY; then + if $JSON_MODE; then + # Minimal JSON paths payload (no validation performed) + printf '{"REPO_ROOT":"%s","BRANCH":"%s","FEATURE_DIR":"%s","FEATURE_SPEC":"%s","IMPL_PLAN":"%s","TASKS":"%s"}\n' \ + "$REPO_ROOT" "$CURRENT_BRANCH" "$FEATURE_DIR" "$FEATURE_SPEC" "$IMPL_PLAN" "$TASKS" + else + echo "REPO_ROOT: $REPO_ROOT" + echo "BRANCH: $CURRENT_BRANCH" + echo "FEATURE_DIR: $FEATURE_DIR" + echo "FEATURE_SPEC: $FEATURE_SPEC" + echo "IMPL_PLAN: $IMPL_PLAN" + echo "TASKS: $TASKS" + fi + exit 0 +fi + +# Validate required directories and files +if [[ ! -d "$FEATURE_DIR" ]]; then + echo "ERROR: Feature directory not found: $FEATURE_DIR" >&2 + echo "Run /devspark.specify first to create the feature structure." >&2 + exit 1 +fi + +if [[ ! -f "$IMPL_PLAN" ]]; then + echo "ERROR: plan.md not found in $FEATURE_DIR" >&2 + echo "Run /devspark.plan first to create the implementation plan." >&2 + exit 1 +fi + +# Check for tasks.md if required +if $REQUIRE_TASKS && [[ ! -f "$TASKS" ]]; then + echo "ERROR: tasks.md not found in $FEATURE_DIR" >&2 + echo "Run /devspark.tasks first to create the task list." 
>&2 + exit 1 +fi + +if $REQUIRE_DELIVERY_STATUS; then + latest_result="" + if [[ -d ".documentation/devspark/runs" ]]; then + latest_result=$(ls -1dt .documentation/devspark/runs/*/result.json 2>/dev/null | head -n 1) + fi + if [[ -n "$latest_result" ]] && [[ -f "$latest_result" ]] && command -v jq >/dev/null 2>&1; then + create_pr_ready=$(jq -r '.create_pr_ready // false' "$latest_result") + if [[ "$create_pr_ready" != "true" ]]; then + echo "ERROR: delivery-status gate failed; latest harness run is not create-pr ready" >&2 + exit 1 + fi + fi +fi + +# Build list of available documents +docs=() + +# Always check these optional docs +[[ -f "$RESEARCH" ]] && docs+=("research.md") +[[ -f "$DATA_MODEL" ]] && docs+=("data-model.md") + +# Check contracts directory (only if it exists and has files) +if [[ -d "$CONTRACTS_DIR" ]] && [[ -n "$(ls -A "$CONTRACTS_DIR" 2>/dev/null)" ]]; then + docs+=("contracts/") +fi + +[[ -f "$QUICKSTART" ]] && docs+=("quickstart.md") + +# Include tasks.md if requested and it exists +if $INCLUDE_TASKS && [[ -f "$TASKS" ]]; then + docs+=("tasks.md") +fi + +# Output results +if $JSON_MODE; then + # Build JSON array of documents + if [[ ${#docs[@]} -eq 0 ]]; then + json_docs="[]" + else + json_docs=$(printf '"%s",' "${docs[@]}") + json_docs="[${json_docs%,}]" + fi + + printf '{"FEATURE_DIR":"%s","AVAILABLE_DOCS":%s}\n' "$FEATURE_DIR" "$json_docs" +else + # Text output + echo "FEATURE_DIR:$FEATURE_DIR" + echo "AVAILABLE_DOCS:" + + # Show status of each potential document + check_file "$RESEARCH" "research.md" + check_file "$DATA_MODEL" "data-model.md" + check_dir "$CONTRACTS_DIR" "contracts/" + check_file "$QUICKSTART" "quickstart.md" + + if $INCLUDE_TASKS; then + check_file "$TASKS" "tasks.md" + fi +fi diff --git a/.devspark/scripts/bash/common.sh b/.devspark/scripts/bash/common.sh new file mode 100644 index 00000000..be23b11a --- /dev/null +++ b/.devspark/scripts/bash/common.sh @@ -0,0 +1,548 @@ +#!/usr/bin/env bash +# Common functions and 
variables for all scripts + +# Get repository root, with fallback for non-git repositories +get_repo_root() { + if git rev-parse --show-toplevel >/dev/null 2>&1; then + git rev-parse --show-toplevel + else + # Fall back to script location for non-git repos + local script_dir + script_dir="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + (cd "$script_dir/../../.." && pwd) + fi +} + +# Get current branch, with fallback for non-git repositories +get_current_branch() { + # First check if DEVSPARK_FEATURE environment variable is set + if [[ -n "${DEVSPARK_FEATURE:-}" ]]; then + echo "$DEVSPARK_FEATURE" + return + fi + + # Then check git if available + if git rev-parse --abbrev-ref HEAD >/dev/null 2>&1; then + git rev-parse --abbrev-ref HEAD + return + fi + + # For non-git repos, try to find the latest feature directory + local repo_root + repo_root=$(get_repo_root) + local specs_dir="$repo_root/.documentation/specs" + + if [[ -d "$specs_dir" ]]; then + local latest_feature="" + local highest=0 + + for dir in "$specs_dir"/*; do + if [[ -d "$dir" ]]; then + local dirname + dirname=$(basename "$dir") + if [[ "$dirname" =~ ^([0-9]{3})- ]]; then + local number=${BASH_REMATCH[1]} + number=$((10#$number)) + if [[ "$number" -gt "$highest" ]]; then + highest=$number + latest_feature=$dirname + fi + fi + fi + done + + if [[ -n "$latest_feature" ]]; then + echo "$latest_feature" + return + fi + fi + + echo "main" # Final fallback +} + +# Check if we have git available +has_git() { + git rev-parse --show-toplevel >/dev/null 2>&1 +} + +check_feature_branch() { + local branch="$1" + local has_git_repo="$2" + + # For non-git repos, we can't enforce branch naming but still provide output + if [[ "$has_git_repo" != "true" ]]; then + echo "[devspark] Warning: Git repository not detected; skipped branch validation" >&2 + return 0 + fi + + if [[ ! "$branch" =~ ^[0-9]{3}- ]]; then + echo "ERROR: Not on a feature branch. 
Current branch: $branch" >&2 + echo "Feature branches should be named like: 001-feature-name" >&2 + return 1 + fi + + return 0 +} + +get_feature_dir() { echo "$1/.documentation/specs/$2"; } + +# Find feature directory by numeric prefix instead of exact branch match +# This allows multiple branches to work on the same spec (e.g., 004-fix-bug, 004-add-feature) +find_feature_dir_by_prefix() { + local repo_root="$1" + local branch_name="$2" + local specs_dir="$repo_root/.documentation/specs" + + # Extract numeric prefix from branch (e.g., "004" from "004-whatever") + if [[ ! "$branch_name" =~ ^([0-9]{3})- ]]; then + # If branch doesn't have numeric prefix, fall back to exact match + echo "$specs_dir/$branch_name" + return + fi + + local prefix="${BASH_REMATCH[1]}" + + # Search for directories in .documentation/specs/ that start with this prefix + local matches=() + if [[ -d "$specs_dir" ]]; then + for dir in "$specs_dir"/"$prefix"-*; do + if [[ -d "$dir" ]]; then + matches+=("$(basename "$dir")") + fi + done + fi + + # Handle results + if [[ ${#matches[@]} -eq 0 ]]; then + # No match found - return the branch name path (will fail later with clear error) + echo "$specs_dir/$branch_name" + elif [[ ${#matches[@]} -eq 1 ]]; then + # Exactly one match - perfect! + echo "$specs_dir/${matches[0]}" + else + # Multiple matches - this shouldn't happen with proper naming convention + echo "ERROR: Multiple spec directories found with prefix '$prefix': ${matches[*]}" >&2 + echo "Please ensure only one spec directory exists per numeric prefix." 
>&2 + echo "$specs_dir/$branch_name" # Return something to avoid breaking the script + fi +} + +get_feature_paths() { + local repo_root + repo_root=$(get_repo_root) + local current_branch + current_branch=$(get_current_branch) + local has_git_repo="false" + + if has_git; then + has_git_repo="true" + fi + + # Use prefix-based lookup to support multiple branches per spec + local feature_dir + feature_dir=$(find_feature_dir_by_prefix "$repo_root" "$current_branch") + + cat </dev/null) ]] && echo " ✓ $2" || echo " ✗ $2"; } + +get_markdown_frontmatter() { + local file_path="$1" + + [[ -f "$file_path" ]] || return 1 + + awk ' + NR == 1 && $0 == "---" { in_block=1; next } + in_block && $0 == "---" { exit } + in_block { print } + ' "$file_path" +} + +get_markdown_frontmatter_value() { + local file_path="$1" + local key="$2" + + get_markdown_frontmatter "$file_path" | awk -F': ' -v wanted="$key" '$1 == wanted { print $2; exit }' +} + +# --------------------------------------------------------------------------- +# Multi-app support helpers +# --------------------------------------------------------------------------- + +# Detect whether the repository is operating in multi-app mode. +detect_devspark_mode() { + local repo_root + repo_root=$(get_repo_root) + local registry="$repo_root/.documentation/devspark.json" + + if [[ -f "$registry" ]]; then + local mode + mode=$(jq -r '.mode // empty' "$registry" 2>/dev/null || true) + if [[ "$mode" == "multi-app" ]]; then + echo "multi-app" + return + fi + fi + echo "single-app" +} + +# Validate the registry structure with jq before deeper processing. +validate_registry_json() { + local registry_path="$1" + + if [[ ! -f "$registry_path" ]]; then + echo '{"valid":false,"error":"Registry file not found"}'; return 1 + fi + + # Check JSON validity + if ! 
jq empty "$registry_path" 2>/dev/null; then + echo '{"valid":false,"error":"Invalid JSON"}'; return 1 + fi + + local version mode app_count profile_count + version=$(jq -r '.version // 0' "$registry_path") || true + mode=$(jq -r '.mode // ""' "$registry_path") || true + app_count=$(jq '.apps | length' "$registry_path") || true + profile_count=$(jq '.profiles | keys | length' "$registry_path") || true + + # Check version + if [[ "$version" != "1" ]]; then + echo "{\"valid\":false,\"error\":\"Unsupported version: $version\"}"; return 1 + fi + + # Check unique IDs + local unique_ids total_ids + total_ids=$(jq '.apps | length' "$registry_path") || true + unique_ids=$(jq '[.apps[].id] | unique | length' "$registry_path") || true + if [[ "$total_ids" != "$unique_ids" ]]; then + echo '{"valid":false,"error":"Duplicate app IDs detected"}'; return 1 + fi + + # Check profile references + local bad_profiles + bad_profiles=$(jq -r ' + [.profiles | keys] as $pkeys | + [.apps[].inherits[]] | unique | map(select(. as $p | $pkeys[0] | index($p) | not)) + ' "$registry_path" 2>/dev/null || echo "[]") + if [[ "$bad_profiles" != "[]" ]]; then + echo "{\"valid\":false,\"error\":\"Unknown profiles: $bad_profiles\"}"; return 1 + fi + + echo "{\"valid\":true,\"apps\":$app_count,\"profiles\":$profile_count}" +} + +# Resolve app documentation root (T014) +resolve_app_doc_root() { + local repo_root="$1" + local app_id="$2" + + if [[ -z "$app_id" ]]; then + echo "$repo_root/.documentation" + return + fi + + local registry="$repo_root/.documentation/devspark.json" + if [[ ! 
-f "$registry" ]]; then + echo "ERROR: No multi-app registry found" >&2; return 1 + fi + + local app_path + app_path=$(jq -r --arg id "$app_id" '.apps[] | select(.id == $id) | .path // empty' "$registry" 2>/dev/null) + if [[ -z "$app_path" ]]; then + echo "ERROR: Unknown application: $app_id" >&2; return 1 + fi + + echo "$repo_root/$app_path/.documentation" +} + +# Parse --app and --repo-scope arguments (T026) +# Sets DEVSPARK_APP_ID and DEVSPARK_REPO_SCOPE +parse_app_context() { + DEVSPARK_APP_ID="" + DEVSPARK_REPO_SCOPE=false + local remaining_args=() + + while [[ $# -gt 0 ]]; do + case "$1" in + --app) + shift + if [[ $# -eq 0 || "$1" == --* ]]; then + echo "ERROR: --app requires an application ID" >&2; return 1 + fi + DEVSPARK_APP_ID="$1" + ;; + --repo-scope) + DEVSPARK_REPO_SCOPE=true + ;; + *) + remaining_args+=("$1") + ;; + esac + shift + done + + # Export for downstream scripts + export DEVSPARK_APP_ID DEVSPARK_REPO_SCOPE + # Set remaining args back (used by callers after sourcing) + export DEVSPARK_REMAINING_ARGS + DEVSPARK_REMAINING_ARGS=("${remaining_args[@]}") +} + +# Resolve scope and validate (T028, T030) +# Sets DEVSPARK_SCOPE, DEVSPARK_DOC_ROOT, DEVSPARK_SCOPE_ERROR (used by callers) +# shellcheck disable=SC2034 +resolve_app_scope() { + local repo_root + repo_root=$(get_repo_root) + local mode + mode=$(detect_devspark_mode) + + DEVSPARK_SCOPE="" + DEVSPARK_DOC_ROOT="" + DEVSPARK_SCOPE_ERROR="" + + if [[ "$mode" == "single-app" ]]; then + # Single-app mode + if [[ -n "$DEVSPARK_APP_ID" ]]; then + DEVSPARK_SCOPE_ERROR="No multi-app registry found. Cannot use --app." 
+ return 1 + fi + DEVSPARK_SCOPE="repo" + DEVSPARK_DOC_ROOT="$repo_root/.documentation" + return 0 + fi + + # Multi-app mode + if [[ "$DEVSPARK_REPO_SCOPE" == "true" ]]; then + DEVSPARK_SCOPE="repo" + DEVSPARK_DOC_ROOT="$repo_root/.documentation" + return 0 + fi + + if [[ -n "$DEVSPARK_APP_ID" ]]; then + local doc_root + doc_root=$(resolve_app_doc_root "$repo_root" "$DEVSPARK_APP_ID") || { + DEVSPARK_SCOPE_ERROR="$doc_root" + return 1 + } + DEVSPARK_SCOPE="single-app" + DEVSPARK_DOC_ROOT="$doc_root" + return 0 + fi + + # No explicit scope — check app count + local registry="$repo_root/.documentation/devspark.json" + local app_count + app_count=$(jq '.apps | length' "$registry" 2>/dev/null || echo "0") + + if [[ "$app_count" -gt 1 ]]; then + local available + available=$(jq -r '[.apps[].id] | join(", ")' "$registry") + DEVSPARK_SCOPE_ERROR="Multiple apps registered; specify --app or use --repo-scope. Available: $available" + return 1 + fi + + if [[ "$app_count" -eq 1 ]]; then + local app_id app_path + app_id=$(jq -r '.apps[0].id' "$registry") + app_path=$(jq -r '.apps[0].path' "$registry") + DEVSPARK_APP_ID="$app_id" + DEVSPARK_SCOPE="single-app" + DEVSPARK_DOC_ROOT="$repo_root/$app_path/.documentation" + return 0 + fi + + DEVSPARK_SCOPE="repo" + DEVSPARK_DOC_ROOT="$repo_root/.documentation" +} + +# Resolve constitution with app overlay (T022) +resolve_constitution() { + local repo_root="$1" + local app_id="${2:-}" + + local repo_constitution="$repo_root/.documentation/memory/constitution.md" + if [[ ! 
-f "$repo_constitution" ]]; then + echo "ERROR: Repository constitution required at $repo_constitution" >&2 + return 1 + fi + + local output + output=$(cat "$repo_constitution") + + if [[ -n "$app_id" ]]; then + local app_doc_root + app_doc_root=$(resolve_app_doc_root "$repo_root" "$app_id") || return 1 + local app_constitution="$app_doc_root/memory/constitution.md" + + if [[ -f "$app_constitution" ]]; then + output="$output + +--- + +## Application Overlay: $app_id + +$(cat "$app_constitution")" + fi + fi + + echo "$output" +} + +# Get direct downstream consumers of an app (T039) +get_downstream_apps() { + local repo_root="$1" + local app_id="$2" + local registry="$repo_root/.documentation/devspark.json" + + if [[ ! -f "$registry" ]]; then + return + fi + + # Find all apps whose dependsOn contains app_id + jq -r --arg id "$app_id" \ + '[.apps[] | select(.dependsOn | index($id)) | .id] | join(",")' \ + "$registry" 2>/dev/null || true +} + +# Generate scope report (T039) +generate_scope_report() { + local repo_root + repo_root=$(get_repo_root) + + echo "## DevSpark Scope Report" + echo "" + echo "**Scope type**: ${DEVSPARK_SCOPE:-unknown}" + echo "**Documentation root**: ${DEVSPARK_DOC_ROOT:-unknown}" + + if [[ -n "$DEVSPARK_APP_ID" ]]; then + echo "**Primary application**: $DEVSPARK_APP_ID" + + # Declared downstream + local downstream + downstream=$(get_downstream_apps "$repo_root" "$DEVSPARK_APP_ID") + if [[ -n "$downstream" ]]; then + echo "" + echo "### Declared downstream dependencies" + IFS=',' read -r -a deps <<< "$downstream" + for dep in "${deps[@]}"; do + echo "- $dep" + done + fi + fi +} + +# Print scope summary (T035) +print_scope_summary() { + echo "--- DevSpark Scope ---" + echo "scope: ${DEVSPARK_SCOPE:-unknown}" + echo "doc-root: ${DEVSPARK_DOC_ROOT:-unknown}" + if [[ -n "$DEVSPARK_APP_ID" ]]; then + echo "app: $DEVSPARK_APP_ID" + fi + echo "mode: $(detect_devspark_mode)" + echo "---" +} + +# Resolve inherited profile chain for an app (T052) +# 
Composes all inherited profiles + overrides + app.json into one effective profile +resolve_app_profiles() { + local repo_root="$1" + local app_id="$2" + local registry="$repo_root/.documentation/devspark.json" + + if [[ ! -f "$registry" ]]; then + echo '{"tags":{},"rules":[],"hints":{}}'; return + fi + + local app_path + app_path=$(jq -r --arg id "$app_id" '.apps[] | select(.id == $id) | .path // ""' "$registry" 2>/dev/null) + local app_json="$repo_root/$app_path/app.json" + local manifest='{}' + if [[ -f "$app_json" ]]; then + manifest=$(cat "$app_json") + fi + + jq -n --arg id "$app_id" --argjson manifest "$manifest" --slurpfile reg "$registry" ' + $reg[0] as $r | + ($r.apps[] | select(.id == $id)) as $app | + reduce ($app.inherits // [])[] as $pname ( + { tags: {}, rules: [], hints: {} }; + ($r.profiles[$pname] // {}) as $p | + .tags = (.tags * ($p.tags // {})) | + .rules = (.rules + (($p.rules // []) - .rules)) | + .hints = (.hints * ($p.hints // {})) + ) | + (($app.overrides // {}) as $o | + .tags = (.tags * ($o.tags // {})) | + .rules = (.rules + (($o.rules // []) - .rules)) | + .hints = (.hints * ($o.hints // {})) + ) | + .tags = (.tags * ($manifest.tags // {})) | + .rules = (.rules + (($manifest.rules // []) - .rules)) | + .hints = (.hints * ($manifest.hints // {})) + ' +} + +# Override get_feature_paths for app-scoped workflows (T028) +get_feature_paths_app_aware() { + local repo_root + repo_root=$(get_repo_root) + local current_branch + current_branch=$(get_current_branch) + local has_git_repo="false" + + if has_git; then + has_git_repo="true" + fi + + # Determine doc root based on app context + local doc_root="$repo_root/.documentation" + if [[ -n "$DEVSPARK_DOC_ROOT" ]]; then + doc_root="$DEVSPARK_DOC_ROOT" + fi + + # Use prefix-based lookup within the resolved doc root + local specs_dir="$doc_root/specs" + local feature_dir + if [[ -d "$specs_dir" ]]; then + feature_dir=$(find_feature_dir_by_prefix "$(dirname "$doc_root")" "$current_branch" 2>/dev/null 
|| echo "$specs_dir/$current_branch") + # Re-base if we're in app scope + if [[ "$doc_root" != "$repo_root/.documentation" ]]; then + feature_dir="$specs_dir/$current_branch" + fi + else + feature_dir="$specs_dir/$current_branch" + fi + + cat <&2 + exit 1 + fi + i=$((i + 1)) + next_arg="${!i}" + # Check if the next argument is another option (starts with --) + if [[ "$next_arg" == --* ]]; then + echo 'Error: --short-name requires a value' >&2 + exit 1 + fi + SHORT_NAME="$next_arg" + ;; + --number) + if [ $((i + 1)) -gt $# ]; then + echo 'Error: --number requires a value' >&2 + exit 1 + fi + i=$((i + 1)) + next_arg="${!i}" + if [[ "$next_arg" == --* ]]; then + echo 'Error: --number requires a value' >&2 + exit 1 + fi + BRANCH_NUMBER="$next_arg" + ;; + --help|-h) + echo "Usage: $0 [--json] [--short-name ] [--number N] " + echo "" + echo "Options:" + echo " --json Output in JSON format" + echo " --short-name Provide a custom short name (2-4 words) for the branch" + echo " --number N Specify branch number manually (overrides auto-detection)" + echo " --help, -h Show this help message" + echo "" + echo "Examples:" + echo " $0 'Add user authentication system' --short-name 'user-auth'" + echo " $0 'Implement OAuth2 integration for API' --number 5" + exit 0 + ;; + *) + ARGS+=("$arg") + ;; + esac + i=$((i + 1)) +done + +FEATURE_DESCRIPTION="${ARGS[*]}" +if [ -z "$FEATURE_DESCRIPTION" ]; then + echo "Usage: $0 [--json] [--short-name ] [--number N] " >&2 + exit 1 +fi + +# Function to find the repository root by searching for existing project markers +find_repo_root() { + local dir="$1" + while [ "$dir" != "/" ]; do + if [ -d "$dir/.git" ] || [ -d "$dir/.documentation" ]; then + echo "$dir" + return 0 + fi + dir="$(dirname "$dir")" + done + return 1 +} + +# Function to get highest number from specs directory +get_highest_from_specs() { + local specs_dir="$1" + local highest=0 + + if [ -d "$specs_dir" ]; then + for dir in "$specs_dir"/*; do + [ -d "$dir" ] || continue + 
dirname=$(basename "$dir") + number=$(echo "$dirname" | grep -o '^[0-9]\+' || echo "0") + number=$((10#$number)) + if [ "$number" -gt "$highest" ]; then + highest=$number + fi + done + fi + + echo "$highest" +} + +# Function to get highest number from git branches +get_highest_from_branches() { + local highest=0 + + # Get all branches (local and remote) + branches=$(git branch -a 2>/dev/null || echo "") + + if [ -n "$branches" ]; then + while IFS= read -r branch; do + # Clean branch name: remove leading markers and remote prefixes + clean_branch=$(echo "$branch" | sed 's/^[* ]*//; s|^remotes/[^/]*/||') + + # Extract feature number if branch matches pattern ###-* + if echo "$clean_branch" | grep -q '^[0-9]\{3\}-'; then + number=$(echo "$clean_branch" | grep -o '^[0-9]\{3\}' || echo "0") + number=$((10#$number)) + if [ "$number" -gt "$highest" ]; then + highest=$number + fi + fi + done <<< "$branches" + fi + + echo "$highest" +} + +# Function to check existing branches (local and remote) and return next available number +check_existing_branches() { + local specs_dir="$1" + + # Fetch all remotes to get latest branch info (suppress errors if no remotes) + git fetch --all --prune 2>/dev/null || true + + # Get highest number from ALL branches (not just matching short name) + local highest_branch + highest_branch=$(get_highest_from_branches) + + # Get highest number from ALL specs (not just matching short name) + local highest_spec + highest_spec=$(get_highest_from_specs "$specs_dir") + + # Take the maximum of both + local max_num=$highest_branch + if [ "$highest_spec" -gt "$max_num" ]; then + max_num=$highest_spec + fi + + # Return next number + echo $((max_num + 1)) +} + +# Function to clean and format a branch name +clean_branch_name() { + local name="$1" + echo "$name" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g' | sed 's/-\+/-/g' | sed 's/^-//' | sed 's/-$//' +} + +# Resolve repository root. 
Prefer git information when available, but fall back +# to searching for repository markers so the workflow still functions in repositories that +# were initialised with --no-git. +SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +if git rev-parse --show-toplevel >/dev/null 2>&1; then + REPO_ROOT=$(git rev-parse --show-toplevel) + HAS_GIT=true +else + REPO_ROOT="$(find_repo_root "$SCRIPT_DIR")" + if [ -z "$REPO_ROOT" ]; then + echo "Error: Could not determine repository root. Please run this script from within the repository." >&2 + exit 1 + fi + HAS_GIT=false +fi + +cd "$REPO_ROOT" + +# Load common for multi-app helpers +source "$SCRIPT_DIR/common.sh" 2>/dev/null || source "$(dirname "${BASH_SOURCE[0]}")/common.sh" 2>/dev/null || true + +# Multi-app support: parse --app and --repo-scope from ARGS (T031) +parse_app_context "${ARGS[@]}" 2>/dev/null || true +if [[ ${#DEVSPARK_REMAINING_ARGS[@]} -gt 0 ]]; then + ARGS=("${DEVSPARK_REMAINING_ARGS[@]}") +fi +FEATURE_DESCRIPTION="${ARGS[*]}" + +# Determine specs directory based on app context +if [[ -n "${DEVSPARK_APP_ID:-}" ]]; then + APP_DOC_ROOT=$(resolve_app_doc_root "$REPO_ROOT" "$DEVSPARK_APP_ID" 2>/dev/null || true) + if [[ -n "$APP_DOC_ROOT" && "$APP_DOC_ROOT" != ERROR* ]]; then + SPECS_DIR="$APP_DOC_ROOT/specs" + else + SPECS_DIR="$REPO_ROOT/.documentation/specs" + fi +else + SPECS_DIR="$REPO_ROOT/.documentation/specs" +fi +mkdir -p "$SPECS_DIR" + +# Function to generate branch name with stop word filtering and length filtering +generate_branch_name() { + local description="$1" + + # Common stop words to filter out + local stop_words="^(i|a|an|the|to|for|of|in|on|at|by|with|from|is|are|was|were|be|been|being|have|has|had|do|does|did|will|would|should|could|can|may|might|must|shall|this|that|these|those|my|your|our|their|want|need|add|get|set)$" + + # Convert to lowercase and split into words + local clean_name + clean_name=$(echo "$description" | tr '[:upper:]' '[:lower:]' | sed 
's/[^a-z0-9]/ /g') + + # Filter words: remove stop words and words shorter than 3 chars (unless they're uppercase acronyms in original) + local meaningful_words=() + for word in $clean_name; do + # Skip empty words + [ -z "$word" ] && continue + + # Keep words that are NOT stop words AND (length >= 3 OR are potential acronyms) + if ! echo "$word" | grep -qiE "$stop_words"; then + if [ ${#word} -ge 3 ]; then + meaningful_words+=("$word") + elif echo "$description" | grep -q "\b$(echo "$word" | tr '[:lower:]' '[:upper:]')\b"; then + # Keep short words if they appear as uppercase in original (likely acronyms) + meaningful_words+=("$word") + fi + fi + done + + # If we have meaningful words, use first 3-4 of them + if [ ${#meaningful_words[@]} -gt 0 ]; then + local max_words=3 + if [ ${#meaningful_words[@]} -eq 4 ]; then max_words=4; fi + + local result="" + local count=0 + for word in "${meaningful_words[@]}"; do + if [ $count -ge $max_words ]; then break; fi + if [ -n "$result" ]; then result="$result-"; fi + result="$result$word" + count=$((count + 1)) + done + echo "$result" + else + # Fallback to original logic if no meaningful words found + local cleaned + cleaned=$(clean_branch_name "$description") + echo "$cleaned" | tr '-' '\n' | grep -v '^$' | head -3 | tr '\n' '-' | sed 's/-$//' + fi +} + +# Generate branch name +if [ -n "$SHORT_NAME" ]; then + # Use provided short name, just clean it up + BRANCH_SUFFIX=$(clean_branch_name "$SHORT_NAME") +else + # Generate from description with smart filtering + BRANCH_SUFFIX=$(generate_branch_name "$FEATURE_DESCRIPTION") +fi + +# Determine branch number +if [ -z "$BRANCH_NUMBER" ]; then + if [ "$HAS_GIT" = true ]; then + # Check existing branches on remotes + BRANCH_NUMBER=$(check_existing_branches "$SPECS_DIR") + else + # Fall back to local directory check + HIGHEST=$(get_highest_from_specs "$SPECS_DIR") + BRANCH_NUMBER=$((HIGHEST + 1)) + fi +fi + +# Force base-10 interpretation to prevent octal conversion (e.g., 010 → 8 in 
octal, but should be 10 in decimal) +FEATURE_NUM=$(printf "%03d" "$((10#$BRANCH_NUMBER))") +BRANCH_NAME="${FEATURE_NUM}-${BRANCH_SUFFIX}" + +# GitHub enforces a 244-byte limit on branch names +# Validate and truncate if necessary +MAX_BRANCH_LENGTH=244 +if [ ${#BRANCH_NAME} -gt $MAX_BRANCH_LENGTH ]; then + # Calculate how much we need to trim from suffix + # Account for: feature number (3) + hyphen (1) = 4 chars + MAX_SUFFIX_LENGTH=$((MAX_BRANCH_LENGTH - 4)) + + # Truncate suffix at word boundary if possible + TRUNCATED_SUFFIX=$(echo "$BRANCH_SUFFIX" | cut -c1-$MAX_SUFFIX_LENGTH) + # Remove trailing hyphen if truncation created one + TRUNCATED_SUFFIX=$(echo "$TRUNCATED_SUFFIX" | sed 's/-$//') + + ORIGINAL_BRANCH_NAME="$BRANCH_NAME" + BRANCH_NAME="${FEATURE_NUM}-${TRUNCATED_SUFFIX}" + + >&2 echo "[devspark] Warning: Branch name exceeded GitHub's 244-byte limit" + >&2 echo "[devspark] Original: $ORIGINAL_BRANCH_NAME (${#ORIGINAL_BRANCH_NAME} bytes)" + >&2 echo "[devspark] Truncated to: $BRANCH_NAME (${#BRANCH_NAME} bytes)" +fi + +if [ "$HAS_GIT" = true ]; then + git checkout -b "$BRANCH_NAME" +else + >&2 echo "[devspark] Warning: Git repository not detected; skipped branch creation for $BRANCH_NAME" +fi + +FEATURE_DIR="$SPECS_DIR/$BRANCH_NAME" +mkdir -p "$FEATURE_DIR" + +TEMPLATE="$REPO_ROOT/.documentation/templates/spec-template.md" +SPEC_FILE="$FEATURE_DIR/spec.md" +if [ -f "$TEMPLATE" ]; then cp "$TEMPLATE" "$SPEC_FILE"; else touch "$SPEC_FILE"; fi + +# Set the DEVSPARK_FEATURE environment variable for the current session +export DEVSPARK_FEATURE="$BRANCH_NAME" + +if $JSON_MODE; then + printf '{"BRANCH_NAME":"%s","SPEC_FILE":"%s","FEATURE_NUM":"%s"}\n' "$BRANCH_NAME" "$SPEC_FILE" "$FEATURE_NUM" +else + echo "BRANCH_NAME: $BRANCH_NAME" + echo "SPEC_FILE: $SPEC_FILE" + echo "FEATURE_NUM: $FEATURE_NUM" + echo "DEVSPARK_FEATURE environment variable set to: $BRANCH_NAME" +fi diff --git a/.devspark/scripts/bash/create-pr.sh b/.devspark/scripts/bash/create-pr.sh new file 
mode 100644 index 00000000..24367a12 --- /dev/null +++ b/.devspark/scripts/bash/create-pr.sh @@ -0,0 +1,636 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/common.sh" +source "$SCRIPT_DIR/platform.sh" + +MODE="preflight" +export JSON_MODE=false +TITLE="" +BODY="" +BODY_FILE="" +BASE_BRANCH="" +PR_NUMBER="" +DRAFT=false +declare -a REVIEWERS=() +declare -a LABELS=() +declare -a ASSIGNEES=() +declare -a ISSUES=() + +if ! command -v jq >/dev/null 2>&1; then + printf '{"error":true,"message":"jq is required for create-pr.sh","details":"Install jq or use the PowerShell create-pr script."}\n' + exit 1 +fi + +while [[ $# -gt 0 ]]; do + case "$1" in + --mode) + MODE="$2" + shift 2 + ;; + --json) + JSON_MODE=true + shift + ;; + --title) + TITLE="$2" + shift 2 + ;; + --body) + BODY="$2" + shift 2 + ;; + --body-file) + BODY_FILE="$2" + shift 2 + ;; + --base) + BASE_BRANCH="$2" + shift 2 + ;; + --pr-number) + PR_NUMBER="$2" + shift 2 + ;; + --draft) + DRAFT=true + shift + ;; + --reviewer) + REVIEWERS+=("$2") + shift 2 + ;; + --label) + LABELS+=("$2") + shift 2 + ;; + --assignee) + ASSIGNEES+=("$2") + shift 2 + ;; + --issue) + ISSUES+=("$2") + shift 2 + ;; + *) + shift + ;; + esac +done + +json_error() { + local message="$1" + local details="${2:-}" + jq -n --arg message "$message" --arg details "$details" '{error: true, message: $message, details: $details}' +} + +get_default_base_branch() { + local branch="" + if command -v gh >/dev/null 2>&1 && check_platform_auth; then + branch=$(gh repo view --json defaultBranchRef --jq '.defaultBranchRef.name' 2>/dev/null || true) + fi + if [[ -z "$branch" ]]; then + branch=$(git symbolic-ref refs/remotes/origin/HEAD 2>/dev/null | sed 's@^refs/remotes/origin/@@' || true) + fi + [[ -n "$branch" ]] || branch="main" + printf '%s\n' "$branch" +} + +resolve_body() { + local body_value="$BODY" + if [[ -n "$BODY_FILE" && -f "$BODY_FILE" ]]; then + 
body_value=$(cat "$BODY_FILE")
+  fi
+  if [[ ${#ISSUES[@]} -gt 0 ]]; then
+    local issue_line=""
+    issue_line=$(printf '%s, ' "${ISSUES[@]}")
+    issue_line=${issue_line%, }
+    if [[ -n "$body_value" ]]; then
+      body_value+=$'\n\n'
+    fi
+    body_value+="Refs: $issue_line"
+  fi
+  printf '%s' "$body_value"
+}
+
+# Strip leading and trailing whitespace from stdin.
+trim_text() {
+  sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
+}
+
+# Print the body of the "## <section_name>" section of a markdown file.
+# Heading comparison is case-insensitive; capture stops at the next "## "
+# heading. A missing file or missing section prints nothing (exit 0).
+extract_section_text() {
+  local file_path="$1"
+  local section_name="$2"
+  # Invoke python3 explicitly: many current distros ship only `python3`
+  # with no bare `python` shim, so `python` would fail there.
+  python3 - "$file_path" "$section_name" <<'PY'
+from pathlib import Path
+import re
+import sys
+
+path = Path(sys.argv[1])
+heading = sys.argv[2].strip().lower()
+if not path.exists():
+    sys.exit(0)
+text = path.read_text(encoding="utf-8")
+lines = text.splitlines()
+capture = False
+collected = []
+target = heading
+for line in lines:
+    stripped = line.strip()
+    if re.match(r'^##\s+', stripped):
+        current = re.sub(r'^##\s+', '', stripped).strip().lower()
+        if capture and current != target:
+            break
+        capture = current == target
+        continue
+    if capture:
+        collected.append(line)
+print("\n".join(collected).strip())
+PY
+}
+
+# JSON-encode all of stdin (including newlines) as a single JSON string.
+json_escape_multiline() {
+  jq -Rs '.'
+} + +find_quickfix_record_for_branch() { + local repo_root="$1" + local branch_name="$2" + local quickfix_dir="$repo_root/.documentation/quickfixes" + [[ -d "$quickfix_dir" ]] || return 0 + python - "$quickfix_dir" "$branch_name" <<'PY' +from pathlib import Path +import re +import sys + +quickfix_dir = Path(sys.argv[1]) +branch_name = sys.argv[2].strip() +matches = [] +for path in sorted(quickfix_dir.glob('*.md')): + try: + text = path.read_text(encoding='utf-8') + except Exception: + continue + match = re.search(r'^- \*\*Branch\*\*:\s*(.+)$', text, re.MULTILINE) + if match and match.group(1).strip() == branch_name: + id_match = re.search(r'^- \*\*ID\*\*:\s*(.+)$', text, re.MULTILINE) + quickfix_id = id_match.group(1).strip() if id_match else path.stem + matches.append((quickfix_id, str(path))) +if matches: + matches.sort(key=lambda item: item[0]) + print(matches[-1][1]) +PY +} + +build_quickfix_json() { + local quickfix_path="$1" + [[ -f "$quickfix_path" ]] || { printf 'null\n'; return; } + local classification risk_level required_gates recommended_next_step problem_statement gate_ack_text quickfix_title quickfix_id + classification=$(get_markdown_frontmatter_value "$quickfix_path" classification || true) + risk_level=$(get_markdown_frontmatter_value "$quickfix_path" risk_level || true) + required_gates=$(get_markdown_frontmatter_value "$quickfix_path" required_gates || true) + recommended_next_step=$(get_markdown_frontmatter_value "$quickfix_path" recommended_next_step || true) + quickfix_title=$(grep -m1 '^# ' "$quickfix_path" | sed 's/^# //' || true) + quickfix_id=$(grep -m1 '^- \*\*ID\*\*:' "$quickfix_path" | sed 's/^- \*\*ID\*\*:[[:space:]]*//' || true) + problem_statement=$(extract_section_text "$quickfix_path" "Problem Statement" | trim_text) + gate_ack_text=$(extract_section_text "$quickfix_path" "Gate Acknowledgements" | trim_text) + + jq -n \ + --arg path "$quickfix_path" \ + --arg id "$quickfix_id" \ + --arg title "$quickfix_title" \ + --arg 
classification "$classification" \ + --arg risk_level "$risk_level" \ + --arg required_gates "$required_gates" \ + --arg recommended_next_step "$recommended_next_step" \ + --arg problem_statement "$problem_statement" \ + --arg gate_acknowledgements "$gate_ack_text" \ + '{path: $path, id: $id, title: $title, classification: $classification, risk_level: $risk_level, required_gates: $required_gates, recommended_next_step: $recommended_next_step, problem_statement: $problem_statement, gate_acknowledgements: $gate_acknowledgements}' +} + +count_tasks() { + local tasks_path="$1" + if [[ ! -f "$tasks_path" ]]; then + printf '0\t0\t0\n' + return + fi + local total completed incomplete + total=$(grep -cE '^\s*- \[([ xX])\]' "$tasks_path" 2>/dev/null || echo '0') + completed=$(grep -cE '^\s*- \[[xX]\]' "$tasks_path" 2>/dev/null || echo '0') + incomplete=$((total - completed)) + printf '%s\t%s\t%s\n' "$total" "$completed" "$incomplete" +} + +collect_checklists_json() { + local checklist_dir="$1" + if [[ ! -d "$checklist_dir" ]]; then + printf '[]\n' + return + fi + + local items=() + local file + shopt -s nullglob + for file in "$checklist_dir"/*.md; do + local total completed incomplete status + total=$(grep -cE '^\s*- \[([ xX])\]' "$file" 2>/dev/null || echo '0') + completed=$(grep -cE '^\s*- \[[xX]\]' "$file" 2>/dev/null || echo '0') + incomplete=$((total - completed)) + if [[ "$incomplete" -eq 0 ]]; then + status="pass" + else + status="fail" + fi + items+=("$(jq -n --arg name "$(basename "$file")" --argjson total "$total" --argjson completed "$completed" --argjson incomplete "$incomplete" --arg status "$status" '{name: $name, total: $total, completed: $completed, incomplete: $incomplete, status: $status}')") + done + shopt -u nullglob + if [[ ${#items[@]} -eq 0 ]]; then + printf '[]\n' + else + printf '%s\n' "${items[@]}" | jq -s '.' 
+ fi +} + +scan_gate_artifacts_json() { + local feature_dir="$1" + local items=() + local gate path status blocking severity summary + for gate in analyze critic checklist; do + path="" + if [[ -f "$feature_dir/gates/$gate.md" ]]; then + path="$feature_dir/gates/$gate.md" + elif [[ -f "$feature_dir/$gate.md" ]]; then + path="$feature_dir/$gate.md" + fi + if [[ -n "$path" ]]; then + status=$(grep -m1 '^status:' "$path" 2>/dev/null | sed 's/^status:[[:space:]]*//' || true) + blocking=$(grep -m1 '^blocking:' "$path" 2>/dev/null | sed 's/^blocking:[[:space:]]*//' || true) + severity=$(grep -m1 '^severity:' "$path" 2>/dev/null | sed 's/^severity:[[:space:]]*//' || true) + summary=$(grep -m1 '^summary:' "$path" 2>/dev/null | sed 's/^summary:[[:space:]]*//' || true) + [[ -n "$status" ]] || status="unknown" + [[ -n "$blocking" ]] || blocking="false" + [[ -n "$severity" ]] || severity="info" + items+=("$(jq -n --arg gate "$gate" --arg path "$path" --arg status "$status" --arg severity "$severity" --arg summary "$summary" --argjson blocking "$([[ "$blocking" == "true" ]] && echo true || echo false)" '{gate: $gate, path: $path, status: $status, severity: $severity, summary: $summary, blocking: $blocking}')") + fi + done + if [[ ${#items[@]} -eq 0 ]]; then + printf '[]\n' + else + printf '%s\n' "${items[@]}" | jq -s '.' + fi +} + +collect_gate_acknowledgements_json() { + local tasks_path="$1" + if [[ ! 
-f "$tasks_path" ]]; then + printf '[]\n' + return + fi + local section_text + section_text=$(extract_section_text "$tasks_path" "Gate Acknowledgements" | trim_text) + if [[ -z "$section_text" ]]; then + printf '[]\n' + return + fi + SECTION_TEXT="$section_text" python <<'PY' +import json +import os + +text = os.environ["SECTION_TEXT"].strip() +entries = [] +current = [] +for line in text.splitlines(): + stripped = line.strip() + if not stripped: + if current: + entries.append("\n".join(current).strip()) + current = [] + continue + if stripped.startswith('- Gate:') and current: + entries.append("\n".join(current).strip()) + current = [stripped] + else: + current.append(stripped) +if current: + entries.append("\n".join(current).strip()) +print(json.dumps(entries)) +PY +} + +collect_preflight() { + local repo_root current_branch target_branch dirty auth_ok cli_available creation_supported + local local_head remote_head origin_exists remote_branch_exists branch_pushed_to_remote branch_push_details clean_worktree + local feature_dir spec_path plan_path tasks_path checklist_dir quickfix_path + local spec_exists=false plan_exists=false tasks_exists=false + local spec_title="" classification="" risk_level="" required_gates="" recommended_next_step="" + local tasks_total=0 tasks_completed=0 tasks_incomplete=0 + local diff_ref lines_summary changed_files_count recent_commits_json existing_pr_json checklists_json gate_artifacts_json gate_acknowledgements_json quickfix_json + local existing_pr=false existing_pr_number="" existing_pr_url="" existing_pr_title="" existing_pr_state="" existing_pr_draft=false + + repo_root=$(get_repo_root) + current_branch=$(get_current_branch) + target_branch="${BASE_BRANCH:-$(get_default_base_branch)}" + local_head=$(git rev-parse HEAD 2>/dev/null || echo "") + if [[ -n "$(git status --porcelain 2>/dev/null || true)" ]]; then + dirty=true + else + dirty=false + fi + if [[ "$dirty" == "true" ]]; then + clean_worktree=false + else + 
clean_worktree=true + fi + + origin_exists=false + remote_branch_exists=false + branch_pushed_to_remote=false + remote_head="" + branch_push_details="" + if git remote get-url origin >/dev/null 2>&1; then + origin_exists=true + remote_head=$(git ls-remote --heads origin "refs/heads/$current_branch" 2>/dev/null | awk 'NR==1 {print $1}') + if [[ -n "$remote_head" ]]; then + remote_branch_exists=true + fi + fi + + if [[ "$origin_exists" != true ]]; then + branch_push_details="Remote 'origin' is not configured" + elif [[ "$remote_branch_exists" != true ]]; then + branch_push_details="Branch '$current_branch' has not been pushed to origin" + elif [[ -n "$local_head" && "$local_head" == "$remote_head" ]]; then + branch_pushed_to_remote=true + branch_push_details="Branch '$current_branch' is pushed to origin" + else + branch_push_details="Push the latest commits from '$current_branch' to origin before creating a PR" + fi + + if command -v gh >/dev/null 2>&1; then + cli_available=true + if check_platform_auth; then + auth_ok=true + else + auth_ok=false + fi + else + cli_available=false + auth_ok=false + fi + + if [[ "$DEVSPARK_PLATFORM_NAME" == "github" && "$cli_available" == true ]]; then + creation_supported=true + else + creation_supported=false + fi + + feature_dir=$(find_feature_dir_by_prefix "$repo_root" "$current_branch") + spec_path="$feature_dir/spec.md" + plan_path="$feature_dir/plan.md" + tasks_path="$feature_dir/tasks.md" + checklist_dir="$feature_dir/checklists" + + [[ -f "$spec_path" ]] && spec_exists=true + [[ -f "$plan_path" ]] && plan_exists=true + [[ -f "$tasks_path" ]] && tasks_exists=true + + if [[ "$spec_exists" == true ]]; then + spec_title=$(grep -m1 '^# ' "$spec_path" | sed 's/^# //' || true) + classification=$(get_markdown_frontmatter_value "$spec_path" classification || true) + risk_level=$(get_markdown_frontmatter_value "$spec_path" risk_level || true) + required_gates=$(get_markdown_frontmatter_value "$spec_path" required_gates || true) + 
recommended_next_step=$(get_markdown_frontmatter_value "$spec_path" recommended_next_step || true) + fi + + if [[ "$tasks_exists" == true ]]; then + IFS=$'\t' read -r tasks_total tasks_completed tasks_incomplete < <(count_tasks "$tasks_path") + fi + + checklists_json=$(collect_checklists_json "$checklist_dir") + gate_artifacts_json=$(scan_gate_artifacts_json "$feature_dir") + gate_acknowledgements_json=$(collect_gate_acknowledgements_json "$tasks_path") + quickfix_path=$(find_quickfix_record_for_branch "$repo_root" "$current_branch") + quickfix_json=$(build_quickfix_json "$quickfix_path") + + if [[ "$spec_exists" != true && "$quickfix_json" != "null" ]]; then + classification=$(echo "$quickfix_json" | jq -r '.classification // ""') + risk_level=$(echo "$quickfix_json" | jq -r '.risk_level // ""') + required_gates=$(echo "$quickfix_json" | jq -r '.required_gates // ""') + recommended_next_step=$(echo "$quickfix_json" | jq -r '.recommended_next_step // ""') + spec_title=$(echo "$quickfix_json" | jq -r '.title // ""') + gate_acknowledgements_json=$(echo "$quickfix_json" | jq -c '[.gate_acknowledgements] | map(select(length > 0))') + fi + + if git rev-parse --verify "origin/$target_branch" >/dev/null 2>&1; then + git fetch origin "$target_branch" >/dev/null 2>&1 || true + diff_ref="origin/$target_branch...HEAD" + else + diff_ref="HEAD~1...HEAD" + fi + lines_summary=$(git diff --shortstat "$diff_ref" 2>/dev/null || echo "") + changed_files_count=$(git diff --name-only "$diff_ref" 2>/dev/null | grep -c . 
|| echo '0') + recent_commits_json=$(git log --format='%s' -n 10 "$diff_ref" 2>/dev/null | jq -R -s -c 'split("\n") | map(select(length > 0))' 2>/dev/null || echo '[]') + + if [[ "$cli_available" == true && "$auth_ok" == true ]]; then + existing_pr_json=$(gh pr list --head "$current_branch" --json number,url,title,state,isDraft --limit 1 2>/dev/null | jq '.[0] // {}' 2>/dev/null || echo '{}') + existing_pr_number=$(echo "$existing_pr_json" | jq -r '.number // empty') + if [[ -n "$existing_pr_number" ]]; then + existing_pr=true + existing_pr_url=$(echo "$existing_pr_json" | jq -r '.url // ""') + existing_pr_title=$(echo "$existing_pr_json" | jq -r '.title // ""') + existing_pr_state=$(echo "$existing_pr_json" | jq -r '.state // ""') + existing_pr_draft=$(echo "$existing_pr_json" | jq -r '.isDraft // false') + fi + fi + + jq -n \ + --arg repo_root "$repo_root" \ + --arg current_branch "$current_branch" \ + --arg target_branch "$target_branch" \ + --arg spec_path "$spec_path" \ + --arg plan_path "$plan_path" \ + --arg tasks_path "$tasks_path" \ + --arg feature_dir "$feature_dir" \ + --arg checklist_dir "$checklist_dir" \ + --arg spec_title "$spec_title" \ + --arg classification "$classification" \ + --arg risk_level "$risk_level" \ + --arg required_gates "$required_gates" \ + --arg recommended_next_step "$recommended_next_step" \ + --arg lines_summary "$lines_summary" \ + --arg existing_pr_url "$existing_pr_url" \ + --arg existing_pr_title "$existing_pr_title" \ + --arg existing_pr_state "$existing_pr_state" \ + --arg local_head "$local_head" \ + --arg remote_head "$remote_head" \ + --arg branch_push_details "$branch_push_details" \ + --argjson dirty "$dirty" \ + --argjson clean_worktree "$clean_worktree" \ + --argjson auth_ok "$auth_ok" \ + --argjson cli_available "$cli_available" \ + --argjson creation_supported "$creation_supported" \ + --argjson origin_exists "$origin_exists" \ + --argjson remote_branch_exists "$remote_branch_exists" \ + --argjson 
branch_pushed_to_remote "$branch_pushed_to_remote" \ + --argjson spec_exists "$spec_exists" \ + --argjson plan_exists "$plan_exists" \ + --argjson tasks_exists "$tasks_exists" \ + --argjson tasks_total "$tasks_total" \ + --argjson tasks_completed "$tasks_completed" \ + --argjson tasks_incomplete "$tasks_incomplete" \ + --argjson changed_files_count "$changed_files_count" \ + --argjson existing_pr "$existing_pr" \ + --argjson existing_pr_number "${existing_pr_number:-0}" \ + --argjson existing_pr_draft "$([[ "$existing_pr_draft" == "true" ]] && echo true || echo false)" \ + --argjson recent_commits "$recent_commits_json" \ + --argjson checklists "$checklists_json" \ + --argjson gate_artifacts "$gate_artifacts_json" \ + --argjson gate_acknowledgements "$gate_acknowledgements_json" \ + --argjson quickfix_record "$quickfix_json" \ + '{ + repo_root: $repo_root, + current_branch: $current_branch, + target_branch: $target_branch, + dirty_worktree: $dirty, + prerequisites: { + clean_worktree: $clean_worktree, + branch_pushed_to_remote: $branch_pushed_to_remote + }, + remote: { + name: "origin", + exists: $origin_exists, + branch_exists: $remote_branch_exists, + branch_pushed_to_remote: $branch_pushed_to_remote, + local_head: $local_head, + remote_head: $remote_head, + details: $branch_push_details + }, + cli_available: $cli_available, + auth_ok: $auth_ok, + creation_supported: $creation_supported, + feature: { + dir: $feature_dir, + spec_path: $spec_path, + plan_path: $plan_path, + tasks_path: $tasks_path, + checklist_dir: $checklist_dir, + spec_exists: $spec_exists, + plan_exists: $plan_exists, + tasks_exists: $tasks_exists, + spec_title: $spec_title, + classification: $classification, + risk_level: $risk_level, + required_gates: $required_gates, + recommended_next_step: $recommended_next_step, + tasks_total: $tasks_total, + tasks_completed: $tasks_completed, + tasks_incomplete: $tasks_incomplete, + checklists: $checklists, + gate_artifacts: $gate_artifacts, + 
gate_acknowledgements: $gate_acknowledgements + }, + diff: { + changed_files_count: $changed_files_count, + lines_summary: $lines_summary, + recent_commit_subjects: $recent_commits + }, + quickfix_record: $quickfix_record, + existing_pr: { + exists: $existing_pr, + number: (if $existing_pr then $existing_pr_number else null end), + url: $existing_pr_url, + title: $existing_pr_title, + state: $existing_pr_state, + draft: $existing_pr_draft + } + }' +} + +run_create_or_update() { + local action="$1" + local preflight_json body_value pr_to_edit gh_output pr_url pr_view_json + preflight_json=$(collect_preflight) + + if [[ "$(echo "$preflight_json" | jq -r '.prerequisites.clean_worktree')" != "true" ]]; then + json_error "Working tree has unmanaged changes" "Commit, stash, or discard all changes before creating or updating a PR" + return 1 + fi + if [[ "$(echo "$preflight_json" | jq -r '.prerequisites.branch_pushed_to_remote')" != "true" ]]; then + json_error "Current branch is not pushed to remote" "$(echo "$preflight_json" | jq -r '.remote.details')" + return 1 + fi + + if [[ "$(echo "$preflight_json" | jq -r '.creation_supported')" != "true" ]]; then + json_error "Automated PR creation is only supported for GitHub in this release" "Platform: $DEVSPARK_PLATFORM_NAME" + return 1 + fi + if [[ "$(echo "$preflight_json" | jq -r '.auth_ok')" != "true" ]]; then + json_error "GitHub CLI is not authenticated" "Run: gh auth login" + return 1 + fi + + body_value=$(resolve_body) + if [[ -z "$TITLE" || -z "$body_value" ]]; then + json_error "Title and body are required for create/update mode" "Pass --title and --body or --body-file" + return 1 + fi + + if [[ "$action" == "create" && "$(echo "$preflight_json" | jq -r '.existing_pr.exists')" == "true" ]]; then + json_error "A PR already exists for this branch" "Use update mode or provide --pr-number" + return 1 + fi + + if [[ "$action" == "update" ]]; then + pr_to_edit="$PR_NUMBER" + if [[ -z "$pr_to_edit" ]]; then + 
pr_to_edit=$(echo "$preflight_json" | jq -r '.existing_pr.number // empty') + fi + if [[ -z "$pr_to_edit" ]]; then + json_error "No PR found to update" "Create a PR first or pass --pr-number" + return 1 + fi + gh pr edit "$pr_to_edit" --title "$TITLE" --body "$body_value" --base "${BASE_BRANCH:-$(echo "$preflight_json" | jq -r '.target_branch')}" >/dev/null + for reviewer in "${REVIEWERS[@]}"; do gh pr edit "$pr_to_edit" --add-reviewer "$reviewer" >/dev/null; done + for label in "${LABELS[@]}"; do gh pr edit "$pr_to_edit" --add-label "$label" >/dev/null; done + for assignee in "${ASSIGNEES[@]}"; do gh pr edit "$pr_to_edit" --add-assignee "$assignee" >/dev/null; done + pr_view_json=$(gh pr view "$pr_to_edit" --json number,url,title,state,isDraft 2>/dev/null) + else + local -a gh_args=() + gh_args+=(--title "$TITLE" --body "$body_value") + gh_args+=(--base "${BASE_BRANCH:-$(echo "$preflight_json" | jq -r '.target_branch')}") + if $DRAFT; then gh_args+=(--draft); fi + for reviewer in "${REVIEWERS[@]}"; do gh_args+=(--reviewer "$reviewer"); done + for label in "${LABELS[@]}"; do gh_args+=(--label "$label"); done + for assignee in "${ASSIGNEES[@]}"; do gh_args+=(--assignee "$assignee"); done + gh_output=$(gh pr create "${gh_args[@]}" 2>/dev/null || true) + pr_url=$(printf '%s\n' "$gh_output" | tail -n 1) + if [[ -z "$pr_url" ]]; then + json_error "Failed to create pull request" "$gh_output" + return 1 + fi + pr_view_json=$(gh pr view "$pr_url" --json number,url,title,state,isDraft 2>/dev/null) + fi + + jq -n --arg action "$action" --arg title "$(echo "$pr_view_json" | jq -r '.title')" --arg url "$(echo "$pr_view_json" | jq -r '.url')" --arg state "$(echo "$pr_view_json" | jq -r '.state')" --argjson number "$(echo "$pr_view_json" | jq '.number')" --argjson draft "$(echo "$pr_view_json" | jq '.isDraft // false')" '{status: "ok", action: $action, pr_number: $number, url: $url, title: $title, state: $state, draft: $draft}' +} + +main() { + case "$MODE" in + preflight) + 
collect_preflight + ;; + create) + run_create_or_update create + ;; + update) + run_create_or_update update + ;; + *) + json_error "Unknown mode" "$MODE" + return 1 + ;; + esac +} + +main diff --git a/.devspark/scripts/bash/delivery-status-smoke-test.sh b/.devspark/scripts/bash/delivery-status-smoke-test.sh new file mode 100644 index 00000000..dc27f270 --- /dev/null +++ b/.devspark/scripts/bash/delivery-status-smoke-test.sh @@ -0,0 +1,130 @@ +#!/usr/bin/env bash + +# Smoke test for delivery-status enforcement +# +# Validates that delivery-status gating works correctly in bash/powershell environments. +# This test verifies the core MVP feature: delivery runs report accurate status and gate create-pr. +# +# Usage: ./delivery-status-smoke-test.sh +# Expected exit code: 0 (success) or 1 (failure) + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" + +# Colors for output +GREEN='\033[0;32m' +RED='\033[0;31m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +test_count=0 +passed=0 +failed=0 + +# Helper functions +log_test() { + test_count=$((test_count + 1)) + echo -e "${YELLOW}[TEST $test_count]${NC} $1" +} + +log_pass() { + passed=$((passed + 1)) + echo -e " ${GREEN}✓ PASS${NC}: $1" +} + +log_fail() { + failed=$((failed + 1)) + echo -e " ${RED}✗ FAIL${NC}: $1" +} + +# Test 1: Verify git command works for change detection +log_test "Git diff strategy available" +if git --version >/dev/null 2>&1; then + log_pass "git command available" +else + log_fail "git command not available" +fi + +# Test 2: Verify delivery result file structure +log_test "Delivery result JSON structure" +result_file="$REPO_ROOT/.documentation/devspark/runs/latest/result.json" +if [[ -f "$result_file" ]]; then + if grep -q '"delivery_status"' "$result_file"; then + log_pass "delivery_status field present in result.json" + else + log_fail "delivery_status field missing from result.json" + fi + + if grep -q '"create_pr_ready"' 
"$result_file"; then + log_pass "create_pr_ready field present in result.json" + else + log_fail "create_pr_ready field missing from result.json" + fi +else + log_fail "No result.json file found (no recent runs)" +fi + +# Test 3: Verify delivery check validation rule exists +log_test "Delivery check rule defined" +spec_path="$REPO_ROOT/.documentation/specs/001-harness-delivery-integrity" +if [[ -d "$spec_path" ]]; then + log_pass "Feature spec directory exists" + + if grep -r "git\.changed_count\|git\.changed_path_match" "$spec_path" >/dev/null 2>&1; then + log_pass "Delivery validation rules defined in spec" + else + log_fail "Delivery validation rules not found in spec" + fi +else + log_fail "Feature spec directory not found" +fi + +# Test 4: Verify no-change explainer can be generated +log_test "No-change explainer artifact" +if [[ -f "$REPO_ROOT/.documentation/devspark/runs/latest/no-change-explainer.md" ]]; then + log_pass "No-change explainer artifact can be generated" +else + # This is OK if no run yet - not a failure + log_pass "No-change explainer (not required if no runs yet)" +fi + +# Test 5: Verify governance approval checkpoint exists +log_test "Governance approval gate documentation" +approval_file="$REPO_ROOT/.documentation/specs/001-harness-delivery-integrity/gates/governance-approval.md" +if [[ -f "$approval_file" ]]; then + if grep -q "Approver Name:" "$approval_file"; then + log_pass "Governance approval template exists" + else + log_fail "Governance approval template incomplete" + fi +else + log_fail "Governance approval template not found" +fi + +# Test 6: Verify git diff command executes without errors +log_test "Git diff command works" +if git diff --stat HEAD 2>/dev/null >/dev/null; then + log_pass "Git diff command executes without errors" +else + log_fail "Git diff command failed" +fi + +# Summary +echo "" +echo "==========================================" +echo "Smoke Test Summary" +echo "==========================================" +echo 
"Tests run: $test_count" +echo -e "Passed: ${GREEN}$passed${NC}" +echo -e "Failed: ${RED}$failed${NC}" +echo "" + +if [[ $failed -eq 0 ]]; then + echo -e "${GREEN}✓ All smoke tests passed${NC}" + exit 0 +else + echo -e "${RED}✗ Some tests failed${NC}" + exit 1 +fi diff --git a/.devspark/scripts/bash/evolution-context.sh b/.devspark/scripts/bash/evolution-context.sh new file mode 100644 index 00000000..2849b5ed --- /dev/null +++ b/.devspark/scripts/bash/evolution-context.sh @@ -0,0 +1,209 @@ +#!/usr/bin/env bash +# Constitution evolution context gathering script +# Analyzes PR reviews and audits to propose constitution amendments + +set -e +set -u +set -o pipefail + +SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/common.sh" + +# Multi-app support (T091) +parse_app_context "$@" 2>/dev/null || true +if [[ -n "${DEVSPARK_APP_ID:-}" || "${DEVSPARK_REPO_SCOPE:-false}" == "true" ]]; then + resolve_app_scope 2>/dev/null || true + print_scope_summary >&2 +fi + +# Default values +JSON_MODE=false +FROM_PR="" +FROM_AUDIT="" +SUGGESTION="" +ACTION="analyze" +CAP_ID="" + +# Parse arguments +while [[ $# -gt 0 ]]; do + case "$1" in + --json) + JSON_MODE=true + shift + ;; + --from-pr=*) + FROM_PR="${1#*=}" + shift + ;; + --from-pr) + FROM_PR="$2" + shift 2 + ;; + --from-audit=*) + FROM_AUDIT="${1#*=}" + shift + ;; + --from-audit) + FROM_AUDIT="$2" + shift 2 + ;; + suggest) + ACTION="suggest" + shift + # Rest of args are the suggestion + SUGGESTION="$*" + break + ;; + approve) + ACTION="approve" + shift + ;; + reject) + ACTION="reject" + shift + ;; + CAP-*) + CAP_ID="$1" + shift + ;; + *) + shift + ;; + esac +done + +# Get repository context +REPO_ROOT=$(get_repo_root) +CONSTITUTION_PATH="$REPO_ROOT/.documentation/memory/constitution.md" +PR_REVIEW_DIR="$REPO_ROOT/.documentation/specs/pr-review" +AUDIT_DIR="$REPO_ROOT/.documentation/copilot/audit" +PROPOSALS_DIR="$REPO_ROOT/.documentation/memory/proposals" 
+HISTORY_FILE="$REPO_ROOT/.documentation/memory/constitution-history.md" + +# Check constitution exists +CONSTITUTION_EXISTS="false" +CONSTITUTION_VERSION="" +if [[ -f "$CONSTITUTION_PATH" ]]; then + CONSTITUTION_EXISTS="true" + CONSTITUTION_VERSION=$(sed -nE 's/\*\*Version\*\*:[[:space:]]*([^[:space:]|]+).*/\1/p' "$CONSTITUTION_PATH" 2>/dev/null | head -1 || echo "1.0.0") +fi + +# Extract constitution principles +CONSTITUTION_PRINCIPLES='[]' +if [[ -f "$CONSTITUTION_PATH" ]]; then + CONSTITUTION_PRINCIPLES=$(grep -E '^###\s+' "$CONSTITUTION_PATH" 2>/dev/null | \ + sed 's/^###\s*//' | \ + jq -R -s 'split("\n") | map(select(. != ""))' 2>/dev/null || echo '[]') +fi + +# List PR reviews +PR_REVIEWS='[]' +PR_REVIEW_COUNT=0 +if [[ -d "$PR_REVIEW_DIR" ]]; then + PR_REVIEWS=$(ls "$PR_REVIEW_DIR"/pr-*.md 2>/dev/null | head -20 | while read -r f; do + basename "$f" .md + done | jq -R -s 'split("\n") | map(select(. != ""))' 2>/dev/null || echo '[]') + PR_REVIEW_COUNT=$(ls "$PR_REVIEW_DIR"/pr-*.md 2>/dev/null | wc -l | tr -d ' ' || echo "0") +fi + +# List audit reports +AUDIT_REPORTS='[]' +AUDIT_COUNT=0 +if [[ -d "$AUDIT_DIR" ]]; then + AUDIT_REPORTS=$(ls "$AUDIT_DIR"/*_results.md 2>/dev/null | head -10 | while read -r f; do + basename "$f" + done | jq -R -s 'split("\n") | map(select(. != ""))' 2>/dev/null || echo '[]') + AUDIT_COUNT=$(ls "$AUDIT_DIR"/*_results.md 2>/dev/null | wc -l | tr -d ' ' || echo "0") +fi + +# List existing proposals +PROPOSALS='[]' +if [[ -d "$PROPOSALS_DIR" ]]; then + PROPOSALS=$(ls "$PROPOSALS_DIR"/CAP-*.md 2>/dev/null | while read -r f; do + basename "$f" .md + done | jq -R -s 'split("\n") | map(select(. 
!= ""))' 2>/dev/null || echo '[]') +fi + +# Calculate next proposal ID (CAP-YYYY-NNN format) +YEAR=$(date +%Y) +NEXT_CAP_NUM=1 +if [[ -d "$PROPOSALS_DIR" ]]; then + LATEST=$(find "$PROPOSALS_DIR" -maxdepth 1 -name "CAP-${YEAR}-[0-9]*.md" -exec basename {} \; 2>/dev/null | grep -E "^CAP-${YEAR}-[0-9]+\.md$" | sort -r | head -1 || echo "") + if [[ -n "$LATEST" ]]; then + CURRENT_NUM=$(echo "$LATEST" | sed -E "s/CAP-$YEAR-0*([0-9]+)\.md/\1/") + NEXT_CAP_NUM=$((CURRENT_NUM + 1)) + fi +fi +NEXT_CAP_ID=$(printf "CAP-%s-%03d" "$YEAR" "$NEXT_CAP_NUM") + +# Aggregate violation patterns from PR reviews +PATTERN_SUMMARY='[]' +CRITICAL_COUNT=0 +HIGH_COUNT=0 + +if [[ -d "$PR_REVIEW_DIR" ]]; then + # Count severity levels across all reviews + CRITICAL_COUNT=$(grep -h "CRITICAL" "$PR_REVIEW_DIR"/*.md 2>/dev/null | wc -l | tr -d ' ' || echo "0") + HIGH_COUNT=$(grep -h "HIGH" "$PR_REVIEW_DIR"/*.md 2>/dev/null | wc -l | tr -d ' ' || echo "0") + + # Extract principle violations (simplified pattern matching) + PATTERN_SUMMARY=$(grep -h -E '^\| [A-Z]' "$PR_REVIEW_DIR"/*.md 2>/dev/null | \ + grep -Ev "Principle|Status|--" | \ + cut -d'|' -f2 | \ + sed 's/^[[:space:]]*//;s/[[:space:]]*$//' | \ + sort | uniq -c | sort -rn | head -10 | \ + awk '{print "{\"principle\": \"" $2 "\", \"count\": " $1 "}"}' | \ + jq -s '.' 2>/dev/null || echo '[]') +fi + +# Get timestamp +TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ") + +# Output JSON if requested +if [[ "$JSON_MODE" == true ]]; then + cat <&2; exit 2 ;; + esac +done + +if [[ -z "$REPO_ROOT" ]]; then + REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)" +fi + +COMMANDS_DIR="$REPO_ROOT/templates/commands" +ATOMIC_DIR="$REPO_ROOT/templates/prompts/atomic" + +if [[ ! 
-d "$COMMANDS_DIR" ]]; then + echo "commands dir not found: $COMMANDS_DIR" >&2 + exit 2 +fi +mkdir -p "$ATOMIC_DIR" + +drift=() +generated=0 + +shopt -s nullglob +for f in "$COMMANDS_DIR"/*.md; do + cmd="$(basename "$f" .md)" + shim="$ATOMIC_DIR/$cmd.md" + + description="Atomic shim for /devspark.${cmd}. Resolves to templates/commands/${cmd}.md." + if [[ ${#description} -gt 200 ]]; then + description="${description:0:197}..." + fi + + body=$(cat < team -> stock) and forwards execution to the legacy +command body. +EOF +) + + if [[ -f "$shim" ]]; then + if ! diff -q <(printf "%s\n" "$body") "$shim" >/dev/null 2>&1; then + drift+=("$cmd") + if [[ "$CHECK" -eq 0 ]]; then + printf "%s\n" "$body" > "$shim" + generated=$((generated + 1)) + fi + fi + else + drift+=("$cmd") + if [[ "$CHECK" -eq 0 ]]; then + printf "%s\n" "$body" > "$shim" + generated=$((generated + 1)) + fi + fi +done + +if [[ "$CHECK" -eq 1 ]]; then + if [[ ${#drift[@]} -gt 0 ]]; then + echo "Atomic-shim drift detected for ${#drift[@]} command(s):" + for d in "${drift[@]}"; do echo " - $d"; done + echo "" + echo "Run scripts/bash/generate-atomic-shims.sh to regenerate." + exit 1 + fi + echo "No atomic-shim drift detected." + exit 0 +fi + +echo "Generated/updated ${generated} atomic-prompt shim(s) under templates/prompts/atomic/." +exit 0 diff --git a/.devspark/scripts/bash/get-pr-context.sh b/.devspark/scripts/bash/get-pr-context.sh new file mode 100644 index 00000000..433cbb62 --- /dev/null +++ b/.devspark/scripts/bash/get-pr-context.sh @@ -0,0 +1,449 @@ +#!/usr/bin/env bash +# Extract PR context for review +# +# This script fetches Pull Request information from GitHub and provides it +# in JSON format for the pr-review command. 
+# +# Usage: ./get-pr-context.sh [PR_NUMBER] [--json] +# ./get-pr-context.sh --json # Auto-detect PR from current branch +# ./get-pr-context.sh 123 --json # Specific PR number +# ./get-pr-context.sh #123 --json # Also accepts # prefix + +set -e +set -u +set -o pipefail + +#============================================================================== +# Configuration +#============================================================================== + +SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +if git rev-parse --show-toplevel >/dev/null 2>&1; then + REPO_ROOT="$(git rev-parse --show-toplevel)" +else + REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" +fi + +# Load platform adapter (sets DEVSPARK_PLATFORM_NAME, DEVSPARK_PR_CLI, etc.) +source "$SCRIPT_DIR/platform.sh" + +# Multi-app support (T041, T066) +parse_app_context "$@" 2>/dev/null || true +if [[ -n "${DEVSPARK_APP_ID:-}" || "${DEVSPARK_REPO_SCOPE:-false}" == "true" ]]; then + resolve_app_scope 2>/dev/null || true + print_scope_summary >&2 + # Include scope report in PR context + generate_scope_report >&2 +fi + +PR_NUMBER="" +JSON_MODE=false + +#============================================================================== +# Parse Arguments +#============================================================================== + +for arg in "$@"; do + case "$arg" in + --json) + JSON_MODE=true + ;; + \#*) + # Strip # prefix if provided + PR_NUMBER="${arg#\#}" + ;; + [0-9]*) + # Numeric PR number + PR_NUMBER="$arg" + ;; + *) + # Ignore other arguments + ;; + esac +done + +#============================================================================== +# Utility Functions +#============================================================================== + +log_error() { + if [[ "$JSON_MODE" == true ]]; then + # Output JSON error + cat <&2 + [[ -n "${2:-}" ]] && echo "$2" >&2 + fi +} + +#============================================================================== +# PR Number Detection 
+#============================================================================== + +detect_pr_number() { + # Try various methods to detect PR number + + # Method 1: Check platform-specific environment variable + if [[ -n "${!DEVSPARK_PR_ENV_VAR:-}" ]]; then + echo "${!DEVSPARK_PR_ENV_VAR}" + return 0 + fi + + # Method 2: Check generic env var + if [[ -n "${PR_NUMBER_ENV:-}" ]]; then + echo "$PR_NUMBER_ENV" + return 0 + fi + + # Method 3: Try platform CLI for current branch + case "$DEVSPARK_PLATFORM_NAME" in + github) + if command -v gh &>/dev/null; then + local detected_pr + detected_pr=$(gh pr view --json number --jq '.number' 2>/dev/null || echo "") + if [[ -n "$detected_pr" ]]; then echo "$detected_pr"; return 0; fi + fi + ;; + azdo) + if command -v az &>/dev/null; then + local branch pr_list + branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "") + if [[ -n "$branch" ]]; then + pr_list=$(az repos pr list --source-branch "$branch" --status active --top 1 --output json 2>/dev/null || echo "[]") + local pr_id + pr_id=$(echo "$pr_list" | jq -r '.[0].pullRequestId // empty' 2>/dev/null || echo "") + if [[ -n "$pr_id" ]]; then echo "$pr_id"; return 0; fi + fi + fi + ;; + gitlab) + if command -v glab &>/dev/null; then + local mr_iid + mr_iid=$(glab mr view --output json 2>/dev/null | jq -r '.iid // empty' || echo "") + if [[ -n "$mr_iid" ]]; then echo "$mr_iid"; return 0; fi + fi + ;; + esac + + # Unable to detect + return 1 +} + +#============================================================================== +# Main Execution +#============================================================================== + +main() { + # Detect PR number if not provided + if [[ -z "$PR_NUMBER" ]]; then + if ! PR_NUMBER=$(detect_pr_number); then + log_error "Unable to detect PR number" \ + "Please provide PR number explicitly: /devspark.pr-review #123" + exit 1 + fi + fi + + # Validate PR number is numeric + if ! 
[[ "$PR_NUMBER" =~ ^[0-9]+$ ]]; then + log_error "Invalid PR number: $PR_NUMBER" \ + "PR number must be a positive integer" + exit 1 + fi + + # Check if platform CLI is available + local cli_cmd="$DEVSPARK_PR_CLI" + if ! command -v "$cli_cmd" &>/dev/null; then + log_error "$DEVSPARK_PLATFORM_DISPLAY CLI ($cli_cmd) is required but not installed" \ + "Install from: $DEVSPARK_PR_CLI_INSTALL_URL" + exit 1 + fi + + # Check platform CLI authentication + if ! check_platform_auth; then + local auth_cmd + case "$DEVSPARK_PLATFORM_NAME" in + github) auth_cmd="gh auth login" ;; + azdo) auth_cmd="az login" ;; + gitlab) auth_cmd="glab auth login" ;; + *) auth_cmd="See platform documentation" ;; + esac + log_error "$DEVSPARK_PLATFORM_DISPLAY CLI not authenticated" \ + "Run: $auth_cmd" + exit 1 + fi + + # Fetch PR data (platform-specific, normalized to common fields) + local pr_number_int="$PR_NUMBER" + local pr_title="" pr_body="" pr_state="" pr_author="" + local source_branch="" target_branch="" + local commit_sha="unknown" commit_count=0 + local files_changed_json="[]" lines_added=0 lines_deleted=0 + local created_at="" updated_at="" + local merge_state_status="UNKNOWN" + local file_sample_limit=200 + + case "$DEVSPARK_PLATFORM_NAME" in + github) + local pr_data + if ! pr_data=$(gh pr view "$PR_NUMBER" --json number,title,body,state,author,headRefName,baseRefName,commits,files,additions,deletions,createdAt,updatedAt,mergeStateStatus 2>&1); then + log_error "Failed to fetch PR #$PR_NUMBER" \ + "Verify PR exists and you have access. 
Details: $pr_data" + exit 1 + fi + pr_number_int=$(echo "$pr_data" | jq -r '.number') + pr_title=$(echo "$pr_data" | jq -r '.title') + pr_body=$(echo "$pr_data" | jq -r '.body // ""') + pr_state=$(echo "$pr_data" | jq -r '.state') + pr_author=$(echo "$pr_data" | jq -r '.author.login') + source_branch=$(echo "$pr_data" | jq -r '.headRefName') + target_branch=$(echo "$pr_data" | jq -r '.baseRefName') + commit_sha=$(echo "$pr_data" | jq -r '.commits[-1].oid // "unknown"') + commit_count=$(echo "$pr_data" | jq '.commits | length') + files_changed_json=$(echo "$pr_data" | jq -c '[.files[].path]') + lines_added=$(echo "$pr_data" | jq '.additions // 0') + lines_deleted=$(echo "$pr_data" | jq '.deletions // 0') + created_at=$(echo "$pr_data" | jq -r '.createdAt') + updated_at=$(echo "$pr_data" | jq -r '.updatedAt') + merge_state_status=$(echo "$pr_data" | jq -r '.mergeStateStatus // "UNKNOWN"') + ;; + azdo) + local pr_data + if ! pr_data=$(az repos pr show --id "$PR_NUMBER" --output json 2>&1); then + log_error "Failed to fetch PR #$PR_NUMBER" \ + "Verify PR exists and you have access. 
Details: $pr_data" + exit 1 + fi + pr_number_int=$(echo "$pr_data" | jq -r '.pullRequestId') + pr_title=$(echo "$pr_data" | jq -r '.title') + pr_body=$(echo "$pr_data" | jq -r '.description // ""') + pr_state=$(echo "$pr_data" | jq -r '.status') + pr_author=$(echo "$pr_data" | jq -r '.createdBy.uniqueName') + source_branch=$(echo "$pr_data" | jq -r '.sourceRefName' | sed 's|^refs/heads/||') + target_branch=$(echo "$pr_data" | jq -r '.targetRefName' | sed 's|^refs/heads/||') + commit_sha=$(echo "$pr_data" | jq -r '.commits[-1].commitId // "unknown"') + commit_count=$(echo "$pr_data" | jq '.commits | length // 0') + created_at=$(echo "$pr_data" | jq -r '.creationDate') + updated_at=$(echo "$pr_data" | jq -r '.completionQueueTime // .creationDate') + # AzDO mergeStatus: notSet | conflicts | succeeded | rejectedByPolicy | failure + merge_state_status=$(echo "$pr_data" | jq -r '.mergeStatus // "UNKNOWN"' | tr '[:lower:]' '[:upper:]') + # Files resolved via git diff below + ;; + gitlab) + local mr_data + if ! mr_data=$(glab mr view "$PR_NUMBER" --output json 2>&1); then + log_error "Failed to fetch MR !$PR_NUMBER" \ + "Verify MR exists and you have access. 
Details: $mr_data" + exit 1 + fi + pr_number_int=$(echo "$mr_data" | jq -r '.iid') + pr_title=$(echo "$mr_data" | jq -r '.title') + pr_body=$(echo "$mr_data" | jq -r '.description // ""') + pr_state=$(echo "$mr_data" | jq -r '.state') + pr_author=$(echo "$mr_data" | jq -r '.author.username') + source_branch=$(echo "$mr_data" | jq -r '.source_branch') + target_branch=$(echo "$mr_data" | jq -r '.target_branch') + created_at=$(echo "$mr_data" | jq -r '.created_at') + updated_at=$(echo "$mr_data" | jq -r '.updated_at') + local raw_merge_status + raw_merge_status=$(echo "$mr_data" | jq -r '.merge_status // "UNKNOWN"') + if [[ "$raw_merge_status" == "can_be_merged" ]]; then + merge_state_status="CLEAN" + else + merge_state_status=$(echo "$raw_merge_status" | tr '[:lower:]' '[:upper:]') + fi + # Files resolved via git diff below + ;; + esac + + # For non-GitHub platforms, resolve file list and commit SHA via git diff + if [[ "$DEVSPARK_PLATFORM_NAME" != "github" && -n "$source_branch" && -n "$target_branch" ]]; then + git fetch origin "$source_branch" "$target_branch" 2>/dev/null || true + files_changed_json=$(git diff --name-only "origin/$target_branch...origin/$source_branch" 2>/dev/null \ + | jq -R -s -c 'split("\n") | map(select(length > 0))' || echo "[]") + if [[ "$commit_sha" == "unknown" ]]; then + commit_sha=$(git rev-parse "origin/$source_branch" 2>/dev/null || echo "unknown") + fi + fi + + #========================================================================== + # HARD RULE: Source branch MUST be in sync with target branch + # GitHub's mergeStateStatus=="BEHIND" is treated as authoritative. + # A universal git-based check covers AzDO, GitLab, and verifies GitHub. 
+ #========================================================================== + local is_behind_target="false" + + # Fast path: GitHub API already says BEHIND + if [[ "$merge_state_status" == "BEHIND" ]]; then + is_behind_target="true" + fi + + # Universal git check — fetch both refs, count target commits absent from source + if [[ "$is_behind_target" == "false" && -n "$source_branch" && -n "$target_branch" ]]; then + git fetch origin "$source_branch" "$target_branch" 2>/dev/null || true + local behind_count + behind_count=$(git rev-list "origin/$target_branch" "^origin/$source_branch" --count 2>/dev/null || echo "0") + if [[ "$behind_count" -gt 0 ]]; then + is_behind_target="true" + fi + fi + + if [[ "$is_behind_target" == "true" ]]; then + local fix_hint + case "$DEVSPARK_PLATFORM_NAME" in + github) fix_hint="gh pr update-branch $PR_NUMBER OR git fetch origin && git rebase origin/$target_branch" ;; + *) fix_hint="git fetch origin && git rebase origin/$target_branch" ;; + esac + log_error "BLOCKED: Source branch '$source_branch' is behind target branch '$target_branch'" \ + "HARD RULE: PR review and approval are blocked until the source branch is in sync with the target branch. 
Fix with: $fix_hint" + exit 1 + fi + + # Check if diff is available (platform-specific) + local diff_available="false" + case "$DEVSPARK_PLATFORM_NAME" in + github) + if gh pr diff "$PR_NUMBER" &>/dev/null; then diff_available="true"; fi + ;; + *) + if git diff "origin/$target_branch...origin/$source_branch" &>/dev/null; then diff_available="true"; fi + ;; + esac + + # Apply file sampling + local files_changed_total + files_changed_total=$(echo "$files_changed_json" | jq 'length') + local files_changed_truncated="false" + if [[ "$files_changed_total" -gt "$file_sample_limit" ]]; then + files_changed_json=$(echo "$files_changed_json" | jq -c ".[0:$file_sample_limit]") + files_changed_truncated="true" + fi + + # Check for constitution + local constitution_path="$REPO_ROOT/.documentation/memory/constitution.md" + local constitution_exists="false" + if [[ -f "$constitution_path" ]]; then + constitution_exists="true" + fi + + #========================================================================== + # Spec Lifecycle Detection + # Extract feature identifier from branch name and check spec/task status + #========================================================================== + local spec_feature_id="" + local spec_status="N/A" + local spec_path="" + local tasks_total=0 + local tasks_completed=0 + local tasks_incomplete=0 + local is_feature_branch="false" + + # Detect feature branch pattern: digits-name (e.g., 001-feature-name) + if [[ "$source_branch" =~ ^([0-9]+-[a-zA-Z].*)$ ]]; then + is_feature_branch="true" + spec_feature_id="$source_branch" + + # Check for spec directory + local feature_dir="$REPO_ROOT/.documentation/specs/$spec_feature_id" + spec_path="$feature_dir/spec.md" + + if [[ -f "$spec_path" ]]; then + # Extract Status field from spec.md + spec_status=$(grep -oP '\*\*Status\*\*:\s*\K[A-Za-z ]+' "$spec_path" 2>/dev/null | head -1 | sed 's/[[:space:]]*$//' || echo "Unknown") + # Strip HTML comment if present + spec_status=$(echo "$spec_status" | sed 's/" 
+SHARED_CONTEXT_END="" + +# Template file +TEMPLATE_FILE="$REPO_ROOT/.documentation/templates/agent-file-template.md" + +# Global variables for parsed plan data +NEW_LANG="" +NEW_FRAMEWORK="" +NEW_DB="" +NEW_PROJECT_TYPE="" + +#============================================================================== +# Utility Functions +#============================================================================== + +log_info() { + echo "INFO: $1" +} + +log_success() { + echo "✓ $1" +} + +log_error() { + echo "ERROR: $1" >&2 +} + +log_warning() { + echo "WARNING: $1" >&2 +} + +get_agent_name() { + local agent_key="$1" + jq -r --arg key "$agent_key" '.agents[] | select(.key == $key) | .name' "$AGENT_REGISTRY_FILE" +} + +get_agent_target_file() { + local agent_key="$1" + local relative_path + relative_path=$(jq -r --arg key "$agent_key" '.agents[] | select(.key == $key) | .context_file' "$AGENT_REGISTRY_FILE") + [[ -n "$relative_path" && "$relative_path" != "null" ]] || return 1 + printf '%s/%s\n' "$REPO_ROOT" "$relative_path" +} + +get_agent_keys() { + jq -r '.agents[].key' "$AGENT_REGISTRY_FILE" +} + +refresh_shared_context_block() { + local target_file="$1" + + [[ -f "$SHARED_AGENT_CONTEXT_FILE" ]] || return 0 + [[ "$target_file" != "$SHARED_AGENT_CONTEXT_FILE" ]] || return 0 + + local temp_file + temp_file=$(mktemp) || { + log_error "Failed to create temporary file for shared context refresh" + return 1 + } + + awk -v start="$SHARED_CONTEXT_START" -v end="$SHARED_CONTEXT_END" ' + $0 == start { skipping=1; next } + $0 == end { skipping=0; next } + !skipping { print } + ' "$target_file" > "$temp_file" + + { + cat "$temp_file" + printf '\n%s\n' "$SHARED_CONTEXT_START" + cat "$SHARED_AGENT_CONTEXT_FILE" + printf '\n%s\n' "$SHARED_CONTEXT_END" + } > "$temp_file.rendered" + + mv "$temp_file.rendered" "$target_file" + rm -f "$temp_file" +} + +# Cleanup function for temporary files +cleanup() { + local exit_code=$? 
+ rm -f /tmp/agent_update_*_$$ + rm -f /tmp/manual_additions_$$ + exit $exit_code +} + +# Set up cleanup trap +trap cleanup EXIT INT TERM + +#============================================================================== +# Validation Functions +#============================================================================== + +validate_environment() { + # Check if we have a current branch/feature (git or non-git) + if [[ -z "$CURRENT_BRANCH" ]]; then + log_error "Unable to determine current feature" + if [[ "$HAS_GIT" == "true" ]]; then + log_info "Make sure you're on a feature branch" + else + log_info "Set DEVSPARK_FEATURE environment variable or create a feature first" + fi + exit 1 + fi + + # Check if plan.md exists + if [[ ! -f "$NEW_PLAN" ]]; then + log_error "No plan.md found at $NEW_PLAN" + log_info "Make sure you're working on a feature with a corresponding spec directory" + if [[ "$HAS_GIT" != "true" ]]; then + log_info "Use: export DEVSPARK_FEATURE=your-feature-name or create a new feature first" + fi + exit 1 + fi + + if [[ ! -f "$AGENT_REGISTRY_FILE" ]]; then + log_error "Agent registry not found at $AGENT_REGISTRY_FILE" + exit 1 + fi + + # Check if template exists (needed for new files) + if [[ ! -f "$TEMPLATE_FILE" ]]; then + log_warning "Template file not found at $TEMPLATE_FILE" + log_warning "Creating new agent files will fail" + fi + + if [[ ! 
-f "$SHARED_AGENT_CONTEXT_FILE" ]]; then + log_warning "Shared agent context not found at $SHARED_AGENT_CONTEXT_FILE" + fi +} + +#============================================================================== +# Plan Parsing Functions +#============================================================================== + +extract_plan_field() { + local field_pattern="$1" + local plan_file="$2" + + grep "^\*\*${field_pattern}\*\*: " "$plan_file" 2>/dev/null | \ + head -1 | \ + sed "s|^\*\*${field_pattern}\*\*: ||" | \ + sed 's/^[ \t]*//;s/[ \t]*$//' | \ + grep -v "NEEDS CLARIFICATION" | \ + grep -v "^N/A$" || echo "" +} + +parse_plan_data() { + local plan_file="$1" + + if [[ ! -f "$plan_file" ]]; then + log_error "Plan file not found: $plan_file" + return 1 + fi + + if [[ ! -r "$plan_file" ]]; then + log_error "Plan file is not readable: $plan_file" + return 1 + fi + + log_info "Parsing plan data from $plan_file" + + NEW_LANG=$(extract_plan_field "Language/Version" "$plan_file") + NEW_FRAMEWORK=$(extract_plan_field "Primary Dependencies" "$plan_file") + NEW_DB=$(extract_plan_field "Storage" "$plan_file") + NEW_PROJECT_TYPE=$(extract_plan_field "Project Type" "$plan_file") + + # Log what we found + if [[ -n "$NEW_LANG" ]]; then + log_info "Found language: $NEW_LANG" + else + log_warning "No language information found in plan" + fi + + if [[ -n "$NEW_FRAMEWORK" ]]; then + log_info "Found framework: $NEW_FRAMEWORK" + fi + + if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]]; then + log_info "Found database: $NEW_DB" + fi + + if [[ -n "$NEW_PROJECT_TYPE" ]]; then + log_info "Found project type: $NEW_PROJECT_TYPE" + fi +} + +format_technology_stack() { + local lang="$1" + local framework="$2" + local parts=() + + # Add non-empty parts + [[ -n "$lang" && "$lang" != "NEEDS CLARIFICATION" ]] && parts+=("$lang") + [[ -n "$framework" && "$framework" != "NEEDS CLARIFICATION" && "$framework" != "N/A" ]] && parts+=("$framework") + + # Join with proper formatting + if [[ ${#parts[@]} 
-eq 0 ]]; then + echo "" + elif [[ ${#parts[@]} -eq 1 ]]; then + echo "${parts[0]}" + else + # Join multiple parts with " + " + local result="${parts[0]}" + for ((i=1; i<${#parts[@]}; i++)); do + result="$result + ${parts[i]}" + done + echo "$result" + fi +} + +#============================================================================== +# Template and Content Generation Functions +#============================================================================== + +get_project_structure() { + local project_type="$1" + + if [[ "$project_type" == *"web"* ]]; then + echo "backend/\\nfrontend/\\ntests/" + else + echo "src/\\ntests/" + fi +} + +get_commands_for_language() { + local lang="$1" + + case "$lang" in + *"Python"*) + echo "cd src && pytest && ruff check ." + ;; + *"Rust"*) + echo "cargo test && cargo clippy" + ;; + *"JavaScript"*|*"TypeScript"*) + echo "npm test \\&\\& npm run lint" + ;; + *) + echo "# Add commands for $lang" + ;; + esac +} + +get_language_conventions() { + local lang="$1" + echo "$lang: Follow standard conventions" +} + +create_new_agent_file() { + local target_file="$1" + local temp_file="$2" + local project_name="$3" + local current_date="$4" + + if [[ ! -f "$TEMPLATE_FILE" ]]; then + log_error "Template not found at $TEMPLATE_FILE" + return 1 + fi + + if [[ ! -r "$TEMPLATE_FILE" ]]; then + log_error "Template file is not readable: $TEMPLATE_FILE" + return 1 + fi + + log_info "Creating new agent context file from template..." + + if ! 
cp "$TEMPLATE_FILE" "$temp_file"; then + log_error "Failed to copy template file" + return 1 + fi + + # Replace template placeholders + local project_structure + project_structure=$(get_project_structure "$NEW_PROJECT_TYPE") + + local commands + commands=$(get_commands_for_language "$NEW_LANG") + + local language_conventions + language_conventions=$(get_language_conventions "$NEW_LANG") + + # Perform substitutions with error checking using safer approach + # Escape special characters for sed by using a different delimiter or escaping + local escaped_lang + escaped_lang=$(printf '%s\n' "$NEW_LANG" | sed 's/[\[\.*^$()+{}|]/\\&/g') + local escaped_framework + escaped_framework=$(printf '%s\n' "$NEW_FRAMEWORK" | sed 's/[\[\.*^$()+{}|]/\\&/g') + local escaped_branch + escaped_branch=$(printf '%s\n' "$CURRENT_BRANCH" | sed 's/[\[\.*^$()+{}|]/\\&/g') + + # Build technology stack and recent change strings conditionally + local tech_stack + if [[ -n "$escaped_lang" && -n "$escaped_framework" ]]; then + tech_stack="- $escaped_lang + $escaped_framework ($escaped_branch)" + elif [[ -n "$escaped_lang" ]]; then + tech_stack="- $escaped_lang ($escaped_branch)" + elif [[ -n "$escaped_framework" ]]; then + tech_stack="- $escaped_framework ($escaped_branch)" + else + tech_stack="- ($escaped_branch)" + fi + + local recent_change + if [[ -n "$escaped_lang" && -n "$escaped_framework" ]]; then + recent_change="- $escaped_branch: Added $escaped_lang + $escaped_framework" + elif [[ -n "$escaped_lang" ]]; then + recent_change="- $escaped_branch: Added $escaped_lang" + elif [[ -n "$escaped_framework" ]]; then + recent_change="- $escaped_branch: Added $escaped_framework" + else + recent_change="- $escaped_branch: Added" + fi + + local substitutions=( + "s|\[PROJECT NAME\]|$project_name|" + "s|\[DATE\]|$current_date|" + "s|\[EXTRACTED FROM ALL PLAN.MD FILES\]|$tech_stack|" + "s|\[ACTUAL STRUCTURE FROM PLANS\]|$project_structure|g" + "s|\[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES\]|$commands|" + 
"s|\[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE\]|$language_conventions|" + "s|\[LAST 3 FEATURES AND WHAT THEY ADDED\]|$recent_change|" + ) + + for substitution in "${substitutions[@]}"; do + if ! sed -i.bak -e "$substitution" "$temp_file"; then + log_error "Failed to perform substitution: $substitution" + rm -f "$temp_file" "$temp_file.bak" + return 1 + fi + done + + # Convert \n sequences to actual newlines + newline=$(printf '\n') + sed -i.bak2 "s/\\\\n/${newline}/g" "$temp_file" + + # Clean up backup files + rm -f "$temp_file.bak" "$temp_file.bak2" + + return 0 +} + + + + +update_existing_agent_file() { + local target_file="$1" + local current_date="$2" + + log_info "Updating existing agent context file..." + + # Use a single temporary file for atomic update + local temp_file + temp_file=$(mktemp) || { + log_error "Failed to create temporary file" + return 1 + } + + # Process the file in one pass + local tech_stack + tech_stack=$(format_technology_stack "$NEW_LANG" "$NEW_FRAMEWORK") + local new_tech_entries=() + local new_change_entry="" + + # Prepare new technology entries + if [[ -n "$tech_stack" ]] && ! grep -q "$tech_stack" "$target_file"; then + new_tech_entries+=("- $tech_stack ($CURRENT_BRANCH)") + fi + + if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]] && [[ "$NEW_DB" != "NEEDS CLARIFICATION" ]] && ! 
grep -q "$NEW_DB" "$target_file"; then + new_tech_entries+=("- $NEW_DB ($CURRENT_BRANCH)") + fi + + # Prepare new change entry + if [[ -n "$tech_stack" ]]; then + new_change_entry="- $CURRENT_BRANCH: Added $tech_stack" + elif [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]] && [[ "$NEW_DB" != "NEEDS CLARIFICATION" ]]; then + new_change_entry="- $CURRENT_BRANCH: Added $NEW_DB" + fi + + # Check if sections exist in the file + local has_active_technologies=0 + local has_recent_changes=0 + + if grep -q "^## Active Technologies" "$target_file" 2>/dev/null; then + has_active_technologies=1 + fi + + if grep -q "^## Recent Changes" "$target_file" 2>/dev/null; then + has_recent_changes=1 + fi + + # Process file line by line + local in_tech_section=false + local in_changes_section=false + local tech_entries_added=false + local existing_changes_count=0 + + while IFS= read -r line || [[ -n "$line" ]]; do + # Handle Active Technologies section + if [[ "$line" == "## Active Technologies" ]]; then + echo "$line" >> "$temp_file" + in_tech_section=true + continue + elif [[ $in_tech_section == true ]] && [[ "$line" =~ ^##[[:space:]] ]]; then + # Add new tech entries before closing the section + if [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then + printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file" + tech_entries_added=true + fi + echo "$line" >> "$temp_file" + in_tech_section=false + continue + elif [[ $in_tech_section == true ]] && [[ -z "$line" ]]; then + # Add new tech entries before empty line in tech section + if [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then + printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file" + tech_entries_added=true + fi + echo "$line" >> "$temp_file" + continue + fi + + # Handle Recent Changes section + if [[ "$line" == "## Recent Changes" ]]; then + echo "$line" >> "$temp_file" + # Add new change entry right after the heading + if [[ -n "$new_change_entry" ]]; then + echo 
"$new_change_entry" >> "$temp_file" + fi + in_changes_section=true + continue + elif [[ $in_changes_section == true ]] && [[ "$line" =~ ^##[[:space:]] ]]; then + echo "$line" >> "$temp_file" + in_changes_section=false + continue + elif [[ $in_changes_section == true ]] && [[ "$line" == "- "* ]]; then + # Keep only first 2 existing changes + if [[ $existing_changes_count -lt 2 ]]; then + echo "$line" >> "$temp_file" + ((existing_changes_count++)) + fi + continue + fi + + # Update timestamp + if [[ "$line" =~ \*\*Last\ updated\*\*:.*[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] ]]; then + echo "$line" | sed "s/[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]/$current_date/" >> "$temp_file" + else + echo "$line" >> "$temp_file" + fi + done < "$target_file" + + # Post-loop check: if we're still in the Active Technologies section and haven't added new entries + if [[ $in_tech_section == true ]] && [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then + printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file" + tech_entries_added=true + fi + + # If sections don't exist, add them at the end of the file + if [[ $has_active_technologies -eq 0 ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then + echo "" >> "$temp_file" + echo "## Active Technologies" >> "$temp_file" + printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file" + tech_entries_added=true + fi + + if [[ $has_recent_changes -eq 0 ]] && [[ -n "$new_change_entry" ]]; then + echo "" >> "$temp_file" + echo "## Recent Changes" >> "$temp_file" + echo "$new_change_entry" >> "$temp_file" + fi + + # Move temp file to target atomically + if ! mv "$temp_file" "$target_file"; then + log_error "Failed to update target file" + rm -f "$temp_file" + return 1 + fi + + if ! 
refresh_shared_context_block "$target_file"; then + log_error "Failed to refresh shared context block in $target_file" + return 1 + fi + + return 0 +} +#============================================================================== +# Main Agent File Update Function +#============================================================================== + +update_agent_file() { + local target_file="$1" + local agent_name="$2" + + if [[ -z "$target_file" ]] || [[ -z "$agent_name" ]]; then + log_error "update_agent_file requires target_file and agent_name parameters" + return 1 + fi + + log_info "Updating $agent_name context file: $target_file" + + local project_name + project_name=$(basename "$REPO_ROOT") + local current_date + current_date=$(date +%Y-%m-%d) + + # Create directory if it doesn't exist + local target_dir + target_dir=$(dirname "$target_file") + if [[ ! -d "$target_dir" ]]; then + if ! mkdir -p "$target_dir"; then + log_error "Failed to create directory: $target_dir" + return 1 + fi + fi + + if [[ ! -f "$target_file" ]]; then + # Create new file from template + local temp_file + temp_file=$(mktemp) || { + log_error "Failed to create temporary file" + return 1 + } + + if create_new_agent_file "$target_file" "$temp_file" "$project_name" "$current_date"; then + if mv "$temp_file" "$target_file"; then + log_success "Created new $agent_name context file" + else + log_error "Failed to move temporary file to $target_file" + rm -f "$temp_file" + return 1 + fi + else + log_error "Failed to create new agent file" + rm -f "$temp_file" + return 1 + fi + else + # Update existing file + if [[ ! -r "$target_file" ]]; then + log_error "Cannot read existing file: $target_file" + return 1 + fi + + if [[ ! 
-w "$target_file" ]]; then + log_error "Cannot write to existing file: $target_file" + return 1 + fi + + if update_existing_agent_file "$target_file" "$current_date"; then + log_success "Updated existing $agent_name context file" + else + log_error "Failed to update existing agent file" + return 1 + fi + fi + + return 0 +} + +#============================================================================== +# Agent Selection and Processing +#============================================================================== + +update_specific_agent() { + local agent_type="$1" + + local target_file + local agent_name + target_file=$(get_agent_target_file "$agent_type") || { + log_error "Unknown agent type '$agent_type'" + log_error "Expected: $(get_agent_keys | paste -sd'|' -)" + exit 1 + } + agent_name=$(get_agent_name "$agent_type") + update_agent_file "$target_file" "$agent_name" +} + +update_all_existing_agents() { + local found_agent=false + + local seen_targets='|' + local agent_key + while IFS= read -r agent_key; do + local target_file + local agent_name + target_file=$(get_agent_target_file "$agent_key") || continue + agent_name=$(get_agent_name "$agent_key") + if [[ "$seen_targets" == *"|$target_file|"* ]]; then + continue + fi + seen_targets+="$target_file|" + if [[ -f "$target_file" ]]; then + update_agent_file "$target_file" "$agent_name" + found_agent=true + fi + done < <(get_agent_keys) + + # If no agent files exist, create a default Claude file + if [[ "$found_agent" == false ]]; then + log_info "No existing agent files found, creating default Claude file..." 
+ update_agent_file "$REPO_ROOT/CLAUDE.md" "Claude Code" + fi +} +print_summary() { + echo + log_info "Summary of changes:" + + if [[ -n "$NEW_LANG" ]]; then + echo " - Added language: $NEW_LANG" + fi + + if [[ -n "$NEW_FRAMEWORK" ]]; then + echo " - Added framework: $NEW_FRAMEWORK" + fi + + if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]]; then + echo " - Added database: $NEW_DB" + fi + + echo + + log_info "Usage: $0 [$(get_agent_keys | paste -sd'|' -)]" +} + +#============================================================================== +# Main Execution +#============================================================================== + +main() { + # Validate environment before proceeding + validate_environment + + log_info "=== Updating agent context files for feature $CURRENT_BRANCH ===" + + # Parse the plan file to extract project information + if ! parse_plan_data "$NEW_PLAN"; then + log_error "Failed to parse plan data" + exit 1 + fi + + # Process based on agent type argument + local success=true + + if [[ -z "$AGENT_TYPE" ]]; then + # No specific agent provided - update all existing agent files + log_info "No agent specified, updating all existing agent files..." + if ! update_all_existing_agents; then + success=false + fi + else + # Specific agent provided - update only that agent + log_info "Updating specific agent: $AGENT_TYPE" + if ! 
update_specific_agent "$AGENT_TYPE"; then + success=false + fi + fi + + # Print summary + print_summary + + if [[ "$success" == true ]]; then + log_success "Agent context update completed successfully" + exit 0 + else + log_error "Agent context update completed with errors" + exit 1 + fi +} + +# Execute main function if script is run directly +if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then + main "$@" +fi diff --git a/.devspark/scripts/powershell/check-prerequisites.ps1 b/.devspark/scripts/powershell/check-prerequisites.ps1 index 5c93a0aa..02442cef 100644 --- a/.devspark/scripts/powershell/check-prerequisites.ps1 +++ b/.devspark/scripts/powershell/check-prerequisites.ps1 @@ -20,6 +20,8 @@ param( [switch]$RequireTasks, [switch]$IncludeTasks, [switch]$PathsOnly, + [switch]$RequireDeliveryStatus, + [int]$TimeoutSeconds = 300, [switch]$Help ) @@ -37,6 +39,8 @@ OPTIONS: -RequireTasks Require tasks.md to exist (for implementation phase) -IncludeTasks Include tasks.md in AVAILABLE_DOCS list -PathsOnly Only output path variables (no prerequisite validation) + -RequireDeliveryStatus Require latest harness run to be create-pr ready + -TimeoutSeconds Timeout value reported in diagnostics (default: 300) -Help, -h Show this help message EXAMPLES: @@ -49,6 +53,9 @@ EXAMPLES: # Get feature paths only (no validation) .\check-prerequisites.ps1 -PathsOnly + # Enforce delivery gate for create-pr/pr-review transitions + .\check-prerequisites.ps1 -Json -RequireDeliveryStatus -TimeoutSeconds 300 + "@ exit 0 } @@ -110,6 +117,25 @@ if ($RequireTasks -and -not (Test-Path $paths.TASKS -PathType Leaf)) { exit 1 } +if ($RequireDeliveryStatus) { + $runsRoot = Join-Path $paths.REPO_ROOT ".documentation/devspark/runs" + if (Test-Path $runsRoot -PathType Container) { + $latest = Get-ChildItem -Path $runsRoot -Directory -ErrorAction SilentlyContinue | + Sort-Object LastWriteTime -Descending | + Select-Object -First 1 + if ($latest) { + $resultPath = Join-Path $latest.FullName "result.json" + if 
(Test-Path $resultPath -PathType Leaf) { + $result = Get-Content $resultPath -Raw | ConvertFrom-Json + if (-not $result.create_pr_ready) { + Write-Output "ERROR: delivery-status gate failed; latest harness run is not create-pr ready" + exit 1 + } + } + } + } +} + # Build list of available documents $docs = @() diff --git a/.devspark/scripts/powershell/common.ps1 b/.devspark/scripts/powershell/common.ps1 index ed0d6025..8996ad59 100644 --- a/.devspark/scripts/powershell/common.ps1 +++ b/.devspark/scripts/powershell/common.ps1 @@ -577,5 +577,3 @@ function Get-FeaturePathsAppAware { CONTRACTS_DIR = Join-Path $featureDir 'contracts' } } - - diff --git a/.devspark/scripts/powershell/create-new-feature.ps1 b/.devspark/scripts/powershell/create-new-feature.ps1 index 5c6eb1c8..babd5496 100644 --- a/.devspark/scripts/powershell/create-new-feature.ps1 +++ b/.devspark/scripts/powershell/create-new-feature.ps1 @@ -285,4 +285,3 @@ if ($Json) { Write-Output "HAS_GIT: $hasGit" Write-Output "DEVSPARK_FEATURE environment variable set to: $branchName" } - diff --git a/.devspark/scripts/powershell/create-pr.ps1 b/.devspark/scripts/powershell/create-pr.ps1 index 3159141d..a46b7069 100644 --- a/.devspark/scripts/powershell/create-pr.ps1 +++ b/.devspark/scripts/powershell/create-pr.ps1 @@ -483,4 +483,4 @@ switch ($Mode) { 'Update' { Write-Output (Invoke-CreateOrUpdatePr -Action Update) } -} \ No newline at end of file +} diff --git a/.devspark/scripts/powershell/delivery-status-smoke-test.ps1 b/.devspark/scripts/powershell/delivery-status-smoke-test.ps1 new file mode 100644 index 00000000..f05277d9 --- /dev/null +++ b/.devspark/scripts/powershell/delivery-status-smoke-test.ps1 @@ -0,0 +1,153 @@ +# Smoke test for delivery-status enforcement (PowerShell) +# +# Validates that delivery-status gating works correctly in PowerShell environments. +# This test verifies the core MVP feature: delivery runs report accurate status and gate create-pr. 
+# +# Usage: .\delivery-status-smoke-test.ps1 +# Expected exit code: 0 (success) or non-zero (failure) + +param( + [switch]$Quiet = $false +) + +$ErrorActionPreference = "Continue" +$ProgressPreference = "SilentlyContinue" + +# Script locations +$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path +$RepoRoot = (Get-Item $ScriptDir).Parent.Parent.FullName + +# Colors for output +$Green = "`e[32m" +$Red = "`e[31m" +$Yellow = "`e[33m" +$NC = "`e[0m" + +$testCount = 0 +$passed = 0 +$failed = 0 + +# Helper functions +function LogTest { + param([string]$Message) + $script:testCount++ + if (-not $Quiet) { + Write-Host "[TEST $testCount] $Message" -ForegroundColor Cyan + } +} + +function LogPass { + param([string]$Message) + $script:passed++ + if (-not $Quiet) { + Write-Host " $Green✓ PASS$NC : $Message" -ForegroundColor Green + } +} + +function LogFail { + param([string]$Message) + $script:failed++ + if (-not $Quiet) { + Write-Host " $Red✗ FAIL$NC : $Message" -ForegroundColor Red + } +} + +# Test 1: Verify git command works for change detection +LogTest "Git command available" +$gitAvailable = $null -ne (Get-Command git -ErrorAction SilentlyContinue) +if ($gitAvailable) { + LogPass "git command available" +} else { + LogFail "git command not available" +} + +# Test 2: Verify delivery result file structure +LogTest "Delivery result JSON structure" +$resultFile = "$RepoRoot\.documentation\devspark\runs\latest\result.json" +if (Test-Path $resultFile) { + $resultContent = Get-Content $resultFile -Raw + + if ($resultContent -match '"delivery_status"') { + LogPass "delivery_status field present in result.json" + } else { + LogFail "delivery_status field missing from result.json" + } + + if ($resultContent -match '"create_pr_ready"') { + LogPass "create_pr_ready field present in result.json" + } else { + LogFail "create_pr_ready field missing from result.json" + } +} else { + LogFail "No result.json file found (no recent runs)" +} + +# Test 3: Verify delivery check 
validation rule exists +LogTest "Delivery check rule defined" +$specPath = "$RepoRoot\.documentation\specs\001-harness-delivery-integrity" +if (Test-Path $specPath) { + LogPass "Feature spec directory exists" + + # Search for git.changed_count or git.changed_path_match in spec files + $ruleFound = Get-ChildItem $specPath -Recurse -Filter "*.md" | + Select-String -Pattern "git\.changed_count|git\.changed_path_match" -ErrorAction SilentlyContinue + + if ($null -ne $ruleFound) { + LogPass "Delivery validation rules defined in spec" + } else { + LogFail "Delivery validation rules not found in spec" + } +} else { + LogFail "Feature spec directory not found" +} + +# Test 4: Verify no-change explainer can be generated +LogTest "No-change explainer artifact" +$explainerFile = "$RepoRoot\.documentation\devspark\runs\latest\no-change-explainer.md" +if (Test-Path $explainerFile) { + LogPass "No-change explainer artifact can be generated" +} else { + # This is OK if no run yet - not a failure + LogPass "No-change explainer (not required if no runs yet)" +} + +# Test 5: Verify governance approval checkpoint exists +LogTest "Governance approval gate documentation" +$approvalFile = "$RepoRoot\.documentation\specs\001-harness-delivery-integrity\gates\governance-approval.md" +if (Test-Path $approvalFile) { + $approvalContent = Get-Content $approvalFile -Raw + if ($approvalContent -match "Approver Name:") { + LogPass "Governance approval template exists" + } else { + LogFail "Governance approval template incomplete" + } +} else { + LogFail "Governance approval template not found" +} + +# Test 6: Verify delivery checks can be collected from git +LogTest "Git diff command works" +try { + $gitDiffResult = & git diff --name-only -- src/ test/ 2>$null + LogPass "Git diff command executes without errors" +} catch { + LogFail "Git diff command failed: $_" +} + +# Summary +Write-Host "" +Write-Host "==========================================" +Write-Host "Smoke Test Summary" +Write-Host 
"==========================================" +Write-Host "Tests run: $testCount" +Write-Host ("Passed: {0}{1}{2}" -f $Green, $passed, $NC) +Write-Host ("Failed: {0}{1}{2}" -f $Red, $failed, $NC) +Write-Host "" + +if ($failed -eq 0) { + Write-Host ("{0}✓ All smoke tests passed{1}" -f $Green, $NC) + exit 0 +} else { + Write-Host ("{0}✗ Some tests failed{1}" -f $Red, $NC) + exit 1 +} diff --git a/.devspark/scripts/powershell/release-history-context.ps1 b/.devspark/scripts/powershell/release-history-context.ps1 index 4403ed0d..a1cdbdd0 100644 --- a/.devspark/scripts/powershell/release-history-context.ps1 +++ b/.devspark/scripts/powershell/release-history-context.ps1 @@ -366,4 +366,4 @@ if ($Json) { Write-Output "Merged PRs: $(@($mergedPrNumbers).Count)" Write-Output "Recovered Specs: $($recoveredSpecs.Count)" Write-Output "Recovered Quickfixes: $($recoveredQuickfixes.Count)" -} \ No newline at end of file +} diff --git a/.devspark/scripts/powershell/update-agent-context.ps1 b/.devspark/scripts/powershell/update-agent-context.ps1 index 3f961401..56fd7d81 100644 --- a/.devspark/scripts/powershell/update-agent-context.ps1 +++ b/.devspark/scripts/powershell/update-agent-context.ps1 @@ -491,4 +491,3 @@ function Main { } Main - diff --git a/PromptSpark.Domain/PromptSpark.Domain.csproj b/PromptSpark.Domain/PromptSpark.Domain.csproj index e68a9963..0f8986fd 100644 --- a/PromptSpark.Domain/PromptSpark.Domain.csproj +++ b/PromptSpark.Domain/PromptSpark.Domain.csproj @@ -43,6 +43,7 @@ + \ No newline at end of file diff --git a/PromptSpark.Domain/Service/IRecipeGPTService.cs b/PromptSpark.Domain/Service/IRecipeGPTService.cs index 33a260b2..e230578c 100644 --- a/PromptSpark.Domain/Service/IRecipeGPTService.cs +++ b/PromptSpark.Domain/Service/IRecipeGPTService.cs @@ -1,4 +1,4 @@ -using WebSpark.Core.Models; +using WebSpark.Recipe.Models; namespace PromptSpark.Domain.Service; diff --git a/PromptSpark.Domain/Service/MomRecipeRequestService.cs 
b/PromptSpark.Domain/Service/MomRecipeRequestService.cs index 487518d7..d5d76202 100644 --- a/PromptSpark.Domain/Service/MomRecipeRequestService.cs +++ b/PromptSpark.Domain/Service/MomRecipeRequestService.cs @@ -1,7 +1,7 @@ using Microsoft.Extensions.Configuration; using Newtonsoft.Json; using System.Text; -using WebSpark.Core.Models; +using WebSpark.Recipe.Models; namespace PromptSpark.Domain.Service; diff --git a/PromptSpark.Domain/Service/RecipePromptSparkService.cs b/PromptSpark.Domain/Service/RecipePromptSparkService.cs index d5712ffc..c9bdef0f 100644 --- a/PromptSpark.Domain/Service/RecipePromptSparkService.cs +++ b/PromptSpark.Domain/Service/RecipePromptSparkService.cs @@ -2,7 +2,7 @@ using Newtonsoft.Json; using PromptSpark.Domain.Data; using System.Text; -using WebSpark.Core.Models; +using WebSpark.Recipe.Models; namespace PromptSpark.Domain.Service; diff --git a/PromptSpark.Recipe.Console/PromptSpark.Recipe.Console.csproj b/PromptSpark.Recipe.Console/PromptSpark.Recipe.Console.csproj index 61d70f8c..818476d7 100644 --- a/PromptSpark.Recipe.Console/PromptSpark.Recipe.Console.csproj +++ b/PromptSpark.Recipe.Console/PromptSpark.Recipe.Console.csproj @@ -2,7 +2,7 @@ Exe - net9.0 + net10.0 enable enable 40c9aa10-c8b2-4e03-9d48-40fc6a373ee9 @@ -10,14 +10,14 @@ - + - + - + \ No newline at end of file diff --git a/PromptSpark.SchemaTest/Program.cs b/PromptSpark.SchemaTest/Program.cs index ebb918f5..73369eba 100644 --- a/PromptSpark.SchemaTest/Program.cs +++ b/PromptSpark.SchemaTest/Program.cs @@ -1,6 +1,6 @@ using PromptSpark.SchemaTest.Utility; using TriviaSpark.JShow.Models; -using WebSpark.Core.Models; +using WebSpark.Recipe.Models; Console.WriteLine("Hello, World!"); diff --git a/WebSpark.Core/Data/RecipeOld.cs b/WebSpark.Core/Data/RecipeOld.cs deleted file mode 100644 index e33fbe7c..00000000 --- a/WebSpark.Core/Data/RecipeOld.cs +++ /dev/null @@ -1,42 +0,0 @@ -namespace WebSpark.Core.Data; - -public class RecipeOld -{ - public Recipe GetRecipe(WebSite mom, 
RecipeCategory rcat) - { - return new Recipe() - { - Id = Id, - Name = RecipeNM, - Description = Description, - Ingredients = Ingredients, - Instructions = Instructions, - AuthorName = AuthorNM, - AverageRating = AverageRating, - CommentCount = 0, - CreatedDate = ModifiedDT, - CreatedID = 1, - Domain = mom, - IsApproved = true, - LastViewDt = ModifiedDT, - RatingCount = 0, - RecipeCategory = rcat - }; - } - - public int Id { get; set; } - public int RecipeCategoryID { get; set; } - public string RecipeNM { get; set; } = string.Empty; - public string Description { get; set; } = string.Empty; - public string AuthorNM { get; set; } = string.Empty; - public string Ingredients { get; set; } = string.Empty; - public string Instructions { get; set; } = string.Empty; - public string IsApproved { get; set; } = string.Empty; - public DateTime ModifiedDT { get; set; } - public int ModifiedID { get; set; } - public int ViewCount { get; set; } - public double AverageRating { get; set; } - public int RatingCount { get; set; } - public int CommentCount { get; set; } - public DateTime LastViewDT { get; set; } -} diff --git a/WebSpark.Core/Data/SeedDatabase.cs b/WebSpark.Core/Data/SeedDatabase.cs index 0dd7c68c..a65eaecd 100644 --- a/WebSpark.Core/Data/SeedDatabase.cs +++ b/WebSpark.Core/Data/SeedDatabase.cs @@ -1,4 +1,3 @@ -using System.Text.Json; using WebSpark.Core.Helpers; using WebSpark.Core.Infrastructure.BaseClasses; @@ -166,37 +165,11 @@ public async Task SeedDatabaseAsync() } try { - var dOrder = 1; var Mom = GetMomWebsite(); _context.Domain.Add(Mom); _context.SaveChanges(); - - var catlist = new List() { "Appetizer", "Bread", "Breakfast", "Dessert", "Dips", "Drink", "Main Course", "Quick Meals", "Salad", "Sauce", "Side Dishes", "Slow Cooker", "Soup", "Vegetable" }; - foreach (var Name in catlist) - { - _context.RecipeCategory.Add(RecipeHelper.GetRecipeCategory(Name, dOrder)); - dOrder++; - } - await _context.SaveChangesAsync(); - - string jsonFilePath = 
System.IO.Path.Combine(System.IO.Directory.GetCurrentDirectory(), @"wwwroot\site\mom\RecipeList.json"); - - - // var jsonFilePath = @"~\site\mom\RecipeList.json"; - var json = File.ReadAllText(jsonFilePath); - var recipeOlds = JsonSerializer.Deserialize>(json) ?? new List(); - foreach (var item in recipeOlds) - { - if (item == null) continue; - var cat = _context.RecipeCategory.FirstOrDefault(w => w.Id == item.RecipeCategoryID); - if (cat == null) continue; - var newRecipe = item.GetRecipe(Mom, cat); - if (newRecipe != null) - { - _context.Recipe.Add(newRecipe); - _context.SaveChanges(); - } - } + // Recipe categories and recipes are now seeded via RecipeDbContext in WebSpark.Recipe. + // Legacy RecipeOld-based seeding removed as part of 006-extract-recipe-library extraction. } catch (Exception ex) { diff --git a/WebSpark.Core/Data/WebSparkDbContext.cs b/WebSpark.Core/Data/WebSparkDbContext.cs index 07fa9531..dc842f07 100644 --- a/WebSpark.Core/Data/WebSparkDbContext.cs +++ b/WebSpark.Core/Data/WebSparkDbContext.cs @@ -129,74 +129,6 @@ protected override void OnModelCreating(ModelBuilder modelBuilder) j => j.HasOne().WithMany().HasForeignKey("KeywordId"), j => j.HasOne().WithMany().HasForeignKey("ContentPartId")); - modelBuilder.Entity(entity => - { - entity.Property(e => e.AuthorName) - .IsRequired() - .HasMaxLength(50); - entity.Property(e => e.Ingredients) - .IsRequired(); - entity.Property(e => e.Instructions) - .IsRequired(); - entity.Property(e => e.Description) - .HasMaxLength(500); - entity.Property(e => e.Keywords) - .HasMaxLength(100); - entity.Property(e => e.Name) - .IsRequired() - .HasMaxLength(150); - entity.HasOne(d => d.RecipeCategory) - .WithMany(p => p.Recipe) - .OnDelete(DeleteBehavior.Restrict) - .HasConstraintName("FK_Recipe_RecipeCategory") - .IsRequired(); - entity.HasOne(d => d.Domain); - }); - - modelBuilder.Entity(entity => - { - entity.Property(e => e.Comment) - .HasMaxLength(1500); - entity.Property(e => e.Name) - .IsRequired() - 
.HasMaxLength(70); - entity.HasOne(d => d.Domain); - entity.HasMany(entity => entity.Recipe) - .WithOne(entity => entity.RecipeCategory) - .OnDelete(DeleteBehavior.Restrict); - }); - - modelBuilder.Entity(entity => - { - entity.Property(e => e.Email) - .IsRequired() - .HasMaxLength(50); - - entity.Property(e => e.Name) - .IsRequired() - .HasMaxLength(60); - - entity.Property(e => e.Comment).IsRequired(); - - entity.HasOne(d => d.Recipe) - .WithMany(p => p.RecipeComment) - .OnDelete(DeleteBehavior.ClientCascade) - .HasConstraintName("FK_RecipeComment_Recipe"); - }); - - modelBuilder.Entity(entity => - { - entity.Property(e => e.FileDescription).HasMaxLength(255); - - entity.Property(e => e.FileName) - .IsRequired() - .HasMaxLength(50); - - entity.HasOne(d => d.Recipe) - .WithMany(p => p.RecipeImage) - .OnDelete(DeleteBehavior.ClientCascade) - .HasConstraintName("FK_RecipeImage_Recipe"); - }); modelBuilder.Entity() .HasKey(t => new { t.PostId, t.CategoryId }); @@ -263,10 +195,6 @@ public override async Task SaveChangesAsync(CancellationToken cancellationT public virtual DbSet Newsletters { get; set; } public virtual DbSet PostCategories { get; set; } public virtual DbSet Posts { get; set; } - public virtual DbSet Recipe { get; set; } - public virtual DbSet RecipeCategory { get; set; } - public virtual DbSet RecipeComment { get; set; } - public virtual DbSet RecipeImage { get; set; } public virtual DbSet Subscribers { get; set; } public virtual DbSet Keywords { get; set; } public virtual DbSet ContentParts { get; set; } diff --git a/WebSpark.Core/Interfaces/IRecipeService.cs b/WebSpark.Core/Interfaces/IRecipeService.cs deleted file mode 100644 index 88abc9c5..00000000 --- a/WebSpark.Core/Interfaces/IRecipeService.cs +++ /dev/null @@ -1,69 +0,0 @@ -using WebSpark.Core.Models.ViewModels; - -namespace WebSpark.Core.Interfaces; - -/// -/// Recipe Service Interface -/// -public interface IRecipeService -{ - /// - /// Deletes the specified identifier. 
- /// - /// The identifier. - /// true if XXXX, false otherwise. - bool Delete(int Id); - bool Delete(Models.RecipeCategoryModel saveItem); - /// - /// Gets the by identifier. - /// - /// The identifier. - /// RecipeModel. - Models.RecipeModel Get(int Id); - - /// - /// - /// - /// - /// - Models.RecipeCategoryModel GetRecipeCategoryById(int Id); - - /// - /// Gets the recipe category list. - /// - /// List<RecipeCategoryModel>. - List GetRecipeCategoryList(); - - /// - /// Gets the recipe list. - /// - /// List<RecipeModel>. - IEnumerable Get(); - - /// - /// Saves the specified save item. - /// - /// The save item. - /// RecipeModel. - Models.RecipeModel Save(Models.RecipeModel saveItem); - /// - /// Get RecipeVM - /// - /// - /// - /// - RecipeVM GetRecipeVMHostAsync(string host, WebsiteVM baseVM); - - /// - /// - /// - /// - /// - Models.RecipeCategoryModel Save(Models.RecipeCategoryModel saveItem); - - /// - /// - /// - /// - List GetRecipeImages(); -} diff --git a/WebSpark.Core/Models/EditModels/RecipeEditModel.cs b/WebSpark.Core/Models/EditModels/RecipeEditModel.cs deleted file mode 100644 index 1604f78a..00000000 --- a/WebSpark.Core/Models/EditModels/RecipeEditModel.cs +++ /dev/null @@ -1,43 +0,0 @@ -namespace WebSpark.Core.Models.EditModels -{ - /// - /// Class RecipeModel. - /// - public class RecipeEditModel : RecipeModel - { - /// - /// Initializes a new instance of the class. 
- /// - public RecipeEditModel() - { - } - public RecipeEditModel(RecipeModel Recipe) - { - if (Recipe == null) return; - AuthorNM = Recipe.AuthorNM; - AverageRating = Recipe.AverageRating; - IsApproved = Recipe.IsApproved; - CommentCount = Recipe.CommentCount; - Description = Recipe.Description; - FileDescription = Recipe.FileDescription; - LastViewDT = Recipe.LastViewDT; - FileName = Recipe.FileName; - Id = Recipe.Id; - ModifiedDT = Recipe.ModifiedDT; - RecipeCategoryID = Recipe.RecipeCategoryID; - Ingredients = Recipe.Ingredients; - Instructions = Recipe.Instructions; - ModifiedID = Recipe.ModifiedID; - Servings = Recipe.Servings; - Name = Recipe.Name; - RatingCount = Recipe.RatingCount; - RecipeCategories = Recipe.RecipeCategories; - RecipeCategory = Recipe.RecipeCategory; - RecipeCategoryNM = Recipe.RecipeCategoryNM; - RecipeURL = Recipe.RecipeURL; - ViewCount = Recipe.ViewCount; - } - public List Categories { get; set; } = []; - - } -} diff --git a/WebSpark.Core/Models/RecipeCategoryModel.cs b/WebSpark.Core/Models/RecipeCategoryModel.cs deleted file mode 100644 index 6127da94..00000000 --- a/WebSpark.Core/Models/RecipeCategoryModel.cs +++ /dev/null @@ -1,68 +0,0 @@ -namespace WebSpark.Core.Models; - -/// -/// Class RecipeCategoryModel. -/// -public class RecipeCategoryModel -{ - /// - /// Initializes a new instance of the class. - /// - public RecipeCategoryModel() - { - Recipes = []; - } - - /// - /// Gets or sets the recipe category identifier. - /// - /// The recipe category identifier. - public int Id { get; set; } - - /// - /// Gets or sets the recipe category name. - /// - /// The recipe category nm. - [JsonPropertyName("name")] - [DisplayName("Category")] - [StringLength(50, ErrorMessage = "Max length is 50.")] - [DataType(DataType.Text)] - [Required] - public string Name { get; set; } = string.Empty; - - /// - /// Gets or sets the recipe category Description. - /// - /// The recipe category cm. 
- [JsonPropertyName("description")] - [DisplayName("Description")] - [StringLength(100, ErrorMessage = "Max length is 100.")] - [DataType(DataType.MultilineText)] - public string Description { get; set; } = string.Empty; - - /// - /// Gets or sets the display order. - /// - /// The display order. - [JsonPropertyName("order")] - [DisplayName("Order")] - public int DisplayOrder { get; set; } - - /// - /// Gets or sets a value indicating whether this instance is active. - /// - /// true if this instance is active; otherwise, false. - public bool IsActive { get; set; } - - /// - /// Gets or sets the recipes. - /// - /// The recipes. - public List Recipes { get; set; } - /// - /// Link to API - /// - public string Url { get; set; } = string.Empty; - public int DomainID { get; set; } = RecipeConstants.INT_MOM_DomainId; - -} diff --git a/WebSpark.Core/Models/RecipeImageModel.cs b/WebSpark.Core/Models/RecipeImageModel.cs deleted file mode 100644 index cf7c9d34..00000000 --- a/WebSpark.Core/Models/RecipeImageModel.cs +++ /dev/null @@ -1,12 +0,0 @@ -namespace WebSpark.Core.Models -{ - public class RecipeImageModel - { - public int Id { get; set; } - public string FileName { get; set; } = string.Empty; - public string FileDescription { get; set; } = string.Empty; - public int DisplayOrder { get; set; } - public byte[] ImageData { get; set; } = Array.Empty(); - public Models.RecipeModel Recipe { get; set; } = new Models.RecipeModel(); - } -} diff --git a/WebSpark.Core/Models/RecipeModel.cs b/WebSpark.Core/Models/RecipeModel.cs deleted file mode 100644 index 0f761bef..00000000 --- a/WebSpark.Core/Models/RecipeModel.cs +++ /dev/null @@ -1,170 +0,0 @@ -namespace WebSpark.Core.Models; - -public static class RecipeConstants -{ - public const int INT_MOM_DomainId = 2; -} - -/// -/// Class RecipeModel. -/// -public class RecipeModel -{ - /// - /// - /// - public RecipeModel() - { - RecipeCategory = new Models.RecipeCategoryModel(); - } - /// - /// Gets or sets the author nm. 
- /// - /// The author nm. - [DisplayName("Author")] - [Required] - public string AuthorNM { get; set; } = string.Empty; - - /// - /// Gets or sets the average rating. - /// - /// The average rating. - [DisplayName("Average Ratings")] - public double AverageRating { get; set; } - - /// - /// Gets or sets the comment count. - /// - /// The comment count. - [DisplayName("Comments")] - public int CommentCount { get; set; } - - /// - /// Gets or sets the file description. - /// - /// The file description. - public string FileDescription { get; set; } = string.Empty; - - /// - /// Gets or sets the name of the file. - /// - /// The name of the file. - public string FileName { get; set; } = string.Empty; - - /// - /// Gets or sets the ingredient ds. - /// - /// The ingredient ds. - [DisplayName("Ingredients")] - public string Ingredients { get; set; } = string.Empty; - - /// - /// Gets or sets the instruction ds. - /// - /// The instruction ds. - [DisplayName("Instructions")] - public string Instructions { get; set; } = string.Empty; - - /// - /// Gets or sets a value indicating whether this instance is approved. - /// - /// true if this instance is approved; otherwise, false. - [DisplayName("Approved")] - public bool IsApproved { get; set; } - - /// - /// Gets or sets the last view dt. - /// - /// The last view dt. - [DisplayName("Last View")] - public DateTime LastViewDT { get; set; } - - /// - /// Gets or sets the modified dt. - /// - /// The modified dt. - [DisplayName("Last Modified")] - public DateTime ModifiedDT { get; set; } - - /// - /// Gets or sets the modified identifier. - /// - /// The modified identifier. - public int ModifiedID { get; set; } - - /// - /// Gets or sets the rating count. - /// - /// The rating count. 
- [DisplayName("Ratings Count")] - public int RatingCount { get; set; } - - /// - /// The number of servings this recipe makes - /// - [DisplayName("Servings")] - public int Servings { get; set; } - - /// - /// RecipeCategory - /// - public RecipeCategoryModel RecipeCategory { get; set; } - /// - /// Gets or sets the recipe category identifier. - /// - /// The recipe category identifier. - [DisplayName("Category")] - [Required] - public int RecipeCategoryID { get; set; } - - /// - /// Gets or sets the recipe category nm. - /// - /// The recipe category nm. - [DisplayName("Category")] - public string RecipeCategoryNM { get; set; } = string.Empty; - - /// - /// Gets or sets the recipe ds. - /// - /// The recipe ds. - [DisplayName("Description")] - [Required] - public string Description { get; set; } = string.Empty; - - /// - /// Gets or sets the recipe identifier. - /// - /// The recipe identifier. - public int Id { get; set; } - - /// - /// Gets or sets the recipe nm. - /// - /// The recipe nm. - [DisplayName("Recipe")] - [Required] - public string Name { get; set; } = string.Empty; - - /// - /// Gets or sets the view count. - /// - /// The view count. - [DisplayName("View Count")] - public int ViewCount { get; set; } - - /// - /// Gets or sets the recipe URL. - /// - /// The recipe URL. 
- public string RecipeURL { get; set; } = string.Empty; - - public List Images { get; set; } = []; - - /// - /// Lookup List of Recipe Categories - /// - public IEnumerable RecipeCategories { get; set; } = Array.Empty(); - public int DomainID { get; set; } = RecipeConstants.INT_MOM_DomainId; - public string SEO_Keywords { get; set; } = string.Empty; -} diff --git a/WebSpark.Core/Providers/RecipeProvider.cs b/WebSpark.Core/Providers/RecipeProvider.cs deleted file mode 100644 index cb547a18..00000000 --- a/WebSpark.Core/Providers/RecipeProvider.cs +++ /dev/null @@ -1,628 +0,0 @@ -using WebSpark.Core.Data; -using WebSpark.Core.Infrastructure; -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; -using WebSpark.Core.Models.ViewModels; - -namespace WebSpark.Core.Providers; - -/// -/// Recipe Service -/// Implements the -/// Implements the -/// -/// -/// -public class RecipeProvider(WebSparkDbContext webDomainContext) : IMenuProvider, IRecipeService, IDisposable -{ - private bool disposedValue; - - /// - /// - /// - /// - /// - private List Create(List? list) - { - if (list == null) return []; - return [.. list.Select(Create).OrderBy(x => x.Name)]; - } - - /// - /// Creates the specified list. - /// - /// The list. - /// List<RecipeCategoryModel>. - private List Create(List? list) - { - if (list == null) return []; - return [.. list.Select(item => Create(item)).OrderBy(x => x.Name)]; - } - - /// - /// Creates the specified recipe. - /// - /// The recipe. - /// RecipeModel. - private RecipeModel Create(Recipe? Recipe) - { - if (Recipe == null) return new RecipeModel(); - var r = Recipe; // local non-null alias - - return new RecipeModel() - { - DomainID = r.Domain?.Id ?? RecipeConstants.INT_MOM_DomainId, - RecipeURL = FormatHelper.GetRecipeURL(r.Name), - Id = r.Id, - Name = r.Name, - Ingredients = r.Ingredients, - Instructions = r.Instructions, - Description = string.IsNullOrEmpty(r.Description) ? 
r.Name : r.Description, - SEO_Keywords = r.Keywords, - Servings = r.Servings, - AuthorNM = r.AuthorName, - AverageRating = r.AverageRating, - IsApproved = r.IsApproved, - CommentCount = 0, - RecipeCategory = Create(r.RecipeCategory), - RecipeCategoryID = r.RecipeCategory?.Id ?? 0, - RatingCount = r.RatingCount, - ViewCount = r.ViewCount, - LastViewDT = r.LastViewDt, - ModifiedDT = r.UpdatedDate, - }; - } - - /// - /// Creates the specified recipe. - /// - /// The recipe. - /// Recipe. - private Recipe Create(RecipeModel? Recipe) - { - if (Recipe == null) return new Recipe(); - - if (Recipe.DomainID == 0) - { - Recipe.DomainID = RecipeConstants.INT_MOM_DomainId; - } - - var Category = webDomainContext.RecipeCategory.FirstOrDefault(w => w.Id == Recipe.RecipeCategoryID); - var Domain = webDomainContext.Domain.FirstOrDefault(w => w.Id == Recipe.DomainID); - - return new Recipe() - { - Id = Recipe.Id, - Name = Recipe.Name, - Ingredients = Recipe.Ingredients, - Instructions = Recipe.Instructions, - Keywords = Recipe.SEO_Keywords, - Description = string.IsNullOrEmpty(Recipe.Description) ? Recipe.Name : Recipe.Description, - AuthorName = Recipe.AuthorNM, - AverageRating = Recipe.AverageRating, - IsApproved = Recipe.IsApproved, - CommentCount = Recipe.CommentCount, - RatingCount = Recipe.RatingCount, - ViewCount = Recipe.ViewCount, - LastViewDt = Recipe.LastViewDT, - RecipeCategory = Category ?? new RecipeCategory { Name = Recipe.Name, Comment = Recipe.Name }, - Domain = Domain ?? new WebSite { Name = Recipe.Name, Title = Recipe.Name }, - Servings = Recipe.Servings, - CreatedDate = DateTime.UtcNow, - UpdatedDate = DateTime.UtcNow, - }; - } - private static RecipeCategory Create(RecipeCategoryModel? s) - { - if (s == null) return new RecipeCategory(); - return new RecipeCategory() - { - DisplayOrder = s.DisplayOrder, - IsActive = s.IsActive, - Comment = s.Name, - Id = s.Id, - Name = s.Name - }; - } - - private List Create(List? 
dbRecipeImage) - { - var recipeImageList = new List(); - if (dbRecipeImage == null) return recipeImageList; - foreach (var dbItem in dbRecipeImage) - { - if (dbItem == null) continue; - recipeImageList.Add(Create(dbItem)); - } - return recipeImageList; - } - - private RecipeImageModel Create(RecipeImage? dbItem) - { - if (dbItem == null) return new RecipeImageModel(); - return new RecipeImageModel() - { - Id = dbItem.Id, - Recipe = Create(dbItem.Recipe), - DisplayOrder = dbItem.DisplayOrder, - FileDescription = dbItem.FileDescription, - FileName = dbItem.FileName, - }; - } - - /// - /// Creates the specified rc. - /// - /// The rc. - /// RecipeCategoryModel. - private RecipeCategoryModel Create(RecipeCategory? rc, bool LoadRecipes = false) - { - if (rc == null) return new RecipeCategoryModel(); - - return new RecipeCategoryModel() - { - DomainID = rc.Domain?.Id ?? RecipeConstants.INT_MOM_DomainId, - DisplayOrder = rc.DisplayOrder, - IsActive = rc.IsActive, - Description = rc.Comment, - Id = rc.Id, - Name = rc.Name, - Url = FormatHelper.GetRecipeCategoryURL(rc.Name), - Recipes = LoadRecipes ? Create(rc.Recipe?.ToList()) : [] - }; - } - /// - /// Creates the domain menu. - /// - /// The list. - /// The domain identifier. - /// List<MenuModel>. - private List CreateDomainMenu(List? list, int DomainID) - { - if (list == null) return []; - - if (DomainID > 0) - { - var webSite = webDomainContext.Domain.Where(w => w.Id == DomainID).FirstOrDefault(); - var page = webDomainContext.Menu.Where(w => w.Id == DomainID).Where(w => w.Title == "Recipe").FirstOrDefault(); - - if (webSite != null) - { - if (page == null) - { - return [.. list.Select(item => GetMenuItem(item, webSite)).OrderBy(x => x.DisplayOrder)]; - } - else - { - return [.. list.Select(item => GetMenuItem(item, webSite, page)).OrderBy(x => x.DisplayOrder)]; - } - } - else - { - return [.. 
list.Select(item => GetMenuItem(item)).OrderBy(x => x.DisplayOrder)]; - } - } - return []; - - } - - - /// - /// Creates the menu. - /// - /// The list. - /// List<MenuModel>. - private static List CreateMenu(List? list) - { - if (list == null) return []; - return [.. list.Select(item => GetMenuItem(item)).OrderBy(x => x.DisplayOrder)]; - } - - - /// - /// Gets the menu item. - /// - /// The recipe. - /// MenuModel. - private static MenuModel GetMenuItem(Recipe? recipe) - { - if (recipe == null) return new MenuModel(); - - return new MenuModel() - { - Id = recipe.Id, - ParentId = recipe.RecipeCategory?.Id, - Controller = "Recipe", - Action = "index", - Description = recipe.Name, - KeyWords = recipe.Keywords, - Title = recipe.Name, - Url = FormatHelper.GetRecipeURL(recipe.Name), - DisplayInNavigation = false - }; - - } - - /// - /// Gets the menu item. - /// - /// The recipe. - /// The domain. - /// MenuModel. - private static MenuModel GetMenuItem(Recipe? recipe, WebSite? domain) - { - if (recipe == null) return new MenuModel(); - - return new MenuModel() - { - Id = recipe.Id, - ParentId = recipe.RecipeCategory?.Id, - Controller = "recipe", - Action = "index", - Description = recipe.Description, - Title = recipe.Name, - DomainID = domain?.Id ?? 0, - ParentController = "recipe", - ParentTitle = "Recipe", - Url = FormatHelper.GetRecipeURL(recipe.Name), - VirtualPath = FormatHelper.GetRecipeURL(recipe.Name), - Icon = "fa fa-food", - PageContent = recipe.Description, - DisplayOrder = 100, - DisplayInNavigation = false - }; - } - /// - /// Gets the menu item. - /// - /// The recipe. - /// The domain. - /// The page. - /// MenuModel. - private static MenuModel GetMenuItem(Recipe? recipe, WebSite? domain, Menu? 
page) - { - if (recipe == null) return new MenuModel(); - - return new MenuModel() - { - Id = recipe.Id, - ParentId = page?.Id, - Controller = "recipe", - ParentController = "recipe", - ParentTitle = "recipe", - Action = "index", - Description = recipe.Description, - Title = recipe.Name, - Url = FormatHelper.GetRecipeURL(recipe.Name), - DomainID = domain?.Id ?? 0, - VirtualPath = FormatHelper.GetRecipeURL(recipe.Name), - Icon = "fa fa-food", - PageContent = recipe.Description, - DisplayOrder = 100, - DisplayInNavigation = false - }; - } - - /// - /// Deletes the specified identifier. - /// - /// The identifier. - /// true if XXXX, false otherwise. - public bool Delete(int Id) - { - var deleteItem = webDomainContext.Recipe.Where(w => w.Id == Id).FirstOrDefault(); - if (deleteItem != null) - { - webDomainContext.Recipe.Remove(deleteItem); - webDomainContext.SaveChanges(); - return true; - } - return false; - } - public bool Delete(RecipeCategoryModel saveItem) - { - var deleteItem = webDomainContext.RecipeCategory.Where(w => w.Id == saveItem.Id).FirstOrDefault(); - if (deleteItem != null) - { - webDomainContext.RecipeCategory.Remove(deleteItem); - webDomainContext.SaveChanges(); - return true; - } - return false; - } - - /// - /// Gets the recipe list. - /// - /// List<RecipeModel>. - public IEnumerable Get() - { - var theList = webDomainContext.Recipe.Include(r => r.RecipeCategory).Include(i => i.RecipeImage).ToList(); - return Create(theList); - } - - /// - /// Gets the by identifier. - /// - /// The identifier. - /// RecipeModel. - public RecipeModel Get(int Id) - { - var returnRecipe = Create(webDomainContext.Recipe.Where(w => w.Id == Id).Include(r => r.RecipeCategory).FirstOrDefault()); - returnRecipe.RecipeCategories = webDomainContext.RecipeCategory.Select(s => new LookupModel() { Value = s.Id.ToString(), Text = s.Name }).ToList(); - return returnRecipe; - } - - - /// - /// Gets the menu list. - /// - /// List<MenuModel>. 
- public List GetAllMenuItems() - { - return CreateMenu([.. webDomainContext.Recipe]); - } - - /// - /// Get Menu Item for a Recipe Id - /// - /// The identifier. - /// MenuModel. - public MenuModel GetMenuItem(int Id) - { - return GetMenuItem(webDomainContext.Recipe.Where(w => w.Id == Id).FirstOrDefault()); - } - /// - /// - /// - /// - /// - public async Task GetMenuItemAsync(int Id) - { - var returnMenu = GetMenuItem(await webDomainContext.Set().Where(w => w.Id == Id).FirstOrDefaultAsync()); - if (returnMenu == null) - returnMenu = new MenuModel(); - return returnMenu; - } - - public IEnumerable GetMenuList() - { - return new List(); - } - - /// - /// - /// - /// - /// - public RecipeCategoryModel GetRecipeCategoryById(int Id) - { - var returnCategory = new RecipeCategoryModel(); - returnCategory = Create(webDomainContext.RecipeCategory.Include(i => i.Recipe).Where(w => w.Id == Id).FirstOrDefault(), LoadRecipes: true); - return returnCategory; - } - - /// - /// Gets the recipe category list. - /// - /// List<RecipeCategoryModel>. - public List GetRecipeCategoryList() - { - return Create(webDomainContext.RecipeCategory.ToList()); - } - - public List GetRecipeImages() - { - var dbRecipeImage = webDomainContext.RecipeImage.Include(i => i.Recipe).ToList(); - return Create(dbRecipeImage); - } - - public RecipeVM GetRecipeVMHostAsync(string host, WebsiteVM baseVM) - { - var recipeVM = new RecipeVM(baseVM) - { - CategoryList = [.. GetRecipeCategoryList()], - RecipeList = [.. Get()], - }; - return recipeVM; - } - - /// - /// Gets the site menu. - /// - /// The domain identifier. - /// List<MenuModel>. - public List GetSiteMenu(int domainId) - { - return CreateDomainMenu([.. webDomainContext.Recipe], domainId); - } - - - ///// - ///// Sync Up Database with List of Categories - ///// - ///// The save item. - ///// RecipeCategoryModel. - public List Save(List? 
saveCategories) - { - - if (saveCategories == null) return []; - - var currentCategories = GetRecipeCategoryList(); - - foreach (var saveItem in saveCategories) - { - if (saveItem == null) continue; - - if (string.IsNullOrWhiteSpace(saveItem.Name)) continue; - - var curCat = currentCategories.Where(w => w.Name == saveItem.Name).FirstOrDefault(); - - saveItem.Id = curCat == null ? 0 : curCat.Id; - - if (saveItem.Id == 0) - { - var saveCategory = Create(saveItem); - saveCategory.UpdatedDate = DateTime.UtcNow; - webDomainContext.RecipeCategory.Add(saveCategory); - webDomainContext.SaveChanges(); - saveItem.Id = saveCategory.Id; - } - else - { - var saveCategory = webDomainContext.RecipeCategory.Where(w => w.Id == saveItem.Id).FirstOrDefault(); - if (saveCategory != null) - { - saveCategory.Name = saveItem.Name; - saveCategory.Comment = saveItem.Description; - saveCategory.UpdatedDate = DateTime.UtcNow; - webDomainContext.SaveChanges(); - } - } - } - return GetRecipeCategoryList(); - } - - - ///// - ///// Sync Up Database with List of Categories - ///// - ///// The save item. - ///// RecipeCategoryModel. - public IEnumerable Save(List? saveRecipes) - { - - if (saveRecipes == null) return new List(); - - var curRecipes = Get(); - var currentCategories = GetRecipeCategoryList(); - - foreach (var saveItem in saveRecipes) - { - if (saveItem == null) continue; - - if (string.IsNullOrWhiteSpace(saveItem.RecipeCategoryNM)) continue; - - var curRecipe = curRecipes.Where(w => w.Name == saveItem.Name).FirstOrDefault(); - - saveItem.Id = curRecipe == null ? 0 : curRecipe.Id; - var curCategory = currentCategories.Where(w => w.Name == saveItem.RecipeCategoryNM).FirstOrDefault(); - - saveItem.RecipeCategoryID = curCategory == null ? 
0 : curCategory.Id; - - if (saveItem.Id == 0) - { - var saveRecipe = Create(saveItem); - saveRecipe.UpdatedDate = DateTime.UtcNow; - webDomainContext.Recipe.Add(saveRecipe); - webDomainContext.SaveChanges(); - saveItem.Id = saveRecipe.Id; - } - else - { - var saveRecipe = webDomainContext.Recipe.Where(w => w.Id == saveItem.Id).FirstOrDefault(); - if (saveRecipe != null) - { - webDomainContext.SaveChanges(); - } - } - } - return Get(); - } - - - - /// - /// Saves the specified save item. - /// - /// The save item. - /// RecipeModel. - public RecipeModel Save(RecipeModel? saveItem) - { - if (saveItem == null) return new RecipeModel(); - if (saveItem.Id == 0) - { - var saveRecipe = Create(saveItem); - webDomainContext.Recipe.Add(saveRecipe); - webDomainContext.SaveChanges(); - saveItem.Id = saveRecipe.Id; - } - else - { - var saveRecipe = webDomainContext.Recipe.Where(w => w.Id == saveItem.Id).Include(i => i.RecipeCategory).FirstOrDefault(); - if (saveRecipe != null) - { - if (saveRecipe.RecipeCategory == null || saveRecipe.RecipeCategory.Id != saveItem.RecipeCategoryID) - { - saveRecipe.RecipeCategory = (webDomainContext.RecipeCategory.FirstOrDefault(w => w.Id == saveItem.RecipeCategoryID) ?? 
saveRecipe.RecipeCategory)!; - } - saveRecipe.Name = saveItem.Name; - saveRecipe.AuthorName = saveItem.AuthorNM; - saveRecipe.Name = saveItem.Name; - saveRecipe.Description = saveItem.Description; - saveRecipe.Ingredients = saveItem.Ingredients; - saveRecipe.Instructions = saveItem.Instructions; - saveRecipe.Servings = saveItem.Servings; - saveRecipe.IsApproved = saveItem.IsApproved; - - webDomainContext.SaveChanges(); - } - } - return Get(saveItem.Id); - } - - public RecipeCategoryModel Save(RecipeCategoryModel saveItem) - { - if (saveItem == null) return new RecipeCategoryModel(); - if (saveItem.Id == 0) - { - var saveCategory = Create(saveItem); - webDomainContext.RecipeCategory.Add(saveCategory); - webDomainContext.SaveChanges(); - saveItem.Id = saveCategory.Id; - } - else - { - var saveCategory = webDomainContext.RecipeCategory.Where(w => w.Id == saveItem.Id).FirstOrDefault(); - if (saveCategory != null) - { - saveCategory.Name = saveItem.Name; - saveCategory.Comment = saveItem.Description; - saveCategory.DisplayOrder = saveItem.DisplayOrder; - saveCategory.IsActive = saveItem.IsActive; - - - webDomainContext.SaveChanges(); - } - } - return GetRecipeCategoryById(saveItem.Id); - } - - protected virtual void Dispose(bool disposing) - { - if (!disposedValue) - { - if (disposing) - { - webDomainContext?.Dispose(); - } - // TODO: free unmanaged resources (unmanaged objects) and override finalizer - // TODO: set large fields to null - disposedValue = true; - } - } - - // // TODO: override finalizer only if 'Dispose(bool disposing)' has code to free unmanaged resources - ~RecipeProvider() - { - // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method - Dispose(disposing: false); - } - - void IDisposable.Dispose() - { - // Do not change this code. 
Put cleanup code in 'Dispose(bool disposing)' method - Dispose(disposing: true); - GC.SuppressFinalize(this); - } -} diff --git a/WebSpark.Core/Providers/WebsiteProvider.cs b/WebSpark.Core/Providers/WebsiteProvider.cs index d83a8697..feca5370 100644 --- a/WebSpark.Core/Providers/WebsiteProvider.cs +++ b/WebSpark.Core/Providers/WebsiteProvider.cs @@ -56,50 +56,10 @@ private Models.WebsiteModel Create(WebSite? website) ModifiedID = website.UpdatedID ?? 99, }; - if (website.Id == 2) item.Menu.AddRange(CreateRecipeMenu()); + // Recipe menu items are provided by RecipeMenuAdapter in WebSpark.Portal. return item; } - /// - /// - /// - /// - /// - private List Create(IEnumerable? list) - { - return list == null ? [] : [.. list.Select(Create).OrderBy(x => x.Name)]; - } - - /// - /// Creates the specified recipe. - /// - /// The recipe. - /// RecipeModel. - private Models.RecipeModel Create(Data.Recipe? Recipe) - { - return Recipe == null - ? new Models.RecipeModel() - : new Models.RecipeModel() - { - RecipeURL = FormatHelper.GetRecipeURL(Recipe.Name), - Id = Recipe.Id, - Name = Recipe.Name, - Ingredients = Recipe.Ingredients, - Instructions = Recipe.Instructions, - Description = string.IsNullOrEmpty(Recipe.Description) ? Recipe.Name : Recipe.Description, - Servings = Recipe.Servings, - AuthorNM = Recipe.AuthorName, - AverageRating = Recipe.AverageRating, - IsApproved = Recipe.IsApproved, - CommentCount = 0, - RecipeCategory = Create(Recipe.RecipeCategory), - RecipeCategoryID = Recipe.RecipeCategory?.Id ?? 0, - RecipeCategoryNM = Recipe.RecipeCategory?.Name ?? string.Empty, - RatingCount = Recipe.RatingCount, - ViewCount = Recipe.ViewCount, - LastViewDT = Recipe.LastViewDt, - }; - } /// /// Returns Domain Model from Domain table @@ -134,26 +94,6 @@ private static WebSite Create(Models.WebsiteModel domain) { return list == null ? [] : [.. list.Select(Create).OrderBy(x => x.Name)]; } - /// - /// Creates the specified rc. - /// - /// The rc. - /// RecipeCategoryModel. 
- private Models.RecipeCategoryModel Create(RecipeCategory? rc, bool LoadRecipes = false) - { - return rc == null - ? new Models.RecipeCategoryModel() - : new Models.RecipeCategoryModel() - { - DisplayOrder = rc.DisplayOrder, - IsActive = rc.IsActive, - Description = rc.Comment, - Id = rc.Id, - Name = rc.Name, - Url = FormatHelper.GetRecipeCategoryURL(rc.Name), - Recipes = LoadRecipes ? Create(rc.Recipe.ToList()) : [] - }; - } private static List Create(ICollection? list, bool LoadChild = false) { var menuList = list == null ? [] : list.Select(item => Create(item, LoadChild)).OrderBy(x => x.Title).ToList(); @@ -295,84 +235,11 @@ private WebsiteVM CreateBaseView(WebSite? domain) Menu = Create(domain.Menus, false), }; - if (domain.Id == 2) item.Menu.AddRange(CreateRecipeMenu()); + // Recipe menu items are provided by RecipeMenuAdapter in WebSpark.Portal. return item; } - /// - /// Creates the website menu. - /// - /// The list. - /// The website identifier. - /// List<MenuModel>. - private List CreateRecipeMenu() - { - var categoryList = webDomainContext.RecipeCategory.ToList(); - List categoryMenu = [.. categoryList.Select(GetMenuItem).OrderBy(x => x.DisplayOrder)]; - - var list = webDomainContext.Recipe.Include(i => i.RecipeCategory).ToList(); - categoryMenu.AddRange([.. list.Select(GetMenuItem).OrderBy(x => x.DisplayOrder)]); - - return categoryMenu; - } - - - private Models.MenuModel GetMenuItem(RecipeCategory? category) - { - return category == null - ? new Models.MenuModel() - : new Models.MenuModel() - { - ParentId = category.Id, - Controller = "recipe", - Action = "Category", - Argument = FormatHelper.GetSafePath(category.Name ?? string.Empty), - Description = string.IsNullOrEmpty(category.Comment) ? (category.Name ?? string.Empty) : category.Comment!, - Title = category.Name ?? string.Empty, - ParentController = "recipe", - ParentTitle = "Recipe", - Url = FormatHelper.GetRecipeCategoryURL(category.Name ?? 
string.Empty), - VirtualPath = FormatHelper.GetRecipeCategoryURL(category.Name ?? string.Empty), - Icon = "fa fa-food", - PageContent = category.Comment ?? string.Empty, - DisplayOrder = 10, - DisplayInNavigation = false - }; - } - - - /// - /// Gets the menu item. - /// - /// The recipe. - /// The website. - /// MenuModel. - private Models.MenuModel GetMenuItem(Data.Recipe? recipe) - { - return recipe == null - ? new Models.MenuModel() - : new Models.MenuModel() - { - Id = recipe.Id, - ParentId = recipe.RecipeCategory?.Id, - Controller = "recipe", - Action = "Index", - Argument = FormatHelper.GetSafePath(recipe.Name ?? string.Empty), - Description = string.IsNullOrEmpty(recipe.Description) ? (recipe.Name ?? string.Empty) : recipe.Description, - Title = recipe.Name ?? string.Empty, - ParentController = "recipe", - ParentTitle = "Recipe", - Url = FormatHelper.GetRecipeURL(recipe.Name ?? string.Empty), - VirtualPath = FormatHelper.GetRecipeURL(recipe.Name ?? string.Empty), - Icon = "fa fa-food", - PageContent = recipe.Description ?? 
string.Empty, - DisplayOrder = 100, - DisplayInNavigation = false - }; - } - - public bool Delete(int Id) { if (Id == 0) diff --git a/WebSpark.Portal/Areas/RecipeSpark/Controllers/HomeController.cs b/WebSpark.Portal/Areas/RecipeSpark/Controllers/HomeController.cs index 600d2413..8b240381 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Controllers/HomeController.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Controllers/HomeController.cs @@ -1,9 +1,10 @@ using PromptSpark.Domain.Service; using System; using System.Linq; -using WebSpark.Core.Interfaces; using WebSpark.Core.Models; using WebSpark.Portal.Services.Seo; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Areas.RecipeSpark.Controllers { diff --git a/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeCategoryController.cs b/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeCategoryController.cs index 55e4518d..cfe155d2 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeCategoryController.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeCategoryController.cs @@ -1,5 +1,5 @@ -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Areas.RecipeSpark.Controllers { diff --git a/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeImageController.cs b/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeImageController.cs index e6e576ca..c532df0c 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeImageController.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Controllers/RecipeImageController.cs @@ -1,8 +1,9 @@ -using WebSpark.Core.Providers; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Areas.RecipeSpark.Controllers; -public class RecipeImageFileModel : Core.Models.RecipeImageModel +public class RecipeImageFileModel : RecipeImageModel { public IFormFile? 
UploadedImage { get; set; } } @@ -17,7 +18,7 @@ public class RecipeImageFileModel : Core.Models.RecipeImageModel /// /// public class RecipeImageController( - Core.Interfaces.IRecipeService _recipeService, + IRecipeService _recipeService, IRecipeImageService _recipeImageService) : RecipeBaseController { /// @@ -117,7 +118,7 @@ public IActionResult Edit(int id) [HttpPost] [ValidateAntiForgeryToken] - public IActionResult Edit(int id, Core.Models.RecipeImageModel recipeImageModel) + public IActionResult Edit(int id, RecipeImageModel recipeImageModel) { if (id != recipeImageModel.Id) { diff --git a/WebSpark.Portal/Areas/RecipeSpark/Controllers/api/RecipeSparkApiController.cs b/WebSpark.Portal/Areas/RecipeSpark/Controllers/api/RecipeSparkApiController.cs index 3bd30f5d..a3e81161 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Controllers/api/RecipeSparkApiController.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Controllers/api/RecipeSparkApiController.cs @@ -1,7 +1,7 @@ using Microsoft.AspNetCore.Mvc; -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; using System.ComponentModel.DataAnnotations; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Areas.RecipeSpark.Controllers.Api { diff --git a/WebSpark.Core/Models/ViewModels/RecipeVM.cs b/WebSpark.Portal/Areas/RecipeSpark/Models/RecipeVM.cs similarity index 66% rename from WebSpark.Core/Models/ViewModels/RecipeVM.cs rename to WebSpark.Portal/Areas/RecipeSpark/Models/RecipeVM.cs index 2747c851..8ab90438 100644 --- a/WebSpark.Core/Models/ViewModels/RecipeVM.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Models/RecipeVM.cs @@ -1,15 +1,16 @@ -namespace WebSpark.Core.Models.ViewModels; +using System.Text.Json.Serialization; +using WebSpark.Core.Models.ViewModels; +using WebSpark.Recipe.Models; + +namespace WebSpark.Portal.Areas.RecipeSpark.Models; /// -/// Class RecipeVM. -/// Implements the +/// Recipe view model for portal presentation layer. 
+/// Moved from WebSpark.Core as part of 006-extract-recipe-library. +/// Inherits WebsiteVM (WebSpark.Core) and uses recipe types from WebSpark.Recipe. /// -/// public class RecipeVM : WebsiteVM { - /// - /// - /// public RecipeVM() { Recipe = new(); @@ -18,10 +19,6 @@ public RecipeVM() RecipeList = []; } - /// - /// - /// - /// public RecipeVM(WebsiteVM website) { WebsiteId = website.WebsiteId; @@ -43,28 +40,15 @@ public RecipeVM(WebsiteVM website) RecipeList = []; } - /// - /// - /// [JsonPropertyName("category")] public RecipeCategoryModel Category { get; set; } - /// - /// Gets the category list. - /// - /// The category list. + [JsonPropertyName("category_list")] public List CategoryList { get; set; } - /// - /// Gets or sets the recipe. - /// - /// The recipe. [JsonPropertyName("recipe")] public RecipeModel Recipe { get; set; } - /// - /// Gets the recipe list. - /// - /// The recipe list. + [JsonPropertyName("recipes")] public List RecipeList { get; set; } } diff --git a/WebSpark.Portal/Areas/RecipeSpark/Services/ChatService.cs b/WebSpark.Portal/Areas/RecipeSpark/Services/ChatService.cs index bad0f7f9..f0a49efb 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Services/ChatService.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Services/ChatService.cs @@ -1,10 +1,10 @@ using Microsoft.EntityFrameworkCore; using System.Security.Cryptography; using System.Text.Json; -using WebSpark.Core.Interfaces; using WebSpark.Portal.Areas.RecipeSpark.Data; using WebSpark.Portal.Areas.RecipeSpark.Data.Entities; using WebSpark.Portal.Areas.RecipeSpark.Models; +using WebSpark.Recipe.Interfaces; namespace WebSpark.Portal.Areas.RecipeSpark.Services; diff --git a/WebSpark.Portal/Areas/RecipeSpark/Services/HintGenerationService.cs b/WebSpark.Portal/Areas/RecipeSpark/Services/HintGenerationService.cs index d7c2fd6a..3ec2687b 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Services/HintGenerationService.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Services/HintGenerationService.cs @@ -2,8 
+2,8 @@ using Microsoft.EntityFrameworkCore; using WebSpark.Portal.Areas.RecipeSpark.Data; using WebSpark.Portal.Areas.RecipeSpark.Models; -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Areas.RecipeSpark.Services; diff --git a/WebSpark.Portal/Areas/RecipeSpark/Services/RecipeChatLLMService.cs b/WebSpark.Portal/Areas/RecipeSpark/Services/RecipeChatLLMService.cs index 99524782..3ddcea59 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Services/RecipeChatLLMService.cs +++ b/WebSpark.Portal/Areas/RecipeSpark/Services/RecipeChatLLMService.cs @@ -1,6 +1,6 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; -using WebSpark.Core.Interfaces; +using WebSpark.Recipe.Interfaces; namespace WebSpark.Portal.Areas.RecipeSpark.Services; diff --git a/WebSpark.Portal/Areas/RecipeSpark/Services/RecipeMenuAdapter.cs b/WebSpark.Portal/Areas/RecipeSpark/Services/RecipeMenuAdapter.cs new file mode 100644 index 00000000..3a447d14 --- /dev/null +++ b/WebSpark.Portal/Areas/RecipeSpark/Services/RecipeMenuAdapter.cs @@ -0,0 +1,95 @@ +using WebSpark.Core.Interfaces; +using WebSpark.Core.Models; +using WebSpark.Recipe.Helpers; +using WebSpark.Recipe.Interfaces; + +namespace WebSpark.Portal.Areas.RecipeSpark.Services; + +/// +/// Adapts IRecipeService to IMenuProvider for portal menu integration. +/// Created as part of 006-extract-recipe-library — RecipeProvider no longer implements IMenuProvider. 
+/// +public class RecipeMenuAdapter(IRecipeService recipeService) : IMenuProvider +{ + public bool Delete(int Id) => recipeService.Delete(Id); + + public IEnumerable GetMenuList() + { + return recipeService.GetRecipeCategoryList() + .Select(cat => new MenuModel + { + Id = cat.Id, + Title = cat.Name, + Controller = "recipe", + Action = "Category", + Description = cat.Description, + Url = RecipeUrlHelper.GetRecipeCategoryURL(cat.Name), + VirtualPath = RecipeUrlHelper.GetRecipeCategoryURL(cat.Name), + Icon = "fa fa-food", + DisplayOrder = cat.DisplayOrder, + DisplayInNavigation = false + }); + } + + public MenuModel GetMenuItem(int Id) + { + var recipe = recipeService.Get(Id); + return new MenuModel + { + Id = recipe.Id, + Title = recipe.Name, + Controller = "recipe", + Action = "Index", + Description = recipe.Description, + Url = recipe.RecipeURL, + VirtualPath = recipe.RecipeURL, + Icon = "fa fa-food", + DisplayOrder = 100, + DisplayInNavigation = false + }; + } + + public async Task GetMenuItemAsync(int Id) + { + return await Task.FromResult(GetMenuItem(Id)); + } + + public List GetSiteMenu(int DomainId) + { + var categories = recipeService.GetRecipeCategoryList() + .Where(c => c.DomainID == DomainId || DomainId == 0) + .Select(cat => new MenuModel + { + Id = cat.Id, + Title = cat.Name, + Controller = "recipe", + Action = "Category", + Description = cat.Description, + Url = RecipeUrlHelper.GetRecipeCategoryURL(cat.Name), + VirtualPath = RecipeUrlHelper.GetRecipeCategoryURL(cat.Name), + Icon = "fa fa-food", + DisplayOrder = cat.DisplayOrder, + DisplayInNavigation = false + }).ToList(); + + var recipes = recipeService.Get() + .Where(r => r.DomainID == DomainId || DomainId == 0) + .Select(r => new MenuModel + { + Id = r.Id, + Title = r.Name, + Controller = "recipe", + Action = "Index", + Description = r.Description, + Url = r.RecipeURL, + VirtualPath = r.RecipeURL, + ParentId = r.RecipeCategoryID, + Icon = "fa fa-food", + DisplayOrder = 100, + DisplayInNavigation = 
false + }).ToList(); + + categories.AddRange(recipes); + return categories; + } +} diff --git a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Edit.cshtml b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Edit.cshtml index 3683fdd0..1517d28a 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Edit.cshtml +++ b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Edit.cshtml @@ -1,4 +1,4 @@ -@model WebSpark.Core.Models.RecipeCategoryModel +@model WebSpark.Recipe.Models.RecipeCategoryModel @{ ViewData["Title"] = Model.Id > 0 ? $"Edit {Model.Name}" : "Create Category"; diff --git a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Index.cshtml b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Index.cshtml index 58757401..dcd069a4 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Index.cshtml +++ b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeCategory/Index.cshtml @@ -1,4 +1,4 @@ -@model IEnumerable +@model IEnumerable @{ ViewData["Title"] = "Recipe Categories"; Layout = "~/Views/Shared/_Layout.cshtml"; diff --git a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Edit.cshtml b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Edit.cshtml index 4254a4c6..a351c28a 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Edit.cshtml +++ b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Edit.cshtml @@ -1,4 +1,4 @@ -@model WebSpark.Core.Models.RecipeImageModel +@model WebSpark.Recipe.Models.RecipeImageModel @{ ViewData["Title"] = "Edit"; diff --git a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Index.cshtml b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Index.cshtml index 06e2bc74..83a19f41 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Index.cshtml +++ b/WebSpark.Portal/Areas/RecipeSpark/Views/RecipeImage/Index.cshtml @@ -1,4 +1,4 @@ -@model IEnumerable +@model IEnumerable @{ ViewData["Title"] = "Recipes"; diff --git 
a/WebSpark.Portal/Areas/RecipeSpark/Views/_ViewImports.cshtml b/WebSpark.Portal/Areas/RecipeSpark/Views/_ViewImports.cshtml index 08dfccbf..40fbe2f3 100644 --- a/WebSpark.Portal/Areas/RecipeSpark/Views/_ViewImports.cshtml +++ b/WebSpark.Portal/Areas/RecipeSpark/Views/_ViewImports.cshtml @@ -1,6 +1,7 @@ @using WebSpark.Core.Models @using WebSpark.Core.Models.ViewModels @using WebSpark.Core.Models.EditModels +@using WebSpark.Recipe.Models @using WebSpark.Core.Extensions @using WebSpark.Core.Resources @using WebSpark.Portal.Areas.RecipeSpark.Controllers diff --git a/WebSpark.Portal/Program.cs b/WebSpark.Portal/Program.cs index 3a6d3eb8..fea18900 100644 --- a/WebSpark.Portal/Program.cs +++ b/WebSpark.Portal/Program.cs @@ -21,6 +21,9 @@ using WebSpark.Core.Interfaces; using WebSpark.Core.Models; using WebSpark.Core.Providers; +using WebSpark.Recipe.Data; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Providers; using WebSpark.HttpClientUtility.Crawler; using WebSpark.HttpClientUtility.MemoryCache; using WebSpark.HttpClientUtility.RequestResult; @@ -100,6 +103,12 @@ builder.Services.AddDbContext(options => options.UseSqlite(RecipeChatConnectionString, b => b.MigrationsAssembly("WebSpark.Portal"))); +// Recipe Domain Context (extracted from WebSpark.Core as part of 006-extract-recipe-library) +// Uses the same SQLite file as WebSparkContext unless explicitly overridden. +var RecipeConnectionString = builder.Configuration.GetValue("RecipeConnection", "Data Source=c:\\websites\\WebSpark\\webspark.db"); +builder.Services.AddDbContext(options => + options.UseSqlite(RecipeConnectionString, b => b.MigrationsAssembly("WebSpark.Recipe"))); + // ======================== @@ -170,9 +179,18 @@ builder.Services.AddScoped(); builder.Services.AddScoped(); builder.Services.AddScoped(); -builder.Services.AddScoped(); +// Recipe services now resolved from WebSpark.Recipe (extracted from WebSpark.Core) +// NOTE: IMenuProvider is registered as MenuProvider (site-wide menus). 
+// RecipeMenuAdapter is registered separately for recipe-specific menu generation. +builder.Services.AddScoped(); builder.Services.AddScoped(); -builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); + +// Health checks — constitution requires DbContextCheck for all DbContext data stores +builder.Services.AddHealthChecks() + .AddDbContextCheck("recipe-db"); + builder.Services.AddScoped(); builder.Services.AddScoped(); builder.Services.AddScoped(); diff --git a/WebSpark.Portal/WebSpark.Portal.csproj b/WebSpark.Portal/WebSpark.Portal.csproj index 4f1e1203..29794566 100644 --- a/WebSpark.Portal/WebSpark.Portal.csproj +++ b/WebSpark.Portal/WebSpark.Portal.csproj @@ -82,6 +82,7 @@ all + all @@ -111,6 +112,7 @@ + diff --git a/WebSpark.Portal/appsettings.Development.json b/WebSpark.Portal/appsettings.Development.json index ef784fed..b6e3ef7b 100644 --- a/WebSpark.Portal/appsettings.Development.json +++ b/WebSpark.Portal/appsettings.Development.json @@ -1,5 +1,6 @@ { "RecipeChatContext": "Data Source=c:\\websites\\WebSpark\\webspark-recipechat.db", + "RecipeConnection": "Data Source=c:\\websites\\WebSpark\\webspark.db", "RecipeChat": { "ApiKeys": [ { diff --git a/WebSpark.Portal/appsettings.Production.json b/WebSpark.Portal/appsettings.Production.json index 0e3953cb..2abfaae9 100644 --- a/WebSpark.Portal/appsettings.Production.json +++ b/WebSpark.Portal/appsettings.Production.json @@ -1,4 +1,5 @@ { + "RecipeConnection": "", "Logging": { "LogLevel": { "Default": "Information", diff --git a/WebSpark.Portal/appsettings.json b/WebSpark.Portal/appsettings.json index 9bc58400..e038f221 100644 --- a/WebSpark.Portal/appsettings.json +++ b/WebSpark.Portal/appsettings.json @@ -2,6 +2,7 @@ "GPTDbContext": "Data Source=c:\\websites\\WebSpark\\PromptSpark.db", "WebSparkUserContext": "Data Source=c:\\websites\\WebSpark\\ControlSparkUser.db", "WebSparkContext": "Data Source=c:\\websites\\WebSpark\\WebSpark.db", + "RecipeConnection": "Data 
Source=c:\\websites\\WebSpark\\webspark.db", "Logging": { "LogLevel": { "Default": "Information", diff --git a/WebSpark.Recipe/Constants/RecipeConstants.cs b/WebSpark.Recipe/Constants/RecipeConstants.cs new file mode 100644 index 00000000..9000fae1 --- /dev/null +++ b/WebSpark.Recipe/Constants/RecipeConstants.cs @@ -0,0 +1,6 @@ +namespace WebSpark.Recipe.Constants; + +public static class RecipeConstants +{ + public const int INT_MOM_DomainId = 2; +} diff --git a/WebSpark.Core/Data/Recipe.cs b/WebSpark.Recipe/Data/Recipe.cs similarity index 87% rename from WebSpark.Core/Data/Recipe.cs rename to WebSpark.Recipe/Data/Recipe.cs index d6c1f5aa..861d245e 100644 --- a/WebSpark.Core/Data/Recipe.cs +++ b/WebSpark.Recipe/Data/Recipe.cs @@ -1,6 +1,6 @@ -namespace WebSpark.Core.Data; +namespace WebSpark.Recipe.Data; -public partial class Recipe : BaseEntity +public partial class Recipe : RecipeBaseEntity { public string Name { get; set; } = string.Empty; public string Description { get; set; } = string.Empty; @@ -15,7 +15,7 @@ public partial class Recipe : BaseEntity public int RatingCount { get; set; } public int CommentCount { get; set; } public DateTime LastViewDt { get; set; } - public virtual WebSite Domain { get; set; } = null!; + public int? DomainId { get; set; } public virtual RecipeCategory RecipeCategory { get; set; } = null!; public virtual ICollection RecipeComment { get; set; } = []; public virtual ICollection RecipeImage { get; set; } = []; diff --git a/WebSpark.Recipe/Data/RecipeBaseEntity.cs b/WebSpark.Recipe/Data/RecipeBaseEntity.cs new file mode 100644 index 00000000..e52ea8a9 --- /dev/null +++ b/WebSpark.Recipe/Data/RecipeBaseEntity.cs @@ -0,0 +1,13 @@ +using System.ComponentModel.DataAnnotations; + +namespace WebSpark.Recipe.Data; + +public class RecipeBaseEntity +{ + [Key] + public int Id { get; set; } + public DateTime CreatedDate { get; set; } + public DateTime UpdatedDate { get; set; } + public int? UpdatedID { get; set; } + public int? 
CreatedID { get; set; } +} diff --git a/WebSpark.Core/Data/RecipeCategory.cs b/WebSpark.Recipe/Data/RecipeCategory.cs similarity index 65% rename from WebSpark.Core/Data/RecipeCategory.cs rename to WebSpark.Recipe/Data/RecipeCategory.cs index 0419f572..c0f3fc54 100644 --- a/WebSpark.Core/Data/RecipeCategory.cs +++ b/WebSpark.Recipe/Data/RecipeCategory.cs @@ -1,11 +1,11 @@ -namespace WebSpark.Core.Data; +namespace WebSpark.Recipe.Data; -public partial class RecipeCategory : BaseEntity +public partial class RecipeCategory : RecipeBaseEntity { public string Name { get; set; } = string.Empty; public string Comment { get; set; } = string.Empty; public int DisplayOrder { get; set; } public bool IsActive { get; set; } - public virtual WebSite Domain { get; set; } = null!; + public int? DomainId { get; set; } public virtual ICollection Recipe { get; set; } = []; } diff --git a/WebSpark.Core/Data/RecipeComment.cs b/WebSpark.Recipe/Data/RecipeComment.cs similarity index 70% rename from WebSpark.Core/Data/RecipeComment.cs rename to WebSpark.Recipe/Data/RecipeComment.cs index 55e74be0..338a1e03 100644 --- a/WebSpark.Core/Data/RecipeComment.cs +++ b/WebSpark.Recipe/Data/RecipeComment.cs @@ -1,6 +1,6 @@ -namespace WebSpark.Core.Data; +namespace WebSpark.Recipe.Data; -public partial class RecipeComment : BaseEntity +public partial class RecipeComment : RecipeBaseEntity { public required string Name { get; set; } public required string Email { get; set; } diff --git a/WebSpark.Recipe/Data/RecipeDbContext.cs b/WebSpark.Recipe/Data/RecipeDbContext.cs new file mode 100644 index 00000000..a3434150 --- /dev/null +++ b/WebSpark.Recipe/Data/RecipeDbContext.cs @@ -0,0 +1,112 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Diagnostics; + +namespace WebSpark.Recipe.Data; + +public partial class RecipeDbContext(DbContextOptions options) : DbContext(options) +{ + protected readonly DbContextOptions _options = options; + + public virtual DbSet Recipe { get; set; } 
+ public virtual DbSet RecipeCategory { get; set; } + public virtual DbSet RecipeComment { get; set; } + public virtual DbSet RecipeImage { get; set; } + + private void UpdateDateTrackingFields() + { + var entries = ChangeTracker + .Entries() + .Where(e => e.Entity is RecipeBaseEntity && ( + e.State == EntityState.Added + || e.State == EntityState.Modified)); + + foreach (var entityEntry in entries) + { + ((RecipeBaseEntity)entityEntry.Entity).UpdatedDate = DateTime.UtcNow; + if (entityEntry.State == EntityState.Added) + { + ((RecipeBaseEntity)entityEntry.Entity).CreatedDate = DateTime.UtcNow; + } + } + } + + protected override void OnModelCreating(ModelBuilder modelBuilder) + { + modelBuilder.Entity(entity => + { + entity.Property(e => e.AuthorName) + .IsRequired() + .HasMaxLength(50); + entity.Property(e => e.Ingredients).IsRequired(); + entity.Property(e => e.Instructions).IsRequired(); + entity.Property(e => e.Description).HasMaxLength(500); + entity.Property(e => e.Keywords).HasMaxLength(100); + entity.Property(e => e.Name) + .IsRequired() + .HasMaxLength(150); + entity.Property(e => e.DomainId); + entity.HasOne(d => d.RecipeCategory) + .WithMany(p => p.Recipe) + .OnDelete(DeleteBehavior.Restrict) + .HasConstraintName("FK_Recipe_RecipeCategory") + .IsRequired(); + }); + + modelBuilder.Entity(entity => + { + entity.Property(e => e.Comment).HasMaxLength(1500); + entity.Property(e => e.Name) + .IsRequired() + .HasMaxLength(70); + entity.Property(e => e.DomainId); + entity.HasMany(entity => entity.Recipe) + .WithOne(entity => entity.RecipeCategory) + .OnDelete(DeleteBehavior.Restrict); + }); + + modelBuilder.Entity(entity => + { + entity.Property(e => e.Email) + .IsRequired() + .HasMaxLength(50); + entity.Property(e => e.Name) + .IsRequired() + .HasMaxLength(60); + entity.Property(e => e.Comment).IsRequired(); + entity.HasOne(d => d.Recipe) + .WithMany(p => p.RecipeComment) + .OnDelete(DeleteBehavior.ClientCascade) + .HasConstraintName("FK_RecipeComment_Recipe"); 
+ }); + + modelBuilder.Entity(entity => + { + entity.Property(e => e.FileDescription).HasMaxLength(255); + entity.Property(e => e.FileName) + .IsRequired() + .HasMaxLength(50); + entity.HasOne(d => d.Recipe) + .WithMany(p => p.RecipeImage) + .OnDelete(DeleteBehavior.ClientCascade) + .HasConstraintName("FK_RecipeImage_Recipe"); + }); + } + + protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder) + { + optionsBuilder.ConfigureWarnings(warnings => + warnings.Ignore(RelationalEventId.NonTransactionalMigrationOperationWarning)); + } + + public override int SaveChanges() + { + UpdateDateTrackingFields(); + return base.SaveChanges(); + } + + public override async Task SaveChangesAsync(CancellationToken cancellationToken = default) + { + UpdateDateTrackingFields(); + return await base.SaveChangesAsync(cancellationToken); + } +} diff --git a/WebSpark.Recipe/Data/RecipeDbContextFactory.cs b/WebSpark.Recipe/Data/RecipeDbContextFactory.cs new file mode 100644 index 00000000..6ac6a07a --- /dev/null +++ b/WebSpark.Recipe/Data/RecipeDbContextFactory.cs @@ -0,0 +1,15 @@ +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Design; + +namespace WebSpark.Recipe.Data; + +public class RecipeDbContextFactory : IDesignTimeDbContextFactory +{ + public RecipeDbContext CreateDbContext(string[] args) + { + var optionsBuilder = new DbContextOptionsBuilder(); + optionsBuilder.UseSqlite("Data Source=webspark-recipe-design.db", + b => b.MigrationsAssembly("WebSpark.Recipe")); + return new RecipeDbContext(optionsBuilder.Options); + } +} diff --git a/WebSpark.Core/Data/RecipeImage.cs b/WebSpark.Recipe/Data/RecipeImage.cs similarity index 76% rename from WebSpark.Core/Data/RecipeImage.cs rename to WebSpark.Recipe/Data/RecipeImage.cs index 855131e7..2807b4c1 100644 --- a/WebSpark.Core/Data/RecipeImage.cs +++ b/WebSpark.Recipe/Data/RecipeImage.cs @@ -1,6 +1,6 @@ -namespace WebSpark.Core.Data; +namespace WebSpark.Recipe.Data; -public partial class 
RecipeImage : BaseEntity +public partial class RecipeImage : RecipeBaseEntity { public string FileName { get; set; } = string.Empty; public string FileDescription { get; set; } = string.Empty; diff --git a/WebSpark.Recipe/GlobalUsings.cs b/WebSpark.Recipe/GlobalUsings.cs new file mode 100644 index 00000000..2b752a3a --- /dev/null +++ b/WebSpark.Recipe/GlobalUsings.cs @@ -0,0 +1 @@ +global using RecipeEntity = WebSpark.Recipe.Data.Recipe; diff --git a/WebSpark.Core/Helpers/RecipeHelper.cs b/WebSpark.Recipe/Helpers/RecipeHelper.cs similarity index 56% rename from WebSpark.Core/Helpers/RecipeHelper.cs rename to WebSpark.Recipe/Helpers/RecipeHelper.cs index 477bc3e5..a44ae36b 100644 --- a/WebSpark.Core/Helpers/RecipeHelper.cs +++ b/WebSpark.Recipe/Helpers/RecipeHelper.cs @@ -1,22 +1,26 @@ -using WebSpark.Core.Data; +using WebSpark.Recipe.Data; -namespace WebSpark.Core.Helpers; +namespace WebSpark.Recipe.Helpers; public static class RecipeHelper { - public static RecipeCategory GetRecipeCategory(string CategoryName, int displayOrder) + public static RecipeCategory GetRecipeCategory(string categoryName, int displayOrder) { - var myCat = new RecipeCategory() + return new RecipeCategory() { DisplayOrder = displayOrder, IsActive = true, - Comment = CategoryName, - Name = CategoryName + Comment = categoryName, + Name = categoryName }; - return myCat; } - public static Recipe GetRecipe( - WebSite domain, + + /// + /// Creates a Recipe entity. Takes domainId (int) instead of WebSite navigation + /// to keep WebSpark.Recipe independent of WebSpark.Core. + /// + public static RecipeEntity GetRecipe( + int domainId, string name, string authorName, string description, @@ -25,7 +29,7 @@ public static Recipe GetRecipe( RecipeCategory category, string keyWords = "") { - return new Recipe() + return new RecipeEntity() { Name = name, AuthorName = authorName, @@ -33,7 +37,7 @@ public static Recipe GetRecipe( Keywords = string.IsNullOrWhiteSpace(keyWords) ? 
name : keyWords, Ingredients = ingredients, Instructions = instructions, - Domain = domain, + DomainId = domainId, RecipeCategory = category, }; } diff --git a/WebSpark.Recipe/Helpers/RecipeUrlHelper.cs b/WebSpark.Recipe/Helpers/RecipeUrlHelper.cs new file mode 100644 index 00000000..504ec49b --- /dev/null +++ b/WebSpark.Recipe/Helpers/RecipeUrlHelper.cs @@ -0,0 +1,28 @@ +using System.Globalization; + +namespace WebSpark.Recipe.Helpers; + +public static class RecipeUrlHelper +{ + public static string GetSafePath(string name) + { + return name == null + ? string.Empty + : $"{name.Replace("&", "-").Replace("\n", string.Empty).Replace("/", "-").Replace("'", "-").Replace(" ", "-").ToLower(CultureInfo.CurrentCulture)}"; + } + + public static string GetSafePath(string name, string root) + { + return $"{GetSafePath(root)}{GetSafePath(name)}"; + } + + public static string GetRecipeURL(string recipeName) + { + return $"/recipe/{GetSafePath(recipeName)}"; + } + + public static string GetRecipeCategoryURL(string name) + { + return $"/recipe/category/{GetSafePath(name)}"; + } +} diff --git a/WebSpark.Recipe/Interfaces/IRecipeImageService.cs b/WebSpark.Recipe/Interfaces/IRecipeImageService.cs new file mode 100644 index 00000000..13de645f --- /dev/null +++ b/WebSpark.Recipe/Interfaces/IRecipeImageService.cs @@ -0,0 +1,12 @@ +using WebSpark.Recipe.Models; + +namespace WebSpark.Recipe.Interfaces; + +public interface IRecipeImageService +{ + void AddRecipeImage(RecipeImageModel recipeImageModel); + void DeleteRecipeImage(int id); + RecipeImageModel? 
GetRecipeImageById(int id); + IEnumerable GetRecipeImages(); + void UpdateRecipeImage(RecipeImageModel recipeImageModel); +} diff --git a/WebSpark.Recipe/Interfaces/IRecipeService.cs b/WebSpark.Recipe/Interfaces/IRecipeService.cs new file mode 100644 index 00000000..92fac8fb --- /dev/null +++ b/WebSpark.Recipe/Interfaces/IRecipeService.cs @@ -0,0 +1,26 @@ +using WebSpark.Recipe.Models; + +namespace WebSpark.Recipe.Interfaces; + +/// +/// Recipe Service Interface. +/// +/// +/// GetRecipeVMHostAsync has been intentionally removed from this interface — it was a +/// presentation-layer concern. Portal callers must construct RecipeVM directly using +/// Get() and GetRecipeCategoryList(). +/// +public interface IRecipeService +{ + bool Delete(int Id); + bool Delete(RecipeCategoryModel saveItem); + RecipeModel Get(int Id); + RecipeCategoryModel GetRecipeCategoryById(int Id); + List GetRecipeCategoryList(); + IEnumerable Get(); + RecipeModel Save(RecipeModel saveItem); + IEnumerable Save(List? saveRecipes); + RecipeCategoryModel Save(RecipeCategoryModel saveItem); + List Save(List? 
saveCategories); + List GetRecipeImages(); +} diff --git a/WebSpark.Recipe/Migrations/20260501192643_InitialCreate.Designer.cs b/WebSpark.Recipe/Migrations/20260501192643_InitialCreate.Designer.cs new file mode 100644 index 00000000..57382957 --- /dev/null +++ b/WebSpark.Recipe/Migrations/20260501192643_InitialCreate.Designer.cs @@ -0,0 +1,282 @@ +// +using System; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Migrations; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using WebSpark.Recipe.Data; + +#nullable disable + +namespace WebSpark.Recipe.Migrations +{ + [DbContext(typeof(RecipeDbContext))] + [Migration("20260501192643_InitialCreate")] + partial class InitialCreate + { + /// + protected override void BuildTargetModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "10.0.7"); + + modelBuilder.Entity("WebSpark.Recipe.Data.Recipe", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AuthorName") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("AverageRating") + .HasColumnType("REAL"); + + b.Property("CommentCount") + .HasColumnType("INTEGER"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("Description") + .IsRequired() + .HasMaxLength(500) + .HasColumnType("TEXT"); + + b.Property("DomainId") + .HasColumnType("INTEGER"); + + b.Property("Ingredients") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("Instructions") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("IsApproved") + .HasColumnType("INTEGER"); + + b.Property("Keywords") + .IsRequired() + .HasMaxLength(100) + .HasColumnType("TEXT"); + + b.Property("LastViewDt") + .HasColumnType("TEXT"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(150) + .HasColumnType("TEXT"); + + 
b.Property("RatingCount") + .HasColumnType("INTEGER"); + + b.Property("RecipeCategoryId") + .HasColumnType("INTEGER"); + + b.Property("Servings") + .HasColumnType("INTEGER"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.Property("ViewCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RecipeCategoryId"); + + b.ToTable("Recipe"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeCategory", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Comment") + .IsRequired() + .HasMaxLength(1500) + .HasColumnType("TEXT"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("DisplayOrder") + .HasColumnType("INTEGER"); + + b.Property("DomainId") + .HasColumnType("INTEGER"); + + b.Property("IsActive") + .HasColumnType("INTEGER"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(70) + .HasColumnType("TEXT"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("RecipeCategory"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeComment", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Comment") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("Email") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(60) + .HasColumnType("TEXT"); + + b.Property("RecipeId") + .HasColumnType("INTEGER"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RecipeId"); + + b.ToTable("RecipeComment"); + }); + + 
modelBuilder.Entity("WebSpark.Recipe.Data.RecipeImage", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("DisplayOrder") + .HasColumnType("INTEGER"); + + b.Property("FileDescription") + .IsRequired() + .HasMaxLength(255) + .HasColumnType("TEXT"); + + b.Property("FileName") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("ImageData") + .IsRequired() + .HasColumnType("BLOB"); + + b.Property("RecipeId") + .HasColumnType("INTEGER"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RecipeId"); + + b.ToTable("RecipeImage"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.Recipe", b => + { + b.HasOne("WebSpark.Recipe.Data.RecipeCategory", "RecipeCategory") + .WithMany("Recipe") + .HasForeignKey("RecipeCategoryId") + .OnDelete(DeleteBehavior.Restrict) + .IsRequired() + .HasConstraintName("FK_Recipe_RecipeCategory"); + + b.Navigation("RecipeCategory"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeComment", b => + { + b.HasOne("WebSpark.Recipe.Data.Recipe", "Recipe") + .WithMany("RecipeComment") + .HasForeignKey("RecipeId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired() + .HasConstraintName("FK_RecipeComment_Recipe"); + + b.Navigation("Recipe"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeImage", b => + { + b.HasOne("WebSpark.Recipe.Data.Recipe", "Recipe") + .WithMany("RecipeImage") + .HasForeignKey("RecipeId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired() + .HasConstraintName("FK_RecipeImage_Recipe"); + + b.Navigation("Recipe"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.Recipe", b => + { + b.Navigation("RecipeComment"); + + b.Navigation("RecipeImage"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeCategory", b => + 
{ + b.Navigation("Recipe"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/WebSpark.Recipe/Migrations/20260501192643_InitialCreate.cs b/WebSpark.Recipe/Migrations/20260501192643_InitialCreate.cs new file mode 100644 index 00000000..e26bc6e6 --- /dev/null +++ b/WebSpark.Recipe/Migrations/20260501192643_InitialCreate.cs @@ -0,0 +1,155 @@ +using System; +using Microsoft.EntityFrameworkCore.Migrations; + +#nullable disable + +namespace WebSpark.Recipe.Migrations +{ + /// + public partial class InitialCreate : Migration + { + /// + protected override void Up(MigrationBuilder migrationBuilder) + { + migrationBuilder.CreateTable( + name: "RecipeCategory", + columns: table => new + { + Id = table.Column(type: "INTEGER", nullable: false) + .Annotation("Sqlite:Autoincrement", true), + Name = table.Column(type: "TEXT", maxLength: 70, nullable: false), + Comment = table.Column(type: "TEXT", maxLength: 1500, nullable: false), + DisplayOrder = table.Column(type: "INTEGER", nullable: false), + IsActive = table.Column(type: "INTEGER", nullable: false), + DomainId = table.Column(type: "INTEGER", nullable: true), + CreatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedID = table.Column(type: "INTEGER", nullable: true), + CreatedID = table.Column(type: "INTEGER", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_RecipeCategory", x => x.Id); + }); + + migrationBuilder.CreateTable( + name: "Recipe", + columns: table => new + { + Id = table.Column(type: "INTEGER", nullable: false) + .Annotation("Sqlite:Autoincrement", true), + Name = table.Column(type: "TEXT", maxLength: 150, nullable: false), + Description = table.Column(type: "TEXT", maxLength: 500, nullable: false), + Keywords = table.Column(type: "TEXT", maxLength: 100, nullable: false), + AuthorName = table.Column(type: "TEXT", maxLength: 50, nullable: false), + Servings = table.Column(type: "INTEGER", nullable: false), 
+ Ingredients = table.Column(type: "TEXT", nullable: false), + Instructions = table.Column(type: "TEXT", nullable: false), + IsApproved = table.Column(type: "INTEGER", nullable: false), + ViewCount = table.Column(type: "INTEGER", nullable: false), + AverageRating = table.Column(type: "REAL", nullable: false), + RatingCount = table.Column(type: "INTEGER", nullable: false), + CommentCount = table.Column(type: "INTEGER", nullable: false), + LastViewDt = table.Column(type: "TEXT", nullable: false), + DomainId = table.Column(type: "INTEGER", nullable: true), + RecipeCategoryId = table.Column(type: "INTEGER", nullable: false), + CreatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedID = table.Column(type: "INTEGER", nullable: true), + CreatedID = table.Column(type: "INTEGER", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_Recipe", x => x.Id); + table.ForeignKey( + name: "FK_Recipe_RecipeCategory", + column: x => x.RecipeCategoryId, + principalTable: "RecipeCategory", + principalColumn: "Id", + onDelete: ReferentialAction.Restrict); + }); + + migrationBuilder.CreateTable( + name: "RecipeComment", + columns: table => new + { + Id = table.Column(type: "INTEGER", nullable: false) + .Annotation("Sqlite:Autoincrement", true), + Name = table.Column(type: "TEXT", maxLength: 60, nullable: false), + Email = table.Column(type: "TEXT", maxLength: 50, nullable: false), + Comment = table.Column(type: "TEXT", nullable: false), + RecipeId = table.Column(type: "INTEGER", nullable: false), + CreatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedID = table.Column(type: "INTEGER", nullable: true), + CreatedID = table.Column(type: "INTEGER", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_RecipeComment", x => x.Id); + table.ForeignKey( + name: "FK_RecipeComment_Recipe", + column: x => x.RecipeId, 
+ principalTable: "Recipe", + principalColumn: "Id"); + }); + + migrationBuilder.CreateTable( + name: "RecipeImage", + columns: table => new + { + Id = table.Column(type: "INTEGER", nullable: false) + .Annotation("Sqlite:Autoincrement", true), + FileName = table.Column(type: "TEXT", maxLength: 50, nullable: false), + FileDescription = table.Column(type: "TEXT", maxLength: 255, nullable: false), + DisplayOrder = table.Column(type: "INTEGER", nullable: false), + ImageData = table.Column(type: "BLOB", nullable: false), + RecipeId = table.Column(type: "INTEGER", nullable: false), + CreatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedDate = table.Column(type: "TEXT", nullable: false), + UpdatedID = table.Column(type: "INTEGER", nullable: true), + CreatedID = table.Column(type: "INTEGER", nullable: true) + }, + constraints: table => + { + table.PrimaryKey("PK_RecipeImage", x => x.Id); + table.ForeignKey( + name: "FK_RecipeImage_Recipe", + column: x => x.RecipeId, + principalTable: "Recipe", + principalColumn: "Id"); + }); + + migrationBuilder.CreateIndex( + name: "IX_Recipe_RecipeCategoryId", + table: "Recipe", + column: "RecipeCategoryId"); + + migrationBuilder.CreateIndex( + name: "IX_RecipeComment_RecipeId", + table: "RecipeComment", + column: "RecipeId"); + + migrationBuilder.CreateIndex( + name: "IX_RecipeImage_RecipeId", + table: "RecipeImage", + column: "RecipeId"); + } + + /// + protected override void Down(MigrationBuilder migrationBuilder) + { + migrationBuilder.DropTable( + name: "RecipeComment"); + + migrationBuilder.DropTable( + name: "RecipeImage"); + + migrationBuilder.DropTable( + name: "Recipe"); + + migrationBuilder.DropTable( + name: "RecipeCategory"); + } + } +} diff --git a/WebSpark.Recipe/Migrations/RecipeDbContextModelSnapshot.cs b/WebSpark.Recipe/Migrations/RecipeDbContextModelSnapshot.cs new file mode 100644 index 00000000..87281ba7 --- /dev/null +++ b/WebSpark.Recipe/Migrations/RecipeDbContextModelSnapshot.cs @@ -0,0 +1,279 @@ 
+// +using System; +using Microsoft.EntityFrameworkCore; +using Microsoft.EntityFrameworkCore.Infrastructure; +using Microsoft.EntityFrameworkCore.Storage.ValueConversion; +using WebSpark.Recipe.Data; + +#nullable disable + +namespace WebSpark.Recipe.Migrations +{ + [DbContext(typeof(RecipeDbContext))] + partial class RecipeDbContextModelSnapshot : ModelSnapshot + { + protected override void BuildModel(ModelBuilder modelBuilder) + { +#pragma warning disable 612, 618 + modelBuilder.HasAnnotation("ProductVersion", "10.0.7"); + + modelBuilder.Entity("WebSpark.Recipe.Data.Recipe", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("AuthorName") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("AverageRating") + .HasColumnType("REAL"); + + b.Property("CommentCount") + .HasColumnType("INTEGER"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("Description") + .IsRequired() + .HasMaxLength(500) + .HasColumnType("TEXT"); + + b.Property("DomainId") + .HasColumnType("INTEGER"); + + b.Property("Ingredients") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("Instructions") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("IsApproved") + .HasColumnType("INTEGER"); + + b.Property("Keywords") + .IsRequired() + .HasMaxLength(100) + .HasColumnType("TEXT"); + + b.Property("LastViewDt") + .HasColumnType("TEXT"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(150) + .HasColumnType("TEXT"); + + b.Property("RatingCount") + .HasColumnType("INTEGER"); + + b.Property("RecipeCategoryId") + .HasColumnType("INTEGER"); + + b.Property("Servings") + .HasColumnType("INTEGER"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.Property("ViewCount") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RecipeCategoryId"); + + 
b.ToTable("Recipe"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeCategory", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Comment") + .IsRequired() + .HasMaxLength(1500) + .HasColumnType("TEXT"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("DisplayOrder") + .HasColumnType("INTEGER"); + + b.Property("DomainId") + .HasColumnType("INTEGER"); + + b.Property("IsActive") + .HasColumnType("INTEGER"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(70) + .HasColumnType("TEXT"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.ToTable("RecipeCategory"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeComment", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("Comment") + .IsRequired() + .HasColumnType("TEXT"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("Email") + .IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("Name") + .IsRequired() + .HasMaxLength(60) + .HasColumnType("TEXT"); + + b.Property("RecipeId") + .HasColumnType("INTEGER"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RecipeId"); + + b.ToTable("RecipeComment"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeImage", b => + { + b.Property("Id") + .ValueGeneratedOnAdd() + .HasColumnType("INTEGER"); + + b.Property("CreatedDate") + .HasColumnType("TEXT"); + + b.Property("CreatedID") + .HasColumnType("INTEGER"); + + b.Property("DisplayOrder") + .HasColumnType("INTEGER"); + + b.Property("FileDescription") + .IsRequired() + .HasMaxLength(255) + .HasColumnType("TEXT"); + + b.Property("FileName") + 
.IsRequired() + .HasMaxLength(50) + .HasColumnType("TEXT"); + + b.Property("ImageData") + .IsRequired() + .HasColumnType("BLOB"); + + b.Property("RecipeId") + .HasColumnType("INTEGER"); + + b.Property("UpdatedDate") + .HasColumnType("TEXT"); + + b.Property("UpdatedID") + .HasColumnType("INTEGER"); + + b.HasKey("Id"); + + b.HasIndex("RecipeId"); + + b.ToTable("RecipeImage"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.Recipe", b => + { + b.HasOne("WebSpark.Recipe.Data.RecipeCategory", "RecipeCategory") + .WithMany("Recipe") + .HasForeignKey("RecipeCategoryId") + .OnDelete(DeleteBehavior.Restrict) + .IsRequired() + .HasConstraintName("FK_Recipe_RecipeCategory"); + + b.Navigation("RecipeCategory"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeComment", b => + { + b.HasOne("WebSpark.Recipe.Data.Recipe", "Recipe") + .WithMany("RecipeComment") + .HasForeignKey("RecipeId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired() + .HasConstraintName("FK_RecipeComment_Recipe"); + + b.Navigation("Recipe"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeImage", b => + { + b.HasOne("WebSpark.Recipe.Data.Recipe", "Recipe") + .WithMany("RecipeImage") + .HasForeignKey("RecipeId") + .OnDelete(DeleteBehavior.ClientCascade) + .IsRequired() + .HasConstraintName("FK_RecipeImage_Recipe"); + + b.Navigation("Recipe"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.Recipe", b => + { + b.Navigation("RecipeComment"); + + b.Navigation("RecipeImage"); + }); + + modelBuilder.Entity("WebSpark.Recipe.Data.RecipeCategory", b => + { + b.Navigation("Recipe"); + }); +#pragma warning restore 612, 618 + } + } +} diff --git a/WebSpark.Core/Models/EditModels/RecipeCategoryEditModel.cs b/WebSpark.Recipe/Models/EditModels/RecipeCategoryEditModel.cs similarity index 81% rename from WebSpark.Core/Models/EditModels/RecipeCategoryEditModel.cs rename to WebSpark.Recipe/Models/EditModels/RecipeCategoryEditModel.cs index ddd2e430..abc6a939 100644 --- 
a/WebSpark.Core/Models/EditModels/RecipeCategoryEditModel.cs +++ b/WebSpark.Recipe/Models/EditModels/RecipeCategoryEditModel.cs @@ -1,11 +1,9 @@ -namespace WebSpark.Core.Models.EditModels; +namespace WebSpark.Recipe.Models.EditModels; public class RecipeCategoryEditModel : RecipeCategoryModel { - public RecipeCategoryEditModel() - { + public RecipeCategoryEditModel() { } - } public RecipeCategoryEditModel(RecipeCategoryModel model) { if (model == null) return; @@ -16,6 +14,5 @@ public RecipeCategoryEditModel(RecipeCategoryModel model) IsActive = model.IsActive; Recipes = model.Recipes; Url = model.Url; - } } diff --git a/WebSpark.Recipe/Models/EditModels/RecipeEditModel.cs b/WebSpark.Recipe/Models/EditModels/RecipeEditModel.cs new file mode 100644 index 00000000..74ff8917 --- /dev/null +++ b/WebSpark.Recipe/Models/EditModels/RecipeEditModel.cs @@ -0,0 +1,35 @@ +namespace WebSpark.Recipe.Models.EditModels; + +public class RecipeEditModel : RecipeModel +{ + public RecipeEditModel() { } + + public RecipeEditModel(RecipeModel recipe) + { + if (recipe == null) return; + AuthorNM = recipe.AuthorNM; + AverageRating = recipe.AverageRating; + IsApproved = recipe.IsApproved; + CommentCount = recipe.CommentCount; + Description = recipe.Description; + FileDescription = recipe.FileDescription; + LastViewDT = recipe.LastViewDT; + FileName = recipe.FileName; + Id = recipe.Id; + ModifiedDT = recipe.ModifiedDT; + RecipeCategoryID = recipe.RecipeCategoryID; + Ingredients = recipe.Ingredients; + Instructions = recipe.Instructions; + ModifiedID = recipe.ModifiedID; + Servings = recipe.Servings; + Name = recipe.Name; + RatingCount = recipe.RatingCount; + RecipeCategories = recipe.RecipeCategories; + RecipeCategory = recipe.RecipeCategory; + RecipeCategoryNM = recipe.RecipeCategoryNM; + RecipeURL = recipe.RecipeURL; + ViewCount = recipe.ViewCount; + } + + public List Categories { get; set; } = []; +} diff --git a/WebSpark.Recipe/Models/RecipeCategoryModel.cs 
b/WebSpark.Recipe/Models/RecipeCategoryModel.cs new file mode 100644 index 00000000..da064529 --- /dev/null +++ b/WebSpark.Recipe/Models/RecipeCategoryModel.cs @@ -0,0 +1,41 @@ +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; +using WebSpark.Recipe.Constants; +using WebSpark.Recipe.Helpers; + +namespace WebSpark.Recipe.Models; + +public class RecipeCategoryModel +{ + public RecipeCategoryModel() + { + Recipes = []; + } + + public int Id { get; set; } + + [JsonPropertyName("name")] + [Display(Name = "Category")] + [StringLength(50, ErrorMessage = "Max length is 50.")] + [DataType(DataType.Text)] + [Required] + public string Name { get; set; } = string.Empty; + + [JsonPropertyName("description")] + [Display(Name = "Description")] + [StringLength(100, ErrorMessage = "Max length is 100.")] + [DataType(DataType.MultilineText)] + public string Description { get; set; } = string.Empty; + + [JsonPropertyName("order")] + [Display(Name = "Order")] + public int DisplayOrder { get; set; } + + public bool IsActive { get; set; } + + public List Recipes { get; set; } + + public string Url { get; set; } = string.Empty; + + public int DomainID { get; set; } = RecipeConstants.INT_MOM_DomainId; +} diff --git a/WebSpark.Recipe/Models/RecipeImageModel.cs b/WebSpark.Recipe/Models/RecipeImageModel.cs new file mode 100644 index 00000000..4039d57b --- /dev/null +++ b/WebSpark.Recipe/Models/RecipeImageModel.cs @@ -0,0 +1,11 @@ +namespace WebSpark.Recipe.Models; + +public class RecipeImageModel +{ + public int Id { get; set; } + public string FileName { get; set; } = string.Empty; + public string FileDescription { get; set; } = string.Empty; + public int DisplayOrder { get; set; } + public byte[] ImageData { get; set; } = Array.Empty(); + public RecipeModel Recipe { get; set; } = new RecipeModel(); +} diff --git a/WebSpark.Recipe/Models/RecipeModel.cs b/WebSpark.Recipe/Models/RecipeModel.cs new file mode 100644 index 00000000..9082ae8e --- /dev/null +++ 
b/WebSpark.Recipe/Models/RecipeModel.cs @@ -0,0 +1,83 @@ +using System.ComponentModel; +using System.ComponentModel.DataAnnotations; +using WebSpark.Recipe.Constants; +using WebSpark.Recipe.Helpers; + +namespace WebSpark.Recipe.Models; + +public class RecipeModel +{ + public RecipeModel() + { + RecipeCategory = new RecipeCategoryModel(); + } + + [DisplayName("Author")] + [Required] + public string AuthorNM { get; set; } = string.Empty; + + [DisplayName("Average Ratings")] + public double AverageRating { get; set; } + + [DisplayName("Comments")] + public int CommentCount { get; set; } + + public string FileDescription { get; set; } = string.Empty; + + public string FileName { get; set; } = string.Empty; + + [DisplayName("Ingredients")] + public string Ingredients { get; set; } = string.Empty; + + [DisplayName("Instructions")] + public string Instructions { get; set; } = string.Empty; + + [DisplayName("Approved")] + public bool IsApproved { get; set; } + + [DisplayName("Last View")] + public DateTime LastViewDT { get; set; } + + [DisplayName("Last Modified")] + public DateTime ModifiedDT { get; set; } + + public int ModifiedID { get; set; } + + [DisplayName("Ratings Count")] + public int RatingCount { get; set; } + + [DisplayName("Servings")] + public int Servings { get; set; } + + public RecipeCategoryModel RecipeCategory { get; set; } + + [DisplayName("Category")] + [Required] + public int RecipeCategoryID { get; set; } + + [DisplayName("Category")] + public string RecipeCategoryNM { get; set; } = string.Empty; + + [DisplayName("Description")] + [Required] + public string Description { get; set; } = string.Empty; + + public int Id { get; set; } + + [DisplayName("Recipe")] + [Required] + public string Name { get; set; } = string.Empty; + + [DisplayName("View Count")] + public int ViewCount { get; set; } + + public string RecipeURL { get; set; } = string.Empty; + + public List Images { get; set; } = []; + + public IEnumerable RecipeCategories { get; set; } = 
Array.Empty(); + + public int DomainID { get; set; } = RecipeConstants.INT_MOM_DomainId; + + public string SEO_Keywords { get; set; } = string.Empty; +} diff --git a/WebSpark.Recipe/Models/RecipeOptionModel.cs b/WebSpark.Recipe/Models/RecipeOptionModel.cs new file mode 100644 index 00000000..29173e25 --- /dev/null +++ b/WebSpark.Recipe/Models/RecipeOptionModel.cs @@ -0,0 +1,9 @@ +namespace WebSpark.Recipe.Models; + +public class RecipeOptionModel +{ + public string Value { get; set; } = string.Empty; + public string Text { get; set; } = string.Empty; + public bool IsDefault { get; set; } + public bool IsSelected { get; set; } +} diff --git a/WebSpark.Core/Providers/RecipeImageService.cs b/WebSpark.Recipe/Providers/RecipeImageService.cs similarity index 52% rename from WebSpark.Core/Providers/RecipeImageService.cs rename to WebSpark.Recipe/Providers/RecipeImageService.cs index d3cf0182..e85d1857 100644 --- a/WebSpark.Core/Providers/RecipeImageService.cs +++ b/WebSpark.Recipe/Providers/RecipeImageService.cs @@ -1,21 +1,15 @@ -using WebSpark.Core.Data; +using Microsoft.EntityFrameworkCore; +using WebSpark.Recipe.Data; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; -namespace WebSpark.Core.Providers; +namespace WebSpark.Recipe.Providers; -public interface IRecipeImageService -{ - void AddRecipeImage(Models.RecipeImageModel recipeImageModel); - void DeleteRecipeImage(int id); - Models.RecipeImageModel? GetRecipeImageById(int id); - IEnumerable GetRecipeImages(); - void UpdateRecipeImage(Models.RecipeImageModel recipeImageModel); -} - -public class RecipeImageService(WebSparkDbContext dbContext) : IRecipeImageService, IDisposable +public class RecipeImageService(RecipeDbContext dbContext) : IRecipeImageService, IDisposable { private bool disposedValue; - private static RecipeImage ConvertToEntity(Models.RecipeImageModel recipeImageModel, RecipeImage? 
recipeImage = null) + private static RecipeImage ConvertToEntity(RecipeImageModel recipeImageModel, RecipeImage? recipeImage = null) { recipeImage ??= new RecipeImage(); recipeImage.FileName = recipeImageModel.FileName; @@ -26,26 +20,25 @@ private static RecipeImage ConvertToEntity(Models.RecipeImageModel recipeImageMo return recipeImage; } - private static Models.RecipeImageModel ConvertToModel(RecipeImage recipeImage) + private static RecipeImageModel ConvertToModel(RecipeImage recipeImage) { - return new Models.RecipeImageModel + return new RecipeImageModel { Id = recipeImage.Id, FileName = recipeImage.FileName, FileDescription = recipeImage.FileDescription, DisplayOrder = recipeImage.DisplayOrder, ImageData = recipeImage.ImageData, - Recipe = new Models.RecipeModel + Recipe = new RecipeModel { Id = recipeImage.Recipe?.Id ?? 0, Name = recipeImage.Recipe?.Name ?? string.Empty, Description = recipeImage.Recipe?.Description ?? string.Empty } - }; } - public void AddRecipeImage(Models.RecipeImageModel recipeImageModel) + public void AddRecipeImage(RecipeImageModel recipeImageModel) { var recipeImage = ConvertToEntity(recipeImageModel); dbContext.RecipeImage.Add(recipeImage); @@ -62,7 +55,7 @@ public void DeleteRecipeImage(int id) } } - public Models.RecipeImageModel? GetRecipeImageById(int id) + public RecipeImageModel? GetRecipeImageById(int id) { var recipeImage = dbContext.RecipeImage .Include(r => r.Recipe) @@ -70,19 +63,20 @@ public void DeleteRecipeImage(int id) return recipeImage != null ? 
ConvertToModel(recipeImage) : null; } - public IEnumerable GetRecipeImages() + public IEnumerable GetRecipeImages() { - var recipeImages = dbContext.RecipeImage + return dbContext.RecipeImage .Include(r => r.Recipe) .OrderBy(r => r.DisplayOrder) - .ToList(); - return recipeImages.Select(r => ConvertToModel(r)); + .ToList() + .Select(r => ConvertToModel(r)); } - public void UpdateRecipeImage(Models.RecipeImageModel recipeImageModel) + public void UpdateRecipeImage(RecipeImageModel recipeImageModel) { - var existingRecipeImage = dbContext.RecipeImage.SingleOrDefault(r => r.Id == recipeImageModel.Id) ?? throw new InvalidOperationException("Recipe image not found."); - ConvertToEntity(recipeImageModel, existingRecipeImage); + var existing = dbContext.RecipeImage.SingleOrDefault(r => r.Id == recipeImageModel.Id) + ?? throw new InvalidOperationException("Recipe image not found."); + ConvertToEntity(recipeImageModel, existing); dbContext.SaveChanges(); } @@ -90,22 +84,15 @@ protected virtual void Dispose(bool disposing) { if (!disposedValue) { - if (disposing) - { - if (dbContext != null) - dbContext.Dispose(); - } + if (disposing) dbContext?.Dispose(); disposedValue = true; } } - ~RecipeImageService() - { - // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method - Dispose(disposing: false); - } + + ~RecipeImageService() => Dispose(disposing: false); + public void Dispose() { - // Do not change this code. 
Put cleanup code in 'Dispose(bool disposing)' method Dispose(disposing: true); GC.SuppressFinalize(this); } diff --git a/WebSpark.Recipe/Providers/RecipeProvider.cs b/WebSpark.Recipe/Providers/RecipeProvider.cs new file mode 100644 index 00000000..33277297 --- /dev/null +++ b/WebSpark.Recipe/Providers/RecipeProvider.cs @@ -0,0 +1,341 @@ +using Microsoft.EntityFrameworkCore; +using WebSpark.Recipe.Constants; +using WebSpark.Recipe.Data; +using WebSpark.Recipe.Helpers; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; + +namespace WebSpark.Recipe.Providers; + +/// +/// Recipe Service implementation. Implements IRecipeService only. +/// IMenuProvider has been removed — portal callers use RecipeMenuAdapter in WebSpark.Portal. +/// +public class RecipeProvider(RecipeDbContext dbContext) : IRecipeService, IDisposable +{ + private bool disposedValue; + + private List Create(List? list) + { + if (list == null) return []; + return [.. list.Select(Create).OrderBy(x => x.Name)]; + } + + private List Create(List? list) + { + if (list == null) return []; + return [.. list.Select(item => Create(item)).OrderBy(x => x.Name)]; + } + + private RecipeModel Create(RecipeEntity? recipe) + { + if (recipe == null) return new RecipeModel(); + return new RecipeModel() + { + DomainID = recipe.DomainId ?? RecipeConstants.INT_MOM_DomainId, + RecipeURL = RecipeUrlHelper.GetRecipeURL(recipe.Name), + Id = recipe.Id, + Name = recipe.Name, + Ingredients = recipe.Ingredients, + Instructions = recipe.Instructions, + Description = string.IsNullOrEmpty(recipe.Description) ? recipe.Name : recipe.Description, + SEO_Keywords = recipe.Keywords, + Servings = recipe.Servings, + AuthorNM = recipe.AuthorName, + AverageRating = recipe.AverageRating, + IsApproved = recipe.IsApproved, + CommentCount = 0, + RecipeCategory = Create(recipe.RecipeCategory), + RecipeCategoryID = recipe.RecipeCategory?.Id ?? 
0, + RatingCount = recipe.RatingCount, + ViewCount = recipe.ViewCount, + LastViewDT = recipe.LastViewDt, + ModifiedDT = recipe.UpdatedDate, + }; + } + + private RecipeEntity Create(RecipeModel? recipe) + { + if (recipe == null) return new RecipeEntity(); + if (recipe.DomainID == 0) recipe.DomainID = RecipeConstants.INT_MOM_DomainId; + + var category = dbContext.RecipeCategory.FirstOrDefault(w => w.Id == recipe.RecipeCategoryID); + + return new RecipeEntity() + { + Id = recipe.Id, + Name = recipe.Name, + Ingredients = recipe.Ingredients, + Instructions = recipe.Instructions, + Keywords = recipe.SEO_Keywords, + Description = string.IsNullOrEmpty(recipe.Description) ? recipe.Name : recipe.Description, + AuthorName = recipe.AuthorNM, + AverageRating = recipe.AverageRating, + IsApproved = recipe.IsApproved, + CommentCount = recipe.CommentCount, + RatingCount = recipe.RatingCount, + ViewCount = recipe.ViewCount, + LastViewDt = recipe.LastViewDT, + DomainId = recipe.DomainID, + RecipeCategory = category ?? new RecipeCategory { Name = recipe.Name, Comment = recipe.Name }, + Servings = recipe.Servings, + CreatedDate = DateTime.UtcNow, + UpdatedDate = DateTime.UtcNow, + }; + } + + private static RecipeCategory Create(RecipeCategoryModel? s) + { + if (s == null) return new RecipeCategory(); + return new RecipeCategory() + { + DisplayOrder = s.DisplayOrder, + IsActive = s.IsActive, + Comment = s.Name, + Id = s.Id, + Name = s.Name + }; + } + + private List Create(List? list) + { + var result = new List(); + if (list == null) return result; + foreach (var item in list) + { + if (item == null) continue; + result.Add(Create(item)); + } + return result; + } + + private RecipeImageModel Create(RecipeImage? 
item) + { + if (item == null) return new RecipeImageModel(); + return new RecipeImageModel() + { + Id = item.Id, + Recipe = Create(item.Recipe), + DisplayOrder = item.DisplayOrder, + FileDescription = item.FileDescription, + FileName = item.FileName, + }; + } + + private RecipeCategoryModel Create(RecipeCategory? rc, bool loadRecipes = false) + { + if (rc == null) return new RecipeCategoryModel(); + return new RecipeCategoryModel() + { + DomainID = rc.DomainId ?? RecipeConstants.INT_MOM_DomainId, + DisplayOrder = rc.DisplayOrder, + IsActive = rc.IsActive, + Description = rc.Comment, + Id = rc.Id, + Name = rc.Name, + Url = RecipeUrlHelper.GetRecipeCategoryURL(rc.Name), + Recipes = loadRecipes ? Create(rc.Recipe?.ToList()) : [] + }; + } + + public bool Delete(int Id) + { + var item = dbContext.Recipe.FirstOrDefault(w => w.Id == Id); + if (item != null) + { + dbContext.Recipe.Remove(item); + dbContext.SaveChanges(); + return true; + } + return false; + } + + public bool Delete(RecipeCategoryModel saveItem) + { + var item = dbContext.RecipeCategory.FirstOrDefault(w => w.Id == saveItem.Id); + if (item != null) + { + dbContext.RecipeCategory.Remove(item); + dbContext.SaveChanges(); + return true; + } + return false; + } + + public IEnumerable Get() + { + var list = dbContext.Recipe + .AsNoTracking() + .Include(r => r.RecipeCategory) + .Include(i => i.RecipeImage) + .ToList(); + return Create(list); + } + + public RecipeModel Get(int Id) + { + var recipe = Create(dbContext.Recipe + .AsNoTracking() + .Where(w => w.Id == Id) + .Include(r => r.RecipeCategory) + .FirstOrDefault()); + recipe.RecipeCategories = dbContext.RecipeCategory + .AsNoTracking() + .Select(s => new RecipeOptionModel { Value = s.Id.ToString(), Text = s.Name }) + .ToList(); + return recipe; + } + + public RecipeCategoryModel GetRecipeCategoryById(int Id) + { + return Create(dbContext.RecipeCategory + .AsNoTracking() + .Include(i => i.Recipe) + .FirstOrDefault(w => w.Id == Id), loadRecipes: true); + } + + 
public List GetRecipeCategoryList() + { + return Create(dbContext.RecipeCategory.AsNoTracking().ToList()); + } + + public List GetRecipeImages() + { + return Create(dbContext.RecipeImage.Include(i => i.Recipe).ToList()); + } + + public RecipeModel Save(RecipeModel? saveItem) + { + if (saveItem == null) return new RecipeModel(); + if (saveItem.Id == 0) + { + var entity = Create(saveItem); + dbContext.Recipe.Add(entity); + dbContext.SaveChanges(); + saveItem.Id = entity.Id; + } + else + { + var entity = dbContext.Recipe + .Where(w => w.Id == saveItem.Id) + .Include(i => i.RecipeCategory) + .FirstOrDefault(); + if (entity != null) + { + if (entity.RecipeCategory == null || entity.RecipeCategory.Id != saveItem.RecipeCategoryID) + { + entity.RecipeCategory = (dbContext.RecipeCategory.FirstOrDefault(w => w.Id == saveItem.RecipeCategoryID) ?? entity.RecipeCategory)!; + } + entity.Name = saveItem.Name; + entity.AuthorName = saveItem.AuthorNM; + entity.Description = saveItem.Description; + entity.Ingredients = saveItem.Ingredients; + entity.Instructions = saveItem.Instructions; + entity.Servings = saveItem.Servings; + entity.IsApproved = saveItem.IsApproved; + dbContext.SaveChanges(); + } + } + return Get(saveItem.Id); + } + + public IEnumerable Save(List? saveRecipes) + { + if (saveRecipes == null) return []; + var current = Get(); + var categories = GetRecipeCategoryList(); + foreach (var item in saveRecipes) + { + if (item == null || string.IsNullOrWhiteSpace(item.RecipeCategoryNM)) continue; + item.Id = current.FirstOrDefault(w => w.Name == item.Name)?.Id ?? 0; + item.RecipeCategoryID = categories.FirstOrDefault(w => w.Name == item.RecipeCategoryNM)?.Id ?? 
0; + if (item.Id == 0) + { + var entity = Create(item); + entity.UpdatedDate = DateTime.UtcNow; + dbContext.Recipe.Add(entity); + dbContext.SaveChanges(); + item.Id = entity.Id; + } + else + { + var entity = dbContext.Recipe.FirstOrDefault(w => w.Id == item.Id); + if (entity != null) dbContext.SaveChanges(); + } + } + return Get(); + } + + public RecipeCategoryModel Save(RecipeCategoryModel saveItem) + { + if (saveItem == null) return new RecipeCategoryModel(); + if (saveItem.Id == 0) + { + var entity = Create(saveItem); + dbContext.RecipeCategory.Add(entity); + dbContext.SaveChanges(); + saveItem.Id = entity.Id; + } + else + { + var entity = dbContext.RecipeCategory.FirstOrDefault(w => w.Id == saveItem.Id); + if (entity != null) + { + entity.Name = saveItem.Name; + entity.Comment = saveItem.Description; + entity.DisplayOrder = saveItem.DisplayOrder; + entity.IsActive = saveItem.IsActive; + dbContext.SaveChanges(); + } + } + return GetRecipeCategoryById(saveItem.Id); + } + + public List Save(List? saveCategories) + { + if (saveCategories == null) return []; + var current = GetRecipeCategoryList(); + foreach (var item in saveCategories) + { + if (item == null || string.IsNullOrWhiteSpace(item.Name)) continue; + item.Id = current.FirstOrDefault(w => w.Name == item.Name)?.Id ?? 
0; + if (item.Id == 0) + { + var entity = Create(item); + entity.UpdatedDate = DateTime.UtcNow; + dbContext.RecipeCategory.Add(entity); + dbContext.SaveChanges(); + item.Id = entity.Id; + } + else + { + var entity = dbContext.RecipeCategory.FirstOrDefault(w => w.Id == item.Id); + if (entity != null) + { + entity.Name = item.Name; + entity.Comment = item.Description; + entity.UpdatedDate = DateTime.UtcNow; + dbContext.SaveChanges(); + } + } + } + return GetRecipeCategoryList(); + } + + protected virtual void Dispose(bool disposing) + { + if (!disposedValue) + { + if (disposing) dbContext?.Dispose(); + disposedValue = true; + } + } + + ~RecipeProvider() => Dispose(disposing: false); + + void IDisposable.Dispose() + { + Dispose(disposing: true); + GC.SuppressFinalize(this); + } +} diff --git a/WebSpark.Recipe/WebSpark.Recipe.csproj b/WebSpark.Recipe/WebSpark.Recipe.csproj new file mode 100644 index 00000000..dc3e42e4 --- /dev/null +++ b/WebSpark.Recipe/WebSpark.Recipe.csproj @@ -0,0 +1,35 @@ + + + net10.0 + Mark Hazleton + Standalone recipe domain library for the WebSpark ecosystem. Contains entities, models, interfaces, services, and RecipeDbContext. 
+ MarkHazleton.com + MarkHazleton.com + en + enable + enable + WebSpark.Recipe + https://github.com/markhazleton/WebSpark + True + + + false + build$([System.DateTime]::UtcNow.ToString("yyyyMMddHHmmss")) + 9.0.0.0 + 9.0.0.0 + 9.0.0+dev + 9.0.0 + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + diff --git a/WebSpark.RecipeCoookbook/Cookbook.cs b/WebSpark.RecipeCoookbook/Cookbook.cs index 6ca7fc7a..a43d896d 100644 --- a/WebSpark.RecipeCoookbook/Cookbook.cs +++ b/WebSpark.RecipeCoookbook/Cookbook.cs @@ -12,8 +12,8 @@ using iText.Layout.Renderer; using Microsoft.Extensions.Logging; using System.Text.RegularExpressions; -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.RecipeCookbook; diff --git a/WebSpark.RecipeCoookbook/WebSpark.RecipeCookbook.csproj b/WebSpark.RecipeCoookbook/WebSpark.RecipeCookbook.csproj index 446ae37a..76b012d3 100644 --- a/WebSpark.RecipeCoookbook/WebSpark.RecipeCookbook.csproj +++ b/WebSpark.RecipeCoookbook/WebSpark.RecipeCookbook.csproj @@ -19,6 +19,6 @@ - + \ No newline at end of file diff --git a/WebSpark.sln b/WebSpark.sln index f39a3522..9050d653 100644 --- a/WebSpark.sln +++ b/WebSpark.sln @@ -40,6 +40,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WebSpark.Portal.Tests", "te EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DocSpark", "DocSpark\DocSpark.csproj", "{A29C8377-71D2-66D6-68AC-FB1F9ACAD22A}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WebSpark.Recipe", "WebSpark.Recipe\WebSpark.Recipe.csproj", "{5310FC06-4586-4519-95F3-0436CBC2D13C}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -194,6 +196,18 @@ Global {A29C8377-71D2-66D6-68AC-FB1F9ACAD22A}.Release|x64.Build.0 = Release|Any CPU 
{A29C8377-71D2-66D6-68AC-FB1F9ACAD22A}.Release|x86.ActiveCfg = Release|Any CPU {A29C8377-71D2-66D6-68AC-FB1F9ACAD22A}.Release|x86.Build.0 = Release|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Debug|x64.ActiveCfg = Debug|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Debug|x64.Build.0 = Debug|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Debug|x86.ActiveCfg = Debug|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Debug|x86.Build.0 = Debug|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Release|Any CPU.Build.0 = Release|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Release|x64.ActiveCfg = Release|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Release|x64.Build.0 = Release|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Release|x86.ActiveCfg = Release|Any CPU + {5310FC06-4586-4519-95F3-0436CBC2D13C}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -209,6 +223,7 @@ Global {B7520185-F7A0-4D6F-B21A-B3AD8CECD4EC} = {83E5AC53-6F44-4CDB-9127-01082E0D143F} {B0FB1BAA-53FE-412A-8D3F-115ADB1D4C0F} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} {7FC45BEF-DB39-43F4-A852-5E61966338D4} = {0AB3BF05-4346-4AA6-1389-037BE0695223} + {5310FC06-4586-4519-95F3-0436CBC2D13C} = {83E5AC53-6F44-4CDB-9127-01082E0D143F} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {568FC914-C197-49F3-8CF4-FAB6FD0712B2} diff --git a/specs/006-extract-recipe-library/checklists/requirements.md b/specs/006-extract-recipe-library/checklists/requirements.md new file mode 100644 index 00000000..f2ed1f9a --- /dev/null +++ b/specs/006-extract-recipe-library/checklists/requirements.md @@ -0,0 +1,48 @@ +# Specification Quality Checklist: Extract 
WebSpark.Recipe Core Library + +**Purpose**: Validate specification completeness and quality before proceeding to planning +**Created**: 2026-05-01 +**Feature**: [spec.md](../spec.md) + +## Shared Validation Contract Checks + +- [x] Frontmatter present with all required keys (`classification`, `risk_level`, `target_workflow`, `required_artifacts`, `recommended_next_step`, `required_gates`) +- [x] `classification: full-spec` maps consistently to `target_workflow: specify-full` and `required_artifacts: spec, plan, tasks` +- [x] `required_gates` matches the full-spec route (`checklist, analyze, critic`) +- [x] Status line present using valid lifecycle state: `**Status**: Draft` +- [x] Required full-spec headings present in canonical order: `## Rationale Summary`, `## User Scenarios & Testing`, `## Requirements`, `## Success Criteria` +- [x] No unresolved placeholder text from stock template remains + +## Content Quality + +- [x] Frontmatter matches the shared validation contract +- [x] Required headings for full-spec route present in canonical order +- [x] Status line uses a valid lifecycle state (`Draft`) +- [x] No implementation details (languages, frameworks, APIs) in requirement text +- [x] Focused on user value and business needs +- [x] Written for non-technical stakeholders (with technical context where needed for scoping) +- [x] All mandatory sections completed + +## Requirement Completeness + +- [x] No `[NEEDS CLARIFICATION]` markers remain +- [x] Requirements are testable and unambiguous +- [x] Success criteria are measurable +- [x] Success criteria are technology-agnostic (no implementation details) +- [x] All acceptance scenarios are defined +- [x] Edge cases are identified (5 edge cases documented) +- [x] Scope is clearly bounded (in-scope: extract recipe code; out-of-scope: EF DbContext migration, new recipe features) +- [x] Dependencies and assumptions identified in Rationale Summary + +## Feature Readiness + +- [x] All functional requirements (FR-001 
through FR-012) have clear acceptance criteria +- [x] User scenarios (4 stories) cover primary flows: standalone consumption, WebSpark.Core backward compat, RecipeCookbook migration, PromptSpark migration +- [x] Feature meets measurable outcomes defined in Success Criteria (SC-001 through SC-006) +- [x] No implementation details leak into specification + +## Notes + +- All items pass. Spec is ready for `/devspark.plan`. +- Key risk to flag during critic review: EF Core migration compatibility when entity types move assemblies (Edge Case #2). +- `RecipeOld.cs` disposition is captured in FR-011 and should be confirmed during planning. diff --git a/specs/006-extract-recipe-library/data-model.md b/specs/006-extract-recipe-library/data-model.md new file mode 100644 index 00000000..601ed609 --- /dev/null +++ b/specs/006-extract-recipe-library/data-model.md @@ -0,0 +1,316 @@ +# Data Model: WebSpark.Recipe Core Library + +**Branch**: `006-extract-recipe-library` | **Date**: 2026-05-01 + +## Namespace Root: `WebSpark.Recipe` + +All types use `WebSpark.Recipe.*` namespaces. No `WebSpark.Core.*` namespace remains in this library. 
+ +--- + +## Entities (`WebSpark.Recipe.Data`) + +### RecipeBaseEntity +*Replaces `WebSpark.Core.Data.BaseEntity`* + +| Property | Type | Notes | +|----------|------|-------| +| `Id` | `int` | PK, auto-increment | +| `CreatedDate` | `DateTime` | Audit: creation timestamp | +| `UpdatedDate` | `DateTime` | Audit: last update timestamp | +| `CreatedID` | `int` | Audit: creator user ID | +| `UpdatedID` | `int` | Audit: last updater user ID | + +--- + +### Recipe +*Inherits `RecipeBaseEntity`* + +| Property | Type | Constraints | Notes | +|----------|------|-------------|-------| +| `Name` | `string` | Required, MaxLength 150 | Recipe title | +| `Description` | `string` | MaxLength 500 | Short description | +| `Keywords` | `string` | MaxLength 100 | SEO keywords | +| `AuthorName` | `string` | Required, MaxLength 50 | Author display name | +| `Servings` | `int` | | Number of servings | +| `Ingredients` | `string` | Required | Ingredient list (plain text/markdown) | +| `Instructions` | `string` | Required | Step-by-step instructions | +| `IsApproved` | `bool` | Default false | Moderation flag | +| `ViewCount` | `int` | Default 0 | Incrementing view counter | +| `AverageRating` | `decimal` | | Computed rating | +| `RatingCount` | `int` | Default 0 | Number of ratings | +| `CommentCount` | `int` | Default 0 | Denormalized comment count | +| `LastViewDt` | `DateTime?` | Nullable | Last viewed timestamp | +| `DomainId` | `int?` | FK (no nav), nullable | Multi-tenant domain reference | +| `RecipeCategoryId` | `int` | FK → RecipeCategory, Restrict | Category assignment | + +**Relationships**: +- `RecipeCategory` → FK, `DeleteBehavior.Restrict` +- `RecipeComment` ← 1:N, `DeleteBehavior.ClientCascade` +- `RecipeImage` ← 1:N, `DeleteBehavior.ClientCascade` +- `DomainId` → raw FK column, no EF navigation property to `WebSite` + +--- + +### RecipeCategory +*Inherits `RecipeBaseEntity`* + +| Property | Type | Constraints | Notes | +|----------|------|-------------|-------| +| `Name` 
| `string` | Required, MaxLength 70 | Category label | +| `Comment` | `string` | MaxLength 1500 | Optional notes | +| `DisplayOrder` | `int` | Default 0 | Sort order | +| `IsActive` | `bool` | Default true | Visibility flag | +| `DomainId` | `int?` | FK (no nav), nullable | Multi-tenant domain reference | + +**Relationships**: +- `Recipe` ← 1:N, `DeleteBehavior.Restrict` +- `DomainId` → raw FK column only + +--- + +### RecipeComment +*Inherits `RecipeBaseEntity`* + +| Property | Type | Constraints | Notes | +|----------|------|-------------|-------| +| `Name` | `string` | Required, MaxLength 60 | Commenter display name | +| `Email` | `string` | Required, MaxLength 50 | Commenter email (not displayed) | +| `Comment` | `string` | Required | Comment body | +| `RecipeId` | `int` | FK → Recipe, ClientCascade | Parent recipe | + +--- + +### RecipeImage +*Inherits `RecipeBaseEntity`* + +| Property | Type | Constraints | Notes | +|----------|------|-------------|-------| +| `FileName` | `string` | Required, MaxLength 50 | Stored file name | +| `FileDescription` | `string` | MaxLength 255 | Alt text / caption | +| `DisplayOrder` | `int` | Default 0 | Sort order for gallery | +| `ImageData` | `byte[]?` | Nullable | Binary image data | +| `RecipeId` | `int` | FK → Recipe, ClientCascade | Parent recipe | + +--- + +## DbContext (`WebSpark.Recipe.Data`) + +### RecipeDbContext + +| DbSet | Type | Notes | +|-------|------|-------| +| `Recipe` | `DbSet` | Main recipe table | +| `RecipeCategory` | `DbSet` | Category lookup | +| `RecipeComment` | `DbSet` | User comments | +| `RecipeImage` | `DbSet` | Recipe images | + +**OnModelCreating configuration mirrors the existing recipe configuration from `WebSparkDbContext`** — all constraints, FK relationships, and cascade behaviors are identical; `WebSite` navigation is removed. 
+ +**Migration assembly**: `WebSpark.Recipe` + +--- + +## Models (`WebSpark.Recipe.Models`) + +### RecipeModel +*Business-layer recipe representation* + +| Property | Type | Notes | +|----------|------|-------| +| `Id` | `int` | | +| `Name` | `string` | Required | +| `Description` | `string` | Required | +| `AuthorNM` | `string` | Required | +| `Servings` | `int` | | +| `Ingredients` | `string` | | +| `Instructions` | `string` | | +| `SEO_Keywords` | `string` | | +| `AverageRating` | `decimal` | | +| `RatingCount` | `int` | | +| `CommentCount` | `int` | | +| `ViewCount` | `int` | | +| `LastViewDT` | `DateTime?` | | +| `ModifiedDT` | `DateTime?` | | +| `ModifiedID` | `int` | | +| `IsApproved` | `bool` | | +| `RecipeURL` | `string` | Generated via `RecipeUrlHelper.GetRecipeURL(Name)` | +| `RecipeCategoryID` | `int` | Required | +| `RecipeCategoryNM` | `string` | | +| `RecipeCategory` | `RecipeCategoryModel?` | Nested | +| `Images` | `List` | | +| `RecipeCategories` | `List` | Dropdown options (replaces `List`) | +| `DomainID` | `int` | Defaults to `RecipeConstants.INT_MOM_DomainId` | +| `FileDescription` | `string` | | +| `FileName` | `string` | | + +--- + +### RecipeCategoryModel + +| Property | Type | Notes | +|----------|------|-------| +| `Id` | `int` | | +| `Name` | `string` | Required, MaxLength 50 | +| `Description` | `string` | | +| `DisplayOrder` | `int` | | +| `IsActive` | `bool` | | +| `Recipes` | `List` | | +| `Url` | `string` | Generated via `RecipeUrlHelper.GetRecipeCategoryURL(Name)` | +| `DomainID` | `int` | Defaults to `RecipeConstants.INT_MOM_DomainId` | + +--- + +### RecipeImageModel + +| Property | Type | Notes | +|----------|------|-------| +| `Id` | `int` | | +| `FileName` | `string` | | +| `FileDescription` | `string` | | +| `DisplayOrder` | `int` | | +| `ImageData` | `byte[]?` | | +| `Recipe` | `RecipeModel?` | Nested parent | + +--- + +### RecipeOptionModel +*Replaces `WebSpark.Core.Models.LookupModel` for recipe dropdown usage* + +| Property 
| Type | Notes | +|----------|------|-------| +| `Value` | `string` | Option value | +| `Text` | `string` | Display text | +| `IsDefault` | `bool` | Whether this is the default selection | +| `IsSelected` | `bool` | Whether currently selected | + +--- + +## Edit Models (`WebSpark.Recipe.Models.EditModels`) + +### RecipeEditModel +*Inherits `RecipeModel`; adds UI-specific fields* + +| Property | Type | Notes | +|----------|------|-------| +| *(inherits RecipeModel)* | | | +| `Categories` | `List` | Populated for edit form dropdowns | + +*Copy constructor*: `RecipeEditModel(RecipeModel source)` + +--- + +### RecipeCategoryEditModel +*Inherits `RecipeCategoryModel`* + +*Copy constructor*: `RecipeCategoryEditModel(RecipeCategoryModel source)` + +--- + +## Interfaces (`WebSpark.Recipe.Interfaces`) + +### IRecipeService + +```csharp +public interface IRecipeService +{ + bool Delete(int Id); + bool Delete(RecipeCategoryModel saveItem); + RecipeModel Get(int Id); + RecipeCategoryModel GetRecipeCategoryById(int Id); + List GetRecipeCategoryList(); + IEnumerable Get(); + RecipeModel Save(RecipeModel saveItem); + RecipeModel Save(List saveItems); + RecipeCategoryModel Save(RecipeCategoryModel saveItem); + List Save(List saveItems); + List GetRecipeImages(); + // NOTE: GetRecipeVMHostAsync removed — RecipeVM moved to WebSpark.Portal +} +``` + +### IRecipeImageService + +```csharp +public interface IRecipeImageService +{ + void AddRecipeImage(RecipeImageModel recipeImageModel); + void DeleteRecipeImage(int id); + RecipeImageModel? 
GetRecipeImageById(int id); + IEnumerable GetRecipeImages(); + void UpdateRecipeImage(RecipeImageModel recipeImageModel); +} +``` + +--- + +## Helpers (`WebSpark.Recipe.Helpers`) + +### RecipeUrlHelper +*Replaces recipe-specific methods from `WebSpark.Core.Infrastructure.FormatHelper`* + +```csharp +public static class RecipeUrlHelper +{ + public static string GetSafePath(string name) // Slugify name for URL + public static string GetRecipeURL(string recipeName) // Returns "/recipe/{slug}" + public static string GetRecipeCategoryURL(string name) // Returns "/recipe/category/{slug}" +} +``` + +### RecipeHelper +*Moved from `WebSpark.Core.Helpers.RecipeHelper`* + +```csharp +public static class RecipeHelper +{ + public static RecipeCategory GetRecipeCategory(string categoryName, int displayOrder) + public static Recipe GetRecipe(int domainId, string name, string authorName, + string description, string ingredients, string instructions, + RecipeCategory category, string keyWords = "") +} +``` + +*Note*: `GetRecipe` signature updated — takes `int domainId` instead of `WebSite domain` to remove `WebSite` entity dependency. 
+ +--- + +## Constants (`WebSpark.Recipe.Constants`) + +### RecipeConstants + +```csharp +public static class RecipeConstants +{ + public const int INT_MOM_DomainId = 2; +} +``` + +--- + +## Types NOT Moving to WebSpark.Recipe + +| Type | Location After Extraction | Reason | +|------|--------------------------|--------| +| `RecipeVM` | `WebSpark.Portal` (Areas/RecipeSpark/Models) | Presentation-layer; inherits `WebsiteVM` from `WebSpark.Core` | +| `IMenuProvider` | Stays in `WebSpark.Core` | Non-recipe interface | +| `RecipeOld` | Deleted (after usage scan) | Dead code | +| Menu methods from `RecipeProvider` | `RecipeMenuAdapter` in `WebSpark.Portal` | Presentation/menu concern | + +--- + +## Entity Relationship Diagram + +``` +WebSite (Domain) [in WebSparkDbContext - NOT in RecipeDbContext] + | + | DomainId (raw FK, no navigation) + | +RecipeCategory ──────────────────────── Recipe +(1) (N) + | + ┌───────────────┤ + │ │ + RecipeComment RecipeImage + (cascade del) (cascade del) +``` diff --git a/specs/006-extract-recipe-library/gates/analyze.md b/specs/006-extract-recipe-library/gates/analyze.md new file mode 100644 index 00000000..955fe220 --- /dev/null +++ b/specs/006-extract-recipe-library/gates/analyze.md @@ -0,0 +1,120 @@ +```yaml +gate: analyze +status: pass +blocking: false +severity: info +summary: "5 findings — all resolved (2026-05-01). A1: T047a added for GetRecipeVMHostAsync migration. A2: T050 expanded + T050a added for DI validation. A3: T066 expanded with both migration paths. A4: Plan.md namespace standardized. A5: T001 + T067 updated for baseline tracking. Implementation may proceed." 
+``` + +## Specification Analysis Report + +**Analysis Date**: 2026-05-01 +**Artifacts**: spec.md, plan.md, tasks.md, data-model.md, research.md +**Constitution**: .documentation/memory/constitution.md + +--- + +| ID | Category | Severity | Location(s) | Summary | Recommendation | +|----|----------|----------|-------------|---------|----------------| +| A1 | Coverage Gap | HIGH | tasks.md T047–T048 | `RecipeVM` move (T047) and `RecipeMenuAdapter` creation (T048) are in Phase 4 (US2), but `GetRecipeVMHostAsync` removal from `IRecipeService` is only mentioned in data-model.md — no explicit task validates that all Portal callers of `GetRecipeVMHostAsync` are updated | Add a task between T047 and T048: scan Portal for all calls to `GetRecipeVMHostAsync` and update them to use the new `RecipeVM` construction path via `RecipeMenuAdapter` | +| A2 | Coverage Gap | HIGH | tasks.md, plan.md | `IMenuProvider` registration: plan.md shows `AddScoped()` but the existing `IMenuProvider` registration in Portal (previously `RecipeProvider`) must be explicitly deregistered/replaced — no task validates only one `IMenuProvider` is registered | Add assertion task: after T050 (Program.cs update), verify exactly one `IMenuProvider` registration exists; document any conflicting registrations found | +| A3 | Underspecification | MEDIUM | spec.md FR-007, tasks.md T015 | Migration strategy for existing databases is documented in plan.md but no task in tasks.md explicitly covers the existing-database migration path (the `__EFMigrationsHistory` insert); T066 is too vague ("apply migration") | Expand T066 to explicitly cover both paths: (a) new database via `dotnet ef database update`, (b) existing database via `__EFMigrationsHistory` row insert with exact SQL | +| A4 | Terminology Drift | MEDIUM | research.md R-002, tasks.md T048 | research.md calls the adapter "RecipeMenuAdapter" in `WebSpark.Portal.Areas.RecipeSpark.Services`; tasks.md T048 says the same; plan.md uses `RecipeMenuAdapter` in 
`WebSpark.Portal.Services`. Namespace differs slightly | Standardize on `WebSpark.Portal.Areas.RecipeSpark.Services.RecipeMenuAdapter` per plan.md Phase 1 design contracts; update plan.md namespace reference | +| A5 | Underspecification | LOW | spec.md SC-002, tasks.md T067 | SC-002 says "at least 50% smaller by package count" but the baseline `WebSpark.Core` package count is not recorded anywhere — the pass/fail criterion for T067 is ambiguous without the baseline | Record `WebSpark.Core` package count before extraction begins (add step in T001 or T065 preamble); compare against `WebSpark.Recipe` count at T067 | + +--- + +**Coverage Summary Table** + +| Requirement Key | Has Task? | Task IDs | Notes | +|-----------------|-----------|----------|-------| +| FR-001: Create WebSpark.Recipe project | YES | T001–T005 | Full coverage | +| FR-002: No WebSpark.Core reference in WebSpark.Recipe | YES | T028 | Verification task present | +| FR-003: EF Core only; no infra packages | YES | T067 | Verification task present | +| FR-004: Type/signature parity; WebSpark.Recipe.* namespaces | YES | T009–T027 | All types created; T052 updates usings | +| FR-005: WebSpark.Core cleaned; no reference to WebSpark.Recipe | YES | T031–T046 | Full coverage | +| FR-006: RecipeDbContext with recipe DbSets | YES | T013 | Covered | +| FR-007: EF Core migrations established; existing DB preserved | PARTIAL | T015, T066 | T066 needs expansion (A3 above) | +| FR-008: RecipeCookbook updated | YES | T055–T058 | Full coverage | +| FR-009: PromptSpark projects updated | YES | T059–T064 | Full coverage | +| FR-010: Portal updated with RecipeDbContext | YES | T049–T053 | Full coverage | +| FR-011: Full solution zero errors/warnings | YES | T065 | Covered | +| FR-012: RecipeOld.cs deleted | YES | T030 | Covered with usage scan gate | +| FR-013: RecipeVM to Portal | PARTIAL | T047 | Missing caller-update task (A1 above) | +| FR-014: Template-driven csproj | YES | T002 | Covered | +| FR-015: NuGet 
publication configured | YES | T002, T072 | Covered | + +--- + +**Constitution Alignment Issues**: None — all constitution principles pass per plan.md Constitution Check. + +**Unmapped Tasks**: None — all 72 tasks map to at least one functional requirement or success criterion. + +--- + +**Metrics**: + +- Total Requirements: 15 (FR-001 through FR-015) +- Total Tasks: 72 +- Coverage %: 100% (all FRs have at least one task) +- Ambiguity Count: 1 (SC-002 baseline not recorded) +- Duplication Count: 0 +- Critical Issues Count: 0 + +--- + +## Next Actions + +No critical issues block implementation. Two HIGH findings should be addressed before starting Phase 4: + +1. **A1**: Add task for `GetRecipeVMHostAsync` caller migration in Portal (insert after T047) +2. **A2**: Add task to validate single `IMenuProvider` registration after T050 + +LOW/MEDIUM findings (A3, A4, A5) are improvement suggestions; address during polish phase. + +**Recommended**: Proceed to `/devspark.critic` for risk assessment, then implement. + +--- + +```yaml +findings: + - finding_id: analyze-A1 + severity: high + description: "GetRecipeVMHostAsync is removed from IRecipeService per data-model.md but no task explicitly migrates all Portal callers of this method to the new RecipeVM construction path via RecipeMenuAdapter." + recommended_action: "Insert task after T047: scan WebSpark.Portal for GetRecipeVMHostAsync calls; update each caller to build RecipeVM directly or via RecipeMenuAdapter." + execution_mode: manual + status: open + outcome: "" + + - finding_id: analyze-A2 + severity: high + description: "The existing IMenuProvider registration in WebSpark.Portal (previously pointing to RecipeProvider) must be replaced by RecipeMenuAdapter. No task validates that only one IMenuProvider registration exists after the Program.cs update." + recommended_action: "Add assertion step to T050: after DI registration, verify no duplicate IMenuProvider registrations; document and resolve any conflicts." 
+ execution_mode: selective + status: open + outcome: "" + + - finding_id: analyze-A3 + severity: medium + description: "T066 says 'apply RecipeDbContext migration' but does not distinguish between new-database (dotnet ef database update) and existing-database (__EFMigrationsHistory insert) paths. An implementer unfamiliar with the plan could miss the existing-database case." + recommended_action: "Expand T066 to include both paths explicitly with the SQL INSERT statement from plan.md deployment runbook." + execution_mode: auto + status: open + outcome: "" + + - finding_id: analyze-A4 + severity: medium + description: "RecipeMenuAdapter namespace is 'WebSpark.Portal.Areas.RecipeSpark.Services' in research.md and tasks.md but 'WebSpark.Portal.Services' in plan.md. Minor drift." + recommended_action: "Standardize on WebSpark.Portal.Areas.RecipeSpark.Services across all artifacts; update plan.md." + execution_mode: auto + status: open + outcome: "" + + - finding_id: analyze-A5 + severity: low + description: "SC-002 requires WebSpark.Recipe to be at least 50% smaller by package count than WebSpark.Core, but the WebSpark.Core baseline package count is not recorded, making T067 hard to validate." + recommended_action: "Record WebSpark.Core package count before extraction (add to T001 preamble or T065 notes); compare at T067." 
+ execution_mode: auto + status: open + outcome: "" +``` diff --git a/specs/006-extract-recipe-library/gates/baseline-package-count.txt b/specs/006-extract-recipe-library/gates/baseline-package-count.txt new file mode 100644 index 00000000..f97e9972 --- /dev/null +++ b/specs/006-extract-recipe-library/gates/baseline-package-count.txt @@ -0,0 +1 @@ +WebSpark.Core baseline package count (dotnet list package output lines): 30 diff --git a/specs/006-extract-recipe-library/gates/critic.md b/specs/006-extract-recipe-library/gates/critic.md new file mode 100644 index 00000000..47f1258e --- /dev/null +++ b/specs/006-extract-recipe-library/gates/critic.md @@ -0,0 +1,212 @@ +```yaml +gate: critic +status: pass +blocking: false +severity: info +summary: "GREEN — 0 showstoppers. All 9 risks resolved (2026-05-01). C1: T066 expanded with schema diff + idempotent verification. C2: T047a added for GetRecipeVMHostAsync callers. C3: T050 replace-not-append + T050a validation. H1: T073 rollback runbook. H2: T069a integration smoke test. H3: T025 caller grep. H4: T061 build-before-remove guard. M1: T051 all environments + Key Vault. M2: T050 health check. Implementation may proceed." +``` + +## Technical Risk Assessment + +**Analysis Date**: 2026-05-01 +**Risk Posture**: YELLOW +**Detected Stack**: C# 13 / .NET 10.0 + Entity Framework Core 10.0.7 (SQLite) + ASP.NET Core (WebSpark.Portal) + +### Executive Summary + +The extraction plan is architecturally sound with no circular dependencies and no constitution violations. The primary production risk is the EF Core migration strategy for existing databases — inserting directly into `__EFMigrationsHistory` without careful coordination can corrupt the migration history and block future schema changes. Two additional critical risks involve the `RecipeMenuAdapter` contract coverage and the `IRecipeService` interface incompatibility introduced by removing `GetRecipeVMHostAsync`. 
These are resolvable during implementation with the mitigations below. No showstoppers present. + +--- + +### Showstopper Risks + +*None identified.* + +--- + +### Critical Risks (High Probability of Costly Issues) + +| ID | Category | Location | Risk Description | Likely Impact | Recommended Action | +|----|----------|----------|-----------------|---------------|--------------------| +| C1 | Deployment / Data | plan.md migration runbook, T066 | `__EFMigrationsHistory` manual insert strategy is fragile. If the InitialCreate migration creates different table/column names than what exists (e.g., column constraints differ), EF will mark it applied but future `dotnet ef migrations add` commands will generate spurious diffs or fail on `dotnet ef database update` | Future migrations generate incorrect diff; schema drift between code and DB | Before inserting history row, run `dotnet ef script --context RecipeDbContext --idempotent` and diff against live schema; only insert history row if schema matches exactly; add this verification to T066 | +| C2 | Breaking API Change | spec.md, data-model.md IRecipeService, T027 | `IRecipeService` removes `GetRecipeVMHostAsync(string host, WebsiteVM baseVM)`. Any Portal controller, Razor page, or service that calls this method will throw `MissingMethodException` at runtime (not caught at compile time if called via interface). The analyze gate (A1) flagged this as a task gap. 
| Silent runtime failure in recipe browsing paths; 500 errors for users viewing recipes | Before T027, grep all Portal source for `GetRecipeVMHostAsync` calls; implement replacement logic in RecipeMenuAdapter or Portal controller; add integration test for recipe browse path | +| C3 | DI Configuration | tasks.md T050, plan.md DI registration | If `WebSpark.Portal` currently registers `IMenuProvider → RecipeProvider` (the old implementation), removing `RecipeProvider` from Core and replacing with `RecipeMenuAdapter` without explicitly removing the old registration can result in a DI resolution exception at startup, or the old provider staying registered via a module/extension method not covered by T050 | Portal startup crash after deployment | Audit all extension methods and middleware registrations in Portal that reference `IMenuProvider` or `RecipeProvider`; ensure T050 replaces, not appends, the registration | + +--- + +### High-Priority Concerns + +| ID | Category | Location | Issue | Impact | Suggestion | +|----|----------|----------|-------|--------|------------| +| H1 | Rollback Strategy | plan.md, tasks.md | No rollback task exists. If migration or compilation fails mid-extraction (e.g., after WebSpark.Core recipe files are deleted but before Portal compiles), the solution is in a broken state with no documented recovery path | Developer time lost; broken main if accidentally merged | Add a rollback task to Phase 7: document git revert steps; use feature branch isolation (already done) as primary safety net; do not merge until T065 passes | +| H2 | Test Coverage Gap | tasks.md T069, spec.md SC-006 | The existing tests in `tests\WebSpark.Portal.Tests\Areas\RecipeSpark\` primarily cover `RecipeChatApiController` and `RecipeChatContext` (from Spec 005) — not core recipe CRUD via the new `RecipeProvider` + `RecipeDbContext`. 
SC-006 requires 100% pass rate but these tests may not exercise the extraction path at all | False confidence — tests pass but `RecipeProvider` with `RecipeDbContext` is untested | Before T069, manually verify which tests actually exercise `RecipeProvider` and `RecipeDbContext`; if none exist, add at least one smoke test for recipe list and recipe detail retrieval via `RecipeDbContext` | +| H3 | RecipeHelper Signature Break | data-model.md, T025 | `RecipeHelper.GetRecipe` changes signature from `(WebSite domain, ...)` to `(int domainId, ...)`. Any callers using the `WebSite` parameter version will fail to compile. Callers must be identified and updated as part of T025 | Compilation failure in consumers of `RecipeHelper.GetRecipe` | Add grep step to T025: `grep -r "RecipeHelper.GetRecipe" --include="*.cs"` across solution; update all callers to use `int domainId` overload | +| H4 | PromptSpark.Domain WebSpark.Core Dependency | tasks.md T061 | T061 says "remove `WebSpark.Core` project reference if recipe types were its only use" — but `PromptSpark.Domain` has 7 `Microsoft.CodeAnalysis.*` packages and `Microsoft.SemanticKernel`. It may use non-recipe types from `WebSpark.Core` (logging, HTTP client utilities). 
Blindly removing `WebSpark.Core` will break compilation | `PromptSpark.Domain` fails to build | Before removing, run `dotnet build PromptSpark.Domain` after adding `WebSpark.Recipe` but BEFORE removing `WebSpark.Core`; only remove `WebSpark.Core` if build still passes without it | + +--- + +### Framework-Specific Red Flags (ASP.NET Core + EF Core) + +- [x] EF Core scoped `DbContext` registered correctly — plan.md shows `AddDbContext` which is scoped by default ✅ +- [x] `AsNoTracking()` used in read-only queries — `RecipeProvider` uses it per constitution standard ✅ +- [ ] **Missing**: `RecipeDbContext` connection string `RecipeConnection` not validated to exist in all environment-specific `appsettings` files (Development, Production, Staging) — T051 only covers Development +- [ ] **Missing**: No health check registered for `RecipeDbContext` — constitution requires health checks; `WebSpark.Portal` should add `services.AddHealthChecks().AddDbContextCheck()` +- [x] Migrations assembly specified in `AddDbContext` call ✅ +- [x] `RecipeDbContextFactory` included for EF design-time tooling ✅ + +--- + +### Architecture Red Flags + +- [x] No over-engineering — single library, not three +- [x] No under-engineering — `RecipeDbContext` is correctly isolated +- [ ] **Concern**: `WebSpark.RecipeCookbook` has `MailKit 4.16.0` as a dependency — if `MailKit` was previously accessed via `WebSpark.Core` (which also has it), replacing `WebSpark.Core` with `WebSpark.Recipe` may break `MailKit`-dependent code in `RecipeCookbook`. 
Verify `RecipeCookbook` uses MailKit directly (has its own package reference) not transitively through `WebSpark.Core` +- [x] No circular dependencies in the proposed structure + +--- + +### Missing Critical Tasks + +- **Observability**: No task to add `AddHealthChecks().AddDbContextCheck()` to Portal (constitution requirement) +- **Operations**: No task to update deployment pipeline (if any CI/CD) to handle dual `DbContext` database update commands +- **Testing**: No task to add `RecipeConnection` to test project configuration (`WebSpark.Portal.Tests` `appsettings.json`) +- **Security**: No task to validate `RecipeConnection` is sourced from Key Vault / environment variable (not hardcoded) per constitution Azure-First standard + +--- + +### Questionable Assumptions + +1. **"WebSpark.RecipeCookbook only uses WebSpark.Core for recipe types"** → If `RecipeCookbook` uses any non-recipe types from `WebSpark.Core` (e.g., logging helpers, format utilities), removing the `WebSpark.Core` reference will break its build. T056 has a conditional note but the verification step is not explicit. + +2. **"PromptSpark.Domain uses WebSpark.Core only for recipe types"** → `PromptSpark.Domain` is a heavy AI/Semantic Kernel library. It almost certainly uses more than recipe types from `WebSpark.Core`. T061's conditional removal may result in keeping `WebSpark.Core` — which is acceptable but should be explicitly documented as an outcome. + +3. **"RecipeDbContext schema exactly matches WebSparkDbContext recipe schema"** → The `OnModelCreating` configuration must be transcribed precisely. Any difference in column names, constraints, or cascade behaviors will create a schema mismatch between the new `RecipeDbContext` and the existing database. This is the most likely source of production defects. 
+ +--- + +### Dependencies Risk Assessment + +| Dependency | Concern | Alternative to Consider | +|------------|---------|------------------------| +| EF Core 10.0.7 SQLite | New `RecipeDbContext` must reproduce exact schema of `WebSparkDbContext` recipe tables | Use `dotnet ef script` to compare generated DDL against actual DB schema before marking migration applied | +| IMenuProvider (WebSpark.Core) | `RecipeMenuAdapter` must implement all 5 interface methods correctly; any missed method throws `NotImplementedException` at runtime | Review all `IMenuProvider` method call sites in Portal before writing `RecipeMenuAdapter` | +| MailKit in RecipeCookbook | May be transitive through `WebSpark.Core`; unclear if `RecipeCookbook.csproj` has direct reference | Check `RecipeCookbook.csproj` directly — if `MailKit` is absent, it was transitive; add explicit reference before removing `WebSpark.Core` | + +--- + +### Estimated Technical Debt at Launch + +- **Code Debt**: `GetRecipeVMHostAsync` removal creates a gap in `RecipeVM` construction — caller logic must be verified and replaced +- **Operational Debt**: Manual `__EFMigrationsHistory` insert on existing databases — needs runbook and CI/CD integration +- **Documentation Debt**: `RecipeMenuAdapter` contract needs XML doc comments for the 5 adapted methods +- **Testing Debt**: No integration tests for `RecipeProvider` + `RecipeDbContext` path; relies entirely on existing `RecipeChatApiControllerTests` which test a different context + +--- + +### Metrics + +- Showstopper Count: 0 +- Critical Risk Count: 3 +- Missing Operational Tasks: 4 (health check, CI/CD migration, test config, Key Vault validation) +- Underspecified Security Requirements: 1 (RecipeConnection Key Vault sourcing) +- Schema Drift Risks: 1 (RecipeDbContext vs existing DB) + +--- + +## GO/NO-GO RECOMMENDATION + +``` +[ ] STOP - Showstoppers present, cannot proceed to implementation +[X] CONDITIONAL - Fix critical risks first, then reassess +[ ] PROCEED WITH 
CAUTION - Document acknowledged risks, add mitigation tasks +``` + +**Required Actions Before Implementation**: + +1. **C1 — Migration safety**: Add schema comparison step to T066; do not insert `__EFMigrationsHistory` row without verifying schema match via `dotnet ef script --idempotent` +2. **C2 — GetRecipeVMHostAsync callers**: Add grep + update task between T047 and T048 (already flagged as analyze-A1) +3. **C3 — DI registration audit**: Add explicit DI audit to T050; confirm only one `IMenuProvider` registration exists post-update + +**Recommended Risk Mitigations**: + +- Add task for `AddHealthChecks().AddDbContextCheck()` in Portal (constitution compliance) +- Add task to verify `MailKit` direct reference in `RecipeCookbook` before removing `WebSpark.Core` +- Add task to add `RecipeConnection` to `WebSpark.Portal.Tests` test configuration +- Explicitly document that `PromptSpark.Domain` may retain its `WebSpark.Core` reference if non-recipe types are used + +--- + +```yaml +findings: + - finding_id: critic-C1 + severity: critical + description: "Manual __EFMigrationsHistory insert strategy is fragile. If the InitialCreate migration DDL differs from the live schema (column constraints, defaults), marking it applied creates a permanent schema drift that corrupts future migration generation." + recommended_action: "Before inserting history row, run 'dotnet ef script --context RecipeDbContext --idempotent' and diff the output against the actual database schema. Only proceed if schemas match exactly." + execution_mode: manual + status: open + outcome: "" + + - finding_id: critic-C2 + severity: critical + description: "IRecipeService removes GetRecipeVMHostAsync which is likely called by Portal controllers. This removal is a runtime breaking change — callers will throw MissingMethodException and produce 500 errors for recipe browse pages if not updated." 
+ recommended_action: "Before T027, grep all Portal source for GetRecipeVMHostAsync; implement replacement logic; add integration test for recipe browse path." + execution_mode: manual + status: open + outcome: "" + + - finding_id: critic-C3 + severity: critical + description: "If Portal currently registers IMenuProvider pointing to the old RecipeProvider, removing RecipeProvider from Core without replacing the DI registration can cause a startup crash or silent IMenuProvider resolution failure." + recommended_action: "Audit all Portal DI registrations for IMenuProvider and RecipeProvider before T050; ensure RecipeMenuAdapter replaces the existing registration, not appends to it." + execution_mode: manual + status: open + outcome: "" + + - finding_id: critic-H1 + severity: high + description: "No rollback task exists. If extraction fails mid-way (after recipe files deleted from Core, before Portal compiles), the solution is broken with no documented recovery path." + recommended_action: "Add rollback documentation task to Phase 7; rely on feature branch isolation; do not merge until T065 (full build) passes." + execution_mode: manual + status: open + outcome: "" + + - finding_id: critic-H2 + severity: high + description: "Existing tests in RecipeSpark area cover RecipeChatApiController (Spec 005), not RecipeProvider + RecipeDbContext. SC-006 (100% pass rate) will pass but does not validate the extraction path." + recommended_action: "Before T069, verify which tests exercise RecipeProvider/RecipeDbContext; add at least one smoke test for recipe list and detail via the new RecipeDbContext." + execution_mode: selective + status: open + outcome: "" + + - finding_id: critic-H3 + severity: high + description: "RecipeHelper.GetRecipe signature changes from (WebSite domain, ...) to (int domainId, ...). All callers must be updated or they will fail to compile." 
+ recommended_action: "Add grep step to T025: grep -r 'RecipeHelper.GetRecipe' across solution; update all callers to use int domainId overload." + execution_mode: auto + status: open + outcome: "" + + - finding_id: critic-H4 + severity: high + description: "T061 conditionally removes WebSpark.Core from PromptSpark.Domain 'if recipe types were the only use', but PromptSpark.Domain has heavy AI/Semantic Kernel dependencies that may also use WebSpark.Core types. Blind removal may break the build." + recommended_action: "Build PromptSpark.Domain with WebSpark.Recipe added but WebSpark.Core retained; then attempt removing WebSpark.Core; only remove if build still passes." + execution_mode: selective + status: open + outcome: "" + + - finding_id: critic-M1 + severity: medium + description: "RecipeConnection string not validated across all appsettings environments. T051 only adds to Development. Production/Staging configurations may be missing the connection string." + recommended_action: "Add RecipeConnection to all appsettings variants (Development, Production, Staging); validate Key Vault has a matching secret per constitution Azure-First standard." + execution_mode: auto + status: open + outcome: "" + + - finding_id: critic-M2 + severity: medium + description: "No health check registered for RecipeDbContext. Constitution requires health check endpoints for all data stores. Missing health check means Azure App Service health probes cannot detect RecipeDbContext connectivity failures." + recommended_action: "Add services.AddHealthChecks().AddDbContextCheck() to WebSpark.Portal Program.cs as part of T050." 
+ execution_mode: auto + status: open + outcome: "" +``` diff --git a/specs/006-extract-recipe-library/gates/unified-issues.md b/specs/006-extract-recipe-library/gates/unified-issues.md new file mode 100644 index 00000000..2187882f --- /dev/null +++ b/specs/006-extract-recipe-library/gates/unified-issues.md @@ -0,0 +1,71 @@ +# Unified Issues: Extract WebSpark.Recipe Core Library + +**Sources**: analyze.md + critic.md +**Date**: 2026-05-01 +**Review & Resolution Date**: 2026-05-01 +**Overall Gate Status**: GREEN — All 14 issues resolved; implementation may proceed + +--- + +## Resolution Summary + +| # | ID | Severity | Title | Resolution | Artifact(s) Updated | +|---|----|----------|-------|-----------|---------------------| +| 1 | critic-C1 + analyze-A3 | CRITICAL + MEDIUM | Migration schema drift; T066 both paths missing | Expanded T066 with schema diff step (idempotent script + sqlite3 comparison), both migration paths, and artifact output | tasks.md T066 | +| 2 | critic-C2 + analyze-A1 | CRITICAL + HIGH | `GetRecipeVMHostAsync` removal breaks Portal callers | Updated T024 with XML remarks note; inserted T047a (grep + migrate all callers) | tasks.md T024, T047a; spec.md IRecipeService Key Entity | +| 3 | critic-C3 + analyze-A2 | CRITICAL + HIGH | IMenuProvider DI registration not replaced | Expanded T050 with explicit remove-before-add instruction and health check; inserted T050a validation task | tasks.md T050, T050a; plan.md DI Registration | +| 4 | critic-H1 | HIGH | No rollback task | Added T073 with three-scenario rollback runbook | tasks.md T073 | +| 5 | critic-H2 | HIGH | Tests don't cover RecipeProvider + RecipeDbContext | Inserted T069a (RecipeProviderIntegrationTests); updated T069 to require T069a first; updated SC-006 | tasks.md T069a, T069; spec.md SC-006 | +| 6 | critic-H3 | HIGH | RecipeHelper.GetRecipe signature change — callers not identified | Expanded T025 with grep scan + compile-verify for each call site | tasks.md T025 | +| 7 | critic-H4 | 
HIGH | PromptSpark.Domain may use WebSpark.Core for non-recipe types | Updated T061 with build-before-remove guard: add WebSpark.Recipe, build, then remove WebSpark.Core, build again — only remove if clean | tasks.md T061 | +| 8 | critic-M1 | MEDIUM | RecipeConnection missing from all environments + Key Vault | Expanded T051 to cover all appsettings variants + Key Vault note; added connection string templates to plan.md | tasks.md T051; plan.md | +| 9 | critic-M2 | MEDIUM | No health check for RecipeDbContext | Added health check registration to T050; added to plan.md DI Registration section | tasks.md T050; plan.md | +| 10 | analyze-A3 | MEDIUM | T066 only one migration path | Merged into critic-C1 resolution — T066 now covers both paths explicitly | tasks.md T066 | +| 11 | analyze-A4 | MEDIUM | RecipeMenuAdapter namespace inconsistency | Standardized on `WebSpark.Portal.Areas.RecipeSpark.Services` in plan.md DI / RecipeMenuAdapter section | plan.md | +| 12 | analyze-A5 | LOW | SC-002 baseline not recorded | Added baseline recording to T001; T067 now reads baseline file and documents both counts | tasks.md T001, T067 | + +--- + +## Final Gate Status + +| Severity | Original Count | Resolved | Remaining | +|----------|---------------|----------|-----------| +| CRITICAL | 3 | 3 | 0 | +| HIGH | 6 | 6 | 0 | +| MEDIUM | 4 | 4 | 0 | +| LOW | 1 | 1 | 0 | +| **Total** | **14** | **14** | **0** | + +**Verdict**: GREEN — All issues resolved. Implementation may proceed starting at Phase 1. 
+ +--- + +## New Tasks Added by Resolution + +| Task | Phase | Resolves | +|------|-------|---------| +| T001 (expanded) | 1 | analyze-A5 — baseline package count recording | +| T024 (expanded) | 3 | critic-C2 — XML remarks on GetRecipeVMHostAsync removal | +| T025 (expanded) | 3 | critic-H3 — grep + compile-verify RecipeHelper callers | +| T047a (new) | 4 | critic-C2 + analyze-A1 — GetRecipeVMHostAsync caller migration | +| T050 (expanded) | 4 | critic-C3 + analyze-A2 + critic-M2 — DI replace-not-append + health check | +| T050a (new) | 4 | critic-C3 + analyze-A2 — DI registration validation | +| T051 (expanded) | 4 | critic-M1 — all environments + Key Vault | +| T061 (expanded) | 6 | critic-H4 — build-before-remove guard | +| T066 (expanded) | 7 | critic-C1 + analyze-A3 — schema diff + both migration paths | +| T067 (expanded) | 7 | analyze-A5 — package count comparison against baseline | +| T069a (new) | 7 | critic-H2 — RecipeProvider + RecipeDbContext integration test | +| T069 (expanded) | 7 | critic-H2 — require T069a to run first | +| T073 (new) | 7 | critic-H1 — rollback runbook | + +**Task count**: 72 original → 76 after resolution (+4 new tasks: T047a, T050a, T069a, T073; +9 expanded tasks) + +--- + +## Artifacts Updated + +| Artifact | Changes | +|----------|---------| +| `tasks.md` | T001, T024, T025, T047a (new), T050, T050a (new), T051, T061, T066, T067, T069a (new), T069, T073 (new); metrics updated to 76 | +| `plan.md` | DI Registration section: health check added, namespace standardized, connection string all-environments template added | +| `spec.md` | IRecipeService Key Entity: GetRecipeVMHostAsync removal documented; SC-006: smoke test requirement added | diff --git a/specs/006-extract-recipe-library/plan.md b/specs/006-extract-recipe-library/plan.md new file mode 100644 index 00000000..394868c9 --- /dev/null +++ b/specs/006-extract-recipe-library/plan.md @@ -0,0 +1,290 @@ +# Implementation Plan: Extract WebSpark.Recipe Core Library + +**Branch**: 
`006-extract-recipe-library` | **Date**: 2026-05-01 | **Spec**: [spec.md](spec.md) +**Input**: Feature specification from `specs/006-extract-recipe-library/spec.md` + +## Rationale Summary + +### Core Problem + +All recipe-domain code lives inside `WebSpark.Core`, forcing every consumer of recipe functionality to take a dependency on the full general-purpose infrastructure library. This inflates transitive dependencies, prevents independent versioning, and couples recipe schema changes to a release of the core library. + +### Decision Summary + +Extract all recipe-domain code — entities, models, interfaces, service implementations, helpers, and a new `RecipeDbContext` — into a standalone `WebSpark.Recipe` library. `WebSpark.Core` retains no recipe code and no reference to `WebSpark.Recipe`. All four consuming projects are updated to reference `WebSpark.Recipe` directly. + +### Key Drivers + +- **Independent deployability**: `WebSpark.Recipe` publishes as its own NuGet package +- **Lean consumers**: `WebSpark.RecipeCookbook` and PromptSpark projects drop `WebSpark.Core` dependency for recipe access +- **Clean architecture**: Recipe data access uses its own `RecipeDbContext`, fully isolated from `WebSparkDbContext` + +### Source Inputs + +- spec.md with 5 clarified decisions (service placement, namespaces, RecipeVM, RecipeOld, NuGet packaging) +- research.md resolving 10 technical unknowns (BaseEntity, IMenuProvider, FormatHelper, WebSite FK, migrations, etc.) 
+- data-model.md with complete entity and model definitions + +### Tradeoffs Considered + +- **Partial extraction (interfaces only to WebSpark.Recipe)**: Rejected — consumers still need `WebSpark.Core` for service implementations +- **Three-project split (WebSpark.Recipe.Domain + WebSpark.Recipe.Data)**: Rejected — adds package management overhead without meaningful boundary for this domain size +- **Selected: Single `WebSpark.Recipe` library with EF Core**: Self-contained; EF Core is the only significant package dependency; all recipe concerns in one independently-versioned package + +### Architectural Impact + +- `WebSpark.Core`: ~14 source files removed; recipe DbSets removed from `WebSparkDbContext`; zero recipe dependency remains +- `WebSpark.Recipe`: New net10.0 library; EF Core 10.0.7 dependency; `RecipeDbContext` with own migration history +- `WebSpark.Portal`: Adds `WebSpark.Recipe` reference; registers `RecipeDbContext`; hosts `RecipeVM` and new `RecipeMenuAdapter` +- `WebSpark.RecipeCookbook`: `WebSpark.Core` reference replaced by `WebSpark.Recipe` +- `PromptSpark.Domain` + `PromptSpark.Recipe.Console`: `WebSpark.Core` recipe dependency replaced by `WebSpark.Recipe` + +### Reviewer Guidance + +Focus on: (1) `RecipeMenuAdapter` in Portal correctly adapting `IRecipeService` to `IMenuProvider`; (2) `RecipeDbContext` migration applied correctly on existing databases; (3) zero residual recipe code in `WebSpark.Core` after extraction. + +--- + +## Summary + +Create `WebSpark.Recipe` as a standalone net10.0 library containing all recipe entities, models, interfaces, service implementations, helpers, and `RecipeDbContext`. Migrate consuming projects to reference it directly. Remove all recipe code from `WebSpark.Core`. A `RecipeMenuAdapter` in `WebSpark.Portal` bridges the menu system without creating a `WebSpark.Core` to `WebSpark.Recipe` dependency. 
+ +--- + +## Technical Context + +**Language/Version**: C# 13 / .NET 10.0 +**Primary Dependencies**: Entity Framework Core 10.0.7 (SQLite provider), Microsoft.EntityFrameworkCore.Design 10.0.7 +**Storage**: SQLite (matching existing WebSpark.Core pattern; connection string `RecipeConnection` in `appsettings.json`) +**Testing**: MSTest via `WebSpark.Portal.Tests` (existing test project) +**Target Platform**: .NET 10.0 class library (NuGet publishable) +**Project Type**: Domain library +**Performance Goals**: N/A for library extraction; existing performance characteristics preserved +**Constraints**: No breaking changes to public API surface; existing database schema preserved; zero recipe code in `WebSpark.Core` post-extraction +**Scale/Scope**: ~14 source files migrated; 4 consuming projects updated; 1 new project created + +--- + +## Constitution Check + +*GATE: Pre-Phase-0. Re-checked after Phase 1 design.* + +| Principle | Status | Notes | +|-----------|--------|-------| +| Template-Driven Configuration | PASS | `WebSpark.Recipe.csproj` uses same template pattern as `WebSpark.Core.csproj` | +| API Versioning Standard | N/A | Library, not a web API | +| Azure-First Development | PASS | Library is deployment-agnostic; no Azure-specific concerns | +| Fire-and-Forget Logging | PASS | No logging infrastructure introduced in recipe library | +| API Security by Default | N/A | Library, not an API endpoint host | +| Spec Workflow Enforcement | PASS | Full spec + clarify + plan + tasks + critic workflow followed | +| Async/Await Best Practices | PASS | RecipeProvider retains async patterns with CancellationToken | +| EF Core Standards | PASS | AsNoTracking() for read queries; scoped DbContext; separate DbContext per domain | +| Separate DbContext per Domain | PASS | Aligns with Spec 005 pattern cited in constitution | + +**Gate result**: PASS — no constitution violations. 
+ +--- + +## Project Structure + +### Documentation (this feature) + +```text +specs/006-extract-recipe-library/ +├── plan.md +├── research.md +├── data-model.md +├── quickstart.md +├── gates/ +│ ├── analyze.md +│ └── critic.md +└── tasks.md +``` + +### Source Code Structure + +```text +WebSpark.Recipe/ [NEW project] +├── WebSpark.Recipe.csproj +├── Constants/ +│ └── RecipeConstants.cs +├── Data/ +│ ├── RecipeBaseEntity.cs +│ ├── Recipe.cs +│ ├── RecipeCategory.cs +│ ├── RecipeComment.cs +│ ├── RecipeImage.cs +│ ├── RecipeDbContext.cs +│ └── RecipeDbContextFactory.cs +├── Helpers/ +│ ├── RecipeHelper.cs +│ └── RecipeUrlHelper.cs +├── Interfaces/ +│ ├── IRecipeService.cs +│ └── IRecipeImageService.cs +├── Migrations/ +│ └── [InitialCreate migration - generated] +├── Models/ +│ ├── RecipeCategoryModel.cs +│ ├── RecipeImageModel.cs +│ ├── RecipeModel.cs +│ ├── RecipeOptionModel.cs +│ └── EditModels/ +│ ├── RecipeCategoryEditModel.cs +│ └── RecipeEditModel.cs +└── Providers/ + ├── RecipeImageService.cs + └── RecipeProvider.cs + +WebSpark.Core/ [MODIFIED - recipe files removed] +├── [DELETE] Data/Recipe.cs +├── [DELETE] Data/RecipeCategory.cs +├── [DELETE] Data/RecipeComment.cs +├── [DELETE] Data/RecipeImage.cs +├── [DELETE] Data/RecipeOld.cs (after usage scan) +├── [DELETE] Interfaces/IRecipeService.cs +├── [DELETE] Providers/RecipeProvider.cs +├── [DELETE] Providers/RecipeImageService.cs +├── [DELETE] Helpers/RecipeHelper.cs +├── [DELETE] Models/RecipeModel.cs + RecipeCategoryModel.cs + RecipeImageModel.cs +├── [DELETE] Models/RecipeConstants.cs +├── [DELETE] Models/EditModels/RecipeEditModel.cs + RecipeCategoryEditModel.cs +├── [DELETE] Models/ViewModels/RecipeVM.cs (moves to WebSpark.Portal) +├── [MODIFY] Data/WebSparkDbContext.cs (remove recipe DbSets + recipe OnModelCreating) +└── [KEEP] Migrations/ (retained for history; not modified) + +WebSpark.Portal/ [MODIFIED] +├── [ADD] Areas/RecipeSpark/Models/RecipeVM.cs +├── [ADD] 
Areas/RecipeSpark/Services/RecipeMenuAdapter.cs +├── [MODIFY] Program.cs +├── [MODIFY] *.cs - update using directives +└── [MODIFY] appsettings*.json - add RecipeConnection string + +WebSpark.RecipeCookbook/ [MODIFIED] +├── [MODIFY] WebSpark.RecipeCookbook.csproj +└── [MODIFY] *.cs + +PromptSpark.Domain/ [MODIFIED] +├── [MODIFY] PromptSpark.Domain.csproj +└── [MODIFY] *.cs + +PromptSpark.Recipe.Console/ [MODIFIED] +├── [MODIFY] PromptSpark.Recipe.Console.csproj +└── [MODIFY] *.cs +``` + +--- + +## Phase 0: Research (Complete) + +All unknowns resolved. See `research.md`. Key decisions: + +| ID | Decision | +|----|----------| +| R-001 | `RecipeBaseEntity` inlined in `WebSpark.Recipe.Data` | +| R-002 | `IMenuProvider` removed from `RecipeProvider`; `RecipeMenuAdapter` added in Portal | +| R-003 | `RecipeUrlHelper` created in `WebSpark.Recipe.Helpers` | +| R-004 | `DomainId` raw FK in `RecipeDbContext`; no WebSite navigation | +| R-005 | `RecipeOptionModel` replaces `LookupModel` in recipe models | +| R-006 | Two-track migration: initial migration for new DBs; manual history entry for existing DBs | +| R-007 | `RecipeConstants` moves to `WebSpark.Recipe.Constants` | +| R-008 | `RecipeProvider` depends only on `RecipeDbContext`, `RecipeUrlHelper`, `RecipeConstants` | +| R-009 | Initial version `9.0.0` matching solution version family | +| R-010 | `RecipeOld.cs` deleted after zero-reference usage scan | + +--- + +## Phase 1: Design and Contracts (Complete) + +See `data-model.md` for complete entity and model specifications. + +### RecipeDbContext Registration (WebSpark.Portal Program.cs) + +```csharp +builder.Services.AddDbContext<RecipeDbContext>(options => + options.UseSqlite( + builder.Configuration.GetConnectionString("RecipeConnection"), + b => b.MigrationsAssembly("WebSpark.Recipe"))); +``` + +### DI Registration (WebSpark.Portal Program.cs) + +```csharp +// IMPORTANT: Remove any existing IMenuProvider / RecipeProvider registration before adding these. +// Do NOT append — replace. 
Adding a second IMenuProvider causes startup failure. +builder.Services.AddScoped<IRecipeService, RecipeProvider>(); +builder.Services.AddScoped<IRecipeImageService, RecipeImageService>(); +builder.Services.AddScoped<IMenuProvider, RecipeMenuAdapter>(); + +// Health check — required by constitution for all DbContext data stores +builder.Services.AddHealthChecks() + .AddDbContextCheck<RecipeDbContext>("recipe-db"); +``` + +### RecipeMenuAdapter Contract (WebSpark.Portal) + +```csharp +// Canonical namespace: WebSpark.Portal.Areas.RecipeSpark.Services +namespace WebSpark.Portal.Areas.RecipeSpark.Services; + +public class RecipeMenuAdapter : IMenuProvider +{ + private readonly IRecipeService _recipeService; + // Adapts IRecipeService.Get() / GetRecipeCategoryList() to IMenuProvider menu shape + // Methods: Delete, GetMenuList, GetMenuItem, GetMenuItemAsync, GetSiteMenu +} +``` + +### Connection String — All Environments + +```json +// appsettings.json (placeholder — value sourced from environment/Key Vault) +"ConnectionStrings": { + "RecipeConnection": "" +} + +// appsettings.Development.json (local SQLite) +"ConnectionStrings": { + "RecipeConnection": "Data Source=Data/webspark-recipe.db" +} + +// appsettings.Production.json (Azure Key Vault reference — constitution Azure-First standard) +"ConnectionStrings": { + "RecipeConnection": "" // Injected at runtime via Azure Key Vault / App Configuration +} +``` + +### WebSpark.Recipe.csproj NuGet Metadata + +```xml +<PackageId>WebSpark.Recipe</PackageId> +<Version>9.0.0</Version> +<Authors>Mark Hazleton</Authors> +<Description>Standalone recipe domain library for the WebSpark ecosystem.</Description> 
+<GeneratePackageOnBuild>true</GeneratePackageOnBuild> +``` + +--- + +## Deployment / Migration Runbook + +### New Databases +```bash +dotnet ef migrations add InitialCreate --context RecipeDbContext --project WebSpark.Recipe --startup-project WebSpark.Portal +dotnet ef database update --context RecipeDbContext --startup-project WebSpark.Portal +``` + +### Existing Databases (recipe tables already exist) +After generating the `InitialCreate` migration, record the migration ID and insert into history: +```sql +INSERT INTO __EFMigrationsHistory (MigrationId, ProductVersion) +VALUES ('<timestamp>_InitialCreate', '10.0.7'); +``` + +### WebSpark.Core Cleanup Verification +```bash +dotnet ef migrations add VerifyCleanup --context WebSparkDbContext --startup-project WebSpark.Portal +# Expected: empty migration (no schema changes from removing DbSet properties) +# If non-empty: review and handle before proceeding +dotnet ef migrations remove --context WebSparkDbContext --startup-project WebSpark.Portal +``` diff --git a/specs/006-extract-recipe-library/quickstart.md b/specs/006-extract-recipe-library/quickstart.md new file mode 100644 index 00000000..8cb62069 --- /dev/null +++ b/specs/006-extract-recipe-library/quickstart.md @@ -0,0 +1,133 @@ +# Quickstart: Validate WebSpark.Recipe Extraction + +**Branch**: `006-extract-recipe-library` | **Date**: 2026-05-01 + +## Prerequisites + +- .NET 10 SDK installed +- Solution at `c:\WebSpark\WebSpark.sln` +- Branch `006-extract-recipe-library` checked out + +--- + +## Step 1: Verify the New Project Exists and Builds in Isolation + +```bash +cd c:\WebSpark\WebSpark.Recipe +dotnet build +# Expected: Build succeeded. 0 Error(s). 0 Warning(s). 
+``` + +**What to check**: +- `WebSpark.Recipe.csproj` lists only EF Core packages — no MailKit, Serilog, Markdig +- No `using WebSpark.Core` directives in any file +- All namespaces start with `WebSpark.Recipe.*` + +--- + +## Step 2: Verify WebSpark.Core Has Zero Recipe Code + +```bash +# From solution root — should return zero matches +grep -r "Recipe" c:\WebSpark\WebSpark.Core\Data\ --include="*.cs" -l +grep -r "Recipe" c:\WebSpark\WebSpark.Core\Models\ --include="*.cs" -l +grep -r "Recipe" c:\WebSpark\WebSpark.Core\Interfaces\ --include="*.cs" -l +grep -r "Recipe" c:\WebSpark\WebSpark.Core\Providers\ --include="*.cs" -l +grep -r "Recipe" c:\WebSpark\WebSpark.Core\Helpers\ --include="*.cs" -l + +# Verify WebSparkDbContext has no recipe DbSets +grep -n "DbSet.*Recipe" c:\WebSpark\WebSpark.Core\Data\WebSparkDbContext.cs +# Expected: no output +``` + +--- + +## Step 3: Verify RecipeOld.cs Is Deleted + +```bash +grep -r "RecipeOld" c:\WebSpark --include="*.cs" +# Expected: no output (file deleted, no remaining references) +``` + +--- + +## Step 4: Build Full Solution + +```bash +cd c:\WebSpark +dotnet build WebSpark.sln +# Expected: Build succeeded. 0 Error(s). 0 Warning(s). +``` + +--- + +## Step 5: Apply RecipeDbContext Migration (New Database) + +```bash +cd c:\WebSpark +dotnet ef database update --context RecipeDbContext --startup-project WebSpark.Portal --project WebSpark.Recipe +# Expected: Done. (tables created or migration already applied) +``` + +**For existing database** — verify migration history entry exists: +```sql +SELECT * FROM __EFMigrationsHistory WHERE MigrationId LIKE '%InitialCreate%'; +-- Expected: 1 row +``` + +--- + +## Step 6: Verify RecipeCookbook Builds Without WebSpark.Core + +```bash +# Temporarily remove WebSpark.Core reference from WebSpark.RecipeCookbook.csproj (it should already be gone) +cd c:\WebSpark\WebSpark.RecipeCookbook +dotnet build +# Expected: Build succeeded. 0 Error(s). 0 Warning(s). 
+```
+
+---
+
+## Step 7: Run Existing Recipe Tests
+
+```bash
+cd c:\WebSpark
+dotnet test tests\WebSpark.Portal.Tests\WebSpark.Portal.Tests.csproj --filter "FullyQualifiedName~RecipeSpark"
+# Expected: All tests pass. 0 Failed.
+```
+
+---
+
+## Step 8: Verify DI Registration in Portal
+
+In `WebSpark.Portal/Program.cs`, confirm the following registrations are present:
+```csharp
+// services.AddDbContext<RecipeDbContext>(...) ← exists
+// services.AddScoped<IRecipeService, RecipeProvider>() ← exists
+// services.AddScoped<IRecipeImageService, RecipeImageService>() ← exists
+// services.AddScoped<IMenuProvider, RecipeMenuAdapter>() ← exists (or IMenuProvider registered elsewhere)
+```
+
+---
+
+## Step 9: Smoke Test Recipe Browsing (Manual)
+
+1. Start `WebSpark.Portal` locally
+2. Navigate to `/recipe` (or recipe browse endpoint)
+3. Verify recipe list loads without errors
+4. Open one recipe — verify detail page renders
+5. Check portal logs — no EF Core errors about missing context or unmapped entities
+
+---
+
+## Validation Checklist
+
+- [ ] `WebSpark.Recipe` builds in isolation
+- [ ] `WebSpark.Recipe` has no `WebSpark.Core` reference
+- [ ] `WebSpark.Core` has zero recipe source files
+- [ ] `WebSparkDbContext` has no recipe `DbSet` properties
+- [ ] Full solution builds with zero errors and zero warnings
+- [ ] `RecipeDbContext` migration applied on test database
+- [ ] `WebSpark.RecipeCookbook` builds without `WebSpark.Core`
+- [ ] All `RecipeSpark` tests pass
+- [ ] Recipe browsing works end-to-end in `WebSpark.Portal`
diff --git a/specs/006-extract-recipe-library/research.md b/specs/006-extract-recipe-library/research.md
new file mode 100644
index 00000000..99a3f8cd
--- /dev/null
+++ b/specs/006-extract-recipe-library/research.md
@@ -0,0 +1,125 @@
+# Research: Extract WebSpark.Recipe Core Library
+
+**Branch**: `006-extract-recipe-library` | **Date**: 2026-05-01
+
+## Phase 0 Findings — Resolved Unknowns
+
+---
+
+### R-001: BaseEntity Dependency
+
+**Question**: Recipe entities inherit `WebSpark.Core.Data.BaseEntity`.
Can they move to `WebSpark.Recipe` without creating a dependency on `WebSpark.Core`? + +**Decision**: Introduce `RecipeBaseEntity` directly in `WebSpark.Recipe.Data`. Mirror the same properties as `BaseEntity` (`Id`, `CreatedDate`, `UpdatedDate`, `UpdatedID`, `CreatedID`). This avoids any external dependency for the base class. + +**Rationale**: `BaseEntity` is 14 lines with five standard audit properties. Inlining it is far cheaper than taking a SharedKernel or WebSpark.Core dependency. Future refactoring can unify base entities if a SharedKernel consolidation occurs separately. + +**Alternatives considered**: +- Reference `WebSpark.SharedKernel`: Adds a dependency chain; SharedKernel contents are unknown and may themselves pull in undesired packages +- Reference `WebSpark.Core` for BaseEntity only: Creates the exact circular dependency we are eliminating + +--- + +### R-002: IMenuProvider / RecipeProvider Dual Implementation + +**Question**: `RecipeProvider` currently implements both `IRecipeService` AND `IMenuProvider`. `IMenuProvider` is defined in `WebSpark.Core`. If `RecipeProvider` moves to `WebSpark.Recipe`, it cannot reference `WebSpark.Core` for `IMenuProvider`. + +**Decision**: Remove the `IMenuProvider` implementation from `RecipeProvider`. Create a new adapter class `RecipeMenuAdapter : IMenuProvider` in `WebSpark.Portal` that wraps `IRecipeService`. All menu-generation logic currently in `RecipeProvider` moves to `RecipeMenuAdapter`. + +**Rationale**: `RecipeProvider` is a data-access service for recipe CRUD. Its menu-generation role (`GetSiteMenu`, `GetMenuItem`, `GetMenuItemAsync`) is a portal presentation concern. Extracting it to an adapter in the portal layer is the correct architectural boundary. `WebSpark.Recipe` stays completely independent of any menu system. 
+
+**Alternatives considered**:
+- Move `IMenuProvider` to `WebSpark.Recipe`: Makes `WebSpark.Recipe` a menu-system provider, which is not its domain
+- Move `IMenuProvider` to `WebSpark.SharedKernel`: Adds a SharedKernel dependency to both `WebSpark.Core` and `WebSpark.Recipe`; overkill for a single interface
+
+**Methods moving to `RecipeMenuAdapter`**:
+- `GetMenuItem(int Id)`
+- `GetMenuItemAsync(int Id)`
+- `GetSiteMenu(int DomainId)`
+- `GetAllMenuItems()`
+
+---
+
+### R-003: FormatHelper Dependency in RecipeProvider
+
+**Question**: `RecipeProvider.Create(Recipe → RecipeModel)` calls `FormatHelper.GetRecipeURL(name)` and `FormatHelper.GetRecipeCategoryURL(name)` which are defined in `WebSpark.Core`. `RecipeProvider` cannot call these after moving to `WebSpark.Recipe`.
+
+**Decision**: Create `RecipeUrlHelper` (static class) in `WebSpark.Recipe.Helpers` with methods: `GetRecipeURL(string name)`, `GetRecipeCategoryURL(string name)`, and `GetSafePath(string name)`. These are simple string-manipulation functions (no external dependencies) and can be copied verbatim.
+
+**Rationale**: Three short, self-contained string methods. Copy-in is cheaper than any cross-library dependency. `FormatHelper` in `WebSpark.Core` retains its own copies for non-recipe consumers.
+
+---
+
+### R-004: WebSite (Domain) FK in Recipe Entities
+
+**Question**: `Recipe` and `RecipeCategory` have FK relationships to `WebSite` (the domain entity) which lives in `WebSpark.Core`. `RecipeDbContext` won't have a `DbSet<WebSite>`. How should the FK be handled?
+
+**Decision**: Configure the FK as a raw column (`DomainId`) in `RecipeDbContext` without a navigation property to `WebSite`. The `DomainId` int property stays on both entities. EF Core supports FK constraints without navigation properties; the constraint exists at the database level. No `WebSite` `DbSet` in `RecipeDbContext`.
+
+**Rationale**: `RecipeDbContext` only needs to own its recipe tables.
Domain/site data is accessed separately via `WebSparkDbContext`. Recipes filter by `DomainId` integer; they do not need to join to `WebSite` within the `RecipeDbContext` scope. This is identical to how the existing `RecipeChatContext` (from Spec 005) handles domain references.
+
+**Impact on RecipeProvider**: Queries that previously eagerly loaded `.Include(r => r.Domain)` (if any) must be removed; `DomainId` is used for filtering instead.
+
+---
+
+### R-005: LookupModel Used in RecipeModel
+
+**Question**: `RecipeModel.RecipeCategories` is `List<LookupModel>` where `LookupModel` is defined in `WebSpark.Core.Models`. Moving `RecipeModel` to `WebSpark.Recipe` without `LookupModel` breaks the model.
+
+**Decision**: Create `RecipeOptionModel` in `WebSpark.Recipe.Models` with the same properties as `LookupModel` (`Value`, `Text`, `IsDefault`, `IsSelected`). Update `RecipeModel.RecipeCategories` to use `List<RecipeOptionModel>`. All callers that used `LookupModel` for recipe category dropdowns update to `RecipeOptionModel`.
+
+**Rationale**: `LookupModel` is a generic utility. Creating a recipe-specific equivalent keeps `WebSpark.Recipe` independent and gives the recipe option model a meaningful name.
+
+---
+
+### R-006: EF Core Migration Strategy
+
+**Question**: Existing recipe migrations live in `WebSpark.Core/Migrations/` under `WebSparkDbContext`. When recipe entities move to `WebSpark.Recipe` under a new `RecipeDbContext`, what happens to the schema on existing databases?
+
+**Decision**: Two-track approach:
+1. **New databases**: `RecipeDbContext` initial migration creates all recipe tables from scratch (`Recipe`, `RecipeCategory`, `RecipeComment`, `RecipeImage`).
+2. **Existing databases**: Recipe tables already exist (created by `WebSparkDbContext` migrations). Apply the initial `RecipeDbContext` migration by inserting a row directly into `__EFMigrationsHistory` to mark it as already applied. A deployment runbook documents this step.
+3.
**WebSpark.Core migrations**: Recipe-related migrations (`RecipeKeywords`, `AddRecipeBool`, `AddDomainToRecipeCategory`) are retained for historical record but no longer run against `RecipeDbContext`. The `WebSparkDbContext` migration scaffold is left intact; the recipe DbSets are simply removed from `WebSparkDbContext`, which does not require a new migration if no schema change occurs. + +**Rationale**: EF Core tracks applied migrations per-context in `__EFMigrationsHistory`. A new context creates its own tracking rows. Marking the initial migration as applied is a standard EF pattern for adopting an existing database schema. + +--- + +### R-007: RecipeConstants.INT_MOM_DomainId + +**Question**: `RecipeConstants` (in `WebSpark.Core.Models`) defines `INT_MOM_DomainId = 2`. `RecipeModel` and `RecipeCategoryModel` reference it for default domain values. + +**Decision**: Move `RecipeConstants` to `WebSpark.Recipe.Constants` with namespace `WebSpark.Recipe.Constants`. Update references in all recipe models. + +**Rationale**: This constant is exclusively a recipe-domain concept. It belongs in the recipe library. + +--- + +### R-008: RecipeProvider Dependencies Summary + +After extracting, `RecipeProvider` in `WebSpark.Recipe` will depend on: +- `RecipeDbContext` (in `WebSpark.Recipe.Data`) — replaces `WebSparkDbContext` +- `RecipeUrlHelper` (in `WebSpark.Recipe.Helpers`) — replaces `FormatHelper` +- `RecipeConstants` (in `WebSpark.Recipe.Constants`) — moved from `WebSpark.Core.Models` +- No dependency on `WebSpark.Core` or `IMenuProvider` + +--- + +### R-009: NuGet Package Metadata Reference + +Inspecting `WebSpark.Core.csproj` version pattern: +- `AssemblyVersion`: `9.0.0.0` +- `FileVersion`: `9.0.0.0` +- `InformationalVersion`: `9.0.0+dev` +- `TargetFramework`: `net10.0` +- `GeneratePackageOnBuild`: `True` + +`WebSpark.Recipe.csproj` initial version: `9.0.0` (aligns with the solution version family). 
+ +--- + +### R-010: RecipeOld.cs Usage Scan + +**Finding**: `RecipeOld.cs` contains a `GetRecipe()` conversion method from the legacy schema. It has 42 lines. A usage scan is required before deletion to confirm zero references. + +**Approach**: During implementation, run `grep -r "RecipeOld" --include="*.cs"` across the solution. If zero results: delete. If results found: flag for manual review before proceeding. diff --git a/specs/006-extract-recipe-library/spec.md b/specs/006-extract-recipe-library/spec.md new file mode 100644 index 00000000..5545e3e2 --- /dev/null +++ b/specs/006-extract-recipe-library/spec.md @@ -0,0 +1,198 @@ +--- +classification: full-spec +risk_level: medium +target_workflow: specify-full +required_artifacts: spec, plan, tasks +recommended_next_step: plan +required_gates: checklist, analyze, critic +--- + +# Feature Specification: Extract WebSpark.Recipe Core Library + +**Feature Branch**: `006-extract-recipe-library` +**Created**: 2026-05-01 +**Status**: In Progress +**Input**: User description: "extract a stand alone Webspark.Recipe core library with all recipe related entities, models, classes, code removed from WebSpark.Core This can be a child of WebSpark.Core but dependencies should be minimal." + +## Rationale Summary + +### Core Problem + +All recipe-domain code — entities, models, interfaces, service contracts, helpers, and service implementations — currently lives inside `WebSpark.Core`, a general-purpose infrastructure library. This forces every project that needs recipe functionality to take a dependency on the full `WebSpark.Core` library, including unrelated capabilities (email, logging, markdown, weather, SEO, etc.). Conversely, projects that need non-recipe features from `WebSpark.Core` must also carry the recipe code. The coupling makes the dependency graph harder to reason about, inflates transitive dependencies for consumers, and makes the recipe domain harder to evolve independently. 
+ +### Decision Summary + +Extract **all** recipe-domain code from `WebSpark.Core` into a new, fully standalone library `WebSpark.Recipe`. This includes entities, models, DTOs, interfaces, service implementations, helpers, and a new dedicated `RecipeDbContext`. `WebSpark.Core` will remove all recipe code and the recipe-related `DbSet` entries from `WebSparkDbContext` — it will have **no** reference to `WebSpark.Recipe`. All consuming projects (`WebSpark.Portal`, `WebSpark.RecipeCookbook`, `PromptSpark.Domain`, `PromptSpark.Recipe.Console`) will reference `WebSpark.Recipe` directly. + +### Key Drivers + +- **Full domain separation**: Recipe domain is independently deployable as a self-contained NuGet package with its own data access layer +- **Lean dependencies per consumer**: `WebSpark.RecipeCookbook` and `PromptSpark.Recipe.Console` reference only `WebSpark.Recipe` — no general infrastructure from `WebSpark.Core` +- **Independent evolution**: Recipe schema changes, migrations, and service updates have no impact on `WebSpark.Core` + +### Source Inputs + +- Existing recipe code inventory in `WebSpark.Core` (entities, models, interfaces, providers, helpers, migrations) +- Projects currently consuming recipe code via `WebSpark.Core`: `WebSpark.Portal`, `WebSpark.RecipeCookbook`, `PromptSpark.Domain`, `PromptSpark.Recipe.Console` +- Constitution principle: Template-Driven Configuration — new project must use `csproj.template` baseline +- Spec 005 (Recipe Chat API Backend) established patterns for separate `DbContext` per domain (see constitution section "Separate DbContext for Domain Isolation") + +### Tradeoffs Considered + +- **Option A: Interfaces/models/entities → WebSpark.Recipe; implementations stay in WebSpark.Core** — Keeps `WebSpark.Recipe` free of EF Core but means consumers still need `WebSpark.Core` for the service implementations; incomplete separation +- **Option C: Create a third project WebSpark.Recipe.Data for EF-bound implementations** — Cleanest 
layering but adds a third package for consumers to manage +- **Selected (Option B + RecipeDbContext): All recipe code including implementations and a new RecipeDbContext → WebSpark.Recipe** — Full standalone separation; `WebSpark.Core` has zero recipe code; EF Core is an accepted dependency of `WebSpark.Recipe` because the library is self-contained with its own data access; aligns with the Spec 005 "Separate DbContext" pattern already established in the constitution + +### Architectural Impact + +- `WebSpark.Recipe` becomes the fully self-contained recipe domain library: entities, models, interfaces, service implementations, helper utilities, and `RecipeDbContext` +- `WebSpark.Core` has **no** reference to `WebSpark.Recipe`; recipe `DbSet` entries are removed from `WebSparkDbContext`; all recipe source files are deleted from `WebSpark.Core` +- Recipe EF Core migrations currently in `WebSpark.Core/Migrations/` are migrated to `WebSpark.Recipe` under the new `RecipeDbContext` +- `WebSpark.Portal` references both `WebSpark.Core` (for infrastructure) and `WebSpark.Recipe` (for recipe domain); registers `RecipeDbContext` alongside the existing `WebSparkDbContext` +- `WebSpark.RecipeCookbook` drops its `WebSpark.Core` reference and references only `WebSpark.Recipe` +- `PromptSpark.Domain` and `PromptSpark.Recipe.Console` reference `WebSpark.Recipe` directly; `WebSpark.Core` reference removed if it was only needed for recipe types +- No circular dependencies: `WebSpark.Recipe` has no reference to `WebSpark.Core` + +### Reviewer Guidance + +Focus on: (1) whether all recipe code is fully removed from `WebSpark.Core` with no residual recipe types; (2) whether the `RecipeDbContext` migration strategy preserves existing data and schema; (3) whether `WebSpark.Portal`'s dual-context registration is correct; (4) whether any recipe type was shared with non-recipe code in `WebSpark.Core` (e.g., `RecipeVM` extending a base type from `WebSpark.Core`). 
+ +--- + +## User Scenarios & Testing + +### User Story 1 — Developer consumes full recipe functionality without WebSpark.Core (Priority: P1) + +A developer building a recipe-processing tool adds only `WebSpark.Recipe` as a dependency and gets the full recipe domain: entities, models, interfaces, service implementations, and database access — without pulling in any `WebSpark.Core` infrastructure. + +**Why this priority**: This is the core motivation for the extraction. All other stories depend on this library existing as a self-contained unit. + +**Independent Test**: A new console application references only `WebSpark.Recipe`, registers `RecipeDbContext` and `IRecipeService`, and successfully reads recipe data — with no `WebSpark.Core` reference in the project file. + +**Acceptance Scenarios**: + +1. **Given** a new project with only a `WebSpark.Recipe` package reference, **When** the developer writes code using `RecipeModel`, `RecipeCategory`, `RecipeImage`, `IRecipeService`, `RecipeDbContext`, and `RecipeHelper`, **Then** the project compiles without errors and without requiring `WebSpark.Core` +2. **Given** the `WebSpark.Recipe` package, **When** inspecting its transitive dependencies, **Then** the dependency list contains no references to MailKit, Serilog, Markdig, ReverseMarkdown, or any other `WebSpark.Core`-specific infrastructure packages +3. **Given** existing recipe entity definitions, **When** the extraction is complete, **Then** all public type names, property names, and method signatures are identical to the originals in `WebSpark.Core`, and all namespaces follow the `WebSpark.Recipe.*` pattern (e.g., `WebSpark.Recipe.Data`, `WebSpark.Recipe.Models`) + +--- + +### User Story 2 — WebSpark.Core is fully clean after extraction (Priority: P1) + +`WebSpark.Core` builds and functions correctly with all recipe code removed. `WebSparkDbContext` no longer defines recipe `DbSet` properties. No recipe source files remain in the `WebSpark.Core` project. 
+ +**Why this priority**: The cleanliness of `WebSpark.Core` post-extraction is the direct measure of success for the separation goal. + +**Independent Test**: After extraction, `WebSpark.Core` builds without errors, `WebSparkDbContext` has no recipe `DbSet` entries, and a `grep` for "Recipe" in the `WebSpark.Core` source tree returns zero results (excluding project file references). + +**Acceptance Scenarios**: + +1. **Given** `WebSpark.Core` after extraction, **When** the project is built, **Then** it compiles with zero errors and zero warnings and contains no recipe-related source files +2. **Given** `WebSparkDbContext` after extraction, **When** inspecting its `DbSet` properties, **Then** `Recipe`, `RecipeCategory`, `RecipeComment`, and `RecipeImage` DbSets are absent +3. **Given** `WebSpark.Portal` updated to reference `WebSpark.Recipe` directly, **When** the solution is built, **Then** the portal compiles without recipe-related errors using both `WebSparkDbContext` and `RecipeDbContext` + +--- + +### User Story 3 — WebSpark.RecipeCookbook builds without WebSpark.Core (Priority: P2) + +`WebSpark.RecipeCookbook` is updated to reference `WebSpark.Recipe` directly and its `WebSpark.Core` project reference is removed (if recipe types were the only reason it existed). + +**Why this priority**: `WebSpark.RecipeCookbook` is the clearest demonstration that the new library is truly self-contained. + +**Independent Test**: `WebSpark.RecipeCookbook` builds successfully with only `WebSpark.Recipe` as its WebSpark dependency; `WebSpark.Core` is absent from its dependency graph. + +**Acceptance Scenarios**: + +1. **Given** `WebSpark.RecipeCookbook` with `WebSpark.Core` removed and `WebSpark.Recipe` added, **When** the project is built, **Then** it compiles successfully and all PDF generation functionality remains intact +2. 
**Given** the updated `WebSpark.RecipeCookbook`, **When** generating a recipe cookbook PDF with sample recipe data, **Then** the output is identical to pre-extraction behavior + +--- + +### User Story 4 — PromptSpark projects reference WebSpark.Recipe directly (Priority: P2) + +`PromptSpark.Domain` and `PromptSpark.Recipe.Console` are updated to reference `WebSpark.Recipe` directly and drop their `WebSpark.Core` dependency (where it was only needed for recipe types). + +**Why this priority**: These projects process recipe data in AI/prompt workflows and benefit most from the leaner dependency. + +**Independent Test**: Both projects build with only `WebSpark.Recipe` (plus their own dependencies) and produce identical runtime behavior. + +**Acceptance Scenarios**: + +1. **Given** `PromptSpark.Domain` referencing `WebSpark.Recipe` directly, **When** the project is built, **Then** all recipe-processing logic compiles and executes correctly +2. **Given** `PromptSpark.Recipe.Console` updated to reference `WebSpark.Recipe`, **When** the console application runs, **Then** recipe processing operations produce the same results as before the extraction + +--- + +### Edge Cases + +- `RecipeVM` is a presentation-layer type that inherits from a `WebSpark.Core` base view model; it moves to `WebSpark.Portal` (not `WebSpark.Recipe`) and stays within the portal's existing inheritance chain — no refactoring of the base class is required +- EF Core migrations currently in `WebSpark.Core/Migrations/` reference recipe entities — a migration strategy is required to transfer them to `RecipeDbContext` without data loss or schema drift on existing databases +- `RecipeOld.cs` (legacy deprecated entity) — MUST be deleted; a usage scan during implementation must confirm zero references before deletion proceeds +- If `WebSpark.Portal` previously resolved recipe service registrations through `WebSpark.Core`'s DI setup, those registrations must be explicitly moved to reference `WebSpark.Recipe` 
services and `RecipeDbContext` +- Namespace collision risk if a consuming project references both `WebSpark.Core` (for non-recipe infrastructure) and `WebSpark.Recipe` — verify no shared type names exist across both namespaces + +--- + +## Requirements + +### Functional Requirements + +- **FR-001**: A new standalone library project named `WebSpark.Recipe` MUST be created containing all recipe-domain entities, models, DTOs, interfaces, service implementations, helpers, and a new `RecipeDbContext` +- **FR-002**: `WebSpark.Recipe` MUST have no project or package reference to `WebSpark.Core` (zero circular dependency risk) +- **FR-003**: `WebSpark.Recipe` package dependencies MUST be limited to: EF Core (for `RecipeDbContext` and entity annotations), and any packages strictly required by recipe service implementations; no general infrastructure packages (MailKit, Serilog, Markdig, ReverseMarkdown) +- **FR-004**: All public type names, property names, and method signatures in `WebSpark.Recipe` MUST match their originals in `WebSpark.Core` exactly to preserve calling-code compatibility; namespaces MUST be migrated from `WebSpark.Core.*` to `WebSpark.Recipe.*` (e.g., `WebSpark.Core.Data` → `WebSpark.Recipe.Data`, `WebSpark.Core.Models` → `WebSpark.Recipe.Models`); all consuming projects MUST update their `using` directives accordingly +- **FR-005**: `WebSpark.Core` MUST remove all recipe source files and have no project reference to `WebSpark.Recipe`; `WebSparkDbContext` MUST remove all recipe `DbSet` properties +- **FR-006**: A new `RecipeDbContext` MUST be created in `WebSpark.Recipe` defining `DbSet` properties for `Recipe`, `RecipeCategory`, `RecipeComment`, and `RecipeImage` +- **FR-007**: EF Core migrations for recipe entities MUST be established under `RecipeDbContext` in `WebSpark.Recipe`; existing recipe schema on live databases MUST be preserved without data loss +- **FR-008**: `WebSpark.RecipeCookbook` MUST be updated to reference `WebSpark.Recipe` directly; 
its `WebSpark.Core` project reference MUST be removed if recipe types were its only reason for that dependency +- **FR-009**: `PromptSpark.Domain` and `PromptSpark.Recipe.Console` MUST be updated to reference `WebSpark.Recipe` directly; `WebSpark.Core` reference removed where it was only needed for recipe types +- **FR-010**: `WebSpark.Portal` MUST reference `WebSpark.Recipe` directly and register `RecipeDbContext` in its DI configuration alongside `WebSparkDbContext` +- **FR-011**: The full solution MUST build with zero errors and zero warnings after migration +- **FR-012**: `RecipeOld.cs` MUST be deleted — it is confirmed dead code and must not be carried into `WebSpark.Recipe`; during implementation a usage scan MUST verify zero references before deletion; if any active reference is found the implementer must flag it before proceeding +- **FR-013**: `RecipeVM` MUST be moved to `WebSpark.Portal` (not `WebSpark.Recipe`) — it is a presentation-layer type and its inheritance from a `WebSpark.Core` base view model is appropriate within the portal project; `WebSpark.Recipe` MUST NOT contain any presentation-layer types +- **FR-014**: The new `WebSpark.Recipe` project MUST follow the template-driven configuration standard — starting from the established `csproj.template` pattern used in the solution +- **FR-015**: `WebSpark.Recipe.csproj` MUST be configured for NuGet publication from day one — including `PackageId`, `Version`, `Description`, `Authors`, and `RepositoryUrl` metadata; versioning MUST follow the same scheme used by other WebSpark packages in the solution + +### Key Entities + +- **Recipe**: Core recipe entity with title, description, keywords, domain association, category association; mapped to database via EF Core +- **RecipeCategory**: Taxonomy entity for organizing recipes by type/category; domain-scoped +- **RecipeComment**: User-contributed comment on a recipe; linked to a specific Recipe +- **RecipeImage**: Media asset associated with a recipe; 
stores image metadata and path +- **RecipeDbContext**: New dedicated EF Core `DbContext` for recipe domain data; owns all recipe `DbSet` properties and migrations +- **RecipeModel**: Business-layer representation of a recipe, used by service and presentation layers +- **RecipeCategoryModel**: Business-layer representation of a category +- **RecipeImageModel**: Business-layer representation of an image asset +- **RecipeEditModel**: DTO for create/update operations on a recipe +- **RecipeCategoryEditModel**: DTO for create/update operations on a category +- **RecipeVM**: Presentation-layer view model; MOVED TO `WebSpark.Portal` (not `WebSpark.Recipe`) — inherits from a `WebSpark.Core` base view model and is a portal-specific presentation concern +- **IRecipeService**: Service contract for recipe CRUD and query operations; `GetRecipeVMHostAsync` is intentionally removed from this interface — it was a presentation-layer concern; Portal callers must be migrated to construct `RecipeVM` directly using `Get()` and `GetRecipeCategoryList()` (see T047a) +- **IRecipeImageService**: Service contract for recipe image management +- **RecipeProvider**: Implementation of `IRecipeService`; uses `RecipeDbContext` +- **RecipeImageService**: Implementation of `IRecipeImageService`; uses `RecipeDbContext` +- **RecipeHelper**: Static utility methods for recipe data transformation + +--- + +## Clarifications + +### Session 2026-05-01 + +- Q: Where should the service implementations (`RecipeProvider`, `RecipeImageService`) live after extraction? → A: All recipe code including implementations and a new dedicated `RecipeDbContext` moves to `WebSpark.Recipe` for full standalone separation; `WebSpark.Core` retains no recipe code or reference to `WebSpark.Recipe` +- Q: Should namespaces in `WebSpark.Recipe` change from `WebSpark.Core.*` to `WebSpark.Recipe.*`? 
→ A: Yes — migrate to `WebSpark.Recipe.*` namespaces (e.g., `WebSpark.Recipe.Data`, `WebSpark.Recipe.Models`, `WebSpark.Recipe.Interfaces`); all consuming projects update their `using` directives as part of the migration +- Q: How should `RecipeVM`'s dependency on a `WebSpark.Core` base view model be resolved? → A: Move `RecipeVM` to `WebSpark.Portal` — it is a presentation-layer type; `WebSpark.Recipe` must not contain presentation-layer types +- Q: Should `RecipeOld.cs` be deleted or carried into `WebSpark.Recipe` with `[Obsolete]`? → A: Delete — it is dead code; implementation must run a usage scan to confirm zero references before deletion +- Q: Should `WebSpark.Recipe` be configured for NuGet publication or remain a local project reference? → A: Configure for NuGet publication from day one — `PackageId`, `Version`, `Description`, `Authors`, `RepositoryUrl` in the `.csproj`; version follows existing WebSpark package versioning scheme + +--- + +## Success Criteria + +### Measurable Outcomes + +- **SC-001**: The full WebSpark solution builds with zero errors and zero warnings after extraction and all reference updates are applied +- **SC-002**: `WebSpark.Recipe` contains only EF Core and recipe-specific package dependencies — no MailKit, Serilog, Markdig, ReverseMarkdown, or other `WebSpark.Core` infrastructure packages +- **SC-003**: `WebSpark.RecipeCookbook` builds in isolation referencing only `WebSpark.Recipe` (and its own PDF-generation dependencies) with no `WebSpark.Core` in the dependency graph +- **SC-004**: All recipe database migrations apply without error on a fresh database using the new `RecipeDbContext`; existing recipe data on live databases is preserved without schema drift +- **SC-005**: A source-level search for "Recipe" in `WebSpark.Core` returns zero results (excluding solution/project file metadata) +- **SC-006**: Zero regression in existing recipe functionality verified by running all tests in 
`tests\WebSpark.Portal.Tests\Areas\RecipeSpark\` with a pass rate of 100%; the test suite MUST include at least one integration test that exercises `RecipeProvider` with `RecipeDbContext` directly (not just `RecipeChatApiController` tests from Spec 005) to validate the extraction path itself — see T069a diff --git a/specs/006-extract-recipe-library/tasks.md b/specs/006-extract-recipe-library/tasks.md new file mode 100644 index 00000000..ea2d9db9 --- /dev/null +++ b/specs/006-extract-recipe-library/tasks.md @@ -0,0 +1,272 @@ +# Tasks: Extract WebSpark.Recipe Core Library + +**Input**: Design documents from `specs/006-extract-recipe-library/` +**Prerequisites**: plan.md, spec.md, research.md, data-model.md, quickstart.md + +## Rationale Summary + +### Core Problem + +Recipe-domain code embedded in `WebSpark.Core` forces all consumers to take a dependency on a general-purpose infrastructure library. This bloats transitive dependencies and prevents independent versioning of the recipe domain. + +### Decision Summary + +Create `WebSpark.Recipe` as a standalone NuGet-publishable library with its own `RecipeDbContext`. Remove all recipe code from `WebSpark.Core`. Update four consuming projects to reference `WebSpark.Recipe` directly. Add `RecipeMenuAdapter` in `WebSpark.Portal` to preserve `IMenuProvider` without creating circular dependencies. + +### Key Drivers + +- Full recipe domain independence from `WebSpark.Core` +- `WebSpark.RecipeCookbook` and PromptSpark projects achieve lean dependencies +- `RecipeDbContext` provides isolated recipe data access + +### Reviewer Guidance + +Verify zero residual recipe code in `WebSpark.Core`; verify `RecipeMenuAdapter` correctly adapts `IRecipeService` to `IMenuProvider`; verify all four consuming projects build without `WebSpark.Core` as the recipe source. + +## Format: `[ID] [P?] 
[Story] Description` + +- **[P]**: Can run in parallel (different files, no dependencies) +- **[Story]**: User story label (US1–US4) + +--- + +## Phase 1: Setup (Project Initialization) + +**Purpose**: Create the `WebSpark.Recipe` project scaffold and add it to the solution. + +- [ ] T001 Create `WebSpark.Recipe` directory at `c:\WebSpark\WebSpark.Recipe\`; record `WebSpark.Core` baseline package count via `dotnet list WebSpark.Core/WebSpark.Core.csproj package --include-transitive | wc -l` and save to `specs/006-extract-recipe-library/gates/baseline-package-count.txt` (used for SC-002 comparison in T067) +- [ ] T002 Create `WebSpark.Recipe.csproj` targeting `net10.0` with EF Core 10.0.7 packages, NuGet metadata (PackageId, Version 9.0.0, Authors, Description, GeneratePackageOnBuild), Nullable enabled, ImplicitUsings enabled +- [ ] T003 Add `WebSpark.Recipe` project to `WebSpark.sln` under the Libraries solution folder +- [ ] T004 [P] Create subdirectory structure: `Constants/`, `Data/`, `Helpers/`, `Interfaces/`, `Migrations/`, `Models/EditModels/`, `Providers/` +- [ ] T005 Verify `dotnet build WebSpark.Recipe` succeeds on empty project + +**Checkpoint**: Empty `WebSpark.Recipe` project exists in solution and builds. + +--- + +## Phase 2: Foundational (Blocking Prerequisites) + +**Purpose**: Establish the base types and `RecipeDbContext` that all user stories depend on. No user story work can begin until this phase is complete. 
+ +- [ ] T006 Create `WebSpark.Recipe.Data.RecipeBaseEntity` in `WebSpark.Recipe/Data/RecipeBaseEntity.cs` with properties: `Id` (int PK), `CreatedDate`, `UpdatedDate`, `CreatedID`, `UpdatedID` +- [ ] T007 [P] Create `WebSpark.Recipe.Constants.RecipeConstants` in `WebSpark.Recipe/Constants/RecipeConstants.cs` with `INT_MOM_DomainId = 2` +- [ ] T008 [P] Create `WebSpark.Recipe.Helpers.RecipeUrlHelper` in `WebSpark.Recipe/Helpers/RecipeUrlHelper.cs` with static methods: `GetSafePath(string)`, `GetRecipeURL(string)`, `GetRecipeCategoryURL(string)` — copy logic from `WebSpark.Core.Infrastructure.FormatHelper` +- [ ] T009 [P] Create `WebSpark.Recipe.Data.Recipe` entity in `WebSpark.Recipe/Data/Recipe.cs` inheriting `RecipeBaseEntity` with all properties per data-model.md; `DomainId` as nullable int FK only (no navigation to WebSite) +- [ ] T010 [P] Create `WebSpark.Recipe.Data.RecipeCategory` entity in `WebSpark.Recipe/Data/RecipeCategory.cs` +- [ ] T011 [P] Create `WebSpark.Recipe.Data.RecipeComment` entity in `WebSpark.Recipe/Data/RecipeComment.cs` +- [ ] T012 [P] Create `WebSpark.Recipe.Data.RecipeImage` entity in `WebSpark.Recipe/Data/RecipeImage.cs` +- [ ] T013 Create `WebSpark.Recipe.Data.RecipeDbContext` in `WebSpark.Recipe/Data/RecipeDbContext.cs` with `DbSet<Recipe>`, `DbSet<RecipeCategory>`, `DbSet<RecipeComment>`, `DbSet<RecipeImage>`; configure `OnModelCreating` with all constraints per data-model.md; set `MigrationsAssembly("WebSpark.Recipe")` +- [ ] T014 Create `WebSpark.Recipe.Data.RecipeDbContextFactory` in `WebSpark.Recipe/Data/RecipeDbContextFactory.cs` implementing `IDesignTimeDbContextFactory<RecipeDbContext>` for EF tooling +- [ ] T015 Run `dotnet ef migrations add InitialCreate --context RecipeDbContext --project WebSpark.Recipe --startup-project WebSpark.Portal` to generate the initial migration +- [ ] T016 Verify `dotnet build WebSpark.Recipe` succeeds with all foundational types in place + +**Checkpoint**: `RecipeDbContext` exists with all entities and initial migration generated. 
+ +--- + +## Phase 3: User Story 1 — Complete WebSpark.Recipe Library (Priority: P1) + +**Goal**: `WebSpark.Recipe` is a fully functional, self-contained recipe domain library that compiles and works without any `WebSpark.Core` reference. + +**Independent Test**: A new console application referencing only `WebSpark.Recipe` compiles and can instantiate `RecipeModel`, `RecipeDbContext`, and `IRecipeService` without errors. + +### Implementation for User Story 1 + +- [ ] T017 [P] [US1] Create `WebSpark.Recipe.Models.RecipeOptionModel` in `WebSpark.Recipe/Models/RecipeOptionModel.cs` — replaces `LookupModel` for recipe dropdown use; properties: `Value`, `Text`, `IsDefault`, `IsSelected` +- [ ] T018 [P] [US1] Create `WebSpark.Recipe.Models.RecipeImageModel` in `WebSpark.Recipe/Models/RecipeImageModel.cs` +- [ ] T019 [P] [US1] Create `WebSpark.Recipe.Models.RecipeCategoryModel` in `WebSpark.Recipe/Models/RecipeCategoryModel.cs`; `Url` property generated via `RecipeUrlHelper.GetRecipeCategoryURL` +- [ ] T020 [US1] Create `WebSpark.Recipe.Models.RecipeModel` in `WebSpark.Recipe/Models/RecipeModel.cs`; `RecipeURL` via `RecipeUrlHelper.GetRecipeURL`; `RecipeCategories` is `List<RecipeCategoryModel>` (not `LookupModel`) +- [ ] T021 [P] [US1] Create `WebSpark.Recipe.Models.EditModels.RecipeCategoryEditModel` in `WebSpark.Recipe/Models/EditModels/RecipeCategoryEditModel.cs` +- [ ] T022 [US1] Create `WebSpark.Recipe.Models.EditModels.RecipeEditModel` in `WebSpark.Recipe/Models/EditModels/RecipeEditModel.cs` +- [ ] T023 [P] [US1] Create `WebSpark.Recipe.Interfaces.IRecipeImageService` in `WebSpark.Recipe/Interfaces/IRecipeImageService.cs` (moved from `WebSpark.Core.Providers`) +- [ ] T024 [US1] Create `WebSpark.Recipe.Interfaces.IRecipeService` in `WebSpark.Recipe/Interfaces/IRecipeService.cs`; remove `GetRecipeVMHostAsync` (presentation-layer method — caller migration is handled in T047a); update all model references to `WebSpark.Recipe.Models.*`; document the removal in an XML `<remarks>` comment on the 
interface noting callers must construct `RecipeVM` directly in the portal layer +- [ ] T025 [P] [US1] Create `WebSpark.Recipe.Helpers.RecipeHelper` in `WebSpark.Recipe/Helpers/RecipeHelper.cs`; update signature of `GetRecipe` to take `int domainId` instead of `WebSite domain`; run `grep -r "RecipeHelper.GetRecipe" c:\WebSpark --include="*.cs"` to enumerate all call sites; for each found: update call to pass `domain.Id` (or the available domain int) instead of the `WebSite` object — compile-verify each change before moving on +- [ ] T026 [US1] Create `WebSpark.Recipe.Providers.RecipeImageService` in `WebSpark.Recipe/Providers/RecipeImageService.cs` implementing `IRecipeImageService`; inject `RecipeDbContext`; update all `using` directives to `WebSpark.Recipe.*` +- [ ] T027 [US1] Create `WebSpark.Recipe.Providers.RecipeProvider` in `WebSpark.Recipe/Providers/RecipeProvider.cs` implementing `IRecipeService` (NOT `IMenuProvider`); inject `RecipeDbContext`; replace `FormatHelper` calls with `RecipeUrlHelper`; replace `LookupModel` with `RecipeOptionModel`; remove all `WebSpark.Core.*` usings; remove menu methods (`GetMenuItem`, `GetMenuItemAsync`, `GetSiteMenu`, `GetAllMenuItems`) +- [ ] T028 [US1] Verify `WebSpark.Recipe` has zero `using WebSpark.Core` directives: `grep -r "using WebSpark.Core" WebSpark.Recipe/ --include="*.cs"`; fix any found +- [ ] T029 [US1] Run `dotnet build WebSpark.Recipe` — confirm zero errors, zero warnings + +**Checkpoint**: `WebSpark.Recipe` builds in complete isolation from `WebSpark.Core`. + +--- + +## Phase 4: User Story 2 — WebSpark.Core Fully Clean (Priority: P1) + +**Goal**: All recipe code removed from `WebSpark.Core`; `WebSparkDbContext` has no recipe DbSets; `WebSpark.Portal` updated and builds with `RecipeDbContext`. + +**Independent Test**: `WebSpark.Core` builds; a search for "Recipe" in `WebSpark.Core/Data/`, `Models/`, `Interfaces/`, `Providers/`, `Helpers/` returns zero results. 
+ +### Implementation for User Story 2 + +- [ ] T030 [US2] Run `RecipeOld.cs` usage scan: `grep -r "RecipeOld" c:\WebSpark --include="*.cs"` — if zero results, delete `WebSpark.Core/Data/RecipeOld.cs`; if results found, stop and flag for manual review +- [ ] T031 [P] [US2] Delete `WebSpark.Core/Data/Recipe.cs` +- [ ] T032 [P] [US2] Delete `WebSpark.Core/Data/RecipeCategory.cs` +- [ ] T033 [P] [US2] Delete `WebSpark.Core/Data/RecipeComment.cs` +- [ ] T034 [P] [US2] Delete `WebSpark.Core/Data/RecipeImage.cs` +- [ ] T035 [P] [US2] Delete `WebSpark.Core/Interfaces/IRecipeService.cs` +- [ ] T036 [P] [US2] Delete `WebSpark.Core/Providers/RecipeProvider.cs` +- [ ] T037 [P] [US2] Delete `WebSpark.Core/Providers/RecipeImageService.cs` +- [ ] T038 [P] [US2] Delete `WebSpark.Core/Helpers/RecipeHelper.cs` +- [ ] T039 [P] [US2] Delete `WebSpark.Core/Models/RecipeModel.cs` +- [ ] T040 [P] [US2] Delete `WebSpark.Core/Models/RecipeCategoryModel.cs` +- [ ] T041 [P] [US2] Delete `WebSpark.Core/Models/RecipeImageModel.cs` +- [ ] T042 [P] [US2] Delete `WebSpark.Core/Models/RecipeConstants.cs` +- [ ] T043 [P] [US2] Delete `WebSpark.Core/Models/EditModels/RecipeEditModel.cs` +- [ ] T044 [P] [US2] Delete `WebSpark.Core/Models/EditModels/RecipeCategoryEditModel.cs` +- [ ] T045 [US2] Remove recipe `DbSet` properties (`Recipe`, `RecipeCategory`, `RecipeComment`, `RecipeImage`) from `WebSpark.Core/Data/WebSparkDbContext.cs`; remove recipe `OnModelCreating` configuration blocks; verify no remaining recipe `using` directives +- [ ] T046 [US2] Run `dotnet build WebSpark.Core` — confirm zero errors, zero warnings +- [ ] T047 [US2] Move `WebSpark.Core/Models/ViewModels/RecipeVM.cs` to `WebSpark.Portal/Areas/RecipeSpark/Models/RecipeVM.cs`; update namespace to `WebSpark.Portal.Areas.RecipeSpark.Models`; update any Portal references to `RecipeVM` +- [ ] T047a [US2] Migrate all Portal callers of the removed `GetRecipeVMHostAsync`: run `grep -r "GetRecipeVMHostAsync" c:\WebSpark\WebSpark.Portal 
--include="*.cs"` — for each call site, replace with direct construction of `RecipeVM` using `IRecipeService.Get()` and `IRecipeService.GetRecipeCategoryList()` calls (the same data `GetRecipeVMHostAsync` was assembling); compile-verify each change; confirm zero remaining `GetRecipeVMHostAsync` references after T047a is complete +- [ ] T048 [US2] Create `WebSpark.Portal/Areas/RecipeSpark/Services/RecipeMenuAdapter.cs` implementing `IMenuProvider` via `IRecipeService` dependency injection; implement all five `IMenuProvider` methods by delegating to `IRecipeService`; namespace `WebSpark.Portal.Areas.RecipeSpark.Services` +- [ ] T049 [US2] Add `<ProjectReference Include="..\WebSpark.Recipe\WebSpark.Recipe.csproj" />` to `WebSpark.Portal/WebSpark.Portal.csproj` +- [ ] T050 [US2] Update `WebSpark.Portal/Program.cs`: (a) register `RecipeDbContext` with `RecipeConnection` string and `MigrationsAssembly("WebSpark.Recipe")`; (b) search for any existing `IMenuProvider` or `RecipeProvider` registration — REMOVE it before adding the new one (do not append a second registration); (c) register `IRecipeService → RecipeProvider`; (d) register `IRecipeImageService → RecipeImageService`; (e) register `IMenuProvider → RecipeMenuAdapter`; (f) add health check: `builder.Services.AddHealthChecks().AddDbContextCheck<RecipeDbContext>("recipe-db")` +- [ ] T050a [US2] Validate DI registrations: after T050, run `dotnet build WebSpark.Portal` and start the portal; verify startup logs show no duplicate service registration warnings; verify exactly one `IMenuProvider` registration by checking DI debug output or adding a temporary diagnostic; confirm `/health` endpoint returns healthy for `recipe-db` check +- [ ] T051 [US2] Add `RecipeConnection` connection string to ALL Portal configuration files: `appsettings.json` (placeholder value), `appsettings.Development.json` (local SQLite path `Data Source=Data/webspark-recipe.db`), `appsettings.Production.json` (Azure Key Vault reference placeholder); add a code comment in Program.cs noting that production value must be sourced from Key 
Vault per constitution Azure-First standard +- [ ] T052 [US2] Update all `using WebSpark.Core.*` recipe directives across `WebSpark.Portal` to `using WebSpark.Recipe.*`; use IDE find/replace: `WebSpark.Core.Data` (recipe types) → `WebSpark.Recipe.Data`; `WebSpark.Core.Models` (recipe models) → `WebSpark.Recipe.Models`; `WebSpark.Core.Interfaces` (recipe interfaces) → `WebSpark.Recipe.Interfaces` +- [ ] T053 [US2] Run `dotnet build WebSpark.Portal` — confirm zero errors, zero warnings +- [ ] T054 [US2] Verify empty migration check: `dotnet ef migrations add VerifyCleanup --context WebSparkDbContext --startup-project WebSpark.Portal`; if migration is empty, remove it; if non-empty, review before proceeding + +**Checkpoint**: `WebSpark.Core` has zero recipe code; `WebSpark.Portal` builds and registers `RecipeDbContext`. + +--- + +## Phase 5: User Story 3 — WebSpark.RecipeCookbook Migration (Priority: P2) + +**Goal**: `WebSpark.RecipeCookbook` references `WebSpark.Recipe` directly and builds without `WebSpark.Core`. + +**Independent Test**: `WebSpark.RecipeCookbook` builds in isolation with only `WebSpark.Recipe` as its WebSpark dependency. + +### Implementation for User Story 3 + +- [ ] T055 [US3] Add `<ProjectReference Include="..\WebSpark.Recipe\WebSpark.Recipe.csproj" />` to `WebSpark.RecipeCoookbook/WebSpark.RecipeCookbook.csproj` +- [ ] T056 [US3] Remove `<ProjectReference Include="..\WebSpark.Core\WebSpark.Core.csproj" />` from `WebSpark.RecipeCoookbook/WebSpark.RecipeCookbook.csproj` (verify recipe types were the only reason for it; if other types from WebSpark.Core are used, keep the reference and flag for review) +- [ ] T057 [US3] Update all `using WebSpark.Core.*` recipe directives in `WebSpark.RecipeCoookbook/*.cs` to `using WebSpark.Recipe.*` +- [ ] T058 [US3] Run `dotnet build WebSpark.RecipeCoookbook` — confirm zero errors, zero warnings + +**Checkpoint**: `WebSpark.RecipeCookbook` builds with `WebSpark.Recipe` as its sole WebSpark dependency. 
+ +--- + +## Phase 6: User Story 4 — PromptSpark Projects Migration (Priority: P2) + +**Goal**: `PromptSpark.Domain` and `PromptSpark.Recipe.Console` reference `WebSpark.Recipe` directly. + +**Independent Test**: Both projects build with `WebSpark.Recipe` as their recipe dependency. + +### Implementation for User Story 4 + +- [ ] T059 [P] [US4] Add `WebSpark.Recipe` project reference to `PromptSpark.Domain/PromptSpark.Domain.csproj` +- [ ] T060 [P] [US4] Add `WebSpark.Recipe` project reference to `PromptSpark.Recipe.Console/PromptSpark.Recipe.Console.csproj` +- [ ] T061 [P] [US4] Update `PromptSpark.Domain/*.cs`: replace `using WebSpark.Core.*` recipe directives with `using WebSpark.Recipe.*`; THEN run `dotnet build PromptSpark.Domain` with `WebSpark.Core` reference STILL present — if it succeeds, THEN remove the `WebSpark.Core` project reference and run `dotnet build PromptSpark.Domain` again; if the second build succeeds, the removal is safe; if it fails, keep `WebSpark.Core` reference (document in task notes which non-recipe types require it) +- [ ] T062 [P] [US4] Update `PromptSpark.Recipe.Console/*.cs`: replace `using WebSpark.Core.*` recipe directives with `using WebSpark.Recipe.*`; remove `WebSpark.Core` project reference if recipe types were its only use +- [ ] T063 [US4] Run `dotnet build PromptSpark.Domain` — confirm zero errors, zero warnings +- [ ] T064 [US4] Run `dotnet build PromptSpark.Recipe.Console` — confirm zero errors, zero warnings + +**Checkpoint**: Both PromptSpark projects build with `WebSpark.Recipe` as the recipe source. + +--- + +## Phase 7: Polish and Cross-Cutting Concerns + +**Purpose**: Full solution validation, migration deployment, NuGet packaging verification, and quickstart sign-off. 
+ +- [ ] T065 [P] Run full solution build: `dotnet build c:\WebSpark\WebSpark.sln` — confirm zero errors, zero warnings (SC-001) +- [ ] T066 Apply `RecipeDbContext` migration — two paths: (a) **New database**: run `dotnet ef database update --context RecipeDbContext --startup-project WebSpark.Portal`; (b) **Existing database** (recipe tables already exist): FIRST run `dotnet ef migrations script --context RecipeDbContext --idempotent --startup-project WebSpark.Portal --output specs/006-extract-recipe-library/gates/recipe-schema-expected.sql` to generate expected DDL; THEN open the actual SQLite database and export recipe table schemas via `sqlite3 Data/webspark.db ".schema Recipe" ".schema RecipeCategory" ".schema RecipeComment" ".schema RecipeImage"` and compare against the generated DDL; if schemas match, insert the history row: `INSERT INTO __EFMigrationsHistory (MigrationId, ProductVersion) VALUES ('<timestamp>_InitialCreate', '10.0.7')` (use the actual timestamp prefix of the migration generated in T015); if schemas differ, DO NOT insert — instead document the diff and create a compensating migration before proceeding; save schema comparison result to `specs/006-extract-recipe-library/gates/migration-verification.txt` +- [ ] T067 Verify `WebSpark.Recipe` dependency list: run `dotnet list WebSpark.Recipe/WebSpark.Recipe.csproj package --include-transitive`; confirm no MailKit, Serilog, Markdig, or ReverseMarkdown entries; read baseline count from `specs/006-extract-recipe-library/gates/baseline-package-count.txt` (recorded in T001); count `WebSpark.Recipe` packages; confirm `WebSpark.Recipe` package count is at least 50% less than baseline (SC-002); document both counts in `specs/006-extract-recipe-library/gates/package-count-comparison.txt` +- [ ] T068 Verify `WebSpark.RecipeCookbook` dependency graph: `dotnet list WebSpark.RecipeCoookbook/WebSpark.RecipeCookbook.csproj reference` — confirm `WebSpark.Core` absent (SC-003) +- [ ] T069a Add integration smoke test for `RecipeProvider` + `RecipeDbContext`: in 
`tests\WebSpark.Portal.Tests\Areas\RecipeSpark\` create `RecipeProviderIntegrationTests.cs` with two tests — (1) given a seeded `RecipeDbContext` in-memory SQLite, `RecipeProvider.Get()` returns a non-empty list; (2) `RecipeProvider.Save(new RecipeModel {...})` then `RecipeProvider.Get(id)` returns the saved model — these tests validate the extraction path (new `RecipeDbContext` wired to `RecipeProvider`) and satisfy SC-006's requirement that the extraction is regression-free +- [ ] T069 Run ALL recipe-specific tests including T069a: `dotnet test tests\WebSpark.Portal.Tests\WebSpark.Portal.Tests.csproj --filter "FullyQualifiedName~RecipeSpark"` — confirm 100% pass rate; note which tests exercise `RecipeProvider + RecipeDbContext` vs `RecipeChatApiController` (SC-006) +- [ ] T070 Confirm zero recipe code in WebSpark.Core: `grep -r "Recipe" c:\WebSpark\WebSpark.Core\Data\ c:\WebSpark\WebSpark.Core\Models\ c:\WebSpark\WebSpark.Core\Interfaces\ c:\WebSpark\WebSpark.Core\Providers\ c:\WebSpark\WebSpark.Core\Helpers\ --include="*.cs"` — expect zero results (SC-005) +- [ ] T071 [P] Run `quickstart.md` validation checklist — complete all 9 steps; document any failures +- [ ] T072 Verify `WebSpark.Recipe.nupkg` is generated at expected output path after `dotnet pack WebSpark.Recipe/WebSpark.Recipe.csproj` +- [ ] T073 Document rollback procedure in `specs/006-extract-recipe-library/gates/rollback-runbook.md`: (1) if failure occurs BEFORE Phase 4 deletion tasks — simply discard the branch; all Core files untouched; (2) if failure occurs DURING Phase 4 (mid-deletion) — run `git restore WebSpark.Core/` to recover deleted Core files; re-remove `WebSpark.Recipe` project reference from all consumers; (3) if failure occurs AFTER migration step T066 on existing DB — run the inverse SQL: `DELETE FROM __EFMigrationsHistory WHERE MigrationId LIKE '%InitialCreate%'`; document the exact git commands and SQL for each scenario; the feature branch isolation (already in place) is the primary 
safety net — do NOT merge to main until T065 passes + +--- + +## Dependencies and Execution Order + +### Phase Dependencies + +- **Phase 1 (Setup)**: No dependencies — start immediately +- **Phase 2 (Foundational)**: Depends on Phase 1 — BLOCKS Phases 3–6 +- **Phase 3 (US1 — WebSpark.Recipe complete)**: Depends on Phase 2 — BLOCKS Phases 4–6 +- **Phase 4 (US2 — WebSpark.Core clean)**: Depends on Phase 3 (recipe types must exist before removing from Core) +- **Phase 5 (US3 — RecipeCookbook)**: Depends on Phase 3; can run in parallel with Phase 4 +- **Phase 6 (US4 — PromptSpark)**: Depends on Phase 3; can run in parallel with Phases 4 and 5 +- **Phase 7 (Polish)**: Depends on Phases 4, 5, 6 all complete + +### User Story Dependencies + +- **US1 (P1)**: Foundational phase complete → implements entire WebSpark.Recipe library +- **US2 (P1)**: US1 complete → removes recipe from Core and updates Portal +- **US3 (P2)**: US1 complete → updates RecipeCookbook; independent of US2 +- **US4 (P2)**: US1 complete → updates PromptSpark projects; independent of US2 and US3 + +### Within Each Phase + +- All tasks marked [P] within a phase can run in parallel +- Entity tasks (T009–T012) can all run in parallel +- Model tasks (T017–T022) depend on entities being complete (T006–T012) +- Provider tasks (T026–T027) depend on interfaces and models being complete + +### Parallel Opportunities + +```bash +# Phase 2 parallel group (after T006-T007 complete): +T009: Create Recipe entity +T010: Create RecipeCategory entity +T011: Create RecipeComment entity +T012: Create RecipeImage entity +T007: Create RecipeConstants +T008: Create RecipeUrlHelper + +# Phase 3 parallel group (after T016): +T017: RecipeOptionModel +T018: RecipeImageModel +T019: RecipeCategoryModel (then T020 RecipeModel after T017-T019) +T023: IRecipeImageService +T025: RecipeHelper + +# Phase 4 delete group (after T030 usage scan): +T031–T044: Delete recipe files (all parallel) +``` + +--- + +## Implementation Strategy + +### 
MVP First (User Story 1 Only) + +1. Complete Phase 1: Setup +2. Complete Phase 2: Foundational +3. Complete Phase 3: User Story 1 (WebSpark.Recipe complete) +4. **STOP and VALIDATE**: Verify `WebSpark.Recipe` builds in isolation +5. Proceed to Phase 4 (Core cleanup) with confidence + +### Incremental Delivery + +1. Setup + Foundational → `WebSpark.Recipe` skeleton ready +2. US1 → `WebSpark.Recipe` fully functional independently +3. US2 → `WebSpark.Core` clean; `WebSpark.Portal` updated +4. US3 → `WebSpark.RecipeCookbook` migrated +5. US4 → PromptSpark migrated +6. Polish → full solution validated; NuGet package ready + +--- + +## Metrics Summary + +| Item | Count | +|------|-------| +| Total tasks | 78 | +| Phase 1 (Setup) | 5 | +| Phase 2 (Foundational) | 11 | +| Phase 3 (US1) | 13 | +| Phase 4 (US2) | 28 (+T047a, T050a, expanded T050/T051) | +| Phase 5 (US3) | 4 | +| Phase 6 (US4) | 6 | +| Phase 7 (Polish) | 11 (+T069a, T073, expanded T066/T067) | +| Parallelizable tasks [P] | 38 | +| Issues resolved | 14/14 | + +**MVP scope**: Phases 1–3 (29 tasks) — delivers a complete, independently-buildable `WebSpark.Recipe` library. 
diff --git a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Controllers/RecipeChatApiControllerTests.cs b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Controllers/RecipeChatApiControllerTests.cs index 2df1b1af..5ddb470c 100644 --- a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Controllers/RecipeChatApiControllerTests.cs +++ b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Controllers/RecipeChatApiControllerTests.cs @@ -9,8 +9,8 @@ using WebSpark.Portal.Areas.RecipeSpark.Data; using WebSpark.Portal.Areas.RecipeSpark.Data.Entities; using WebSpark.Portal.Areas.RecipeSpark.Models; -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Tests.Areas.RecipeSpark.Controllers; diff --git a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/ChatServiceTests.cs b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/ChatServiceTests.cs index 464c04c7..65fbf8c8 100644 --- a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/ChatServiceTests.cs +++ b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/ChatServiceTests.cs @@ -5,8 +5,8 @@ using WebSpark.Portal.Areas.RecipeSpark.Data; using WebSpark.Portal.Areas.RecipeSpark.Data.Entities; using WebSpark.Portal.Areas.RecipeSpark.Services; -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Tests.Areas.RecipeSpark.Services; diff --git a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/HintGenerationServiceTests.cs b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/HintGenerationServiceTests.cs index 2f42701a..6f38afc4 100644 --- a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/HintGenerationServiceTests.cs +++ b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/HintGenerationServiceTests.cs @@ -3,8 +3,8 @@ using WebSpark.Portal.Areas.RecipeSpark.Data; using 
WebSpark.Portal.Areas.RecipeSpark.Data.Entities; using WebSpark.Portal.Areas.RecipeSpark.Services; -using WebSpark.Core.Interfaces; -using WebSpark.Core.Models; +using WebSpark.Recipe.Interfaces; +using WebSpark.Recipe.Models; namespace WebSpark.Portal.Tests.Areas.RecipeSpark.Services; diff --git a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/RecipeChatLLMServiceTests.cs b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/RecipeChatLLMServiceTests.cs index 5da63f01..9012d67d 100644 --- a/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/RecipeChatLLMServiceTests.cs +++ b/tests/WebSpark.Portal.Tests/Areas/RecipeSpark/Services/RecipeChatLLMServiceTests.cs @@ -3,8 +3,8 @@ using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.ChatCompletion; using WebSpark.Portal.Areas.RecipeSpark.Services; -using WebSpark.Core.Models; -using WebSpark.Core.Interfaces; +using WebSpark.Recipe.Models; +using WebSpark.Recipe.Interfaces; namespace WebSpark.Portal.Tests.Areas.RecipeSpark.Services;