9 Commits

Author SHA1 Message Date
amadzarak 2badb5264b OPC # 0006: OPC Git Trunk-Based management 2026-04-26 14:40:05 -04:00
amadzarak bb0c6e08c7 OPC # 0006: OPC Git Trunk-Based management
Co-authored-by: Copilot <copilot@github.com>
2026-04-26 14:30:10 -04:00
amadzarak 5e969a2b3e OPC # 0006: OPC Git Trunk-Based management
Co-authored-by: Copilot <copilot@github.com>
2026-04-26 13:45:05 -04:00
amadzarak 571f0bf2a4 OPC # 0006: OPC Git Trunk-Based management 2026-04-26 13:14:06 -04:00
amadzarak b9f0f6dd5f OPC # 0007: Patch FDEV provisioning for local aspire development
Co-authored-by: Copilot <copilot@github.com>
2026-04-26 12:48:07 -04:00
amadzarak e8ac7b017c OPC # 0001: Diff viewer improvements 2026-04-26 11:54:50 -04:00
amadzarak 79c69e1363 OPC # 0006: OPC Git Trunk-Based management
Co-authored-by: Copilot <copilot@github.com>
2026-04-26 11:54:24 -04:00
amadzarak 553ea59d39 OPC # 0006: OPC Git Trunk-Based management 2026-04-26 11:32:23 -04:00
amadzarak 9ff1488bb5 OPC # 0006: OPC Git Trunk-Based management
Co-authored-by: Copilot <copilot@github.com>
2026-04-26 11:25:38 -04:00
37 changed files with 1676 additions and 786 deletions
@@ -1,5 +1,6 @@
using ControlPlane.Api.Services;
using ControlPlane.Core.Models;
using ControlPlane.Core.Services;
using System.Text.Json;
namespace ControlPlane.Api.Endpoints;
@@ -178,6 +179,27 @@ public static class PromotionEndpoints
}
});
// GET /api/promotions/build-gate?sha={sha}
// Returns the build-gate status for the given commit SHA.
// If status is "Red", the promote button in the UI should be disabled.
g.MapGet("/build-gate", async (string sha, BuildHistoryService history, CancellationToken ct) =>
{
    // Guard: an empty/whitespace SHA can never match a build record — fail fast
    // with a 400 instead of issuing a pointless query and reporting "Unknown".
    if (string.IsNullOrWhiteSpace(sha))
        return Results.BadRequest(new { error = "Query parameter 'sha' is required." });

    var builds = await history.GetBuildsByShaAsync(sha);

    // Several builds may exist for one SHA (retries); gate on the most recent one.
    var latest = builds.MaxBy(b => b.StartedAt);
    if (latest is null)
        return Results.Ok(new { status = "Unknown", sha, buildId = (string?)null, buildStatus = (string?)null });

    // Collapse the build status into the three-state gate the UI understands.
    var gateStatus = latest.Status switch
    {
        BuildStatus.Succeeded => "Green",
        BuildStatus.Failed => "Red",
        BuildStatus.Running => "Running",
        _ => "Unknown",
    };
    return Results.Ok(new { status = gateStatus, sha, buildId = latest.Id, buildStatus = latest.Status.ToString() });
});
return app;
}
}
+41
View File
@@ -126,7 +126,48 @@ await using (var cmd = ds.CreateCommand("""
CREATE INDEX IF NOT EXISTS ix_opc_artifact_opc_id ON opc_artifact(opc_id);
CREATE INDEX IF NOT EXISTS ix_opc_artifact_type ON opc_artifact(opc_id, artifact_type);
CREATE INDEX IF NOT EXISTS ix_opc_pinned_commit_opc_id ON opc_pinned_commit(opc_id);
-- Build + Release history
CREATE TABLE IF NOT EXISTS build_record (
id VARCHAR(8) PRIMARY KEY,
kind VARCHAR(20) NOT NULL,
target VARCHAR(500) NOT NULL,
status VARCHAR(20) NOT NULL DEFAULT 'Running',
started_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
finished_at TIMESTAMPTZ,
duration_ms INTEGER,
image_digest VARCHAR(200),
commit_sha VARCHAR(40),
log TEXT NOT NULL DEFAULT ''
);
CREATE TABLE IF NOT EXISTS release_record (
id VARCHAR(8) PRIMARY KEY,
environment VARCHAR(50) NOT NULL,
image_name VARCHAR(200) NOT NULL,
status VARCHAR(20) NOT NULL DEFAULT 'Running',
started_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
finished_at TIMESTAMPTZ
);
CREATE TABLE IF NOT EXISTS release_tenant_result (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
release_id VARCHAR(8) NOT NULL REFERENCES release_record(id) ON DELETE CASCADE,
subdomain VARCHAR(200) NOT NULL,
container_name VARCHAR(200) NOT NULL,
success BOOLEAN NOT NULL DEFAULT FALSE,
error TEXT
);
CREATE INDEX IF NOT EXISTS ix_build_record_started_at ON build_record(started_at DESC);
CREATE INDEX IF NOT EXISTS ix_build_record_kind ON build_record(kind);
CREATE INDEX IF NOT EXISTS ix_release_record_started_at ON release_record(started_at DESC);
CREATE INDEX IF NOT EXISTS ix_release_tenant_release_id ON release_tenant_result(release_id);
"""))
await cmd.ExecuteNonQueryAsync();
// Idempotent column additions for schema migrations
await using (var migCmd = ds.CreateCommand("""
ALTER TABLE release_record ADD COLUMN IF NOT EXISTS opc_numbers TEXT[] NOT NULL DEFAULT '{}';
ALTER TABLE release_record ADD COLUMN IF NOT EXISTS commit_sha VARCHAR(40);
"""))
await migCmd.ExecuteNonQueryAsync();
app.Run();
+156 -26
View File
@@ -52,6 +52,38 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
/// <summary>Committer/author signature used for all control-plane commits, stamped "now".</summary>
private static Signature MakeSig()
{
    return new Signature("OPC Control Plane", "opc@clarity.internal", DateTimeOffset.UtcNow);
}
// ── Remote URL (config-driven, never reads .git/config URL) ──────────────
/// <summary>
/// Builds the HTTPS remote URL for a named repo entirely from Gitea config.
/// The local clone's .git/config remote URL is irrelevant — this is the authority.
/// </summary>
private string GetRemoteUrl(string repoName)
{
    var root = config["Gitea:BaseUrl"]
        ?? throw new InvalidOperationException("Gitea:BaseUrl is not configured.");
    root = root.TrimEnd('/');

    // A per-repo owner override wins; otherwise fall back to the global owner.
    var owner = config[$"Gitea:Repos:{repoName}:Owner"]
        ?? config["Gitea:Owner"]
        ?? throw new InvalidOperationException($"Gitea owner not configured for '{repoName}'.");

    // The slug defaults to the logical repo name when no explicit mapping exists.
    var slug = config[$"Gitea:Repos:{repoName}:Repo"] ?? repoName;

    return $"{root}/{owner}/{slug}.git";
}
/// <summary>
/// Returns the 'origin' remote after normalising its URL to the config-driven HTTPS URL.
/// If the clone was checked out with SSH (e.g. on a dev machine), this corrects it silently
/// so that LibGit2Sharp — which has no SSH support — always uses HTTPS.
/// </summary>
private Remote EnsureRemote(Repository repo, string repoName)
{
    var expectedUrl = GetRemoteUrl(repoName);
    var origin = repo.Network.Remotes["origin"];

    // No origin configured at all — create it pointing at the config-driven URL.
    if (origin is null)
        return repo.Network.Remotes.Add("origin", expectedUrl);

    // Origin exists but points elsewhere (e.g. an SSH URL) — rewrite it in place.
    if (origin.Url != expectedUrl)
        repo.Network.Remotes.Update("origin", r => r.Url = expectedUrl);

    // Re-read so the returned Remote reflects any URL update made above.
    return repo.Network.Remotes["origin"]!;
}
// ── Branch status ────────────────────────────────────────────────────────
/// <summary>
@@ -72,13 +104,10 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
// Fetch to get up-to-date remote refs; swallow network errors so status still works offline.
try
{
var remote = repo.Network.Remotes["origin"];
if (remote is not null)
{
var remote = EnsureRemote(repo, repoName);
var refSpecs = remote.FetchRefSpecs.Select(r => r.Specification).ToList();
repo.Network.Fetch(remote.Name, refSpecs, MakeFetchOptions());
}
}
catch (Exception ex)
{
logger.LogWarning(ex, "Fetch during ladder status failed — continuing with cached refs");
@@ -91,7 +120,9 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
ct.ThrowIfCancellationRequested();
var branchName = Ladder[i];
var branch = repo.Branches[branchName];
// Always read from the remote tracking ref so the status reflects what is on origin,
// not the server's potentially-stale local branch pointer.
var branch = repo.Branches[$"origin/{branchName}"];
if (branch?.Tip is null)
{
@@ -110,7 +141,7 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
if (i + 1 < Ladder.Length)
{
var nextBranch = repo.Branches[Ladder[i + 1]];
var nextBranch = repo.Branches[$"origin/{Ladder[i + 1]}"];
if (nextBranch?.Tip is not null)
{
var div = repo.ObjectDatabase.CalculateHistoryDivergence(tip, nextBranch.Tip);
@@ -138,7 +169,7 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
}
result.Add(new BranchStatus(branchName, true, tip.Sha[..7], summary,
ahead, behind, unreleasedCommits));
ahead, behind, unreleasedCommits, tip.Sha));
}
return result;
@@ -215,14 +246,16 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
// 1. Fetch latest remote state for all branches
Log(" Fetching origin...");
var remote = repo.Network.Remotes["origin"]
?? throw new InvalidOperationException("No 'origin' remote configured.");
var remote = EnsureRemote(repo, repoName);
var refSpecs = remote.FetchRefSpecs.Select(r => r.Specification).ToList();
repo.Network.Fetch(remote.Name, refSpecs, MakeFetchOptions());
// 2. Resolve local branches
var fromBranch = repo.Branches[from]
?? throw new InvalidOperationException($"Branch '{from}' not found.");
// 2. Resolve branches — always read from origin/ so we reflect what is actually on the remote,
// never the server's potentially-stale local branch pointers.
var fromBranch = repo.Branches[$"origin/{from}"]
?? throw new InvalidOperationException($"Remote branch 'origin/{from}' not found.");
// `to` is read locally because we need to mutate its ref and push — it is immediately
// fast-forwarded to origin/{to} in the next step so it is never stale when used.
var toBranch = repo.Branches[to]
?? throw new InvalidOperationException($"Branch '{to}' not found.");
@@ -327,8 +360,7 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
try
{
var remote = repo.Network.Remotes["origin"]
?? throw new InvalidOperationException("No 'origin' remote.");
var remote = EnsureRemote(repo, repoName);
// Force push — "+" prefix overrides remote reflog
repo.Network.Push(remote, $"+refs/heads/{branchName}:refs/heads/{branchName}", MakePushOptions());
}
@@ -419,8 +451,7 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
// 1. Fetch
Log(" Fetching origin...");
var remote = repo.Network.Remotes["origin"]
?? throw new InvalidOperationException("No 'origin' remote configured.");
var remote = EnsureRemote(repo, repoName);
var refSpecs = remote.FetchRefSpecs.Select(r => r.Specification).ToList();
repo.Network.Fetch(remote.Name, refSpecs, MakeFetchOptions());
@@ -548,13 +579,10 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
// Fetch latest remote refs — swallow network errors so status still works offline.
try
{
var remote = repo.Network.Remotes["origin"];
if (remote is not null)
{
var remote = EnsureRemote(repo, repoName);
var refSpecs = remote.FetchRefSpecs.Select(r => r.Specification).ToList();
repo.Network.Fetch(remote.Name, refSpecs, MakeFetchOptions());
}
}
catch (Exception ex)
{
logger.LogWarning(ex, "Fetch during conformance check failed — continuing with cached refs");
@@ -566,12 +594,13 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
var branchName = Ladder[i];
var srcName = i > 0 ? Ladder[i - 1] : null; // predecessor branch (e.g. develop for staging)
var branch = repo.Branches[branchName];
// Always read from origin/ tracking refs — never local branch pointers.
var branch = repo.Branches[$"origin/{branchName}"];
// ── Branch missing ──────────────────────────────────────────────
if (branch?.Tip is null)
{
var srcTip = srcName is not null ? repo.Branches[srcName]?.Tip?.Sha : null;
var srcTip = srcName is not null ? repo.Branches[$"origin/{srcName}"]?.Tip?.Sha : null;
checks.Add(new BranchConformanceCheck(
branchName, srcName,
ConformanceViolation.Missing,
@@ -592,7 +621,7 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
continue;
}
var srcBranch = repo.Branches[srcName];
var srcBranch = repo.Branches[$"origin/{srcName}"];
if (srcBranch?.Tip is null)
{
// Source branch is itself missing — skip, it will be reported separately.
@@ -666,8 +695,7 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
repo.Refs.Add($"refs/heads/{branchName}", commit.Sha);
var remote = repo.Network.Remotes["origin"]
?? throw new InvalidOperationException("No 'origin' remote configured.");
var remote = EnsureRemote(repo, repoName);
repo.Network.Push(remote, $"refs/heads/{branchName}:refs/heads/{branchName}", MakePushOptions());
@@ -716,6 +744,107 @@ public class PromotionService(IConfiguration config, ILogger<PromotionService> l
try { return JsonSerializer.Deserialize<List<PromotionRequest>>(File.ReadAllText(HistoryPath), JsonOpts) ?? []; }
catch { return []; }
}
// ── OPC number extraction ─────────────────────────────────────────────
private static readonly System.Text.RegularExpressions.Regex OpcTagPattern =
new(@"OPC\s*#\s*(\d+)", System.Text.RegularExpressions.RegexOptions.IgnoreCase
| System.Text.RegularExpressions.RegexOptions.Compiled);
/// <summary>
/// Scans the most recent <paramref name="limit"/> commits on <paramref name="branch"/> and
/// returns a distinct, sorted list of OPC numbers referenced in commit messages (e.g. "OPC # 0042").
/// Safe to call when git is not configured — returns an empty list on any error.
/// </summary>
public Task<List<string>> ExtractOpcNumbersAsync(
    string repoName = "Clarity",
    string branch = "main",
    int limit = 50,
    CancellationToken ct = default)
{
    // LibGit2Sharp is synchronous; push the history walk onto the thread pool.
    return Task.Run(() => ExtractOpcNumbersCore(repoName, branch, limit), ct);
}
// Synchronous worker for ExtractOpcNumbersAsync: walks up to `limit` commits on the
// branch (local pointer first, then origin/ tracking ref) and collects OPC ticket tags.
// Returns [] when the repo path is unset/missing or any git error occurs.
private List<string> ExtractOpcNumbersCore(string repoName, string branch, int limit)
{
    var repoPath = GetRepoPath(repoName);
    if (string.IsNullOrWhiteSpace(repoPath) || !Directory.Exists(repoPath))
        return [];

    try
    {
        using var repo = new Repository(repoPath);

        // Prefer the local branch; fall back to the remote tracking ref.
        var target = repo.Branches[branch] ?? repo.Branches[$"origin/{branch}"];
        if (target is null)
            return [];

        var found = new HashSet<string>(StringComparer.Ordinal);
        foreach (var commit in target.Commits.Take(limit))
        {
            foreach (System.Text.RegularExpressions.Match m in OpcTagPattern.Matches(commit.Message))
            {
                // Normalise to a zero-padded four-digit ticket label, e.g. "OPC # 0042".
                found.Add($"OPC # {m.Groups[1].Value.PadLeft(4, '0')}");
            }
        }
        return [.. found.OrderBy(x => x)];
    }
    catch (Exception ex)
    {
        logger.LogWarning(ex, "ExtractOpcNumbers failed for {Repo}/{Branch}", repoName, branch);
        return [];
    }
}
/// <summary>
/// Returns distinct, sorted OPC numbers for commits reachable from <paramref name="toSha"/>
/// that are NOT reachable from <paramref name="fromSha"/> — i.e. the exact delta for this release.
/// Falls back to <see cref="ExtractOpcNumbersAsync"/> (last 50 commits) when <paramref name="fromSha"/>
/// is null (first-ever release for this environment).
/// </summary>
public Task<List<string>> ExtractOpcNumbersDeltaAsync(
    string repoName,
    string toSha,
    string? fromSha,
    CancellationToken ct = default)
{
    // No baseline SHA — first release for this environment; use the recent-commit scan.
    if (fromSha is null)
        return ExtractOpcNumbersAsync(repoName, ct: ct);

    return Task.Run(() => ExtractOpcNumbersDeltaCore(repoName, toSha, fromSha), ct);
}
// Synchronous worker for ExtractOpcNumbersDeltaAsync: collects OPC ticket tags from
// commits in fromSha..toSha (reachable from toSha, excluding those reachable from fromSha).
// Returns [] when the repo path is unset/missing, toSha is unknown, or any git error occurs.
private List<string> ExtractOpcNumbersDeltaCore(string repoName, string toSha, string fromSha)
{
    var repoPath = GetRepoPath(repoName);
    if (string.IsNullOrWhiteSpace(repoPath) || !Directory.Exists(repoPath))
        return [];
    try
    {
        using var repo = new Repository(repoPath);
        var toCommit = repo.Lookup<Commit>(toSha);
        var fromCommit = repo.Lookup<Commit>(fromSha);
        if (toCommit is null) return [];

        // If the baseline commit is unknown (e.g. history rewritten), fall back to
        // everything reachable from the target rather than failing the stamp entirely.
        var filter = fromCommit is null
            ? new CommitFilter { IncludeReachableFrom = toCommit }
            : new CommitFilter { IncludeReachableFrom = toCommit, ExcludeReachableFrom = fromCommit };

        var set = new HashSet<string>(StringComparer.Ordinal);
        foreach (var commit in repo.Commits.QueryBy(filter))
            foreach (System.Text.RegularExpressions.Match m in OpcTagPattern.Matches(commit.Message))
                set.Add($"OPC # {m.Groups[1].Value.PadLeft(4, '0')}");
        return [.. set.OrderBy(x => x)];
    }
    catch (Exception ex)
    {
        // BUG FIX: the previous log call sliced with fromSha[..7]/toSha[..7], which throws
        // ArgumentOutOfRangeException for SHAs shorter than 7 chars — turning the
        // "return [] on any error" contract into an escaping exception. Abbreviate safely.
        logger.LogWarning(ex, "ExtractOpcNumbersDelta failed for {Repo} {From}..{To}",
            repoName, Abbrev(fromSha), Abbrev(toSha));
        return [];
    }

    // Safe 7-char abbreviation for log output; tolerates short or empty SHAs.
    static string Abbrev(string sha) => sha.Length > 7 ? sha[..7] : sha;
}
/// <summary>Returns the full HEAD SHA of <paramref name="branch"/> in <paramref name="repoName"/>, or null.</summary>
public string? GetBranchTipSha(string repoName, string branch)
{
    var path = GetRepoPath(repoName);
    if (string.IsNullOrWhiteSpace(path) || !Directory.Exists(path))
        return null;

    try
    {
        using var repo = new Repository(path);
        // Remote tracking ref first — it reflects origin; the local pointer is the fallback.
        var candidate = repo.Branches[$"origin/{branch}"] ?? repo.Branches[branch];
        return candidate?.Tip?.Sha;
    }
    catch
    {
        // Any git failure (missing repo, corrupt refs) degrades to "unknown" — by contract.
        return null;
    }
}
}
/// <summary>A single unreleased commit — carries full SHA for cherry-pick operations.</summary>
@@ -729,5 +858,6 @@ public record BranchStatus(
string? LastCommitSummary,
int AheadOfNext, // commits this branch has that the next doesn't
int BehindNext, // commits next has that this branch doesn't (diverged)
CommitInfo[] UnreleasedCommits // rich commit objects for cherry-pick UI
CommitInfo[] UnreleasedCommits, // rich commit objects for cherry-pick UI
string? TipSha = null // full 40-char SHA for build-gate checks
);
+19 -1
View File
@@ -17,6 +17,7 @@ public class ReleaseService(
IConfiguration config,
TenantRegistryService registry,
BuildHistoryService history,
PromotionService promotions,
ILogger<ReleaseService> logger)
{
private static readonly SemaphoreSlim _lock = new(1, 1);
@@ -50,7 +51,12 @@ public class ReleaseService(
return blocked;
}
var record = await history.CreateReleaseAsync(targetEnv, ImageName);
// Resolve the Clarity branch for this environment and stamp the HEAD SHA
// before creating the record so we capture "what was deployed" accurately.
var branch = targetEnv switch { "fdev" => "develop", "staging" => "staging", "uat" => "uat", _ => "main" };
var currentSha = promotions.GetBranchTipSha("Clarity", branch);
var record = await history.CreateReleaseAsync(targetEnv, ImageName, currentSha);
try
{
@@ -182,6 +188,18 @@ public class ReleaseService(
}
finally
{
// Stamp the exact OPC ticket numbers introduced by this release:
// diff from previous release's SHA to this release's SHA on the Clarity branch.
try
{
var prev = await history.GetLastSuccessfulReleaseForEnvAsync(targetEnv);
// Exclude the current (in-flight) record — it's not succeeded yet
var prevSha = prev?.Id == record.Id ? null : prev?.CommitSha;
if (currentSha is not null)
record.OpcNumbers = await promotions.ExtractOpcNumbersDeltaAsync("Clarity", currentSha, prevSha, ct);
}
catch { /* git not configured — continue without OPC stamp */ }
await history.UpdateReleaseAsync(record);
_lock.Release();
}
@@ -5,6 +5,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Npgsql" />
</ItemGroup>
</Project>
@@ -24,6 +24,11 @@ public class SagaContext
// Written by LaunchStep — primary app container name
public string? ContainerName { get; set; }
// Written by VaultStep — scoped periodic token for the tenant (not the root token)
// and its accessor used for compensation/revocation
public string? VaultToken { get; set; }
public string? VaultTokenAccessor { get; set; }
// Written by PulumiStep (DedicatedVM/Enterprise tier) — target host details for subsequent steps
public string? VmIpAddress { get; set; }
public string? VmSshKeyPath { get; set; }
@@ -16,6 +16,8 @@ public class ReleaseRecord
public DateTimeOffset StartedAt { get; set; } = DateTimeOffset.UtcNow;
public DateTimeOffset? FinishedAt { get; set; }
public List<TenantReleaseResult> Tenants { get; set; } = [];
public List<string> OpcNumbers { get; set; } = [];
public string? CommitSha { get; set; } // Clarity branch HEAD SHA at release time
}
public class TenantReleaseResult
+231 -79
View File
@@ -1,46 +1,36 @@
using System.Text.Json;
using ControlPlane.Core.Models;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Npgsql;
namespace ControlPlane.Core.Services;
/// <summary>
/// Persists build and release history to JSON files in the ClientAssets folder.
/// Thread-safe — all writes go through a single lock per file.
/// Persists build and release history to opcdb (Postgres).
/// Replaces the previous JSON-file implementation.
/// NpgsqlDataSource is singleton and manages the connection pool; this service is safe to register as singleton.
/// </summary>
public class BuildHistoryService
public class BuildHistoryService(NpgsqlDataSource db, ILogger<BuildHistoryService> logger)
{
private readonly string _buildsPath;
private readonly string _releasesPath;
private readonly ILogger<BuildHistoryService> _logger;
private static readonly SemaphoreSlim _buildLock = new(1, 1);
private static readonly SemaphoreSlim _releaseLock = new(1, 1);
private static readonly JsonSerializerOptions JsonOpts = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
Converters = { new System.Text.Json.Serialization.JsonStringEnumConverter() },
};
public BuildHistoryService(IConfiguration config, ILogger<BuildHistoryService> logger)
{
var folder = config["ClientAssets__Folder"] ?? config["ClientAssets:Folder"]
?? Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "ClientAssets"));
Directory.CreateDirectory(folder);
_buildsPath = Path.Combine(folder, "builds.json");
_releasesPath = Path.Combine(folder, "releases.json");
_logger = logger;
}
// ── Builds ──────────────────────────────────────────────────────────────
public async Task<BuildRecord> CreateBuildAsync(BuildKind kind, string target)
{
var record = new BuildRecord { Kind = kind, Target = target };
await SaveBuildAsync(record);
await using var cmd = db.CreateCommand("""
INSERT INTO build_record (id, kind, target, status, started_at, commit_sha, log)
VALUES ($1, $2, $3, $4, $5, $6, $7)
""");
cmd.Parameters.AddWithValue(record.Id);
cmd.Parameters.AddWithValue(record.Kind.ToString());
cmd.Parameters.AddWithValue(record.Target);
cmd.Parameters.AddWithValue(record.Status.ToString());
cmd.Parameters.AddWithValue(record.StartedAt);
cmd.Parameters.AddWithValue((object?)record.CommitSha ?? DBNull.Value);
cmd.Parameters.AddWithValue(string.Empty);
await cmd.ExecuteNonQueryAsync();
return record;
}
@@ -50,89 +40,251 @@ public class BuildHistoryService
record.FinishedAt = DateTimeOffset.UtcNow;
record.DurationMs = (int)(record.FinishedAt.Value - record.StartedAt).TotalMilliseconds;
record.ImageDigest = digest;
await SaveBuildAsync(record);
await using var cmd = db.CreateCommand("""
UPDATE build_record
SET status = $2, finished_at = $3, duration_ms = $4, image_digest = $5, commit_sha = $6, log = $7
WHERE id = $1
""");
cmd.Parameters.AddWithValue(record.Id);
cmd.Parameters.AddWithValue(record.Status.ToString());
cmd.Parameters.AddWithValue(record.FinishedAt!.Value);
cmd.Parameters.AddWithValue((object?)record.DurationMs ?? DBNull.Value);
cmd.Parameters.AddWithValue((object?)record.ImageDigest ?? DBNull.Value);
cmd.Parameters.AddWithValue((object?)record.CommitSha ?? DBNull.Value);
cmd.Parameters.AddWithValue(string.Join('\n', record.Log));
await cmd.ExecuteNonQueryAsync();
}
public async Task AppendBuildLogAsync(BuildRecord record, string line)
{
record.Log.Add(line);
// Flush to disk every 20 lines to avoid excessive I/O but keep reasonable freshness
// Flush to Postgres every 20 lines — keeps the live log queryable without hammering the DB
if (record.Log.Count % 20 == 0)
await SaveBuildAsync(record);
await FlushLogAsync(record);
}
// Writes the current in-memory log buffer for this build to its row.
// Log lines are stored newline-joined in a single TEXT column, so each flush
// rewrites the full log rather than appending.
private async Task FlushLogAsync(BuildRecord record)
{
await using var cmd = db.CreateCommand("UPDATE build_record SET log = $2 WHERE id = $1");
cmd.Parameters.AddWithValue(record.Id);
cmd.Parameters.AddWithValue(string.Join('\n', record.Log));
await cmd.ExecuteNonQueryAsync();
}
public async Task<List<BuildRecord>> GetBuildsAsync()
{
await _buildLock.WaitAsync();
try { return LoadJson<BuildRecord>(_buildsPath); }
finally { _buildLock.Release(); }
var result = new List<BuildRecord>();
await using var cmd = db.CreateCommand("""
SELECT id, kind, target, status, started_at, finished_at, duration_ms, image_digest, commit_sha, log
FROM build_record
ORDER BY started_at DESC
LIMIT 100
""");
await using var reader = await cmd.ExecuteReaderAsync();
while (await reader.ReadAsync())
{
var logText = reader.IsDBNull(9) ? "" : reader.GetString(9);
result.Add(new BuildRecord
{
Id = reader.GetString(0),
Kind = Enum.Parse<BuildKind>(reader.GetString(1)),
Target = reader.GetString(2),
Status = Enum.Parse<BuildStatus>(reader.GetString(3)),
StartedAt = reader.GetFieldValue<DateTimeOffset>(4),
FinishedAt = reader.IsDBNull(5) ? null : reader.GetFieldValue<DateTimeOffset>(5),
DurationMs = reader.IsDBNull(6) ? null : reader.GetInt32(6),
ImageDigest = reader.IsDBNull(7) ? null : reader.GetString(7),
CommitSha = reader.IsDBNull(8) ? null : reader.GetString(8),
Log = logText.Length == 0 ? [] : [.. logText.Split('\n')],
});
}
private async Task SaveBuildAsync(BuildRecord record)
{
await _buildLock.WaitAsync();
try
{
var all = LoadJson<BuildRecord>(_buildsPath);
var idx = all.FindIndex(b => b.Id == record.Id);
if (idx >= 0) all[idx] = record;
else all.Insert(0, record);
// Keep last 100 builds
if (all.Count > 100) all = all[..100];
await File.WriteAllTextAsync(_buildsPath, JsonSerializer.Serialize(all, JsonOpts));
return result;
}
finally { _buildLock.Release(); }
// ── Builds by SHA ────────────────────────────────────────────────────────

/// <summary>Returns all build records whose <c>commit_sha</c> exactly matches <paramref name="sha"/>.</summary>
public async Task<List<BuildRecord>> GetBuildsByShaAsync(string sha)
{
    var records = new List<BuildRecord>();

    await using var cmd = db.CreateCommand("""
        SELECT id, kind, target, status, started_at, finished_at, duration_ms, image_digest, commit_sha, log
        FROM build_record
        WHERE commit_sha = $1
        ORDER BY started_at DESC
        """);
    cmd.Parameters.AddWithValue(sha);

    await using var rdr = await cmd.ExecuteReaderAsync();
    while (await rdr.ReadAsync())
    {
        // Ordinal access below matches the SELECT column list above.
        var logText = rdr.IsDBNull(9) ? "" : rdr.GetString(9);
        records.Add(new BuildRecord
        {
            Id = rdr.GetString(0),
            Kind = Enum.Parse<BuildKind>(rdr.GetString(1)),
            Target = rdr.GetString(2),
            Status = Enum.Parse<BuildStatus>(rdr.GetString(3)),
            StartedAt = rdr.GetFieldValue<DateTimeOffset>(4),
            FinishedAt = rdr.IsDBNull(5) ? null : rdr.GetFieldValue<DateTimeOffset>(5),
            DurationMs = rdr.IsDBNull(6) ? null : rdr.GetInt32(6),
            ImageDigest = rdr.IsDBNull(7) ? null : rdr.GetString(7),
            CommitSha = rdr.IsDBNull(8) ? null : rdr.GetString(8),
            Log = logText.Length == 0 ? [] : [.. logText.Split('\n')],
        });
    }
    return records;
}
// ── Releases ────────────────────────────────────────────────────────────

// Creates a release row up-front (status 'Running') so the release is visible while
// deployment proceeds. commitSha captures the Clarity branch HEAD at release time;
// its default of null keeps the previous two-argument call sites compiling unchanged.
// NOTE: removes stale residue of the pre-Postgres implementation (duplicate signature,
// duplicate `record` local, and a call to the deleted SaveReleaseAsync) that had been
// left interleaved with the new code and would not compile.
public async Task<ReleaseRecord> CreateReleaseAsync(string environment, string imageName, string? commitSha = null)
{
    var record = new ReleaseRecord { Environment = environment, ImageName = imageName, CommitSha = commitSha };

    await using var cmd = db.CreateCommand("""
        INSERT INTO release_record (id, environment, image_name, status, started_at, opc_numbers, commit_sha)
        VALUES ($1, $2, $3, $4, $5, $6, $7)
        """);
    // Positional parameters — order must match the VALUES list exactly.
    cmd.Parameters.AddWithValue(record.Id);
    cmd.Parameters.AddWithValue(record.Environment);
    cmd.Parameters.AddWithValue(record.ImageName);
    cmd.Parameters.AddWithValue(record.Status.ToString());
    cmd.Parameters.AddWithValue(record.StartedAt);
    // opc_numbers is text[] — a typed parameter avoids AddWithValue's array inference.
    cmd.Parameters.Add(new NpgsqlParameter<string[]> { TypedValue = [.. record.OpcNumbers] });
    cmd.Parameters.AddWithValue((object?)record.CommitSha ?? DBNull.Value);
    await cmd.ExecuteNonQueryAsync();
    return record;
}
// Finalises (or re-stamps) a release: updates the summary row and replaces all
// per-tenant results in a single transaction so the two can never diverge.
// Stamps FinishedAt with "now" on every call.
// NOTE: removes a stale `await SaveReleaseAsync(record);` residue line from the
// deleted JSON-file implementation that would not compile.
public async Task UpdateReleaseAsync(ReleaseRecord record)
{
    record.FinishedAt = DateTimeOffset.UtcNow;

    await using var conn = await db.OpenConnectionAsync();
    await using var tx = await conn.BeginTransactionAsync();

    await using var upd = new NpgsqlCommand("""
        UPDATE release_record SET status = $2, finished_at = $3, opc_numbers = $4, commit_sha = $5 WHERE id = $1
        """, conn, tx);
    upd.Parameters.AddWithValue(record.Id);
    upd.Parameters.AddWithValue(record.Status.ToString());
    upd.Parameters.AddWithValue(record.FinishedAt!.Value);
    upd.Parameters.Add(new NpgsqlParameter<string[]> { TypedValue = [.. record.OpcNumbers] });
    upd.Parameters.AddWithValue((object?)record.CommitSha ?? DBNull.Value);
    await upd.ExecuteNonQueryAsync();

    // Replace tenant results wholesale on each update
    await using var del = new NpgsqlCommand(
        "DELETE FROM release_tenant_result WHERE release_id = $1", conn, tx);
    del.Parameters.AddWithValue(record.Id);
    await del.ExecuteNonQueryAsync();

    foreach (var t in record.Tenants)
    {
        await using var ins = new NpgsqlCommand("""
            INSERT INTO release_tenant_result (release_id, subdomain, container_name, success, error)
            VALUES ($1, $2, $3, $4, $5)
            """, conn, tx);
        ins.Parameters.AddWithValue(record.Id);
        ins.Parameters.AddWithValue(t.Subdomain);
        ins.Parameters.AddWithValue(t.ContainerName);
        ins.Parameters.AddWithValue(t.Success);
        ins.Parameters.AddWithValue((object?)t.Error ?? DBNull.Value);
        await ins.ExecuteNonQueryAsync();
    }
    await tx.CommitAsync();
}
public async Task<List<ReleaseRecord>> GetReleasesAsync()
{
await _releaseLock.WaitAsync();
try { return LoadJson<ReleaseRecord>(_releasesPath); }
finally { _releaseLock.Release(); }
var ordered = new List<ReleaseRecord>();
var lookup = new Dictionary<string, ReleaseRecord>();
await using var cmd = db.CreateCommand("""
SELECT id, environment, image_name, status, started_at, finished_at, opc_numbers, commit_sha
FROM release_record
ORDER BY started_at DESC
LIMIT 50
""");
await using (var reader = await cmd.ExecuteReaderAsync())
{
while (await reader.ReadAsync())
{
var r = new ReleaseRecord
{
Id = reader.GetString(0),
Environment = reader.GetString(1),
ImageName = reader.GetString(2),
Status = Enum.Parse<ReleaseStatus>(reader.GetString(3)),
StartedAt = reader.GetFieldValue<DateTimeOffset>(4),
FinishedAt = reader.IsDBNull(5) ? null : reader.GetFieldValue<DateTimeOffset>(5),
OpcNumbers = reader.IsDBNull(6) ? [] : [.. reader.GetFieldValue<string[]>(6)],
CommitSha = reader.IsDBNull(7) ? null : reader.GetString(7),
};
ordered.Add(r);
lookup[r.Id] = r;
}
}
private async Task SaveReleaseAsync(ReleaseRecord record)
{
await _releaseLock.WaitAsync();
try
{
var all = LoadJson<ReleaseRecord>(_releasesPath);
var idx = all.FindIndex(r => r.Id == record.Id);
if (idx >= 0) all[idx] = record;
else all.Insert(0, record);
if (lookup.Count == 0) return [];
if (all.Count > 50) all = all[..50];
await File.WriteAllTextAsync(_releasesPath, JsonSerializer.Serialize(all, JsonOpts));
}
finally { _releaseLock.Release(); }
// Load all tenant results for the fetched release IDs in one query
await using var cmd2 = db.CreateCommand("""
SELECT release_id, subdomain, container_name, success, error
FROM release_tenant_result
WHERE release_id = ANY($1)
""");
cmd2.Parameters.Add(new NpgsqlParameter<string[]> { TypedValue = [.. lookup.Keys] });
await using var reader2 = await cmd2.ExecuteReaderAsync();
while (await reader2.ReadAsync())
{
if (lookup.TryGetValue(reader2.GetString(0), out var r))
r.Tenants.Add(new TenantReleaseResult
{
Subdomain = reader2.GetString(1),
ContainerName = reader2.GetString(2),
Success = reader2.GetBoolean(3),
Error = reader2.IsDBNull(4) ? null : reader2.GetString(4),
});
}
// ── Helpers ─────────────────────────────────────────────────────────────
private static List<T> LoadJson<T>(string path)
{
if (!File.Exists(path)) return [];
try
{
var json = File.ReadAllText(path);
return JsonSerializer.Deserialize<List<T>>(json, JsonOpts) ?? [];
return ordered;
}
catch { return []; }
/// <summary>
/// Returns the most recent succeeded release for <paramref name="environment"/>, or null if none exists.
/// Used to calculate the OPC ticket delta between releases (previousSha..currentSha).
/// </summary>
public async Task<ReleaseRecord?> GetLastSuccessfulReleaseForEnvAsync(string environment)
{
    await using var cmd = db.CreateCommand("""
        SELECT id, environment, image_name, status, started_at, finished_at, opc_numbers, commit_sha
        FROM release_record
        WHERE environment = $1 AND status = 'Succeeded'
        ORDER BY started_at DESC
        LIMIT 1
        """);
    cmd.Parameters.AddWithValue(environment);

    await using var rdr = await cmd.ExecuteReaderAsync();
    if (!await rdr.ReadAsync())
        return null;

    // Tenant results are not queried here — the returned record's Tenants list stays empty.
    return new ReleaseRecord
    {
        Id = rdr.GetString(0),
        Environment = rdr.GetString(1),
        ImageName = rdr.GetString(2),
        Status = Enum.Parse<ReleaseStatus>(rdr.GetString(3)),
        StartedAt = rdr.GetFieldValue<DateTimeOffset>(4),
        FinishedAt = rdr.IsDBNull(5) ? null : rdr.GetFieldValue<DateTimeOffset>(5),
        OpcNumbers = rdr.IsDBNull(6) ? [] : [.. rdr.GetFieldValue<string[]>(6)],
        CommitSha = rdr.IsDBNull(7) ? null : rdr.GetString(7),
    };
}
}
@@ -105,6 +105,9 @@ public class ClarityContainerService(
["clarity.subdomain"] = subdomain,
["clarity.siteCode"] = siteCode,
["clarity.env"] = environment,
// Groups containers in Docker Desktop by environment tier (fdev / uat / prod).
["com.docker.compose.project"] = $"clarity-{environment.ToLowerInvariant()}",
["com.docker.compose.service"] = name,
},
}, cancellationToken);
+13 -2
View File
@@ -41,6 +41,17 @@ public class KeycloakStep(
}, cancellationToken);
// clarity-web-app: public OIDC client used by the React frontend.
// fdev is a developer dogfood environment — allow localhost redirect URIs so that a
// local Aspire dev loop (any port) can complete the OIDC flow against the shared
// OPC infra Keycloak without any post-provisioning patching.
var isFdev = string.Equals(context.Job.Environment, "fdev", StringComparison.OrdinalIgnoreCase);
var redirectUris = isFdev
? new[] { $"{tenantOrigin}/*", "http://localhost:*/*", "http://*.dev.localhost:*/*" }
: new[] { $"{tenantOrigin}/*" };
var webOrigins = isFdev
? "+" // match all valid redirect URI origins
: tenantOrigin;
await adminClient.CreateClientAsync(realmId, new
{
clientId = "clarity-web-app",
@@ -51,8 +62,8 @@ public class KeycloakStep(
directAccessGrantsEnabled = false,
rootUrl = tenantOrigin,
baseUrl = "/",
redirectUris = new[] { $"{tenantOrigin}/*" },
webOrigins = new[] { tenantOrigin },
redirectUris,
webOrigins = new[] { webOrigins },
}, cancellationToken);
// Ensure tokens issued by clarity-web-app include "clarity-rest-api" in the `aud` claim
+1 -1
View File
@@ -32,7 +32,7 @@ public class LaunchStep(
subdomain: job.Subdomain,
keycloakRealm: $"clarity-{job.Subdomain.ToLowerInvariant()}",
postgresConnectionString: context.TenantConnectionString,
vaultToken: ReadVaultToken(config),
vaultToken: context.VaultToken ?? ReadVaultToken(config),
jobId: job.Id,
cancellationToken: cancellationToken);
+105 -23
View File
@@ -1,6 +1,9 @@
using ControlPlane.Core.Interfaces;
using ControlPlane.Core.Models;
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
namespace ControlPlane.Worker.Steps;
@@ -8,38 +11,117 @@ public class VaultStep(ILogger<VaultStep> logger, IConfiguration config) : ISaga
{
public string StepName => "Cryptographic Pre-Flight (Vault)";
// Policy grants the tenant token exactly the three Transit operations Clarity.Server needs:
//   GenerateTenantKEKAsync → datakey/plaintext (first boot only)
//   DecryptTenantKEKAsync  → decrypt (every restart)
//   RewrapTenantKEKAsync   → rewrap (key rotation)
private const string PolicyTemplate = """
    path "clarity-transit/datakey/plaintext/master-key" {
    capabilities = ["update"]
    }
    path "clarity-transit/decrypt/master-key" {
    capabilities = ["update"]
    }
    path "clarity-transit/rewrap/master-key" {
    capabilities = ["update"]
    }
    """;

/// <summary>
/// Verifies the shared Transit engine is bootstrapped, upserts an idempotent per-tenant
/// ACL policy, and mints a scoped periodic (72h, renewable) token bound to that policy.
/// On success the token and its accessor are stored on <paramref name="context"/> for
/// downstream steps; CompensateAsync revokes via the accessor.
/// </summary>
/// <exception cref="InvalidOperationException">Transit master-key is missing (infra not bootstrapped).</exception>
public async Task ExecuteAsync(SagaContext context, CancellationToken cancellationToken)
{
    var rootToken = ReadRootToken();
    var vaultAddr = (config["Vault:Address"] ?? "http://localhost:8200").TrimEnd('/');
    var subdomain = context.Job.Subdomain.ToLowerInvariant();
    var policyName = $"clarity-tenant-{subdomain}";

    using var http = new HttpClient { BaseAddress = new Uri(vaultAddr) };
    http.DefaultRequestHeaders.Add("X-Vault-Token", rootToken);

    // ── 1. Assert Transit engine + master-key are healthy ─────────────────
    logger.LogInformation("[{JobId}] Verifying Vault Transit engine and master-key.", context.Job.Id);
    var healthRes = await http.GetAsync("v1/clarity-transit/keys/master-key", cancellationToken);
    if (!healthRes.IsSuccessStatusCode)
        throw new InvalidOperationException(
            $"Vault Transit master-key not found at {vaultAddr}. " +
            "Ensure OPC infra is running and the entrypoint has bootstrapped Vault.");

    // ── 2. Upsert per-tenant policy (idempotent PUT) ──────────────────────
    logger.LogInformation("[{JobId}] Writing Vault policy '{Policy}'.", context.Job.Id, policyName);
    var policyBody = JsonSerializer.Serialize(new { policy = PolicyTemplate });
    var policyRes = await http.PutAsync(
        $"v1/sys/policies/acl/{policyName}",
        new StringContent(policyBody, Encoding.UTF8, "application/json"),
        cancellationToken);
    policyRes.EnsureSuccessStatusCode();

    // ── 3. Create scoped periodic token bound to tenant policy ────────────
    logger.LogInformation("[{JobId}] Creating scoped Vault token for policy '{Policy}'.", context.Job.Id, policyName);
    var tokenBody = JsonSerializer.Serialize(new
    {
        policies = new[] { policyName },
        period = "72h",
        renewable = true,
        metadata = new Dictionary<string, string>
        {
            ["tenant"] = subdomain,
            ["createdBy"] = "ControlPlane.Worker",
        },
    });
    var tokenRes = await http.PostAsync(
        "v1/auth/token/create",
        new StringContent(tokenBody, Encoding.UTF8, "application/json"),
        cancellationToken);
    tokenRes.EnsureSuccessStatusCode();

    var tokenJson = JsonNode.Parse(await tokenRes.Content.ReadAsStringAsync(cancellationToken))!;
    context.VaultToken = tokenJson["auth"]!["client_token"]!.GetValue<string>();
    context.VaultTokenAccessor = tokenJson["auth"]!["accessor"]!.GetValue<string>();

    logger.LogInformation("[{JobId}] Vault step complete. Token accessor: {Accessor}",
        context.Job.Id, context.VaultTokenAccessor);
    context.Job.CompletedSteps |= CompletedSteps.VaultVerified;
}
/// <summary>
/// Compensation: revokes the scoped tenant token minted by ExecuteAsync, keyed by its
/// accessor. Best-effort — revocation failures are logged and swallowed so the rest of
/// the saga's compensations can still run.
/// </summary>
public async Task CompensateAsync(SagaContext context, CancellationToken cancellationToken)
{
    // Nothing to undo if ExecuteAsync never got as far as minting a token.
    if (string.IsNullOrWhiteSpace(context.VaultTokenAccessor)) return;

    logger.LogWarning("[{JobId}] Compensating Vault — revoking token accessor {Accessor}.",
        context.Job.Id, context.VaultTokenAccessor);
    try
    {
        var rootToken = ReadRootToken();
        var vaultAddr = (config["Vault:Address"] ?? "http://localhost:8200").TrimEnd('/');
        using var http = new HttpClient { BaseAddress = new Uri(vaultAddr) };
        http.DefaultRequestHeaders.Add("X-Vault-Token", rootToken);

        // Revoking by accessor avoids needing the (possibly lost) client token itself.
        var body = JsonSerializer.Serialize(new { accessor = context.VaultTokenAccessor });
        await http.PostAsync(
            "v1/auth/token/revoke-accessor",
            new StringContent(body, Encoding.UTF8, "application/json"),
            cancellationToken);
    }
    catch (Exception ex)
    {
        logger.LogError(ex, "[{JobId}] Failed to revoke Vault token accessor {Accessor} during compensation.",
            context.Job.Id, context.VaultTokenAccessor);
    }
}
/// <summary>
/// Resolves the Vault root token. Prefers the init.json written by the Vault entrypoint
/// on first boot (path from Vault:KeysFile, with legacy Vault__KeysFile as fallback);
/// if the file is absent or lacks a root_token, falls back to an explicit Vault:Token
/// setting. Throws when neither source yields a token.
/// </summary>
/// <exception cref="InvalidOperationException">No keys file and no Vault:Token configured.</exception>
internal string ReadRootToken()
{
    var path = config["Vault:KeysFile"] ?? config["Vault__KeysFile"];
    if (!string.IsNullOrWhiteSpace(path) && File.Exists(path))
    {
        using var doc = JsonDocument.Parse(File.ReadAllText(path));
        if (doc.RootElement.TryGetProperty("root_token", out var tok))
            return tok.GetString()!;
    }

    return config["Vault:Token"]
        ?? throw new InvalidOperationException(
            "Cannot resolve Vault root token: neither Vault:KeysFile nor Vault:Token is configured.");
}
}
@@ -0,0 +1,5 @@
{
"Vault": {
"KeysFile": "C:\\Users\\amadzarak\\source\\repos\\ClarityStack\\OPC\\infra\\vault\\data\\init.json"
}
}
+1 -1
View File
@@ -20,7 +20,7 @@
// ── Vault ─────────────────────────────────────────────────────────────────────
// Worker uses localhost:8200 for admin calls.
// Vault__KeysFile is machine-specific → still injected by Aspire AppHost.
// Vault:KeysFile is machine-specific → set in appsettings.Development.json.
"Vault": {
"Address": "http://localhost:8200",
"ContainerAddress": "http://vault:8200"
+59 -1
View File
@@ -13,7 +13,8 @@
"highlight.js": "^11.11.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-multistep": "^7.0.0"
"react-multistep": "^7.0.0",
"react-router-dom": "^7.14.2"
},
"devDependencies": {
"@eslint/js": "^9.39.4",
@@ -1710,6 +1711,19 @@
"dev": true,
"license": "MIT"
},
"node_modules/cookie": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz",
"integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==",
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -3043,6 +3057,44 @@
"react-dom": "^16.8.0 || ^17 || ^18"
}
},
"node_modules/react-router": {
"version": "7.14.2",
"resolved": "https://registry.npmjs.org/react-router/-/react-router-7.14.2.tgz",
"integrity": "sha512-yCqNne6I8IB6rVCH7XUvlBK7/QKyqypBFGv+8dj4QBFJiiRX+FG7/nkdAvGElyvVZ/HQP5N19wzteuTARXi5Gw==",
"license": "MIT",
"dependencies": {
"cookie": "^1.0.1",
"set-cookie-parser": "^2.6.0"
},
"engines": {
"node": ">=20.0.0"
},
"peerDependencies": {
"react": ">=18",
"react-dom": ">=18"
},
"peerDependenciesMeta": {
"react-dom": {
"optional": true
}
}
},
"node_modules/react-router-dom": {
"version": "7.14.2",
"resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.14.2.tgz",
"integrity": "sha512-YZcM5ES8jJSM+KrJ9BdvHHqlnGTg5tH3sC5ChFRj4inosKctdyzBDhOyyHdGk597q2OT6NTrCA1OvB/YDwfekQ==",
"license": "MIT",
"dependencies": {
"react-router": "7.14.2"
},
"engines": {
"node": ">=20.0.0"
},
"peerDependencies": {
"react": ">=18",
"react-dom": ">=18"
}
},
"node_modules/react-transition-group": {
"version": "4.4.5",
"resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz",
@@ -3140,6 +3192,12 @@
"upper-case-first": "^2.0.2"
}
},
"node_modules/set-cookie-parser": {
"version": "2.7.2",
"resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz",
"integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==",
"license": "MIT"
},
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+2 -1
View File
@@ -15,7 +15,8 @@
"highlight.js": "^11.11.1",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-multistep": "^7.0.0"
"react-multistep": "^7.0.0",
"react-router-dom": "^7.14.2"
},
"devDependencies": {
"@eslint/js": "^9.39.4",
+38 -30
View File
@@ -1,6 +1,6 @@
import '@blueprintjs/core/lib/css/blueprint.css';
import './App.css';
import { useState } from 'react';
import { Navigate, Route, Routes, useLocation, useNavigate } from 'react-router-dom';
import { Menu, MenuItem, MenuDivider } from '@blueprintjs/core';
import DashboardPage from './pages/DashboardPage';
import PipelinesPage from './pages/PipelinesPage';
@@ -11,12 +11,12 @@ import OpcPage from './opc/OpcPage';
import InfraPage from './pages/InfraPage';
import ChangesetsPage from './pages/ChangesetsPage';
function App() {
const [activeNav, setActiveNav] = useState('opc');
function Sidebar() {
const navigate = useNavigate();
const { pathname } = useLocation();
const at = (path: string) => pathname === path || pathname.startsWith(path + '/');
return (
<div className="cp-shell">
{/* ── Sidebar ── */}
<aside className="cp-sidebar">
<div className="cp-sidebar-brand">
<span className="brand-mark">CP</span>
@@ -25,17 +25,17 @@ function App() {
<div className="cp-sidebar-nav">
<Menu className="cp-sidebar-menu">
<MenuItem icon="cloud-upload" text="Deployments" active={activeNav === 'deployments'} onClick={() => setActiveNav('deployments')} />
<MenuItem icon="git-branch" text="Pipelines" active={activeNav === 'pipelines'} onClick={() => setActiveNav('pipelines')} />
<MenuItem icon="git-merge" text="Branch Ladder" active={activeNav === 'branches'} onClick={() => setActiveNav('branches')} />
<MenuItem icon="build" text="Image Build" active={activeNav === 'image-build'} onClick={() => setActiveNav('image-build')} />
<MenuItem icon="pulse" text="Build Monitor" active={activeNav === 'build-monitor'} onClick={() => setActiveNav('build-monitor')} />
<MenuItem icon="cloud-upload" text="Deployments" active={at('/deployments')} onClick={() => navigate('/deployments')} />
<MenuItem icon="git-branch" text="Pipelines" active={at('/pipelines')} onClick={() => navigate('/pipelines')} />
<MenuItem icon="git-merge" text="Branch Ladder" active={at('/branches')} onClick={() => navigate('/branches')} />
<MenuItem icon="build" text="Image Build" active={at('/image-build')} onClick={() => navigate('/image-build')} />
<MenuItem icon="pulse" text="Build Monitor" active={at('/build-monitor')} onClick={() => navigate('/build-monitor')} />
<MenuDivider />
<MenuItem icon="heat-grid" text="Infrastructure" active={activeNav === 'infra'} onClick={() => setActiveNav('infra')} />
<MenuItem icon="clipboard" text="OPC" active={activeNav === 'opc'} onClick={() => setActiveNav('opc')} />
<MenuItem icon="history" text="Changesets" active={activeNav === 'changesets'} onClick={() => setActiveNav('changesets')} />
<MenuItem icon="people" text="Clients" active={activeNav === 'clients'} onClick={() => setActiveNav('clients')} />
<MenuItem icon="cog" text="Settings" active={activeNav === 'settings'} onClick={() => setActiveNav('settings')} />
<MenuItem icon="heat-grid" text="Infrastructure" active={at('/infra')} onClick={() => navigate('/infra')} />
<MenuItem icon="clipboard" text="OPC" active={at('/opc')} onClick={() => navigate('/opc')} />
<MenuItem icon="history" text="Changesets" active={at('/changesets')} onClick={() => navigate('/changesets')} />
<MenuItem icon="people" text="Clients" active={at('/clients')} onClick={() => navigate('/clients')} />
<MenuItem icon="cog" text="Settings" active={at('/settings')} onClick={() => navigate('/settings')} />
</Menu>
</div>
@@ -49,21 +49,6 @@ function App() {
</div>
</div>
</aside>
{/* ── Main content ── */}
<main className="cp-main">
{activeNav === 'deployments' && <DashboardPage />}
{activeNav === 'pipelines' && <PipelinesPage />}
{activeNav === 'branches' && <BranchPage />}
{activeNav === 'image-build' && <ImageBuildPage />}
{activeNav === 'build-monitor' && <BuildMonitorPage />}
{activeNav === 'infra' && <InfraPage />}
{activeNav === 'opc' && <OpcPage />}
{activeNav === 'changesets' && <ChangesetsPage />}
{activeNav === 'clients' && <PlaceholderPage title="Clients" />}
{activeNav === 'settings' && <PlaceholderPage title="Settings" />}
</main>
</div>
);
}
@@ -76,4 +61,27 @@ function PlaceholderPage({ title }: { title: string }) {
);
}
// Root shell: persistent sidebar on the left, routed main content on the right.
// "/" redirects to /opc — the OPC board is the default landing view.
function App() {
return (
<div className="cp-shell">
<Sidebar />
<main className="cp-main">
<Routes>
{/* Default landing: send bare "/" to the OPC board. */}
<Route path="/" element={<Navigate to="/opc" replace />} />
<Route path="/deployments" element={<DashboardPage />} />
<Route path="/pipelines" element={<PipelinesPage />} />
<Route path="/branches" element={<BranchPage />} />
<Route path="/image-build" element={<ImageBuildPage />} />
<Route path="/build-monitor" element={<BuildMonitorPage />} />
<Route path="/infra" element={<InfraPage />} />
<Route path="/opc" element={<OpcPage />} />
<Route path="/changesets" element={<ChangesetsPage />} />
{/* Not yet implemented — placeholder pages keep the nav items routable. */}
<Route path="/clients" element={<PlaceholderPage title="Clients" />} />
<Route path="/settings" element={<PlaceholderPage title="Settings" />} />
</Routes>
</main>
</div>
);
}
export default App;
+49
View File
@@ -0,0 +1,49 @@
const BASE_URL = import.meta.env.VITE_API_URL ?? '';

/** A buildable unit known to the control plane. */
export interface ProjectDefinition {
  name: string;
  kind: 'DotnetProject' | 'NpmProject' | 'SolutionBuild';
  relativePath: string;
}

/** One build run, including its captured log lines. */
export interface BuildRecord {
  id: string;
  kind: 'DockerImage' | 'DotnetProject' | 'NpmProject' | 'SolutionBuild';
  target: string;
  status: 'Running' | 'Succeeded' | 'Failed';
  startedAt: string;
  finishedAt?: string;
  durationMs?: number;
  commitSha?: string;
  log: string[];
}

/** Fetches all project definitions the API knows how to build. */
export async function getProjects(): Promise<ProjectDefinition[]> {
  const response = await fetch(`${BASE_URL}/api/builds/projects`);
  if (!response.ok) throw new Error(`Failed to get projects: ${response.statusText}`);
  return response.json();
}

/** Fetches the recorded build history. */
export async function getBuildHistory(): Promise<BuildRecord[]> {
  const response = await fetch(`${BASE_URL}/api/builds/history`);
  if (!response.ok) throw new Error(`Failed to get build history: ${response.statusText}`);
  return response.json();
}

/**
 * Starts a build for `projectName` and streams its log over SSE.
 * Calls onLine per log line and onDone once with the final record (then closes
 * the stream). Returns the EventSource so the caller can close it early.
 */
export function triggerProjectBuild(
  projectName: string,
  onLine: (line: string) => void,
  onDone: (record: BuildRecord) => void,
  onError: (err: Event) => void,
): EventSource {
  const source = new EventSource(`${BASE_URL}/api/builds/${encodeURIComponent(projectName)}`);
  source.onmessage = (event) => {
    try {
      const payload = JSON.parse(event.data);
      if (payload.done && payload.build) {
        onDone(payload.build as BuildRecord);
        source.close();
      } else if (typeof payload.line === 'string') {
        onLine(payload.line);
      }
    } catch { /* ignore malformed frames */ }
  };
  source.onerror = (event) => { onError(event); };
  return source;
}
+18
View File
@@ -0,0 +1,18 @@
const BASE_URL = import.meta.env.VITE_API_URL ?? '';
export interface GitCommit {
hash: string;
shortHash: string;
author: string;
date: string;
subject: string;
files: string[];
}
export async function getGitLog(path?: string, limit = 20): Promise<GitCommit[]> {
const params = new URLSearchParams({ limit: String(limit) });
if (path) params.set('path', path);
const res = await fetch(`${BASE_URL}/api/git/log?${params}`);
if (!res.ok) throw new Error(`Failed to get git log: ${res.statusText}`);
return res.json();
}
+51
View File
@@ -0,0 +1,51 @@
const BASE_URL = import.meta.env.VITE_API_URL ?? '';
export interface ImageBuildStatus {
imageName: string | null;
builtAt: string | null;
lastMessage: string;
isBuilding: boolean;
}
export interface BuildHistoryRecord {
id: string;
status: 'Running' | 'Succeeded' | 'Failed';
startedAt: string;
durationMs: number | null;
commitSha: string | null;
imageDigest: string | null;
}
export async function getImageStatus(): Promise<ImageBuildStatus> {
const res = await fetch(`${BASE_URL}/api/image/status`);
if (!res.ok) throw new Error(`Failed to get image status: ${res.statusText}`);
return res.json();
}
export async function getImageBuildHistory(limit = 30): Promise<BuildHistoryRecord[]> {
const res = await fetch(`${BASE_URL}/api/image/history?limit=${limit}`);
if (!res.ok) throw new Error(`Failed to get build history: ${res.statusText}`);
return res.json();
}
export function triggerImageBuild(
onLine: (line: string) => void,
onDone: (success: boolean) => void,
onError: (err: Event) => void,
): EventSource {
const source = new EventSource(`${BASE_URL}/api/image/build-stream`);
source.onmessage = (e) => {
try {
const msg = JSON.parse(e.data);
if (msg.done) { onDone(true); source.close(); }
else if (msg.line) onLine(msg.line);
} catch { /* ignore */ }
};
source.onerror = (e) => { onDone(false); onError(e); };
return source;
}
export async function startImageBuild(): Promise<void> {
const res = await fetch(`${BASE_URL}/api/image/build`, { method: 'POST' });
if (!res.ok) throw new Error(`Build trigger failed: ${res.statusText}`);
}
+1 -1
View File
@@ -328,7 +328,7 @@ export async function listGiteaBranches(repoKey?: string): Promise<GiteaBranch[]
export async function createGiteaBranch(
opcNumber: string,
opcTitle: string,
from = 'master',
from = 'main',
): Promise<GiteaBranch> {
const res = await fetch(`${BASE_URL}/api/gitea/branches`, {
method: 'POST',
@@ -0,0 +1,220 @@
const BASE_URL = import.meta.env.VITE_API_URL ?? '';

// A single commit in a branch's unreleased set.
export interface CommitInfo {
sha: string;
shortSha: string;
message: string;
author: string;
date: string;
}

// Position of one ladder branch relative to the next rung.
export interface BranchStatus {
branch: string;
exists: boolean;
shortHash: string | null;
lastCommitSummary: string | null;
aheadOfNext: number;
behindNext: number;
unreleasedCommits: CommitInfo[];
tipSha: string | null;
}

// One promotion (or cherry-pick) run, including its streamed log.
export interface PromotionRecord {
id: string;
fromBranch: string;
toBranch: string;
requestedBy: string;
note: string | null;
status: 'Pending' | 'Running' | 'Succeeded' | 'Failed';
createdAt: string;
completedAt: string | null;
commitCount: number;
commitLines: string[];
log: string[];
}

export type ConformanceViolation = 'OK' | 'Missing' | 'Diverged' | 'Stale';
export type ConformanceSeverity = 'OK' | 'Info' | 'Warning' | 'Critical';

// Conformance verdict for one branch against its source branch.
export interface BranchConformanceCheck {
branch: string;
sourceBranch: string | null;
violation: ConformanceViolation;
severity: ConformanceSeverity;
detail: string;
aheadOfSource: number;
behindSource: number;
fixSha: string | null;
}

// Whole-repo conformance report (one check per ladder branch).
export interface ConformanceReport {
repo: string;
isConformant: boolean;
checks: BranchConformanceCheck[];
}
/** Fetches the branch-ladder status for `repo` (default 'Clarity'). */
export async function getLadderStatus(repo = 'Clarity'): Promise<BranchStatus[]> {
  const response = await fetch(`${BASE_URL}/api/promotions/ladder?repo=${encodeURIComponent(repo)}`);
  if (!response.ok) throw new Error(`Failed to get ladder status: ${response.statusText}`);
  return response.json();
}

/** Fetches past promotion runs. */
export async function getPromotionHistory(): Promise<PromotionRecord[]> {
  const response = await fetch(`${BASE_URL}/api/promotions/history`);
  if (!response.ok) throw new Error(`Failed to get promotion history: ${response.statusText}`);
  return response.json();
}
/**
 * Shared SSE-over-fetch driver for promotion-style mutations.
 * POSTs `body` as JSON to `endpoint`, then parses the response stream as SSE
 * frames (blank-line separated, `data:`-prefixed): log lines go to onLine, the
 * final record to onDone, failures to onError.
 * Returns a cancel function that aborts the in-flight request.
 */
function streamPromotionRequest(
  endpoint: string,
  body: unknown,
  onLine: (line: string) => void,
  onDone: (record: PromotionRecord) => void,
  onError: (err: string) => void,
): () => void {
  let cancelled = false;
  const controller = new AbortController();
  (async () => {
    try {
      const res = await fetch(endpoint, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(body),
        signal: controller.signal,
      });
      if (!res.ok || !res.body) { onError(res.statusText); return; }
      const reader = res.body.getReader();
      const decoder = new TextDecoder();
      let buffer = '';
      while (!cancelled) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // Frames are separated by a blank line; keep any trailing partial frame.
        const parts = buffer.split('\n\n');
        buffer = parts.pop() ?? '';
        for (const chunk of parts) {
          const dataLine = chunk.replace(/^data:\s*/m, '').trim();
          if (!dataLine) continue;
          try {
            const msg = JSON.parse(dataLine);
            if (msg.done && msg.promotion) onDone(msg.promotion as PromotionRecord);
            else if (typeof msg.line === 'string') onLine(msg.line);
          } catch { /* skip malformed frames */ }
        }
      }
    } catch (e) {
      if (!cancelled) onError(e instanceof Error ? e.message : 'Unknown error');
    }
  })();
  return () => { cancelled = true; controller.abort(); };
}

/**
 * Promotes `from` → `to` in `repo`, streaming progress lines.
 * Returns a cancel function that aborts the request.
 */
export function triggerPromotion(
  from: string,
  to: string,
  requestedBy: string,
  note: string | undefined,
  onLine: (line: string) => void,
  onDone: (record: PromotionRecord) => void,
  onError: (err: string) => void,
  repo = 'Clarity',
): () => void {
  return streamPromotionRequest(
    `${BASE_URL}/api/promotions/promote`,
    { from, to, requestedBy, note, repo },
    onLine, onDone, onError);
}

/**
 * Cherry-picks the given `shas` from `from` onto `to` in `repo`, streaming
 * progress lines. Returns a cancel function that aborts the request.
 */
export function triggerCherryPick(
  shas: string[],
  from: string,
  to: string,
  requestedBy: string,
  note: string | undefined,
  onLine: (line: string) => void,
  onDone: (record: PromotionRecord) => void,
  onError: (err: string) => void,
  repo = 'Clarity',
): () => void {
  return streamPromotionRequest(
    `${BASE_URL}/api/promotions/cherry-pick`,
    { shas, from, to, requestedBy, note, repo },
    onLine, onDone, onError);
}
/** Hard-resets `branch` to `toSha` in `repo`; surfaces the API's error message on failure. */
export async function resetBranch(branch: string, toSha: string, repo: string): Promise<void> {
  const response = await fetch(`${BASE_URL}/api/promotions/reset`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ branch, toSha, repo }),
  });
  if (response.ok) return;
  const payload = await response.json().catch(() => ({}));
  throw new Error((payload as { error?: string }).error ?? response.statusText);
}

/** Fetches the conformance report for one repo (default 'Clarity'). */
export async function getConformanceReport(repo = 'Clarity'): Promise<ConformanceReport> {
  const response = await fetch(`${BASE_URL}/api/promotions/conformance?repo=${encodeURIComponent(repo)}`);
  if (!response.ok) throw new Error(`Failed to get conformance report: ${response.statusText}`);
  return response.json();
}

/** Fetches conformance reports for every managed repo. */
export async function getAllConformanceReports(): Promise<ConformanceReport[]> {
  const response = await fetch(`${BASE_URL}/api/promotions/conformance/all`);
  if (!response.ok) throw new Error(`Failed to get conformance reports: ${response.statusText}`);
  return response.json();
}

/** Creates ladder branch `branch` at `fromSha` in `repo`; surfaces the API's error message on failure. */
export async function createLadderBranch(branch: string, fromSha: string, repo: string): Promise<void> {
  const response = await fetch(`${BASE_URL}/api/promotions/create-branch`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ branch, fromSha, repo }),
  });
  if (response.ok) return;
  const payload = await response.json().catch(() => ({}));
  throw new Error((payload as { error?: string }).error ?? response.statusText);
}

// ── Build gate ───────────────────────────────────────────────────────────────

/** Build-gate verdict for a commit; a "Red" status should disable the promote button. */
export interface BuildGate {
  status: 'Green' | 'Red' | 'Running' | 'Unknown';
  sha: string;
  buildId: string | null;
  buildStatus: string | null;
}

/** Looks up the build-gate status for a commit SHA. */
export async function getBuildGate(sha: string): Promise<BuildGate> {
  const response = await fetch(`${BASE_URL}/api/promotions/build-gate?sha=${encodeURIComponent(sha)}`);
  if (!response.ok) throw new Error(`Failed to get build gate: ${response.statusText}`);
  return response.json();
}
+7 -426
View File
@@ -1,426 +1,7 @@
import type { ProvisioningProgressEvent, ProvisioningRequest, TenantRecord } from '../types/provisioning';
const BASE_URL = import.meta.env.VITE_API_URL ?? '';
export async function submitProvisioningJob(request: ProvisioningRequest): Promise<string> {
const res = await fetch(`${BASE_URL}/api/provision`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(request),
});
if (!res.ok) throw new Error(`Failed to queue job: ${res.statusText}`);
const data = await res.json();
return data.id as string;
}
export async function getTenants(): Promise<TenantRecord[]> {
const res = await fetch(`${BASE_URL}/api/tenants`);
if (!res.ok) throw new Error(`Failed to load tenants: ${res.statusText}`);
return res.json();
}
export function subscribeToTenantLogs(
subdomain: string,
onLine: (line: string) => void,
onError: (err: Event) => void
): EventSource {
const source = new EventSource(`${BASE_URL}/api/tenants/${subdomain}/logs`);
source.onmessage = (e) => { if (e.data) onLine(e.data); };
source.onerror = onError;
return source;
}
export function subscribeToJobStream(
jobId: string,
onEvent: (event: ProvisioningProgressEvent) => void,
onError: (err: Event) => void
): EventSource {
const source = new EventSource(`${BASE_URL}/api/provision/${jobId}/stream`);
source.onmessage = (e) => {
try { onEvent(JSON.parse(e.data)); } catch { /* ignore */ }
};
source.onerror = onError;
return source;
}
export interface ImageBuildStatus {
imageName: string | null;
builtAt: string | null;
lastMessage: string;
isBuilding: boolean;
}
export async function getImageStatus(): Promise<ImageBuildStatus> {
const res = await fetch(`${BASE_URL}/api/image/status`);
if (!res.ok) throw new Error(`Failed to get image status: ${res.statusText}`);
return res.json();
}
/** Triggers a build and streams log lines. Calls onLine for each log chunk, onDone when finished. */
export function triggerImageBuild(
onLine: (line: string) => void,
onDone: (success: boolean) => void,
onError: (err: Event) => void
): EventSource {
const source = new EventSource(`${BASE_URL}/api/image/build-stream`);
source.onmessage = (e) => {
try {
const msg = JSON.parse(e.data);
if (msg.done) { onDone(true); source.close(); }
else if (msg.line) onLine(msg.line);
} catch { /* ignore */ }
};
source.onerror = (e) => { onDone(false); onError(e); };
return source;
}
/** POST to kick off the build — returns immediately; use subscribeToJobStream for progress */
export async function startImageBuild(): Promise<void> {
const res = await fetch(`${BASE_URL}/api/image/build`, { method: 'POST' });
if (!res.ok) throw new Error(`Build trigger failed: ${res.statusText}`);
}
// ── Release API ──────────────────────────────────────────────────────────────
export interface TenantReleaseResult {
subdomain: string;
containerName: string;
success: boolean;
error?: string;
}
export interface ReleaseRecord {
id: string;
environment: string;
imageName: string;
status: 'Running' | 'Succeeded' | 'PartialFailure' | 'Failed';
startedAt: string;
finishedAt?: string;
tenants: TenantReleaseResult[];
}
export async function getReleaseHistory(): Promise<ReleaseRecord[]> {
const res = await fetch(`${BASE_URL}/api/release/history`);
if (!res.ok) throw new Error(`Failed to get release history: ${res.statusText}`);
return res.json();
}
/** Triggers a release to the given environment and streams log lines as SSE. */
export function triggerRelease(
env: string,
onLine: (line: string) => void,
onDone: (record: ReleaseRecord) => void,
onError: (err: Event) => void
): EventSource {
const source = new EventSource(`${BASE_URL}/api/release/${env}`);
source.onmessage = (e) => {
try {
const msg = JSON.parse(e.data);
if (msg.done && msg.release) { onDone(msg.release as ReleaseRecord); source.close(); }
else if (typeof msg.line === 'string') onLine(msg.line);
} catch { /* ignore */ }
};
source.onerror = (e) => { onError(e); };
return source;
}
// ── Project Build API ────────────────────────────────────────────────────────
export interface ProjectDefinition {
name: string;
kind: 'DotnetProject' | 'NpmProject';
relativePath: string;
}
export interface BuildRecord {
id: string;
kind: 'DockerImage' | 'DotnetProject' | 'NpmProject';
target: string;
status: 'Running' | 'Succeeded' | 'Failed';
startedAt: string;
finishedAt?: string;
durationMs?: number;
log: string[];
}
export async function getProjects(): Promise<ProjectDefinition[]> {
const res = await fetch(`${BASE_URL}/api/builds/projects`);
if (!res.ok) throw new Error(`Failed to get projects: ${res.statusText}`);
return res.json();
}
export async function getBuildHistory(): Promise<BuildRecord[]> {
const res = await fetch(`${BASE_URL}/api/builds/history`);
if (!res.ok) throw new Error(`Failed to get build history: ${res.statusText}`);
return res.json();
}
/** Triggers a project build and streams log lines. */
export function triggerProjectBuild(
projectName: string,
onLine: (line: string) => void,
onDone: (record: BuildRecord) => void,
onError: (err: Event) => void
): EventSource {
const source = new EventSource(`${BASE_URL}/api/builds/${encodeURIComponent(projectName)}`);
source.onmessage = (e) => {
try {
const msg = JSON.parse(e.data);
if (msg.done && msg.build) { onDone(msg.build as BuildRecord); source.close(); }
else if (typeof msg.line === 'string') onLine(msg.line);
} catch { /* ignore */ }
};
source.onerror = (e) => { onError(e); };
return source;
}
// ── Git History API ──────────────────────────────────────────────────────────
export interface GitCommit {
hash: string;
shortHash: string;
author: string;
date: string;
subject: string;
files: string[];
}
/** Fetches up to `limit` commits, optionally restricted to a single path. */
export async function getGitLog(path?: string, limit = 20): Promise<GitCommit[]> {
  const query = new URLSearchParams({ limit: String(limit) });
  if (path) {
    query.set('path', path);
  }
  const response = await fetch(`${BASE_URL}/api/git/log?${query}`);
  if (!response.ok) {
    throw new Error(`Failed to get git log: ${response.statusText}`);
  }
  return response.json();
}
// ── Promotion / Branch Ladder API ────────────────────────────────────────────
// One Docker-image build run as returned by GET /api/image/history.
export interface BuildHistoryRecord {
  id: string;
  status: 'Running' | 'Succeeded' | 'Failed';
  startedAt: string;
  // Null while the build is still running.
  durationMs: number | null;
  // Commit the image was built from; null if unknown.
  commitSha: string | null;
  imageDigest: string | null;
}
/** Fetches the most recent image builds (newest-first per server contract), capped at `limit`. */
export async function getImageBuildHistory(limit = 30): Promise<BuildHistoryRecord[]> {
  const response = await fetch(`${BASE_URL}/api/image/history?limit=${limit}`);
  if (!response.ok) {
    throw new Error(`Failed to get build history: ${response.statusText}`);
  }
  return response.json();
}
// Lightweight commit descriptor used in ladder status and cherry-pick dialogs.
export interface CommitInfo {
  sha: string;
  shortSha: string;
  message: string;
  author: string;
  date: string;
}

// Per-branch state of the promotion ladder (GET /api/promotions/ladder).
export interface BranchStatus {
  branch: string;
  // False when the branch does not exist in the local clone.
  exists: boolean;
  shortHash: string | null;
  lastCommitSummary: string | null;
  // Commit counts relative to the next branch up the ladder.
  aheadOfNext: number;
  behindNext: number;
  // Commits on this branch not yet promoted upward.
  unreleasedCommits: CommitInfo[];
}

// One promotion run (merge from `fromBranch` into `toBranch`) with its streamed log.
export interface PromotionRecord {
  id: string;
  fromBranch: string;
  toBranch: string;
  requestedBy: string;
  note: string | null;
  status: 'Pending' | 'Running' | 'Succeeded' | 'Failed';
  createdAt: string;
  // Null until the promotion finishes.
  completedAt: string | null;
  commitCount: number;
  // One-line summaries of the commits carried by the promotion.
  commitLines: string[];
  log: string[];
}
/** Fetches per-branch ladder status for the given repo (defaults to 'Clarity'). */
export async function getLadderStatus(repo = 'Clarity'): Promise<BranchStatus[]> {
  const response = await fetch(`${BASE_URL}/api/promotions/ladder?repo=${encodeURIComponent(repo)}`);
  if (!response.ok) {
    throw new Error(`Failed to get ladder status: ${response.statusText}`);
  }
  return response.json();
}
/** Fetches all recorded promotion runs. */
export async function getPromotionHistory(): Promise<PromotionRecord[]> {
  const response = await fetch(`${BASE_URL}/api/promotions/history`);
  if (!response.ok) {
    throw new Error(`Failed to get promotion history: ${response.statusText}`);
  }
  return response.json();
}
/**
 * Triggers a promotion (merge `from` → `to`) and streams SSE progress lines
 * over a POSTed fetch body. Calls onLine per log line, onDone with the final
 * PromotionRecord, and onError with a message on HTTP/stream failure.
 * Returns a cancel function that stops the read loop and aborts the request.
 */
export function triggerPromotion(
  from: string,
  to: string,
  requestedBy: string,
  note: string | undefined,
  onLine: (line: string) => void,
  onDone: (record: PromotionRecord) => void,
  onError: (err: string) => void,
  repo = 'Clarity',
): () => void {
  let cancelled = false;
  const controller = new AbortController();
  (async () => {
    try {
      const res = await fetch(`${BASE_URL}/api/promotions/promote`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ from, to, requestedBy, note, repo }),
        signal: controller.signal,
      });
      if (!res.ok || !res.body) { onError(res.statusText); return; }
      const reader = res.body.getReader();
      const decoder = new TextDecoder();
      let buffer = '';
      while (!cancelled) {
        const { done, value } = await reader.read();
        if (done) break;
        // SSE events are delimited by a blank line ("\n\n"); any trailing
        // partial event stays in the buffer until its terminator arrives.
        buffer += decoder.decode(value, { stream: true });
        const parts = buffer.split('\n\n');
        buffer = parts.pop() ?? '';
        for (const chunk of parts) {
          // Strips a single leading "data:" prefix before JSON-parsing.
          // NOTE(review): assumes the server emits one data line per event —
          // a multi-line data payload would keep its second "data:" prefix.
          const dataLine = chunk.replace(/^data:\s*/m, '').trim();
          if (!dataLine) continue;
          try {
            const msg = JSON.parse(dataLine);
            if (msg.done && msg.promotion) onDone(msg.promotion as PromotionRecord);
            else if (typeof msg.line === 'string') onLine(msg.line);
          } catch { /* skip */ }
        }
      }
    } catch (e) {
      // An AbortError after cancellation is expected; only surface real failures.
      if (!cancelled) onError(e instanceof Error ? e.message : 'Unknown error');
    }
  })();
  return () => { cancelled = true; controller.abort(); };
}
/**
 * Resets `branch` to `toSha` in the given repo via POST /api/promotions/reset.
 * Throws with the server-supplied error message when available.
 */
export async function resetBranch(branch: string, toSha: string, repo: string): Promise<void> {
  const response = await fetch(`${BASE_URL}/api/promotions/reset`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ branch, toSha, repo }),
  });
  if (response.ok) return;
  const payload = await response.json().catch(() => ({}));
  throw new Error((payload as { error?: string }).error ?? response.statusText);
}
/**
 * Cherry-picks the specified commits (by full SHA) from `from` to `to` and
 * streams SSE progress over a POSTed fetch body. Calls onLine per log line,
 * onDone with the final PromotionRecord, and onError on HTTP/stream failure.
 * Returns a cancel function that stops the read loop and aborts the request.
 */
export function triggerCherryPick(
  shas: string[],
  from: string,
  to: string,
  requestedBy: string,
  note: string | undefined,
  onLine: (line: string) => void,
  onDone: (record: PromotionRecord) => void,
  onError: (err: string) => void,
  repo = 'Clarity',
): () => void {
  let cancelled = false;
  const controller = new AbortController();
  (async () => {
    try {
      const res = await fetch(`${BASE_URL}/api/promotions/cherry-pick`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ shas, from, to, requestedBy, note, repo }),
        signal: controller.signal,
      });
      if (!res.ok || !res.body) { onError(res.statusText); return; }
      const reader = res.body.getReader();
      const decoder = new TextDecoder();
      let buffer = '';
      while (!cancelled) {
        const { done, value } = await reader.read();
        if (done) break;
        // SSE events are delimited by a blank line; the trailing partial
        // event is held in the buffer until its terminator arrives.
        buffer += decoder.decode(value, { stream: true });
        const parts = buffer.split('\n\n');
        buffer = parts.pop() ?? '';
        for (const chunk of parts) {
          // Strips one leading "data:" prefix — assumes single-data-line
          // events from the server (see triggerPromotion's identical loop).
          const dataLine = chunk.replace(/^data:\s*/m, '').trim();
          if (!dataLine) continue;
          try {
            const msg = JSON.parse(dataLine);
            if (msg.done && msg.promotion) onDone(msg.promotion as PromotionRecord);
            else if (typeof msg.line === 'string') onLine(msg.line);
          } catch { /* skip */ }
        }
      }
    } catch (e) {
      // An AbortError after cancellation is expected; only surface real failures.
      if (!cancelled) onError(e instanceof Error ? e.message : 'Unknown error');
    }
  })();
  return () => { cancelled = true; controller.abort(); };
}
// -- Branch Conformance API --------------------------------------------------
// -- Branch Conformance API --------------------------------------------------
// How a branch deviates from its expected source branch.
export type ConformanceViolation = 'OK' | 'Missing' | 'Diverged' | 'Stale';
export type ConformanceSeverity = 'OK' | 'Info' | 'Warning' | 'Critical';

// Conformance verdict for one branch against its source.
export interface BranchConformanceCheck {
  branch: string;
  // Branch this one is measured against; null when not applicable.
  sourceBranch: string | null;
  violation: ConformanceViolation;
  severity: ConformanceSeverity;
  // Human-readable explanation of the verdict.
  detail: string;
  aheadOfSource: number;
  behindSource: number;
  // SHA to reset/create from to fix the violation; null when no fix applies.
  fixSha: string | null;
}

// Whole-repo conformance summary (GET /api/promotions/conformance).
export interface ConformanceReport {
  repo: string;
  // True only when every check passes.
  isConformant: boolean;
  checks: BranchConformanceCheck[];
}
/** Fetches the branch-conformance report for one repo (defaults to 'Clarity'). */
export async function getConformanceReport(repo = 'Clarity'): Promise<ConformanceReport> {
  const response = await fetch(`${BASE_URL}/api/promotions/conformance?repo=${encodeURIComponent(repo)}`);
  if (!response.ok) {
    throw new Error(`Failed to get conformance report: ${response.statusText}`);
  }
  return response.json();
}
/** Fetches conformance reports for every known repo. */
export async function getAllConformanceReports(): Promise<ConformanceReport[]> {
  const response = await fetch(`${BASE_URL}/api/promotions/conformance/all`);
  if (!response.ok) {
    throw new Error(`Failed to get conformance reports: ${response.statusText}`);
  }
  return response.json();
}
/**
 * Creates a missing ladder branch at `fromSha` via POST /api/promotions/create-branch.
 * Throws with the server-supplied error message when available.
 */
export async function createLadderBranch(branch: string, fromSha: string, repo: string): Promise<void> {
  const response = await fetch(`${BASE_URL}/api/promotions/create-branch`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ branch, fromSha, repo }),
  });
  if (response.ok) return;
  const payload = await response.json().catch(() => ({}));
  throw new Error((payload as { error?: string }).error ?? response.statusText);
}
// Barrel re-export split into domain modules. Import directly from the specific module for new code.
export * from './tenantApi';
export * from './imageApi';
export * from './releaseApi';
export * from './buildApi';
export * from './gitApi';
export * from './promotionApi';
@@ -0,0 +1,43 @@
// Base URL for the control-plane API; empty string means same-origin.
const BASE_URL = import.meta.env.VITE_API_URL ?? '';

// Outcome of releasing one tenant during a release run.
export interface TenantReleaseResult {
  subdomain: string;
  containerName: string;
  success: boolean;
  // Present only on failure.
  error?: string;
}

// One release run across an environment's tenants (GET /api/release/history).
export interface ReleaseRecord {
  id: string;
  environment: string;
  imageName: string;
  status: 'Running' | 'Succeeded' | 'PartialFailure' | 'Failed';
  startedAt: string;
  // Absent while the release is still running.
  finishedAt?: string;
  tenants: TenantReleaseResult[];
  // OPC ticket numbers associated with this release.
  opcNumbers: string[];
}
/** Fetches the full release history. */
export async function getReleaseHistory(): Promise<ReleaseRecord[]> {
  const response = await fetch(`${BASE_URL}/api/release/history`);
  if (!response.ok) {
    throw new Error(`Failed to get release history: ${response.statusText}`);
  }
  return response.json();
}
/**
 * Starts a release for the named environment and subscribes to its SSE stream.
 * Streamed lines are forwarded to onLine; the terminal frame carries the
 * finished ReleaseRecord, at which point the stream is closed and onDone fires.
 *
 * Fix: `env` is now URL-encoded before being placed in the path, matching the
 * sibling stream helpers (e.g. triggerProjectBuild) and keeping the URL valid
 * for environment names containing reserved characters.
 */
export function triggerRelease(
  env: string,
  onLine: (line: string) => void,
  onDone: (record: ReleaseRecord) => void,
  onError: (err: Event) => void,
): EventSource {
  const source = new EventSource(`${BASE_URL}/api/release/${encodeURIComponent(env)}`);
  source.onmessage = (e) => {
    try {
      const msg = JSON.parse(e.data);
      if (msg.done && msg.release) { onDone(msg.release as ReleaseRecord); source.close(); }
      else if (typeof msg.line === 'string') onLine(msg.line);
    } catch { /* ignore */ }
  };
  source.onerror = (e) => { onError(e); };
  return source;
}
+44
View File
@@ -0,0 +1,44 @@
import type { ProvisioningProgressEvent, ProvisioningRequest, TenantRecord } from '../types/provisioning';
const BASE_URL = import.meta.env.VITE_API_URL ?? '';
/**
 * Queues a new tenant-provisioning job and returns its job id.
 * Throws when the server rejects the request.
 */
export async function submitProvisioningJob(request: ProvisioningRequest): Promise<string> {
  const response = await fetch(`${BASE_URL}/api/provision`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(request),
  });
  if (!response.ok) {
    throw new Error(`Failed to queue job: ${response.statusText}`);
  }
  const payload = await response.json();
  return payload.id as string;
}
/** Fetches all provisioned tenants. */
export async function getTenants(): Promise<TenantRecord[]> {
  const response = await fetch(`${BASE_URL}/api/tenants`);
  if (!response.ok) {
    throw new Error(`Failed to load tenants: ${response.statusText}`);
  }
  return response.json();
}
/**
 * Subscribes to a tenant's live log stream; each SSE data payload is
 * forwarded verbatim to onLine. Caller closes the returned EventSource.
 *
 * Fix: `subdomain` is now URL-encoded before being placed in the path,
 * matching the other stream helpers in this codebase and keeping the URL
 * valid for names containing reserved characters.
 */
export function subscribeToTenantLogs(
  subdomain: string,
  onLine: (line: string) => void,
  onError: (err: Event) => void,
): EventSource {
  const source = new EventSource(`${BASE_URL}/api/tenants/${encodeURIComponent(subdomain)}/logs`);
  source.onmessage = (e) => { if (e.data) onLine(e.data); };
  source.onerror = onError;
  return source;
}
/**
 * Subscribes to a provisioning job's SSE progress stream; each JSON frame is
 * parsed and forwarded to onEvent (malformed frames are ignored). Caller
 * closes the returned EventSource.
 *
 * Fix: `jobId` is now URL-encoded before being placed in the path, matching
 * the other stream helpers in this codebase.
 */
export function subscribeToJobStream(
  jobId: string,
  onEvent: (event: ProvisioningProgressEvent) => void,
  onError: (err: Event) => void,
): EventSource {
  const source = new EventSource(`${BASE_URL}/api/provision/${encodeURIComponent(jobId)}/stream`);
  source.onmessage = (e) => {
    try { onEvent(JSON.parse(e.data)); } catch { /* ignore */ }
  };
  source.onerror = onError;
  return source;
}
@@ -1,74 +1,127 @@
import { useEffect, useState, useRef } from 'react';
import { Button, Drawer, Intent, NonIdealState, Spinner, Tag, Tooltip } from '@blueprintjs/core';
import { useEffect, useState } from 'react';
import { Button, Collapse, Drawer, Icon, Intent, NonIdealState, Spinner, Tag, Tooltip } from '@blueprintjs/core';
import { html as diff2htmlHtml } from 'diff2html';
import 'diff2html/bundles/css/diff2html.min.css';
import hljs from 'highlight.js';
import 'highlight.js/styles/github.css';
import { getCommitDetail, type CommitDetail } from '../api/opcApi';
import { getCommitDetail, type CommitDetail, type CommitFile } from '../api/opcApi';
interface Props {
hash: string | null;
onClose: () => void;
}
function fileStatusIntent(status: string): Intent {
if (status === 'added') return Intent.SUCCESS;
if (status === 'deleted') return Intent.DANGER;
if (status === 'renamed') return Intent.WARNING;
return Intent.NONE;
}
function fileStatusIcon(status: string): string {
if (status === 'added') return 'plus';
if (status === 'deleted') return 'minus';
if (status === 'renamed') return 'arrow-right';
return 'edit';
}
function FileDiff({ file }: { file: CommitFile }) {
const [open, setOpen] = useState(true);
const diffHtml = file.patch
? diff2htmlHtml(file.patch, {
drawFileList: false,
matching: 'lines',
outputFormat: 'line-by-line',
renderNothingWhenEmpty: true,
})
: '';
const displayPath = file.status === 'renamed' && file.oldPath && file.oldPath !== file.path
? `${file.oldPath}${file.path}`
: file.path;
return (
<div className="gcd-file-section">
<button
className={`gcd-file-header ${open ? 'gcd-file-header--open' : ''}`}
onClick={() => setOpen(o => !o)}
type="button"
>
<Icon icon={open ? 'chevron-down' : 'chevron-right'} size={14} className="gcd-file-chevron" />
<Icon icon={fileStatusIcon(file.status)} size={13} intent={fileStatusIntent(file.status)} className="gcd-file-status-icon" />
<span className="gcd-file-path">{displayPath}</span>
<span className="gcd-file-stats">
{file.additions > 0 && <span className="gcd-adds">+{file.additions}</span>}
{file.deletions > 0 && <span className="gcd-dels">-{file.deletions}</span>}
</span>
</button>
<Collapse isOpen={open} keepChildrenMounted>
{diffHtml
? <div className="git-diff-container" dangerouslySetInnerHTML={{ __html: diffHtml }} />
: <div className="gcd-no-diff">Binary or empty file no textual diff available.</div>
}
</Collapse>
</div>
);
}
export function GitCommitDrawer({ hash, onClose }: Props) {
const [detail, setDetail] = useState<CommitDetail | null>(null);
const [loading, setLoading] = useState(false);
const [error, setError] = useState<string | null>(null);
const diffRef = useRef<HTMLDivElement>(null);
useEffect(() => {
if (!hash) { setDetail(null); setError(null); return; }
setLoading(true); setDetail(null); setError(null);
if (!hash) {
// Delay clearing so the closing animation doesn't flash blank
const t = setTimeout(() => { setDetail(null); setError(null); }, 300);
return () => clearTimeout(t);
}
setLoading(true);
setError(null);
getCommitDetail(hash)
.then(setDetail)
.then(d => { setDetail(d); setError(null); })
.catch(e => setError(String(e)))
.finally(() => setLoading(false));
}, [hash]);
// After diff HTML is injected, run highlight.js over code blocks
useEffect(() => {
if (detail && diffRef.current) {
diffRef.current.querySelectorAll<HTMLElement>('code[class]').forEach(el => {
hljs.highlightElement(el);
});
}
}, [detail]);
const combinedPatch = detail?.files.map(f => f.patch).join('\n') ?? '';
const diffHtml = combinedPatch
? diff2htmlHtml(combinedPatch, {
drawFileList: true,
matching: 'lines',
outputFormat: 'line-by-line',
renderNothingWhenEmpty: false,
})
: '';
const totalAdds = detail?.files.reduce((a, f) => a + f.additions, 0) ?? 0;
const totalDels = detail?.files.reduce((a, f) => a + f.deletions, 0) ?? 0;
return (
<Drawer
isOpen={!!hash}
onClose={onClose}
title={detail ? (
title={
detail ? (
<span className="git-drawer-title">
<code className="git-drawer-hash">{detail.shortHash}</code>
<span className="git-drawer-subject">{detail.subject}</span>
</span>
) : 'Commit Diff'}
) : 'Commit Diff'
}
size="70%"
position="right"
className="git-commit-drawer"
>
<div className="git-drawer-body">
{loading && <NonIdealState icon={<Spinner size={24} />} title="Loading diff…" />}
{error && <NonIdealState icon="error" intent={Intent.DANGER} title="Failed to load commit" description={error} />}
{/* Scrollable body */}
<div className="gcd-body">
{/* Loading overlay — keeps old content visible while fetching next */}
{loading && (
<div className="gcd-loading-overlay">
<Spinner size={28} />
</div>
)}
{detail && (
{error && (
<NonIdealState icon="error" intent={Intent.DANGER}
title="Failed to load commit" description={error} />
)}
{!error && detail && (
<>
{/* Metadata bar */}
<div className="git-commit-meta-bar">
<div className="git-commit-meta-left">
<Tooltip content="Copy full hash">
<Tooltip content="Copy full hash" placement="bottom">
<code
className="git-commit-hash-chip"
onClick={() => navigator.clipboard.writeText(detail.hash)}
@@ -81,26 +134,33 @@ export function GitCommitDrawer({ hash, onClose }: Props) {
<span className="git-commit-date">{detail.date}</span>
</div>
<div className="git-commit-meta-right">
<Tag intent={Intent.SUCCESS} minimal round icon="add">
+{detail.files.reduce((a, f) => a + f.additions, 0)}
{totalAdds > 0 && (
<Tag intent={Intent.SUCCESS} minimal round>+{totalAdds}</Tag>
)}
{totalDels > 0 && (
<Tag intent={Intent.DANGER} minimal round>-{totalDels}</Tag>
)}
<Tag minimal round>
{detail.files.length} file{detail.files.length !== 1 ? 's' : ''}
</Tag>
<Tag intent={Intent.DANGER} minimal round icon="remove">
-{detail.files.reduce((a, f) => a + f.deletions, 0)}
</Tag>
<Tag minimal round>{detail.files.length} file{detail.files.length !== 1 ? 's' : ''}</Tag>
</div>
</div>
{/* Commit body if multiline */}
{/* Extended commit message */}
{detail.body.trim() !== detail.subject.trim() && (
<pre className="git-commit-body">{detail.body.trim()}</pre>
)}
{/* Diff */}
{diffHtml
? <div ref={diffRef} className="git-diff-container" dangerouslySetInnerHTML={{ __html: diffHtml }} />
: <NonIdealState icon="git-commit" title="No diff" description="This commit has no file changes." />
}
{/* Per-file diffs */}
{detail.files.length === 0 ? (
<NonIdealState icon="git-commit" title="No file changes" />
) : (
<div className="gcd-files-list">
{detail.files.map(f => (
<FileDiff key={f.path} file={f} />
))}
</div>
)}
</>
)}
@@ -109,7 +169,8 @@ export function GitCommitDrawer({ hash, onClose }: Props) {
)}
</div>
<div className="git-drawer-footer">
{/* Footer — sticky at bottom */}
<div className="gcd-footer">
<Button text="Close" onClick={onClose} />
</div>
</Drawer>
@@ -1,6 +1,6 @@
import { useEffect, useRef, useState } from 'react';
import { Button, Callout, Intent, Tag } from '@blueprintjs/core';
import { getImageStatus, type ImageBuildStatus } from '../api/provisioningApi';
import { getImageStatus, type ImageBuildStatus } from '../api/imageApi';
const BASE_URL = import.meta.env.VITE_API_URL ?? '';
@@ -4,7 +4,7 @@ import ClientDetailsStep from './ClientDetailsStep';
import DeploymentConfigStep from './DeploymentConfigStep';
import ReviewStep from './ReviewStep';
import DeploymentLiveStep from './DeploymentLiveStep';
import { submitProvisioningJob } from '../../api/provisioningApi';
import { submitProvisioningJob } from '../../api/tenantApi';
import { defaultStackConfig } from '../../types/provisioning';
import type { ProvisioningRequest } from '../../types/provisioning';
@@ -1,6 +1,6 @@
import { useEffect, useRef, useState } from 'react';
import { AnchorButton, Callout, Intent, ProgressBar, Spinner, Tab, Tabs, Tag } from '@blueprintjs/core';
import { subscribeToJobStream } from '../../api/provisioningApi';
import { subscribeToJobStream } from '../../api/tenantApi';
import { tenantUrl } from '../../config';
import type { ProvisioningProgressEvent } from '../../types/provisioning';
+233 -32
View File
@@ -804,30 +804,116 @@ body {
.opc-sdlc-pipeline {
display: flex;
align-items: center;
flex-wrap: wrap;
gap: 0.2rem;
margin-bottom: 0.35rem;
}
.opc-sdlc-stage-item {
display: flex;
align-items: center;
gap: 0.2rem;
align-items: flex-start;
flex-wrap: nowrap;
gap: 0;
overflow-x: auto;
padding-bottom: 0.25rem;
}
.opc-sdlc-arrow {
color: #8f99a8;
font-size: 0.8rem;
font-size: 1rem;
font-weight: 600;
margin: 0 0.1rem;
flex-shrink: 0;
align-self: center;
margin: 0 0.4rem;
user-select: none;
}
.opc-sdlc-furthest {
font-size: 0.75rem;
/* Individual branch box */
.opc-sdlc-box {
flex: 1 1 140px;
min-width: 130px;
max-width: 200px;
display: flex;
flex-direction: column;
border: 1px solid #dce0e6;
border-radius: 6px;
background: #fff;
overflow: hidden;
flex-shrink: 0;
}
.opc-sdlc-box--reached {
border-width: 2px;
}
.opc-sdlc-box-header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 0.3rem 0.5rem;
background: #f6f7f9;
border-bottom: 1px solid #e5e8eb;
flex-shrink: 0;
}
.opc-sdlc-box-count {
font-size: 0.68rem;
color: #738091;
margin-top: 0.3rem;
background: #e5e8eb;
border-radius: 10px;
padding: 0 6px;
line-height: 1.5;
}
/* Scrollable body */
.opc-sdlc-box-body {
flex: 1;
overflow-y: auto;
max-height: 140px;
min-height: 60px;
padding: 0.3rem 0.4rem;
display: flex;
flex-direction: column;
gap: 0.15rem;
}
.opc-sdlc-sha-row {
display: flex;
align-items: baseline;
gap: 0.35rem;
padding: 0.1rem 0.2rem;
border-radius: 3px;
opacity: 0.35;
}
.opc-sdlc-sha-row--reached {
opacity: 1;
}
.opc-sdlc-sha {
font-family: 'Consolas', 'Courier New', monospace;
font-size: 0.7rem;
color: #2d72d2;
flex-shrink: 0;
}
.opc-sdlc-sha-msg {
font-size: 0.68rem;
color: #4a5568;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
flex: 1;
min-width: 0;
}
.opc-sdlc-box-empty {
font-size: 0.7rem;
color: #a3acb6;
font-style: italic;
padding: 0.2rem 0;
}
.opc-sdlc-box-pending {
font-size: 0.68rem;
color: #a3acb6;
font-style: italic;
margin-top: auto;
padding-top: 0.25rem;
border-top: 1px dashed #e5e8eb;
}
/* Commits section labels */
@@ -850,10 +936,50 @@ body {
}
/* ── Git Commit Drawer ──────────────────────────────────────────────────────── */
.git-commit-drawer .bp5-drawer-header {
/* Drawer shell: full-height flex column */
.git-commit-drawer.bp6-drawer {
display: flex;
flex-direction: column;
height: 100%;
overflow: hidden;
}
.git-commit-drawer .bp6-drawer-header {
flex-shrink: 0;
padding: 0.75rem 1rem;
}
/*
* .gcd-body is the scrollable content area.
* Blueprint v6 renders children directly inside .bp6-drawer no body wrapper.
*/
.git-commit-drawer .gcd-body {
flex: 1 1 0; /* 0 basis — don't size from content, allow shrink */
min-height: 0; /* flex children won't shrink past content without this */
overflow-y: auto;
overflow-x: hidden;
padding: 1rem;
display: flex;
flex-direction: column;
gap: 1rem;
position: relative; /* loading overlay anchor */
}
/* Children of the scroll container must NOT shrink if they do, content
* never overflows and the scrollbar never appears. */
.git-commit-drawer .gcd-body > * {
flex-shrink: 0;
}
/* Footer rendered as last child — sits below the scroll area */
.git-commit-drawer .gcd-footer {
flex-shrink: 0;
padding: 0.5rem 1rem;
display: flex;
justify-content: flex-end;
}
.git-drawer-title {
display: flex;
align-items: center;
@@ -871,6 +997,95 @@ body {
font-family: 'JetBrains Mono', 'Fira Code', monospace;
}
/* Loading overlay — keeps old diff visible while fetching next commit */
.gcd-loading-overlay {
position: absolute;
inset: 0;
background: rgba(255, 255, 255, 0.7);
display: flex;
align-items: center;
justify-content: center;
z-index: 10;
pointer-events: none;
}
/* Per-file accordion */
.gcd-files-list {
display: flex;
flex-direction: column;
gap: 0;
border: 1px solid #dce0e6;
border-radius: 6px;
overflow: hidden;
margin: 0.75rem 0;
}
.gcd-file-section {
border-bottom: 1px solid #dce0e6;
}
.gcd-file-section:last-child {
border-bottom: none;
}
.gcd-file-header {
all: unset;
box-sizing: border-box;
display: flex;
align-items: center;
gap: 0.5rem;
width: 100%;
padding: 0.45rem 0.75rem;
background: #f6f8fa;
cursor: pointer;
user-select: none;
transition: background 0.1s;
font-family: 'JetBrains Mono', 'Fira Code', monospace;
font-size: 0.78rem;
color: #1c2127;
}
.gcd-file-header:hover,
.gcd-file-header--open {
background: #edf2f7;
}
.gcd-file-chevron {
flex-shrink: 0;
color: #738091;
}
.gcd-file-status-icon {
flex-shrink: 0;
}
.gcd-file-path {
flex: 1;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
min-width: 0;
}
.gcd-file-stats {
display: flex;
gap: 0.4rem;
flex-shrink: 0;
font-size: 0.73rem;
font-family: 'JetBrains Mono', 'Fira Code', monospace;
}
.gcd-adds { color: #1a7f37; font-weight: 600; }
.gcd-dels { color: #cf222e; font-weight: 600; }
.gcd-no-diff {
padding: 0.6rem 1rem;
font-size: 0.8rem;
color: #738091;
font-style: italic;
background: #fafafa;
}
.git-drawer-subject {
font-size: 0.92rem;
font-weight: 600;
@@ -880,21 +1095,6 @@ body {
color: #1c2127;
}
.git-drawer-body {
flex: 1;
overflow-y: auto;
padding: 1rem;
display: flex;
flex-direction: column;
gap: 1rem;
}
.git-drawer-footer {
padding: 0.75rem 1rem;
border-top: 1px solid #d3d8de;
display: flex;
justify-content: flex-end;
}
.git-commit-meta-bar {
display: flex;
@@ -961,7 +1161,8 @@ body {
font-size: 0.78rem;
line-height: 1.45;
border-radius: 6px;
overflow: hidden;
overflow-x: auto; /* horizontal scroll for wide diffs, not clip */
overflow-y: visible;
border: 1px solid #d0d7de;
}
+3
View File
@@ -1,10 +1,13 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import { BrowserRouter } from 'react-router-dom'
import './index.css'
import App from './App.tsx'
createRoot(document.getElementById('root')!).render(
<StrictMode>
<BrowserRouter>
<App />
</BrowserRouter>
</StrictMode>,
)
+30 -33
View File
@@ -1,4 +1,4 @@
import { useState, useMemo, useEffect, useCallback } from 'react';
import { useState, useMemo, useEffect, useCallback, Fragment } from 'react';
import { GitCommitDrawer } from '../components/GitCommitDrawer';
import {
Button, Callout, Divider, Drawer, FormGroup,
@@ -76,18 +76,9 @@ const SDLC_STAGES: { branch: string; label: string; intent: Intent }[] = [
{ branch: 'develop', label: 'Dev', intent: Intent.PRIMARY },
{ branch: 'staging', label: 'Staging', intent: Intent.WARNING },
{ branch: 'uat', label: 'UAT', intent: Intent.DANGER },
{ branch: 'master', label: 'Production', intent: Intent.SUCCESS },
{ branch: 'main', label: 'Production', intent: Intent.SUCCESS },
];
function deriveSdlcSummary(coverage: BranchCoverage[]): { label: string; intent: Intent } | null {
for (let i = SDLC_STAGES.length - 1; i >= 0; i--) {
const stage = SDLC_STAGES[i];
const hit = coverage.find(c => c.branch === stage.branch);
if (hit?.contains) return { label: stage.label, intent: stage.intent };
}
return null;
}
// Aggregate per-repo branch coverage into a single view.
// A stage is "reached" only when every repo that recognised at least one hash
// reports contains=true for that branch. Repos that recognised no hashes are
@@ -487,42 +478,48 @@ function CommitsTab({ opc, isActive }: { opc: Opc; isActive: boolean }) {
{/* SDLC Delivery Chain */}
{coverage.length > 0 && (() => {
const summary = deriveSdlcSummary(coverage);
const allCommits = [
...autoCommits,
...pinned.map(p => ({ repoKey: 'pinned', hash: p.hash, shortHash: p.shortHash, author: p.pinnedBy, date: p.pinnedAt, subject: p.subject, files: [] })),
].filter((c, i, a) => a.findIndex(x => x.hash === c.hash) === i);
return (
<div className="opc-delivery-chain">
<div className="opc-field-label" style={{ marginBottom: '0.6rem' }}>Delivery Chain</div>
<div className="opc-field-label" style={{ marginBottom: '0.75rem' }}>Delivery Chain</div>
<div className="opc-sdlc-pipeline">
{SDLC_STAGES.map((stage, i) => {
const hit = coverage.find(c => c.branch === stage.branch);
const reached = hit?.contains ?? false;
return (
<div key={stage.branch} className="opc-sdlc-stage-item">
<Fragment key={stage.branch}>
{i > 0 && <span className="opc-sdlc-arrow"></span>}
<Tooltip content={
reached
? `All linked commits have reached ${stage.label}`
: hit
? `Not all linked commits have reached ${stage.label} yet`
: `${stage.label} branch not found locally`
}>
<Tag
intent={reached ? stage.intent : Intent.NONE}
icon={reached ? 'tick-circle' : 'circle'}
minimal={!reached}
round
>
<div className={`opc-sdlc-box${reached ? ' opc-sdlc-box--reached' : ''}`} style={{ borderColor: reached ? SDLC_STAGES[i].intent === 'primary' ? '#2d72d2' : SDLC_STAGES[i].intent === 'warning' ? '#c87619' : SDLC_STAGES[i].intent === 'danger' ? '#ac2f33' : '#1c6e42' : '#dce0e6' }}>
{/* Box header */}
<div className="opc-sdlc-box-header">
<Tag intent={reached ? stage.intent : Intent.NONE} minimal={!reached} round style={{ fontWeight: 600, fontSize: '0.72rem' }}>
{stage.label}
</Tag>
</Tooltip>
{reached && <span className="opc-sdlc-box-count">{allCommits.length}</span>}
</div>
{/* Scrollable SHA list */}
<div className="opc-sdlc-box-body">
{allCommits.length === 0 ? (
<span className="opc-sdlc-box-empty">No linked commits</span>
) : allCommits.map(c => (
<div key={c.hash} className={`opc-sdlc-sha-row${reached ? ' opc-sdlc-sha-row--reached' : ''}`} title={c.subject}>
<code className="opc-sdlc-sha">{c.shortHash}</code>
<span className="opc-sdlc-sha-msg">{c.subject}</span>
</div>
))}
{!reached && allCommits.length > 0 && (
<div className="opc-sdlc-box-pending">Not yet promoted</div>
)}
</div>
</div>
</Fragment>
);
})}
</div>
{summary && (
<div className="opc-sdlc-furthest">
Furthest: <strong>{summary.label}</strong>
</div>
)}
</div>
);
})()}
+94 -17
View File
@@ -6,10 +6,11 @@ import {
} from '@blueprintjs/core';
import {
getLadderStatus, getPromotionHistory, triggerPromotion, triggerCherryPick,
getImageBuildHistory, resetBranch, getAllConformanceReports, createLadderBranch,
type BranchStatus, type CommitInfo, type PromotionRecord, type BuildHistoryRecord,
type ConformanceReport, type BranchConformanceCheck,
} from '../api/provisioningApi';
resetBranch, getAllConformanceReports, createLadderBranch, getBuildGate,
type BranchStatus, type CommitInfo, type PromotionRecord,
type ConformanceReport, type BranchConformanceCheck, type BuildGate,
} from '../api/promotionApi';
import { getImageBuildHistory, type BuildHistoryRecord } from '../api/imageApi';
// ── Constants ─────────────────────────────────────────────────────────────────
@@ -36,6 +37,19 @@ const BUILD_COLOR: Record<string, string> = {
Running: '#2d72d2',
};
// -- OPC tag helper -----------------------------------------------------------
const OPC_TAG_RE = /OPC\s*#\s*(\d+)/gi;
function extractOpcTags(message: string): string[] {
const tags: string[] = [];
let m: RegExpExecArray | null;
// Reset lastIndex before every use (global flag retains state)
OPC_TAG_RE.lastIndex = 0;
while ((m = OPC_TAG_RE.exec(message)) !== null)
tags.push(`OPC # ${m[1].padStart(4, '0')}`);
return [...new Set(tags)];
}
// -- BuildSparkline -----------------------------------------------------------
const MAX_BAR_H = 44;
@@ -119,9 +133,12 @@ function PromotionTerminal({ lines }: { lines: string[] }) {
// -- PromoteDialog ------------------------------------------------------------
function PromoteDialog({
from, to, repo, onClose, onDone,
from, to, repo, fromSha, onClose, onDone,
}: {
from: string; to: string; repo: RepoName;
from: string;
to: string;
repo: RepoName;
fromSha?: string | null;
onClose: () => void;
onDone: () => void;
}) {
@@ -130,8 +147,19 @@ function PromoteDialog({
const [logs, setLogs] = useState<string[]>([]);
const [done, setDone] = useState(false);
const [error, setError] = useState<string | null>(null);
const [gate, setGate] = useState<BuildGate | null>(null);
const [gateLoading, setGateLoading] = useState(false);
const cancelRef = useRef<(() => void) | null>(null);
useEffect(() => {
if (!fromSha) return;
setGateLoading(true);
getBuildGate(fromSha)
.then(setGate)
.catch(() => {})
.finally(() => setGateLoading(false));
}, [fromSha]);
const fromMeta = LADDER.find((l) => l.branch === from);
const toMeta = LADDER.find((l) => l.branch === to);
@@ -170,6 +198,30 @@ function PromoteDialog({
<p style={{ marginBottom: '0.75rem', color: '#738091', fontSize: '0.85rem' }}>
Merges <code>{from}</code> into <code>{to}</code> via no-fast-forward and pushes to origin.
</p>
{fromSha && (
<div style={{ display: 'flex', alignItems: 'center', gap: '0.5rem', marginBottom: '0.6rem' }}>
<span style={{ fontSize: '0.82rem', color: '#738091' }}>Build gate:</span>
{gateLoading ? (
<Spinner size={12} />
) : gate ? (
<Tag
intent={gate.status === 'Green' ? Intent.SUCCESS : gate.status === 'Red' ? Intent.DANGER : Intent.WARNING}
minimal
icon={gate.status === 'Green' ? 'tick-circle' : gate.status === 'Red' ? 'error' : 'warning-sign' as any}
>
{gate.status === 'Green' ? 'Passed' : gate.status === 'Red' ? 'Build failed' : gate.status}
</Tag>
) : null}
{gate?.buildId && (
<code style={{ fontSize: '0.72rem', color: '#8f99a8' }}>{gate.buildId}</code>
)}
</div>
)}
{gate?.status === 'Red' && (
<Callout intent={Intent.DANGER} icon="error" style={{ marginBottom: '0.75rem', fontSize: '0.82rem' }}>
The build for this commit failed. Fix it before promoting to prevent broken code reaching <strong>{to}</strong>.
</Callout>
)}
<TextArea
fill
rows={3}
@@ -206,7 +258,7 @@ function PromoteDialog({
icon="arrow-right"
text={running ? 'Promoting\u2026' : `Promote ${from} \u2192 ${to}`}
loading={running}
disabled={running}
disabled={running || gate?.status === 'Red'}
onClick={handlePromote}
/>
</>
@@ -285,9 +337,10 @@ function ResetDialog({
// -- CherryPickDialog ---------------------------------------------------------
function CherryPickDialog({
shas, from, to, repo, onClose, onDone,
shas, commits, from, to, repo, onClose, onDone,
}: {
shas: string[];
commits: CommitInfo[];
from: string;
to: string;
repo: RepoName;
@@ -335,11 +388,28 @@ function CherryPickDialog({
>
<DialogBody>
{!running && !done && (
<p style={{ marginBottom: '0.75rem', color: '#738091', fontSize: '0.85rem' }}>
<>
<p style={{ marginBottom: '0.5rem', color: '#738091', fontSize: '0.85rem' }}>
Applies <strong>{shas.length}</strong> selected commit{shas.length > 1 ? 's' : ''} as new
            commits on <code>{to}</code>. The branches will diverge &mdash; use the Reset button to
re-align if a full fast-forward promote is needed later.
</p>
{commits.length > 0 && (
<div style={{ background: '#f6f7f9', border: '1px solid #e5e8eb', borderRadius: 4, padding: '0.4rem 0.6rem', fontSize: '0.74rem', marginBottom: '0.25rem' }}>
{commits.map(c => (
<div key={c.sha} style={{ display: 'flex', alignItems: 'center', gap: '0.4rem', marginBottom: '0.1rem' }}>
<code style={{ color: '#2d72d2', flexShrink: 0 }}>{c.shortSha}</code>
{extractOpcTags(c.message).map(tag => (
<Tag key={tag} intent={Intent.PRIMARY} minimal round style={{ fontSize: '0.65rem', flexShrink: 0, padding: '0 4px' }}>
{tag}
</Tag>
))}
<span style={{ color: '#1c2127', overflow: 'hidden', textOverflow: 'ellipsis', whiteSpace: 'nowrap', flex: 1 }}>{c.message}</span>
</div>
))}
</div>
)}
</>
)}
{(running || logs.length > 0) && (
<div style={{ marginTop: '0.75rem' }}>
@@ -389,9 +459,9 @@ function LadderStep({
meta: typeof LADDER[number];
nextBranch: string | null;
isLast: boolean;
onPromote: (from: string, to: string) => void;
onPromote: (from: string, to: string, tipSha: string | null) => void;
onReset: (branch: string, toSha: string) => void;
onCherryPick: (shas: string[], from: string, to: string) => void;
onCherryPick: (shas: string[], commits: CommitInfo[], from: string, to: string) => void;
builds?: BuildHistoryRecord[];
}) {
const [open, setOpen] = useState(false);
@@ -478,7 +548,7 @@ function LadderStep({
icon="arrow-right"
text={`Promote \u2192 ${nextBranch}`}
disabled={(status?.aheadOfNext ?? 0) === 0}
onClick={() => onPromote(meta.branch, nextBranch)}
onClick={() => onPromote(meta.branch, nextBranch, status?.tipSha ?? null)}
/>
</div>
)}
@@ -522,6 +592,11 @@ function LadderStep({
style={{ margin: 0 }}
/>
<code style={{ color: '#2d72d2', flexShrink: 0 }}>{c.shortSha}</code>
{extractOpcTags(c.message).map(tag => (
<Tag key={tag} intent={Intent.PRIMARY} minimal round style={{ fontSize: '0.65rem', flexShrink: 0, padding: '0 4px' }}>
{tag}
</Tag>
))}
<span style={{ color: '#1c2127', flex: 1, overflow: 'hidden', textOverflow: 'ellipsis', whiteSpace: 'nowrap' }}>
{c.message}
</span>
@@ -536,7 +611,7 @@ function LadderStep({
small
icon="git-merge"
text={`Cherry-pick ${selectedShas.size} \u2192 ${nextBranch}`}
onClick={() => onCherryPick([...selectedShas], meta.branch, nextBranch)}
onClick={() => onCherryPick([...selectedShas], (status?.unreleasedCommits ?? []).filter(c => selectedShas.has(c.sha)), meta.branch, nextBranch)}
/>
</div>
)}
@@ -966,9 +1041,9 @@ export default function BranchPage() {
const [builds, setBuilds] = useState<BuildHistoryRecord[]>([]);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [dialog, setDialog] = useState<{ from: string; to: string } | null>(null);
const [dialog, setDialog] = useState<{ from: string; to: string; tipSha: string | null } | null>(null);
const [resetDialog, setResetDialog] = useState<{ branch: string; toSha: string } | null>(null);
const [cherryPickDialog, setCherryPickDialog] = useState<{ shas: string[]; from: string; to: string } | null>(null);
const [cherryPickDialog, setCherryPickDialog] = useState<{ shas: string[]; commits: CommitInfo[]; from: string; to: string } | null>(null);
const load = async (r: RepoName = repo) => {
setLoading(true);
@@ -1072,9 +1147,9 @@ export default function BranchPage() {
meta={meta}
nextBranch={nextBranch}
isLast={isLast}
onPromote={(from, to) => setDialog({ from, to })}
onPromote={(from, to, tipSha) => setDialog({ from, to, tipSha })}
onReset={(branch, toSha) => setResetDialog({ branch, toSha })}
onCherryPick={(shas, from, to) => setCherryPickDialog({ shas, from, to })}
onCherryPick={(shas, commits, from, to) => setCherryPickDialog({ shas, commits, from, to })}
builds={meta.branch === 'develop' ? builds : undefined}
/>
{!isLast && (
@@ -1120,6 +1195,7 @@ export default function BranchPage() {
from={dialog.from}
to={dialog.to}
repo={repo}
fromSha={dialog.tipSha}
onClose={() => setDialog(null)}
onDone={() => { setDialog(null); load(); }}
/>
@@ -1145,6 +1221,7 @@ export default function BranchPage() {
{cherryPickDialog && (
<CherryPickDialog
shas={cherryPickDialog.shas}
commits={cherryPickDialog.commits}
from={cherryPickDialog.from}
to={cherryPickDialog.to}
repo={repo}
@@ -3,10 +3,8 @@ import {
Button, Callout, Intent, Tag, Spinner, NonIdealState,
Collapse, HTMLTable,
} from '@blueprintjs/core';
import {
getProjects, getBuildHistory, getGitLog,
type ProjectDefinition, type BuildRecord, type GitCommit,
} from '../api/provisioningApi';
import { getProjects, getBuildHistory, type ProjectDefinition, type BuildRecord } from '../api/buildApi';
import { getGitLog, type GitCommit } from '../api/gitApi';
const BASE_URL = import.meta.env.VITE_API_URL ?? '';
@@ -14,6 +12,7 @@ const KIND_INTENT: Record<string, Intent> = {
DotnetProject: Intent.PRIMARY,
NpmProject: Intent.WARNING,
DockerImage: Intent.NONE,
SolutionBuild: Intent.SUCCESS,
};
const STATUS_INTENT: Record<string, Intent> = {
@@ -2,7 +2,7 @@ import { useEffect, useRef, useState } from 'react';
import { AnchorButton, Button, Callout, Intent, NonIdealState, Spinner, Tab, Tabs, Tag } from '@blueprintjs/core';
import DeployWizard from '../components/wizard/DeployWizard';
import { tenantUrl, CLARITY_DOMAIN } from '../config';
import { getTenants, subscribeToTenantLogs } from '../api/provisioningApi';
import { getTenants, subscribeToTenantLogs } from '../api/tenantApi';
import type { TenantRecord } from '../types/provisioning';
const ENV_INTENT: Record<string, Intent> = {
@@ -4,7 +4,8 @@ import {
HTMLTable, Card, Elevation, Tabs, Tab, type TabId,
FormGroup, InputGroup,
} from '@blueprintjs/core';
import { getImageStatus, getBuildHistory, type ImageBuildStatus, type BuildRecord } from '../api/provisioningApi';
import { getImageStatus, type ImageBuildStatus } from '../api/imageApi';
import { getBuildHistory, type BuildRecord } from '../api/buildApi';
import {
getInfraStatus, streamComposeUp, streamComposeForceUp, streamComposeNuke, streamComposeDown,
type InfraService,
@@ -3,10 +3,8 @@ import {
Button, Callout, Intent, Tag, Spinner, HTMLTable,
NonIdealState,
} from '@blueprintjs/core';
import {
getReleaseHistory, getBuildHistory,
type ReleaseRecord, type BuildRecord,
} from '../api/provisioningApi';
import { getReleaseHistory, type ReleaseRecord } from '../api/releaseApi';
import { getBuildHistory, type BuildRecord } from '../api/buildApi';
const BASE_URL = import.meta.env.VITE_API_URL ?? '';
@@ -145,6 +143,14 @@ function ReleaseHistoryTable({ records }: { records: ReleaseRecord[] }) {
{expanded === r.id && (
<tr key={r.id + '-detail'}>
<td colSpan={7} style={{ padding: '0.4rem 1rem 0.8rem' }}>
{r.opcNumbers?.length > 0 && (
<div style={{ display: 'flex', gap: '0.3rem', flexWrap: 'wrap', alignItems: 'center', marginBottom: '0.5rem', paddingBottom: '0.5rem', borderBottom: '1px solid #e5e8eb' }}>
<span style={{ fontSize: '0.72rem', color: '#8f99a8' }}>OPCs in this release:</span>
{r.opcNumbers.map(n => (
<Tag key={n} intent={Intent.PRIMARY} minimal round style={{ fontFamily: 'monospace', fontSize: '0.72rem' }}>{n}</Tag>
))}
</div>
)}
{r.tenants.map((t) => (
<div key={t.subdomain} style={{ display: 'flex', gap: '0.5rem', marginBottom: 2 }}>
<Tag intent={t.success ? Intent.SUCCESS : Intent.DANGER} minimal round>