chunker ui redo
This commit is contained in:
120
ui/chunker/package-lock.json
generated
120
ui/chunker/package-lock.json
generated
@@ -8,10 +8,15 @@
|
||||
"name": "mpr-chunker",
|
||||
"version": "0.1.0",
|
||||
"dependencies": {
|
||||
"@protobuf-ts/runtime": "^2.11.1",
|
||||
"@protobuf-ts/runtime-rpc": "^2.11.1",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@protobuf-ts/grpcweb-transport": "^2.11.1",
|
||||
"@protobuf-ts/plugin": "^2.11.1",
|
||||
"@protobuf-ts/protoc": "^2.11.1",
|
||||
"@types/react": "^18.2.0",
|
||||
"@types/react-dom": "^18.2.0",
|
||||
"@vitejs/plugin-react": "^4.2.0",
|
||||
@@ -301,6 +306,39 @@
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@bufbuild/protobuf": {
|
||||
"version": "2.11.0",
|
||||
"resolved": "https://registry.npmjs.org/@bufbuild/protobuf/-/protobuf-2.11.0.tgz",
|
||||
"integrity": "sha512-sBXGT13cpmPR5BMgHE6UEEfEaShh5Ror6rfN3yEK5si7QVrtZg8LEPQb0VVhiLRUslD2yLnXtnRzG035J/mZXQ==",
|
||||
"dev": true,
|
||||
"license": "(Apache-2.0 AND BSD-3-Clause)"
|
||||
},
|
||||
"node_modules/@bufbuild/protoplugin": {
|
||||
"version": "2.11.0",
|
||||
"resolved": "https://registry.npmjs.org/@bufbuild/protoplugin/-/protoplugin-2.11.0.tgz",
|
||||
"integrity": "sha512-lyZVNFUHArIOt4W0+dwYBe5GBwbKzbOy8ObaloEqsw9Mmiwv2O48TwddDoHN4itylC+BaEGqFdI1W8WQt2vWJQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@bufbuild/protobuf": "2.11.0",
|
||||
"@typescript/vfs": "^1.6.2",
|
||||
"typescript": "5.4.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@bufbuild/protoplugin/node_modules/typescript": {
|
||||
"version": "5.4.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
|
||||
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/aix-ppc64": {
|
||||
"version": "0.21.5",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
|
||||
@@ -742,6 +780,75 @@
|
||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
||||
}
|
||||
},
|
||||
"node_modules/@protobuf-ts/grpcweb-transport": {
|
||||
"version": "2.11.1",
|
||||
"resolved": "https://registry.npmjs.org/@protobuf-ts/grpcweb-transport/-/grpcweb-transport-2.11.1.tgz",
|
||||
"integrity": "sha512-1W4utDdvOB+RHMFQ0soL4JdnxjXV+ddeGIUg08DvZrA8Ms6k5NN6GBFU2oHZdTOcJVpPrDJ02RJlqtaoCMNBtw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@protobuf-ts/runtime": "^2.11.1",
|
||||
"@protobuf-ts/runtime-rpc": "^2.11.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@protobuf-ts/plugin": {
|
||||
"version": "2.11.1",
|
||||
"resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.11.1.tgz",
|
||||
"integrity": "sha512-HyuprDcw0bEEJqkOWe1rnXUP0gwYLij8YhPuZyZk6cJbIgc/Q0IFgoHQxOXNIXAcXM4Sbehh6kjVnCzasElw1A==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@bufbuild/protobuf": "^2.4.0",
|
||||
"@bufbuild/protoplugin": "^2.4.0",
|
||||
"@protobuf-ts/protoc": "^2.11.1",
|
||||
"@protobuf-ts/runtime": "^2.11.1",
|
||||
"@protobuf-ts/runtime-rpc": "^2.11.1",
|
||||
"typescript": "^3.9"
|
||||
},
|
||||
"bin": {
|
||||
"protoc-gen-dump": "bin/protoc-gen-dump",
|
||||
"protoc-gen-ts": "bin/protoc-gen-ts"
|
||||
}
|
||||
},
|
||||
"node_modules/@protobuf-ts/plugin/node_modules/typescript": {
|
||||
"version": "3.9.10",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz",
|
||||
"integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@protobuf-ts/protoc": {
|
||||
"version": "2.11.1",
|
||||
"resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.11.1.tgz",
|
||||
"integrity": "sha512-mUZJaV0daGO6HUX90o/atzQ6A7bbN2RSuHtdwo8SSF2Qoe3zHwa4IHyCN1evftTeHfLmdz+45qo47sL+5P8nyg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"protoc": "protoc.js"
|
||||
}
|
||||
},
|
||||
"node_modules/@protobuf-ts/runtime": {
|
||||
"version": "2.11.1",
|
||||
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.11.1.tgz",
|
||||
"integrity": "sha512-KuDaT1IfHkugM2pyz+FwiY80ejWrkH1pAtOBOZFuR6SXEFTsnb/jiQWQ1rCIrcKx2BtyxnxW6BWwsVSA/Ie+WQ==",
|
||||
"license": "(Apache-2.0 AND BSD-3-Clause)"
|
||||
},
|
||||
"node_modules/@protobuf-ts/runtime-rpc": {
|
||||
"version": "2.11.1",
|
||||
"resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.11.1.tgz",
|
||||
"integrity": "sha512-4CqqUmNA+/uMz00+d3CYKgElXO9VrEbucjnBFEjqI4GuDrEQ32MaI3q+9qPBvIGOlL4PmHXrzM32vBPWRhQKWQ==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@protobuf-ts/runtime": "^2.11.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/pluginutils": {
|
||||
"version": "1.0.0-beta.27",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz",
|
||||
@@ -1179,6 +1286,19 @@
|
||||
"@types/react": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript/vfs": {
|
||||
"version": "1.6.4",
|
||||
"resolved": "https://registry.npmjs.org/@typescript/vfs/-/vfs-1.6.4.tgz",
|
||||
"integrity": "sha512-PJFXFS4ZJKiJ9Qiuix6Dz/OwEIqHD7Dme1UwZhTK11vR+5dqW2ACbdndWQexBzCx+CPuMe5WBYQWCsFyGlQLlQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"debug": "^4.4.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@vitejs/plugin-react": {
|
||||
"version": "4.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz",
|
||||
|
||||
@@ -9,10 +9,15 @@
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@protobuf-ts/grpcweb-transport": "^2.11.1",
|
||||
"@protobuf-ts/runtime": "^2.11.1",
|
||||
"@protobuf-ts/runtime-rpc": "^2.11.1",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@protobuf-ts/plugin": "^2.11.1",
|
||||
"@protobuf-ts/protoc": "^2.11.1",
|
||||
"@types/react": "^18.2.0",
|
||||
"@types/react-dom": "^18.2.0",
|
||||
"@vitejs/plugin-react": "^4.2.0",
|
||||
|
||||
@@ -1,16 +1,4 @@
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto,
|
||||
"Fira Code", monospace, sans-serif;
|
||||
background: #0f0f0f;
|
||||
color: #e0e0e0;
|
||||
font-size: 14px;
|
||||
}
|
||||
@import "../../common/styles/theme.css";
|
||||
|
||||
/* ---- Layout ---- */
|
||||
|
||||
@@ -25,8 +13,8 @@ body {
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
padding: 0.75rem 1.25rem;
|
||||
background: #1a1a1a;
|
||||
border-bottom: 1px solid #2a2a2a;
|
||||
background: var(--bg-panel);
|
||||
border-bottom: 1px solid var(--border);
|
||||
}
|
||||
|
||||
.header h1 {
|
||||
@@ -40,19 +28,19 @@ body {
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
font-size: 0.8rem;
|
||||
color: #666;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.dot {
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
border-radius: 50%;
|
||||
background: #555;
|
||||
background: var(--text-muted);
|
||||
}
|
||||
|
||||
.dot.connected {
|
||||
background: #10b981;
|
||||
box-shadow: 0 0 6px #10b981;
|
||||
background: var(--success);
|
||||
box-shadow: 0 0 6px var(--success);
|
||||
}
|
||||
|
||||
.error-banner {
|
||||
@@ -70,8 +58,8 @@ body {
|
||||
|
||||
.sidebar {
|
||||
width: 300px;
|
||||
background: #141414;
|
||||
border-right: 1px solid #2a2a2a;
|
||||
background: var(--bg-surface);
|
||||
border-right: 1px solid var(--border);
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
@@ -97,163 +85,20 @@ body {
|
||||
gap: 1rem;
|
||||
}
|
||||
|
||||
/* ---- Panel shared ---- */
|
||||
|
||||
.panel-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 0.75rem;
|
||||
}
|
||||
|
||||
.panel-header h2 {
|
||||
font-size: 0.85rem;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: #888;
|
||||
}
|
||||
|
||||
.badge-row {
|
||||
display: flex;
|
||||
gap: 0.25rem;
|
||||
}
|
||||
|
||||
/* ---- Topic Badge ---- */
|
||||
|
||||
.topic-badge {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 0.25rem;
|
||||
padding: 0.15rem 0.5rem;
|
||||
font-size: 0.65rem;
|
||||
background: #1e293b;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 12px;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.topic-badge:hover {
|
||||
border-color: #3b82f6;
|
||||
}
|
||||
|
||||
.topic-badge.expanded {
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
border-radius: 8px;
|
||||
padding: 0.5rem;
|
||||
position: relative;
|
||||
z-index: 10;
|
||||
background: #1e293b;
|
||||
}
|
||||
|
||||
.topic-number {
|
||||
color: #3b82f6;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.topic-title {
|
||||
color: #94a3b8;
|
||||
}
|
||||
|
||||
.topic-detail {
|
||||
margin-top: 0.25rem;
|
||||
font-size: 0.7rem;
|
||||
line-height: 1.4;
|
||||
}
|
||||
|
||||
.topic-detail p {
|
||||
color: #cbd5e1;
|
||||
margin-bottom: 0.25rem;
|
||||
}
|
||||
|
||||
.topic-detail code {
|
||||
color: #10b981;
|
||||
font-size: 0.65rem;
|
||||
}
|
||||
|
||||
/* ---- Asset List ---- */
|
||||
|
||||
.scan-button {
|
||||
padding: 0.25rem 0.5rem;
|
||||
font-size: 0.7rem;
|
||||
background: #1e293b;
|
||||
color: #94a3b8;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s;
|
||||
}
|
||||
|
||||
.scan-button:hover:not(:disabled) {
|
||||
background: #334155;
|
||||
color: #e0e0e0;
|
||||
}
|
||||
|
||||
.scan-button:disabled {
|
||||
opacity: 0.5;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.asset-list {
|
||||
list-style: none;
|
||||
max-height: 200px;
|
||||
overflow-y: auto;
|
||||
margin-bottom: 0.75rem;
|
||||
}
|
||||
|
||||
.asset-item {
|
||||
padding: 0.4rem 0.5rem;
|
||||
cursor: pointer;
|
||||
border-left: 2px solid transparent;
|
||||
transition: all 0.15s;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 0.1rem;
|
||||
}
|
||||
|
||||
.asset-item:hover {
|
||||
background: #1a1a1a;
|
||||
}
|
||||
|
||||
.asset-item.selected {
|
||||
background: #1e293b;
|
||||
border-left-color: #3b82f6;
|
||||
}
|
||||
|
||||
.asset-filename {
|
||||
font-size: 0.8rem;
|
||||
color: #e0e0e0;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.asset-meta {
|
||||
font-size: 0.65rem;
|
||||
color: #555;
|
||||
}
|
||||
|
||||
.asset-empty {
|
||||
font-size: 0.8rem;
|
||||
color: #444;
|
||||
padding: 0.75rem 0.5rem;
|
||||
text-align: center;
|
||||
}
|
||||
/* ---- Selected Asset Info ---- */
|
||||
|
||||
.selected-asset-info {
|
||||
padding: 0.5rem;
|
||||
background: #1e293b;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 4px;
|
||||
border-radius: var(--radius);
|
||||
margin-bottom: 0.75rem;
|
||||
}
|
||||
|
||||
.asset-detail {
|
||||
display: block;
|
||||
font-size: 0.8rem;
|
||||
color: #e0e0e0;
|
||||
color: var(--text-primary);
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
@@ -277,12 +122,12 @@ body {
|
||||
.config-field label {
|
||||
display: block;
|
||||
font-size: 0.75rem;
|
||||
color: #888;
|
||||
color: var(--text-secondary);
|
||||
margin-bottom: 0.25rem;
|
||||
}
|
||||
|
||||
.config-field .default {
|
||||
color: #555;
|
||||
color: var(--text-muted);
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
@@ -291,26 +136,26 @@ body {
|
||||
width: 100%;
|
||||
padding: 0.4rem 0.5rem;
|
||||
font-size: 0.8rem;
|
||||
background: #222;
|
||||
color: #e0e0e0;
|
||||
border: 1px solid #333;
|
||||
border-radius: 4px;
|
||||
background: var(--bg-input);
|
||||
color: var(--text-primary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: var(--radius);
|
||||
}
|
||||
|
||||
.config-field input:focus,
|
||||
.config-field select:focus {
|
||||
outline: none;
|
||||
border-color: #3b82f6;
|
||||
border-color: var(--accent);
|
||||
}
|
||||
|
||||
.start-button {
|
||||
width: 100%;
|
||||
padding: 0.5rem;
|
||||
font-size: 0.85rem;
|
||||
background: #10b981;
|
||||
background: var(--success);
|
||||
color: #000;
|
||||
border: none;
|
||||
border-radius: 4px;
|
||||
border-radius: var(--radius);
|
||||
cursor: pointer;
|
||||
font-weight: 600;
|
||||
margin-top: 0.5rem;
|
||||
@@ -322,116 +167,86 @@ body {
|
||||
}
|
||||
|
||||
.start-button:disabled {
|
||||
background: #333;
|
||||
color: #666;
|
||||
background: var(--bg-input);
|
||||
color: var(--text-muted);
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* ---- Pipeline Diagram ---- */
|
||||
|
||||
.pipeline-diagram {
|
||||
background: #141414;
|
||||
border: 1px solid #2a2a2a;
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
.stage-flow {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
.stage-wrapper {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.stage {
|
||||
padding: 0.5rem 0.75rem;
|
||||
background: #1a1a1a;
|
||||
border: 1px solid #333;
|
||||
border-radius: 6px;
|
||||
text-align: center;
|
||||
min-width: 120px;
|
||||
transition: all 0.3s;
|
||||
}
|
||||
|
||||
.stage.active {
|
||||
border-color: #3b82f6;
|
||||
background: #1e293b;
|
||||
box-shadow: 0 0 12px rgba(59, 130, 246, 0.2);
|
||||
}
|
||||
|
||||
.stage-label {
|
||||
font-size: 0.8rem;
|
||||
.stop-button {
|
||||
width: 100%;
|
||||
padding: 0.5rem;
|
||||
font-size: 0.85rem;
|
||||
background: var(--error);
|
||||
color: #fff;
|
||||
border: none;
|
||||
border-radius: var(--radius);
|
||||
cursor: pointer;
|
||||
font-weight: 600;
|
||||
color: #e0e0e0;
|
||||
margin-top: 0.5rem;
|
||||
transition: background 0.2s;
|
||||
}
|
||||
|
||||
.stage-sub {
|
||||
font-size: 0.65rem;
|
||||
color: #666;
|
||||
margin-top: 0.15rem;
|
||||
.stop-button:hover {
|
||||
background: #dc2626;
|
||||
}
|
||||
|
||||
.stage-arrow {
|
||||
width: 24px;
|
||||
height: 2px;
|
||||
background: #444;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.stage-arrow::after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
right: 0;
|
||||
top: -3px;
|
||||
border: 4px solid transparent;
|
||||
border-left: 6px solid #444;
|
||||
}
|
||||
|
||||
.processor-hierarchy {
|
||||
margin-top: 0.75rem;
|
||||
padding-top: 0.75rem;
|
||||
border-top: 1px solid #222;
|
||||
}
|
||||
|
||||
.hierarchy-title {
|
||||
font-size: 0.7rem;
|
||||
color: #666;
|
||||
margin-bottom: 0.35rem;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.hierarchy-children {
|
||||
display: flex;
|
||||
gap: 0.5rem;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.hierarchy-node {
|
||||
font-size: 0.7rem;
|
||||
padding: 0.15rem 0.5rem;
|
||||
background: #1a1a1a;
|
||||
border: 1px solid #333;
|
||||
border-radius: 4px;
|
||||
.reset-button {
|
||||
width: 100%;
|
||||
padding: 0.5rem;
|
||||
font-size: 0.85rem;
|
||||
background: #1e293b;
|
||||
color: #94a3b8;
|
||||
border: 1px solid #334155;
|
||||
border-radius: var(--radius);
|
||||
cursor: pointer;
|
||||
font-weight: 600;
|
||||
margin-top: 0.5rem;
|
||||
transition: all 0.2s;
|
||||
}
|
||||
|
||||
.reset-button:hover {
|
||||
background: #334155;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.range-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.range-row input {
|
||||
flex: 1;
|
||||
padding: 0.4rem 0.5rem;
|
||||
font-size: 0.8rem;
|
||||
background: var(--bg-input);
|
||||
color: var(--text-primary);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: var(--radius);
|
||||
}
|
||||
|
||||
.range-row input:focus {
|
||||
outline: none;
|
||||
border-color: var(--accent);
|
||||
}
|
||||
|
||||
.range-sep {
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
/* ---- Chunk Grid ---- */
|
||||
|
||||
.chunk-grid-panel {
|
||||
background: #141414;
|
||||
border: 1px solid #2a2a2a;
|
||||
background: var(--bg-surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
}
|
||||
|
||||
.chunk-count {
|
||||
font-size: 0.7rem;
|
||||
color: #555;
|
||||
color: var(--text-muted);
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
@@ -466,7 +281,7 @@ body {
|
||||
align-items: center;
|
||||
gap: 0.25rem;
|
||||
font-size: 0.65rem;
|
||||
color: #888;
|
||||
color: var(--text-secondary);
|
||||
}
|
||||
|
||||
.legend-dot {
|
||||
@@ -478,8 +293,8 @@ body {
|
||||
/* ---- Worker Panel ---- */
|
||||
|
||||
.worker-panel {
|
||||
background: #141414;
|
||||
border: 1px solid #2a2a2a;
|
||||
background: var(--bg-surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
}
|
||||
@@ -492,8 +307,8 @@ body {
|
||||
|
||||
.worker-card {
|
||||
padding: 0.5rem 0.75rem;
|
||||
background: #1a1a1a;
|
||||
border: 1px solid #2a2a2a;
|
||||
background: var(--bg-panel);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 6px;
|
||||
}
|
||||
|
||||
@@ -516,7 +331,7 @@ body {
|
||||
|
||||
.worker-chunk {
|
||||
font-size: 0.7rem;
|
||||
color: #555;
|
||||
color: var(--text-muted);
|
||||
margin-top: 0.15rem;
|
||||
}
|
||||
|
||||
@@ -524,13 +339,13 @@ body {
|
||||
display: flex;
|
||||
gap: 0.75rem;
|
||||
font-size: 0.65rem;
|
||||
color: #555;
|
||||
color: var(--text-muted);
|
||||
margin-top: 0.25rem;
|
||||
}
|
||||
|
||||
.worker-empty {
|
||||
font-size: 0.8rem;
|
||||
color: #444;
|
||||
color: var(--text-muted);
|
||||
text-align: center;
|
||||
padding: 1rem;
|
||||
}
|
||||
@@ -538,8 +353,8 @@ body {
|
||||
/* ---- Queue Gauge ---- */
|
||||
|
||||
.queue-gauge {
|
||||
background: #141414;
|
||||
border: 1px solid #2a2a2a;
|
||||
background: var(--bg-surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
}
|
||||
@@ -550,38 +365,38 @@ body {
|
||||
|
||||
.gauge-label {
|
||||
font-size: 0.75rem;
|
||||
color: #888;
|
||||
color: var(--text-secondary);
|
||||
margin-bottom: 0.25rem;
|
||||
}
|
||||
|
||||
.gauge-value {
|
||||
color: #e0e0e0;
|
||||
color: var(--text-primary);
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.gauge-bar {
|
||||
height: 8px;
|
||||
background: #222;
|
||||
border-radius: 4px;
|
||||
background: var(--bg-input);
|
||||
border-radius: var(--radius);
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.gauge-fill {
|
||||
height: 100%;
|
||||
border-radius: 4px;
|
||||
border-radius: var(--radius);
|
||||
transition: width 0.3s, background 0.3s;
|
||||
}
|
||||
|
||||
.gauge-note {
|
||||
font-size: 0.65rem;
|
||||
color: #555;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
/* ---- Stats Panel ---- */
|
||||
|
||||
.stats-panel {
|
||||
background: #141414;
|
||||
border: 1px solid #2a2a2a;
|
||||
background: var(--bg-surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
}
|
||||
@@ -595,52 +410,29 @@ body {
|
||||
.stat {
|
||||
text-align: center;
|
||||
padding: 0.5rem;
|
||||
background: #1a1a1a;
|
||||
background: var(--bg-panel);
|
||||
border-radius: 6px;
|
||||
}
|
||||
|
||||
.stat-value {
|
||||
font-size: 1.1rem;
|
||||
font-weight: 700;
|
||||
color: #e0e0e0;
|
||||
color: var(--text-primary);
|
||||
}
|
||||
|
||||
.stat-label {
|
||||
font-size: 0.6rem;
|
||||
color: #666;
|
||||
color: var(--text-muted);
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
margin-top: 0.15rem;
|
||||
}
|
||||
|
||||
.test-info {
|
||||
margin-top: 0.75rem;
|
||||
padding-top: 0.5rem;
|
||||
border-top: 1px solid #222;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.test-badge {
|
||||
font-size: 0.65rem;
|
||||
padding: 0.15rem 0.4rem;
|
||||
background: #10b981;
|
||||
color: #000;
|
||||
border-radius: 3px;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.test-note {
|
||||
font-size: 0.65rem;
|
||||
color: #555;
|
||||
}
|
||||
|
||||
/* ---- Error Log ---- */
|
||||
|
||||
.error-log {
|
||||
background: #141414;
|
||||
border: 1px solid #2a2a2a;
|
||||
background: var(--bg-surface);
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 8px;
|
||||
padding: 1rem;
|
||||
}
|
||||
@@ -654,41 +446,6 @@ body {
|
||||
font-weight: 400;
|
||||
}
|
||||
|
||||
.exception-tree {
|
||||
margin-bottom: 0.75rem;
|
||||
padding: 0.5rem;
|
||||
background: #1a1a1a;
|
||||
border-radius: 6px;
|
||||
font-size: 0.7rem;
|
||||
font-family: "Fira Code", monospace;
|
||||
}
|
||||
|
||||
.tree-node {
|
||||
color: #94a3b8;
|
||||
padding: 0.1rem 0;
|
||||
}
|
||||
|
||||
.tree-node.root {
|
||||
color: #f59e0b;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.tree-node.leaf {
|
||||
color: #64748b;
|
||||
}
|
||||
|
||||
.tree-children {
|
||||
padding-left: 1rem;
|
||||
border-left: 1px solid #333;
|
||||
margin-left: 0.5rem;
|
||||
}
|
||||
|
||||
.tree-grandchildren {
|
||||
padding-left: 1rem;
|
||||
border-left: 1px solid #333;
|
||||
margin-left: 0.5rem;
|
||||
}
|
||||
|
||||
.error-entries {
|
||||
max-height: 150px;
|
||||
overflow-y: auto;
|
||||
@@ -696,7 +453,7 @@ body {
|
||||
|
||||
.error-empty {
|
||||
font-size: 0.8rem;
|
||||
color: #444;
|
||||
color: var(--text-muted);
|
||||
text-align: center;
|
||||
padding: 0.5rem;
|
||||
}
|
||||
@@ -706,26 +463,26 @@ body {
|
||||
gap: 0.5rem;
|
||||
align-items: center;
|
||||
padding: 0.35rem 0;
|
||||
border-bottom: 1px solid #1a1a1a;
|
||||
border-bottom: 1px solid var(--bg-panel);
|
||||
font-size: 0.7rem;
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.error-type {
|
||||
color: #ef4444;
|
||||
color: var(--error);
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.error-seq {
|
||||
color: #f59e0b;
|
||||
color: var(--warning);
|
||||
}
|
||||
|
||||
.error-worker {
|
||||
color: #3b82f6;
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.error-msg {
|
||||
color: #888;
|
||||
color: var(--text-secondary);
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
@@ -733,3 +490,15 @@ body {
|
||||
color: #f97316;
|
||||
font-size: 0.65rem;
|
||||
}
|
||||
|
||||
/* ---- Output download link ---- */
|
||||
|
||||
.fm-download-link {
|
||||
font-size: 0.7rem;
|
||||
color: var(--accent);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.fm-download-link:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
@@ -1,16 +1,23 @@
|
||||
import { useCallback, useEffect, useMemo, useState } from "react";
|
||||
import "./App.css";
|
||||
import { createChunkJob, getAssets, scanMediaFolder } from "./api";
|
||||
import {
|
||||
cancelChunkJob,
|
||||
createChunkJob,
|
||||
getAssets,
|
||||
getChunkOutputFiles,
|
||||
scanMediaFolder,
|
||||
} from "./api";
|
||||
import { ChunkGrid } from "./components/ChunkGrid";
|
||||
import { ConfigPanel } from "./components/ConfigPanel";
|
||||
import { ErrorLog } from "./components/ErrorLog";
|
||||
import { PipelineDiagram } from "./components/PipelineDiagram";
|
||||
import { OutputFiles } from "./components/OutputFiles";
|
||||
import { QueueGauge } from "./components/QueueGauge";
|
||||
import { StatsPanel } from "./components/StatsPanel";
|
||||
import { WorkerPanel } from "./components/WorkerPanel";
|
||||
import { useEventStream } from "./hooks/useEventStream";
|
||||
import { useGrpcStream } from "./hooks/useGrpcStream";
|
||||
import type {
|
||||
ChunkInfo,
|
||||
ChunkOutputFile,
|
||||
ErrorEntry,
|
||||
MediaAsset,
|
||||
PipelineConfig,
|
||||
@@ -20,6 +27,7 @@ import type {
|
||||
|
||||
export default function App() {
|
||||
const [jobId, setJobId] = useState<string | null>(null);
|
||||
const [celeryTaskId, setCeleryTaskId] = useState<string | null>(null);
|
||||
const [running, setRunning] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
@@ -28,15 +36,36 @@ export default function App() {
|
||||
const [selectedAsset, setSelectedAsset] = useState<MediaAsset | null>(null);
|
||||
const [scanning, setScanning] = useState(false);
|
||||
|
||||
const { events, connected, done } = useEventStream(jobId);
|
||||
// Output files
|
||||
const [outputFiles, setOutputFiles] = useState<ChunkOutputFile[]>([]);
|
||||
|
||||
const {
|
||||
events,
|
||||
connected,
|
||||
done,
|
||||
reset: resetStream,
|
||||
} = useGrpcStream(jobId);
|
||||
|
||||
// Load assets on mount
|
||||
useEffect(() => {
|
||||
getAssets()
|
||||
.then((data) => setAssets(data.sort((a, b) => a.filename.localeCompare(b.filename))))
|
||||
.catch((e) => setError(e instanceof Error ? e.message : "Failed to load assets"));
|
||||
.then((data) =>
|
||||
setAssets(data.sort((a, b) => a.filename.localeCompare(b.filename))),
|
||||
)
|
||||
.catch((e) =>
|
||||
setError(e instanceof Error ? e.message : "Failed to load assets"),
|
||||
);
|
||||
}, []);
|
||||
|
||||
// Fetch output files when job completes
|
||||
useEffect(() => {
|
||||
if (done && jobId) {
|
||||
getChunkOutputFiles(jobId)
|
||||
.then(setOutputFiles)
|
||||
.catch(() => setOutputFiles([]));
|
||||
}
|
||||
}, [done, jobId]);
|
||||
|
||||
const handleScan = useCallback(async () => {
|
||||
setScanning(true);
|
||||
setError(null);
|
||||
@@ -51,8 +80,8 @@ export default function App() {
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Derive state from events
|
||||
const { chunks, workers, stats, errors, activeStage, queueSize } =
|
||||
// Derive state from raw events
|
||||
const { chunks, workers, stats, errors, queueSize } =
|
||||
useMemo(() => {
|
||||
const chunkMap = new Map<number, ChunkInfo>();
|
||||
const workerMap = new Map<string, WorkerInfo>();
|
||||
@@ -64,45 +93,54 @@ export default function App() {
|
||||
let elapsed = 0;
|
||||
let throughput = 0;
|
||||
let queueSize = 0;
|
||||
let stage = "pending";
|
||||
let pipelineDone = false;
|
||||
|
||||
for (const evt of events) {
|
||||
const evtType = evt.event_type || "";
|
||||
|
||||
if (evt.total_chunks) totalChunks = evt.total_chunks;
|
||||
if (evt.processed_chunks) processed = evt.processed_chunks;
|
||||
if (evt.failed_chunks) failed = evt.failed_chunks;
|
||||
if (evt.elapsed) elapsed = evt.elapsed;
|
||||
if (evt.throughput_mbps) throughput = evt.throughput_mbps;
|
||||
if (evt.queue_size !== undefined) queueSize = evt.queue_size;
|
||||
if (evt.status && evt.status !== "waiting") stage = evt.status;
|
||||
|
||||
// Track chunks
|
||||
if (evtType === "pipeline_complete" || evtType === "pipeline_error") {
|
||||
pipelineDone = true;
|
||||
queueSize = 0;
|
||||
}
|
||||
|
||||
// Track chunks by raw event type
|
||||
if (evt.sequence !== undefined) {
|
||||
const existing = chunkMap.get(evt.sequence) || {
|
||||
sequence: evt.sequence,
|
||||
state: "pending" as const,
|
||||
};
|
||||
|
||||
if (evt.status === "chunking" || evt.status === "pending") {
|
||||
if (evtType === "chunk_queued") {
|
||||
existing.state = "queued";
|
||||
} else if (evt.status === "processing") {
|
||||
} else if (evtType === "chunk_processing") {
|
||||
existing.state = "processing";
|
||||
if (evt.worker_id) existing.worker_id = evt.worker_id;
|
||||
} else if (evt.status === "completed") {
|
||||
} else if (evtType === "chunk_done") {
|
||||
existing.state = "done";
|
||||
if (evt.processing_time)
|
||||
existing.processing_time = evt.processing_time;
|
||||
if (evt.retries) existing.retries = evt.retries;
|
||||
} else if (evt.status === "failed") {
|
||||
} else if (evtType === "chunk_error") {
|
||||
existing.state = "error";
|
||||
if (evt.error) existing.error = evt.error;
|
||||
} else if (evtType === "chunk_retry") {
|
||||
existing.state = "retry";
|
||||
if (evt.retries) existing.retries = evt.retries;
|
||||
}
|
||||
|
||||
if (evt.size) existing.size = evt.size;
|
||||
chunkMap.set(evt.sequence, existing);
|
||||
}
|
||||
|
||||
// Track workers
|
||||
if (evt.worker_id) {
|
||||
// Track workers from worker_status events
|
||||
if (evt.worker_id && evtType === "worker_status") {
|
||||
const w = workerMap.get(evt.worker_id) || {
|
||||
worker_id: evt.worker_id,
|
||||
state: "idle" as const,
|
||||
@@ -119,12 +157,38 @@ export default function App() {
|
||||
w.current_chunk = undefined;
|
||||
} else if (evt.state === "stopped") {
|
||||
w.state = "stopped";
|
||||
w.current_chunk = undefined;
|
||||
}
|
||||
|
||||
if (evt.success !== undefined) {
|
||||
if (evt.success) w.processed++;
|
||||
else w.errors++;
|
||||
workerMap.set(evt.worker_id, w);
|
||||
}
|
||||
|
||||
// Also update workers from chunk lifecycle events
|
||||
if (
|
||||
evt.worker_id &&
|
||||
(evtType === "chunk_processing" ||
|
||||
evtType === "chunk_done" ||
|
||||
evtType === "chunk_error")
|
||||
) {
|
||||
const w = workerMap.get(evt.worker_id) || {
|
||||
worker_id: evt.worker_id,
|
||||
state: "idle" as const,
|
||||
processed: 0,
|
||||
errors: 0,
|
||||
retries: 0,
|
||||
};
|
||||
|
||||
if (evtType === "chunk_processing") {
|
||||
w.state = "processing";
|
||||
w.current_chunk = evt.sequence;
|
||||
} else if (evtType === "chunk_done") {
|
||||
w.processed++;
|
||||
w.state = "idle";
|
||||
w.current_chunk = undefined;
|
||||
} else if (evtType === "chunk_error") {
|
||||
w.errors++;
|
||||
}
|
||||
|
||||
if (evt.retries) {
|
||||
retries += evt.retries;
|
||||
w.retries += evt.retries;
|
||||
@@ -141,11 +205,19 @@ export default function App() {
|
||||
worker_id: evt.worker_id,
|
||||
error: evt.error,
|
||||
retries: evt.retries,
|
||||
event_type: evt.status || "error",
|
||||
event_type: evtType,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// When pipeline is done, mark all workers as stopped
|
||||
if (pipelineDone) {
|
||||
for (const w of workerMap.values()) {
|
||||
w.state = "stopped";
|
||||
w.current_chunk = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
const statsObj: PipelineStats = {
|
||||
total_chunks: totalChunks,
|
||||
processed,
|
||||
@@ -158,12 +230,11 @@ export default function App() {
|
||||
|
||||
return {
|
||||
chunks: Array.from(chunkMap.values()).sort(
|
||||
(a, b) => a.sequence - b.sequence
|
||||
(a, b) => a.sequence - b.sequence,
|
||||
),
|
||||
workers: Array.from(workerMap.values()),
|
||||
stats: statsObj,
|
||||
errors: errorList,
|
||||
activeStage: stage,
|
||||
queueSize,
|
||||
};
|
||||
}, [events]);
|
||||
@@ -171,15 +242,45 @@ export default function App() {
|
||||
const handleStart = useCallback(async (config: PipelineConfig) => {
|
||||
setError(null);
|
||||
setRunning(true);
|
||||
setOutputFiles([]);
|
||||
try {
|
||||
const result = await createChunkJob(config);
|
||||
setJobId(result.id);
|
||||
setCeleryTaskId(result.celery_task_id);
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : "Failed to start");
|
||||
setRunning(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleStop = useCallback(async () => {
|
||||
if (!celeryTaskId) {
|
||||
setError("No task ID to cancel");
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const result = await cancelChunkJob(celeryTaskId);
|
||||
if (result.ok) {
|
||||
resetStream();
|
||||
setRunning(false);
|
||||
setError(null);
|
||||
} else {
|
||||
setError(result.message || "Failed to cancel");
|
||||
}
|
||||
} catch (e) {
|
||||
setError(e instanceof Error ? e.message : "Failed to cancel");
|
||||
}
|
||||
}, [celeryTaskId, resetStream]);
|
||||
|
||||
const handleReset = useCallback(() => {
|
||||
setJobId(null);
|
||||
setCeleryTaskId(null);
|
||||
setRunning(false);
|
||||
setError(null);
|
||||
setOutputFiles([]);
|
||||
resetStream();
|
||||
}, [resetStream]);
|
||||
|
||||
// Reset running state when done
|
||||
if (done && running) {
|
||||
setRunning(false);
|
||||
@@ -197,10 +298,10 @@ export default function App() {
|
||||
{!jobId
|
||||
? "Configure and launch"
|
||||
: connected
|
||||
? "Streaming"
|
||||
: done
|
||||
? "Complete"
|
||||
: "Connecting..."}
|
||||
? "Streaming"
|
||||
: done
|
||||
? "Complete"
|
||||
: "Connecting..."}
|
||||
</span>
|
||||
</div>
|
||||
</header>
|
||||
@@ -211,7 +312,10 @@ export default function App() {
|
||||
<aside className="sidebar">
|
||||
<ConfigPanel
|
||||
onStart={handleStart}
|
||||
onStop={handleStop}
|
||||
onReset={handleReset}
|
||||
running={running}
|
||||
done={done}
|
||||
assets={assets}
|
||||
selectedAsset={selectedAsset}
|
||||
onSelectAsset={setSelectedAsset}
|
||||
@@ -221,16 +325,13 @@ export default function App() {
|
||||
</aside>
|
||||
|
||||
<main className="main">
|
||||
<PipelineDiagram activeStage={activeStage} />
|
||||
|
||||
<div className="main-grid">
|
||||
<div className="main-left">
|
||||
<ChunkGrid chunks={chunks} totalChunks={stats.total_chunks} />
|
||||
<QueueGauge
|
||||
current={queueSize}
|
||||
max={10}
|
||||
buffered={0}
|
||||
/>
|
||||
<QueueGauge current={queueSize} max={10} buffered={0} />
|
||||
{done && outputFiles.length > 0 && (
|
||||
<OutputFiles files={outputFiles} />
|
||||
)}
|
||||
</div>
|
||||
<div className="main-right">
|
||||
<WorkerPanel workers={workers} />
|
||||
|
||||
@@ -1,55 +1,13 @@
|
||||
/**
|
||||
* GraphQL API client for the chunker UI.
|
||||
* Chunker-specific API functions.
|
||||
* Shared functions (getAssets, scanMediaFolder) come from common.
|
||||
*/
|
||||
|
||||
import type { MediaAsset } from "./types";
|
||||
import { gql } from "../../common/api/graphql";
|
||||
import type { ChunkOutputFile } from "../../common/types/generated";
|
||||
|
||||
const GRAPHQL_URL = "/api/graphql";
|
||||
|
||||
async function gql<T>(query: string, variables?: Record<string, unknown>): Promise<T> {
|
||||
const response = await fetch(GRAPHQL_URL, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ query, variables }),
|
||||
});
|
||||
|
||||
const json = await response.json();
|
||||
|
||||
if (json.errors?.length) {
|
||||
throw new Error(json.errors[0].message);
|
||||
}
|
||||
|
||||
return json.data as T;
|
||||
}
|
||||
|
||||
/** Fetch all media assets. */
|
||||
export async function getAssets(): Promise<MediaAsset[]> {
|
||||
const data = await gql<{ assets: MediaAsset[] }>(`
|
||||
query {
|
||||
assets {
|
||||
id filename file_path status error_message file_size duration
|
||||
video_codec audio_codec width height framerate bitrate
|
||||
properties comments tags created_at updated_at
|
||||
}
|
||||
}
|
||||
`);
|
||||
return data.assets;
|
||||
}
|
||||
|
||||
/** Scan media/in/ folder for new files. */
|
||||
export async function scanMediaFolder(): Promise<{
|
||||
found: number;
|
||||
registered: number;
|
||||
skipped: number;
|
||||
files: string[];
|
||||
}> {
|
||||
const data = await gql<{ scan_media_folder: { found: number; registered: number; skipped: number; files: string[] } }>(`
|
||||
mutation {
|
||||
scan_media_folder { found registered skipped files }
|
||||
}
|
||||
`);
|
||||
return data.scan_media_folder;
|
||||
}
|
||||
// Re-export shared functions
|
||||
export { getAssets, scanMediaFolder } from "../../common/api/media";
|
||||
|
||||
/** Create a chunk job via GraphQL mutation. */
|
||||
export async function createChunkJob(config: {
|
||||
@@ -58,15 +16,70 @@ export async function createChunkJob(config: {
|
||||
num_workers: number;
|
||||
max_retries: number;
|
||||
processor_type: string;
|
||||
}): Promise<{ id: string }> {
|
||||
const data = await gql<{ create_chunk_job: { id: string; status: string } }>(`
|
||||
start_time?: number | null;
|
||||
end_time?: number | null;
|
||||
}): Promise<{ id: string; celery_task_id: string | null }> {
|
||||
const data = await gql<{
|
||||
create_chunk_job: {
|
||||
id: string;
|
||||
status: string;
|
||||
celery_task_id: string | null;
|
||||
};
|
||||
}>(
|
||||
`
|
||||
mutation CreateChunkJob($input: CreateChunkJobInput!) {
|
||||
create_chunk_job(input: $input) {
|
||||
id
|
||||
status
|
||||
celery_task_id
|
||||
}
|
||||
}
|
||||
`, { input: config });
|
||||
`,
|
||||
{ input: config },
|
||||
);
|
||||
|
||||
return data.create_chunk_job;
|
||||
}
|
||||
|
||||
/** Cancel a running chunk job. */
|
||||
export async function cancelChunkJob(
|
||||
celeryTaskId: string,
|
||||
): Promise<{ ok: boolean; message: string | null }> {
|
||||
const data = await gql<{
|
||||
cancel_chunk_job: { ok: boolean; message: string | null };
|
||||
}>(
|
||||
`
|
||||
mutation CancelChunkJob($celery_task_id: String!) {
|
||||
cancel_chunk_job(celery_task_id: $celery_task_id) {
|
||||
ok
|
||||
message
|
||||
}
|
||||
}
|
||||
`,
|
||||
{ celery_task_id: celeryTaskId },
|
||||
);
|
||||
|
||||
return data.cancel_chunk_job;
|
||||
}
|
||||
|
||||
/** Fetch output chunk files for a completed job. */
|
||||
export async function getChunkOutputFiles(
|
||||
jobId: string,
|
||||
): Promise<ChunkOutputFile[]> {
|
||||
const data = await gql<{
|
||||
chunk_output_files: ChunkOutputFile[];
|
||||
}>(
|
||||
`
|
||||
query ChunkOutputFiles($job_id: String!) {
|
||||
chunk_output_files(job_id: $job_id) {
|
||||
key
|
||||
size
|
||||
url
|
||||
}
|
||||
}
|
||||
`,
|
||||
{ job_id: jobId },
|
||||
);
|
||||
|
||||
return data.chunk_output_files;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import type { ChunkInfo } from "../types";
|
||||
import { TopicBadge, TOPICS } from "./TopicBadge";
|
||||
|
||||
interface Props {
|
||||
chunks: ChunkInfo[];
|
||||
@@ -7,19 +6,14 @@ interface Props {
|
||||
}
|
||||
|
||||
const STATE_COLORS: Record<string, string> = {
|
||||
pending: "#333",
|
||||
queued: "#f59e0b",
|
||||
processing: "#3b82f6",
|
||||
done: "#10b981",
|
||||
error: "#ef4444",
|
||||
pending: "var(--bg-input)",
|
||||
queued: "var(--warning)",
|
||||
processing: "var(--processing)",
|
||||
done: "var(--success)",
|
||||
error: "var(--error)",
|
||||
retry: "#f97316",
|
||||
};
|
||||
|
||||
/**
|
||||
* Grid of chunks colored by processing state.
|
||||
* Chunks appear incrementally as the generator yields them.
|
||||
* Interview Topic 3: Generators & iteration.
|
||||
*/
|
||||
export function ChunkGrid({ chunks, totalChunks }: Props) {
|
||||
return (
|
||||
<div className="chunk-grid-panel">
|
||||
@@ -30,14 +24,13 @@ export function ChunkGrid({ chunks, totalChunks }: Props) {
|
||||
{chunks.length} / {totalChunks || "?"}
|
||||
</span>
|
||||
</h2>
|
||||
<TopicBadge topic={TOPICS.iteration} />
|
||||
</div>
|
||||
<div className="chunk-grid">
|
||||
{chunks.map((chunk) => (
|
||||
<div
|
||||
key={chunk.sequence}
|
||||
className="chunk-cell"
|
||||
style={{ background: STATE_COLORS[chunk.state] || "#333" }}
|
||||
style={{ background: STATE_COLORS[chunk.state] || "var(--bg-input)" }}
|
||||
title={`#${chunk.sequence} — ${chunk.state}${
|
||||
chunk.worker_id ? ` (${chunk.worker_id})` : ""
|
||||
}${chunk.retries ? ` retries: ${chunk.retries}` : ""}`}
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
import { useState } from "react";
|
||||
import { useMemo, useState } from "react";
|
||||
import { FileManager } from "../../../common/components/FileManager";
|
||||
import type { FileEntry } from "../../../common/components/FileManager";
|
||||
import { formatDuration, formatSize } from "../../../common/utils/format";
|
||||
import type { MediaAsset, PipelineConfig } from "../types";
|
||||
import { TopicBadge, TOPICS } from "./TopicBadge";
|
||||
|
||||
interface Props {
|
||||
onStart: (config: PipelineConfig) => void;
|
||||
onStop: () => void;
|
||||
onReset: () => void;
|
||||
running: boolean;
|
||||
done: boolean;
|
||||
assets: MediaAsset[];
|
||||
selectedAsset: MediaAsset | null;
|
||||
onSelectAsset: (asset: MediaAsset) => void;
|
||||
@@ -12,13 +17,12 @@ interface Props {
|
||||
scanning: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Pipeline configuration form with file browser.
|
||||
* Each parameter shows its default — Interview Topic 1: Function params & defaults.
|
||||
*/
|
||||
export function ConfigPanel({
|
||||
onStart,
|
||||
onStop,
|
||||
onReset,
|
||||
running,
|
||||
done,
|
||||
assets,
|
||||
selectedAsset,
|
||||
onSelectAsset,
|
||||
@@ -31,6 +35,25 @@ export function ConfigPanel({
|
||||
const [processorType, setProcessorType] = useState<
|
||||
"ffmpeg" | "checksum" | "simulated_decode" | "composite"
|
||||
>("ffmpeg");
|
||||
const [startTime, setStartTime] = useState<string>("");
|
||||
const [endTime, setEndTime] = useState<string>("");
|
||||
|
||||
// Map assets to FileEntry for FileManager
|
||||
const fileEntries: FileEntry[] = useMemo(
|
||||
() =>
|
||||
assets.map((a) => ({
|
||||
key: a.id,
|
||||
name: a.filename,
|
||||
size: a.file_size ?? undefined,
|
||||
meta: formatDuration(a.duration),
|
||||
})),
|
||||
[assets],
|
||||
);
|
||||
|
||||
const handleFileSelect = (file: FileEntry) => {
|
||||
const asset = assets.find((a) => a.id === file.key);
|
||||
if (asset) onSelectAsset(asset);
|
||||
};
|
||||
|
||||
const handleSubmit = (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
@@ -41,61 +64,31 @@ export function ConfigPanel({
|
||||
num_workers: numWorkers,
|
||||
max_retries: maxRetries,
|
||||
processor_type: processorType,
|
||||
start_time: startTime ? parseFloat(startTime) : null,
|
||||
end_time: endTime ? parseFloat(endTime) : null,
|
||||
});
|
||||
};
|
||||
|
||||
const formatSize = (bytes: number | null) => {
|
||||
if (!bytes) return "—";
|
||||
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(0)} KB`;
|
||||
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
||||
};
|
||||
|
||||
const formatDuration = (seconds: number | null) => {
|
||||
if (!seconds) return "—";
|
||||
const m = Math.floor(seconds / 60);
|
||||
const s = Math.floor(seconds % 60);
|
||||
return `${m}:${s.toString().padStart(2, "0")}`;
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="config-panel">
|
||||
{/* Asset Browser */}
|
||||
<div className="panel-header">
|
||||
<h2>Assets</h2>
|
||||
<button
|
||||
onClick={onScan}
|
||||
disabled={scanning}
|
||||
className="scan-button"
|
||||
>
|
||||
{scanning ? "Scanning..." : "Scan Folder"}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<ul className="asset-list">
|
||||
{assets.length === 0 ? (
|
||||
<li className="asset-empty">No assets — click Scan Folder</li>
|
||||
) : (
|
||||
assets.map((asset) => (
|
||||
<li
|
||||
key={asset.id}
|
||||
className={`asset-item ${selectedAsset?.id === asset.id ? "selected" : ""}`}
|
||||
onClick={() => onSelectAsset(asset)}
|
||||
title={asset.filename}
|
||||
>
|
||||
<span className="asset-filename">{asset.filename}</span>
|
||||
<span className="asset-meta">
|
||||
{formatSize(asset.file_size)} · {formatDuration(asset.duration)}
|
||||
</span>
|
||||
</li>
|
||||
))
|
||||
)}
|
||||
</ul>
|
||||
<FileManager
|
||||
title="Assets"
|
||||
files={fileEntries}
|
||||
selectedKey={selectedAsset?.id ?? null}
|
||||
onSelect={handleFileSelect}
|
||||
onScan={onScan}
|
||||
scanning={scanning}
|
||||
emptyMessage="No assets — click Scan Folder"
|
||||
disabled={running}
|
||||
/>
|
||||
|
||||
{selectedAsset && (
|
||||
<div className="selected-asset-info">
|
||||
<span className="asset-detail">{selectedAsset.filename}</span>
|
||||
<span className="asset-detail-meta">
|
||||
{selectedAsset.video_codec} · {selectedAsset.width}x{selectedAsset.height} · {formatDuration(selectedAsset.duration)}
|
||||
{selectedAsset.video_codec} · {selectedAsset.width}x
|
||||
{selectedAsset.height} · {formatDuration(selectedAsset.duration)} ·{" "}
|
||||
{formatSize(selectedAsset.file_size)}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
@@ -103,9 +96,35 @@ export function ConfigPanel({
|
||||
{/* Pipeline Config */}
|
||||
<div className="panel-header" style={{ marginTop: "1rem" }}>
|
||||
<h2>Pipeline Config</h2>
|
||||
<TopicBadge topic={TOPICS.params} />
|
||||
</div>
|
||||
<form onSubmit={handleSubmit}>
|
||||
<div className="config-field">
|
||||
<label>
|
||||
Time Range (seconds){" "}
|
||||
<span className="default">optional — limits what gets chunked</span>
|
||||
</label>
|
||||
<div className="range-row">
|
||||
<input
|
||||
type="number"
|
||||
min={0}
|
||||
step={1}
|
||||
placeholder="start"
|
||||
value={startTime}
|
||||
onChange={(e) => setStartTime(e.target.value)}
|
||||
disabled={running}
|
||||
/>
|
||||
<span className="range-sep">to</span>
|
||||
<input
|
||||
type="number"
|
||||
min={0}
|
||||
step={1}
|
||||
placeholder="end"
|
||||
value={endTime}
|
||||
onChange={(e) => setEndTime(e.target.value)}
|
||||
disabled={running}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div className="config-field">
|
||||
<label>
|
||||
Chunk Duration <span className="default">default: 10s</span>
|
||||
@@ -113,6 +132,7 @@ export function ConfigPanel({
|
||||
<select
|
||||
value={chunkDuration}
|
||||
onChange={(e) => setChunkDuration(Number(e.target.value))}
|
||||
disabled={running}
|
||||
>
|
||||
<option value={5}>5 seconds</option>
|
||||
<option value={10}>10 seconds</option>
|
||||
@@ -131,6 +151,7 @@ export function ConfigPanel({
|
||||
max={16}
|
||||
value={numWorkers}
|
||||
onChange={(e) => setNumWorkers(Number(e.target.value))}
|
||||
disabled={running}
|
||||
/>
|
||||
</div>
|
||||
<div className="config-field">
|
||||
@@ -143,6 +164,7 @@ export function ConfigPanel({
|
||||
max={10}
|
||||
value={maxRetries}
|
||||
onChange={(e) => setMaxRetries(Number(e.target.value))}
|
||||
disabled={running}
|
||||
/>
|
||||
</div>
|
||||
<div className="config-field">
|
||||
@@ -153,9 +175,14 @@ export function ConfigPanel({
|
||||
value={processorType}
|
||||
onChange={(e) =>
|
||||
setProcessorType(
|
||||
e.target.value as "ffmpeg" | "checksum" | "simulated_decode" | "composite"
|
||||
e.target.value as
|
||||
| "ffmpeg"
|
||||
| "checksum"
|
||||
| "simulated_decode"
|
||||
| "composite",
|
||||
)
|
||||
}
|
||||
disabled={running}
|
||||
>
|
||||
<option value="ffmpeg">FFmpegExtractProcessor</option>
|
||||
<option value="checksum">ChecksumProcessor</option>
|
||||
@@ -163,10 +190,29 @@ export function ConfigPanel({
|
||||
<option value="composite">CompositeProcessor</option>
|
||||
</select>
|
||||
</div>
|
||||
<button type="submit" className="start-button" disabled={running || !selectedAsset}>
|
||||
{running ? "Running..." : "Launch Pipeline"}
|
||||
</button>
|
||||
|
||||
{!running && !done && (
|
||||
<button
|
||||
type="submit"
|
||||
className="start-button"
|
||||
disabled={!selectedAsset}
|
||||
>
|
||||
Launch Pipeline
|
||||
</button>
|
||||
)}
|
||||
</form>
|
||||
|
||||
{running && (
|
||||
<button type="button" className="stop-button" onClick={onStop}>
|
||||
Stop Pipeline
|
||||
</button>
|
||||
)}
|
||||
|
||||
{done && (
|
||||
<button type="button" className="reset-button" onClick={onReset}>
|
||||
Reset
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,15 +1,9 @@
|
||||
import type { ErrorEntry } from "../types";
|
||||
import { TopicBadge, TOPICS } from "./TopicBadge";
|
||||
|
||||
interface Props {
|
||||
errors: ErrorEntry[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Error and retry event log.
|
||||
* Shows exception types, retry counts, backoff delays.
|
||||
* Interview Topic 7: Exception handling & resilient code.
|
||||
*/
|
||||
export function ErrorLog({ errors }: Props) {
|
||||
return (
|
||||
<div className="error-log">
|
||||
@@ -18,23 +12,6 @@ export function ErrorLog({ errors }: Props) {
|
||||
Errors & Retries{" "}
|
||||
<span className="error-count">{errors.length}</span>
|
||||
</h2>
|
||||
<TopicBadge topic={TOPICS.exceptions} />
|
||||
</div>
|
||||
<div className="exception-tree">
|
||||
<div className="tree-node root">PipelineError</div>
|
||||
<div className="tree-children">
|
||||
<div className="tree-node">ChunkError</div>
|
||||
<div className="tree-grandchildren">
|
||||
<div className="tree-node leaf">ChunkReadError</div>
|
||||
<div className="tree-node leaf">ChunkChecksumError</div>
|
||||
</div>
|
||||
<div className="tree-node">ProcessingError</div>
|
||||
<div className="tree-grandchildren">
|
||||
<div className="tree-node leaf">ProcessorTimeoutError</div>
|
||||
<div className="tree-node leaf">ProcessorFailureError</div>
|
||||
</div>
|
||||
<div className="tree-node">ReassemblyError</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="error-entries">
|
||||
{errors.length === 0 && (
|
||||
|
||||
51
ui/chunker/src/components/OutputFiles.tsx
Normal file
51
ui/chunker/src/components/OutputFiles.tsx
Normal file
@@ -0,0 +1,51 @@
|
||||
import { useMemo } from "react";
|
||||
import { FileManager } from "../../../common/components/FileManager";
|
||||
import type { FileEntry } from "../../../common/components/FileManager";
|
||||
import { formatSize } from "../../../common/utils/format";
|
||||
import type { ChunkOutputFile } from "../types";
|
||||
|
||||
interface Props {
|
||||
files: ChunkOutputFile[];
|
||||
}
|
||||
|
||||
export function OutputFiles({ files }: Props) {
|
||||
const fileEntries: FileEntry[] = useMemo(
|
||||
() =>
|
||||
files.map((f) => ({
|
||||
key: f.key,
|
||||
name: f.key.split("/").pop() || f.key,
|
||||
size: f.size,
|
||||
})),
|
||||
[files],
|
||||
);
|
||||
|
||||
const urlMap = useMemo(() => {
|
||||
const map = new Map<string, string>();
|
||||
for (const f of files) {
|
||||
map.set(f.key, f.url);
|
||||
}
|
||||
return map;
|
||||
}, [files]);
|
||||
|
||||
return (
|
||||
<FileManager
|
||||
title="Output Files"
|
||||
files={fileEntries}
|
||||
emptyMessage="No output files"
|
||||
renderActions={(file) => {
|
||||
const url = urlMap.get(file.key);
|
||||
if (!url) return null;
|
||||
return (
|
||||
<a
|
||||
href={url}
|
||||
download
|
||||
className="fm-download-link"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
{formatSize(file.size)} ↓
|
||||
</a>
|
||||
);
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
import { TopicBadge, TOPICS } from "./TopicBadge";
|
||||
|
||||
interface Props {
|
||||
activeStage: string;
|
||||
}
|
||||
|
||||
const STAGES = [
|
||||
{ id: "chunking", label: "Chunker", sub: "File -> Chunks (generator)" },
|
||||
{ id: "queued", label: "ChunkQueue", sub: "Bounded queue (backpressure)" },
|
||||
{ id: "processing", label: "WorkerPool", sub: "ThreadPoolExecutor" },
|
||||
{ id: "collecting", label: "ResultCollector", sub: "heapq reassembly" },
|
||||
{ id: "completed", label: "PipelineResult", sub: "Aggregate stats" },
|
||||
];
|
||||
|
||||
/**
|
||||
* Visual flow diagram of pipeline stages.
|
||||
* Highlights the currently active stage.
|
||||
* Interview Topic 4: OOP design — shows class hierarchy.
|
||||
*/
|
||||
export function PipelineDiagram({ activeStage }: Props) {
|
||||
return (
|
||||
<div className="pipeline-diagram">
|
||||
<div className="panel-header">
|
||||
<h2>Pipeline Flow</h2>
|
||||
<TopicBadge topic={TOPICS.oop} />
|
||||
</div>
|
||||
<div className="stage-flow">
|
||||
{STAGES.map((stage, i) => (
|
||||
<div key={stage.id} className="stage-wrapper">
|
||||
<div
|
||||
className={`stage ${activeStage === stage.id ? "active" : ""}`}
|
||||
>
|
||||
<div className="stage-label">{stage.label}</div>
|
||||
<div className="stage-sub">{stage.sub}</div>
|
||||
</div>
|
||||
{i < STAGES.length - 1 && <div className="stage-arrow" />}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<div className="processor-hierarchy">
|
||||
<div className="hierarchy-title">Processor ABC</div>
|
||||
<div className="hierarchy-children">
|
||||
<span className="hierarchy-node">ChecksumProcessor</span>
|
||||
<span className="hierarchy-node">SimulatedDecodeProcessor</span>
|
||||
<span className="hierarchy-node">CompositeProcessor</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,15 +1,9 @@
|
||||
import { TopicBadge, TOPICS } from "./TopicBadge";
|
||||
|
||||
interface Props {
|
||||
current: number;
|
||||
max: number;
|
||||
buffered: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Queue fill level gauge + collector heap buffer.
|
||||
* Interview Topic 5: Data structures — queue.Queue, heapq, deque.
|
||||
*/
|
||||
export function QueueGauge({ current, max, buffered }: Props) {
|
||||
const fillPct = max > 0 ? Math.min((current / max) * 100, 100) : 0;
|
||||
|
||||
@@ -17,7 +11,6 @@ export function QueueGauge({ current, max, buffered }: Props) {
|
||||
<div className="queue-gauge">
|
||||
<div className="panel-header">
|
||||
<h2>Queue & Buffer</h2>
|
||||
<TopicBadge topic={TOPICS.datastructures} />
|
||||
</div>
|
||||
<div className="gauge-row">
|
||||
<div className="gauge-label">
|
||||
@@ -28,7 +21,7 @@ export function QueueGauge({ current, max, buffered }: Props) {
|
||||
className="gauge-fill"
|
||||
style={{
|
||||
width: `${fillPct}%`,
|
||||
background: fillPct > 80 ? "#ef4444" : "#3b82f6",
|
||||
background: fillPct > 80 ? "var(--error)" : "var(--processing)",
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -1,24 +1,14 @@
|
||||
import type { PipelineStats } from "../types";
|
||||
import { TopicBadge, TOPICS } from "./TopicBadge";
|
||||
|
||||
interface Props {
|
||||
stats: PipelineStats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Throughput, timing, and error stats.
|
||||
* Interview Topic 6: Algorithms — throughput calculation over sliding window.
|
||||
* Interview Topic 8: TDD — test count and coverage.
|
||||
*/
|
||||
export function StatsPanel({ stats }: Props) {
|
||||
return (
|
||||
<div className="stats-panel">
|
||||
<div className="panel-header">
|
||||
<h2>Stats</h2>
|
||||
<div className="badge-row">
|
||||
<TopicBadge topic={TOPICS.algorithms} />
|
||||
<TopicBadge topic={TOPICS.testing} />
|
||||
</div>
|
||||
</div>
|
||||
<div className="stats-grid">
|
||||
<div className="stat">
|
||||
@@ -48,12 +38,6 @@ export function StatsPanel({ stats }: Props) {
|
||||
<div className="stat-label">Elapsed</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="test-info">
|
||||
<span className="test-badge">64 tests</span>
|
||||
<span className="test-note">
|
||||
7 test files · pytest · parametrized
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
import { useState } from "react";
|
||||
import type { InterviewTopic } from "../types";
|
||||
|
||||
/**
|
||||
* Expandable pill badge annotating an interview topic.
|
||||
* Click to expand and see description + code reference.
|
||||
*/
|
||||
export function TopicBadge({ topic }: { topic: InterviewTopic }) {
|
||||
const [expanded, setExpanded] = useState(false);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`topic-badge ${expanded ? "expanded" : ""}`}
|
||||
onClick={() => setExpanded(!expanded)}
|
||||
>
|
||||
<span className="topic-number">#{topic.number}</span>
|
||||
<span className="topic-title">{topic.title}</span>
|
||||
{expanded && (
|
||||
<div className="topic-detail">
|
||||
<p>{topic.description}</p>
|
||||
<code>{topic.code_ref}</code>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
/** Pre-defined topics mapped to pipeline components. */
|
||||
export const TOPICS: Record<string, InterviewTopic> = {
|
||||
params: {
|
||||
number: 1,
|
||||
title: "Function Params & Defaults",
|
||||
description:
|
||||
"Each pipeline parameter has a sensible default (chunk_duration=10s, num_workers=4, max_retries=3). Tweaking them changes pipeline behavior.",
|
||||
code_ref: "core/chunker/pipeline.py — Pipeline.__init__()",
|
||||
},
|
||||
concurrency: {
|
||||
number: 2,
|
||||
title: "Concurrency (Threading)",
|
||||
description:
|
||||
"Workers run in a ThreadPoolExecutor. The queue coordinates work between producer and consumer threads.",
|
||||
code_ref: "core/chunker/pool.py — WorkerPool, ThreadPoolExecutor",
|
||||
},
|
||||
iteration: {
|
||||
number: 3,
|
||||
title: "Generators & Iteration",
|
||||
description:
|
||||
"Chunks are yielded lazily via a generator — the file is never fully loaded into memory.",
|
||||
code_ref: "core/chunker/chunker.py — Chunker.chunks() generator",
|
||||
},
|
||||
oop: {
|
||||
number: 4,
|
||||
title: "OOP Design (ABC)",
|
||||
description:
|
||||
"Processor is an abstract base class. ChecksumProcessor, SimulatedDecodeProcessor, and CompositeProcessor inherit from it.",
|
||||
code_ref: "core/chunker/processor.py — Processor ABC hierarchy",
|
||||
},
|
||||
datastructures: {
|
||||
number: 5,
|
||||
title: "Data Structures",
|
||||
description:
|
||||
"Bounded queue.Queue for backpressure, heapq min-heap for ordered reassembly, deque for sliding-window throughput.",
|
||||
code_ref: "core/chunker/queue.py, collector.py, models.py",
|
||||
},
|
||||
algorithms: {
|
||||
number: 6,
|
||||
title: "Algorithms & Sorting",
|
||||
description:
|
||||
"ResultCollector uses a min-heap to reassemble chunks in sequence order, even when they arrive out of order.",
|
||||
code_ref: "core/chunker/collector.py — heapq-based reassembly",
|
||||
},
|
||||
exceptions: {
|
||||
number: 7,
|
||||
title: "Exception Handling",
|
||||
description:
|
||||
"PipelineError hierarchy with typed exceptions. Workers retry with exponential backoff before giving up.",
|
||||
code_ref: "core/chunker/exceptions.py, worker.py — retry logic",
|
||||
},
|
||||
testing: {
|
||||
number: 8,
|
||||
title: "TDD & Unit Testing",
|
||||
description:
|
||||
"64 tests covering every module. Parametrized tests, fixtures, edge cases, concurrency tests.",
|
||||
code_ref: "tests/chunker/ — 7 test files, pytest",
|
||||
},
|
||||
};
|
||||
@@ -1,28 +1,21 @@
|
||||
import type { WorkerInfo } from "../types";
|
||||
import { TopicBadge, TOPICS } from "./TopicBadge";
|
||||
|
||||
interface Props {
|
||||
workers: WorkerInfo[];
|
||||
}
|
||||
|
||||
const STATE_COLORS: Record<string, string> = {
|
||||
idle: "#6b7280",
|
||||
processing: "#3b82f6",
|
||||
idle: "var(--text-muted)",
|
||||
processing: "var(--processing)",
|
||||
retry: "#f97316",
|
||||
stopped: "#ef4444",
|
||||
stopped: "var(--error)",
|
||||
};
|
||||
|
||||
/**
|
||||
* Worker thread status cards.
|
||||
* Shows each worker's real-time state and which chunk it's processing.
|
||||
* Interview Topic 2: Concurrency (threading).
|
||||
*/
|
||||
export function WorkerPanel({ workers }: Props) {
|
||||
return (
|
||||
<div className="worker-panel">
|
||||
<div className="panel-header">
|
||||
<h2>Workers</h2>
|
||||
<TopicBadge topic={TOPICS.concurrency} />
|
||||
</div>
|
||||
<div className="worker-cards">
|
||||
{workers.map((w) => (
|
||||
@@ -31,7 +24,7 @@ export function WorkerPanel({ workers }: Props) {
|
||||
<span className="worker-name">{w.worker_id}</span>
|
||||
<span
|
||||
className="worker-state"
|
||||
style={{ color: STATE_COLORS[w.state] || "#888" }}
|
||||
style={{ color: STATE_COLORS[w.state] || "var(--text-secondary)" }}
|
||||
>
|
||||
{w.state}
|
||||
</span>
|
||||
|
||||
@@ -3,8 +3,6 @@ import type { PipelineEvent } from "../types";
|
||||
|
||||
/**
|
||||
* SSE hook — connects to /api/chunker/stream/{jobId} via native EventSource.
|
||||
*
|
||||
* Demonstrates: real-time event streaming from backend to UI.
|
||||
*/
|
||||
export function useEventStream(jobId: string | null) {
|
||||
const [events, setEvents] = useState<PipelineEvent[]>([]);
|
||||
@@ -20,6 +18,12 @@ export function useEventStream(jobId: string | null) {
|
||||
}
|
||||
}, []);
|
||||
|
||||
const reset = useCallback(() => {
|
||||
close();
|
||||
setEvents([]);
|
||||
setDone(false);
|
||||
}, [close]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!jobId) return;
|
||||
|
||||
@@ -35,21 +39,28 @@ export function useEventStream(jobId: string | null) {
|
||||
const handleEvent = (eventType: string) => (e: MessageEvent) => {
|
||||
try {
|
||||
const data = JSON.parse(e.data) as PipelineEvent;
|
||||
setEvents((prev) => [...prev, { ...data, status: eventType }]);
|
||||
setEvents((prev) => [...prev, { ...data, event_type: eventType }]);
|
||||
} catch {
|
||||
// ignore parse errors
|
||||
}
|
||||
};
|
||||
|
||||
// Listen to all chunker event types
|
||||
// Listen to all raw pipeline event types
|
||||
const eventTypes = [
|
||||
"waiting",
|
||||
"pending",
|
||||
"chunking",
|
||||
"processing",
|
||||
"collecting",
|
||||
"completed",
|
||||
"failed",
|
||||
"pipeline_start",
|
||||
"pipeline_info",
|
||||
"chunk_queued",
|
||||
"chunk_processing",
|
||||
"chunk_done",
|
||||
"chunk_retry",
|
||||
"chunk_error",
|
||||
"chunk_collected",
|
||||
"worker_status",
|
||||
"pipeline_progress",
|
||||
"pipeline_complete",
|
||||
"pipeline_error",
|
||||
"producer_error",
|
||||
"cancelled",
|
||||
"done",
|
||||
"timeout",
|
||||
@@ -77,5 +88,5 @@ export function useEventStream(jobId: string | null) {
|
||||
};
|
||||
}, [jobId]);
|
||||
|
||||
return { events, connected, done, close };
|
||||
return { events, connected, done, close, reset };
|
||||
}
|
||||
|
||||
103
ui/chunker/src/hooks/useGrpcStream.ts
Normal file
103
ui/chunker/src/hooks/useGrpcStream.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { GrpcWebFetchTransport } from "@protobuf-ts/grpcweb-transport";
|
||||
import { WorkerServiceClient } from "../../../common/api/grpc/worker.client";
|
||||
import type { ChunkPipelineEvent } from "../../../common/api/grpc/worker";
|
||||
import type { PipelineEvent } from "../types";
|
||||
|
||||
const GRPC_WEB_URL = "/grpc-web";
|
||||
|
||||
function toEvent(msg: ChunkPipelineEvent): PipelineEvent {
|
||||
return {
|
||||
event_type: msg.eventType,
|
||||
job_id: msg.jobId,
|
||||
sequence: msg.sequence || undefined,
|
||||
worker_id: msg.workerId || undefined,
|
||||
state: msg.state || undefined,
|
||||
queue_size: msg.queueSize || undefined,
|
||||
elapsed: msg.elapsed || undefined,
|
||||
throughput_mbps: msg.throughputMbps || undefined,
|
||||
total_chunks: msg.totalChunks || undefined,
|
||||
processed_chunks: msg.processedChunks || undefined,
|
||||
failed_chunks: msg.failedChunks || undefined,
|
||||
error: msg.error || undefined,
|
||||
processing_time: msg.processingTime || undefined,
|
||||
retries: msg.retries || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* gRPC-Web streaming hook — connects to WorkerService.StreamChunkPipeline
|
||||
* via Envoy proxy. Replaces useEventStream (SSE+Redis).
|
||||
*/
|
||||
export function useGrpcStream(jobId: string | null) {
|
||||
const [events, setEvents] = useState<PipelineEvent[]>([]);
|
||||
const [connected, setConnected] = useState(false);
|
||||
const [done, setDone] = useState(false);
|
||||
const abortRef = useRef<AbortController | null>(null);
|
||||
|
||||
const close = useCallback(() => {
|
||||
if (abortRef.current) {
|
||||
abortRef.current.abort();
|
||||
abortRef.current = null;
|
||||
setConnected(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const reset = useCallback(() => {
|
||||
close();
|
||||
setEvents([]);
|
||||
setDone(false);
|
||||
}, [close]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!jobId) return;
|
||||
|
||||
setEvents([]);
|
||||
setDone(false);
|
||||
|
||||
const abort = new AbortController();
|
||||
abortRef.current = abort;
|
||||
|
||||
const transport = new GrpcWebFetchTransport({
|
||||
baseUrl: GRPC_WEB_URL,
|
||||
abort: abort.signal,
|
||||
});
|
||||
|
||||
const client = new WorkerServiceClient(transport);
|
||||
|
||||
const stream = client.streamChunkPipeline({ jobId });
|
||||
|
||||
setConnected(true);
|
||||
|
||||
(async () => {
|
||||
try {
|
||||
for await (const msg of stream.responses) {
|
||||
const evt = toEvent(msg);
|
||||
setEvents((prev) => [...prev, evt]);
|
||||
|
||||
if (
|
||||
evt.event_type === "pipeline_complete" ||
|
||||
evt.event_type === "pipeline_error"
|
||||
) {
|
||||
setDone(true);
|
||||
setConnected(false);
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (!abort.signal.aborted) {
|
||||
setConnected(false);
|
||||
}
|
||||
} finally {
|
||||
setConnected(false);
|
||||
}
|
||||
})();
|
||||
|
||||
return () => {
|
||||
abort.abort();
|
||||
abortRef.current = null;
|
||||
};
|
||||
}, [jobId]);
|
||||
|
||||
return { events, connected, done, close, reset };
|
||||
}
|
||||
@@ -1,3 +1,19 @@
|
||||
/**
|
||||
* Chunker UI types.
|
||||
*
|
||||
* Domain types (MediaAsset, ChunkEvent, etc.) come from generated schema.
|
||||
* This file holds UI-only types: state enums, SSE envelope, derived views.
|
||||
*/
|
||||
|
||||
// Re-export generated types used by this app
|
||||
export type {
|
||||
MediaAsset,
|
||||
ChunkEvent,
|
||||
WorkerEvent,
|
||||
PipelineStats,
|
||||
ChunkOutputFile,
|
||||
} from "../../common/types/generated";
|
||||
|
||||
/** Pipeline configuration sent to the backend. */
|
||||
export interface PipelineConfig {
|
||||
source_asset_id: string;
|
||||
@@ -5,31 +21,11 @@ export interface PipelineConfig {
|
||||
num_workers: number;
|
||||
max_retries: number;
|
||||
processor_type: "ffmpeg" | "checksum" | "simulated_decode" | "composite";
|
||||
start_time?: number | null;
|
||||
end_time?: number | null;
|
||||
}
|
||||
|
||||
/**
 * Media asset from the backend.
 *
 * NOTE(review): a `MediaAsset` is also re-exported from
 * `../../common/types/generated` at the top of this file — confirm this
 * local declaration is not a stale duplicate of the generated one.
 */
export interface MediaAsset {
  id: string;
  filename: string;
  file_path: string;
  status: string;
  error_message: string | null;
  // Probe/metadata fields — null presumably means "not yet analyzed"; confirm.
  file_size: number | null;
  duration: number | null;
  video_codec: string | null;
  audio_codec: string | null;
  width: number | null;
  height: number | null;
  framerate: number | null;
  bitrate: number | null;
  // Free-form backend metadata bag.
  properties: Record<string, unknown>;
  comments: string;
  tags: string[];
  // Serialized timestamps (string, not Date) — format/timezone TBD, confirm.
  created_at: string | null;
  updated_at: string | null;
}
|
||||
|
||||
/** State of an individual chunk. */
|
||||
/** UI state of an individual chunk in the grid. */
|
||||
export type ChunkState =
|
||||
| "pending"
|
||||
| "queued"
|
||||
@@ -38,7 +34,7 @@ export type ChunkState =
|
||||
| "error"
|
||||
| "retry";
|
||||
|
||||
/** Tracked chunk in the UI grid. */
|
||||
/** Tracked chunk in the UI grid (derived from events). */
|
||||
export interface ChunkInfo {
|
||||
sequence: number;
|
||||
state: ChunkState;
|
||||
@@ -49,7 +45,7 @@ export interface ChunkInfo {
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/** Worker thread status. */
|
||||
/** Worker thread status (derived from events). */
|
||||
export interface WorkerInfo {
|
||||
worker_id: string;
|
||||
state: "idle" | "processing" | "retry" | "stopped";
|
||||
@@ -59,9 +55,14 @@ export interface WorkerInfo {
|
||||
retries: number;
|
||||
}
|
||||
|
||||
/** SSE event from the backend. */
|
||||
/**
|
||||
* Raw SSE event envelope from the backend.
|
||||
* The event_type field is set by useEventStream from the SSE event name.
|
||||
* All other fields are optional — presence depends on event_type.
|
||||
*/
|
||||
export interface PipelineEvent {
|
||||
job_id: string;
|
||||
job_id?: string;
|
||||
event_type?: string;
|
||||
status?: string;
|
||||
progress?: number;
|
||||
total_chunks?: number;
|
||||
@@ -84,18 +85,7 @@ export interface PipelineEvent {
|
||||
backoff?: number;
|
||||
}
|
||||
|
||||
/**
 * Aggregate pipeline stats.
 *
 * NOTE(review): `PipelineStats` is also re-exported from
 * `../../common/types/generated` at the top of this file — confirm this
 * local declaration is not a stale duplicate.
 */
export interface PipelineStats {
  total_chunks: number;
  processed: number;
  failed: number;
  retries: number;
  // Unit not stated here — presumably seconds; confirm against the producer.
  elapsed: number;
  throughput_mbps: number;
  queue_size: number;
}
|
||||
|
||||
/** Error log entry. */
|
||||
/** Error log entry (derived from events). */
|
||||
export interface ErrorEntry {
|
||||
timestamp: number;
|
||||
sequence?: number;
|
||||
@@ -104,11 +94,3 @@ export interface ErrorEntry {
|
||||
retries?: number;
|
||||
event_type: string;
|
||||
}
|
||||
|
||||
/** Interview topic for annotation badges. */
export interface InterviewTopic {
  // Presumably a display ordinal for the badge — confirm.
  number: number;
  title: string;
  description: string;
  // Pointer to the relevant code location — exact format TBD, confirm.
  code_ref: string;
}
|
||||
|
||||
@@ -14,8 +14,13 @@
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"rootDir": "..",
|
||||
"typeRoots": ["./node_modules/@types"],
|
||||
"paths": {
|
||||
"@protobuf-ts/*": ["./node_modules/@protobuf-ts/*"]
|
||||
}
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx"],
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "../common/**/*.ts", "../common/**/*.tsx"],
|
||||
"references": [{ "path": "./tsconfig.node.json" }]
|
||||
}
|
||||
|
||||
@@ -1,9 +1,26 @@
|
||||
import path from "path";
|
||||
import { defineConfig } from "vite";
|
||||
import react from "@vitejs/plugin-react";
|
||||
|
||||
export default defineConfig({
|
||||
base: "/chunker/",
|
||||
plugins: [react()],
|
||||
resolve: {
|
||||
alias: {
|
||||
"@protobuf-ts/runtime": path.resolve(
|
||||
__dirname,
|
||||
"node_modules/@protobuf-ts/runtime",
|
||||
),
|
||||
"@protobuf-ts/runtime-rpc": path.resolve(
|
||||
__dirname,
|
||||
"node_modules/@protobuf-ts/runtime-rpc",
|
||||
),
|
||||
"@protobuf-ts/grpcweb-transport": path.resolve(
|
||||
__dirname,
|
||||
"node_modules/@protobuf-ts/grpcweb-transport",
|
||||
),
|
||||
},
|
||||
},
|
||||
server: {
|
||||
host: "0.0.0.0",
|
||||
port: 5174,
|
||||
@@ -11,6 +28,9 @@ export default defineConfig({
|
||||
hmr: {
|
||||
path: "/chunker/@vite/client",
|
||||
},
|
||||
fs: {
|
||||
allow: [".."],
|
||||
},
|
||||
proxy: {
|
||||
"/api": {
|
||||
target: "http://fastapi:8702",
|
||||
@@ -20,6 +40,11 @@ export default defineConfig({
|
||||
target: "http://fastapi:8702",
|
||||
changeOrigin: true,
|
||||
},
|
||||
"/grpc-web": {
|
||||
target: "http://envoy:8090",
|
||||
changeOrigin: true,
|
||||
rewrite: (p) => p.replace(/^\/grpc-web/, ""),
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
24
ui/common/api/graphql.ts
Normal file
24
ui/common/api/graphql.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
/**
|
||||
* Shared GraphQL client for all MPR UI apps.
|
||||
*/
|
||||
|
||||
const GRAPHQL_URL = "/api/graphql";
|
||||
|
||||
export async function gql<T>(
|
||||
query: string,
|
||||
variables?: Record<string, unknown>,
|
||||
): Promise<T> {
|
||||
const response = await fetch(GRAPHQL_URL, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ query, variables }),
|
||||
});
|
||||
|
||||
const json = await response.json();
|
||||
|
||||
if (json.errors?.length) {
|
||||
throw new Error(json.errors[0].message);
|
||||
}
|
||||
|
||||
return json.data as T;
|
||||
}
|
||||
95
ui/common/api/grpc/worker.client.ts
Normal file
95
ui/common/api/grpc/worker.client.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
// @generated by protobuf-ts 2.11.1
|
||||
// @generated from protobuf file "worker.proto" (package "mpr.worker", syntax proto3)
|
||||
// tslint:disable
|
||||
//
|
||||
// Protocol Buffer Definitions - GENERATED FILE
|
||||
//
|
||||
// Do not edit directly. Regenerate using modelgen.
|
||||
//
|
||||
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
|
||||
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
|
||||
import { WorkerService } from "./worker";
|
||||
import type { ChunkPipelineEvent } from "./worker";
|
||||
import type { ChunkStreamRequest } from "./worker";
|
||||
import type { WorkerStatus } from "./worker";
|
||||
import type { Empty } from "./worker";
|
||||
import type { CancelResponse } from "./worker";
|
||||
import type { CancelRequest } from "./worker";
|
||||
import type { ProgressUpdate } from "./worker";
|
||||
import type { ProgressRequest } from "./worker";
|
||||
import type { ServerStreamingCall } from "@protobuf-ts/runtime-rpc";
|
||||
import { stackIntercept } from "@protobuf-ts/runtime-rpc";
|
||||
import type { JobResponse } from "./worker";
|
||||
import type { JobRequest } from "./worker";
|
||||
import type { UnaryCall } from "@protobuf-ts/runtime-rpc";
|
||||
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
|
||||
/**
 * @generated from protobuf service mpr.worker.WorkerService
 *
 * NOTE(review): generated file ("Do not edit directly. Regenerate using
 * modelgen.") — any comment added here is lost on regeneration.
 */
export interface IWorkerServiceClient {
    /**
     * @generated from protobuf rpc: SubmitJob
     */
    submitJob(input: JobRequest, options?: RpcOptions): UnaryCall<JobRequest, JobResponse>;
    /**
     * @generated from protobuf rpc: StreamProgress
     */
    streamProgress(input: ProgressRequest, options?: RpcOptions): ServerStreamingCall<ProgressRequest, ProgressUpdate>;
    /**
     * @generated from protobuf rpc: CancelJob
     */
    cancelJob(input: CancelRequest, options?: RpcOptions): UnaryCall<CancelRequest, CancelResponse>;
    /**
     * @generated from protobuf rpc: GetWorkerStatus
     */
    getWorkerStatus(input: Empty, options?: RpcOptions): UnaryCall<Empty, WorkerStatus>;
    /**
     * @generated from protobuf rpc: StreamChunkPipeline
     */
    streamChunkPipeline(input: ChunkStreamRequest, options?: RpcOptions): ServerStreamingCall<ChunkStreamRequest, ChunkPipelineEvent>;
}
|
||||
/**
 * @generated from protobuf service mpr.worker.WorkerService
 *
 * NOTE(review): generated file ("Do not edit directly. Regenerate using
 * modelgen.") — comments below are review notes only and will be lost on
 * regeneration. Method bodies index `this.methods` positionally, so the
 * order here must match the rpc order in worker.proto.
 */
export class WorkerServiceClient implements IWorkerServiceClient, ServiceInfo {
    typeName = WorkerService.typeName;
    methods = WorkerService.methods;
    options = WorkerService.options;
    constructor(private readonly _transport: RpcTransport) {
    }
    /**
     * @generated from protobuf rpc: SubmitJob
     */
    submitJob(input: JobRequest, options?: RpcOptions): UnaryCall<JobRequest, JobResponse> {
        const method = this.methods[0], opt = this._transport.mergeOptions(options);
        return stackIntercept<JobRequest, JobResponse>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: StreamProgress
     */
    streamProgress(input: ProgressRequest, options?: RpcOptions): ServerStreamingCall<ProgressRequest, ProgressUpdate> {
        const method = this.methods[1], opt = this._transport.mergeOptions(options);
        return stackIntercept<ProgressRequest, ProgressUpdate>("serverStreaming", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: CancelJob
     */
    cancelJob(input: CancelRequest, options?: RpcOptions): UnaryCall<CancelRequest, CancelResponse> {
        const method = this.methods[2], opt = this._transport.mergeOptions(options);
        return stackIntercept<CancelRequest, CancelResponse>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: GetWorkerStatus
     */
    getWorkerStatus(input: Empty, options?: RpcOptions): UnaryCall<Empty, WorkerStatus> {
        const method = this.methods[3], opt = this._transport.mergeOptions(options);
        return stackIntercept<Empty, WorkerStatus>("unary", this._transport, method, opt, input);
    }
    /**
     * @generated from protobuf rpc: StreamChunkPipeline
     */
    streamChunkPipeline(input: ChunkStreamRequest, options?: RpcOptions): ServerStreamingCall<ChunkStreamRequest, ChunkPipelineEvent> {
        const method = this.methods[4], opt = this._transport.mergeOptions(options);
        return stackIntercept<ChunkStreamRequest, ChunkPipelineEvent>("serverStreaming", this._transport, method, opt, input);
    }
}
|
||||
946
ui/common/api/grpc/worker.ts
Normal file
946
ui/common/api/grpc/worker.ts
Normal file
@@ -0,0 +1,946 @@
|
||||
// @generated by protobuf-ts 2.11.1
|
||||
// @generated from protobuf file "worker.proto" (package "mpr.worker", syntax proto3)
|
||||
// tslint:disable
|
||||
//
|
||||
// Protocol Buffer Definitions - GENERATED FILE
|
||||
//
|
||||
// Do not edit directly. Regenerate using modelgen.
|
||||
//
|
||||
import { ServiceType } from "@protobuf-ts/runtime-rpc";
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
/**
|
||||
* @generated from protobuf message mpr.worker.JobRequest
|
||||
*/
|
||||
export interface JobRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string job_id = 1
|
||||
*/
|
||||
jobId: string;
|
||||
/**
|
||||
* @generated from protobuf field: string source_path = 2
|
||||
*/
|
||||
sourcePath: string;
|
||||
/**
|
||||
* @generated from protobuf field: string output_path = 3
|
||||
*/
|
||||
outputPath: string;
|
||||
/**
|
||||
* @generated from protobuf field: string preset_json = 4
|
||||
*/
|
||||
presetJson: string;
|
||||
/**
|
||||
* @generated from protobuf field: optional float trim_start = 5
|
||||
*/
|
||||
trimStart?: number;
|
||||
/**
|
||||
* @generated from protobuf field: optional float trim_end = 6
|
||||
*/
|
||||
trimEnd?: number;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message mpr.worker.JobResponse
|
||||
*/
|
||||
export interface JobResponse {
|
||||
/**
|
||||
* @generated from protobuf field: string job_id = 1
|
||||
*/
|
||||
jobId: string;
|
||||
/**
|
||||
* @generated from protobuf field: bool accepted = 2
|
||||
*/
|
||||
accepted: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: string message = 3
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message mpr.worker.ProgressRequest
|
||||
*/
|
||||
export interface ProgressRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string job_id = 1
|
||||
*/
|
||||
jobId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message mpr.worker.ProgressUpdate
|
||||
*/
|
||||
export interface ProgressUpdate {
|
||||
/**
|
||||
* @generated from protobuf field: string job_id = 1
|
||||
*/
|
||||
jobId: string;
|
||||
/**
|
||||
* @generated from protobuf field: int32 progress = 2
|
||||
*/
|
||||
progress: number;
|
||||
/**
|
||||
* @generated from protobuf field: int32 current_frame = 3
|
||||
*/
|
||||
currentFrame: number;
|
||||
/**
|
||||
* @generated from protobuf field: float current_time = 4
|
||||
*/
|
||||
currentTime: number;
|
||||
/**
|
||||
* @generated from protobuf field: float speed = 5
|
||||
*/
|
||||
speed: number;
|
||||
/**
|
||||
* @generated from protobuf field: string status = 6
|
||||
*/
|
||||
status: string;
|
||||
/**
|
||||
* @generated from protobuf field: optional string error = 7
|
||||
*/
|
||||
error?: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message mpr.worker.CancelRequest
|
||||
*/
|
||||
export interface CancelRequest {
|
||||
/**
|
||||
* @generated from protobuf field: string job_id = 1
|
||||
*/
|
||||
jobId: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message mpr.worker.CancelResponse
|
||||
*/
|
||||
export interface CancelResponse {
|
||||
/**
|
||||
* @generated from protobuf field: string job_id = 1
|
||||
*/
|
||||
jobId: string;
|
||||
/**
|
||||
* @generated from protobuf field: bool cancelled = 2
|
||||
*/
|
||||
cancelled: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: string message = 3
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
/**
|
||||
* @generated from protobuf message mpr.worker.WorkerStatus
|
||||
*/
|
||||
export interface WorkerStatus {
|
||||
/**
|
||||
* @generated from protobuf field: bool available = 1
|
||||
*/
|
||||
available: boolean;
|
||||
/**
|
||||
* @generated from protobuf field: int32 active_jobs = 2
|
||||
*/
|
||||
activeJobs: number;
|
||||
/**
|
||||
* @generated from protobuf field: repeated string supported_codecs = 3
|
||||
*/
|
||||
supportedCodecs: string[];
|
||||
/**
|
||||
* @generated from protobuf field: bool gpu_available = 4
|
||||
*/
|
||||
gpuAvailable: boolean;
|
||||
}
|
||||
/**
|
||||
* Empty
|
||||
*
|
||||
* @generated from protobuf message mpr.worker.Empty
|
||||
*/
|
||||
export interface Empty {
|
||||
}
|
||||
/**
 * @generated from protobuf message mpr.worker.ChunkStreamRequest
 *
 * Request message for WorkerService.StreamChunkPipeline (see the client
 * interface above).
 */
export interface ChunkStreamRequest {
    /**
     * @generated from protobuf field: string job_id = 1
     */
    jobId: string;
}
|
||||
/**
 * @generated from protobuf message mpr.worker.ChunkPipelineEvent
 *
 * NOTE(review): every field here is a non-optional proto3 scalar, so the
 * wire encoding omits default values (the writer below only emits fields
 * when `!== 0` / `!== ""`). On decode, "absent" and "zero/empty" are
 * therefore indistinguishable — e.g. `sequence === 0` may be a real chunk
 * index. Consumers mapping these fields with `|| undefined` will drop
 * valid zero values; verify callers handle this.
 */
export interface ChunkPipelineEvent {
    /**
     * @generated from protobuf field: string job_id = 1
     */
    jobId: string;
    /**
     * @generated from protobuf field: string event_type = 2
     */
    eventType: string;
    /**
     * @generated from protobuf field: int32 sequence = 3
     */
    sequence: number;
    /**
     * @generated from protobuf field: string worker_id = 4
     */
    workerId: string;
    /**
     * @generated from protobuf field: string state = 5
     */
    state: string;
    /**
     * @generated from protobuf field: int32 queue_size = 6
     */
    queueSize: number;
    /**
     * @generated from protobuf field: float elapsed = 7
     */
    elapsed: number;
    /**
     * @generated from protobuf field: float throughput_mbps = 8
     */
    throughputMbps: number;
    /**
     * @generated from protobuf field: int32 total_chunks = 9
     */
    totalChunks: number;
    /**
     * @generated from protobuf field: int32 processed_chunks = 10
     */
    processedChunks: number;
    /**
     * @generated from protobuf field: int32 failed_chunks = 11
     */
    failedChunks: number;
    /**
     * @generated from protobuf field: string error = 12
     */
    error: string;
    /**
     * @generated from protobuf field: float processing_time = 13
     */
    processingTime: number;
    /**
     * @generated from protobuf field: int32 retries = 14
     */
    retries: number;
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class JobRequest$Type extends MessageType<JobRequest> {
|
||||
constructor() {
|
||||
super("mpr.worker.JobRequest", [
|
||||
{ no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "source_path", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 3, name: "output_path", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 4, name: "preset_json", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 5, name: "trim_start", kind: "scalar", opt: true, T: 2 /*ScalarType.FLOAT*/ },
|
||||
{ no: 6, name: "trim_end", kind: "scalar", opt: true, T: 2 /*ScalarType.FLOAT*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<JobRequest>): JobRequest {
|
||||
const message = globalThis.Object.create((this.messagePrototype!));
|
||||
message.jobId = "";
|
||||
message.sourcePath = "";
|
||||
message.outputPath = "";
|
||||
message.presetJson = "";
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<JobRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobRequest): JobRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string job_id */ 1:
|
||||
message.jobId = reader.string();
|
||||
break;
|
||||
case /* string source_path */ 2:
|
||||
message.sourcePath = reader.string();
|
||||
break;
|
||||
case /* string output_path */ 3:
|
||||
message.outputPath = reader.string();
|
||||
break;
|
||||
case /* string preset_json */ 4:
|
||||
message.presetJson = reader.string();
|
||||
break;
|
||||
case /* optional float trim_start */ 5:
|
||||
message.trimStart = reader.float();
|
||||
break;
|
||||
case /* optional float trim_end */ 6:
|
||||
message.trimEnd = reader.float();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: JobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string job_id = 1; */
|
||||
if (message.jobId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.jobId);
|
||||
/* string source_path = 2; */
|
||||
if (message.sourcePath !== "")
|
||||
writer.tag(2, WireType.LengthDelimited).string(message.sourcePath);
|
||||
/* string output_path = 3; */
|
||||
if (message.outputPath !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.outputPath);
|
||||
/* string preset_json = 4; */
|
||||
if (message.presetJson !== "")
|
||||
writer.tag(4, WireType.LengthDelimited).string(message.presetJson);
|
||||
/* optional float trim_start = 5; */
|
||||
if (message.trimStart !== undefined)
|
||||
writer.tag(5, WireType.Bit32).float(message.trimStart);
|
||||
/* optional float trim_end = 6; */
|
||||
if (message.trimEnd !== undefined)
|
||||
writer.tag(6, WireType.Bit32).float(message.trimEnd);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message mpr.worker.JobRequest
|
||||
*/
|
||||
export const JobRequest = new JobRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class JobResponse$Type extends MessageType<JobResponse> {
|
||||
constructor() {
|
||||
super("mpr.worker.JobResponse", [
|
||||
{ no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "accepted", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
|
||||
{ no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<JobResponse>): JobResponse {
|
||||
const message = globalThis.Object.create((this.messagePrototype!));
|
||||
message.jobId = "";
|
||||
message.accepted = false;
|
||||
message.message = "";
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<JobResponse>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobResponse): JobResponse {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string job_id */ 1:
|
||||
message.jobId = reader.string();
|
||||
break;
|
||||
case /* bool accepted */ 2:
|
||||
message.accepted = reader.bool();
|
||||
break;
|
||||
case /* string message */ 3:
|
||||
message.message = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: JobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string job_id = 1; */
|
||||
if (message.jobId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.jobId);
|
||||
/* bool accepted = 2; */
|
||||
if (message.accepted !== false)
|
||||
writer.tag(2, WireType.Varint).bool(message.accepted);
|
||||
/* string message = 3; */
|
||||
if (message.message !== "")
|
||||
writer.tag(3, WireType.LengthDelimited).string(message.message);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message mpr.worker.JobResponse
|
||||
*/
|
||||
export const JobResponse = new JobResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ProgressRequest$Type extends MessageType<ProgressRequest> {
|
||||
constructor() {
|
||||
super("mpr.worker.ProgressRequest", [
|
||||
{ no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ProgressRequest>): ProgressRequest {
|
||||
const message = globalThis.Object.create((this.messagePrototype!));
|
||||
message.jobId = "";
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<ProgressRequest>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProgressRequest): ProgressRequest {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string job_id */ 1:
|
||||
message.jobId = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: ProgressRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string job_id = 1; */
|
||||
if (message.jobId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.jobId);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message mpr.worker.ProgressRequest
|
||||
*/
|
||||
export const ProgressRequest = new ProgressRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class ProgressUpdate$Type extends MessageType<ProgressUpdate> {
|
||||
constructor() {
|
||||
super("mpr.worker.ProgressUpdate", [
|
||||
{ no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "progress", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
|
||||
{ no: 3, name: "current_frame", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
|
||||
{ no: 4, name: "current_time", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ },
|
||||
{ no: 5, name: "speed", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ },
|
||||
{ no: 6, name: "status", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 7, name: "error", kind: "scalar", opt: true, T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value?: PartialMessage<ProgressUpdate>): ProgressUpdate {
|
||||
const message = globalThis.Object.create((this.messagePrototype!));
|
||||
message.jobId = "";
|
||||
message.progress = 0;
|
||||
message.currentFrame = 0;
|
||||
message.currentTime = 0;
|
||||
message.speed = 0;
|
||||
message.status = "";
|
||||
if (value !== undefined)
|
||||
reflectionMergePartial<ProgressUpdate>(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProgressUpdate): ProgressUpdate {
|
||||
let message = target ?? this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string job_id */ 1:
|
||||
message.jobId = reader.string();
|
||||
break;
|
||||
case /* int32 progress */ 2:
|
||||
message.progress = reader.int32();
|
||||
break;
|
||||
case /* int32 current_frame */ 3:
|
||||
message.currentFrame = reader.int32();
|
||||
break;
|
||||
case /* float current_time */ 4:
|
||||
message.currentTime = reader.float();
|
||||
break;
|
||||
case /* float speed */ 5:
|
||||
message.speed = reader.float();
|
||||
break;
|
||||
case /* string status */ 6:
|
||||
message.status = reader.string();
|
||||
break;
|
||||
case /* optional string error */ 7:
|
||||
message.error = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message: ProgressUpdate, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
|
||||
/* string job_id = 1; */
|
||||
if (message.jobId !== "")
|
||||
writer.tag(1, WireType.LengthDelimited).string(message.jobId);
|
||||
/* int32 progress = 2; */
|
||||
if (message.progress !== 0)
|
||||
writer.tag(2, WireType.Varint).int32(message.progress);
|
||||
/* int32 current_frame = 3; */
|
||||
if (message.currentFrame !== 0)
|
||||
writer.tag(3, WireType.Varint).int32(message.currentFrame);
|
||||
/* float current_time = 4; */
|
||||
if (message.currentTime !== 0)
|
||||
writer.tag(4, WireType.Bit32).float(message.currentTime);
|
||||
/* float speed = 5; */
|
||||
if (message.speed !== 0)
|
||||
writer.tag(5, WireType.Bit32).float(message.speed);
|
||||
/* string status = 6; */
|
||||
if (message.status !== "")
|
||||
writer.tag(6, WireType.LengthDelimited).string(message.status);
|
||||
/* optional string error = 7; */
|
||||
if (message.error !== undefined)
|
||||
writer.tag(7, WireType.LengthDelimited).string(message.error);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message mpr.worker.ProgressUpdate
|
||||
*/
|
||||
export const ProgressUpdate = new ProgressUpdate$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — regenerate from the .proto instead of hand-editing.
class CancelRequest$Type extends MessageType<CancelRequest> {
    constructor() {
        // Field table: single string field, the id of the job to cancel.
        super("mpr.worker.CancelRequest", [
            { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a message with proto3 defaults, then overlays `value` if provided.
    create(value?: PartialMessage<CancelRequest>): CancelRequest {
        const message = globalThis.Object.create((this.messagePrototype!));
        message.jobId = "";
        if (value !== undefined)
            reflectionMergePartial<CancelRequest>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of binary wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CancelRequest): CancelRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string job_id */ 1:
                    message.jobId = reader.string();
                    break;
                default:
                    // Unknown field: throw, skip, or delegate per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes `message` to binary wire format; proto3 default values are omitted.
    internalBinaryWrite(message: CancelRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string job_id = 1; */
        if (message.jobId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.jobId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message mpr.worker.CancelRequest
 */
export const CancelRequest = new CancelRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — regenerate from the .proto instead of hand-editing.
class CancelResponse$Type extends MessageType<CancelResponse> {
    constructor() {
        // Field table: job id, whether cancellation took effect, and a human-readable message.
        super("mpr.worker.CancelResponse", [
            { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "cancelled", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a message with proto3 defaults, then overlays `value` if provided.
    create(value?: PartialMessage<CancelResponse>): CancelResponse {
        const message = globalThis.Object.create((this.messagePrototype!));
        message.jobId = "";
        message.cancelled = false;
        message.message = "";
        if (value !== undefined)
            reflectionMergePartial<CancelResponse>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of binary wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CancelResponse): CancelResponse {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string job_id */ 1:
                    message.jobId = reader.string();
                    break;
                case /* bool cancelled */ 2:
                    message.cancelled = reader.bool();
                    break;
                case /* string message */ 3:
                    message.message = reader.string();
                    break;
                default:
                    // Unknown field: throw, skip, or delegate per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes `message` to binary wire format; proto3 default values are omitted.
    internalBinaryWrite(message: CancelResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string job_id = 1; */
        if (message.jobId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.jobId);
        /* bool cancelled = 2; */
        if (message.cancelled !== false)
            writer.tag(2, WireType.Varint).bool(message.cancelled);
        /* string message = 3; */
        if (message.message !== "")
            writer.tag(3, WireType.LengthDelimited).string(message.message);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message mpr.worker.CancelResponse
 */
export const CancelResponse = new CancelResponse$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — regenerate from the .proto instead of hand-editing.
class WorkerStatus$Type extends MessageType<WorkerStatus> {
    constructor() {
        // Field table: availability flag, active job count, repeated codec names, GPU flag.
        super("mpr.worker.WorkerStatus", [
            { no: 1, name: "available", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
            { no: 2, name: "active_jobs", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 3, name: "supported_codecs", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ },
            { no: 4, name: "gpu_available", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
        ]);
    }
    // Builds a message with proto3 defaults, then overlays `value` if provided.
    create(value?: PartialMessage<WorkerStatus>): WorkerStatus {
        const message = globalThis.Object.create((this.messagePrototype!));
        message.available = false;
        message.activeJobs = 0;
        message.supportedCodecs = [];
        message.gpuAvailable = false;
        if (value !== undefined)
            reflectionMergePartial<WorkerStatus>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of binary wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: WorkerStatus): WorkerStatus {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bool available */ 1:
                    message.available = reader.bool();
                    break;
                case /* int32 active_jobs */ 2:
                    message.activeJobs = reader.int32();
                    break;
                case /* repeated string supported_codecs */ 3:
                    // Repeated field: each occurrence on the wire appends one element.
                    message.supportedCodecs.push(reader.string());
                    break;
                case /* bool gpu_available */ 4:
                    message.gpuAvailable = reader.bool();
                    break;
                default:
                    // Unknown field: throw, skip, or delegate per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes `message` to binary wire format; proto3 default values are omitted.
    internalBinaryWrite(message: WorkerStatus, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bool available = 1; */
        if (message.available !== false)
            writer.tag(1, WireType.Varint).bool(message.available);
        /* int32 active_jobs = 2; */
        if (message.activeJobs !== 0)
            writer.tag(2, WireType.Varint).int32(message.activeJobs);
        /* repeated string supported_codecs = 3; */
        for (let i = 0; i < message.supportedCodecs.length; i++)
            writer.tag(3, WireType.LengthDelimited).string(message.supportedCodecs[i]);
        /* bool gpu_available = 4; */
        if (message.gpuAvailable !== false)
            writer.tag(4, WireType.Varint).bool(message.gpuAvailable);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message mpr.worker.WorkerStatus
 */
export const WorkerStatus = new WorkerStatus$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — regenerate from the .proto instead of hand-editing.
class Empty$Type extends MessageType<Empty> {
    constructor() {
        // No fields: used as the request type for parameterless RPCs.
        super("mpr.worker.Empty", []);
    }
    // Builds an empty message, then overlays `value` if provided (no-op for Empty).
    create(value?: PartialMessage<Empty>): Empty {
        const message = globalThis.Object.create((this.messagePrototype!));
        if (value !== undefined)
            reflectionMergePartial<Empty>(this, message, value);
        return message;
    }
    // Decodes `length` bytes; every field is unknown since Empty declares none.
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Empty): Empty {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                default:
                    // Unknown field: throw, skip, or delegate per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes only unknown fields (Empty has no declared fields).
    internalBinaryWrite(message: Empty, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message mpr.worker.Empty
 */
export const Empty = new Empty$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — regenerate from the .proto instead of hand-editing.
class ChunkStreamRequest$Type extends MessageType<ChunkStreamRequest> {
    constructor() {
        // Field table: single string field, the chunk job to subscribe to.
        super("mpr.worker.ChunkStreamRequest", [
            { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    // Builds a message with proto3 defaults, then overlays `value` if provided.
    create(value?: PartialMessage<ChunkStreamRequest>): ChunkStreamRequest {
        const message = globalThis.Object.create((this.messagePrototype!));
        message.jobId = "";
        if (value !== undefined)
            reflectionMergePartial<ChunkStreamRequest>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of binary wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ChunkStreamRequest): ChunkStreamRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string job_id */ 1:
                    message.jobId = reader.string();
                    break;
                default:
                    // Unknown field: throw, skip, or delegate per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes `message` to binary wire format; proto3 default values are omitted.
    internalBinaryWrite(message: ChunkStreamRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string job_id = 1; */
        if (message.jobId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.jobId);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message mpr.worker.ChunkStreamRequest
 */
export const ChunkStreamRequest = new ChunkStreamRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): protobuf-ts generated code — regenerate from the .proto instead of hand-editing.
class ChunkPipelineEvent$Type extends MessageType<ChunkPipelineEvent> {
    constructor() {
        // Field table: one event on the chunk pipeline stream — identity (job/event/sequence/worker),
        // pipeline state and queue depth, throughput stats, chunk counters, and error details.
        super("mpr.worker.ChunkPipelineEvent", [
            { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "event_type", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 3, name: "sequence", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 4, name: "worker_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 5, name: "state", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 6, name: "queue_size", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 7, name: "elapsed", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ },
            { no: 8, name: "throughput_mbps", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ },
            { no: 9, name: "total_chunks", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 10, name: "processed_chunks", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 11, name: "failed_chunks", kind: "scalar", T: 5 /*ScalarType.INT32*/ },
            { no: 12, name: "error", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 13, name: "processing_time", kind: "scalar", T: 2 /*ScalarType.FLOAT*/ },
            { no: 14, name: "retries", kind: "scalar", T: 5 /*ScalarType.INT32*/ }
        ]);
    }
    // Builds a message with proto3 defaults, then overlays `value` if provided.
    create(value?: PartialMessage<ChunkPipelineEvent>): ChunkPipelineEvent {
        const message = globalThis.Object.create((this.messagePrototype!));
        message.jobId = "";
        message.eventType = "";
        message.sequence = 0;
        message.workerId = "";
        message.state = "";
        message.queueSize = 0;
        message.elapsed = 0;
        message.throughputMbps = 0;
        message.totalChunks = 0;
        message.processedChunks = 0;
        message.failedChunks = 0;
        message.error = "";
        message.processingTime = 0;
        message.retries = 0;
        if (value !== undefined)
            reflectionMergePartial<ChunkPipelineEvent>(this, message, value);
        return message;
    }
    // Decodes `length` bytes of binary wire format into `target` (or a fresh message).
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ChunkPipelineEvent): ChunkPipelineEvent {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string job_id */ 1:
                    message.jobId = reader.string();
                    break;
                case /* string event_type */ 2:
                    message.eventType = reader.string();
                    break;
                case /* int32 sequence */ 3:
                    message.sequence = reader.int32();
                    break;
                case /* string worker_id */ 4:
                    message.workerId = reader.string();
                    break;
                case /* string state */ 5:
                    message.state = reader.string();
                    break;
                case /* int32 queue_size */ 6:
                    message.queueSize = reader.int32();
                    break;
                case /* float elapsed */ 7:
                    message.elapsed = reader.float();
                    break;
                case /* float throughput_mbps */ 8:
                    message.throughputMbps = reader.float();
                    break;
                case /* int32 total_chunks */ 9:
                    message.totalChunks = reader.int32();
                    break;
                case /* int32 processed_chunks */ 10:
                    message.processedChunks = reader.int32();
                    break;
                case /* int32 failed_chunks */ 11:
                    message.failedChunks = reader.int32();
                    break;
                case /* string error */ 12:
                    message.error = reader.string();
                    break;
                case /* float processing_time */ 13:
                    message.processingTime = reader.float();
                    break;
                case /* int32 retries */ 14:
                    message.retries = reader.int32();
                    break;
                default:
                    // Unknown field: throw, skip, or delegate per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // Encodes `message` to binary wire format; proto3 default values are omitted.
    internalBinaryWrite(message: ChunkPipelineEvent, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string job_id = 1; */
        if (message.jobId !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.jobId);
        /* string event_type = 2; */
        if (message.eventType !== "")
            writer.tag(2, WireType.LengthDelimited).string(message.eventType);
        /* int32 sequence = 3; */
        if (message.sequence !== 0)
            writer.tag(3, WireType.Varint).int32(message.sequence);
        /* string worker_id = 4; */
        if (message.workerId !== "")
            writer.tag(4, WireType.LengthDelimited).string(message.workerId);
        /* string state = 5; */
        if (message.state !== "")
            writer.tag(5, WireType.LengthDelimited).string(message.state);
        /* int32 queue_size = 6; */
        if (message.queueSize !== 0)
            writer.tag(6, WireType.Varint).int32(message.queueSize);
        /* float elapsed = 7; */
        if (message.elapsed !== 0)
            writer.tag(7, WireType.Bit32).float(message.elapsed);
        /* float throughput_mbps = 8; */
        if (message.throughputMbps !== 0)
            writer.tag(8, WireType.Bit32).float(message.throughputMbps);
        /* int32 total_chunks = 9; */
        if (message.totalChunks !== 0)
            writer.tag(9, WireType.Varint).int32(message.totalChunks);
        /* int32 processed_chunks = 10; */
        if (message.processedChunks !== 0)
            writer.tag(10, WireType.Varint).int32(message.processedChunks);
        /* int32 failed_chunks = 11; */
        if (message.failedChunks !== 0)
            writer.tag(11, WireType.Varint).int32(message.failedChunks);
        /* string error = 12; */
        if (message.error !== "")
            writer.tag(12, WireType.LengthDelimited).string(message.error);
        /* float processing_time = 13; */
        if (message.processingTime !== 0)
            writer.tag(13, WireType.Bit32).float(message.processingTime);
        /* int32 retries = 14; */
        if (message.retries !== 0)
            writer.tag(14, WireType.Varint).int32(message.retries);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message mpr.worker.ChunkPipelineEvent
 */
export const ChunkPipelineEvent = new ChunkPipelineEvent$Type();
|
||||
/**
 * @generated ServiceType for protobuf service mpr.worker.WorkerService
 */
export const WorkerService = new ServiceType("mpr.worker.WorkerService", [
    // Unary: submit a job for processing.
    { name: "SubmitJob", options: {}, I: JobRequest, O: JobResponse },
    // Server-streaming: progress updates for a running job.
    { name: "StreamProgress", serverStreaming: true, options: {}, I: ProgressRequest, O: ProgressUpdate },
    // Unary: request cancellation of a job.
    { name: "CancelJob", options: {}, I: CancelRequest, O: CancelResponse },
    // Unary: poll worker availability/capabilities.
    { name: "GetWorkerStatus", options: {}, I: Empty, O: WorkerStatus },
    // Server-streaming: per-chunk pipeline events for a chunk job.
    { name: "StreamChunkPipeline", serverStreaming: true, options: {}, I: ChunkStreamRequest, O: ChunkPipelineEvent }
]);
|
||||
42
ui/common/api/media.ts
Normal file
42
ui/common/api/media.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
/**
|
||||
* Shared media API functions — identical across all MPR UI apps.
|
||||
*/
|
||||
|
||||
import type { MediaAsset } from "../types/generated";
|
||||
import { gql } from "./graphql";
|
||||
|
||||
/** Fetch all media assets. */
|
||||
export async function getAssets(): Promise<MediaAsset[]> {
|
||||
const data = await gql<{ assets: MediaAsset[] }>(`
|
||||
query {
|
||||
assets {
|
||||
id filename file_path status error_message file_size duration
|
||||
video_codec audio_codec width height framerate bitrate
|
||||
properties comments tags created_at updated_at
|
||||
}
|
||||
}
|
||||
`);
|
||||
return data.assets;
|
||||
}
|
||||
|
||||
/** Scan media/in/ folder for new files. */
|
||||
export async function scanMediaFolder(): Promise<{
|
||||
found: number;
|
||||
registered: number;
|
||||
skipped: number;
|
||||
files: string[];
|
||||
}> {
|
||||
const data = await gql<{
|
||||
scan_media_folder: {
|
||||
found: number;
|
||||
registered: number;
|
||||
skipped: number;
|
||||
files: string[];
|
||||
};
|
||||
}>(`
|
||||
mutation {
|
||||
scan_media_folder { found registered skipped files }
|
||||
}
|
||||
`);
|
||||
return data.scan_media_folder;
|
||||
}
|
||||
97
ui/common/components/FileManager.css
Normal file
97
ui/common/components/FileManager.css
Normal file
@@ -0,0 +1,97 @@
|
||||
/* FileManager component styles — pairs with ui/common/components/FileManager.tsx. */

.file-manager {
  margin-bottom: 1rem;
}

/* Title row: heading on the left, optional scan button on the right. */
.fm-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 0.5rem;
}

.fm-header h2 {
  font-size: 0.85rem;
  font-weight: 600;
  text-transform: uppercase;
  letter-spacing: 0.05em;
  color: var(--text-secondary);
}

.fm-scan-btn {
  padding: 0.25rem 0.6rem;
  background: var(--bg-input);
  color: var(--text-secondary);
  font-size: var(--font-size-xs);
}
.fm-scan-btn:hover:not(:disabled) {
  color: var(--text-primary);
  background: var(--border-light);
}

/* Scrollable file list container. */
.fm-list {
  list-style: none;
  max-height: 200px;
  overflow-y: auto;
  border: 1px solid var(--border);
  border-radius: var(--radius-sm);
  background: var(--bg-primary);
}

.fm-empty {
  padding: 1rem;
  text-align: center;
  color: var(--text-muted);
  font-size: var(--font-size-sm);
}

/* One file row: name/meta on the left, actions on the right. */
.fm-item {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 0.4rem 0.6rem;
  border-bottom: 1px solid var(--border);
  transition: background 0.1s;
}
.fm-item:last-child {
  border-bottom: none;
}

.fm-clickable {
  cursor: pointer;
}
.fm-clickable:hover {
  background: var(--bg-input);
}

/* Selected row inverts to the accent color; !important beats the hover rule. */
.fm-selected {
  background: var(--accent) !important;
  color: #fff;
}
.fm-selected .fm-meta {
  color: rgba(255, 255, 255, 0.7);
}

/* min-width: 0 lets the filename ellipsis work inside the flex row. */
.fm-item-info {
  display: flex;
  flex-direction: column;
  gap: 0.15rem;
  overflow: hidden;
  min-width: 0;
}

.fm-filename {
  font-size: var(--font-size-sm);
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}

.fm-meta {
  font-size: var(--font-size-xs);
  color: var(--text-muted);
}

.fm-actions {
  flex-shrink: 0;
  margin-left: 0.5rem;
}
|
||||
84
ui/common/components/FileManager.tsx
Normal file
84
ui/common/components/FileManager.tsx
Normal file
@@ -0,0 +1,84 @@
|
||||
/**
|
||||
* FileManager — pluggable file browser for S3/MinIO files.
|
||||
*
|
||||
* Handles both input file selection and output file listing.
|
||||
* Used by timeline (assets + output), chunker (assets + chunk output),
|
||||
* and future tools.
|
||||
*/
|
||||
|
||||
import type { ReactNode } from "react";
|
||||
import { formatSize } from "../utils/format";
|
||||
import "./FileManager.css";
|
||||
|
||||
/** A single file row shown by FileManager (an S3/MinIO object). */
export interface FileEntry {
    key: string;   // unique storage key — used as the React list key and for selection
    name: string;  // display name shown in the row
    size?: number; // size in bytes; omitted when unknown
    meta?: string; // extra display text appended after the size (e.g. codec, date)
}

/** Props for the FileManager component. */
interface FileManagerProps {
    title: string;                                // header text for the panel
    files: FileEntry[];                           // rows to render, in order
    selectedKey?: string | null;                  // key of the currently selected row, if any
    onSelect?: (file: FileEntry) => void;         // row click handler; omit for a read-only list
    onScan?: () => void;                          // shows a "Scan Folder" button when provided
    scanning?: boolean;                           // disables the scan button while a scan runs
    emptyMessage?: string;                        // text shown when `files` is empty
    renderActions?: (file: FileEntry) => ReactNode; // optional per-row action buttons
    disabled?: boolean;                           // disables row clicks and the scan button
}
|
||||
|
||||
export function FileManager({
|
||||
title,
|
||||
files,
|
||||
selectedKey,
|
||||
onSelect,
|
||||
onScan,
|
||||
scanning = false,
|
||||
emptyMessage = "No files",
|
||||
renderActions,
|
||||
disabled = false,
|
||||
}: FileManagerProps) {
|
||||
return (
|
||||
<div className="file-manager">
|
||||
<div className="fm-header">
|
||||
<h2>{title}</h2>
|
||||
{onScan && (
|
||||
<button
|
||||
className="fm-scan-btn"
|
||||
onClick={onScan}
|
||||
disabled={scanning || disabled}
|
||||
>
|
||||
{scanning ? "Scanning..." : "Scan Folder"}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<ul className="fm-list">
|
||||
{files.length === 0 ? (
|
||||
<li className="fm-empty">{emptyMessage}</li>
|
||||
) : (
|
||||
files.map((file) => (
|
||||
<li
|
||||
key={file.key}
|
||||
className={`fm-item ${selectedKey === file.key ? "fm-selected" : ""} ${onSelect && !disabled ? "fm-clickable" : ""}`}
|
||||
onClick={() => onSelect && !disabled && onSelect(file)}
|
||||
title={file.name}
|
||||
>
|
||||
<div className="fm-item-info">
|
||||
<span className="fm-filename">{file.name}</span>
|
||||
<span className="fm-meta">
|
||||
{file.size != null && formatSize(file.size)}
|
||||
{file.meta && (file.size != null ? ` · ${file.meta}` : file.meta)}
|
||||
</span>
|
||||
</div>
|
||||
{renderActions && (
|
||||
<div className="fm-actions">{renderActions(file)}</div>
|
||||
)}
|
||||
</li>
|
||||
))
|
||||
)}
|
||||
</ul>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
33
ui/common/components/StatusDot.tsx
Normal file
33
ui/common/components/StatusDot.tsx
Normal file
@@ -0,0 +1,33 @@
|
||||
/**
 * StatusDot — small colored indicator for connection/state.
 */

// Maps a state name to a CSS color variable; unlisted states fall back to
// var(--text-muted) inside the component.
const STATE_COLORS: Record<string, string> = {
    connected: "var(--success)",
    idle: "var(--text-muted)",
    processing: "var(--processing)",
    stopped: "var(--text-muted)",
    error: "var(--error)",
    done: "var(--success)",
};
|
||||
|
||||
interface StatusDotProps {
|
||||
state: string;
|
||||
glow?: boolean;
|
||||
}
|
||||
|
||||
export function StatusDot({ state, glow = false }: StatusDotProps) {
|
||||
const color = STATE_COLORS[state] || "var(--text-muted)";
|
||||
return (
|
||||
<span
|
||||
style={{
|
||||
display: "inline-block",
|
||||
width: 8,
|
||||
height: 8,
|
||||
borderRadius: "50%",
|
||||
background: color,
|
||||
boxShadow: glow ? `0 0 6px ${color}` : undefined,
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
109
ui/common/styles/theme.css
Normal file
109
ui/common/styles/theme.css
Normal file
@@ -0,0 +1,109 @@
|
||||
/**
 * MPR Shared Theme — CSS custom properties + base styles.
 * Import from any UI app: @import "../../common/styles/theme.css";
 */

:root {
  /* Dark palette: backgrounds, borders, and text tiers. */
  --bg-primary: #0f0f0f;
  --bg-panel: #1a1a1a;
  --bg-surface: #141414;
  --bg-input: #2a2a2a;
  --border: #2a2a2a;
  --border-light: #333;
  --text-primary: #e0e0e0;
  --text-secondary: #999;
  --text-muted: #666;
  /* Semantic accent colors used by status UI. */
  --accent: #3b82f6;
  --success: #10b981;
  --warning: #f59e0b;
  --error: #ef4444;
  --processing: #3b82f6;

  --radius: 8px;
  --radius-sm: 4px;
  /* NOTE(review): this stack leads with system UI fonts, so "--font-mono" only
     yields monospace if "Fira Code" is installed — confirm the intended order. */
  --font-mono: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto,
    "Fira Code", monospace, sans-serif;
  --font-size: 14px;
  --font-size-sm: 0.8rem;
  --font-size-xs: 0.75rem;
}

/* Minimal reset. */
* {
  box-sizing: border-box;
  margin: 0;
  padding: 0;
}

body {
  font-family: var(--font-mono);
  background: var(--bg-primary);
  color: var(--text-primary);
  font-size: var(--font-size);
}

/* Scrollbar */
::-webkit-scrollbar {
  width: 6px;
}
::-webkit-scrollbar-track {
  background: transparent;
}
::-webkit-scrollbar-thumb {
  background: var(--border-light);
  border-radius: 3px;
}

/* Shared button base */
button {
  cursor: pointer;
  border: none;
  border-radius: var(--radius-sm);
  font-family: var(--font-mono);
  font-size: var(--font-size-sm);
  transition: opacity 0.15s;
}
button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

/* Shared input base */
input,
select {
  font-family: var(--font-mono);
  font-size: var(--font-size-sm);
  background: var(--bg-input);
  color: var(--text-primary);
  border: 1px solid var(--border);
  border-radius: var(--radius-sm);
  padding: 0.4rem 0.5rem;
}
input:focus,
select:focus {
  outline: none;
  border-color: var(--accent);
}

/* Panel base */
.panel {
  background: var(--bg-panel);
  border: 1px solid var(--border);
  border-radius: var(--radius);
  padding: 1rem;
}

.panel-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 0.75rem;
  position: relative;
}

.panel-header h2 {
  font-size: 0.85rem;
  font-weight: 600;
  text-transform: uppercase;
  letter-spacing: 0.05em;
  color: var(--text-secondary);
}
|
||||
170
ui/common/types/generated.ts
Normal file
170
ui/common/types/generated.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
/**
|
||||
* TypeScript Types - GENERATED FILE
|
||||
*
|
||||
* Do not edit directly. Regenerate using modelgen.
|
||||
*/
|
||||
|
||||
export type AssetStatus = "pending" | "ready" | "error";
|
||||
export type JobStatus = "pending" | "processing" | "completed" | "failed" | "cancelled";
|
||||
export type ChunkJobStatus = "pending" | "chunking" | "processing" | "collecting" | "completed" | "failed" | "cancelled";
|
||||
|
||||
export interface MediaAsset {
|
||||
id: string;
|
||||
filename: string;
|
||||
file_path: string;
|
||||
status: AssetStatus;
|
||||
error_message: string | null;
|
||||
file_size: number | null;
|
||||
duration: number | null;
|
||||
video_codec: string | null;
|
||||
audio_codec: string | null;
|
||||
width: number | null;
|
||||
height: number | null;
|
||||
framerate: number | null;
|
||||
bitrate: number | null;
|
||||
properties: Record<string, unknown>;
|
||||
comments: string;
|
||||
tags: string[];
|
||||
created_at: string | null;
|
||||
updated_at: string | null;
|
||||
}
|
||||
|
||||
export interface TranscodePreset {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
is_builtin: boolean;
|
||||
container: string;
|
||||
video_codec: string;
|
||||
video_bitrate: string | null;
|
||||
video_crf: number | null;
|
||||
video_preset: string | null;
|
||||
resolution: string | null;
|
||||
framerate: number | null;
|
||||
audio_codec: string;
|
||||
audio_bitrate: string | null;
|
||||
audio_channels: number | null;
|
||||
audio_samplerate: number | null;
|
||||
extra_args: string[];
|
||||
created_at: string | null;
|
||||
updated_at: string | null;
|
||||
}
|
||||
|
||||
export interface TranscodeJob {
|
||||
id: string;
|
||||
source_asset_id: string;
|
||||
preset_id: string | null;
|
||||
preset_snapshot: Record<string, unknown>;
|
||||
trim_start: number | null;
|
||||
trim_end: number | null;
|
||||
output_filename: string;
|
||||
output_path: string | null;
|
||||
output_asset_id: string | null;
|
||||
status: JobStatus;
|
||||
progress: number;
|
||||
current_frame: number | null;
|
||||
current_time: number | null;
|
||||
speed: string | null;
|
||||
error_message: string | null;
|
||||
celery_task_id: string | null;
|
||||
execution_arn: string | null;
|
||||
priority: number;
|
||||
created_at: string | null;
|
||||
started_at: string | null;
|
||||
completed_at: string | null;
|
||||
}
|
||||
|
||||
export interface ChunkJob {
|
||||
id: string;
|
||||
source_asset_id: string;
|
||||
chunk_duration: number;
|
||||
num_workers: number;
|
||||
max_retries: number;
|
||||
processor_type: string;
|
||||
status: ChunkJobStatus;
|
||||
progress: number;
|
||||
total_chunks: number;
|
||||
processed_chunks: number;
|
||||
failed_chunks: number;
|
||||
retry_count: number;
|
||||
error_message: string | null;
|
||||
throughput_mbps: number | null;
|
||||
elapsed_seconds: number | null;
|
||||
celery_task_id: string | null;
|
||||
priority: number;
|
||||
created_at: string | null;
|
||||
started_at: string | null;
|
||||
completed_at: string | null;
|
||||
}
|
||||
|
||||
export interface CreateJobRequest {
|
||||
source_asset_id: string;
|
||||
preset_id: string | null;
|
||||
trim_start: number | null;
|
||||
trim_end: number | null;
|
||||
output_filename: string | null;
|
||||
priority: number;
|
||||
}
|
||||
|
||||
export interface UpdateAssetRequest {
|
||||
comments: string | null;
|
||||
tags: string[] | null;
|
||||
}
|
||||
|
||||
/** Health/version information reported by the backend. */
export interface SystemStatus {
  status: string;
  version: string;
}
|
||||
|
||||
/** Result summary of a media-folder scan. */
export interface ScanResult {
  // Files discovered during the scan.
  found: number;
  // Files newly registered as assets.
  registered: number;
  // Files not registered — presumably already known or ignored; verify.
  skipped: number;
  files: string[];
}
|
||||
|
||||
/** Generic acknowledgement returned by delete mutations. */
export interface DeleteResult {
  ok: boolean;
}
|
||||
|
||||
/** Snapshot of transcode-worker availability and capabilities. */
export interface WorkerStatus {
  available: boolean;
  active_jobs: number;
  supported_codecs: string[];
  gpu_available: boolean;
}
|
||||
|
||||
/**
 * Per-chunk progress event — presumably streamed while a ChunkJob runs;
 * confirm against the event source.
 */
export interface ChunkEvent {
  sequence: number;
  status: string;
  size: number | null;
  worker_id: string | null;
  // Time spent on this chunk; units not evident here — presumably seconds.
  processing_time: number | null;
  error: string | null;
  retries: number;
}
|
||||
|
||||
/** Per-worker state event emitted during a chunked run. */
export interface WorkerEvent {
  worker_id: string;
  state: string;
  current_chunk: number | null;
  processed: number;
  errors: number;
  retries: number;
}
|
||||
|
||||
/** Aggregate pipeline counters for a chunked-processing run. */
export interface PipelineStats {
  total_chunks: number;
  processed: number;
  failed: number;
  retries: number;
  // Elapsed time; units not evident here — presumably seconds, confirm.
  elapsed: number;
  throughput_mbps: number;
  queue_size: number;
}
|
||||
|
||||
/** One output artifact produced by a chunk job. */
export interface ChunkOutputFile {
  // Storage key — looks object-store (S3) style; confirm.
  key: string;
  size: number;
  url: string;
}
|
||||
21
ui/common/utils/format.ts
Normal file
21
ui/common/utils/format.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
/**
|
||||
* Shared formatting utilities.
|
||||
*/
|
||||
|
||||
export function formatSize(bytes: number | null | undefined): string {
|
||||
if (!bytes) return "—";
|
||||
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(0)} KB`;
|
||||
if (bytes < 1024 * 1024 * 1024)
|
||||
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
||||
return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`;
|
||||
}
|
||||
|
||||
export function formatDuration(seconds: number | null | undefined): string {
|
||||
if (!seconds) return "—";
|
||||
const h = Math.floor(seconds / 3600);
|
||||
const m = Math.floor((seconds % 3600) / 60);
|
||||
const s = Math.floor(seconds % 60);
|
||||
if (h > 0)
|
||||
return `${h}:${m.toString().padStart(2, "0")}:${s.toString().padStart(2, "0")}`;
|
||||
return `${m}:${s.toString().padStart(2, "0")}`;
|
||||
}
|
||||
1736
ui/timeline/package-lock.json
generated
Normal file
1736
ui/timeline/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,15 +1,4 @@
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family:
|
||||
-apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: #1a1a1a;
|
||||
color: #e0e0e0;
|
||||
}
|
||||
@import "../../common/styles/theme.css";
|
||||
|
||||
.app {
|
||||
display: flex;
|
||||
|
||||
@@ -115,7 +115,6 @@ function App() {
|
||||
setJobs(data);
|
||||
};
|
||||
|
||||
const assetJobs = jobs.filter((j) => j.source_asset_id === selectedAsset?.id);
|
||||
const completedJobs = jobs.filter((j) => j.status === "completed");
|
||||
|
||||
if (loading) return <div className="loading">Loading...</div>;
|
||||
|
||||
@@ -42,6 +42,8 @@ export default function JobPanel({
|
||||
preset_id: selectedPresetId || null,
|
||||
trim_start: hasTrim ? trimStart : null,
|
||||
trim_end: hasTrim ? trimEnd : null,
|
||||
output_filename: null,
|
||||
priority: 0,
|
||||
});
|
||||
onJobCreated();
|
||||
} catch (e) {
|
||||
|
||||
@@ -2,45 +2,17 @@
|
||||
* GraphQL API client
|
||||
*/
|
||||
|
||||
import { gql } from "../../common/api/graphql";
|
||||
import { getAssets, scanMediaFolder } from "../../common/api/media";
|
||||
import type {
|
||||
MediaAsset,
|
||||
TranscodePreset,
|
||||
TranscodeJob,
|
||||
CreateJobRequest,
|
||||
SystemStatus,
|
||||
MediaAsset,
|
||||
} from "./types";
|
||||
|
||||
const GRAPHQL_URL = "/api/graphql";
|
||||
|
||||
async function gql<T>(query: string, variables?: Record<string, unknown>): Promise<T> {
|
||||
const response = await fetch(GRAPHQL_URL, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ query, variables }),
|
||||
});
|
||||
|
||||
const json = await response.json();
|
||||
|
||||
if (json.errors?.length) {
|
||||
throw new Error(json.errors[0].message);
|
||||
}
|
||||
|
||||
return json.data as T;
|
||||
}
|
||||
|
||||
// Assets
/**
 * Fetch every media asset with its probe metadata.
 * The selection set mirrors the MediaAsset interface fields.
 */
export async function getAssets(): Promise<MediaAsset[]> {
  const data = await gql<{ assets: MediaAsset[] }>(`
    query {
      assets {
        id filename file_path status error_message file_size duration
        video_codec audio_codec width height framerate bitrate
        properties comments tags created_at updated_at
      }
    }
  `);
  return data.assets;
}
|
||||
export { getAssets, scanMediaFolder };
|
||||
|
||||
export async function getAsset(id: string): Promise<MediaAsset> {
|
||||
const data = await gql<{ asset: MediaAsset }>(`
|
||||
@@ -55,20 +27,6 @@ export async function getAsset(id: string): Promise<MediaAsset> {
|
||||
return data.asset;
|
||||
}
|
||||
|
||||
/**
 * Trigger a server-side scan of the media folder.
 *
 * @returns Counts of files found/registered/skipped plus the file list
 *          (same shape as the ScanResult interface).
 */
export async function scanMediaFolder(): Promise<{
  found: number;
  registered: number;
  skipped: number;
  files: string[];
}> {
  const data = await gql<{ scan_media_folder: { found: number; registered: number; skipped: number; files: string[] } }>(`
    mutation {
      scan_media_folder { found registered skipped files }
    }
  `);
  return data.scan_media_folder;
}
|
||||
|
||||
// Presets
|
||||
export async function getPresets(): Promise<TranscodePreset[]> {
|
||||
const data = await gql<{ presets: TranscodePreset[] }>(`
|
||||
|
||||
@@ -1,135 +1,21 @@
|
||||
/**
|
||||
* TypeScript Types - GENERATED FILE
|
||||
* TypeScript Types — re-exported from common generated types.
|
||||
*
|
||||
* Do not edit directly. Regenerate using modelgen.
|
||||
*/
|
||||
|
||||
export type AssetStatus = "pending" | "ready" | "error";
|
||||
export type JobStatus = "pending" | "processing" | "completed" | "failed" | "cancelled";
|
||||
export type ChunkJobStatus = "pending" | "chunking" | "processing" | "collecting" | "completed" | "failed" | "cancelled";
|
||||
|
||||
export interface MediaAsset {
|
||||
id: string;
|
||||
filename: string;
|
||||
file_path: string;
|
||||
status: AssetStatus;
|
||||
error_message: string | null;
|
||||
file_size: number | null;
|
||||
duration: number | null;
|
||||
video_codec: string | null;
|
||||
audio_codec: string | null;
|
||||
width: number | null;
|
||||
height: number | null;
|
||||
framerate: number | null;
|
||||
bitrate: number | null;
|
||||
properties: Record<string, unknown>;
|
||||
comments: string;
|
||||
tags: string[];
|
||||
created_at: string | null;
|
||||
updated_at: string | null;
|
||||
}
|
||||
|
||||
export interface TranscodePreset {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
is_builtin: boolean;
|
||||
container: string;
|
||||
video_codec: string;
|
||||
video_bitrate: string | null;
|
||||
video_crf: number | null;
|
||||
video_preset: string | null;
|
||||
resolution: string | null;
|
||||
framerate: number | null;
|
||||
audio_codec: string;
|
||||
audio_bitrate: string | null;
|
||||
audio_channels: number | null;
|
||||
audio_samplerate: number | null;
|
||||
extra_args: string[];
|
||||
created_at: string | null;
|
||||
updated_at: string | null;
|
||||
}
|
||||
|
||||
export interface TranscodeJob {
|
||||
id: string;
|
||||
source_asset_id: string;
|
||||
preset_id: string | null;
|
||||
preset_snapshot: Record<string, unknown>;
|
||||
trim_start: number | null;
|
||||
trim_end: number | null;
|
||||
output_filename: string;
|
||||
output_path: string | null;
|
||||
output_asset_id: string | null;
|
||||
status: JobStatus;
|
||||
progress: number;
|
||||
current_frame: number | null;
|
||||
current_time: number | null;
|
||||
speed: string | null;
|
||||
error_message: string | null;
|
||||
celery_task_id: string | null;
|
||||
execution_arn: string | null;
|
||||
priority: number;
|
||||
created_at: string | null;
|
||||
started_at: string | null;
|
||||
completed_at: string | null;
|
||||
}
|
||||
|
||||
export interface ChunkJob {
|
||||
id: string;
|
||||
source_asset_id: string;
|
||||
chunk_duration: number;
|
||||
num_workers: number;
|
||||
max_retries: number;
|
||||
processor_type: string;
|
||||
status: ChunkJobStatus;
|
||||
progress: number;
|
||||
total_chunks: number;
|
||||
processed_chunks: number;
|
||||
failed_chunks: number;
|
||||
retry_count: number;
|
||||
error_message: string | null;
|
||||
throughput_mbps: number | null;
|
||||
elapsed_seconds: number | null;
|
||||
celery_task_id: string | null;
|
||||
priority: number;
|
||||
created_at: string | null;
|
||||
started_at: string | null;
|
||||
completed_at: string | null;
|
||||
}
|
||||
|
||||
export interface CreateJobRequest {
|
||||
source_asset_id: string;
|
||||
preset_id: string | null;
|
||||
trim_start: number | null;
|
||||
trim_end: number | null;
|
||||
output_filename: string | null;
|
||||
priority: number;
|
||||
}
|
||||
|
||||
export interface UpdateAssetRequest {
|
||||
comments: string | null;
|
||||
tags: string[] | null;
|
||||
}
|
||||
|
||||
export interface SystemStatus {
|
||||
status: string;
|
||||
version: string;
|
||||
}
|
||||
|
||||
export interface ScanResult {
|
||||
found: number;
|
||||
registered: number;
|
||||
skipped: number;
|
||||
files: string[];
|
||||
}
|
||||
|
||||
export interface DeleteResult {
|
||||
ok: boolean;
|
||||
}
|
||||
|
||||
export interface WorkerStatus {
|
||||
available: boolean;
|
||||
active_jobs: number;
|
||||
supported_codecs: string[];
|
||||
gpu_available: boolean;
|
||||
}
|
||||
export type {
|
||||
AssetStatus,
|
||||
JobStatus,
|
||||
ChunkJobStatus,
|
||||
MediaAsset,
|
||||
TranscodePreset,
|
||||
TranscodeJob,
|
||||
ChunkJob,
|
||||
CreateJobRequest,
|
||||
UpdateAssetRequest,
|
||||
SystemStatus,
|
||||
ScanResult,
|
||||
DeleteResult,
|
||||
WorkerStatus,
|
||||
} from "../../common/types/generated";
|
||||
|
||||
@@ -15,7 +15,10 @@
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"rootDir": "..",
|
||||
"typeRoots": ["./node_modules/@types"],
|
||||
},
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx"],
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "../common/**/*.ts", "../common/**/*.tsx"],
|
||||
"exclude": ["../common/api/grpc/**"],
|
||||
"references": [{ "path": "./tsconfig.node.json" }],
|
||||
}
|
||||
|
||||
@@ -11,6 +11,9 @@ export default defineConfig({
|
||||
hmr: {
|
||||
path: "/timeline/@vite/client",
|
||||
},
|
||||
fs: {
|
||||
allow: [".."],
|
||||
},
|
||||
proxy: {
|
||||
"/api": {
|
||||
target: "http://fastapi:8702",
|
||||
|
||||
Reference in New Issue
Block a user