BREAKING CHANGE(constraints): make TaskConstraintGroup constraint matcher input-aware and add shouldExecute pre-execution hook
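
Previously the constraint matcher only saw the task itself
(`constraintKeyForTask(task)`). It is now `constraintKeyForExecution(task, input?)`,
so the per-call input passed to `triggerTaskConstrained(task, input)` can take part
in the constraint key, and an optional `shouldExecute(task, input?)` hook can skip a
run entirely, in which case the trigger resolves to `undefined`. A minimal sketch of
the new options, based on the tests below (the import path and the `certCache`
lookup are illustrative assumptions, not part of this diff):

    import * as taskbuffer from '@push.rocks/taskbuffer'; // import path assumed

    const manager = new taskbuffer.TaskManager();
    const certCache = new Map<string, string>(); // illustrative external state

    const constraint = new taskbuffer.TaskConstraintGroup({
      name: 'cert-mutex',
      maxConcurrent: 1,
      // the key may now depend on the per-call input, not just the task
      constraintKeyForExecution: (_task, input?: string) =>
        input ? input.split('.').slice(-2).join('.') : null,
      // pre-execution hook: returning false skips the run entirely
      shouldExecute: (_task, input?: string) =>
        !input || certCache.get(input.split('.').slice(-2).join('.')) !== 'wildcard',
    });
    manager.addConstraintGroup(constraint);

    const getCert = new taskbuffer.Task({
      name: 'get-cert',
      taskFunction: async (domain: string) => `cert-for-${domain}`,
    });
    manager.addTask(getCert);

    // same registrable domain → serialized; different domains → run in parallel
    await Promise.all([
      manager.triggerTaskConstrained(getCert, 'a.example.com'),
      manager.triggerTaskConstrained(getCert, 'b.other.org'),
    ]);
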
@@ -54,7 +54,7 @@ tap.test('should enforce group concurrency limit', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup<{ group: string }>({
    name: 'concurrency-test',
    maxConcurrent: 2,
-   constraintKeyForTask: (task) =>
+   constraintKeyForExecution: (task) =>
      task.data.group === 'workers' ? 'workers' : null,
  });
  manager.addConstraintGroup(constraint);
@@ -97,7 +97,7 @@ tap.test('should enforce key-based mutual exclusion', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup<{ domain: string }>({
    name: 'domain-mutex',
    maxConcurrent: 1,
-   constraintKeyForTask: (task) => task.data.domain,
+   constraintKeyForExecution: (task) => task.data.domain,
  });
  manager.addConstraintGroup(constraint);

@@ -149,7 +149,7 @@ tap.test('should enforce cooldown between task executions', async () => {
    name: 'cooldown-test',
    maxConcurrent: 1,
    cooldownMs: 300,
-   constraintKeyForTask: (task) => task.data.key,
+   constraintKeyForExecution: (task) => task.data.key,
  });
  manager.addConstraintGroup(constraint);

@@ -194,13 +194,13 @@ tap.test('should apply multiple constraint groups to one task', async () => {
  const globalConstraint = new taskbuffer.TaskConstraintGroup({
    name: 'global',
    maxConcurrent: 3,
-   constraintKeyForTask: () => 'all',
+   constraintKeyForExecution: () => 'all',
  });

  const groupConstraint = new taskbuffer.TaskConstraintGroup<{ group: string }>({
    name: 'group',
    maxConcurrent: 1,
-   constraintKeyForTask: (task) => task.data.group,
+   constraintKeyForExecution: (task) => task.data.group,
  });

  manager.addConstraintGroup(globalConstraint);
@@ -242,7 +242,7 @@ tap.test('should run task unconstrained when matcher returns null', async () =>
  const constraint = new taskbuffer.TaskConstraintGroup<{ skip: boolean }>({
    name: 'selective',
    maxConcurrent: 1,
-   constraintKeyForTask: (task) => (task.data.skip ? null : 'constrained'),
+   constraintKeyForExecution: (task) => (task.data.skip ? null : 'constrained'),
  });
  manager.addConstraintGroup(constraint);

@@ -269,7 +269,7 @@ tap.test('should release slot and drain queue when task fails', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup<{ key: string }>({
    name: 'error-drain',
    maxConcurrent: 1,
-   constraintKeyForTask: (task) => task.data.key,
+   constraintKeyForExecution: (task) => task.data.key,
  });
  manager.addConstraintGroup(constraint);

@@ -313,7 +313,7 @@ tap.test('should route triggerTaskByName through constraints', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'manager-integration',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'all',
+   constraintKeyForExecution: () => 'all',
  });
  manager.addConstraintGroup(constraint);

@@ -356,7 +356,7 @@ tap.test('should remove a constraint group by name', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'removable',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'all',
+   constraintKeyForExecution: () => 'all',
  });

  manager.addConstraintGroup(constraint);
@@ -373,7 +373,7 @@ tap.test('should reset constraint group state', async () => {
    name: 'resettable',
    maxConcurrent: 2,
    cooldownMs: 1000,
-   constraintKeyForTask: () => 'key',
+   constraintKeyForExecution: () => 'key',
  });

  // Simulate usage
@@ -395,7 +395,7 @@ tap.test('should return correct result from queued tasks', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'return-value-test',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'shared',
+   constraintKeyForExecution: () => 'shared',
  });
  manager.addConstraintGroup(constraint);

@@ -434,7 +434,7 @@ tap.test('should propagate errors from queued tasks (catchErrors: false)', async
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'error-propagation',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'shared',
+   constraintKeyForExecution: () => 'shared',
  });
  manager.addConstraintGroup(constraint);

@@ -484,7 +484,7 @@ tap.test('should route triggerTask() through constraints', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'trigger-task-test',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'all',
+   constraintKeyForExecution: () => 'all',
  });
  manager.addConstraintGroup(constraint);

@@ -523,7 +523,7 @@ tap.test('should route addExecuteRemoveTask() through constraints', async () =>
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'add-execute-remove-test',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'all',
+   constraintKeyForExecution: () => 'all',
  });
  manager.addConstraintGroup(constraint);

@@ -561,7 +561,7 @@ tap.test('should execute queued tasks in FIFO order', async () => {
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'fifo-test',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'shared',
+   constraintKeyForExecution: () => 'shared',
  });
  manager.addConstraintGroup(constraint);

@@ -603,7 +603,7 @@ tap.test('should enforce both concurrency and cooldown together', async () => {
    name: 'combined-test',
    maxConcurrent: 2,
    cooldownMs: 200,
-   constraintKeyForTask: () => 'shared',
+   constraintKeyForExecution: () => 'shared',
  });
  manager.addConstraintGroup(constraint);

@@ -645,7 +645,7 @@ tap.test('should unblock queued tasks when constraint group is removed', async (
  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'removable-constraint',
    maxConcurrent: 1,
-   constraintKeyForTask: () => 'shared',
+   constraintKeyForExecution: () => 'shared',
  });
  manager.addConstraintGroup(constraint);

@@ -690,4 +690,184 @@ tap.test('should unblock queued tasks when constraint group is removed', async (
  await manager.stop();
});

// Test 20: Intra-task concurrency by input — same task, different inputs, key extracts TLD
tap.test('should serialize same-TLD inputs and parallelize different-TLD inputs', async () => {
  const manager = new taskbuffer.TaskManager();
  const log: string[] = [];

  const extractTLD = (domain: string) => {
    const parts = domain.split('.');
    return parts.slice(-2).join('.');
  };

  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'tld-mutex',
    maxConcurrent: 1,
    constraintKeyForExecution: (_task, input?: string) => {
      if (!input) return null;
      return extractTLD(input);
    },
  });
  manager.addConstraintGroup(constraint);

  const getCert = new taskbuffer.Task({
    name: 'get-cert',
    taskFunction: async (domain: string) => {
      log.push(`${domain}-start`);
      await smartdelay.delayFor(100);
      log.push(`${domain}-end`);
    },
  });
  manager.addTask(getCert);

  await Promise.all([
    manager.triggerTaskConstrained(getCert, 'a.example.com'),
    manager.triggerTaskConstrained(getCert, 'b.example.com'),
    manager.triggerTaskConstrained(getCert, 'c.other.org'),
  ]);

  // a.example.com and b.example.com share TLD "example.com" → serialized
  const aEndIdx = log.indexOf('a.example.com-end');
  const bStartIdx = log.indexOf('b.example.com-start');
  expect(bStartIdx).toBeGreaterThanOrEqual(aEndIdx);

  // c.other.org has different TLD → runs in parallel with a.example.com
  const aStartIdx = log.indexOf('a.example.com-start');
  const cStartIdx = log.indexOf('c.other.org-start');
  expect(cStartIdx).toBeLessThan(aEndIdx);

  await manager.stop();
});

// Test 21: shouldExecute skips queued task based on external state
tap.test('should skip queued task when shouldExecute returns false', async () => {
  const manager = new taskbuffer.TaskManager();
  const execLog: string[] = [];
  const certCache = new Map<string, string>();

  const extractTLD = (domain: string) => {
    const parts = domain.split('.');
    return parts.slice(-2).join('.');
  };

  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'cert-mutex',
    maxConcurrent: 1,
    constraintKeyForExecution: (_task, input?: string) => {
      if (!input) return null;
      return extractTLD(input);
    },
    shouldExecute: (_task, input?: string) => {
      if (!input) return true;
      return certCache.get(extractTLD(input)) !== 'wildcard';
    },
  });
  manager.addConstraintGroup(constraint);

  const getCert = new taskbuffer.Task({
    name: 'get-cert-skip',
    taskFunction: async (domain: string) => {
      execLog.push(domain);
      // First execution sets wildcard in cache
      certCache.set(extractTLD(domain), 'wildcard');
      await smartdelay.delayFor(100);
      return `cert-for-${domain}`;
    },
  });
  manager.addTask(getCert);

  const [r1, r2] = await Promise.all([
    manager.triggerTaskConstrained(getCert, 'app.example.com'),
    manager.triggerTaskConstrained(getCert, 'api.example.com'),
  ]);

  // First ran, second was skipped
  expect(execLog).toEqual(['app.example.com']);
  expect(r1).toEqual('cert-for-app.example.com');
  expect(r2).toEqual(undefined);

  await manager.stop();
});

// Test 22: shouldExecute on immediate (non-queued) trigger
tap.test('should skip immediate trigger when shouldExecute returns false', async () => {
  const manager = new taskbuffer.TaskManager();
  let executed = false;

  const constraint = new taskbuffer.TaskConstraintGroup({
    name: 'always-skip',
    maxConcurrent: 10,
    constraintKeyForExecution: () => 'all',
    shouldExecute: () => false,
  });
  manager.addConstraintGroup(constraint);

  const task = new taskbuffer.Task({
    name: 'skip-immediate',
    taskFunction: async () => {
      executed = true;
      return 'should-not-see';
    },
  });
  manager.addTask(task);

  const result = await manager.triggerTaskConstrained(task);
  expect(executed).toBeFalse();
  expect(result).toEqual(undefined);

  await manager.stop();
});

// Test 23: Mixed task.data + input constraint key
tap.test('should use both task.data and input in constraint key', async () => {
  const manager = new taskbuffer.TaskManager();
  let running = 0;
  let maxRunning = 0;

  const constraint = new taskbuffer.TaskConstraintGroup<{ provider: string }>({
    name: 'provider-domain',
    maxConcurrent: 1,
    constraintKeyForExecution: (task, input?: string) => {
      return `${task.data.provider}:${input || 'default'}`;
    },
  });
  manager.addConstraintGroup(constraint);

  const makeTask = (name: string, provider: string) =>
    new taskbuffer.Task<undefined, [], { provider: string }>({
      name,
      data: { provider },
      taskFunction: async () => {
        running++;
        maxRunning = Math.max(maxRunning, running);
        await smartdelay.delayFor(100);
        running--;
      },
    });

  // Same provider + same domain input → should serialize
  const t1 = makeTask('mixed-1', 'acme');
  const t2 = makeTask('mixed-2', 'acme');
  // Different provider + same domain → parallel
  const t3 = makeTask('mixed-3', 'cloudflare');

  manager.addTask(t1);
  manager.addTask(t2);
  manager.addTask(t3);

  await Promise.all([
    manager.triggerTaskConstrained(t1, 'example.com'),
    manager.triggerTaskConstrained(t2, 'example.com'),
    manager.triggerTaskConstrained(t3, 'example.com'),
  ]);

  // t1 and t2 share key "acme:example.com" → serialized (max 1 at a time)
  // t3 has key "cloudflare:example.com" → parallel with t1
  // So maxRunning should be exactly 2 (t1 + t3, or t3 + t2)
  expect(maxRunning).toBeLessThanOrEqual(2);
  expect(maxRunning).toBeGreaterThanOrEqual(2);

  await manager.stop();
});

export default tap.start();