85bf1341f3
Frontend Enhancements:
- Complete React TypeScript frontend with modern UI components
- Distributed workflows management interface with real-time updates
- Socket.IO integration for live agent status monitoring
- Agent management dashboard with cluster visualization
- Project management interface with metrics and task tracking
- Responsive design with proper error handling and loading states

Backend Infrastructure:
- Distributed coordinator for multi-agent workflow orchestration
- Cluster management API with comprehensive agent operations
- Enhanced database models for agents and projects
- Project service for filesystem-based project discovery
- Performance monitoring and metrics collection
- Comprehensive API documentation and error handling

Documentation:
- Complete distributed development guide (README_DISTRIBUTED.md)
- Comprehensive development report with architecture insights
- System configuration templates and deployment guides

The platform now provides a complete web interface for managing the distributed AI cluster with real-time monitoring, workflow orchestration, and agent coordination capabilities.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
72 lines · 1.5 KiB · JavaScript
'use strict';
const Queue = require('yocto-queue');

const pLimit = concurrency => {
	if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) {
		throw new TypeError('Expected `concurrency` to be a number from 1 and up');
	}

	const queue = new Queue();
	let activeCount = 0;

	const next = () => {
		activeCount--;

		if (queue.size > 0) {
			queue.dequeue()();
		}
	};

	const run = async (fn, resolve, ...args) => {
		activeCount++;

		const result = (async () => fn(...args))();

		resolve(result);

		try {
			await result;
		} catch {}

		next();
	};

	const enqueue = (fn, resolve, ...args) => {
		queue.enqueue(run.bind(null, fn, resolve, ...args));

		(async () => {
			// This function needs to wait until the next microtask before comparing
			// `activeCount` to `concurrency`, because `activeCount` is updated asynchronously
			// when the run function is dequeued and called. The comparison in the if-statement
			// needs to happen asynchronously as well to get an up-to-date value for `activeCount`.
			await Promise.resolve();

			if (activeCount < concurrency && queue.size > 0) {
				queue.dequeue()();
			}
		})();
	};

	const generator = (fn, ...args) => new Promise(resolve => {
		enqueue(fn, resolve, ...args);
	});

	Object.defineProperties(generator, {
		activeCount: {
			get: () => activeCount
		},
		pendingCount: {
			get: () => queue.size
		},
		clearQueue: {
			value: () => {
				queue.clear();
			}
		}
	});

	return generator;
};

module.exports = pLimit;
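For context, this module limits how many promise-returning functions run at the same time. Below is a minimal usage sketch, assuming the file above is the CommonJS entry point of p-limit (required here via a relative path) and that `yocto-queue` is installed; the `wait` helper is invented purely for illustration.

const pLimit = require('./index.js');

// Allow at most 2 tasks to run concurrently.
const limit = pLimit(2);

// Hypothetical helper that stands in for real async work.
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));

const tasks = [
	limit(async () => { await wait(100); return 'first'; }),
	limit(async () => { await wait(100); return 'second'; }),
	limit(async () => { await wait(100); return 'third'; }) // queued until a slot frees up
];

(async () => {
	const results = await Promise.all(tasks);
	console.log(results); // ['first', 'second', 'third']
	console.log(limit.activeCount, limit.pendingCount); // both 0 once everything has settled
})();

Each call to `limit(fn, ...args)` resolves with `fn`'s result, while the internal queue ensures no more than `concurrency` functions are in flight; `activeCount`, `pendingCount`, and `clearQueue()` expose the limiter's state as defined above.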