Implement a prototype of a resource allocation system for a distributed parallel computing infrastructure.
There are n resources and m tasks to schedule on them, where the ith task has a processing time of burstTime[i]. The total load time of a resource is the sum of the burst times of the tasks assigned to it. However, a resource can only be allocated tasks in a contiguous segment, i.e. from some index x to some index y: [x, x + 1, x + 2, ..., y].
Find the minimum possible value of the maximum total load time over all resources when tasks are allocated optimally.
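For example, with 3 resources and burst times {2, 4, 6, 8, 10} (the sample used in main below), one optimal allocation is {2, 4, 6} | {8} | {10}, so the expected answer is 12.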
What I have tried:
#include <stdio.h>

/* Greedy feasibility check: can all m tasks, kept in order as contiguous
   segments, be placed on at most n resources with every load <= maxLoad? */
int canAllocate(int tasks[], int n, int m, int maxLoad) {
    int resourceIdx = 0;
    int currentLoad = 0;
    for (int i = 0; i < m; i++) {
        if (tasks[i] > maxLoad) {
            return 0;                   /* a single task already exceeds the limit */
        }
        if (currentLoad + tasks[i] <= maxLoad) {
            currentLoad += tasks[i];
        } else {
            /* current resource is full: open the next contiguous segment */
            resourceIdx++;
            currentLoad = tasks[i];
            if (resourceIdx >= n) {
                return 0;               /* would need more than n resources */
            }
        }
    }
    return 1;
}
/* Binary search on the answer: the smallest maxLoad that canAllocate accepts. */
int minMaxTotalLoadTime(int resources, int tasks[], int m) {
    int low = 0;
    int high = 0;
    for (int i = 0; i < m; i++) {
        high += tasks[i];               /* upper bound: one resource takes everything */
    }
    while (low < high) {
        int mid = low + (high - low) / 2;   /* avoids overflow of low + high */
        if (canAllocate(tasks, resources, m, mid)) {
            high = mid;                 /* feasible: try a smaller limit */
        } else {
            low = mid + 1;              /* infeasible: the limit must grow */
        }
    }
    return high;
}
int main() {
    int resources = 3;
    int tasks[] = {2, 4, 6, 8, 10};
    int m = sizeof(tasks) / sizeof(tasks[0]);
    int result = minMaxTotalLoadTime(resources, tasks, m);
    printf("Minimum possible maximum total load time: %d\n", result);
    return 0;
}
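To sanity-check the binary search on small inputs, a naive cross-check can be added to the same file (declare or define it above main before calling it). The sketch below is illustrative only, and bruteMinMax is a name I made up: it tries every way of cutting the task array into at most n contiguous pieces, allowing empty pieces, and returns the best achievable maximum load. For the sample input it should also report 12.

/* Illustrative cross-check: exhaustively try every contiguous split of
   tasks[start..m-1] over n resources (empty segments allowed) and return
   the minimum achievable maximum load. Exponential, so small inputs only. */
int bruteMinMax(int tasks[], int start, int m, int n) {
    if (n == 1) {
        int sum = 0;                    /* the last resource takes whatever is left */
        for (int i = start; i < m; i++) sum += tasks[i];
        return sum;
    }
    int best = -1;
    int firstLoad = 0;                  /* load of the first remaining resource */
    for (int end = start; end <= m; end++) {
        /* first resource takes tasks[start..end-1]; solve the rest recursively */
        int rest = bruteMinMax(tasks, end, m, n - 1);
        int worst = firstLoad > rest ? firstLoad : rest;
        if (best < 0 || worst < best) best = worst;
        if (end < m) firstLoad += tasks[end];    /* grow the first segment by one task */
    }
    return best;
}

/* Example call from main():
   printf("Brute force check: %d\n", bruteMinMax(tasks, 0, m, resources));   // prints 12 here */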