Algorithm to distribute variable-size items into balanced groups while preserving item order

Question · 0 votes · 1 answer

I am looking for a way to split a list of items of varying sizes into N groups of similar total size while preserving the item order.

This looks related to Johnson's scheduling algorithm or to bin packing, but I still don't see how to implement it for N groups while preserving the item order.

Example of splitting into 3 groups:

Items to distribute:

  • Item A - size 5
  • Item B - size 1
  • Item C - size 8
  • Item D - size 2
  • Item E - size 3

Desired output:

Group 1 (total size 6): Item A, Item B
Group 2 (total size 8): Item C
Group 3 (total size 5): Item D, Item E
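
Here the total size is 5 + 1 + 8 + 2 + 3 = 19, so the ideal share per group is 19 / 3 ≈ 6.33; the grouping above (6, 8, 5) is about as close to that as contiguous runs allow.

My attempt below balances the group totals, but the groups are not contiguous runs of the original list: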

function distributeItems(items, numGroups) {
    const totalItems = items.length;
    const groupSizes = Array.from({ length: numGroups }, () => 0);
    const groups = Array.from({ length: numGroups }, () => []);

    for (let i = 0; i < totalItems; i++) {
        const currentItem = items[i];
        let minSizeIndex = 0;

        // find the group with the smallest total so far
        for (let j = 1; j < numGroups; j++) {
            if (groupSizes[j] < groupSizes[minSizeIndex]) {
                minSizeIndex = j;
            }
        }

        // greedy: put the item in the lightest group, ignoring its position in the list
        groups[minSizeIndex].push(currentItem);
        groupSizes[minSizeIndex] += currentItem.size;
    }

    for (let i = 0; i < numGroups; i++) {
        console.log(`Group ${i + 1} (total size ${groupSizes[i]}): ${groups[i].map(item => item.title).join(', ')}`);
    }
}

const items = [
    { title: 'Item A', size: 5 },
    { title: 'Item B', size: 1 },
    { title: 'Item C', size: 8 },
    { title: 'Item D', size: 2 },
    { title: 'Item E', size: 3 },
];

distributeItems(items, 3);
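
For comparison, a minimal order-preserving sketch (the helper name distributeInOrder and the half-size cut rule are illustrative assumptions, not taken from the answer below): walk the items in order and start a new group once the running total passes the next ideal boundary. It reproduces the desired output for this sample, though an exact method would solve the classic linear partition problem with dynamic programming.

function distributeInOrder(items, numGroups) {
    const totalSize = items.reduce((sum, item) => sum + item.size, 0);
    const ideal = totalSize / numGroups;      // ideal share per group
    const groups = Array.from({ length: numGroups }, () => []);
    let g = 0, running = 0;
    for (const item of items) {
        const boundary = ideal * (g + 1);     // where the current group should end
        // cut if placing the item here overshoots the boundary by more than half its size
        if (g < numGroups - 1 && running + item.size / 2 > boundary) g++;
        groups[g].push(item);
        running += item.size;
    }
    return groups; // [[A, B], [C], [D, E]] for the sample data
}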

Tags: javascript, algorithm, math, logic
1 Answer

The algorithm is as follows (worked through below for the sample data):

  1. Spread the item indices, repeating each index item.size times
  2. Split the spread indices into equal chunks, one per desired group
  3. Compare adjacent chunk edges and, where the same index spans two chunks, cut the smaller run of that index from its chunk
  4. Resolve edge runs of equal size, if any are found
  5. Map the chunks back into the final groups
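
For the sample data the first two steps look like this (a comment-only trace of what the code computes):

// step 1: indices 0..4 repeated by their sizes (5, 1, 8, 2, 3):
// spread  = [0,0,0,0,0,1,2,2,2,2,2,2,2,2,3,3,4,4,4]   (length 19)
// step 2: chunk width = 19 / 3 | 0 = 6, the last chunk takes the remainder:
// chunk 0 = [0,0,0,0,0,1]    -> Item A, Item B
// chunk 1 = [2,2,2,2,2,2]    -> Item C
// chunk 2 = [2,2,3,3,4,4,4]  -> index 2 spans chunks 1 and 2
// step 3: the run of 2s in chunk 2 (length 2) is smaller than the one in
// chunk 1 (length 6), so it is cut: chunk 2 = [3,3,4,4,4] -> Item D, Item E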

I haven't tested the equal-edges case, so you may want to prepare test data for it.

const items = [
    { title: 'Item A', size: 5 },
    { title: 'Item B', size: 1 },
    { title: 'Item C', size: 8 },
    { title: 'Item D', size: 2 },
    { title: 'Item E', size: 3 },
];

distributeItems(items, 3);

function distributeItems(items, groupNum) {
    // 1. spread: repeat each item's index `size` times
    const spread = items.reduce((r, item, i) => (r.push(...Array.from({ length: item.size }, () => i)), r), []);
    const chunks = [];
    let count = -1;
    const size = spread.length / groupNum | 0; // base chunk width (integer division)
    // 2. split into groupNum chunks; the last chunk takes any remainder
    while (++count < groupNum) chunks.push(spread.slice(size * count, count === groupNum - 1 ? undefined : size * (count + 1)));
    const equalEdges = [];

    // 3. compare adjacent chunk edges: the same index may span two chunks
    for (let i = 1; i < chunks.length; i++) {
        const chunk = chunks[i - 1], edge = chunk.at(-1);
        const next = chunks[i], nextEdge = next[0];
        if (edge !== nextEdge) { // definite cut
            continue;
        }
        let size = 1;
        for (let j = chunk.length - 2; j >= 0; j--) {
            if (chunk[j] === edge) size++;
        }
        let nextSize = 1;
        for (let j = 1; j < next.length; j++) {
            if (next[j] === nextEdge) nextSize++;
        }
        if (size > nextSize) {
            next.splice(0, nextSize);
        } else if (size < nextSize) {
            chunk.splice(chunk.length - size, size);
        } else { // cut from bigger chunk, but that's known after all unequal edges are processed
            equalEdges.push([chunk, next, size]);
        }
    }

    // we have some equal edges, cut from a bigger chunk (not tested)
    do {
        count = equalEdges.length;
        for (let i = 0; i < equalEdges.length; i++) {
            const [chunk, next, size] = equalEdges[i];
            if (chunk.length > next.length) {
                chunk.splice(chunk.length - size, size);
                equalEdges.splice(i--, 1);
            } else if (chunk.length < next.length) {
                next.splice(0, size);
                equalEdges.splice(i--, 1);
            }
        }

    } while (count !== equalEdges.length);

    // remaining equal edges after the loop above: chunk lengths match, so cut a random side
    for (const [chunk, next, size] of equalEdges) {
        if ((Math.random() * 2 | 0) === 0) { // parentheses needed: === binds tighter than |
            chunk.splice(chunk.length - size, size);
        } else {
            next.splice(0, size);
        }
    }

    // 5. map chunks back to items, collapsing repeated indices
    return chunks.map(chunk => {
        let idx = chunk[0];
        const group = [items[idx]];
        for (let i = 1; i < chunk.length; i++) {
            const next = chunk[i];
            if (next !== idx) group.push(items[next]);
            idx = next;
        }
        return group;
    });
}
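
A small driver to print the returned groups in the question's format:

const groups = distributeItems(items, 3);
groups.forEach((group, i) => {
    const total = group.reduce((sum, item) => sum + item.size, 0);
    console.log(`Group ${i + 1} (total size ${total}): ${group.map(item => item.title).join(', ')}`);
});
// Group 1 (total size 6): Item A, Item B
// Group 2 (total size 8): Item C
// Group 3 (total size 5): Item D, Item E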
