Let A and B be two sets. I'm looking for really fast or elegant ways to compute the set difference (A - B or A \ B, depending on your preference) between them. The two sets are stored and manipulated as JavaScript arrays, as the title says.
Notes:
- Gecko-specific tricks are okay
- I'd prefer sticking to native functions (but I am open to a lightweight library if it's way faster)
- I've seen, but not tested, JS.Set (see previous point)
Edit: I noticed a comment about sets containing duplicate elements. When I say "set" I'm referring to the mathematical definition, which means (among other things) that they do not contain duplicate elements.
I don't know if this is the most efficient, but it's perhaps the shortest:
var A = [1, 2, 3, 4];
var B = [1, 3, 4, 7];
var diff = A.filter(function(x) {
  return B.indexOf(x) < 0;
});
console.log(diff); // [2]
Updated to ES6:
const A = [1, 2, 3, 4];
const B = [1, 3, 4, 7];
const diff = A.filter(x => !B.includes(x));
console.log(diff); // [2]
Well, 7 years later, with ES6's Set object it's quite easy (but still not as compact as Python's A - B), and reportedly faster than indexOf for large arrays:
console.clear();
let a = new Set([1, 2, 3, 4]);
let b = new Set([5, 4, 3, 2]);
let a_minus_b = new Set([...a].filter(x => !b.has(x)));
let b_minus_a = new Set([...b].filter(x => !a.has(x)));
let a_intersect_b = new Set([...a].filter(x => b.has(x)));
let a_union_b = new Set([...a, ...b]);
console.log(...a_minus_b); // 1
console.log(...b_minus_a); // 5
console.log(...a_intersect_b); // 2 3 4
console.log(...a_union_b); // 1 2 3 4 5
Looking at a lot of these solutions, they do fine for small cases. But when you blow them up to a million items, the time complexity starts getting silly.
A.filter(v => !B.includes(v))
That is an O(N^2) solution. Since there is an O(N) solution, let's use it; you can easily modify it not to be a generator if your JS runtime doesn't support them (a non-generator variant is sketched after the example below).
function *setMinus(A, B) {
  const setA = new Set(A);
  const setB = new Set(B);
  // Remove every element of B from setA; whatever remains is A \ B
  for (const v of setB.values()) {
    setA.delete(v);
  }
  for (const v of setA.values()) {
    yield v;
  }
}

const a = [1, 2, 3];
const b = [2, 3, 4];
console.log(Array.from(setMinus(a, b))); // [ 1 ]
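If generators aren't available in your runtime, a minimal non-generator sketch of the same O(N) approach (the name setMinusArray is just for illustration) returns a plain array instead:
function setMinusArray(A, B) {
  const setA = new Set(A);
  // Delete every element of B from setA; whatever remains is A \ B
  for (const v of B) {
    setA.delete(v);
  }
  return [...setA];
}

console.log(setMinusArray([1, 2, 3], [2, 3, 4])); // [ 1 ]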
While this is a bit more complex than many of the other solutions, when you have large lists this will be far faster.
Let's take a quick look at the performance difference. Running it on a set of 1,000,000 random integers between 0 and 10,000, we see the following performance results.
setMinus time = 181 ms
diff time = 19099 ms
function buildList(count, range) {
  const result = [];
  for (let i = 0; i < count; i++) {
    result.push(Math.floor(Math.random() * range));
  }
  return result;
}

function *setMinus(A, B) {
  const setA = new Set(A);
  const setB = new Set(B);
  // Remove every element of B from setA; whatever remains is A \ B
  for (const v of setB.values()) {
    setA.delete(v);
  }
  for (const v of setA.values()) {
    yield v;
  }
}

function doDiff(A, B) {
  return A.filter(function(x) { return B.indexOf(x) < 0 });
}
const listA = buildList(100_000, 100_000_000);
const listB = buildList(100_000, 100_000_000);
let t0 = process.hrtime.bigint()
const _x = Array.from(setMinus(listA, listB))
let t1 = process.hrtime.bigint()
const _y = doDiff(listA, listB)
let t2 = process.hrtime.bigint()
console.log("setMinus time = ", (t1 - t0) / 1_000_000n, "ms");
console.log("diff time = ", (t2 - t1) / 1_000_000n, "ms");
You can use an object as a map to avoid linearly scanning B for each element of A, as in user187291's answer:
function setMinus(A, B) {
  var map = {}, C = [];

  for (var i = B.length; i--; )
    map[B[i].toSource()] = null; // any other value would do

  for (var i = A.length; i--; ) {
    if (!map.hasOwnProperty(A[i].toSource()))
      C.push(A[i]);
  }

  return C;
}
The non-standard toSource() method is used to get unique property names; if all elements already have unique string representations (as is the case with numbers), you can speed up the code by dropping the toSource() invocations.
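For example, with numeric elements the same approach without toSource() might look like this (a sketch based on the paragraph above; the name setMinusNumbers is just for illustration):
function setMinusNumbers(A, B) {
  var map = {}, C = [];

  // Numbers stringify uniquely, so they can be used as property names directly
  for (var i = B.length; i--; )
    map[B[i]] = null;

  for (var i = A.length; i--; ) {
    if (!map.hasOwnProperty(A[i]))
      C.push(A[i]);
  }

  return C;
}

console.log(setMinusNumbers([1, 2, 3, 4], [1, 3, 4, 7])); // [ 2 ]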
If you're using Sets, it can be quite simple and performant:
function setDifference(a, b) {
  return new Set(Array.from(a).filter(item => !b.has(item)));
}
Since Sets use hash functions under the hood, the has function is much faster than indexOf (this matters if you have, say, more than 100 items).
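For example, applied to the question's arrays (converted to Sets first):
const A = new Set([1, 2, 3, 4]);
const B = new Set([1, 3, 4, 7]);

console.log(setDifference(A, B)); // Set { 2 }
console.log([...setDifference(A, B)]); // [ 2 ] as a plain array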
The shortest, using jQuery, is:
var A = [1, 2, 3, 4];
var B = [1, 3, 4, 7];
var diff = $(A).not(B);
console.log(diff.toArray());
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
I would hash the array B, then keep values from the array A not present in B:
function getHash(array){
  // Hash an array into a set of properties
  //
  // params:
  //   array - (array) (!nil) the array to hash
  //
  // return: (object)
  //   hash object with one property set to true for each value in the array

  var hash = {};
  for (var i = 0; i < array.length; i++){
    hash[ array[i] ] = true;
  }
  return hash;
}

function getDifference(a, b){
  // compute the difference a\b
  //
  // params:
  //   a - (array) (!nil) first array as a set of values (no duplicates)
  //   b - (array) (!nil) second array as a set of values (no duplicates)
  //
  // return: (array)
  //   the set of values (no duplicates) in array a and not in b,
  //   listed in the same order as in array a.

  var hash = getHash(b);
  var diff = [];
  for (var i = 0; i < a.length; i++){
    var value = a[i];
    if ( !hash[value] ){
      diff.push(value);
    }
  }
  return diff;
}
Using Underscore.js (a library for functional JS):
>>> var foo = [1,2,3]
>>> var bar = [1,2,4]
>>> _.difference(foo, bar);
[3]
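_.difference keeps the values of its first array that are not present in the other, so the argument order matters; swap the arguments for the other direction:
>>> _.difference(bar, foo);
[4]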
Some simple functions, borrowing from @milan's answer:
const setDifference = (a, b) => new Set([...a].filter(x => !b.has(x)));
const setIntersection = (a, b) => new Set([...a].filter(x => b.has(x)));
const setUnion = (a, b) => new Set([...a, ...b]);
Usage:
const a = new Set([1, 2]);
const b = new Set([2, 3]);
setDifference(a, b); // Set { 1 }
setIntersection(a, b); // Set { 2 }
setUnion(a, b); // Set { 1, 2, 3 }
Incorporating the idea from Christoph and assuming a couple of non-standard iteration methods on arrays and objects/hashes (each and friends), we can get set difference, union and intersection in linear time in about 20 lines total:
var setOPs = {
  minusAB : function (a, b) {
    var h = {};
    b.each(function (v) { h[v] = true; });
    return a.filter(function (v) { return !h.hasOwnProperty(v); });
  },
  unionAB : function (a, b) {
    var h = {}, f = function (v) { h[v] = true; };
    a.each(f);
    b.each(f);
    return myUtils.keys(h);
  },
  intersectAB : function (a, b) {
    var h = {};
    a.each(function (v) { h[v] = 1; });
    b.each(function (v) { h[v] = (h[v] || 0) + 1; });
    var fnSel = function (v, count) { return count > 1; };
    var fnVal = function (v, c) { return v; };
    return myUtils.select(h, fnSel, fnVal);
  }
};
This assumes that each and filter are defined for arrays, and that we have two utility methods:
- myUtils.keys(hash): returns an array with the keys of the hash
- myUtils.select(hash, fnSelector, fnEvaluator): returns an array with the results of calling fnEvaluator on the key/value pairs for which fnSelector returns true.
The select() is loosely inspired by Common Lisp, and is merely filter() and map() rolled into one. (It would be better to have them defined on Object.prototype, but doing so wreaks havoc with jQuery, so I settled for static utility methods.)
Performance: Testing with
var a = [], b = [];
for (var i = 100000; i--; ) {
  if (i % 2 !== 0) a.push(i);
  if (i % 3 !== 0) b.push(i);
}
gives two sets with 50,000 and 66,666 elements. With these values A-B takes about 75ms, while union and intersection are about 150ms each. (Mac Safari 4.0, using Javascript Date for timing.)
I think that's decent payoff for 20 lines of code.
As for the fastest way, this isn't so elegant, but I've run some tests to be sure. Loading one array as an object first is far faster to process in large quantities:
var t, a, b, c, objA;
// Fill some arrays to compare
a = Array(30000).fill(0).map(function(v,i) {
  return i.toFixed();
});
b = Array(20000).fill(0).map(function(v,i) {
  return (i*2).toFixed();
});
// Simple indexOf inside filter
t = Date.now();
c = b.filter(function(v) { return a.indexOf(v) < 0; });
console.log('completed indexOf in %j ms with result %j length', Date.now() - t, c.length);
// Load `a` as Object `A` first to avoid indexOf in filter
t = Date.now();
objA = {};
a.forEach(function(v) { objA[v] = true; });
c = b.filter(function(v) { return !objA[v]; });
console.log('completed Object in %j ms with result %j length', Date.now() - t, c.length);
Results:
completed indexOf in 1219 ms with result 5000 length
completed Object in 8 ms with result 5000 length
However, this works with strings only. If you plan to compare sets of numbers, you'll want to map the results with parseFloat.
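For example, converting the filtered result c back to numbers could look like this (safe because parseFloat only uses the first argument map passes to it):
var numericDiff = c.map(parseFloat);
console.log(numericDiff); // [30000, 30002, ...] as numbers instead of strings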
The functions below are ports of the methods found in Python's set class and follow the TC39 Set methods proposal.
const
  union = (a, b) => new Set([...a, ...b]),
  intersection = (a, b) => new Set([...a].filter(x => b.has(x))),
  difference = (a, b) => new Set([...a].filter(x => !b.has(x))),
  symmetricDifference = (a, b) => union(difference(a, b), difference(b, a)),
  isSubsetOf = (a, b) => [...a].every(x => b.has(x)),   // is a a subset of b?
  isSupersetOf = (a, b) => [...b].every(x => a.has(x)), // is a a superset of b?
  isDisjointFrom = (a, b) => !intersection(a, b).size;
const
a = new Set([1, 2, 3, 4]),
b = new Set([5, 4, 3, 2]);
console.log(...union(a, b)); // [1, 2, 3, 4, 5]
console.log(...intersection(a, b)); // [2, 3, 4]
console.log(...difference(a, b)); // [1]
console.log(...difference(b, a)); // [5]
console.log(...symmetricDifference(a, b)); // [1, 5]
const
c = new Set(['A', 'B', 'C', 'D', 'E']),
d = new Set(['B', 'D']);
console.log(isSubsetOf(d, c)); // true (d is a subset of c)
console.log(isSupersetOf(c, d)); // true (c is a superset of d)
const
e = new Set(['A', 'B', 'C']),
f = new Set(['X', 'Y', 'Z']);
console.log(isDisjointFrom(e, f)); // true
This works, but I think another one is much shorter, and more elegant too:
var A = [1, 'a', 'b', 12];
var B = ['a', 3, 4, 'b'];

var diff_set = {
  ar: {},
  diff: [],
  remove_set: function (a) { this.ar = a; return this; },
  remove: function (el) {
    if (this.ar.indexOf(el) < 0) this.diff.push(el);
  }
};

A.forEach(diff_set.remove_set(B).remove, diff_set);
var C = diff_set.diff; // [1, 12]
Using core-js to polyfill the new Set methods proposal:
import "core-js"

new Set(A).difference(new Set(B))
Note that the argument must be set-like (it needs size, has and keys), so a plain array has to be wrapped in a Set first.
In theory, the time complexity should be Θ(n), where n is the number of elements in B.
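Applied to the question's arrays (assuming the core-js polyfill, or a runtime that already implements the proposal), this could look like:
import "core-js";

const A = [1, 2, 3, 4];
const B = [1, 3, 4, 7];

// B must also be wrapped in a Set (set-like argument)
const diff = [...new Set(A).difference(new Set(B))];
console.log(diff); // [2]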