I have an array of objects:
const arr = [
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 },
  { title: "sky", artist: "Jon", id: 1 }
];
How can I remove every object that has a duplicate, so only the entries that appear exactly once remain?
You could run the array through reduce(), using some() to check whether the current item's id is already in the accumulator: add the item if it is not there, or remove it with filter() if it is.
Snippet:
const arr = [
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 },
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 },
  { title: "earth", artist: "Frank", id: 3 },
];

const unique = arr.reduce((accumulator, currentValue) => {
  // add if we don't have it
  if (!accumulator.some(x => x.id === currentValue.id)) {
    accumulator.push(currentValue);
  } else {
    // remove if we do
    accumulator = accumulator.filter(x => x.id !== currentValue.id);
  }
  return accumulator;
}, []);
console.info(unique);
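For reference (my own sanity check, not part of the original answer), running this on the sample array above should leave only the entry whose id never repeats:
// With the sample data above, only the id-3 entry should survive:
console.assert(
  unique.length === 1 && unique[0].id === 3,
  "expected only { title: 'earth', artist: 'Frank', id: 3 } to remain"
);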
You can do it in one line like this:
const res = arr.filter(elem => (arr.filter(obj => obj.id === elem.id)).length === 1)
Or you can do it like this (better in terms of time complexity):
const arr = [
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 },
  { title: "sky", artist: "Jon", id: 1 },
];
const counts = arr.reduce((counts, curr) => (counts[curr.id] = ++counts[curr.id] || 1, counts), {})
const res = arr.filter(curr => counts[curr.id] === 1)
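To make the comma-operator line above concrete (my annotation, not part of the original answer): counts tallies how often each id occurs, and res keeps only the entries whose tally is 1, so for the three-element sample you should see roughly:
console.log(counts); // { '1': 2, '2': 1 }
console.log(res);    // [ { title: "rain", artist: "Paul", id: 2 } ]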
If you know that title, artist, and id are going to be in the same order in each of the objects, one solution can be this:
var arrayX = [
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 },
  { title: "sky", artist: "Jon", id: 1 }
];

var newArray = arrayX.map(i => JSON.stringify(i)); // stringify all the objects so we can compare them
var res = newArray.filter(elem => {
  // keep only those elements which do not have dupes
  return newArray.indexOf(elem) === newArray.lastIndexOf(elem);
});
var finalResult = res.map(i => JSON.parse(i)); // parse the result to get back the array of objects
console.log(finalResult);
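If the key order is not guaranteed, one workaround (a sketch of mine, not part of the original answer; stableStringify is a made-up helper name) is to sort the keys before stringifying:
// Hypothetical helper: stringify with the keys sorted so key order no longer matters.
const stableStringify = obj => JSON.stringify(
  Object.keys(obj).sort().reduce((acc, key) => (acc[key] = obj[key], acc), {})
);

var normalized = arrayX.map(i => stableStringify(i));
var uniqueOnes = normalized
  .filter(elem => normalized.indexOf(elem) === normalized.lastIndexOf(elem))
  .map(i => JSON.parse(i));
console.log(uniqueOnes); // [ { artist: "Paul", id: 2, title: "rain" } ]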
Here's a one-liner:
list.filter(el => list.filter(e => e.title == el.title).length == 1);
const arr = [
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 },
  { title: "sky", artist: "Jon", id: 1 }
];

const arr1 = [
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 },
  { title: "sky", artist: "Jon", id: 1 },
  { title: "rain", artist: "Paul", id: 2 }
];
function removeDupes(list) {
  return list.filter(el => list.filter(e => e.id == el.id).length == 1);
}
console.log(removeDupes(arr));
console.log(removeDupes(arr1));
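Since the one-liner above compares by title while removeDupes compares by id, a keyed variant (my own generalization, not from the answer) lets you pick the field to deduplicate on:
// Keep items whose value for `key` occurs exactly once in the list.
function removeDupesBy(list, key) {
  return list.filter(el => list.filter(e => e[key] == el[key]).length == 1);
}

console.log(removeDupesBy(arr, "title")); // same result as removeDupes(arr) for this data
console.log(removeDupesBy(arr1, "id"));   // [] -- every id repeats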
You could take an object as a hash table, marking each id as true the first time it is seen and false on any repeat, and then filter the array by that stored value.
const
  array = [{ title: "sky", artist: "Jon", id: 1 }, { title: "rain", artist: "Paul", id: 2 }, { title: "sky", artist: "Jon", id: 1 }, { title: "rain", artist: "Paul", id: 2 }],
  ids = array.reduce((r, { id }) => (r[id] = !(id in r), r), {}),
  result = array.filter(({ id }) => ids[id]);
console.log(result);
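To spell out the trick (my annotation): the first time an id is seen, !(id in r) is true; any repeat flips the stored value to false, so only ids seen exactly once stay truthy. For the four-element sample above both ids repeat:
console.log(ids); // { '1': false, '2': false } -- so result above is []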
You can group the items by id, and then use _.flatMap() to convert the groups back to a single array. In the _.flatMap() callback, return an empty array if the group has more than one item:
const fn = arr => _.flatMap(
  _.groupBy(arr, 'id'), // group by the id
  group => _.size(group) > 1 ? [] : group // check the size and return an empty array for groups with more than a single item
)
const arr1 = [{"title":"sky","artist":"Jon","id":1},{"title":"rain","artist":"Paul","id":2},{"title":"sky","artist":"Jon","id":1}]
const arr2 = [{"title":"sky","artist":"Jon","id":1},{"title":"rain","artist":"Paul","id":2},{"title":"sky","artist":"Jon","id":1},{"title":"rain","artist":"Paul","id":2}]
console.log(fn(arr1))
console.log(fn(arr2))
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.js"></script>
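The same grouping idea works without lodash, if you prefer plain JavaScript (a sketch of mine, assuming Object.values and Array.prototype.flat are available, i.e. ES2019+):
const fnPlain = arr => {
  // group the items into arrays keyed by id
  const groups = arr.reduce((acc, item) => ((acc[item.id] = acc[item.id] || []).push(item), acc), {});
  // keep only the groups that contain a single item, then flatten back to one array
  return Object.values(groups).filter(group => group.length === 1).flat();
};

console.log(fnPlain(arr1)); // [ { title: "rain", artist: "Paul", id: 2 } ]
console.log(fnPlain(arr2)); // []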
Another example