Amazon Interview Question
Quality Assurance Engineers — Team: Kindle
Country: India
Interview Type: Written Test
I see a lot of Java Code to tackle this problem. My solution makes use of Python
# Count duplicate elements: print how many distinct values appear more
# than once, then the duplicated values themselves.
from collections import Counter


def find_duplicates(items):
    """Return a dict mapping each value occurring more than once in
    ``items`` to its total occurrence count."""
    return {value: n for value, n in Counter(items).items() if n > 1}


my_list = [5, 3, 4, 6, 7, 5, 3, 2, 1]
duplicate_count = find_duplicates(my_list)
# Python 3 print() calls (the original used Python 2 print statements,
# which are a SyntaxError on Python 3); list(...) keeps the printed
# form a plain list rather than a dict_keys view.
print(len(duplicate_count))
print(list(duplicate_count.keys()))
I think we can go with the hashmap where key will be the number and value will be count. The problem with this is the space complexity.
With hashmap code will be
import java.util.*;

/**
 * Prints each element of a fixed sample array that occurs more than once,
 * together with its occurrence count, followed by the number of distinct
 * duplicated elements.
 */
public class arrayDuplicate
{
    /**
     * Returns a map from each value of {@code arr} that occurs at least
     * twice to its total occurrence count.
     *
     * Single O(n) pass. The original nested-loop version recounted from
     * every later occurrence and overwrote the map with progressively
     * smaller counts (e.g. 5 occurs 4 times but ended up mapped to 2),
     * and used raw {@code HashMap}/{@code Set}/{@code Iterator} types.
     */
    static Map<Integer, Integer> countDuplicates(int[] arr)
    {
        Map<Integer, Integer> counts = new HashMap<Integer, Integer>();
        for (int value : arr)
        {
            Integer seen = counts.get(value);
            counts.put(value, seen == null ? 1 : seen + 1);
        }
        // Drop values that occurred only once; only duplicates remain.
        for (Iterator<Map.Entry<Integer, Integer>> it = counts.entrySet().iterator(); it.hasNext();)
        {
            if (it.next().getValue() < 2)
            {
                it.remove();
            }
        }
        return counts;
    }

    public static void main(String args[])
    {
        int arr[] = {5,3,4,6,7,3,5,1,5,5,2,3,2,3};
        Map<Integer, Integer> hm = countDuplicates(arr);
        for (Map.Entry<Integer, Integer> me : hm.entrySet())
        {
            System.out.println("Element:- " + me.getKey() + " Count:- " + me.getValue());
        }
        System.out.println("Total duplicate elements:- " + hm.size());
    }
}
/**
 * Sorts a fixed sample array and reports each distinct value that appears
 * more than once (printed exactly once per value), followed by how many
 * such values exist.
 */
public class ArrayDuplicates {
    static int[] arr = {4,3,4,2,3,1,5,1,2,6,7,8,6};

    public static void main(String[] args) {
        Arrays.sort(arr);
        int duplicates = 0;
        int idx = 0;
        // After sorting, equal values sit in adjacent runs: walk each run
        // of identical values and report those longer than one element.
        while (idx < arr.length) {
            int runStart = idx;
            while (idx + 1 < arr.length && arr[idx + 1] == arr[runStart]) {
                idx++;
            }
            if (idx > runStart) {
                System.out.println(arr[runStart]);
                duplicates++;
            }
            idx++;
        }
        System.out.println(duplicates);
    }
}
// Brute-force duplicate scan over a fixed sample array: for every pair
// (i, j) with j > i, a match records array[i] into duplicateElements and
// increments the pair count.
// NOTE(review): every extra occurrence forms an additional pair, so a
// value appearing three times would be recorded and counted more than
// once; with this sample (each duplicate occurs exactly twice) the
// output is 5, 3 and count 2.
final int[] array = { 5, 3, 4, 6, 7, 5, 3, 2, 1 };
// Collected duplicate values; unused slots keep the int default of 0.
final int[] duplicateElements = new int[array.length];
int count = 0;
for (int i = 0; i < array.length; i++) {
for (int j = i + 1; j< array.length; j++) {
if (array[i] == array[j]) {
count++;
duplicateElements[count - 1] = array[i];
}
}
}
// Print the recorded duplicates, treating 0 as an end-of-data sentinel.
// NOTE(review): this terminates early if 0 is itself a legitimate
// duplicate value in the input.
for (int i = 0; i < duplicateElements.length; i++) {
if (duplicateElements[i] == 0) {
break;
}
System.out.println(duplicateElements[i]);
}
System.out.println("count : " + count);
// Pairwise duplicate scan (this snippet is a verbatim repost of the one
// above it on the page): a match of indices (i, j), j > i, stores
// array[i] and bumps count once per matching pair.
// NOTE(review): values occurring three or more times are counted once
// per pair, inflating both the stored list and the count; here every
// duplicate occurs exactly twice, so the printed result is 5, 3 / 2.
final int[] array = { 5, 3, 4, 6, 7, 5, 3, 2, 1 };
// Slots beyond the recorded duplicates remain 0 (int array default).
final int[] duplicateElements = new int[array.length];
int count = 0;
for (int i = 0; i < array.length; i++) {
for (int j = i + 1; j< array.length; j++) {
if (array[i] == array[j]) {
count++;
duplicateElements[count - 1] = array[i];
}
}
}
// 0 acts as the stop marker while printing.
// NOTE(review): breaks too early if 0 were a real duplicate value.
for (int i = 0; i < duplicateElements.length; i++) {
if (duplicateElements[i] == 0) {
break;
}
System.out.println(duplicateElements[i]);
}
System.out.println("count : " + count);
/// <summary>
/// Builds an occurrence count for a fixed sample array, then prints how
/// many values occur more than once followed by those values.
/// </summary>
private static void GetDuplicate()
{
    int[] array = { 5, 3, 4, 6, 7, 5, 3, 2, 1 };
    var dict = new Dictionary<int, int>();
    List<int> duplicate = new List<int>();

    // Tally occurrences of every value.
    foreach (var value in array)
    {
        if (dict.ContainsKey(value))
            dict[value]++;
        else
            dict[value] = 1;
    }

    // Sorting by descending count groups all duplicated values first,
    // so the scan can stop at the first value whose count is 1.
    // (Removed the original's unused `var test = ...` query — dead code.)
    foreach (var pair in dict.OrderByDescending(x => x.Value))
    {
        if (pair.Value == 1)
            break;
        duplicate.Add(pair.Key);
    }

    Console.WriteLine("Count: " + duplicate.Count);
    Console.WriteLine(String.Join(", ", duplicate));
    Console.ReadKey();
}
If we use one for loop inside another, the time complexity is O(n^2). If we first sort the array, the total becomes O(n log n) — O(n log n) for the sort plus O(n) for the adjacent-element scan. If there is no restriction on memory, we can instead use a hash table, which gives O(n) time at the cost of O(n) extra space.
// The array is kept sorted, so equal values form adjacent runs; walk each
// run once instead of rescanning the remaining elements every iteration
// (O(n) scan after the sort, vs the original's O(n^2) inner loop).
int[] occ = {1,1,2,2,2,3,3,3,5,6,7,8,8,9};
Arrays.sort(occ); // defensive: guarantees the adjacency invariant
int NoOfDupEle = 0;
// At most half the elements can be distinct duplicates, since each one
// needs at least two slots in the input.
int[] dupelemt = new int[occ.length / 2];
int i = 0;
while (i < occ.length) {
    int runLength = 1;
    while (i + runLength < occ.length && occ[i + runLength] == occ[i]) {
        runLength++;
    }
    if (runLength > 1) {
        dupelemt[NoOfDupEle] = occ[i];
        NoOfDupEle++;
    }
    i += runLength; // jump past the whole run
}
// Fixed typo in the message ("Totla" -> "Total").
System.out.println("Total number of duplicate elements :" + NoOfDupEle);
// Fixed off-by-one: the original loop stopped at length-1 and could
// silently skip the last recorded duplicate.
// NOTE(review): 0 entries are skipped, so a genuine duplicate value of 0
// would not be printed.
for (int dup = 0; dup < dupelemt.length; dup++) {
    if (dupelemt[dup] != 0) {
        System.out.println("duplicate elements :" + dupelemt[dup] + ",");
    }
}
// arr1 tracks every element seen so far; arr2 collects each duplicated
// value once; count tallies every extra (repeat) occurrence.
// NOTE(review): List.contains is a linear scan, making this O(n^2)
// overall — a HashSet would give the same result in O(n).
List<Integer> arr1 = new ArrayList<>();
List<Integer> arr2 = new ArrayList<>();
int arr [] = {5,3,4,6,7,5,3,2,1};
int count = 0;
for (int i = 0; i < arr.length; i++) {
    if (arr1.contains(arr[i])) {      // seen before -> duplicate occurrence
        if (!arr2.contains(arr[i])) {
            arr2.add(arr[i]);         // record each duplicate value only once
        }
        count++;
    }
    arr1.add(arr[i]);
}
// Fixed: the original called a nonexistent printf(...) helper; use the
// standard output APIs instead.
System.out.println(arr2);
System.out.printf("Count: %d%n", count);
Java Implementation using HashSet
import java.util.HashSet;
import java.util.Set;

/**
 * Walks a fixed sample array once, printing every occurrence of a value
 * that has already been seen, then the total number of such occurrences.
 */
public class DuplicateElements {

    public static void main(String[] args) {
        int[] arr = {5,3,4,6,7,5,3,2,1};
        Set<Integer> seen = new HashSet<Integer>();
        int duplicates = 0;
        for (int value : arr) {
            // Set#add returns false when the value was already present —
            // exactly the "duplicate occurrence" signal we need.
            if (!seen.add(value)) {
                System.out.print(value + ",");
                duplicates = duplicates + 1;
            }
        }
        System.out.println();
        System.out.println("number of duplicate elements : " + duplicates);
    }
}
import java.util.HashSet;
import java.util.Set;

/**
 * Reports duplicate occurrences in a fixed sample array: each repeated
 * occurrence is printed as it is encountered, followed by the total count.
 * (This snippet is a repost of the HashSet solution above it on the page.)
 */
public class DuplicateElements {

    public static void main(String[] args) {
        final int[] data = {5,3,4,6,7,5,3,2,1};
        final Set<Integer> alreadySeen = new HashSet<Integer>();
        int repeats = 0;
        int index = 0;
        while (index < data.length) {
            int current = data[index];
            // A value already in the set means this occurrence is a repeat.
            boolean isRepeat = alreadySeen.contains(current);
            if (isRepeat) {
                System.out.print(current + ",");
                repeats++;
            }
            alreadySeen.add(current);
            index++;
        }
        System.out.println();
        System.out.println("number of duplicate elements : " + repeats);
    }
}
Well
### if there is no space constraint then just take an auxiliary array (just like we do in Counting sort). And keep adding one to the index value of elements present in this array.
{
arr1[5] = {2,3,2,4,1};
for each x: arr2[arr1[x]]++;
arr2[5] = {0,1,2,1,1};   // index = value, entry = occurrence count for values 0..4
}
at the end just traverse the arr2[ ] for count greater than 1. The number of places where you get value greater than 1 is your answer.
### Otherwise, if there is a space constraint, the XOR method can be used — but note that XOR tricks only recover a single duplicated (or missing) value, not a full list of duplicates with their counts.
/**
 * Brute-force pair scan over a fixed sample array: prints the earlier
 * element of every equal pair (i, j) with j > i, then the number of such
 * pairs.
 */
public class NumberTest {

    public static void main(String[] args) {
        final int[] values = {5,3,4,6,7,5,3,2,1};
        int pairCount = 0;
        // Compare each element with everything after it; every matching
        // pair is reported individually (O(n^2) comparisons).
        for (int left = 0; left < values.length; left++) {
            for (int right = left + 1; right < values.length; right++) {
                if (values[left] != values[right]) {
                    continue;
                }
                System.out.println(values[left]);
                pairCount++;
            }
        }
        System.out.println("Count = " + pairCount);
    }
}
Either sort and find out the duplicates or use a hashtable to count the duplicates.
- Murali Mohan July 28, 2014