Very easy to measure...
In a small amount of tight-loop processing code where I know the length is fixed, I use arrays for that extra tiny bit of micro-optimisation; arrays can be marginally faster if you use the indexer / for form - but IIRC it depends on the type of data in the array. But unless you need to micro-optimise, keep it simple and use List<T> etc.
Of course, this only applies if you are reading all of the data; a dictionary would be quicker for key-based lookups.
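For key-based access, a quick sketch (the string/decimal data here is made up purely for illustration):

using System;
using System.Collections.Generic;

static class DictionaryExample
{
    static void Main()
    {
        // Hashed lookup: roughly O(1) per key, no scan of the whole collection.
        Dictionary<string, decimal> prices = new Dictionary<string, decimal>();
        prices.Add("apple", 1.20m);
        prices.Add("pear", 0.95m);

        decimal price;
        if (prices.TryGetValue("apple", out price))
        {
            Console.WriteLine(price); // 1.20
        }
    }
}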
Here are my results using int (the second number is a checksum to verify they all did the same work):
(edited to fix a bug)
List/for: 1971ms (589725196)
Array/for: 1864ms (589725196)
List/foreach: 3054ms (589725196)
Array/foreach: 1860ms (589725196)
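Note how Array/foreach keeps pace with Array/for: the compiler lowers a foreach over an array into a plain indexed loop, whereas foreach over a List<T> goes through its enumerator - hence List/foreach being the slowest of the four.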
The numbers come from this test rig:
using System;
using System.Collections.Generic;
using System.Diagnostics;

static class Program
{
    static void Main()
    {
        // Build 6 million random ints in a List<int>, then copy them to an
        // array so all four tests iterate identical data.
        List<int> list = new List<int>(6000000);
        Random rand = new Random(12345);
        for (int i = 0; i < 6000000; i++)
        {
            list.Add(rand.Next(5000));
        }
        int[] arr = list.ToArray();

        // chk overflows int, but arithmetic is unchecked by default, so it
        // wraps identically in each test - which is all a checksum needs.
        int chk = 0;

        // List<T> via indexer in a for loop
        Stopwatch watch = Stopwatch.StartNew();
        for (int rpt = 0; rpt < 100; rpt++)
        {
            int len = list.Count;
            for (int i = 0; i < len; i++)
            {
                chk += list[i];
            }
        }
        watch.Stop();
        Console.WriteLine("List/for: {0}ms ({1})", watch.ElapsedMilliseconds, chk);

        // Array via indexer in a for loop
        chk = 0;
        watch = Stopwatch.StartNew();
        for (int rpt = 0; rpt < 100; rpt++)
        {
            for (int i = 0; i < arr.Length; i++)
            {
                chk += arr[i];
            }
        }
        watch.Stop();
        Console.WriteLine("Array/for: {0}ms ({1})", watch.ElapsedMilliseconds, chk);

        // List<T> via foreach (goes through List<T>.Enumerator)
        chk = 0;
        watch = Stopwatch.StartNew();
        for (int rpt = 0; rpt < 100; rpt++)
        {
            foreach (int i in list)
            {
                chk += i;
            }
        }
        watch.Stop();
        Console.WriteLine("List/foreach: {0}ms ({1})", watch.ElapsedMilliseconds, chk);

        // Array via foreach (the compiler lowers this to an indexed loop)
        chk = 0;
        watch = Stopwatch.StartNew();
        for (int rpt = 0; rpt < 100; rpt++)
        {
            foreach (int i in arr)
            {
                chk += i;
            }
        }
        watch.Stop();
        Console.WriteLine("Array/foreach: {0}ms ({1})", watch.ElapsedMilliseconds, chk);

        Console.ReadLine();
    }
}
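(As with any micro-benchmark: build in Release mode and run outside the debugger, or the timings won't tell you much.)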