The time complexity of merge sort and heapsort is O(n lg n), and the time complexity of insertion sort and bubble sort is O(n^2); quicksort is O(n lg n) on average. These algorithms determine the order by comparing elements, and are therefore called comparison sorts.
A comparison sort can be modeled as a decision tree (a binary tree), because each comparison produces a branch. A sequence of n elements has n! possible orderings (the permutations of the elements), so the decision tree must have at least n! leaf nodes. If the height of the tree is h, then n! <= 2^h, so h >= lg(n!) = Ω(n lg n). One execution of a comparison sort corresponds to a path from the root of the decision tree to a leaf; therefore the worst-case time complexity of any comparison sort is Ω(n lg n).
Counting sort, radix sort, and bucket sort are non-comparison sorts, and their time complexity is O(n). However, these three algorithms are not in-place sorts and require a significant amount of extra memory, whereas most comparison-sort algorithms sort in place.
/**
 * Introduction to Algorithms, Chapter 8: sorting in linear time.
 * Counting sort, radix sort, and bucket sort.
 */
#include <iostream>
#include <cmath>
#include <vector>
#include <ctime>
#include <cstdlib>
using namespace std;

// Print the `len` elements of `arr`, preceded by the label `str`.
void printarray(int arr[], int len, const char* str) {
    cout << str << endl;
    for (int i = 0; i < len; i++) {
        cout << arr[i] << " ";
    }
    cout << endl;
}

int* countingsort(int* arr, int len, int k);
int* radixsort(int* arr, int len, int d);
int getdigit(int num, int d);
int* bucketsort(int* arr, int len, int maxnum);

int main() {
    int len = 30;
    int k = 10;
    srand(time(NULL));
    int* arr = new int[len];

    // Counting sort: elements must lie in [0, k).
    for (int i = 0; i < len; i++) {
        arr[i] = rand() % k;
    }
    printarray(arr, len, "array before counting sort");
    int* result = countingsort(arr, len, k);
    printarray(result, len, "array after counting sort");
    delete[] result;

    // Radix sort: 3-digit numbers in [100, 600).
    for (int i = 0; i < len; i++) {
        arr[i] = 100 + rand() % 500;
    }
    printarray(arr, len, "array before radix sort");
    result = radixsort(arr, len, 3);
    printarray(result, len, "array after radix sort");
    delete[] result;

    // Bucket sort: elements in [0, 100).
    for (int i = 0; i < len; i++) {
        arr[i] = rand() % 100;
    }
    printarray(arr, len, "array before bucket sort");
    result = bucketsort(arr, len, 100);
    printarray(result, len, "array after bucket sort");
    // bucketsort sorts in place and returns arr itself, so this single
    // delete frees the input array as well — do NOT also delete[] arr.
    delete[] result;
    return 0;
}

/**
 * Counting sort. Time complexity O(n + k).
 * Requires every element to lie in [0, k); practical when k is not much
 * larger than n — with k = O(n) the sort runs in linear time.
 * Stable. Returns a newly allocated sorted array; caller must delete[] it.
 */
int* countingsort(int* arr, int len, int k) {
    int* numcount = new int[k]();   // value-initialized to zero
    int* result = new int[len];
    // numcount[i] = number of elements equal to i
    for (int i = 0; i < len; i++) {
        numcount[arr[i]]++;
    }
    // prefix sums: numcount[i] = number of elements <= i
    for (int i = 1; i < k; i++) {
        numcount[i] += numcount[i - 1];
    }
    // Place elements back-to-front so equal elements keep their relative
    // order (stability); a front-to-back pass would also sort, but unstably.
    for (int i = len - 1; i >= 0; i--) {
        result[numcount[arr[i]] - 1] = arr[i];
        numcount[arr[i]]--;
    }
    delete[] numcount;
    return result;
}

/**
 * Radix sort (least-significant digit first) over d decimal digits,
 * using a counting sort per digit. The stability of the per-digit sort
 * is essential: e.g. for [27, 15, 43, 42], an unstable digit pass could
 * yield [15, 27, 43, 42]. Time complexity O(d * (n + k)); with d constant
 * and k = O(n), the sort is linear.
 * Returns a newly allocated sorted array; caller must delete[] it.
 */
int* radixsort(int* arr, int len, int d) {
    int* a = new int[len];
    for (int i = 0; i < len; i++) {
        a[i] = arr[i];
    }
    for (int j = 0; j < d; j++) {
        const int k = 10;   // decimal digits: 0..9
        int* numcount = new int[k]();
        int* result = new int[len];
        // count occurrences of each value of digit j
        for (int i = 0; i < len; i++) {
            numcount[getdigit(a[i], j)]++;
        }
        // prefix sums: numcount[i] = number of elements with digit <= i
        for (int i = 1; i < k; i++) {
            numcount[i] += numcount[i - 1];
        }
        // stable back-to-front placement (see countingsort)
        for (int i = len - 1; i >= 0; i--) {
            result[numcount[getdigit(a[i], j)] - 1] = a[i];
            numcount[getdigit(a[i], j)]--;
        }
        delete[] a;
        delete[] numcount;
        a = result;   // sorted-by-digit-j array feeds the next pass
    }
    return a;
}

/**
 * Return the d-th decimal digit of num (d = 0 is the least significant).
 * Pure integer arithmetic — avoids the double round-trips of the
 * pow()-based formulation and any float truncation surprises.
 */
int getdigit(int num, int d) {
    for (int i = 0; i < d; i++) {
        num /= 10;
    }
    return num % 10;
}

/**
 * Bucket sort. Works best when the input is uniformly distributed over
 * [0, maxnum): each of the n buckets then holds O(1) elements on average
 * and the insertion sort per bucket totals linear expected time (linear
 * as long as the sum of squared bucket sizes is O(n)).
 * Sorts arr in place and returns arr (no new array is allocated for the
 * result).
 */
int* bucketsort(int* arr, int len, int maxnum) {
    // create len buckets
    vector<int>* bucket = new vector<int>[len];
    // distribute each element to its bucket
    for (int i = 0; i < len; i++) {
        int idx = (int)(arr[i] / (double)maxnum * len);
        if (idx >= len) {
            idx = len - 1;   // guard: arr[i] == maxnum would index past the end
        }
        bucket[idx].push_back(arr[i]);
    }
    // insertion-sort each bucket
    for (int i = 0; i < len; i++) {
        int n = (int)bucket[i].size();
        for (int j = 1; j < n; j++) {
            int key = bucket[i][j];
            int m = j - 1;
            while (m >= 0 && bucket[i][m] > key) {
                bucket[i][m + 1] = bucket[i][m];
                m--;
            }
            bucket[i][m + 1] = key;
        }
    }
    // concatenate the buckets back into arr
    int pos = 0;
    for (int j = 0; j < len; j++) {
        for (size_t m = 0; m < bucket[j].size(); m++) {
            arr[pos++] = bucket[j][m];
        }
    }
    delete[] bucket;
    return arr;
}