I've created BRESort, an adaptive sorting engine for byte (unsigned char) arrays that dynamically selects an algorithm based on patterns in the data. In my benchmarks it achieves a 3.6-4.2x speedup over stdlib qsort while avoiding O(n²) worst-case behavior.
GitHub Repository: https://github.com/LazyCauchPotato/BRESort
Key Components:
- bresort.h - Public API and data generators
- bresort.c - Core algorithm implementation
- Test suites for validation and performance
Core Innovation: The engine analyzes the data in a single pass (value range, sortedness, run patterns) and then chooses among counting sort, insertion sort, or a hybrid approach.
#include <string.h>  /* memset, used in bres_counting_sort */

/* Forward declarations for the strategy implementations (defined below / in bresort.c) */
static void bres_counting_sort(unsigned char arr[], int n);
static void bres_insertion_sort(unsigned char arr[], int n);
static void bres_hybrid_sort(unsigned char arr[], int n);

/* Core analysis function - the "brain" of BRESort */
static int bres_analyze_strategy(unsigned char arr[], int n) {
if (n <= 16) return 1; // Insertion sort for tiny arrays
unsigned char min_val = 255, max_val = 0;
int out_of_order = 0;
int consecutive_sorted = 0;
int max_consecutive = 0;
// Single-pass analysis: extract data patterns
for (int i = 0; i < n; i++) {
// 1. Measure value range
if (arr[i] < min_val) min_val = arr[i];
if (arr[i] > max_val) max_val = arr[i];
// 2. Measure sortedness and patterns
if (i > 0) {
if (arr[i] >= arr[i-1]) {
consecutive_sorted++;
if (consecutive_sorted > max_consecutive) {
max_consecutive = consecutive_sorted;
}
} else {
out_of_order++;
consecutive_sorted = 0;
}
}
}
unsigned char range = max_val - min_val;
// Decision tree based on extracted patterns
if (range == 0) return 0; // All identical - counting sort
if (range < 16) return 0; // Small range - counting sort
if (range < 64 && n > 100) return 2; // Medium complexity - hybrid
if (out_of_order < 3 || max_consecutive > n * 0.8) return 1; // Mostly sorted - insertion
if (n > 1000 && range > 128) return 2; // Large & complex - hybrid
return 0; // Default: counting sort (safe)
}
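To make the thresholds concrete, here is a small illustrative check (not from the repo; it assumes this file is compiled as one translation unit so the static function is visible): a narrow-range array should select counting sort (0), and a nearly sorted wide-range array should select insertion sort (1).
/* Illustrative strategy checks, not part of the repo */
#include <assert.h>

static void bres_strategy_examples(void) {
    unsigned char small_range[200];
    unsigned char nearly_sorted[200];

    for (int i = 0; i < 200; i++) {
        small_range[i]   = (unsigned char)(i % 8);  /* range = 7, i.e. < 16        */
        nearly_sorted[i] = (unsigned char)i;        /* ascending run of 200 values */
    }
    nearly_sorted[150] = 0;                         /* a single out-of-order drop  */

    assert(bres_analyze_strategy(small_range, 200) == 0);    /* counting sort  */
    assert(bres_analyze_strategy(nearly_sorted, 200) == 1);  /* insertion sort */
}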
/* Main dispatch function */
void BRESort_byte(unsigned char arr[], int n) {
if (n <= 1) return;
// Tiny arrays always use insertion sort
if (n <= 8) {
bres_insertion_sort(arr, n);
return;
}
// Analyze and choose optimal strategy
int strategy = bres_analyze_strategy(arr, n);
switch (strategy) {
case 0: bres_counting_sort(arr, n); break; // Counting sort
case 1: bres_insertion_sort(arr, n); break; // Insertion sort
case 2: bres_hybrid_sort(arr, n); break; // Hybrid approach
default: bres_counting_sort(arr, n); break; // Safe fallback
}
}
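For reference, a minimal caller looks like this (a sketch, assuming bresort.h declares BRESort_byte):
#include <stdio.h>
#include "bresort.h"

int main(void) {
    unsigned char data[] = { 42, 7, 7, 200, 13, 0, 255, 13 };
    int n = (int)(sizeof data / sizeof data[0]);

    BRESort_byte(data, n);          /* n == 8, so this dispatches to insertion sort */

    for (int i = 0; i < n; i++)
        printf("%d ", data[i]);     /* prints: 0 7 7 13 13 42 200 255 */
    printf("\n");
    return 0;
}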
/* Optimized counting sort for bytes */
static void bres_counting_sort(unsigned char arr[], int n) {
int count[256] = {0};
// Count occurrences
for (int i = 0; i < n; i++) {
count[arr[i]]++;
}
// Reconstruct sorted array
int index = 0;
for (int value = 0; value < 256; value++) {
int occurrences = count[value];
if (occurrences > 0) {
if (occurrences > 3) {
// Optimize runs with memset
memset(&arr[index], value, occurrences);
index += occurrences;
} else {
// Manual assignment for small counts
for (int j = 0; j < occurrences; j++) {
arr[index++] = (unsigned char)value;
}
}
}
}
}
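(bres_insertion_sort and bres_hybrid_sort are defined in bresort.c and not shown here. For reference, a textbook shift-based insertion sort matching the signature above would be a sketch like the following; see the repo for the actual code.)
/* Illustrative only: a plain shift-based insertion sort with the same signature */
static void bres_insertion_sort(unsigned char arr[], int n) {
    for (int i = 1; i < n; i++) {
        unsigned char key = arr[i];
        int j = i - 1;
        /* Shift larger elements one slot right, then drop key into place */
        while (j >= 0 && arr[j] > key) {
            arr[j + 1] = arr[j];
            j--;
        }
        arr[j + 1] = key;
    }
}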
I'd appreciate any feedback on the overall architecture and C implementation!