Thanks for all the ideas, guys.
I've tried all of them except SHA-256, which my server doesn't have installed.
Here are the results:
Average (http_build_query): 1.3954045954045E-5
Average (diff): 0.00011533766233766
Average (serialize): 1.7588411588412E-5
Average (md5): 1.6036963036966E-5
Average (implode-haval160,4): 1.5349650349649E-5
That's running each operation 1000 times and averaging the results. After refreshing a couple of times I could tell that http_build_query was the quickest. I guess my next question is whether anyone can think of any pitfalls with this method.
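One pitfall I did spot while poking at it (just a quick sketch with made-up data, not from the benchmark run): http_build_query is order-sensitive and silently drops null values, so two arrays holding the same data can still produce different strings.

// Same pairs, different key order -> different query strings.
$a = array('foo' => 1, 'bar' => 2);
$b = array('bar' => 2, 'foo' => 1);
var_dump(http_build_query($a) === http_build_query($b));  // bool(false)

// A null value is skipped entirely, so it never shows up in the string.
$c = array('foo' => 1, 'bar' => null);
var_dump(http_build_query($c));                            // string(5) "foo=1"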
Thanks
Here's my code:
class a {
    static $input;

    static function test() {
        $s = $e = $d = $g = $h = array();

        // Build a test array of 31 random integers.
        self::$input = array();
        for ($x = 0; $x <= 30; $x++) {
            self::$input['variable_' . $x] = rand();
        }

        // http_build_query only
        for ($x = 0; $x < 1000; $x++) {
            $start = microtime(true);   // microtime(true) returns a float; the default string form can't be subtracted reliably
            $c = http_build_query(self::$input);
            ($c == $c);                 // dummy comparison so the result is actually used
            $s[] = microtime(true) - $start;
        }

        // md5 of the query string
        for ($x = 0; $x < 1000; $x++) {
            $start = microtime(true);
            $c = md5(http_build_query(self::$input));
            ($c == $c);
            $e[] = microtime(true) - $start;
        }

        // array_diff of the array against itself
        for ($x = 0; $x < 1000; $x++) {
            $start = microtime(true);
            $c = array_diff(self::$input, self::$input);
            $d[] = microtime(true) - $start;
        }

        // serialize
        for ($x = 0; $x < 1000; $x++) {
            $start = microtime(true);
            $c = serialize(self::$input);
            ($c == $c);
            $g[] = microtime(true) - $start;
        }

        // haval160,4 hash of the imploded values
        for ($x = 0; $x < 1000; $x++) {
            $start = microtime(true);
            $c = hash("haval160,4", implode(',', self::$input));
            ($c == $c);
            $h[] = microtime(true) - $start;
        }

        echo "<pre>";
        //print_r($s);
        echo "Average (http_build_query): " . array_sum($s) / count($s) . "<br>";
        echo "Average (diff): " . array_sum($d) / count($d) . "<br>";
        echo "Average (serialize): " . array_sum($g) / count($g) . "<br>";
        echo "Average (md5): " . array_sum($e) / count($e) . "<br>";
        echo "Average (implode-haval160,4): " . array_sum($h) / count($h);
    }
}
a::test();
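And in case it helps, here's roughly how I intend to use the winner for change detection. Just a sketch: the helper name and data are made up, and the ksort() is only there to sidestep the key-order issue mentioned above.

// Hypothetical helper: sort by key, then hash the query string as a fingerprint.
function array_fingerprint(array $data) {
    ksort($data);
    return md5(http_build_query($data));
}

$settings = array('variable_0' => 123, 'variable_1' => 456);
$saved = array_fingerprint($settings);

$settings['variable_1'] = 789;                     // something changed
if (array_fingerprint($settings) !== $saved) {
    echo "settings changed, write them back";
}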