From d7e352402cf21da54933392372f7a9232d3d7f23 Mon Sep 17 00:00:00 2001
From: dfandrich
Date: Thu, 12 Nov 2015 20:40:26 +0100
Subject: [PATCH] unit1603: Added unit tests for hash functions

---
 lib/hash.c              |  23 +++++-
 tests/data/Makefile.inc |   2 +-
 tests/data/test1603     |  26 +++++++
 tests/unit/Makefile.inc |   5 +-
 tests/unit/unit1305.c   |   2 -
 tests/unit/unit1603.c   | 151 ++++++++++++++++++++++++++++++++++++++++
 6 files changed, 203 insertions(+), 6 deletions(-)
 create mode 100644 tests/data/test1603
 create mode 100644 tests/unit/unit1603.c

diff --git a/lib/hash.c b/lib/hash.c
index c46760ae1..9f83426be 100644
--- a/lib/hash.c
+++ b/lib/hash.c
@@ -46,7 +46,12 @@ hash_element_dtor(void *user, void *element)
   free(e);
 }
 
-/* return 1 on error, 0 is fine */
+/* Initializes a hash structure.
+ * Return 1 on error, 0 is fine.
+ *
+ * @unittest: 1602
+ * @unittest: 1603
+ */
 int
 Curl_hash_init(struct curl_hash *h,
                int slots,
@@ -119,6 +124,8 @@ mk_hash_element(const void *key, size_t key_len, const void *p)
  * that data is replaced.
  *
  * @unittest: 1305
+ * @unittest: 1602
+ * @unittest: 1603
  */
 void *
 Curl_hash_add(struct curl_hash *h, void *key, size_t key_len, void *p)
@@ -155,7 +162,11 @@ Curl_hash_add(struct curl_hash *h, void *key, size_t key_len, void *p)
   return NULL; /* failure */
 }
 
-/* remove the identified hash entry, returns non-zero on failure */
+/* Remove the identified hash entry.
+ * Returns non-zero on failure.
+ *
+ * @unittest: 1603
+ */
 int Curl_hash_delete(struct curl_hash *h, void *key, size_t key_len)
 {
   struct curl_llist_element *le;
@@ -173,6 +184,10 @@ int Curl_hash_delete(struct curl_hash *h, void *key, size_t key_len)
   return 1;
 }
 
+/* Retrieves a hash element.
+ *
+ * @unittest: 1603
+ */
 void *
 Curl_hash_pick(struct curl_hash *h, void *key, size_t key_len)
 {
@@ -214,6 +229,10 @@ Curl_hash_apply(curl_hash *h, void *user,
 
 /* Destroys all the entries in the given hash and resets its attributes,
  * prepping the given hash for [static|dynamic] deallocation.
+ *
+ * @unittest: 1305
+ * @unittest: 1602
+ * @unittest: 1603
  */
 void
 Curl_hash_destroy(struct curl_hash *h)
diff --git a/tests/data/Makefile.inc b/tests/data/Makefile.inc
index 3cc4d8de3..bc47b9035 100644
--- a/tests/data/Makefile.inc
+++ b/tests/data/Makefile.inc
@@ -156,7 +156,7 @@ test1520 \
 \
 test1525 test1526 test1527 test1528 test1529 test1530 test1531 \
 \
-test1600 test1601 test1602 \
+test1600 test1601 test1602 test1603 \
 \
 test1800 test1801 \
 \
diff --git a/tests/data/test1603 b/tests/data/test1603
new file mode 100644
index 000000000..805c9e378
--- /dev/null
+++ b/tests/data/test1603
@@ -0,0 +1,26 @@
+<testcase>
+<info>
+<keywords>
+unittest
+hash
+</keywords>
+</info>
+
+#
+# Client-side
+<client>
+<server>
+none
+</server>
+<features>
+unittest
+</features>
+<name>
+Internal hash add, retrieval, deletion testing
+</name>
+<tool>
+unit1603
+</tool>
+</client>
+
+</testcase>
diff --git a/tests/unit/Makefile.inc b/tests/unit/Makefile.inc
index 9073b34e6..056a8fbf2 100644
--- a/tests/unit/Makefile.inc
+++ b/tests/unit/Makefile.inc
@@ -7,7 +7,7 @@ UNITFILES = curlcheck.h \
 # These are all unit test programs
 UNITPROGS = unit1300 unit1301 unit1302 unit1303 unit1304 unit1305 unit1307 \
  unit1308 unit1309 unit1330 unit1394 unit1395 unit1396 unit1397 unit1398 \
- unit1600 unit1601 unit1602
+ unit1600 unit1601 unit1602 unit1603
 
 unit1300_SOURCES = unit1300.c $(UNITFILES)
 unit1300_CPPFLAGS = $(AM_CPPFLAGS)
@@ -66,3 +66,6 @@ unit1601_CPPFLAGS = $(AM_CPPFLAGS)
 
 unit1602_SOURCES = unit1602.c $(UNITFILES)
 unit1602_CPPFLAGS = $(AM_CPPFLAGS)
+
+unit1603_SOURCES = unit1603.c $(UNITFILES)
+unit1603_CPPFLAGS = $(AM_CPPFLAGS)
diff --git a/tests/unit/unit1305.c b/tests/unit/unit1305.c
index 96913f1fa..9db488812 100644
--- a/tests/unit/unit1305.c
+++ b/tests/unit/unit1305.c
@@ -134,8 +134,6 @@ UNITTEST_START
   abort_unless(nodep, "insertion into hash failed");
   /* Freeing will now be done by Curl_hash_destroy */
   data_node = NULL;
-
-  /* To do: test retrieval, deletion, edge conditions */
 }
 UNITTEST_STOP
 
diff --git a/tests/unit/unit1603.c b/tests/unit/unit1603.c
new file mode 100644
index 000000000..27a08a73f
--- /dev/null
+++ b/tests/unit/unit1603.c
@@ -0,0 +1,151 @@
+/***************************************************************************
+ *                                  _   _ ____  _
+ *  Project                     ___| | | |  _ \| |
+ *                             / __| | | | |_) | |
+ *                            | (__| |_| |  _ <| |___
+ *                             \___|\___/|_| \_\_____|
+ *
+ * Copyright (C) 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
+ *
+ * This software is licensed as described in the file COPYING, which
+ * you should have received as part of this distribution. The terms
+ * are also available at http://curl.haxx.se/docs/copyright.html.
+ *
+ * You may opt to use, copy, modify, merge, publish, distribute and/or sell
+ * copies of the Software, and permit persons to whom the Software is
+ * furnished to do so, under the terms of the COPYING file.
+ *
+ * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
+ * KIND, either express or implied.
+ *
+ ***************************************************************************/
+#include "curlcheck.h"
+
+#define ENABLE_CURLX_PRINTF
+#include "curlx.h"
+
+#include "hash.h"
+
+#include "memdebug.h" /* LAST include file */
+
+static struct curl_hash hash_static;
+static const int slots = 3;
+
+static void mydtor(void *p)
+{
+  /* Data are statically allocated */
+  (void)p; /* unused */
+}
+
+static CURLcode unit_setup( void )
+{
+  return Curl_hash_init(&hash_static, slots, Curl_hash_str,
+                        Curl_str_key_compare, mydtor);
+}
+
+static void unit_stop( void )
+{
+  Curl_hash_destroy(&hash_static);
+}
+
+UNITTEST_START
+  char key1[] = "key1";
+  char key2[] = "key2b";
+  char key3[] = "key3";
+  char key4[] = "key4";
+  char notakey[] = "notakey";
+  char *nodep;
+  int rc;
+
+  /* Ensure the key1 hashes are as expected in order to test both hash
+     collisions and a full table */
+  fail_unless(Curl_hash_str(key1, strlen(key1), slots) == 1,
+              "hashes are not computed as expected");
+  fail_unless(Curl_hash_str(key2, strlen(key2), slots) == 0,
+              "hashes are not computed as expected");
+  fail_unless(Curl_hash_str(key3, strlen(key3), slots) == 2,
+              "hashes are not computed as expected");
+  fail_unless(Curl_hash_str(key4, strlen(key4), slots) == 1,
+              "hashes are not computed as expected");
+
+  nodep = Curl_hash_add(&hash_static, &key1, strlen(key1), &key1);
+  fail_unless(nodep, "insertion into hash failed");
+  nodep = Curl_hash_pick(&hash_static, &key1, strlen(key1));
+  fail_unless(nodep == key1, "hash retrieval failed");
+
+  nodep = Curl_hash_add(&hash_static, &key2, strlen(key2), &key2);
+  fail_unless(nodep, "insertion into hash failed");
+  nodep = Curl_hash_pick(&hash_static, &key2, strlen(key2));
+  fail_unless(nodep == key2, "hash retrieval failed");
+
+  nodep = Curl_hash_add(&hash_static, &key3, strlen(key3), &key3);
+  fail_unless(nodep, "insertion into hash failed");
+  nodep = Curl_hash_pick(&hash_static, &key3, strlen(key3));
+  fail_unless(nodep == key3, "hash retrieval failed");
+
+  /* The fourth element exceeds the number of slots & collides */
+  nodep = Curl_hash_add(&hash_static, &key4, strlen(key4), &key4);
+  fail_unless(nodep, "insertion into hash failed");
+  nodep = Curl_hash_pick(&hash_static, &key4, strlen(key4));
+  fail_unless(nodep == key4, "hash retrieval failed");
+
+  /* Make sure all elements are still accessible */
+  nodep = Curl_hash_pick(&hash_static, &key1, strlen(key1));
+  fail_unless(nodep == key1, "hash retrieval failed");
+  nodep = Curl_hash_pick(&hash_static, &key2, strlen(key2));
+  fail_unless(nodep == key2, "hash retrieval failed");
+  nodep = Curl_hash_pick(&hash_static, &key3, strlen(key3));
+  fail_unless(nodep == key3, "hash retrieval failed");
+  nodep = Curl_hash_pick(&hash_static, &key4, strlen(key4));
+  fail_unless(nodep == key4, "hash retrieval failed");
+
+  /* Delete the second of two entries in a bucket */
+  rc = Curl_hash_delete(&hash_static, &key4, strlen(key4));
+  fail_unless(rc == 0, "hash delete failed");
+  nodep = Curl_hash_pick(&hash_static, &key1, strlen(key1));
+  fail_unless(nodep == key1, "hash retrieval failed");
+  nodep = Curl_hash_pick(&hash_static, &key4, strlen(key4));
+  fail_unless(!nodep, "hash retrieval should have failed");
+
+  /* Insert that deleted node again */
+  nodep = Curl_hash_add(&hash_static, &key4, strlen(key4), &key4);
+  fail_unless(nodep, "insertion into hash failed");
+  nodep = Curl_hash_pick(&hash_static, &key4, strlen(key4));
+  fail_unless(nodep == key4, "hash retrieval failed");
+
+  /* Delete the first of two entries in a bucket */
+  rc = Curl_hash_delete(&hash_static, &key1, strlen(key1));
+  fail_unless(rc == 0, "hash delete failed");
+  nodep = Curl_hash_pick(&hash_static, &key1, strlen(key1));
+  fail_unless(!nodep, "hash retrieval should have failed");
+  nodep = Curl_hash_pick(&hash_static, &key4, strlen(key4));
+  fail_unless(nodep == key4, "hash retrieval failed");
+
+  /* Delete the remaining one of two entries in a bucket */
+  rc = Curl_hash_delete(&hash_static, &key4, strlen(key4));
+  fail_unless(rc == 0, "hash delete failed");
+  nodep = Curl_hash_pick(&hash_static, &key1, strlen(key1));
+  fail_unless(!nodep, "hash retrieval should have failed");
+  nodep = Curl_hash_pick(&hash_static, &key4, strlen(key4));
+  fail_unless(!nodep, "hash retrieval should have failed");
+
+  /* Delete an already deleted node */
+  rc = Curl_hash_delete(&hash_static, &key4, strlen(key4));
+  fail_unless(rc, "hash delete should have failed");
+
+  /* Replace an existing node */
+  nodep = Curl_hash_add(&hash_static, &key1, strlen(key1), &notakey);
+  fail_unless(nodep, "insertion into hash failed");
+  nodep = Curl_hash_pick(&hash_static, &key1, strlen(key1));
+  fail_unless(nodep == notakey, "hash retrieval failed");
+
+  /* Make sure all remaining elements are still accessible */
+  nodep = Curl_hash_pick(&hash_static, &key2, strlen(key2));
+  fail_unless(nodep == key2, "hash retrieval failed");
+  nodep = Curl_hash_pick(&hash_static, &key3, strlen(key3));
+  fail_unless(nodep == key3, "hash retrieval failed");
+
+  /* Clean up */
+  Curl_hash_clean(&hash_static);
+
+UNITTEST_STOP
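
Reviewer note (not part of the patch): the internal API exercised by unit1603 follows a plain init/add/pick/delete/destroy lifecycle. The minimal sketch below mirrors the Curl_hash_* signatures visible in the diff above; the function and variable names (hash_lifecycle_example, example_dtor) are illustrative only and it assumes libcurl's internal hash.h is on the include path.

/* Illustrative sketch only -- not part of the patch. */
#include <string.h>

#include "hash.h"   /* libcurl-internal header, assumed available */

static void example_dtor(void *p)
{
  (void)p; /* entries in this sketch are not heap-allocated */
}

static int hash_lifecycle_example(void)
{
  struct curl_hash h;
  char key[] = "example";
  char value[] = "payload";
  void *found;

  /* same hash and comparator callbacks as the new unit test */
  if(Curl_hash_init(&h, 7, Curl_hash_str, Curl_str_key_compare, example_dtor))
    return 1;                       /* non-zero means init failed */

  if(!Curl_hash_add(&h, key, strlen(key), value))
    return 1;                       /* NULL means the add failed */

  found = Curl_hash_pick(&h, key, strlen(key));   /* expect found == value */

  if(Curl_hash_delete(&h, key, strlen(key)))
    return 1;                       /* non-zero means delete failed */

  Curl_hash_destroy(&h);            /* frees any remaining entries */
  return (found == value) ? 0 : 1;
}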