author     Daniel Stenberg <daniel@haxx.se>    2020-06-09 16:08:11 +0200
committer  Daniel Stenberg <daniel@haxx.se>    2020-06-10 08:49:17 +0200
commit     eab2f95c0de94e9816c8a6110d20673761dd97a4 (patch)
tree       422e27ecb86fb1f74de1895b9a84c0a181e2799c /tests
parent     f54b6c4bc2f745fa32014819788c90126121729e (diff)
wording: avoid blacklist/whitelist stereotypes
Instead of discussing if there's value or meaning (implied or not) in the colors, let's use words without the same possibly negative associations.

Closes #5546
Diffstat (limited to 'tests')
-rw-r--r--   tests/data/test1550       2
-rw-r--r--   tests/data/test1901       4
-rw-r--r--   tests/libtest/lib1900.c   38
-rwxr-xr-x   tests/manpage-scan.pl     4
4 files changed, 24 insertions, 24 deletions
diff --git a/tests/data/test1550 b/tests/data/test1550
index bbf60f98a..c305dd6f2 100644
--- a/tests/data/test1550
+++ b/tests/data/test1550
@@ -20,7 +20,7 @@ lib1550
</tool>
<name>
-verify setting pipeling blacklisting options
+verify setting pipeling blocklisting options
</name>
<command>
http://%HOSTIP:%NOLISTENPORT/1550
diff --git a/tests/data/test1901 b/tests/data/test1901
index 83cdf7290..793bd667a 100644
--- a/tests/data/test1901
+++ b/tests/data/test1901
@@ -40,13 +40,13 @@ http
lib1900
</tool>
<name>
-HTTP GET using pipelining, blacklisted site
+HTTP GET using pipelining, blocklisted site
</name>
<command>
http://%HOSTIP:%HTTPPIPEPORT/ log/urls1901.txt
</command>
<file name="log/urls1901.txt">
-blacklist_site 127.0.0.1:%HTTPPIPEPORT
+blocklist_site 127.0.0.1:%HTTPPIPEPORT
0 1k.txt
1000 100k.txt
0 1k.txt
diff --git a/tests/libtest/lib1900.c b/tests/libtest/lib1900.c
index 2a70f8eba..fd7e5bc62 100644
--- a/tests/libtest/lib1900.c
+++ b/tests/libtest/lib1900.c
@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
- * Copyright (C) 2013 - 2019, Linus Nielsen Feltzing, <linus@haxx.se>
+ * Copyright (C) 2013 - 2020, Linus Nielsen Feltzing, <linus@haxx.se>
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -27,16 +27,16 @@
#define TEST_HANG_TIMEOUT 60 * 1000
#define MAX_URLS 200
-#define MAX_BLACKLIST 20
+#define MAX_BLOCKLIST 20
static int urltime[MAX_URLS];
static char *urlstring[MAX_URLS];
static CURL *handles[MAX_URLS];
-static char *site_blacklist[MAX_BLACKLIST];
-static char *server_blacklist[MAX_BLACKLIST];
+static char *site_blocklist[MAX_BLOCKLIST];
+static char *server_blocklist[MAX_BLOCKLIST];
static int num_handles;
-static int blacklist_num_servers;
-static int blacklist_num_sites;
+static int blocklist_num_servers;
+static int blocklist_num_sites;
static size_t
write_callback(void *contents, size_t size, size_t nmemb, void *userp)
@@ -55,8 +55,8 @@ static int parse_url_file(const char *filename)
char buf[200];
num_handles = 0;
- blacklist_num_sites = 0;
- blacklist_num_servers = 0;
+ blocklist_num_sites = 0;
+ blocklist_num_servers = 0;
f = fopen(filename, "rb");
if(!f)
@@ -70,9 +70,9 @@ static int parse_url_file(const char *filename)
continue;
}
- if(fscanf(f, "blacklist_site %199s\n", buf)) {
- site_blacklist[blacklist_num_sites] = strdup(buf);
- blacklist_num_sites++;
+ if(fscanf(f, "blocklist_site %199s\n", buf)) {
+ site_blocklist[blocklist_num_sites] = strdup(buf);
+ blocklist_num_sites++;
continue;
}
@@ -80,8 +80,8 @@ static int parse_url_file(const char *filename)
}
fclose(f);
- site_blacklist[blacklist_num_sites] = NULL;
- server_blacklist[blacklist_num_servers] = NULL;
+ site_blocklist[blocklist_num_sites] = NULL;
+ server_blocklist[blocklist_num_servers] = NULL;
return num_handles;
}
@@ -91,11 +91,11 @@ static void free_urls(void)
for(i = 0; i < num_handles; i++) {
Curl_safefree(urlstring[i]);
}
- for(i = 0; i < blacklist_num_servers; i++) {
- Curl_safefree(server_blacklist[i]);
+ for(i = 0; i < blocklist_num_servers; i++) {
+ Curl_safefree(server_blocklist[i]);
}
- for(i = 0; i < blacklist_num_sites; i++) {
- Curl_safefree(site_blacklist[i]);
+ for(i = 0; i < blocklist_num_sites; i++) {
+ Curl_safefree(site_blocklist[i]);
}
}
@@ -159,8 +159,8 @@ int test(char *URL)
multi_setopt(m, CURLMOPT_CONTENT_LENGTH_PENALTY_SIZE, 15000L);
multi_setopt(m, CURLMOPT_CHUNK_LENGTH_PENALTY_SIZE, 10000L);
- multi_setopt(m, CURLMOPT_PIPELINING_SITE_BL, site_blacklist);
- multi_setopt(m, CURLMOPT_PIPELINING_SERVER_BL, server_blacklist);
+ multi_setopt(m, CURLMOPT_PIPELINING_SITE_BL, site_blocklist);
+ multi_setopt(m, CURLMOPT_PIPELINING_SERVER_BL, server_blocklist);
last_handle_add = tutil_tvnow();
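For context, the two CURLMOPT_PIPELINING_*_BL options exercised above each take a NULL-terminated array of strings (host:port entries for the site list, server name strings for the server list). A minimal standalone sketch of that usage, with illustrative list entries rather than the test's real data, might look like:

    #include <curl/curl.h>

    int main(void)
    {
      CURLM *m;
      /* the arrays must remain valid for as long as the multi handle uses them */
      char *site_blocklist[]   = { "example.com:80", NULL };
      char *server_blocklist[] = { "Microsoft-IIS/6.0", "nginx/0.8.54", NULL };

      curl_global_init(CURL_GLOBAL_ALL);
      m = curl_multi_init();

      /* pipelining block lists; curl versions that dropped HTTP pipelining
         accept these options but they have no effect there */
      curl_multi_setopt(m, CURLMOPT_PIPELINING_SITE_BL, site_blocklist);
      curl_multi_setopt(m, CURLMOPT_PIPELINING_SERVER_BL, server_blocklist);

      curl_multi_cleanup(m);
      curl_global_cleanup();
      return 0;
    }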
diff --git a/tests/manpage-scan.pl b/tests/manpage-scan.pl
index ba6577c18..10d2d15a1 100755
--- a/tests/manpage-scan.pl
+++ b/tests/manpage-scan.pl
@@ -6,7 +6,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
-# Copyright (C) 2016 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
+# Copyright (C) 2016 - 2020, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@@ -130,7 +130,7 @@ scanmanpage("$root/docs/libcurl/curl_easy_setopt.3", @curlopt);
scanmanpage("$root/docs/libcurl/curl_easy_getinfo.3", @curlinfo);
scanmanpage("$root/docs/libcurl/curl_multi_setopt.3", @curlmopt);
-# using this hash array, we can whitelist specific options
+# using this hash array, we can skip specific options
my %opts = (
# pretend these --no options exists in tool_getparam.c
'--no-alpn' => 1,