Fixes #71.
36. [`--http.retry.count`](#--httpretrycount)
37. [`--http.retry.interval`](#--httpretryinterval)
38. [`--http.user-agent`](#--httpuser-agent)
+ 38. [`--http.max-redirs`](#--httpmax-redirs)
39. [`--http.connect-timeout`](#--httpconnect-timeout)
40. [`--http.transfer-timeout`](#--httptransfer-timeout)
41. [`--http.low-speed-limit`](#--httplow-speed-limit)
The value specified (either by the argument or the default value) is utilized in libcurl's option [CURLOPT_USERAGENT](https://curl.haxx.se/libcurl/c/CURLOPT_USERAGENT.html).
+### `--http.max-redirs`
+
+- **Type:** Integer
+- **Availability:** `argv` and JSON
+- **Default:** 10
+- **Range:** [0, [`UINT_MAX`](http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/limits.h.html)]
+
+Maximum number of redirections to follow per HTTP request. (Hence, the maximum total number of requests issued for a single URI is `--http.max-redirs` + 1.)
+
+Unlike [`curl`'s `--max-redirs`](https://curl.se/docs/manpage.html#--max-redirs), Fort does not provide a way to allow unlimited redirects.
+
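+For example, the limit can be lowered to 5 with `--http.max-redirs=5` on the command line. Assuming the JSON configuration file mirrors the `argv` names (as the other `http.*` fields do), the equivalent configuration would look roughly like this:
+
+```
+{
+	"http": {
+		"max-redirs": 5
+	}
+}
+```
+
+With that value, Fort would issue at most 6 requests for any single URI: the original request plus up to 5 redirects.
+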
### `--http.connect-timeout`
- **Type:** Integer
.RE
.P
+.B \-\-http.max\-redirs=\fIUNSIGNED_INTEGER\fR
+.RS 4
+Maximum number of redirections to follow, per HTTP request.
+.P
+Defaults to 10.
+.RE
+.P
.B \-\-http.connect\-timeout=\fIUNSIGNED_INTEGER\fR
.RS 4
Timeout (in seconds) for the connect phase.
struct {
/* Enables the protocol */
bool enabled;
- /* Deprecated; does nothing. */
+ /* Protocol preference; compared to http.priority */
unsigned int priority;
- /* Synchronization download strategy. */
+ /* Deprecated; does nothing. */
char *strategy;
/* Retry conf, utilized on errors */
struct {
} retry;
char *program;
struct {
- struct string_array flat;
+ struct string_array flat; /* Deprecated */
struct string_array recursive;
} args;
} rsync;
struct {
/* Enables the protocol */
bool enabled;
- /* Deprecated; does nothing. */
+ /* Protocol preference; compared to rsync.priority */
unsigned int priority;
/* Retry conf, utilized on errors */
struct {
/* Interval (in seconds) between each retry */
unsigned int interval;
} retry;
- /* User-Agent header set at requests */
+ /* HTTP User-Agent request header */
char *user_agent;
+ /* Maximum allowed redirects per request (CURLOPT_MAXREDIRS) */
+ unsigned int max_redirs;
/* CURLOPT_CONNECTTIMEOUT for our HTTP transfers. */
unsigned int connect_timeout;
/* CURLOPT_TIMEOUT for our HTTP transfers. */
.type = &gt_string,
.offset = offsetof(struct rpki_config, http.user_agent),
.doc = "User-Agent to use at HTTP requests, eg. Fort Validator Local/1.0",
+ }, {
+ .id = 9012,
+ .name = "http.max-redirs",
+ .type = &gt_uint,
+ .offset = offsetof(struct rpki_config, http.max_redirs),
+ .doc = "Maximum number of redirections to follow, per request.",
+ .min = 0,
+ .max = UINT_MAX,
}, {
.id = 9005,
.name = "http.connect-timeout",
rpki_config.http.retry.count = 1;
rpki_config.http.retry.interval = 4;
rpki_config.http.user_agent = pstrdup(PACKAGE_NAME "/" PACKAGE_VERSION);
+ rpki_config.http.max_redirs = 10;
rpki_config.http.connect_timeout = 30;
rpki_config.http.transfer_timeout = 0;
rpki_config.http.low_speed_limit = 100000;
return rpki_config.http.user_agent;
}
+
+unsigned int
+config_get_max_redirs(void)
+{
+ return rpki_config.http.max_redirs;
+}
+
long
config_get_http_connect_timeout(void)
{
unsigned int config_get_max_cert_depth(void);
enum mode config_get_mode(void);
char const *config_get_http_user_agent(void);
+unsigned int config_get_max_redirs(void);
long config_get_http_connect_timeout(void);
long config_get_http_transfer_timeout(void);
long config_get_http_low_speed_limit(void);
setopt_str(result, CURLOPT_USERAGENT, config_get_http_user_agent());
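+ /* Follow redirects, capped at --http.max-redirs (CURLOPT_MAXREDIRS). */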
+ setopt_long(result, CURLOPT_FOLLOWLOCATION, 1);
+ setopt_long(result, CURLOPT_MAXREDIRS, config_get_max_redirs());
+
setopt_long(result, CURLOPT_CONNECTTIMEOUT,
config_get_http_connect_timeout());
setopt_long(result, CURLOPT_TIMEOUT,
case CURLE_FTP_ACCEPT_TIMEOUT:
error = EAGAIN; /* Retry */
goto end;
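+ /* Redirect limit (--http.max-redirs) exceeded; do not retry. */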
+ case CURLE_TOO_MANY_REDIRECTS:
+ error = -EINVAL;
+ goto end;
default:
error = handle_http_response_code(http_code);
goto end;
error = 0;
goto end;
}
- if (http_code >= 300) {
- /*
- * If you're ever forced to implement this, please remember that
- * a malicious server can send us on a wild chase with infinite
- * redirects, so there needs to be a limit.
- */
- pr_val_err("HTTP result code: %ld. I don't follow redirects; discarding file.",
- http_code);
- error = -EINVAL; /* Do not retry. */
- goto end;
- }
pr_val_debug("HTTP result code: %ld", http_code);
error = 0;
r = 0;
do {
- pr_val_debug("Download attempt #%u...", r);
+ pr_val_debug("Download attempt #%u...", r + 1);
error = http_fetch(src, dst, ims, changed);
switch (error) {
break;
default:
- pr_val_debug("Download failed: %s", strerror(error));
+ pr_val_debug("Download failed.");
return error;
}