9 changes: 9 additions & 0 deletions html/fcguide.html
@@ -204,6 +204,7 @@ <h2> Basics </h2>
Proxy options:
P proxy use (-P proxy:port or -P user:pass@proxy:port) (--proxy <param>)
%f *use proxy for ftp (f0 don't use) (--httpproxy-ftp[=N])
%C *use proxy for ssl (C0 don't use) (--httpproxy-ssl[=N])

Limits options:
rN set the mirror depth to N (* r9999) (--depth[=N])
@@ -407,6 +408,7 @@ <h2> Syntax </h2>
<pre>
w *mirror web sites
%f *use proxy for ftp (f0 don't use)
%C *use proxy for ssl (C0 don't use)
cN number of multiple connections (*c8)
RN number of retries, in case of timeout or non-fatal errors (*R1)
%P *extended parsing, attempt to parse all links, even in unknown tags or Javascript (%P0 don't use)
@@ -442,6 +444,11 @@ <h2> Syntax </h2>
file transfer protocol (FTP) rather than the hypertext transfer protocol
HTTP), go through an ftp proxy server to get them.

<pre><b><i> %C *use proxy for ssl (C0 don't use) </i></b></pre>

<p align=justify> If there are any links to SSL URLs (URLs using HTTPS rather than the hypertext transfer protocol,
HTTP), go through the proxy server to get them.

<pre><b><i> cN number of multiple connections (*c8) </i></b></pre>

<p align=justify> Use up to 8 simultaneous downloads so that at any
@@ -678,8 +685,10 @@ <h3>Proxy Options</h3>
<pre><b><i>Proxy options:
P proxy use (-P proxy:port or -P user:pass@proxy:port)
%f *use proxy for ftp (f0 don't use)
%C *use proxy for ssl (C0 don't use)
</i></b></pre>


<p align=justify> If you are using a standard proxy that doesn't require
a user ID and password, you would do something like this:

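The %C / --httpproxy-ssl switch documented above mirrors the existing %f / --httpproxy-ftp option: when a proxy is configured with -P, links to https:// URLs are also fetched through that proxy, and C0 (or --httpproxy-ssl=0) turns this off again. A hypothetical invocation, with the proxy address and target site made up purely for illustration:

    httrack https://www.example.com/ -O ./mirror -P proxy.example.com:8080 -%C
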
4 changes: 4 additions & 0 deletions man/httrack.1
@@ -22,6 +22,8 @@ httrack \- offline browser : copy websites to a local directory
] [
.B \-%f, \-\-httpproxy\-ftp[=N]
] [
.B \-%C, \-\-httpproxy\-ssl[=N]
] [
.B \-%b, \-\-bind
] [
.B \-rN, \-\-depth[=N]
@@ -230,6 +232,8 @@ mirror ALL links located in the first level pages (mirror links) (\-\-mirrorlink
proxy use (\-P proxy:port or \-P user:pass@proxy:port) (\-\-proxy <param>)
.IP \-%f
*use proxy for ftp (f0 don t use) (\-\-httpproxy\-ftp[=N])
.IP \-%C
*use proxy for ssl (C0 don t use) (\-\-httpproxy\-ssl[=N])
.IP \-%b
use this local hostname to make/send requests (\-%b hostname) (\-\-bind <param>)

1 change: 1 addition & 0 deletions src/htsalias.c
@@ -85,6 +85,7 @@ const char *hts_optalias[][4] = {
{"proxy", "-P", "param1", "proxy name:port"},
{"bind", "-%b", "param1", "hostname to bind"},
{"httpproxy-ftp", "-%f", "param", ""},
{"httpproxy-ssl", "-%C", "param", ""},
{"depth", "-r", "param", ""}, {"recurse-levels", "-r", "param", ""},
{"ext-depth", "-%e", "param", ""},
{"max-files", "-m", "param", ""},
2 changes: 1 addition & 1 deletion src/htsback.c
@@ -1955,7 +1955,7 @@ int back_add(struct_back * sback, httrackp * opt, cache_back * cache, const char
}
}
#if HTS_USEOPENSSL
else if (strfield(back[p].url_adr, "https://")) { // let's rock
else if (strfield(back[p].url_adr, "https://") && ! opt->https_proxy) { // let's rock
back[p].r.ssl = 1;
// back[p].r.ssl_soc = NULL;
back[p].r.ssl_con = NULL;
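The one-line change above means back_add() only sets up a direct TLS session (back[p].r.ssl = 1) for an https:// URL when the new HTTPS-proxy mode is off; with opt->https_proxy set, the request is left to the plain connection to the proxy instead. A minimal standalone sketch of that decision, using strncasecmp() as a stand-in for HTTrack's strfield() prefix test and a reduced option struct:

    /* Sketch of the decision made in back_add(): enable direct TLS only for
     * https:// URLs that are NOT routed through the HTTP proxy.
     * strncasecmp() stands in for HTTrack's strfield(); the struct is reduced. */
    #include <stdio.h>
    #include <strings.h>

    struct opts { int https_proxy; };   /* stand-in for the relevant httrackp field */

    static int wants_direct_tls(const char *url, const struct opts *opt) {
        return strncasecmp(url, "https://", 8) == 0 && !opt->https_proxy;
    }

    int main(void) {
        struct opts opt = { 0 };
        printf("%d\n", wants_direct_tls("https://example.com/", &opt));  /* 1: direct TLS */
        opt.https_proxy = 1;                                             /* as set by -%C */
        printf("%d\n", wants_direct_tls("https://example.com/", &opt));  /* 0: go via proxy */
        return 0;
    }
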
8 changes: 8 additions & 0 deletions src/htscoremain.c
@@ -1530,6 +1530,13 @@ static int hts_main_internal(int argc, char **argv, httrackp * opt) {
com++;
}
break; // pas de compression
case 'C':
opt->https_proxy = 1;
if (*(com + 1) == '0') {
opt->https_proxy = 0;
com++;
}
break;
case 'f':
opt->ftp_proxy = 1;
if (*(com + 1) == '0') {
@@ -2669,6 +2676,7 @@ static int hts_main_internal(int argc, char **argv, httrackp * opt) {
na++;
opt->proxy.active = 1;
// Rechercher MAIS en partant de la fin à cause de user:pass@proxy:port
// trans: Search BUT starting from the end because of user:pass@proxy:port
a = argv[na] + strlen(argv[na]) - 1;
// a=strstr(argv[na],":"); // port
while((a > argv[na]) && (*a != ':') && (*a != '@'))
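The new case 'C' above follows the same convention as the neighbouring case 'f': the bare flag enables the option and a trailing 0 (-%C0) disables it, consuming the extra character. A simplified standalone sketch of that parsing pattern (not HTTrack's real option walker):

    /* Simplified sketch of the -%C / -%C0 parsing convention from hts_main_internal(). */
    #include <stdio.h>

    int main(void) {
        const char *com = "C0";    /* characters following "-%" on the command line */
        int https_proxy = 0;

        while (*com) {
            switch (*com) {
            case 'C':
                https_proxy = 1;           /* -%C : route https:// through the proxy */
                if (*(com + 1) == '0') {
                    https_proxy = 0;       /* -%C0 : keep direct https connections */
                    com++;                 /* consume the '0' */
                }
                break;
            default:
                break;
            }
            com++;
        }
        printf("https_proxy=%d\n", https_proxy);   /* prints https_proxy=0 for "C0" */
        return 0;
    }
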
1 change: 1 addition & 0 deletions src/htshelp.c
@@ -508,6 +508,7 @@ void help(const char *app, int more) {
infomsg("");
infomsg("Proxy options:");
infomsg(" P proxy use (-P proxy:port or -P user:pass@proxy:port)");
infomsg(" %C *use proxy for ssl (C0 don't use)");
infomsg(" %f *use proxy for ftp (f0 don't use)");
infomsg(" %b use this local hostname to make/send requests (-%b hostname)");
infomsg("");
17 changes: 14 additions & 3 deletions src/htslib.c
@@ -681,7 +681,7 @@ T_SOC http_xfopen(httrackp * opt, int mode, int treat, int waitconnect,
if (retour) {
if ((!(retour->req.proxy.active))
|| ((strcmp(adr, "file://") == 0)
|| (strncmp(adr, "https://", 8) == 0)
|| (strncmp(adr, "https://", 8) == 0 && ! opt->https_proxy)
)
) { /* pas de proxy, ou non utilisable ici */
soc = newhttp(opt, adr, retour, -1, waitconnect);
@@ -951,13 +951,14 @@ int http_sendhead(httrackp * opt, t_cookie * cookie, int mode,
}

// si on gère un proxy, il faut une Absolute URI: on ajoute avant http://www.adr.dom
if (retour->req.proxy.active && (strncmp(adr, "https://", 8) != 0)) {
BOOL is_https = strncmp(adr, "https://", 8) == 0;
if (retour->req.proxy.active && (! is_https || opt->https_proxy)) {
if (!link_has_authority(adr)) { // default http
#if HDEBUG
printf("Proxy Use: for %s%s proxy %d port %d\n", adr, fil,
retour->req.proxy.name, retour->req.proxy.port);
#endif
print_buffer(&bstr, "http://%s", jump_identification_const(adr));
print_buffer(&bstr, is_https ? "https://%s" : "http://%s", jump_identification_const(adr));
} else { // ftp:// en proxy http
#if HDEBUG
printf("Proxy Use for ftp: for %s%s proxy %d port %d\n", adr, fil,
@@ -2018,6 +2019,12 @@ LLint http_xfread1(htsblk * r, int bufl) {
// en cas de moved xx, dans location
// abandonne désormais au bout de 30 secondes (aurevoir les sites
// qui nous font poireauter 5 heures..) -> -2=timeout
//trans:
// test if a URL (validity, header, size)
// returns 200 or the error code (404=NOT FOUND, etc)
// in case of moved xx, in location
// now give up after 30 seconds (goodbye sites
// which make us hang around for 5 hours..) -> -2=timeout
htsblk http_test(httrackp * opt, const char *adr, const char *fil, char *loc) {
T_SOC soc;
htsblk retour;
@@ -2123,6 +2130,9 @@ htsblk http_test(httrackp * opt, const char *adr, const char *fil, char *loc) {
// Crée un lien (http) vers une adresse internet iadr
// retour: structure (adresse, taille, message si erreur (si !adr))
// peut ouvrir avec des connect() non bloquants: waitconnect=0/1
// trans: Create a link (http) to an iadr internet address
// return: structure (address, size, message if error (if !adr))
// can open with non-blocking connect(): waitconnect=0/1
T_SOC newhttp(httrackp * opt, const char *_iadr, htsblk * retour, int port,
int waitconnect) {
T_SOC soc; // descipteur de la socket
@@ -5513,6 +5523,7 @@ HTSEXT_API httrackp *hts_create_opt(void) {
opt->urlhack = 1; // url hack (normalizer)
StringCopy(opt->footer, HTS_DEFAULT_FOOTER);
opt->ftp_proxy = 1; // proxy http pour ftp
opt->https_proxy = 0; // proxy http pour https
opt->convert_utf8 = 1; // convert html to UTF-8
StringCopy(opt->filelist, "");
StringCopy(opt->lang_iso, "en, *");
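In http_sendhead(), a request that goes through the proxy has to carry an absolute URI in the request line; the new is_https flag keeps the https scheme instead of always prefixing http://, and the http_xfopen() hunk above likewise skips the proxy-bypass path for https:// only when opt->https_proxy is unset. A rough illustration of the request-line formatting, with snprintf() standing in for print_buffer() and made-up host/path values:

    /* Rough illustration of the absolute-URI request line built in http_sendhead()
     * when a proxy is active; snprintf() stands in for print_buffer() and the
     * host/path values are made up. */
    #include <stdio.h>

    int main(void) {
        const char *host = "www.example.com";
        const char *fil = "/index.html";
        int proxy_active = 1;
        int is_https = 1;      /* strncmp(adr, "https://", 8) == 0 in the real code */
        int https_proxy = 1;   /* opt->https_proxy, i.e. -%C was given */
        char line[512];

        if (proxy_active && (!is_https || https_proxy)) {
            /* proxy request: absolute URI, scheme chosen from is_https */
            snprintf(line, sizeof(line), "GET %s://%s%s HTTP/1.1",
                     is_https ? "https" : "http", host, fil);
        } else {
            /* direct request: origin-form path only */
            snprintf(line, sizeof(line), "GET %s HTTP/1.1", fil);
        }
        puts(line);            /* GET https://www.example.com/index.html HTTP/1.1 */
        return 0;
    }
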
1 change: 1 addition & 0 deletions src/htsopt.h
@@ -370,6 +370,7 @@ struct httrackp {
int maxcache; // maximum en mémoire au niveau du cache (backing)
//int maxcache_anticipate; // maximum de liens à anticiper (majorant)
int ftp_proxy; // proxy http pour ftp
int https_proxy; // proxy http pour https
String filelist; // fichier liste URL à inclure
String urllist; // fichier liste de filtres à inclure
htsfilters filters; // contient les pointeurs pour les filtres