diff -urp wget-1.10.2/src/connect.c wget-1.10.2ultra/src/connect.c
--- wget-1.10.2/src/connect.c	2005-06-19 17:47:10.000000000 +0400
+++ wget-1.10.2ultra/src/connect.c	2006-12-04 03:39:32.000000000 +0300
@@ -264,7 +264,7 @@ connect_to_ip (const ip_address *ip, int
 
   /* If PRINT is non-NULL, print the "Connecting to..." line, with
      PRINT being the host name we're connecting to.  */
-  if (print)
+  if (opt.debug && print)
     {
       const char *txt_addr = pretty_print_address (ip);
       if (print && 0 != strcmp (print, txt_addr))
@@ -331,7 +331,7 @@ connect_to_ip (const ip_address *ip, int
 
   /* Success. */
   assert (sock >= 0);
-  if (print)
+  if (opt.debug && print)
     logprintf (LOG_VERBOSE, _("connected.\n"));
   DEBUGP (("Created socket %d.\n", sock));
   return sock;
@@ -362,7 +362,7 @@ connect_to_host (const char *host, int p
   int i, start, end;
   int sock;
 
-  struct address_list *al = lookup_host (host, 0);
+  struct address_list *al = lookup_host (host, LH_SILENT);
 
  retry:
   if (!al)
diff -urp wget-1.10.2/src/cookies.c wget-1.10.2ultra/src/cookies.c
--- wget-1.10.2/src/cookies.c	2005-06-27 19:35:45.000000000 +0400
+++ wget-1.10.2ultra/src/cookies.c	2006-12-04 03:39:32.000000000 +0300
@@ -856,6 +856,8 @@ cookie_handle_set_cookie (struct cookie_
      simply prepend slash to PATH.  */
   PREPEND_SLASH (path);
 
+  logprintf (LOG_NOTQUIET, "Set-Cookie: %s\n", set_cookie);
+
   cookie = parse_set_cookies (set_cookie, update_cookie_field, 0);
   if (!cookie)
     goto out;
diff -urp wget-1.10.2/src/http.c wget-1.10.2ultra/src/http.c
--- wget-1.10.2/src/http.c	2005-08-09 02:54:16.000000000 +0400
+++ wget-1.10.2ultra/src/http.c	2006-12-04 04:01:11.000000000 +0300
@@ -77,6 +77,7 @@ extern int errno;
 
 extern char *version_string;
 extern SUM_SIZE_INT total_downloaded_bytes;
+extern SUM_SIZE_INT total_cached_bytes;
 
 extern FILE *output_stream;
 extern int output_stream_regular;
@@ -128,6 +129,7 @@ static struct cookie_jar *wget_cookie_ja
 #define HTTP_STATUS_NOT_IMPLEMENTED	501
 #define HTTP_STATUS_BAD_GATEWAY	502
 #define HTTP_STATUS_UNAVAILABLE	503
+#define HTTP_STATUS_GATEWAY_TIMEOUT	504
 
 enum rp {
   rel_none, rel_name, rel_value, rel_both
@@ -489,7 +491,7 @@ read_http_response_head (int fd)
 
 struct response {
   /* The response data. */
-  const char *data;
+  char *data;
 
   /* The array of pointers that indicate where each header starts.
      For example, given this HTTP response:
@@ -518,7 +520,7 @@ struct response {
    resp_header_*.  */
 
 static struct response *
-resp_new (const char *head)
+resp_new (char *head)
 {
   const char *hdr;
   int count, size;
@@ -998,7 +1000,7 @@ persistent_available_p (const char *host
       invalidate_persistent ();
       return 0;
     }
-  al = lookup_host (host, 0);
+  al = lookup_host (host, LH_SILENT);
   if (!al)
     {
       *host_lookup_failed = 1;
@@ -1741,12 +1743,23 @@ gethttp (struct url *u, struct http_stat
 			       &entity_length))
 	contrange = first_byte_pos;
     }
-  resp_free (resp);
 
   /* 20x responses are counted among successful by default.  */
   if (H_20X (statcode))
     *dt |= RETROKF;
 
+  if (statcode == HTTP_STATUS_INTERNAL ||
+      statcode == HTTP_STATUS_BAD_GATEWAY ||
+      statcode == HTTP_STATUS_UNAVAILABLE ||
+      statcode == HTTP_STATUS_GATEWAY_TIMEOUT)
+    {
+      if (!opt.server_response)
+	print_server_response (resp, "  ");
+      xfree_null (type);
+      CLOSE_INVALIDATE (sock);
+      return SERVERROR;
+    }
+
   /* Return if redirected.  */
   if (H_REDIRECTED (statcode) || statcode == HTTP_STATUS_MULTIPLE_CHOICES)
     {
@@ -1763,6 +1776,8 @@ gethttp (struct url *u, struct http_stat
 		     _("Location: %s%s\n"),
 		     hs->newloc ? escnonprint_uri (hs->newloc) : _("unspecified"),
 		     hs->newloc ? _(" [following]") : "");
_(" [following]") : ""); + if (!opt.debug && !opt.server_response) + print_server_response (resp, " "); if (keep_alive && !head_only && skip_short_body (sock, contlen)) CLOSE_FINISH (sock); else @@ -1772,6 +1787,8 @@ gethttp (struct url *u, struct http_stat } } + resp_free (resp); + /* If content-type is not given, assume text/html. This is because of the multitude of broken CGI's that "forget" to generate the content-type. */ @@ -2052,6 +2069,8 @@ http_loop (struct url *u, char **newloc, if (opt.noclobber && file_exists_p (*hstat.local_file)) { + total_cached_bytes += file_size(*hstat.local_file); + opt.numcurls++; /* If opt.noclobber is turned on and file already exists, do not retrieve the file */ logprintf (LOG_VERBOSE, _("\ @@ -2205,7 +2224,7 @@ File `%s' already there; not retrieving. { case HERR: case HEOF: case CONSOCKERR: case CONCLOSED: case CONERROR: case READERR: case WRITEFAILED: - case RANGEERR: case FOPEN_EXCL_ERR: + case RANGEERR: case FOPEN_EXCL_ERR: case SERVERROR: /* Non-fatal errors continue executing the loop, which will bring them to "while" statement at the end, to judge whether the number of tries was exceeded. */ diff -urp wget-1.10.2/src/init.c wget-1.10.2ultra/src/init.c --- wget-1.10.2/src/init.c 2005-08-09 02:54:16.000000000 +0400 +++ wget-1.10.2ultra/src/init.c 2006-12-04 03:39:32.000000000 +0300 @@ -217,6 +217,7 @@ static struct { { "readtimeout", &opt.read_timeout, cmd_time }, { "reclevel", &opt.reclevel, cmd_number_inf }, { "recursive", NULL, cmd_spec_recursive }, + { "redirs", &opt.redirs, cmd_number_inf }, { "referer", &opt.referer, cmd_string }, { "reject", &opt.rejects, cmd_vector }, { "relativeonly", &opt.relative_only, cmd_boolean }, @@ -231,9 +232,11 @@ static struct { { "secureprotocol", &opt.secure_protocol, cmd_spec_secure_protocol }, #endif { "serverresponse", &opt.server_response, cmd_boolean }, + { "showurls", &opt.showurls, cmd_boolean }, { "spanhosts", &opt.spanhost, cmd_boolean }, { "spider", &opt.spider, cmd_boolean }, { "strictcomments", &opt.strict_comments, cmd_boolean }, + { "stripquery", &opt.strip_query, cmd_boolean }, { "timeout", NULL, cmd_spec_timeout }, { "timestamping", &opt.timestamping, cmd_boolean }, { "tries", &opt.ntry, cmd_number_inf }, @@ -285,6 +288,7 @@ defaults (void) opt.cookies = 1; opt.verbose = -1; opt.ntry = 20; + opt.redirs = 20; opt.reclevel = 5; opt.add_hostdir = 1; opt.netrc = 1; @@ -496,8 +500,7 @@ initialize (void) #endif /* Override it with your own, if one exists. */ file = wgetrc_file_name (); - if (!file) - return; + if (file) { /* #### We should canonicalize `file' and SYSTEM_WGETRC with something like realpath() before comparing them with `strcmp' */ #ifdef SYSTEM_WGETRC @@ -510,11 +513,24 @@ initialize (void) else #endif ok &= run_wgetrc (file); + } + { // lets load current dir .wgetrc + char buf1[PATH_MAX] = { 0 }, buf2[PATH_MAX] = { 0 }; + char *p1, *p2; + + p1 = realpath(file, buf1); + p2 = realpath(".wgetrc", buf2); + if (p1 && p2 && strcmp(p1, p2)) { + if (file_exists_p (".wgetrc")) + ok &= run_wgetrc (".wgetrc"); + } + } /* If there were errors processing either `.wgetrc', abort. 
   if (!ok)
     exit (2);
 
+  if (file) xfree (file);
   return;
 }
 
diff -urp wget-1.10.2/src/main.c wget-1.10.2ultra/src/main.c
--- wget-1.10.2/src/main.c	2005-07-01 05:20:30.000000000 +0400
+++ wget-1.10.2ultra/src/main.c	2006-12-04 03:54:32.000000000 +0300
@@ -75,6 +75,7 @@ extern int errno;
 struct options opt;
 
 extern SUM_SIZE_INT total_downloaded_bytes;
+extern SUM_SIZE_INT total_cached_bytes;
 extern char *version_string;
 
 extern struct cookie_jar *wget_cookie_jar;
@@ -252,12 +253,14 @@ struct cmdline_option option_data[] =
     { "save-headers", 0, OPT_BOOLEAN, "saveheaders", -1 },
     { IF_SSL ("secure-protocol"), 0, OPT_VALUE, "secureprotocol", -1 },
     { "server-response", 'S', OPT_BOOLEAN, "serverresponse", -1 },
+    { "show-urls", 0, OPT_BOOLEAN, "showurls", -1 },
     { "span-hosts", 'H', OPT_BOOLEAN, "spanhosts", -1 },
     { "spider", 0, OPT_BOOLEAN, "spider", -1 },
     { "strict-comments", 0, OPT_BOOLEAN, "strictcomments", -1 },
     { "timeout", 'T', OPT_VALUE, "timeout", -1 },
     { "timestamping", 'N', OPT_BOOLEAN, "timestamping", -1 },
     { "tries", 't', OPT_VALUE, "tries", -1 },
+    { "redirs", 'j', OPT_VALUE, "redirs", -1 },
     { "user", 0, OPT_VALUE, "user", -1 },
     { "user-agent", 'U', OPT_VALUE, "useragent", -1 },
     { "verbose", 'v', OPT_BOOLEAN, "verbose", -1 },
@@ -969,10 +972,13 @@ Can't timestamp and not clobber old file
       || (opt.input_filename && total_downloaded_bytes != 0))
     {
       logprintf (LOG_NOTQUIET,
-		 _("\nFINISHED --%s--\nDownloaded: %s bytes in %d files\n"),
+		 _("\nFINISHED --%s--\nDownloaded: %s bytes in %d files"),
 		 time_str (NULL),
 		 with_thousand_seps_sum (total_downloaded_bytes),
 		 opt.numurls);
+      logprintf (LOG_NOTQUIET,
+		 _(", cached %s bytes, in %d files\n"),
+		 with_thousand_seps_sum (total_cached_bytes), opt.numcurls);
       /* Print quota warning, if exceeded.  */
       if (opt.quota && total_downloaded_bytes > opt.quota)
 	logprintf (LOG_NOTQUIET,
diff -urp wget-1.10.2/src/openssl.c wget-1.10.2ultra/src/openssl.c
--- wget-1.10.2/src/openssl.c	2005-08-26 14:44:54.000000000 +0400
+++ wget-1.10.2ultra/src/openssl.c	2006-12-04 03:39:32.000000000 +0300
@@ -449,6 +449,13 @@ ssl_check_certificate (int fd, const cha
   vresult = SSL_get_verify_result (ssl);
   if (vresult != X509_V_OK)
     {
+      char *subject = X509_NAME_oneline (X509_get_subject_name (cert), 0, 0);
+      char *issuer = X509_NAME_oneline (X509_get_issuer_name (cert), 0, 0);
+      logprintf (LOG_NOTQUIET, "certificate:\n  subject: %s\n  issuer: %s\n",
+		 escnonprint (subject), escnonprint (issuer));
+      OPENSSL_free (subject);
+      OPENSSL_free (issuer);
+
       /* #### We might want to print saner (and translatable) error
 	 messages for several frequently encountered errors.  The
 	 candidates would include
diff -urp wget-1.10.2/src/options.h wget-1.10.2ultra/src/options.h
--- wget-1.10.2/src/options.h	2005-08-09 02:54:16.000000000 +0400
+++ wget-1.10.2ultra/src/options.h	2006-12-04 03:39:32.000000000 +0300
@@ -32,10 +32,12 @@ struct options
   int verbose;			/* Are we verbose? */
   int quiet;			/* Are we quiet? */
   int ntry;			/* Number of tries per URL */
+  int redirs;			/* Number of redirects */
   int retry_connrefused;	/* Treat CONNREFUSED as non-fatal. */
   int background;		/* Whether we should work in background. */
   int ignore_length;		/* Do we heed content-length at all? */
   int recursive;		/* Are we recursive? */
+  int showurls;			/* show queued urls */
   int spanhost;			/* Do we span across hosts in
 				   recursion? */
   int relative_only;		/* Follow only relative links. */
@@ -116,6 +118,7 @@ struct options
 				   store. */
   int numurls;			/* Number of successfully downloaded
 				   URLs */
+  int numcurls;			/* Number of successfully cached urls */
 
   int server_response;		/* Do we print server response? */
   int save_headers;		/* Do we save headers together with
@@ -216,6 +219,7 @@ struct options
     prefer_none
   } prefer_family;		/* preferred address family when more
 				   than one type is available */
+  int strip_query;		/* do not dl url if already downloaded w/o "?query" part */
 };
 
 extern struct options opt;
diff -urp wget-1.10.2/src/recur.c wget-1.10.2ultra/src/recur.c
--- wget-1.10.2/src/recur.c	2005-06-25 18:47:52.000000000 +0400
+++ wget-1.10.2ultra/src/recur.c	2006-12-04 03:54:50.000000000 +0300
@@ -60,6 +60,7 @@ extern int errno;
 extern char *version_string;
 extern SUM_SIZE_INT total_downloaded_bytes;
+SUM_SIZE_INT total_cached_bytes;
 
 extern struct hash_table *dl_url_file_map;
 extern struct hash_table *downloaded_html_set;
@@ -72,6 +73,7 @@ struct queue_element {
   int depth;			/* the depth */
   unsigned int html_allowed :1;	/* whether the document is allowed to
 				   be treated as HTML. */
+  unsigned int link_inline :1;
 
   struct queue_element *next;	/* next element in queue */
 };
@@ -105,13 +107,21 @@ url_queue_delete (struct url_queue *queu
 
 static void
 url_enqueue (struct url_queue *queue,
-	     const char *url, const char *referer, int depth, int html_allowed)
+	     const char *url, const char *referer, int depth, int html_allowed,
+	     int link_inline)
 {
-  struct queue_element *qel = xnew (struct queue_element);
+  struct queue_element *qel;
+
+  if (opt.showurls) {
+    printf ("+ENQ%c/%d %s\n", link_inline ? 'h' : 't', depth, url);
+  }
+
+  qel = xnew (struct queue_element);
   qel->url = url;
   qel->referer = referer;
   qel->depth = depth;
   qel->html_allowed = html_allowed;
+  qel->link_inline = link_inline;
   qel->next = NULL;
 
   ++queue->count;
@@ -121,6 +131,12 @@ url_enqueue (struct url_queue *queue,
   DEBUGP (("Enqueuing %s at depth %d\n", url, depth));
   DEBUGP (("Queue count %d, maxcount %d.\n", queue->count, queue->maxcount));
 
+  if (link_inline) {
+    qel->next = queue->head;
+    queue->head = qel;
+    if (!queue->tail)
+      queue->tail = queue->head;
+  } else {
   if (queue->tail)
     queue->tail->next = qel;
   queue->tail = qel;
@@ -128,6 +144,7 @@ url_enqueue (struct url_queue *queue,
   if (!queue->head)
     queue->head = queue->tail;
 }
+}
 
 /* Take a URL out of the queue.  Return 1 if this operation
    succeeded, or 0 if the queue is empty. */
@@ -135,7 +152,7 @@ url_enqueue (struct url_queue *queue,
 static int
 url_dequeue (struct url_queue *queue,
 	     const char **url, const char **referer, int *depth,
-	     int *html_allowed)
+	     int *html_allowed, int *link_inline)
 {
   struct queue_element *qel = queue->head;
 
@@ -150,6 +167,7 @@ url_dequeue (struct url_queue *queue,
   *referer = qel->referer;
   *depth = qel->depth;
   *html_allowed = qel->html_allowed;
+  *link_inline = qel->link_inline;
 
   --queue->count;
 
@@ -214,15 +232,18 @@ retrieve_tree (const char *start_url)
 
   /* Enqueue the starting URL.  Use start_url_parsed->url rather than
      just URL so we enqueue the canonical form of the URL.  */
-  url_enqueue (queue, xstrdup (start_url_parsed->url), NULL, 0, 1);
+  url_enqueue (queue, xstrdup (start_url_parsed->url), NULL, /*0*/1, 1, 0);
   string_set_add (blacklist, start_url_parsed->url);
 
   while (1)
     {
       int descend = 0;
       char *url, *referer, *file = NULL;
-      int depth, html_allowed;
+      int depth, html_allowed, link_inline;
       int dash_p_leaf_HTML = 0;
 
+      SUM_SIZE_INT last_downloaded_bytes = total_downloaded_bytes;
+      int real_download = 0;
+
       if (opt.quota && total_downloaded_bytes > opt.quota)
 	break;
@@ -233,7 +254,7 @@ retrieve_tree (const char *start_url)
 
       if (!url_dequeue (queue,
 			(const char **)&url, (const char **)&referer,
-			&depth, &html_allowed))
+			&depth, &html_allowed, &link_inline))
 	break;
 
       /* ...and download it.  Note that this download is in most cases
@@ -261,10 +282,16 @@ retrieve_tree (const char *start_url)
 	  int dt = 0;
 	  char *redirected = NULL;
 	  int oldrec = opt.recursive;
+	  double wait_saved;
 
 	  opt.recursive = 0;
+	  wait_saved = opt.wait;
+	  if (link_inline)
+	    opt.wait = 0;
 	  status = retrieve_url (url, &file, &redirected, referer, &dt);
+	  opt.wait = wait_saved;
 	  opt.recursive = oldrec;
+	  real_download = (last_downloaded_bytes != total_downloaded_bytes);
 
 	  if (html_allowed && file && status == RETROK
 	      && (dt & RETROKF) && (dt & TEXTHTML))
@@ -280,11 +307,12 @@ retrieve_tree (const char *start_url)
 	      if (!descend_redirect_p (redirected, url, depth,
 				       start_url_parsed, blacklist))
 		descend = 0;
-	      else
+	      else {
 		/* Make sure that the old pre-redirect form gets
 		   blacklisted. */
 		string_set_add (blacklist, url);
 	      }
+	    }
 
 	  xfree (url);
 	  url = redirected;
@@ -320,7 +348,7 @@ retrieve_tree (const char *start_url)
 
       /* If the downloaded document was HTML, parse it and enqueue the
 	 links it contains. */
-      if (descend)
+      if (descend || opt.showurls)
 	{
 	  int meta_disallow_follow = 0;
 	  struct urlpos *children
@@ -340,16 +368,27 @@ retrieve_tree (const char *start_url)
 
 	  for (; child; child = child->next)
 	    {
-	      if (child->ignore_when_downloading)
+	      if (child->ignore_when_downloading) {
+		if (opt.showurls)
+		  printf ("-IGN %s\n", child->url->url);
 		continue;
-	      if (dash_p_leaf_HTML && !child->link_inline_p)
+	      }
+	      if (dash_p_leaf_HTML && !child->link_inline_p) {
+		if (opt.showurls)
+		  printf ("-NOTINL %s\n", child->url->url);
 		continue;
+	      }
 	      if (download_child_p (child, url_parsed, depth, start_url_parsed,
 				    blacklist))
 		{
+		  if (!descend) {
+		    printf ("-RECLEVEL %s\n", child->url->url);
+		    continue;
+		  }
 		  url_enqueue (queue, xstrdup (child->url->url),
 			       xstrdup (url), depth + 1,
-			       child->link_expect_html);
+			       child->link_expect_html,
+			       (child->link_inline_p && real_download));
 		  /* We blacklist the URL we have enqueued, because we
 		     don't want to enqueue (and hence download) the
 		     same URL twice.  */
@@ -390,9 +429,9 @@ retrieve_tree (const char *start_url)
      now.  */
   {
     char *d1, *d2;
-    int d3, d4;
+    int d3, d4, d5;
     while (url_dequeue (queue,
-			(const char **)&d1, (const char **)&d2, &d3, &d4))
+			(const char **)&d1, (const char **)&d2, &d3, &d4, &d5))
       {
 	xfree (d1);
 	xfree_null (d2);
@@ -433,9 +472,25 @@ download_child_p (const struct urlpos *u
   if (string_set_contains (blacklist, url))
     {
       DEBUGP (("Already on the black list.\n"));
+      if (opt.showurls)
+	printf ("-ALRDY %s\n", url);
       goto out;
     }
 
+  if (opt.strip_query) {
+    char *surl = alloca (strlen (url) + 1);
+    char *spos = strpbrk (url, "?;");
+    if (spos) {
+      strncpy (surl, url, spos - url);
+      surl[spos - url] = 0;
+      if (string_set_contains (blacklist, surl)) {
+	DEBUGP (("Short form is already on the black list.\n"));
+	if (opt.showurls)
+	  printf ("-ALRDQ %s\n", url);
+	goto out;
+      }
+    }
+  }
   /* Several things to check for:
      1. if scheme is not http, and we don't load it
      2. check for relative links (if relative_only is set)
@@ -464,6 +519,8 @@ download_child_p (const struct urlpos *u
   if (!u_scheme_like_http && !(u->scheme == SCHEME_FTP && opt.follow_ftp))
     {
       DEBUGP (("Not following non-HTTP schemes.\n"));
+      if (opt.showurls)
+	printf ("-SCHEME %s\n", url);
       goto out;
     }
 
@@ -473,6 +530,8 @@ download_child_p (const struct urlpos *u
   if (opt.relative_only && !upos->link_relative_p)
     {
       DEBUGP (("It doesn't really look like a relative link.\n"));
+      if (opt.showurls)
+	printf ("-RELTV %s\n", url);
       goto out;
     }
 
@@ -481,6 +540,8 @@ download_child_p (const struct urlpos *u
   if (!accept_domain (u))
     {
       DEBUGP (("The domain was not accepted.\n"));
+      if (opt.showurls)
+	printf ("-DOMAIN %s\n", url);
       goto out;
     }
 
@@ -499,6 +560,8 @@ download_child_p (const struct urlpos *u
 	{
 	  DEBUGP (("Going to \"%s\" would escape \"%s\" with no_parent on.\n",
 		   u->dir, start_url_parsed->dir));
+	  if (opt.showurls)
+	    printf ("-NP %s\n", url);
 	  goto out;
 	}
     }
@@ -511,6 +574,8 @@ download_child_p (const struct urlpos *u
       if (!accdir (u->dir, ALLABS))
 	{
 	  DEBUGP (("%s (%s) is excluded/not-included.\n", url, u->dir));
+	  if (opt.showurls)
+	    printf ("-DIREXCL %s\n", url);
 	  goto out;
 	}
     }
@@ -536,6 +601,8 @@ download_child_p (const struct urlpos *u
 	{
 	  DEBUGP (("%s (%s) does not match acc/rej rules.\n",
 		   url, u->file));
+	  if (opt.showurls)
+	    printf ("-FILREJ %s\n", url);
 	  goto out;
 	}
     }
@@ -546,6 +613,8 @@ download_child_p (const struct urlpos *u
     {
       DEBUGP (("This is not the same hostname as the parent's (%s and %s).\n",
 	       u->host, parent->host));
+      if (opt.showurls)
+	printf ("-HOST %s\n", url);
       goto out;
     }
 
@@ -577,6 +646,8 @@ download_child_p (const struct urlpos *u
 	{
 	  DEBUGP (("Not following %s because robots.txt forbids it.\n", url));
 	  string_set_add (blacklist, url);
+	  if (opt.showurls)
+	    printf ("-ROBOTS %s\n", url);
 	  goto out;
 	}
     }
diff -urp wget-1.10.2/src/retr.c wget-1.10.2ultra/src/retr.c
--- wget-1.10.2/src/retr.c	2005-06-25 19:07:11.000000000 +0400
+++ wget-1.10.2ultra/src/retr.c	2006-12-04 03:39:32.000000000 +0300
@@ -561,7 +561,7 @@ calc_rate (wgint bytes, double msecs, in
    high enough to guarantee that normal retrievals will not be hurt by
    the check.  */
 
-#define MAX_REDIRECTIONS 20
+// opt.redirs #define MAX_REDIRECTIONS 20
 
 #define SUSPEND_POST_DATA do {			\
   post_data_suspended = 1;			\
@@ -735,10 +735,10 @@ retrieve_url (const char *origurl, char
       mynewloc = xstrdup (newloc_parsed->url);
 
       /* Check for max. number of redirections.  */
-      if (++redirection_count > MAX_REDIRECTIONS)
+      if (++redirection_count > opt.redirs)
 	{
 	  logprintf (LOG_NOTQUIET, _("%d redirections exceeded.\n"),
-		     MAX_REDIRECTIONS);
+		     opt.redirs);
 	  url_free (newloc_parsed);
 	  url_free (u);
 	  xfree (url);
@@ -886,11 +886,11 @@ sleep_between_retrievals (int count)
   if (opt.waitretry && count > 1)
     {
       /* If opt.waitretry is specified and this is a retry, wait for
-	 COUNT-1 number of seconds, or for opt.waitretry seconds.  */
+	 COUNT-1 number of seconds, or for opt.waitretry seconds.
       if (count <= opt.waitretry)
 	xsleep (count - 1.0);
-      else
-	xsleep (opt.waitretry);
+      else */
+	xsleep (opt.waitretry + (double)count - 1.0);
     }
   else if (opt.wait)
     {
diff -urp wget-1.10.2/src/url.h wget-1.10.2ultra/src/url.h
--- wget-1.10.2/src/url.h	2003-09-22 02:47:14.000000000 +0400
+++ wget-1.10.2ultra/src/url.h	2006-12-04 03:39:32.000000000 +0300
@@ -57,14 +57,15 @@ struct url
   int port;			/* Port number */
 
   /* URL components (URL-quoted). */
+  // scheme://host[:port][/path][;params][?query][#fragment]
   char *path;
   char *params;
   char *query;
   char *fragment;
 
   /* Extracted path info (unquoted). */
-  char *dir;
-  char *file;
+  char *dir;			// http://host/[dir/dir]/file?query
+  char *file;			// http://host/dir/[file]?query
 
   /* Username and password (unquoted). */
   char *user;
diff -urp wget-1.10.2/src/utils.c wget-1.10.2ultra/src/utils.c
--- wget-1.10.2/src/utils.c	2005-06-27 18:12:20.000000000 +0400
+++ wget-1.10.2ultra/src/utils.c	2006-12-04 03:39:32.000000000 +0300
@@ -1843,6 +1843,7 @@ run_with_timeout (double timeout, void (
 void
 xsleep (double seconds)
 {
+  logprintf (LOG_VERBOSE, " sleeping %.0f seconds..", seconds);
 #ifdef HAVE_NANOSLEEP
   /* nanosleep is the preferred interface because it offers high
      accuracy and, more importantly, because it allows us to reliably
@@ -1888,6 +1889,7 @@ xsleep (double seconds)
 #endif /* not HAVE_SELECT */
 #endif /* not HAVE_USLEEP */
 #endif /* not HAVE_NANOSLEEP */
+  logprintf (LOG_VERBOSE, ".\n");
 }
 
 #endif /* not WINDOWS */
diff -urp wget-1.10.2/src/version.c wget-1.10.2ultra/src/version.c
--- wget-1.10.2/src/version.c	2005-10-13 13:22:24.000000000 +0400
+++ wget-1.10.2ultra/src/version.c	2006-12-04 03:52:51.000000000 +0300
@@ -1 +1 @@
-char *version_string = "1.10.2";
+char *version_string = "1.10.2ultra";
diff -urp wget-1.10.2/src/wget.h wget-1.10.2ultra/src/wget.h
--- wget-1.10.2/src/wget.h	2005-08-12 01:35:27.000000000 +0400
+++ wget-1.10.2ultra/src/wget.h	2006-12-04 03:39:32.000000000 +0300
@@ -294,7 +294,8 @@ typedef enum
   CONTNOTSUPPORTED, RETRUNNEEDED, RETRFINISHED, READERR, TRYLIMEXC,
   URLBADPATTERN, FILEBADFILE, RANGEERR, RETRBADPATTERN,
   RETNOTSUP, ROBOTSOK, NOROBOTS, PROXERR, AUTHFAILED,
-  QUOTEXC, WRITEFAILED, SSLINITFAILED
+  QUOTEXC, WRITEFAILED, SSLINITFAILED,
+  SERVERROR
 } uerr_t;
 
 #endif /* WGET_H */