Diffstat (limited to 'src')
-rw-r--r--   src/Makefile          334
-rw-r--r--   src/Makefile.am        27
-rw-r--r--   src/Makefile.in       334
-rw-r--r--   src/Makefile.m32       65
-rw-r--r--   src/Makefile.vc6       68
-rw-r--r--   src/config-win32.h     12
-rw-r--r--   src/config.h           13
-rw-r--r--   src/config.h.in        12
-rwxr-xr-x   src/curl              bin 0 -> 1830293 bytes
-rw-r--r--   src/hugehelp.c       1049
-rw-r--r--   src/main.c           1154
-rw-r--r--   src/mkhelp.pl          85
-rw-r--r--   src/setup.h            91
-rw-r--r--   src/stamp-h2            1
-rw-r--r--   src/stamp-h2.in         1
-rw-r--r--   src/urlglob.c         332
-rw-r--r--   src/urlglob.h          74
-rw-r--r--   src/version.h           3
18 files changed, 3655 insertions, 0 deletions
diff --git a/src/Makefile b/src/Makefile
new file mode 100644
index 000000000..a4b373e22
--- /dev/null
+++ b/src/Makefile
@@ -0,0 +1,334 @@
+# Generated automatically from Makefile.in by configure.
+# Makefile.in generated automatically by automake 1.4 from Makefile.am
+
+# Copyright (C) 1994, 1995-8, 1999 Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+#
+# $Id$
+#
+
+# Some flags needed when trying to cause warnings ;-)
+# CFLAGS = -Wall -pedantic
+
+
+SHELL = /bin/sh
+
+srcdir = .
+top_srcdir = ..
+prefix = /usr/local
+exec_prefix = ${prefix}
+
+bindir = ${exec_prefix}/bin
+sbindir = ${exec_prefix}/sbin
+libexecdir = ${exec_prefix}/libexec
+datadir = ${prefix}/share
+sysconfdir = ${prefix}/etc
+sharedstatedir = ${prefix}/com
+localstatedir = ${prefix}/var
+libdir = ${exec_prefix}/lib
+infodir = ${prefix}/info
+mandir = ${prefix}/man
+includedir = ${prefix}/include
+oldincludedir = /usr/include
+
+DESTDIR =
+
+pkgdatadir = $(datadir)/curl
+pkglibdir = $(libdir)/curl
+pkgincludedir = $(includedir)/curl
+
+top_builddir = ..
+
+ACLOCAL = aclocal
+AUTOCONF = autoconf
+AUTOMAKE = automake
+AUTOHEADER = autoheader
+
+INSTALL = .././install-sh -c
+INSTALL_PROGRAM = ${INSTALL} $(AM_INSTALL_PROGRAM_FLAGS)
+INSTALL_DATA = ${INSTALL} -m 644
+INSTALL_SCRIPT = ${INSTALL_PROGRAM}
+transform = s,x,x,
+
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+host_alias = sparc-sun-solaris2.6
+host_triplet = sparc-sun-solaris2.6
+CC = gcc
+MAKEINFO = makeinfo
+PACKAGE = curl
+PERL = /usr/local/bin/perl
+RANLIB = ranlib
+VERSION = 6.3.1
+YACC = bison -y
+
+CPPFLAGS = -DGLOBURL -DCURL_SEPARATORS
+
+INCLUDES = -I$(top_srcdir)/include
+
+bin_PROGRAMS = curl
+
+curl_SOURCES = main.c hugehelp.c urlglob.c
+curl_LDADD = ../lib/libcurl.a
+curl_DEPENDENCIES = ../lib/libcurl.a
+BUILT_SOURCES = hugehelp.c
+CLEANFILES = hugehelp.c
+NROFF = /bin/nroff
+
+EXTRA_DIST = mkhelp.pl Makefile.vc6
+
+AUTOMAKE_OPTIONS = foreign no-dependencies
+mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs
+CONFIG_HEADER = ../config.h config.h
+CONFIG_CLEAN_FILES =
+PROGRAMS = $(bin_PROGRAMS)
+
+
+DEFS = -DHAVE_CONFIG_H -I. -I$(srcdir) -I.. -I.
+LDFLAGS =
+LIBS = -lssl -lcrypto -ldl -lsocket -lnsl -L/home/dast/lib
+curl_OBJECTS = main.o hugehelp.o urlglob.o
+curl_LDFLAGS =
+CFLAGS = -g -O2
+COMPILE = $(CC) $(DEFS) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(LDFLAGS) -o $@
+DIST_COMMON = ./stamp-h2.in Makefile.am Makefile.in config.h.in
+
+
+DISTFILES = $(DIST_COMMON) $(SOURCES) $(HEADERS) $(TEXINFOS) $(EXTRA_DIST)
+
+TAR = tar
+GZIP_ENV = --best
+SOURCES = $(curl_SOURCES)
+OBJECTS = $(curl_OBJECTS)
+
+all: all-redirect
+.SUFFIXES:
+.SUFFIXES: .S .c .o .s
+$(srcdir)/Makefile.in: Makefile.am $(top_srcdir)/configure.in $(ACLOCAL_M4)
+ cd $(top_srcdir) && $(AUTOMAKE) --foreign --include-deps src/Makefile
+
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ cd $(top_builddir) \
+ && CONFIG_FILES=$(subdir)/$@ CONFIG_HEADERS= $(SHELL) ./config.status
+
+
+config.h: stamp-h2
+ @if test ! -f $@; then \
+ rm -f stamp-h2; \
+ $(MAKE) stamp-h2; \
+ else :; fi
+stamp-h2: $(srcdir)/config.h.in $(top_builddir)/config.status
+ cd $(top_builddir) \
+ && CONFIG_FILES= CONFIG_HEADERS=src/config.h \
+ $(SHELL) ./config.status
+ @echo timestamp > stamp-h2 2> /dev/null
+$(srcdir)/config.h.in: $(srcdir)/stamp-h2.in
+ @if test ! -f $@; then \
+ rm -f $(srcdir)/stamp-h2.in; \
+ $(MAKE) $(srcdir)/stamp-h2.in; \
+ else :; fi
+$(srcdir)/stamp-h2.in: $(top_srcdir)/configure.in $(ACLOCAL_M4)
+ cd $(top_srcdir) && $(AUTOHEADER)
+ @echo timestamp > $(srcdir)/stamp-h2.in 2> /dev/null
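+# (Note: stamp-h2 is a timestamp file standing in for src/config.h; the
+# rules above recreate whichever of the two is missing, and config.status
+# only rewrites config.h when its contents actually change, so unrelated
+# reconfigures don't force a full rebuild.)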
+
+mostlyclean-hdr:
+
+clean-hdr:
+
+distclean-hdr:
+ -rm -f config.h
+
+maintainer-clean-hdr:
+
+mostlyclean-binPROGRAMS:
+
+clean-binPROGRAMS:
+ -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS)
+
+distclean-binPROGRAMS:
+
+maintainer-clean-binPROGRAMS:
+
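+# (For illustration: the sed pipeline in the install rule below strips any
+# $(EXEEXT) suffix, applies the configure-time $(transform) rename rule --
+# the identity 's,x,x,' in this file -- and re-appends $(EXEEXT). A
+# hypothetical transform of 's/^/g/' would install curl as gcurl.)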
+install-binPROGRAMS: $(bin_PROGRAMS)
+ @$(NORMAL_INSTALL)
+ $(mkinstalldirs) $(DESTDIR)$(bindir)
+ @list='$(bin_PROGRAMS)'; for p in $$list; do \
+ if test -f $$p; then \
+ echo " $(INSTALL_PROGRAM) $$p $(DESTDIR)$(bindir)/`echo $$p|sed 's/$(EXEEXT)$$//'|sed '$(transform)'|sed 's/$$/$(EXEEXT)/'`"; \
+ $(INSTALL_PROGRAM) $$p $(DESTDIR)$(bindir)/`echo $$p|sed 's/$(EXEEXT)$$//'|sed '$(transform)'|sed 's/$$/$(EXEEXT)/'`; \
+ else :; fi; \
+ done
+
+uninstall-binPROGRAMS:
+ @$(NORMAL_UNINSTALL)
+ list='$(bin_PROGRAMS)'; for p in $$list; do \
+ rm -f $(DESTDIR)$(bindir)/`echo $$p|sed 's/$(EXEEXT)$$//'|sed '$(transform)'|sed 's/$$/$(EXEEXT)/'`; \
+ done
+
+.c.o:
+ $(COMPILE) -c $<
+
+.s.o:
+ $(COMPILE) -c $<
+
+.S.o:
+ $(COMPILE) -c $<
+
+mostlyclean-compile:
+ -rm -f *.o core *.core
+
+clean-compile:
+
+distclean-compile:
+ -rm -f *.tab.c
+
+maintainer-clean-compile:
+
+curl: $(curl_OBJECTS) $(curl_DEPENDENCIES)
+ @rm -f curl
+ $(LINK) $(curl_LDFLAGS) $(curl_OBJECTS) $(curl_LDADD) $(LIBS)
+
+tags: TAGS
+
+ID: $(HEADERS) $(SOURCES) $(LISP)
+ list='$(SOURCES) $(HEADERS)'; \
+ unique=`for i in $$list; do echo $$i; done | \
+ awk ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ here=`pwd` && cd $(srcdir) \
+ && mkid -f$$here/ID $$unique $(LISP)
+
+TAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) $(LISP)
+ tags=; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS)'; \
+ unique=`for i in $$list; do echo $$i; done | \
+ awk ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ test -z "$(ETAGS_ARGS)config.h.in$$unique$(LISP)$$tags" \
+ || (cd $(srcdir) && etags $(ETAGS_ARGS) $$tags config.h.in $$unique $(LISP) -o $$here/TAGS)
+
+mostlyclean-tags:
+
+clean-tags:
+
+distclean-tags:
+ -rm -f TAGS ID
+
+maintainer-clean-tags:
+
+distdir = $(top_builddir)/$(PACKAGE)-$(VERSION)/$(subdir)
+
+subdir = src
+
+distdir: $(DISTFILES)
+ @for file in $(DISTFILES); do \
+ d=$(srcdir); \
+ if test -d $$d/$$file; then \
+	    cp -pr $$d/$$file $(distdir)/$$file; \
+ else \
+ test -f $(distdir)/$$file \
+ || ln $$d/$$file $(distdir)/$$file 2> /dev/null \
+ || cp -p $$d/$$file $(distdir)/$$file || :; \
+ fi; \
+ done
+info-am:
+info: info-am
+dvi-am:
+dvi: dvi-am
+check-am: all-am
+check: check-am
+installcheck-am:
+installcheck: installcheck-am
+all-recursive-am: config.h
+ $(MAKE) $(AM_MAKEFLAGS) all-recursive
+
+install-exec-am: install-binPROGRAMS
+install-exec: install-exec-am
+
+install-data-am:
+install-data: install-data-am
+
+install-am: all-am
+ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+install: install-am
+uninstall-am: uninstall-binPROGRAMS
+uninstall: uninstall-am
+all-am: Makefile $(PROGRAMS) config.h
+all-redirect: all-am
+install-strip:
+ $(MAKE) $(AM_MAKEFLAGS) AM_INSTALL_PROGRAM_FLAGS=-s install
+installdirs:
+ $(mkinstalldirs) $(DESTDIR)$(bindir)
+
+
+mostlyclean-generic:
+
+clean-generic:
+ -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
+
+distclean-generic:
+ -rm -f Makefile $(CONFIG_CLEAN_FILES)
+ -rm -f config.cache config.log stamp-h stamp-h[0-9]*
+
+maintainer-clean-generic:
+ -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES)
+mostlyclean-am: mostlyclean-hdr mostlyclean-binPROGRAMS \
+ mostlyclean-compile mostlyclean-tags \
+ mostlyclean-generic
+
+mostlyclean: mostlyclean-am
+
+clean-am: clean-hdr clean-binPROGRAMS clean-compile clean-tags \
+ clean-generic mostlyclean-am
+
+clean: clean-am
+
+distclean-am: distclean-hdr distclean-binPROGRAMS distclean-compile \
+ distclean-tags distclean-generic clean-am
+
+distclean: distclean-am
+
+maintainer-clean-am: maintainer-clean-hdr maintainer-clean-binPROGRAMS \
+ maintainer-clean-compile maintainer-clean-tags \
+ maintainer-clean-generic distclean-am
+ @echo "This command is intended for maintainers to use;"
+ @echo "it deletes files that may require special tools to rebuild."
+
+maintainer-clean: maintainer-clean-am
+
+.PHONY: mostlyclean-hdr distclean-hdr clean-hdr maintainer-clean-hdr \
+mostlyclean-binPROGRAMS distclean-binPROGRAMS clean-binPROGRAMS \
+maintainer-clean-binPROGRAMS uninstall-binPROGRAMS install-binPROGRAMS \
+mostlyclean-compile distclean-compile clean-compile \
+maintainer-clean-compile tags mostlyclean-tags distclean-tags \
+clean-tags maintainer-clean-tags distdir info-am info dvi-am dvi check \
+check-am installcheck-am installcheck all-recursive-am install-exec-am \
+install-exec install-data-am install-data install-am install \
+uninstall-am uninstall all-redirect all-am all installdirs \
+mostlyclean-generic distclean-generic clean-generic \
+maintainer-clean-generic clean mostlyclean distclean maintainer-clean
+
+
+# This generates the hugehelp.c file
+hugehelp.c: $(top_srcdir)/README.curl $(top_srcdir)/curl.1 mkhelp.pl
+ rm -f hugehelp.c
+ $(NROFF) -man $(top_srcdir)/curl.1 | $(PERL) $(top_srcdir)/src/mkhelp.pl $(top_srcdir)/README.curl > hugehelp.c
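+# For illustration, with the paths configured above this rule expands to
+# roughly the following (run from within src/):
+#   /bin/nroff -man ../curl.1 | /usr/local/bin/perl ../src/mkhelp.pl ../README.curl > hugehelp.c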
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/src/Makefile.am b/src/Makefile.am
new file mode 100644
index 000000000..561142ed3
--- /dev/null
+++ b/src/Makefile.am
@@ -0,0 +1,27 @@
+#
+# $Id$
+#
+
+# Some flags needed when trying to cause warnings ;-)
+# CFLAGS = -Wall -pedantic
+CPPFLAGS = -DGLOBURL -DCURL_SEPARATORS
+
+INCLUDES = -I$(top_srcdir)/include
+
+bin_PROGRAMS = curl
+
+curl_SOURCES = main.c hugehelp.c urlglob.c
+curl_LDADD = ../lib/libcurl.a
+curl_DEPENDENCIES = ../lib/libcurl.a
+BUILT_SOURCES = hugehelp.c
+CLEANFILES = hugehelp.c
+NROFF=@NROFF@
+
+EXTRA_DIST = mkhelp.pl Makefile.vc6
+
+AUTOMAKE_OPTIONS = foreign no-dependencies
+
+# This generates the hugehelp.c file
+hugehelp.c: $(top_srcdir)/README.curl $(top_srcdir)/curl.1 mkhelp.pl
+ rm -f hugehelp.c
+ $(NROFF) -man $(top_srcdir)/curl.1 | $(PERL) $(top_srcdir)/src/mkhelp.pl $(top_srcdir)/README.curl > hugehelp.c
diff --git a/src/Makefile.in b/src/Makefile.in
new file mode 100644
index 000000000..f7afbd977
--- /dev/null
+++ b/src/Makefile.in
@@ -0,0 +1,334 @@
+# Makefile.in generated automatically by automake 1.4 from Makefile.am
+
+# Copyright (C) 1994, 1995-8, 1999 Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+#
+# $Id$
+#
+
+# Some flags needed when trying to cause warnings ;-)
+# CFLAGS = -Wall -pedantic
+
+
+SHELL = @SHELL@
+
+srcdir = @srcdir@
+top_srcdir = @top_srcdir@
+VPATH = @srcdir@
+prefix = @prefix@
+exec_prefix = @exec_prefix@
+
+bindir = @bindir@
+sbindir = @sbindir@
+libexecdir = @libexecdir@
+datadir = @datadir@
+sysconfdir = @sysconfdir@
+sharedstatedir = @sharedstatedir@
+localstatedir = @localstatedir@
+libdir = @libdir@
+infodir = @infodir@
+mandir = @mandir@
+includedir = @includedir@
+oldincludedir = /usr/include
+
+DESTDIR =
+
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+
+top_builddir = ..
+
+ACLOCAL = @ACLOCAL@
+AUTOCONF = @AUTOCONF@
+AUTOMAKE = @AUTOMAKE@
+AUTOHEADER = @AUTOHEADER@
+
+INSTALL = @INSTALL@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@ $(AM_INSTALL_PROGRAM_FLAGS)
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+transform = @program_transform_name@
+
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+host_alias = @host_alias@
+host_triplet = @host@
+CC = @CC@
+MAKEINFO = @MAKEINFO@
+PACKAGE = @PACKAGE@
+PERL = @PERL@
+RANLIB = @RANLIB@
+VERSION = @VERSION@
+YACC = @YACC@
+
+CPPFLAGS = -DGLOBURL -DCURL_SEPARATORS
+
+INCLUDES = -I$(top_srcdir)/include
+
+bin_PROGRAMS = curl
+
+curl_SOURCES = main.c hugehelp.c urlglob.c
+curl_LDADD = ../lib/libcurl.a
+curl_DEPENDENCIES = ../lib/libcurl.a
+BUILT_SOURCES = hugehelp.c
+CLEANFILES = hugehelp.c
+NROFF = @NROFF@
+
+EXTRA_DIST = mkhelp.pl Makefile.vc6
+
+AUTOMAKE_OPTIONS = foreign no-dependencies
+mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs
+CONFIG_HEADER = ../config.h config.h
+CONFIG_CLEAN_FILES =
+PROGRAMS = $(bin_PROGRAMS)
+
+
+DEFS = @DEFS@ -I. -I$(srcdir) -I.. -I.
+LDFLAGS = @LDFLAGS@
+LIBS = @LIBS@
+curl_OBJECTS = main.o hugehelp.o urlglob.o
+curl_LDFLAGS =
+CFLAGS = @CFLAGS@
+COMPILE = $(CC) $(DEFS) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(LDFLAGS) -o $@
+DIST_COMMON = ./stamp-h2.in Makefile.am Makefile.in config.h.in
+
+
+DISTFILES = $(DIST_COMMON) $(SOURCES) $(HEADERS) $(TEXINFOS) $(EXTRA_DIST)
+
+TAR = tar
+GZIP_ENV = --best
+SOURCES = $(curl_SOURCES)
+OBJECTS = $(curl_OBJECTS)
+
+all: all-redirect
+.SUFFIXES:
+.SUFFIXES: .S .c .o .s
+$(srcdir)/Makefile.in: Makefile.am $(top_srcdir)/configure.in $(ACLOCAL_M4)
+ cd $(top_srcdir) && $(AUTOMAKE) --foreign --include-deps src/Makefile
+
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ cd $(top_builddir) \
+ && CONFIG_FILES=$(subdir)/$@ CONFIG_HEADERS= $(SHELL) ./config.status
+
+
+config.h: stamp-h2
+ @if test ! -f $@; then \
+ rm -f stamp-h2; \
+ $(MAKE) stamp-h2; \
+ else :; fi
+stamp-h2: $(srcdir)/config.h.in $(top_builddir)/config.status
+ cd $(top_builddir) \
+ && CONFIG_FILES= CONFIG_HEADERS=src/config.h \
+ $(SHELL) ./config.status
+ @echo timestamp > stamp-h2 2> /dev/null
+$(srcdir)/config.h.in: $(srcdir)/stamp-h2.in
+ @if test ! -f $@; then \
+ rm -f $(srcdir)/stamp-h2.in; \
+ $(MAKE) $(srcdir)/stamp-h2.in; \
+ else :; fi
+$(srcdir)/stamp-h2.in: $(top_srcdir)/configure.in $(ACLOCAL_M4)
+ cd $(top_srcdir) && $(AUTOHEADER)
+ @echo timestamp > $(srcdir)/stamp-h2.in 2> /dev/null
+
+mostlyclean-hdr:
+
+clean-hdr:
+
+distclean-hdr:
+ -rm -f config.h
+
+maintainer-clean-hdr:
+
+mostlyclean-binPROGRAMS:
+
+clean-binPROGRAMS:
+ -test -z "$(bin_PROGRAMS)" || rm -f $(bin_PROGRAMS)
+
+distclean-binPROGRAMS:
+
+maintainer-clean-binPROGRAMS:
+
+install-binPROGRAMS: $(bin_PROGRAMS)
+ @$(NORMAL_INSTALL)
+ $(mkinstalldirs) $(DESTDIR)$(bindir)
+ @list='$(bin_PROGRAMS)'; for p in $$list; do \
+ if test -f $$p; then \
+ echo " $(INSTALL_PROGRAM) $$p $(DESTDIR)$(bindir)/`echo $$p|sed 's/$(EXEEXT)$$//'|sed '$(transform)'|sed 's/$$/$(EXEEXT)/'`"; \
+ $(INSTALL_PROGRAM) $$p $(DESTDIR)$(bindir)/`echo $$p|sed 's/$(EXEEXT)$$//'|sed '$(transform)'|sed 's/$$/$(EXEEXT)/'`; \
+ else :; fi; \
+ done
+
+uninstall-binPROGRAMS:
+ @$(NORMAL_UNINSTALL)
+ list='$(bin_PROGRAMS)'; for p in $$list; do \
+ rm -f $(DESTDIR)$(bindir)/`echo $$p|sed 's/$(EXEEXT)$$//'|sed '$(transform)'|sed 's/$$/$(EXEEXT)/'`; \
+ done
+
+.c.o:
+ $(COMPILE) -c $<
+
+.s.o:
+ $(COMPILE) -c $<
+
+.S.o:
+ $(COMPILE) -c $<
+
+mostlyclean-compile:
+ -rm -f *.o core *.core
+
+clean-compile:
+
+distclean-compile:
+ -rm -f *.tab.c
+
+maintainer-clean-compile:
+
+curl: $(curl_OBJECTS) $(curl_DEPENDENCIES)
+ @rm -f curl
+ $(LINK) $(curl_LDFLAGS) $(curl_OBJECTS) $(curl_LDADD) $(LIBS)
+
+tags: TAGS
+
+ID: $(HEADERS) $(SOURCES) $(LISP)
+ list='$(SOURCES) $(HEADERS)'; \
+ unique=`for i in $$list; do echo $$i; done | \
+ awk ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ here=`pwd` && cd $(srcdir) \
+ && mkid -f$$here/ID $$unique $(LISP)
+
+TAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) $(LISP)
+ tags=; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS)'; \
+ unique=`for i in $$list; do echo $$i; done | \
+ awk ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ test -z "$(ETAGS_ARGS)config.h.in$$unique$(LISP)$$tags" \
+ || (cd $(srcdir) && etags $(ETAGS_ARGS) $$tags config.h.in $$unique $(LISP) -o $$here/TAGS)
+
+mostlyclean-tags:
+
+clean-tags:
+
+distclean-tags:
+ -rm -f TAGS ID
+
+maintainer-clean-tags:
+
+distdir = $(top_builddir)/$(PACKAGE)-$(VERSION)/$(subdir)
+
+subdir = src
+
+distdir: $(DISTFILES)
+ @for file in $(DISTFILES); do \
+ d=$(srcdir); \
+ if test -d $$d/$$file; then \
+	    cp -pr $$d/$$file $(distdir)/$$file; \
+ else \
+ test -f $(distdir)/$$file \
+ || ln $$d/$$file $(distdir)/$$file 2> /dev/null \
+ || cp -p $$d/$$file $(distdir)/$$file || :; \
+ fi; \
+ done
+info-am:
+info: info-am
+dvi-am:
+dvi: dvi-am
+check-am: all-am
+check: check-am
+installcheck-am:
+installcheck: installcheck-am
+all-recursive-am: config.h
+ $(MAKE) $(AM_MAKEFLAGS) all-recursive
+
+install-exec-am: install-binPROGRAMS
+install-exec: install-exec-am
+
+install-data-am:
+install-data: install-data-am
+
+install-am: all-am
+ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+install: install-am
+uninstall-am: uninstall-binPROGRAMS
+uninstall: uninstall-am
+all-am: Makefile $(PROGRAMS) config.h
+all-redirect: all-am
+install-strip:
+ $(MAKE) $(AM_MAKEFLAGS) AM_INSTALL_PROGRAM_FLAGS=-s install
+installdirs:
+ $(mkinstalldirs) $(DESTDIR)$(bindir)
+
+
+mostlyclean-generic:
+
+clean-generic:
+ -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
+
+distclean-generic:
+ -rm -f Makefile $(CONFIG_CLEAN_FILES)
+ -rm -f config.cache config.log stamp-h stamp-h[0-9]*
+
+maintainer-clean-generic:
+ -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES)
+mostlyclean-am: mostlyclean-hdr mostlyclean-binPROGRAMS \
+ mostlyclean-compile mostlyclean-tags \
+ mostlyclean-generic
+
+mostlyclean: mostlyclean-am
+
+clean-am: clean-hdr clean-binPROGRAMS clean-compile clean-tags \
+ clean-generic mostlyclean-am
+
+clean: clean-am
+
+distclean-am: distclean-hdr distclean-binPROGRAMS distclean-compile \
+ distclean-tags distclean-generic clean-am
+
+distclean: distclean-am
+
+maintainer-clean-am: maintainer-clean-hdr maintainer-clean-binPROGRAMS \
+ maintainer-clean-compile maintainer-clean-tags \
+ maintainer-clean-generic distclean-am
+ @echo "This command is intended for maintainers to use;"
+ @echo "it deletes files that may require special tools to rebuild."
+
+maintainer-clean: maintainer-clean-am
+
+.PHONY: mostlyclean-hdr distclean-hdr clean-hdr maintainer-clean-hdr \
+mostlyclean-binPROGRAMS distclean-binPROGRAMS clean-binPROGRAMS \
+maintainer-clean-binPROGRAMS uninstall-binPROGRAMS install-binPROGRAMS \
+mostlyclean-compile distclean-compile clean-compile \
+maintainer-clean-compile tags mostlyclean-tags distclean-tags \
+clean-tags maintainer-clean-tags distdir info-am info dvi-am dvi check \
+check-am installcheck-am installcheck all-recursive-am install-exec-am \
+install-exec install-data-am install-data install-am install \
+uninstall-am uninstall all-redirect all-am all installdirs \
+mostlyclean-generic distclean-generic clean-generic \
+maintainer-clean-generic clean mostlyclean distclean maintainer-clean
+
+
+# This generates the hugehelp.c file
+hugehelp.c: $(top_srcdir)/README.curl $(top_srcdir)/curl.1 mkhelp.pl
+ rm -f hugehelp.c
+ $(NROFF) -man $(top_srcdir)/curl.1 | $(PERL) $(top_srcdir)/src/mkhelp.pl $(top_srcdir)/README.curl > hugehelp.c
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/src/Makefile.m32 b/src/Makefile.m32
new file mode 100644
index 000000000..d398083fb
--- /dev/null
+++ b/src/Makefile.m32
@@ -0,0 +1,65 @@
+#############################################################
+## Makefile for building curl.exe with MingW32 (GCC-2.95) and
+## optionally OpenSSL (0.9.4)
+##
+## Use: make -f Makefile.m32 [SSL=1]
+##
+## Comments to: Troy Engel <tengel@sonic.net> or
+## Joern Hartroth <hartroth@acm.org>
+
+CC = gcc
+STRIP = strip -s
+OPENSSL_PATH = ../../openssl-0.9.4
+
+# We may need these someday
+# PERL = perl
+# NROFF = nroff
+
+########################################################
+## Nothing more to do below this line!
+
+INCLUDES = -I. -I.. -I../include
+CFLAGS = -g -O2 -DGLOBURL -DCURL_SEPARATORS
+LDFLAGS =
+COMPILE = $(CC) $(INCLUDES) $(CFLAGS)
+LINK = $(CC) $(CFLAGS) $(LDFLAGS) -o $@
+
+curl_PROGRAMS = curl.exe
+curl_OBJECTS = main.o hugehelp.o urlglob.o
+curl_SOURCES = main.c hugehelp.c urlglob.c
+curl_DEPENDENCIES = ../lib/libcurl.a
+curl_LDADD = -L../lib -lcurl -lwsock32
+ifdef SSL
+ curl_LDADD += -L$(OPENSSL_PATH)/out -leay32 -lssl32 -lRSAglue
+endif
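+# (With SSL=1 the link line above gains the OpenSSL 0.9.4 import libraries
+# from $(OPENSSL_PATH)/out; without it, curl.exe links against libcurl and
+# wsock32 only.)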
+
+PROGRAMS = $(curl_PROGRAMS)
+SOURCES = $(curl_SOURCES)
+OBJECTS = $(curl_OBJECTS)
+
+all: curl
+
+curl: $(curl_OBJECTS) $(curl_DEPENDENCIES)
+ -@erase curl.exe
+ $(LINK) $(curl_OBJECTS) $(curl_LDADD)
+ $(STRIP) $(curl_PROGRAMS)
+
+# We don't have nroff normally under win32
+# hugehelp.c: ../README.curl ../curl.1 mkhelp.pl
+# -@erase hugehelp.c
+# $(NROFF) -man ../curl.1 | $(PERL) mkhelp.pl ../README.curl > hugehelp.c
+
+.c.o:
+ $(COMPILE) -c $<
+
+.s.o:
+ $(COMPILE) -c $<
+
+.S.o:
+ $(COMPILE) -c $<
+
+clean:
+ -@erase $(curl_OBJECTS)
+
+distrib: clean
+ -@erase $(curl_PROGRAMS)
diff --git a/src/Makefile.vc6 b/src/Makefile.vc6
new file mode 100644
index 000000000..1502b0218
--- /dev/null
+++ b/src/Makefile.vc6
@@ -0,0 +1,68 @@
+########################################################
+## Makefile for building curl.exe with MSVC6
+## Use: nmake -f makefile.vc6 [release | debug]
+## (default is release)
+##
+## Comments to: Troy Engel <tengel@sonic.net>
+
+PROGRAM_NAME = curl.exe
+
+########################################################
+## Nothing more to do below this line!
+
+## Release
+CCR = cl.exe /ML /O2 /D "NDEBUG"
+LINKR = link.exe /incremental:no /libpath:"../lib"
+
+## Debug
+CCD = cl.exe /MLd /Gm /ZI /Od /D "_DEBUG" /GZ
+LINKD = link.exe /incremental:yes /debug
+
+CFLAGS = /nologo /W3 /GX /D "WIN32" /D "_CONSOLE" /D "_MBCS" /YX /FD /c
+LFLAGS = /nologo /out:$(PROGRAM_NAME) /subsystem:console /machine:I386
+LINKLIBS = kernel32.lib wsock32.lib libcurl.lib
+
+RELEASE_OBJS= \
+ hugehelpr.obj \
+ mainr.obj
+
+DEBUG_OBJS= \
+ hugehelpd.obj \
+ maind.obj
+
+LINK_OBJS= \
+ hugehelp.obj \
+ main.obj
+
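+## (Note: cl is invoked without /Fo below, so both configurations actually
+## emit hugehelp.obj and main.obj; LINK_OBJS therefore lists the default
+## object names rather than the r/d-suffixed targets.)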
+all : release
+
+release: $(RELEASE_OBJS)
+ $(LINKR) $(LFLAGS) $(LINKLIBS) $(LINK_OBJS)
+
+debug: $(DEBUG_OBJS)
+ $(LINKD) $(LFLAGS) $(LINKLIBS) $(LINK_OBJS)
+
+## Release
+hugehelpr.obj: hugehelp.c
+ $(CCR) $(CFLAGS) /Zm200 hugehelp.c
+mainr.obj: main.c
+ $(CCR) $(CFLAGS) main.c
+
+## Debug
+hugehelpd.obj: hugehelp.c
+ $(CCD) $(CFLAGS) /Zm200 hugehelp.c
+maind.obj: main.c
+ $(CCD) $(CFLAGS) main.c
+
+clean:
+ -@erase hugehelp.obj
+ -@erase main.obj
+ -@erase vc60.idb
+ -@erase vc60.pdb
+ -@erase vc60.pch
+ -@erase curl.ilk
+ -@erase curl.pdb
+
+distrib: clean
+ -@erase $(PROGRAM_NAME)
+
diff --git a/src/config-win32.h b/src/config-win32.h
new file mode 100644
index 000000000..42264ecff
--- /dev/null
+++ b/src/config-win32.h
@@ -0,0 +1,12 @@
+/* src/config.h. Generated automatically by configure. */
+/* Define if you have the strcasecmp function. */
+/*#define HAVE_STRCASECMP 1*/
+
+/* Define cpu-machine-OS */
+#define OS "win32"
+
+/* Define if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* Define if you have the <io.h> header file. */
+#define HAVE_IO_H 1
diff --git a/src/config.h b/src/config.h
new file mode 100644
index 000000000..e741a236c
--- /dev/null
+++ b/src/config.h
@@ -0,0 +1,13 @@
+/* src/config.h. Generated automatically by configure. */
+/* src/config.h. Generated automatically by configure. */
+/* Define if you have the strcasecmp function. */
+#define HAVE_STRCASECMP 1
+
+/* Define cpu-machine-OS */
+#define OS "sparc-sun-solaris2.6"
+
+/* Define if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* Define if you have the <io.h> header file. */
+/* #undef HAVE_IO_H */
diff --git a/src/config.h.in b/src/config.h.in
new file mode 100644
index 000000000..2e210ede8
--- /dev/null
+++ b/src/config.h.in
@@ -0,0 +1,12 @@
+/* src/config.h. Generated automatically by configure. */
+/* Define if you have the strcasecmp function. */
+#undef HAVE_STRCASECMP
+
+/* Define cpu-machine-OS */
+#undef OS
+
+/* Define if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* Define if you have the <io.h> header file. */
+#undef HAVE_IO_H
diff --git a/src/curl b/src/curl
new file mode 100755
index 000000000..cba40a4c0
--- /dev/null
+++ b/src/curl
Binary files differ
diff --git a/src/hugehelp.c b/src/hugehelp.c
new file mode 100644
index 000000000..390221935
--- /dev/null
+++ b/src/hugehelp.c
@@ -0,0 +1,1049 @@
+/* NEVER EVER edit this manually, fix the mkhelp script instead! */
+#include <stdio.h>
+void hugehelp(void)
+{
+puts (
+" _ _ ____ _ \n"
+" Project ___| | | | _ \\| | \n"
+" / __| | | | |_) | | \n"
+" | (__| |_| | _ <| |___ \n"
+" \\___|\\___/|_| \\_\\_____|\n"
+"NAME\n"
+" curl - get a URL with FTP, TELNET, LDAP, GOPHER, DICT, FILE,\n"
+" HTTP or HTTPS syntax.\n"
+"\n"
+"SYNOPSIS\n"
+" curl [options] url\n"
+"\n"
+"DESCRIPTION\n"
+" curl is a client to get documents/files from servers, using\n"
+" any of the supported protocols. The command is designed to\n"
+" work without user interaction or any kind of interactivity.\n"
+"\n"
+" curl offers a busload of useful tricks like proxy support,\n"
+" user authentication, ftp upload, HTTP post, SSL (https:)\n"
+" connections, cookies, file transfer resume and more.\n"
+"\n"
+"URL\n"
+" The URL syntax is protocol dependent. You'll find a detailed\n"
+" description in RFC 2396.\n"
+"\n"
+" You can specify multiple URLs or parts of URLs by writing\n"
+" part sets within braces as in:\n"
+"\n"
+" http://site.{one,two,three}.com\n"
+"\n"
+" or you can get sequences of alphanumeric series by using []\n"
+" as in:\n"
+"\n"
+" ftp://ftp.numericals.com/file[1-100].txt\n"
+" ftp://ftp.numericals.com/file[001-100].txt (with leading\n"
+" zeros)\n"
+" ftp://ftp.letters.com/file[a-z].txt\n"
+"\n"
+" It is possible to specify up to 9 sets or series for a URL,\n"
+" but no nesting is supported at the moment:\n"
+"\n"
+" http://www.any.org/archive[1996-1999]/volume[1-\n"
+" 4]part{a,b,c,index}.html\n"
+"\n"
+"OPTIONS\n"
+" -a/--append\n"
+" (FTP) When used in a ftp upload, this will tell curl to\n"
+" append to the target file instead of overwriting it. If\n"
+" the file doesn't exist, it will be created.\n"
+"\n"
+" -A/--user-agent <agent string>\n"
+" (HTTP) Specify the User-Agent string to send to the\n"
+" HTTP server. Some badly done CGIs fail if its not set\n"
+" to \"Mozilla/4.0\". To encode blanks in the string, sur-\n"
+" round the string with single quote marks. This can\n"
+" also be set with the -H/--header flag of course.\n"
+" -b/--cookie <name=data>\n"
+" (HTTP) Pass the data to the HTTP server as a cookie. It\n"
+" is supposedly the data previously received from the\n"
+" server in a \"Set-Cookie:\" line. The data should be in\n"
+" the format \"NAME1=VALUE1; NAME2=VALUE2\".\n"
+"\n"
+" If no '=' letter is used in the line, it is treated as\n"
+" a filename to use to read previously stored cookie\n"
+" lines from, which should be used in this session if\n"
+" they match. Using this method also activates the\n"
+" \"cookie parser\" which will make curl record incoming\n"
+" cookies too, which may be handy if you're using this in\n"
+" combination with the -L/--location option. The file\n"
+" format of the file to read cookies from should be plain\n"
+" HTTP headers or the netscape cookie file format.\n"
+"\n"
+" -B/--ftp-ascii\n"
+" (FTP/LDAP) Use ASCII transfer when getting an FTP file\n"
+" or LDAP info. For FTP, this can also be enforced by\n"
+" using an URL that ends with \";type=A\".\n"
+"\n"
+" -c/--continue\n"
+" Continue/Resume a previous file transfer. This\n"
+" instructs curl to continue appending data on the file\n"
+" where it was previously left, possibly because of a\n"
+" broken connection to the server. There must be a named\n"
+" physical file to append to for this to work. Note:\n"
+" Upload resume is depening on a command named SIZE not\n"
+" always present in all ftp servers! Upload resume is for\n"
+" FTP only. HTTP resume is only possible with HTTP/1.1\n"
+" or later servers.\n"
+"\n"
+" -C/--continue-at <offset>\n"
+" Continue/Resume a previous file transfer at the given\n"
+" offset. The given offset is the exact number of bytes\n"
+" that will be skipped counted from the beginning of the\n"
+" source file before it is transfered to the destination.\n"
+" If used with uploads, the ftp server command SIZE will\n"
+" not be used by curl. Upload resume is for FTP only.\n"
+" HTTP resume is only possible with HTTP/1.1 or later\n"
+" servers.\n"
+"\n"
+" -d/--data <data>\n"
+" (HTTP) Sends the specified data in a POST request to\n"
+" the HTTP server. Note that the data is sent exactly as\n"
+" specified with no extra processing. The data is\n"
+" expected to be \"url-encoded\". This will cause curl to\n"
+" pass the data to the server using the content-type\n"
+" application/x-www-form-urlencoded. Compare to -F.\n"
+"\n"
+" If you start the data with the letter @, the rest\n"
+" should be a file name to read the data from, or - if\n"
+" you want curl to read the data from stdin. The con-\n"
+" tents of the file must already be url-encoded.\n"
+"\n"
+" -D/--dump-header <file>\n"
+" (HTTP/FTP) Write the HTTP headers to this file. Write\n"
+" the FTP file info to this file if -I/--head is used.\n"
+"\n"
+" -e/--referer <URL>\n"
+" (HTTP) Sends the \"Referer Page\" information to the HTTP\n"
+" server. Some badly done CGIs fail if it's not set. This\n"
+" can also be set with the -H/--header flag of course.\n"
+"\n"
+" -E/--cert <certificate[:password]>\n"
+" (HTTPS) Tells curl to use the specified certificate\n"
+" file when getting a file with HTTPS. The certificate\n"
+" must be in PEM format. If the optional password isn't\n"
+" specified, it will be queried for on the terminal. Note\n"
+" that this certificate is the private key and the\n"
+" private certificate concatenated!\n"
+"\n"
+" -f/--fail\n"
+" (HTTP) Fail silently (no output at all) on server\n"
+" errors. This is mostly done like this to better enable\n"
+" scripts etc to better deal with failed attempts. In\n"
+" normal cases when a HTTP server fails to deliver a\n"
+" document, it returns a HTML document stating so (which\n"
+" often also describes why and more). This flag will\n"
+" prevent curl from outputting that and fail silently\n"
+" instead.\n"
+"\n"
+" -F/--form <name=content>\n"
+" (HTTP) This lets curl emulate a filled in form in which\n"
+" a user has pressed the submit button. This causes curl\n"
+" to POST data using the content-type multipart/form-data\n"
+" according to RFC1867. This enables uploading of binary\n"
+" files etc. To force the 'content' part to be read from\n"
+" a file, prefix the file name with an @ sign. Example,\n"
+" to send your password file to the server, where 'pass-\n"
+" word' is the name of the form-field to which\n"
+" /etc/passwd will be the input:\n"
+"\n"
+" curl -F password=@/etc/passwd www.mypasswords.com\n"
+"\n"
+" To read the file's content from stdin insted of a file,\n"
+" use - where the file name should've been.\n"
+"\n"
+" -h/--help\n"
+" Usage help.\n"
+"\n"
+" -H/--header <header>\n"
+" (HTTP) Extra header to use when getting a web page. You\n"
+" may specify any number of extra headers. Note that if\n"
+" you should add a custom header that has the same name\n"
+" as one of the internal ones curl would use, your exter-\n"
+" nally set header will be used instead of the internal\n"
+" one. This allows you to make even trickier stuff than\n"
+" curl would normally do. You should not replace inter-\n"
+" nally set headers without knowing perfectly well what\n"
+" you're doing.\n"
+"\n"
+" -i/--include\n"
+" (HTTP) Include the HTTP-header in the output. The\n"
+" HTTP-header includes things like server-name, date of\n"
+" the document, HTTP-version and more...\n"
+"\n"
+" -I/--head\n"
+" (HTTP/FTP) Fetch the HTTP-header only! HTTP-servers\n"
+" feature the command HEAD which this uses to get nothing\n"
+" but the header of a document. When used on a FTP file,\n"
+" curl displays the file size only.\n"
+"\n"
+" -K/--config <config file>\n"
+" Specify which config file to read curl arguments from.\n"
+" The config file is a text file in which command line\n"
+" arguments can be written which then will be used as if\n"
+" they were written on the actual command line. If the\n"
+" first column of a config line is a '#' character, the\n"
+" rest of the line will be treated as a comment.\n"
+"\n"
+" Specify the filename as '-' to make curl read the file\n"
+" from stdin.\n"
+"\n"
+" -l/--list-only\n"
+" (FTP) When listing an FTP directory, this switch forces\n"
+" a name-only view. Especially useful if you want to\n"
+" machine-parse the contents of an FTP directory since\n"
+" the normal directory view doesn't use a standard look\n"
+" or format.\n"
+"\n"
+" -L/--location\n"
+" (HTTP/HTTPS) If the server reports that the requested\n"
+" page has a different location (indicated with the\n"
+" header line Location:) this flag will let curl attempt\n"
+" to reattempt the get on the new place. If used together\n"
+" with -i or -I, headers from all requested pages will be\n"
+" shown.\n"
+"\n"
+" -m/--max-time <seconds>\n"
+" Maximum time in seconds that you allow the whole opera-\n"
+" tion to take. This is useful for preventing your batch\n"
+" jobs from hanging for hours due to slow networks or\n"
+" links going down. This doesn't work properly in win32\n"
+" systems.\n"
+" -M/--manual\n"
+" Manual. Display the huge help text.\n"
+"\n"
+" -n/--netrc\n"
+" Makes curl scan the .netrc file in the user's home\n"
+" directory for login name and password. This is typi-\n"
+" cally used for ftp on unix. If used with http, curl\n"
+" will enable user authentication. See netrc(4) for\n"
+" details on the file format. Curl will not complain if\n"
+" that file hasn't the right permissions (it should not\n"
+" be world nor group readable). The environment variable\n"
+" \"HOME\" is used to find the home directory.\n"
+"\n"
+" A quick and very simple example of how to setup a\n"
+" .netrc to allow curl to ftp to the machine\n"
+" host.domain.com with user name\n"
+"\n"
+" machine host.domain.com user myself password secret\n"
+"\n"
+" -o/--output <file>\n"
+" Write output to <file> instead of stdout. If you are\n"
+" using {} or [] to fetch multiple documents, you can use\n"
+" #<num> in the <file> specifier. That variable will be\n"
+" replaced with the current string for the URL being\n"
+" fetched. Like in:\n"
+"\n"
+" curl http://{one,two}.site.com -o \"file_#1.txt\"\n"
+"\n"
+" or use several variables like:\n"
+"\n"
+" curl http://{site,host}.host[1-5].com -o \"#1_#2\"\n"
+"\n"
+" -O/--remote-name\n"
+" Write output to a local file named like the remote file\n"
+" we get. (Only the file part of the remote file is used,\n"
+" the path is cut off.)\n"
+"\n"
+" -P/--ftpport <address>\n"
+" (FTP) Reverses the initiator/listenor roles when con-\n"
+" necting with ftp. This switch makes Curl use the PORT\n"
+" command instead of PASV. In practice, PORT tells the\n"
+" server to connect to the client's specified address and\n"
+" port, while PASV asks the server for an ip address and\n"
+" port to connect to. <address> should be one of:\n"
+" interface - i.e \"eth0\" to specify which interface's IP\n"
+" address you want to use (Unix only)\n"
+" IP address - i.e \"192.168.10.1\" to specify exact IP\n"
+" number\n"
+" host name - i.e \"my.host.domain\" to specify machine\n"
+" \"-\" - (any single-letter string) to make it pick\n"
+" the machine's default\n"
+" -q If used as the first parameter on the command line, the\n"
+" $HOME/.curlrc file will not be read and used as a con-\n"
+" fig file.\n"
+"\n"
+" -Q/--quote <comand>\n"
+" (FTP) Send an arbitrary command to the remote FTP\n"
+" server, by using the QUOTE command of the server. Not\n"
+" all servers support this command, and the set of QUOTE\n"
+" commands are server specific!\n"
+"\n"
+" -r/--range <range>\n"
+" (HTTP/FTP) Retrieve a byte range (i.e a partial docu-\n"
+" ment) from a HTTP/1.1 or FTP server. Ranges can be\n"
+" specified in a number of ways.\n"
+" 0-499 - specifies the first 500 bytes\n"
+" 500-999 - specifies the second 500 bytes\n"
+" -500 - specifies the last 500 bytes\n"
+" 9500- - specifies the bytes from offset 9500\n"
+" and forward\n"
+" 0-0,-1 - specifies the first and last byte\n"
+" only(*)(H)\n"
+" 500-700,600-799 - specifies 300 bytes from offset\n"
+" 500(H)\n"
+" 100-199,500-599 - specifies two separate 100 bytes\n"
+" ranges(*)(H)\n"
+"\n"
+" (*) = NOTE that this will cause the server to reply\n"
+" with a multipart response!\n"
+"\n"
+" You should also be aware that many HTTP/1.1 servers do\n"
+" not have this feature enabled, so that when you attempt\n"
+" to get a range, you'll instead get the whole document.\n"
+"\n"
+" FTP range downloads only support the simple syntax\n"
+" 'start-stop' (optionally with one of the numbers omit-\n"
+" ted). It depends on the non-RFC command SIZE.\n"
+"\n"
+" -s/--silent\n"
+" Silent mode. Don't show progress meter or error mes-\n"
+" sages. Makes Curl mute.\n"
+"\n"
+" -S/--show-error\n"
+" When used with -s it makes curl show error message if\n"
+" it fails.\n"
+"\n"
+" -t/--upload\n"
+" Transfer the stdin data to the specified file. Curl\n"
+" will read everything from stdin until EOF and store\n"
+" with the supplied name. If this is used on a http(s)\n"
+" server, the PUT command will be used.\n"
+"\n"
+" -T/--upload-file <file>\n"
+" Like -t, but this transfers the specified local file.\n"
+" If there is no file part in the specified URL, Curl\n"
+" will append the local file name. NOTE that you must use\n"
+" a trailing / on the last directory to really prove to\n"
+" Curl that there is no file name or curl will think that\n"
+" your last directory name is the remote file name to\n"
+" use. That will most likely cause the upload operation\n"
+" to fail. If this is used on a http(s) server, the PUT\n"
+" command will be used.\n"
+"\n"
+" -u/--user <user:password>\n"
+" Specify user and password to use when fetching. See\n"
+" README.curl for detailed examples of how to use this.\n"
+" If no password is specified, curl will ask for it\n"
+" interactively.\n"
+"\n"
+" -U/--proxy-user <user:password>\n"
+" Specify user and password to use for Proxy authentica-\n"
+" tion. If no password is specified, curl will ask for it\n"
+" interactively.\n"
+"\n"
+" -v/--verbose\n"
+" Makes the fetching more verbose/talkative. Mostly\n"
+" usable for debugging. Lines starting with '>' means\n"
+" data sent by curl, '<' means data received by curl that\n"
+" is hidden in normal cases and lines starting with '*'\n"
+" means additional info provided by curl.\n"
+"\n"
+" -V/--version\n"
+" Displays the full version of curl, libcurl and other\n"
+" 3rd party libraries linked with the executable.\n"
+"\n"
+" -x/--proxy <proxyhost[:port]>\n"
+" Use specified proxy. If the port number is not speci-\n"
+" fied, it is assumed at port 1080.\n"
+"\n"
+" -X/--request <command>\n"
+" (HTTP) Specifies a custom request to use when communi-\n"
+" cating with the HTTP server. The specified request\n"
+" will be used instead of the standard GET. Read the HTTP\n"
+" 1.1 specification for details and explanations.\n"
+"\n"
+" (FTP) Specifies a custom FTP command to use instead of\n"
+" LIST when doing file lists with ftp.\n"
+"\n"
+" -y/--speed-time <speed>\n"
+" Speed Limit. If a download is slower than this given\n"
+" speed, in bytes per second, for Speed Time seconds it\n"
+" gets aborted. Speed Time is set with -Y and is 30 if\n"
+" not set.\n"
+"\n"
+" -Y/--speed-limit <time>\n"
+" Speed Time. If a download is slower than Speed Limit\n"
+" bytes per second during a Speed Time period, the down-\n"
+" load gets aborted. If Speed Time is used, the default\n"
+" Speed Limit will be 1 unless set with -y.\n"
+"\n"
+" -z/--time-cond <date expression>\n"
+" (HTTP) Request to get a file that has been modified\n"
+" later than the given time and date, or one that has\n"
+" been modified before that time. The date expression can\n"
+" be all sorts of date strings or if it doesn't match any\n"
+" internal ones, it tries to get the time from a given\n"
+" file name instead! See the GNU date(1) man page for\n"
+" date expression details.\n"
+"\n"
+" Start the date expression with a dash (-) to make it\n"
+" request for a document that is older than the given\n"
+" date/time, default is a document that is newer than the\n"
+" specified date/time.\n"
+"\n"
+" -3/--sslv3\n"
+" (HTTPS) Forces curl to use SSL version 3 when negotiat-\n"
+" ing with a remote SSL server.\n"
+"\n"
+" -2/--sslv2\n"
+" (HTTPS) Forces curl to use SSL version 2 when negotiat-\n"
+" ing with a remote SSL server.\n"
+"\n"
+" -#/--progress-bar\n"
+" Make curl display progress information as a progress\n"
+" bar instead of the default statistics.\n"
+"\n"
+" --crlf\n"
+" (FTP) Convert LF to CRLF in upload. Useful for MVS\n"
+" (OS/390).\n"
+"\n"
+" --stderr <file>\n"
+" Redirect all writes to stderr to the specified file\n"
+" instead. If the file name is a plain '-', it is instead\n"
+" written to stdout. This option has no point when you're\n"
+" using a shell with decent redirecting capabilities.\n"
+"\n"
+"FILES\n"
+" ~/.curlrc\n"
+" Default config file.\n"
+"\n"
+"ENVIRONMENT\n"
+" HTTP_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for HTTP.\n"
+"\n"
+" HTTPS_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for HTTPS.\n"
+" FTP_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for FTP.\n"
+"\n"
+" GOPHER_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use for GOPHER.\n"
+"\n"
+" ALL_PROXY [protocol://]<host>[:port]\n"
+" Sets proxy server to use if no protocol-specific proxy\n"
+" is set.\n"
+"\n"
+" NO_PROXY <comma-separated list of hosts>\n"
+" list of host names that shouldn't go through any proxy.\n"
+" If se\n"
+"\n"
+"LATEST VERSION\n"
+"\n"
+" You always find news about what's going on as well as the latest versions\n"
+" from the curl web pages, located at:\n"
+"\n"
+" http://curl.haxx.nu\n"
+"\n"
+"SIMPLE USAGE\n"
+"\n"
+" Get the main page from netscape's web-server:\n"
+"\n"
+" curl http://www.netscape.com/\n"
+"\n"
+" Get the root README file from funet's ftp-server:\n"
+"\n"
+" curl ftp://ftp.funet.fi/README\n"
+"\n"
+" Get a gopher document from funet's gopher server:\n"
+"\n"
+" curl gopher://gopher.funet.fi\n"
+"\n"
+" Get a web page from a server using port 8000:\n"
+"\n"
+" curl http://www.weirdserver.com:8000/\n"
+"\n"
+" Get a list of the root directory of an FTP site:\n"
+"\n"
+" curl ftp://ftp.fts.frontec.se/\n"
+"\n"
+" Get the definition of curl from a dictionary:\n"
+"\n"
+" curl dict://dict.org/m:curl\n"
+"\n"
+"DOWNLOAD TO A FILE\n"
+"\n"
+" Get a web page and store in a local file:\n"
+"\n"
+" curl -o thatpage.html http://www.netscape.com/\n"
+"\n"
+" Get a web page and store in a local file, make the local file get the name\n"
+" of the remote document (if no file name part is specified in the URL, this\n"
+" will fail):\n"
+"\n"
+" curl -O http://www.netscape.com/index.html\n"
+"\n"
+"USING PASSWORDS\n"
+"\n"
+" FTP\n"
+"\n"
+" To ftp files using name+passwd, include them in the URL like:\n"
+"\n"
+" curl ftp://name:passwd@machine.domain:port/full/path/to/file\n"
+"\n"
+" or specify them with the -u flag like\n"
+"\n"
+" curl -u name:passwd ftp://machine.domain:port/full/path/to/file\n"
+"\n"
+" HTTP\n"
+"\n"
+" The HTTP URL doesn't support user and password in the URL string. Curl\n"
+" does support that anyway to provide a ftp-style interface and thus you can\n"
+" pick a file like:\n"
+"\n"
+" curl http://name:passwd@machine.domain/full/path/to/file\n"
+"\n"
+" or specify user and password separately like in\n"
+"\n"
+" curl -u name:passwd http://machine.domain/full/path/to/file\n"
+"\n"
+" NOTE! Since HTTP URLs don't support user and password, you can't use that\n"
+" style when using Curl via a proxy. You _must_ use the -u style fetch\n"
+" during such circumstances.\n"
+"\n"
+" HTTPS\n"
+"\n"
+" Probably most commonly used with private certificates, as explained below.\n"
+"\n"
+" GOPHER\n"
+"\n"
+" Curl features no password support for gopher.\n"
+"\n"
+"PROXY\n"
+"\n"
+" Get an ftp file using a proxy named my-proxy that uses port 888:\n"
+"\n"
+" curl -x my-proxy:888 ftp://ftp.leachsite.com/README\n"
+"\n"
+" Get a file from a HTTP server that requires user and password, using the\n"
+" same proxy as above:\n"
+"\n"
+" curl -u user:passwd -x my-proxy:888 http://www.get.this/\n"
+"\n"
+" Some proxies require special authentication. Specify by using -U as above:\n"
+"\n"
+" curl -U user:passwd -x my-proxy:888 http://www.get.this/\n"
+"\n"
+" See also the environment variables Curl support that offer further proxy\n"
+" control.\n"
+"\n"
+"RANGES\n"
+"\n"
+" With HTTP 1.1 byte-ranges were introduced. Using this, a client can request\n"
+" to get only one or more subparts of a specified document. Curl supports\n"
+" this with the -r flag.\n"
+"\n"
+" Get the first 100 bytes of a document:\n"
+"\n"
+" curl -r 0-99 http://www.get.this/\n"
+"\n"
+" Get the last 500 bytes of a document:\n"
+"\n"
+" curl -r -500 http://www.get.this/\n"
+"\n"
+" Curl also supports simple ranges for FTP files as well. Then you can only\n"
+" specify start and stop position.\n"
+"\n"
+" Get the first 100 bytes of a document using FTP:\n"
+"\n"
+" curl -r 0-99 ftp://www.get.this/README \n"
+"\n"
+"UPLOADING\n"
+"\n"
+" FTP\n"
+"\n"
+" Upload all data on stdin to a specified ftp site:\n"
+"\n"
+" curl -t ftp://ftp.upload.com/myfile\n"
+"\n"
+" Upload data from a specified file, login with user and password:\n"
+"\n"
+" curl -T uploadfile -u user:passwd ftp://ftp.upload.com/myfile\n"
+"\n"
+" Upload a local file to the remote site, and use the local file name remote\n"
+" too:\n"
+" \n"
+" curl -T uploadfile -u user:passwd ftp://ftp.upload.com/\n"
+"\n"
+" NOTE: Curl is not currently supporing ftp upload through a proxy! The reason\n"
+" for this is simply that proxies are seldomly configured to allow this and\n"
+" that no author has supplied code that makes it possible!\n"
+"\n"
+" HTTP\n"
+"\n"
+" Upload all data on stdin to a specified http site:\n"
+"\n"
+" curl -t http://www.upload.com/myfile\n"
+"\n"
+" Note that the http server must've been configured to accept PUT before this\n"
+" can be done successfully.\n"
+"\n"
+" For other ways to do http data upload, see the POST section below.\n"
+"\n"
+"VERBOSE / DEBUG\n"
+"\n"
+" If curl fails where it isn't supposed to, if the servers don't let you\n"
+" in, if you can't understand the responses: use the -v flag to get VERBOSE\n"
+" fetching. Curl will output lots of info and all data it sends and\n"
+" receives in order to let the user see all client-server interaction.\n"
+"\n"
+" curl -v ftp://ftp.upload.com/\n"
+"\n"
+"DETAILED INFORMATION\n"
+"\n"
+" Different protocols provide different ways of getting detailed information\n"
+" about specific files/documents. To get curl to show detailed information\n"
+" about a single file, you should use -I/--head option. It displays all\n"
+" available info on a single file for HTTP and FTP. The HTTP information is a\n"
+" lot more extensive.\n"
+"\n"
+" For HTTP, you can get the header information (the same as -I would show)\n"
+" shown before the data by using -i/--include. Curl understands the\n"
+" -D/--dump-header option when getting files from both FTP and HTTP, and it\n"
+" will then store the headers in the specified file.\n"
+"\n"
+" Store the HTTP headers in a separate file:\n"
+"\n"
+" curl --dump-header headers.txt curl.haxx.nu\n"
+"\n"
+" Note that headers stored in a separate file can be very useful at a later\n"
+" time if you want curl to use cookies sent by the server. More about that in\n"
+" the cookies section.\n"
+"\n"
+"POST (HTTP)\n"
+"\n"
+" It's easy to post data using curl. This is done using the -d <data>\n"
+" option. The post data must be urlencoded.\n"
+"\n"
+" Post a simple \"name\" and \"phone\" guestbook.\n"
+"\n"
+" curl -d \"name=Rafael%20Sagula&phone=3320780\" \\\n"
+" http://www.where.com/guest.cgi\n"
+"\n"
+" While -d uses the application/x-www-form-urlencoded mime-type, generally\n"
+" understood by CGI's and similar, curl also supports the more capable\n"
+" multipart/form-data type. This latter type supports things like file upload.\n"
+"\n"
+" -F accepts parameters like -F \"name=contents\". If you want the contents to\n"
+" be read from a file, use <@filename> as contents. When specifying a file,\n"
+" you can also specify which content type the file is, by appending\n"
+" ';type=<mime type>' to the file name. You can also post contents of several\n"
+" files in one field. So that the field name 'coolfiles' can be sent three\n"
+" files with different content types in a manner similar to:\n"
+"\n"
+" curl -F \"coolfiles=@fil1.gif;type=image/gif,fil2.txt,fil3.html\" \\\n"
+" http://www.post.com/postit.cgi\n"
+"\n"
+" If content-type is not specified, curl will try to guess from the extension\n"
+" (it only knows a few), or use the previously specified type (from an earlier\n"
+" file if several files are specified in a list) or finally using the default\n"
+" type 'text/plain'.\n"
+"\n"
+" Emulate a fill-in form with -F. Let's say you fill in three fields in a\n"
+" form. One field is a file name which to post, one field is your name and one\n"
+" field is a file description. We want to post the file we have written named\n"
+" \"cooltext.txt\". To let curl do the posting of this data instead of your\n"
+" favourite browser, you have to check out the HTML of the form page to get to\n"
+" know the names of the input fields. In our example, the input field names are\n"
+" 'file', 'yourname' and 'filedescription'.\n"
+"\n"
+" curl -F \"file=@cooltext.txt\" -F \"yourname=Daniel\" \\\n"
+" -F \"filedescription=Cool text file with cool text inside\" \\\n"
+" http://www.post.com/postit.cgi\n"
+"\n"
+" So, to send two files in one post you can do it in two ways:\n"
+"\n"
+" 1. Send multiple files in a single \"field\" with a single field name:\n"
+" \n"
+" curl -F \"pictures=@dog.gif,cat.gif\" \n"
+" \n"
+" 2. Send two fields with two field names: \n"
+"\n"
+" curl -F \"docpicture=@dog.gif\" -F \"catpicture=@cat.gif\" \n"
+"\n"
+"REFERER\n"
+"\n"
+" A HTTP request has the option to include information about which address\n"
+" that referred to actual page, and curl allows the user to specify that\n"
+" referrer to get specified on the command line. It is especially useful to\n"
+" fool or trick stupid servers or CGI scripts that rely on that information\n"
+" being available or contain certain data.\n"
+"\n"
+" curl -e www.coolsite.com http://www.showme.com/\n"
+"\n"
+"USER AGENT\n"
+"\n"
+" A HTTP request has the option to include information about the browser\n"
+" that generated the request. Curl allows it to be specified on the command\n"
+" line. It is especially useful to fool or trick stupid servers or CGI\n"
+" scripts that only accept certain browsers.\n"
+"\n"
+" Example:\n"
+"\n"
+" curl -A 'Mozilla/3.0 (Win95; I)' http://www.nationsbank.com/\n"
+"\n"
+" Other common strings:\n"
+" 'Mozilla/3.0 (Win95; I)' Netscape Version 3 for Windows 95\n"
+" 'Mozilla/3.04 (Win95; U)' Netscape Version 3 for Windows 95\n"
+" 'Mozilla/2.02 (OS/2; U)' Netscape Version 2 for OS/2\n"
+" 'Mozilla/4.04 [en] (X11; U; AIX 4.2; Nav)' NS for AIX\n"
+" 'Mozilla/4.05 [en] (X11; U; Linux 2.0.32 i586)' NS for Linux\n"
+"\n"
+" Note that Internet Explorer tries hard to be compatible in every way:\n"
+" 'Mozilla/4.0 (compatible; MSIE 4.01; Windows 95)' MSIE for W95\n"
+"\n"
+" Mozilla is not the only possible User-Agent name:\n"
+" 'Konqueror/1.0' KDE File Manager desktop client\n"
+" 'Lynx/2.7.1 libwww-FM/2.14' Lynx command line browser\n"
+"\n"
+"COOKIES\n"
+"\n"
+" Cookies are generally used by web servers to keep state information at the\n"
+" client's side. The server sets cookies by sending a response line in the\n"
+" headers that looks like 'Set-Cookie: <data>' where the data part then\n"
+" typically contains a set of NAME=VALUE pairs (separated by semicolons ';'\n"
+" like \"NAME1=VALUE1; NAME2=VALUE2;\"). The server can also specify for what\n"
+" path the \"cookie\" should be used for (by specifying \"path=value\"), when the\n"
+" cookie should expire (\"expire=DATE\"), for what domain to use it\n"
+" (\"domain=NAME\") and if it should be used on secure connections only\n"
+" (\"secure\").\n"
+"\n"
+" If you've received a page from a server that contains a header like:\n"
+" Set-Cookie: sessionid=boo123; path=\"/foo\";\n"
+"\n"
+" it means the server wants that first pair passed on when we get anything in\n"
+" a path beginning with \"/foo\".\n"
+"\n"
+" Example, get a page that wants my name passed in a cookie:\n"
+"\n"
+" curl -b \"name=Daniel\" www.sillypage.com\n"
+"\n"
+" Curl also has the ability to use previously received cookies in following\n"
+" sessions. If you get cookies from a server and store them in a file in a\n"
+" manner similar to:\n"
+"\n"
+" curl --dump-header headers www.example.com\n"
+"\n"
+" ... you can then in a second connect to that (or another) site, use the\n"
+" cookies from the 'headers' file like:\n"
+"\n"
+" curl -b headers www.example.com\n"
+"\n"
+" Note that by specifying -b you enable the \"cookie awareness\" and with -L\n"
+" you can make curl follow a location: (which often is used in combination\n"
+" with cookies). So that if a site sends cookies and a location, you can\n"
+" use a non-existing file to trig the cookie awareness like:\n"
+"\n"
+" curl -L -b empty-file www.example.com\n"
+"\n"
+" The file to read cookies from must be formatted using plain HTTP headers OR\n"
+" as netscape's cookie file. Curl will determine what kind it is based on the\n"
+" file contents.\n"
+"\n"
+"PROGRESS METER\n"
+"\n"
+" The progress meter was introduced to better show a user that something\n"
+" actually is happening. The different fields in the output have the following\n"
+" meaning:\n"
+"\n"
+" % Received Total Speed Time left Total Curr.Speed\n"
+" 13 524140 3841536 4296 0:12:52 0:14:54 292 \n"
+"\n"
+" From left-to-right:\n"
+" - The first column, is the percentage of the file currently transfered.\n"
+" - Received means the total number of bytes that has been transfered.\n"
+" - Total is the total number of bytes expected to transfer.\n"
+" - Speed is average speed in bytes per second for the whole transfer so far.\n"
+" - Time left is the estimated time left for this transfer to finnish if the\n"
+" current average speed will remain steady.\n"
+" - Total is the estimated total transfer time.\n"
+" - Curr.Speed is the average transfer speed the last 5 seconds (the first\n"
+" 5 seconds of a transfer is based on less time of course.)\n"
+"\n"
+" NOTE: Much of the output is based on the fact that the size of the transfer\n"
+" is known before it takes place. If it isn't, a much less fancy display will\n"
+" be used.\n"
+"\n"
+"SPEED LIMIT\n"
+"\n"
+" Curl offers the user to set conditions regarding transfer speed that must\n"
+" be met to let the transfer keep going. By using the switch -y and -Y you\n"
+" can make curl abort transfers if the transfer speed doesn't exceed your\n"
+" given lowest limit for a specified time.\n"
+"\n"
+" To let curl abandon downloading this page if its slower than 3000 bytes per\n"
+" second for 1 minute, run:\n"
+"\n"
+" curl -y 3000 -Y 60 www.far-away-site.com\n"
+"\n"
+" This can very well be used in combination with the overall time limit, so\n"
+" that the above operatioin must be completed in whole within 30 minutes:\n"
+"\n"
+" curl -m 1800 -y 3000 -Y 60 www.far-away-site.com\n"
+"\n"
+"CONFIG FILE\n"
+"\n"
+" Curl automatically tries to read the .curlrc file (or _curlrc file on win32\n"
+" systems) from the user's home dir on startup. The config file is made up of\n"
+" normal command line switches. Comments can be used within the file: if the\n"
+" first character on a line is a '#', the rest of the line is treated as a\n"
+" comment.\n"
+"\n"
+" Example, set default time out and proxy in a config file:\n"
+"\n"
+" # We want a 30 minute timeout:\n"
+" -m 1800\n"
+" # ... and we use a proxy for all accesses:\n"
+" -x proxy.our.domain.com:8080\n"
+"\n"
+" White spaces ARE significant at the end of lines, but all white spaces\n"
+" leading up to the first characters of each line are ignored.\n"
+"\n"
+" Prevent curl from reading the default file by using -q as the first command\n"
+" line parameter, like:\n"
+"\n"
+" curl -q www.thatsite.com\n"
+"\n"
+" Force curl to get and display a local help page in case it is invoked\n"
+" without a URL by making a config file similar to:\n"
+"\n"
+" # default url to get\n"
+" http://help.with.curl.com/curlhelp.html\n"
+"\n"
+" You can specify another config file to be read by using the -K/--config\n"
+" flag. If you set the config file name to \"-\" it'll read the config from\n"
+" stdin, which can be handy if you want to hide options from showing up in\n"
+" process tables etc:\n"
+"\n"
+" echo \"-u user:passwd\" | curl -K - http://that.secret.site.com\n"
+"\n"
+"EXTRA HEADERS\n"
+"\n"
+" When using curl in your own very special programs, you may end up needing\n"
+" to pass on your own custom headers when getting a web page. You can do\n"
+" this by using the -H flag.\n"
+"\n"
+" Example, send the header \"X-you-and-me: yes\" to the server when getting a\n"
+" page:\n"
+"\n"
+" curl -H \"X-you-and-me: yes\" www.love.com\n"
+"\n"
+" This can also be useful in case you want curl to send a different text in\n"
+" a header than it normally does. The -H header you specify then replaces the\n"
+" header curl would normally send.\n"
+"\n"
+"FTP and PATH NAMES\n"
+"\n"
+" Do note that when getting files with the ftp:// URL, the given path is\n"
+" relative to the directory you enter. To get the file 'README' from your home\n"
+" directory at your ftp site, do:\n"
+"\n"
+" curl ftp://user:passwd@my.site.com/README\n"
+"\n"
+" But if you want the README file from the root directory of that very same\n"
+" site, you need to specify the absolute file name:\n"
+"\n"
+" curl ftp://user:passwd@my.site.com//README\n"
+"\n"
+" (I.e. with an extra slash in front of the file name.)\n"
+"\n"
+"FTP and firewalls\n"
+"\n"
+" The FTP protocol requires one of the involved parties to open a second\n"
+" connection as soon as data is about to get transferred. There are two ways\n"
+" to do this.\n"
+"\n"
+" The default way for curl is to issue the PASV command which causes the\n"
+" server to open another port and await another connection performed by the\n"
+" client. This is good if the client is behind a firewall that doesn't allow\n"
+" incoming connections.\n"
+"\n"
+" curl ftp.download.com\n"
+"\n"
+" If the server, for example, is behind a firewall that doesn't allow\n"
+" connections on ports other than 21 (or if it just doesn't support the PASV\n"
+" command), the other way to do it is to use the PORT command and instruct the\n"
+" server to connect to the client on the given (as parameters to the PORT\n"
+" command) IP number and port.\n"
+"\n"
+" The -P flag to curl allows for different options. Your machine may have\n"
+" several IP-addresses and/or network interfaces and curl lets you select\n"
+" which of them to use. The default address can also be used:\n"
+"\n"
+" curl -P - ftp.download.com\n"
+"\n"
+" Download with PORT but use the IP address of our 'le0' interface:\n"
+"\n"
+" curl -P le0 ftp.download.com\n"
+"\n"
+" Download with PORT but use 192.168.0.10 as our IP address to use:\n"
+"\n"
+" curl -P 192.168.0.10 ftp.download.com\n"
+"\n"
+"HTTPS\n"
+"\n"
+" Secure HTTP requires SSLeay to be installed and used when curl is built. If\n"
+" that is done, curl is capable of retrieving and posting documents using the\n"
+" HTTPS protocol.\n"
+"\n"
+" Example:\n"
+"\n"
+" curl https://www.secure-site.com\n"
+"\n"
+" Curl is also capable of using your personal certificates to get/post files\n"
+" from sites that require valid certificates. The only drawback is that the\n"
+" certificate needs to be in PEM-format. PEM is a standard and open format to\n"
+" store certificates with, but it is not used by the most commonly used\n"
+" browsers (Netscape and MSIE both use the so-called PKCS#12 format). If you\n"
+" want curl to use the certificates you use with your (favourite) browser, you\n"
+" may need to download/compile a converter that can convert your browser's\n"
+" formatted certificates to PEM formatted ones. Dr Stephen N. Henson has\n"
+" written a patch for SSLeay that adds this functionality. You can get his\n"
+" patch (that requires an SSLeay installation) from his site at:\n"
+" http://www.drh-consultancy.demon.co.uk/\n"
+"\n"
+" Example of how to automatically retrieve a document using a certificate\n"
+" with a personal password:\n"
+"\n"
+" curl -E /path/to/cert.pem:password https://secure.site.com/\n"
+"\n"
+" If you neglect to specify the password on the command line, you will be\n"
+" prompted for the correct password before any data can be received.\n"
+"\n"
+" Many older SSL servers have problems with SSLv3 or TLS, which newer versions\n"
+" of OpenSSL etc. use, so it is sometimes useful to specify which SSL version\n"
+" curl should use. Use -3 or -2 to specify the exact SSL version to use:\n"
+"\n"
+" curl -2 https://secure.site.com/\n"
+"\n"
+" Otherwise, curl will first attempt to use v3 and then v2.\n"
+"\n"
+"RESUMING FILE TRANSFERS\n"
+"\n"
+" To continue a file transfer where it was previously aborted, curl supports\n"
+" resume on http(s) downloads as well as ftp uploads and downloads.\n"
+"\n"
+" Continue downloading a document:\n"
+"\n"
+" curl -c -o file ftp://ftp.server.com/path/file\n"
+"\n"
+" Continue uploading a document(*1):\n"
+"\n"
+" curl -c -T file ftp://ftp.server.com/path/file\n"
+"\n"
+" Continue downloading a document from a web server(*2):\n"
+"\n"
+" curl -c -o file http://www.server.com/\n"
+"\n"
+" (*1) = This requires that the ftp server supports the non-standard command\n"
+" SIZE. If it doesn't, curl will say so.\n"
+"\n"
+" (*2) = This requires that the web server supports at least HTTP/1.1. If it\n"
+" doesn't, curl will say so.\n"
+"\n"
+"TIME CONDITIONS\n"
+"\n"
+" HTTP allows a client to specify a time condition for the document it\n"
+" requests: If-Modified-Since or If-Unmodified-Since. Curl allows you to\n"
+" specify them with the -z/--time-cond flag.\n"
+"\n"
+" For example, you can easily make a download that only gets performed if the\n"
+" remote file is newer than a local copy. It would be done like:\n"
+"\n"
+" curl -z local.html http://remote.server.com/remote.html\n"
+"\n"
+" Or you can download a file only if the local file is newer than the remote\n"
+" one. Do this by prepending the date string with a '-', as in:\n"
+"\n"
+" curl -z -local.html http://remote.server.com/remote.html\n"
+"\n"
+" You can specify a \"free text\" date as a condition. Tell curl to only download\n"
+" the file if it was updated since yesterday:\n"
+"\n"
+" curl -z yesterday http://remote.server.com/remote.html\n"
+"\n"
+" Curl will then accept a wide range of date formats. You can always reverse\n"
+" the date check by prepending the date with a dash '-'.\n"
+"\n"
+"DICT\n"
+"\n"
+" For fun try\n"
+"\n"
+" curl dict://dict.org/m:curl\n"
+" curl dict://dict.org/d:heisenbug:jargon\n"
+" curl dict://dict.org/d:daniel:web1913\n"
+"\n"
+" Aliases for 'm' are 'match' and 'find', and aliases for 'd' are 'define'\n"
+" and 'lookup'. For example,\n"
+"\n"
+" curl dict://dict.org/find:curl\n"
+"\n"
+" Commands that break the URL description of the RFC (but not the DICT\n"
+" protocol) are\n"
+"\n"
+" curl dict://dict.org/show:db\n"
+" curl dict://dict.org/show:strat\n"
+"\n"
+" Authentication is still missing (but this is not required by the RFC).\n"
+"\n"
+"LDAP\n"
+"\n"
+" If you have installed the OpenLDAP library, curl can take advantage of it\n"
+" and offer ldap:// support.\n"
+"\n"
+" LDAP is a complex thing and writing an LDAP query is not an easy task. I\n"
+" advise you to dig up the syntax description for that elsewhere; RFC 1959 is\n"
+" one place to start.\n"
+"\n"
+" To show you an example, this is how I can get all people from my local LDAP\n"
+" server whose email addresses contain a certain sub-domain:\n"
+"\n"
+" curl -B \"ldap://ldap.frontec.se/o=frontec??sub?mail=*sth.frontec.se\"\n"
+"\n"
+" If I want the same info in HTML format, I can get it by not using the -B\n"
+" (enforce ASCII) flag.\n"
+"\n"
+"ENVIRONMENT VARIABLES\n"
+"\n"
+" Curl reads and understands the following environment variables:\n"
+"\n"
+" HTTP_PROXY, HTTPS_PROXY, FTP_PROXY, GOPHER_PROXY\n"
+"\n"
+" They should be set for protocol-specific proxies. A general proxy should be\n"
+" set with\n"
+"\n"
+" ALL_PROXY\n"
+"\n"
+" A comma-separated list of host names that shouldn't go through any proxy is\n"
+" set in (only an asterisk, '*', matches all hosts)\n"
+"\n"
+" NO_PROXY\n"
+"\n"
+" If a tail substring of the domain-path for a host matches one of these\n"
+" strings, transactions with that node will not be proxied.\n"
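+"\n"
+" For example, in a Bourne-compatible shell you could route all HTTP requests\n"
+" through a proxy like this (the proxy host name is just an example):\n"
+"\n"
+"   HTTP_PROXY=http://proxy.our.domain.com:8080; export HTTP_PROXY\n"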
+"\n"
+"\n"
+" The usage of the -x/--proxy flag overrides the environment variables.\n"
+"\n"
+"MAILING LIST\n"
+"\n"
+" We have an open mailing list to discuss curl, its development and things\n"
+" relevant to this.\n"
+"\n"
+" To subscribe, mail curl-request@contactor.se with \"subscribe <your email\n"
+" address>\" in the body.\n"
+"\n"
+" To post to the list, mail curl@contactor.se.\n"
+"\n"
+" To unsubscribe, mail curl-request@contactor.se with \"unsubscribe <your\n"
+" subscribed email address>\" in the body.\n"
+"\n"
+ ) ;
+}
diff --git a/src/main.c b/src/main.c
new file mode 100644
index 000000000..5666c777e
--- /dev/null
+++ b/src/main.c
@@ -0,0 +1,1154 @@
+/*****************************************************************************
+ * _ _ ____ _
+ * Project ___| | | | _ \| |
+ * / __| | | | |_) | |
+ * | (__| |_| | _ <| |___
+ * \___|\___/|_| \_\_____|
+ *
+ * The contents of this file are subject to the Mozilla Public License
+ * Version 1.0 (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+ * License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * The Original Code is Curl.
+ *
+ * The Initial Developer of the Original Code is Daniel Stenberg.
+ *
+ * Portions created by the Initial Developer are Copyright (C) 1998.
+ * All Rights Reserved.
+ *
+ * ------------------------------------------------------------
+ * Main author:
+ * - Daniel Stenberg <Daniel.Stenberg@haxx.nu>
+ *
+ * http://curl.haxx.nu
+ *
+ * $Source$
+ * $Revision$
+ * $Date$
+ * $Author$
+ * $State$
+ * $Locker$
+ *
+ * ------------------------------------------------------------
+ ****************************************************************************/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdarg.h>
+#include <sys/stat.h>
+#include <ctype.h>
+
+#include <curl/curl.h>
+#include <curl/mprintf.h>
+#include "../lib/getdate.h"
+#ifdef GLOBURL
+#include "urlglob.h"
+#define CURLseparator "--_curl_--"
+#define MIMEseparator "_curl_"
+#endif
+
+/* This is now designed to have its own local setup.h */
+#include "setup.h"
+
+#include "version.h"
+
+#ifdef HAVE_IO_H /* typical win32 habit */
+#include <io.h>
+#endif
+
+#ifdef HAVE_UNISTD_H
+#include <unistd.h>
+#endif
+
+extern void hugehelp(void);
+
+static void helpf(char *fmt, ...)
+{
+ va_list ap;
+ if(fmt) {
+ va_start(ap, fmt);
+ fputs("curl: ", stderr); /* prefix it */
+ vfprintf(stderr, fmt, ap);
+ va_end(ap);
+ }
+ fprintf(stderr, "curl: try 'curl --help' for more information\n");
+}
+
+static void help(void)
+{
+ printf(CURL_ID "%s\n"
+ "Usage: curl [options...] <url>\n"
+ "Options: (H) means HTTP/HTTPS only, (F) means FTP only\n"
+ " -a/--append Append to target file when uploading (F)\n"
+ " -A/--user-agent <string> User-Agent to send to server (H)\n"
+ " -b/--cookie <name=string/file> Cookie string or file to read cookies from (H)\n"
+ " -B/--ftp-ascii Use ASCII transfer (F)\n"
+ " -c/--continue Resume a previous transfer where we left it\n"
+ " -C/--continue-at <offset> Specify absolute resume offset\n"
+ " -d/--data POST data (H)\n"
+ " -D/--dump-header <file> Write the headers to this file\n"
+ " -e/--referer Referer page (H)\n"
+ " -E/--cert <cert:passwd> Specifies your certificate file and password (HTTPS)\n"
+ " -f/--fail Fail silently (no output at all) on errors (H)\n"
+ " -F/--form <name=content> Specify HTTP POST data (H)\n"
+
+ " -h/--help This help text\n"
+ " -H/--header <line> Custom header to pass to server. (H)\n"
+ " -i/--include Include the HTTP-header in the output (H)\n"
+ " -I/--head Fetch document info only (HTTP HEAD/FTP SIZE)\n"
+ " -K/--config Specify which config file to read\n"
+ " -l/--list-only List only names of an FTP directory (F)\n"
+ " -L/--location Follow Location: hints (H)\n"
+ " -m/--max-time <seconds> Maximum time allowed for the transfer\n"
+ " -M/--manual Display huge help text\n"
+ " -n/--netrc Read .netrc for user name and password\n"
+ " -o/--output <file> Write output to <file> instead of stdout\n"
+ " -O/--remote-name Write output to a file named as the remote file\n"
+#if 0
+ " -p/--port <port> Use port other than default for current protocol.\n"
+#endif
+ " -P/--ftpport <address> Use PORT with address instead of PASV when ftping (F)\n"
+ " -q When used as the first parameter disables .curlrc\n"
+ " -Q/--quote <cmd> Send QUOTE command to FTP before file transfer (F)\n"
+ " -r/--range <range> Retrieve a byte range from a HTTP/1.1 or FTP server\n"
+ " -s/--silent Silent mode. Don't output anything\n"
+ " -S/--show-error Show error. With -s, make curl show errors when they occur\n"
+ " -t/--upload Transfer/upload stdin to remote site\n"
+ " -T/--upload-file <file> Transfer/upload <file> to remote site\n"
+ " -u/--user <user:password> Specify user and password to use\n"
+ " -U/--proxy-user <user:password> Specify Proxy authentication\n"
+ " -v/--verbose Makes the operation more talkative\n"
+ " -V/--version Outputs version number then quits\n"
+ " -x/--proxy <host> Use proxy. (Default port is 1080)\n"
+ " -X/--request <command> Specific request command to use\n"
+ " -y/--speed-limit Stop transfer if below speed-limit for 'speed-time' secs\n"
+ " -Y/--speed-time Time needed to trigger speed-limit abort. Defaults to 30\n"
+ " -z/--time-cond <time> Includes a time condition to the server (H)\n"
+ " -2/--sslv2 Force usage of SSLv2 (H)\n"
+ " -3/--sslv3 Force usage of SSLv3 (H)\n"
+ " -#/--progress-bar Display transfer progress as a progress bar\n"
+ " --crlf Convert LF to CRLF in upload. Useful for MVS (OS/390)\n"
+ " --stderr <file> Where to redirect stderr. - means stdout.\n",
+ curl_version()
+ );
+}
+
+struct LongShort {
+ char *letter;
+ char *lname;
+ bool extraparam;
+};
+
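+/* Configurable collects everything that the command line and the config
+ file can set; main() fills in one instance and passes its fields on to
+ curl_urlget(). */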
+struct Configurable {
+ char *useragent;
+ char *cookie;
+ bool use_resume;
+ int resume_from;
+ char *postfields;
+ char *referer;
+ long timeout;
+ char *outfile;
+ char *headerfile;
+ char remotefile;
+ char *ftpport;
+ unsigned short porttouse;
+ char *range;
+ int low_speed_limit;
+ int low_speed_time;
+ bool showerror;
+ char *infile;
+ char *userpwd;
+ char *proxyuserpwd;
+ char *proxy;
+ bool configread;
+ long conf;
+ char *url;
+ char *cert;
+ char *cert_passwd;
+ bool crlf;
+ char *cookiefile;
+ char *customrequest;
+ bool progressmode;
+
+ FILE *errors; /* if stderr redirect is requested */
+
+ struct curl_slist *quote;
+
+ long ssl_version;
+ TimeCond timecond;
+ time_t condtime;
+
+ struct HttpHeader *headers;
+ struct HttpHeader *last_header;
+
+ struct HttpPost *httppost;
+ struct HttpPost *last_post;
+};
+
+static int parseconfig(char *filename,
+ struct Configurable *config);
+
+static void GetStr(char **string,
+ char *value)
+{
+ if(*string)
+ free(*string);
+ *string = strdup(value);
+}
+
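+/* file2string() reads a whole stream into a single malloc()ed string,
+ stripping CR and LF characters along the way. The caller frees the
+ result; a NULL stream gives a NULL return. */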
+static char *file2string(FILE *file)
+{
+ char buffer[256];
+ char *ptr;
+ char *string=NULL;
+ int len=0;
+ int stringlen;
+
+ if(file) {
+ while(fgets(buffer, sizeof(buffer), file)) {
+ ptr= strchr(buffer, '\r');
+ if(ptr)
+ *ptr=0;
+ ptr= strchr(buffer, '\n');
+ if(ptr)
+ *ptr=0;
+ stringlen=strlen(buffer);
+ if(string)
+ string = realloc(string, len+stringlen+1);
+ else
+ string = malloc(stringlen+1);
+
+ strcpy(string+len, buffer);
+
+ len+=stringlen;
+ }
+ return string;
+ }
+ else
+ return NULL; /* no string */
+}
+
+static int getparameter(char *flag, /* f or -long-flag */
+ char *nextarg, /* NULL if unset */
+ bool *usedarg, /* set to TRUE if the arg has been
+ used */
+ struct Configurable *config)
+{
+ char letter;
+ char *parse=NULL;
+ int res;
+ struct HttpHeader *head;
+ int j;
+ time_t now;
+ int hit=-1;
+
+ /* single-letter,
+ long-name,
+ boolean whether it takes an additional argument
+ */
+ struct LongShort aliases[]= {
+ {"9", "crlf", FALSE},
+ {"8", "stderr", TRUE},
+
+ {"2", "sslv2", FALSE},
+ {"3", "sslv3", FALSE},
+ {"a", "append", FALSE},
+ {"A", "user-agent", TRUE},
+ {"b", "cookie", TRUE},
+ {"B", "ftp-ascii", FALSE},
+ {"c", "continue", FALSE},
+ {"C", "continue-at", TRUE},
+ {"d", "data", TRUE},
+ {"D", "dump-header", TRUE},
+ {"e", "referer", TRUE},
+ {"E", "cert", TRUE},
+ {"f", "fail", FALSE},
+ {"F", "form", TRUE},
+
+ {"h", "help", FALSE},
+ {"H", "header", TRUE},
+ {"i", "include", FALSE},
+ {"I", "head", FALSE},
+ {"K", "config", TRUE},
+ {"l", "list-only", FALSE},
+ {"L", "location", FALSE},
+ {"m", "max-time", TRUE},
+ {"M", "manual", FALSE},
+ {"n", "netrc", FALSE},
+ {"o", "output", TRUE},
+ {"O", "remote-name", FALSE},
+#if 0
+ {"p", "port", TRUE},
+#endif
+ {"P", "ftpport", TRUE},
+ {"q", "disable", FALSE},
+ {"Q", "quote", TRUE},
+ {"r", "range", TRUE},
+ {"s", "silent", FALSE},
+ {"S", "show-error", FALSE},
+ {"t", "upload", FALSE},
+ {"T", "upload-file", TRUE},
+ {"u", "user", TRUE},
+ {"U", "proxy-user", TRUE},
+ {"v", "verbose", FALSE},
+ {"V", "version", FALSE},
+ {"x", "proxy", TRUE},
+ {"X", "request", TRUE},
+ {"X", "http-request", TRUE}, /* OBSOLETE VERSION */
+    {"y", "speed-limit", TRUE},
+    {"Y", "speed-time", TRUE},
+ {"z", "time-cond", TRUE},
+ {"#", "progress-bar",FALSE},
+ };
+
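+  /* Long options may be abbreviated to any unambiguous prefix: an exact
+     match is accepted immediately, while two different partial matches
+     make the option ambiguous and abort the parsing. */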
+ if('-' == flag[0]) {
+ /* try a long name */
+ int fnam=strlen(&flag[1]);
+ for(j=0; j< sizeof(aliases)/sizeof(aliases[0]); j++) {
+ if(strnequal(aliases[j].lname, &flag[1], fnam)) {
+ if(strequal(aliases[j].lname, &flag[1])) {
+ parse = aliases[j].letter;
+ hit = j;
+ break;
+ }
+ if(parse) {
+ /* this is the second match, we can't continue! */
+ helpf("option --%s is ambiguous\n", &flag[1]);
+ return URG_FAILED_INIT;
+ }
+ parse = aliases[j].letter;
+ hit = j;
+ }
+ }
+ if(hit < 0) {
+ helpf("unknown option -%s.\n", flag);
+ return URG_FAILED_INIT;
+ }
+ }
+ else {
+ hit=-1;
+ parse = flag;
+ }
+
+ do {
+ /* we can loop here if we have multiple single-letters */
+
+ letter = parse?*parse:'\0';
+ *usedarg = FALSE; /* default is that we don't use the arg */
+
+#if 0
+ fprintf(stderr, "OPTION: %c %s\n", letter, nextarg?nextarg:"<null>");
+#endif
+ if(hit < 0) {
+ for(j=0; j< sizeof(aliases)/sizeof(aliases[0]); j++) {
+ if(letter == *aliases[j].letter) {
+ hit = j;
+ break;
+ }
+ }
+ if(hit < 0) {
+ helpf("unknown option -%c.\n", letter);
+ return URG_FAILED_INIT;
+ }
+ }
+ if(!nextarg && aliases[hit].extraparam) {
+ helpf("option -%s/--%s requires an extra argument!\n",
+ aliases[hit].letter,
+ aliases[hit].lname);
+ return URG_FAILED_INIT;
+ }
+ else if(nextarg && aliases[hit].extraparam)
+ *usedarg = TRUE; /* mark it as used */
+
+ switch(letter) {
+ case 'z': /* time condition coming up */
+ switch(*nextarg) {
+ case '+':
+ nextarg++;
+ default:
+ /* If-Modified-Since: (section 14.28 in RFC2068) */
+ config->timecond = TIMECOND_IFMODSINCE;
+ break;
+ case '-':
+ /* If-Unmodified-Since: (section 14.24 in RFC2068) */
+ config->timecond = TIMECOND_IFUNMODSINCE;
+ nextarg++;
+ break;
+ case '=':
+ /* Last-Modified: (section 14.29 in RFC2068) */
+ config->timecond = TIMECOND_LASTMOD;
+ nextarg++;
+ break;
+ }
+ now=time(NULL);
+ config->condtime=get_date(nextarg, &now);
+ if(-1 == config->condtime) {
+ /* now let's see if it is a file name to get the time from instead! */
+ struct stat statbuf;
+ if(-1 == stat(nextarg, &statbuf)) {
+ /* failed, remove time condition */
+ config->timecond = TIMECOND_NONE;
+ }
+ else {
+ /* pull the time out from the file */
+ config->condtime = statbuf.st_mtime;
+ }
+ }
+ break;
+ case '9': /* there is no short letter for this */
+      /* LF -> CRLF conversion? */
+ config->crlf = TRUE;
+ break;
+ case '8': /* there is no short letter for this */
+ if(strcmp(nextarg, "-"))
+ config->errors = fopen(nextarg, "wt");
+ else
+ config->errors = stdout;
+ break;
+ case '#': /* added 19990617 larsa */
+ config->progressmode ^= CURL_PROGRESS_BAR;
+ break;
+ case '2':
+ /* SSL version 2 */
+ config->ssl_version = 2;
+ break;
+ case '3':
+      /* SSL version 3 */
+ config->ssl_version = 3;
+ break;
+ case 'a':
+ /* This makes the FTP sessions use APPE instead of STOR */
+ config->conf ^= CONF_FTPAPPEND;
+ break;
+ case 'A':
+ /* This specifies the User-Agent name */
+ GetStr(&config->useragent, nextarg);
+ break;
+ case 'b': /* cookie string coming up: */
+ if(strchr(nextarg, '=')) {
+ /* A cookie string must have a =-letter */
+ GetStr(&config->cookie, nextarg);
+ }
+ else {
+ /* We have a cookie file to read from! */
+ GetStr(&config->cookiefile, nextarg);
+ }
+ break;
+ case 'B':
+ /* use type ASCII when transfering ftp files */
+ config->conf ^= CONF_FTPASCII;
+ break;
+ case 'c':
+ /* This makes us continue an ftp transfer */
+ config->use_resume^=TRUE;
+ break;
+ case 'C':
+ /* This makes us continue an ftp transfer at given position */
+ config->resume_from= atoi(nextarg);
+ config->use_resume=TRUE;
+ break;
+ case 'd':
+ /* postfield data */
+ if('@' == *nextarg) {
+ /* the data begins with a '@' letter, it means that a file name
+ or - (stdin) follows */
+ FILE *file;
+ nextarg++; /* pass the @ */
+ if(strequal("-", nextarg))
+ file = stdin;
+ else
+ file = fopen(nextarg, "r");
+ config->postfields = file2string(file);
+ if(file && (file != stdin))
+          fclose(file);
+ }
+ else {
+ GetStr(&config->postfields, nextarg);
+ }
+ if(config->postfields)
+ config->conf |= CONF_POST;
+ break;
+ case 'D':
+ /* dump-header to given file name */
+ GetStr(&config->headerfile, nextarg);
+ break;
+ case 'e':
+ GetStr(&config->referer, nextarg);
+ config->conf |= CONF_REFERER;
+ break;
+ case 'E':
+ {
+ char *ptr = strchr(nextarg, ':');
+ if(ptr) {
+ /* we have a password too */
+ *ptr=0;
+ ptr++;
+ GetStr(&config->cert_passwd, ptr);
+ }
+ GetStr(&config->cert, nextarg);
+ }
+ break;
+ case 'f':
+ /* fail hard on errors */
+ config->conf ^= CONF_FAILONERROR;
+ break;
+ case 'F':
+ /* "form data" simulation, this is a little advanced so lets do our best
+ to sort this out slowly and carefully */
+ if(curl_FormParse(nextarg,
+ &config->httppost,
+ &config->last_post))
+ return URG_FAILED_INIT;
+ config->conf |= CONF_HTTPPOST; /* no toggle, OR! */
+ break;
+
+ case 'h': /* h for help */
+ help();
+ return URG_FAILED_INIT;
+ case 'H':
+ head = (struct HttpHeader *)malloc(sizeof(struct HttpHeader));
+ if(head) {
+ head->next = NULL;
+ head->header = NULL; /* first zero this */
+ GetStr(&head->header, nextarg); /* now get the header line */
+
+ /* point on our new one */
+ if(config->last_header)
+ config->last_header->next = head;
+ else {
+ config->headers = head;
+ }
+
+ config->last_header = head;
+ }
+ break;
+ case 'i':
+ config->conf ^= CONF_HEADER; /* include the HTTP header as well */
+ break;
+ case 'I':
+ config->conf ^= CONF_HEADER; /* include the HTTP header in the output */
+ config->conf ^= CONF_NOBODY; /* don't fetch the body at all */
+ break;
+ case 'K':
+ res = parseconfig(nextarg, config);
+ config->configread = TRUE;
+ if(res)
+ return res;
+ break;
+ case 'l':
+ config->conf ^= CONF_FTPLISTONLY; /* only list the names of the FTP dir */
+ break;
+ case 'L':
+ config->conf ^= CONF_FOLLOWLOCATION; /* Follow Location: HTTP headers */
+ break;
+ case 'm':
+ /* specified max time */
+ config->timeout = atoi(nextarg);
+ break;
+ case 'M': /* M for manual, huge help */
+ hugehelp();
+ return URG_FAILED_INIT;
+ case 'n':
+      /* pick info from .netrc, if this is used for http, curl will
+         automatically enforce user+password with the request */
+ config->conf ^= CONF_NETRC;
+ break;
+ case 'o':
+ /* output file */
+ GetStr(&config->outfile, nextarg); /* write to this file */
+ break;
+ case 'O':
+ /* output file */
+ config->remotefile ^= TRUE;
+ break;
+ case 'P':
+ /* This makes the FTP sessions use PORT instead of PASV */
+ /* use <eth0> or <192.168.10.10> style addresses. Anything except
+ this will make us try to get the "default" address.
+ NOTE: this is a changed behaviour since the released 4.1!
+ */
+ config->conf |= CONF_FTPPORT;
+ GetStr(&config->ftpport, nextarg);
+ break;
+#if 0
+ case 'p':
+ /* specified port */
+ fputs("You've used the -p option, it will be removed in a future version\n",
+ stderr);
+ config->porttouse = atoi(nextarg);
+ config->conf |= CONF_PORT; /* changed port */
+ break;
+#endif
+ case 'q': /* if used first, already taken care of, we do it like
+ this so we don't cause an error! */
+ break;
+ case 'Q':
+ /* QUOTE command to send to FTP server */
+ config->quote = curl_slist_append(config->quote, nextarg);
+ break;
+ case 'r':
+ /* byte range requested */
+ GetStr(&config->range, nextarg);
+ config->conf |= CONF_RANGE;
+ break;
+ case 's':
+ /* don't show progress meter, don't show errors : */
+ config->conf |= (CONF_MUTE|CONF_NOPROGRESS);
+ config->showerror ^= TRUE; /* toggle off */
+ break;
+ case 'S':
+ /* show errors */
+ config->showerror ^= TRUE; /* toggle on if used with -s */
+ break;
+ case 't':
+ /* we are uploading */
+ config->conf ^= CONF_UPLOAD;
+ break;
+ case 'T':
+ /* we are uploading */
+ config->conf |= CONF_UPLOAD;
+ GetStr(&config->infile, nextarg);
+ break;
+ case 'u':
+ /* user:password */
+ GetStr(&config->userpwd, nextarg);
+ config->conf |= CONF_USERPWD;
+ break;
+ case 'U':
+ /* Proxy user:password */
+ GetStr(&config->proxyuserpwd, nextarg);
+ config->conf |= CONF_PROXYUSERPWD;
+ break;
+ case 'v':
+ config->conf ^= CONF_VERBOSE; /* talk a lot */
+ break;
+ case 'V':
+ printf(CURL_ID "%s\n", curl_version());
+ return URG_FAILED_INIT;
+ case 'x':
+ /* proxy */
+ if(!*nextarg) {
+ /* disable proxy when no proxy is given */
+ config->conf &= ~CONF_PROXY;
+ }
+ else {
+ config->conf |= CONF_PROXY;
+ GetStr(&config->proxy, nextarg);
+ }
+ break;
+ case 'X':
+ /* HTTP request */
+ GetStr(&config->customrequest, nextarg);
+ break;
+ case 'Y':
+ /* low speed time */
+ config->low_speed_time = atoi(nextarg);
+ if(!config->low_speed_limit)
+ config->low_speed_limit = 1;
+ break;
+ case 'y':
+ /* low speed limit */
+ config->low_speed_limit = atoi(nextarg);
+ if(!config->low_speed_time)
+ config->low_speed_time=30;
+ break;
+
+ default: /* unknown flag */
+ if(letter)
+ helpf("Unknown option '%c'\n", letter);
+ else
+ helpf("Unknown option\n"); /* short help blurb */
+ return URG_FAILED_INIT;
+ }
+ hit = -1;
+
+ } while(*++parse && !*usedarg);
+
+ return URG_OK;
+}
+
+
+static int parseconfig(char *filename,
+ struct Configurable *config)
+{
+ int res;
+ FILE *file;
+ char configbuffer[4096];
+ char filebuffer[256];
+ bool usedarg;
+
+ if(!filename || !*filename) {
+ /* NULL or no file name attempts to load .curlrc from the homedir! */
+
+#define CURLRC DOT_CHAR "curlrc"
+
+ char *home = curl_GetEnv("HOME"); /* portable environment reader */
+
+ if(!home || (strlen(home)>(sizeof(filebuffer)-strlen(CURLRC))))
+ return URG_OK;
+
+ sprintf(filebuffer, "%s%s%s", home, DIR_CHAR, CURLRC);
+
+ filename = filebuffer;
+ }
+
+ if(strcmp(filename,"-"))
+ file = fopen(filename, "r");
+ else
+ file = stdin;
+
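+  /* A config file line is either a bare URL (no leading '-') or one or
+     more flags. A flag's argument can sit on the same line or on a
+     following line; getparameter() reports in 'usedarg' whether the
+     candidate argument was consumed. */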
+ if(file) {
+ char *tok;
+ char *tok2;
+ while(fgets(configbuffer, sizeof(configbuffer), file)) {
+      /* lines with '#' in the first column are comments! */
+
+#if 0
+ fprintf(stderr, "%s", configbuffer);
+#endif
+ if('#' == configbuffer[0])
+ continue;
+ tok = configbuffer;
+
+ while(*tok && isspace((int)*tok))
+ tok++;
+/* tok=strtok(configbuffer, " \t\n"); */
+#if 0
+ fprintf(stderr, "TOK: %s\n", tok);
+#endif
+ if('-' != tok[0]) {
+ char *nl;
+ if(config->url)
+ free(config->url);
+ config->url = strdup(tok);
+ nl = strchr(config->url, '\n');
+ if(nl)
+ *nl=0;
+ }
+ while(('-' == tok[0])) {
+ /* this is a flag */
+ char *firsttok = strdup(tok);
+ char *nl;
+
+ /* remove newline from firsttok */
+ nl = strchr(firsttok, '\n');
+ if(nl)
+ *nl=0;
+
+ /* pass the -flag */
+ tok2=tok;
+ while(*tok2 && !isspace((int)*tok2))
+ tok2++;
+
+ /* pass the following white space */
+ while(*tok2 && isspace((int)*tok2))
+ tok2++;
+
+ while(!*tok2 &&
+ fgets(configbuffer, sizeof(configbuffer), file)) {
+          /* lines with '#' in the first column are comments! */
+#if 0
+ fprintf(stderr, "%s", configbuffer);
+#endif
+ if('#' == configbuffer[0])
+ continue;
+ tok2 = configbuffer;
+ /* tok2=strtok(configbuffer, " \t\n"); */
+ /* pass white space */
+ while(*tok2 && isspace((int)*tok2))
+ tok2++;
+ }
+ /* remove newline from tok2 */
+ nl = strchr(tok2, '\n');
+ if(nl)
+ *nl=0;
+
+ res = getparameter(firsttok+1,
+ *tok2?tok2:NULL,
+ &usedarg,
+ config);
+ free(firsttok);
+#if 0
+ fprintf(stderr, "TOK %s TOK2: %s RES: %d\n",
+ firsttok, tok2?tok2:"NULL", res);
+#endif
+ if(res)
+ return res;
+ if(!usedarg) {
+ /* tok2 is unused, */
+ tok = tok2;
+ }
+ else
+ break; /* we've used both our words */
+ }
+ }
+ if(file != stdin)
+ fclose(file);
+ }
+ return URG_OK;
+}
+
+struct OutStruct {
+ char *filename;
+ FILE *stream;
+};
+
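+/* my_fwrite() is passed to the library as the write callback; the stream
+ argument is really our OutStruct, which lets the output file be opened
+ lazily on the first chunk of received data. */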
+int my_fwrite(void *buffer, size_t size, size_t nmemb, FILE *stream)
+{
+ struct OutStruct *out=(struct OutStruct *)stream;
+ if(out && !out->stream) {
+ /* open file for writing */
+ out->stream=fopen(out->filename, "wb");
+ if(!out->stream)
+ return -1; /* failure */
+ }
+ return fwrite(buffer, size, nmemb, out->stream);
+}
+
+
+int main(int argc, char *argv[])
+{
+ char errorbuffer[URLGET_ERROR_SIZE];
+
+ struct OutStruct outs;
+
+ char *url = NULL;
+#ifdef GLOBURL
+ URLGlob *urls;
+ int urlnum;
+ char *outfiles = NULL;
+ int separator = 0;
+#endif
+
+ FILE *infd = stdin;
+ FILE *headerfilep = NULL;
+ char *urlbuffer=NULL;
+ int infilesize=-1; /* -1 means unknown */
+ bool stillflags=TRUE;
+
+ int res=URG_OK;
+ int i;
+ struct Configurable config;
+
+ outs.stream = stdout;
+
+ memset(&config, 0, sizeof(struct Configurable));
+
+ /* set non-zero default values: */
+ config.useragent= maprintf(CURL_NAME "/" CURL_VERSION " (" OS ") "
+ "%s", curl_version());
+ config.showerror=TRUE;
+ config.conf=CONF_DEFAULT;
+ config.crlf=FALSE;
+ config.quote=NULL;
+
+ if(argc>1 &&
+ (!strnequal("--", argv[1], 2) && (argv[1][0] == '-')) &&
+ strchr(argv[1], 'q')) {
+ /*
+ * The first flag, that is not a verbose name, but a shortname
+ * and it includes the 'q' flag!
+ */
+#if 0
+ fprintf(stderr, "I TURNED OFF THE CRAP\n");
+#endif
+ ;
+ }
+ else {
+ res = parseconfig(NULL, &config);
+ if(res)
+ return res;
+ }
+
+ if ((argc < 2) && !config.url) {
+ helpf(NULL);
+ return URG_FAILED_INIT;
+ }
+
+ /* Parse options */
+ for (i = 1; i < argc; i++) {
+ if(stillflags &&
+ ('-' == argv[i][0])) {
+ char *nextarg;
+ bool passarg;
+
+ char *flag = &argv[i][1];
+
+ if(strequal("--", argv[i]))
+ /* this indicates the end of the flags and thus enables the
+ following (URL) argument to start with -. */
+ stillflags=FALSE;
+ else {
+ nextarg= (i < argc - 1)? argv[i+1]: NULL;
+
+ res = getparameter ( flag,
+ nextarg,
+ &passarg,
+ &config );
+ if(res)
+ return res;
+
+ if(passarg) /* we're supposed to skip this */
+ i++;
+ }
+ }
+ else {
+ if(url) {
+ helpf("only one URL is supported!\n");
+ return URG_FAILED_INIT;
+ }
+ url = argv[i];
+ }
+ }
+
+ /* if no URL was specified and there was one in the config file, get that
+ one */
+ if(!url && config.url)
+ url = config.url;
+
+ if(!url) {
+ helpf("no URL specified!\n");
+ return URG_FAILED_INIT;
+ }
+#if 0
+ fprintf(stderr, "URL: %s PROXY: %s\n", url, config.proxy?config.proxy:"none");
+#endif
+
+#ifdef GLOBURL
+ urlnum = glob_url(&urls, url); /* expand '{...}' and '[...]' expressions and return
+ total number of URLs in pattern set */
+  outfiles = config.outfile; /* save outfile pattern before expansion */
+ if (!outfiles && !config.remotefile && urlnum > 1) {
+#ifdef CURL_SEPARATORS
+ /* multiple files extracted to stdout, insert separators! */
+ separator = 1;
+#endif
+#ifdef MIME_SEPARATORS
+ /* multiple files extracted to stdout, insert MIME separators! */
+ separator = 1;
+ printf("MIME-Version: 1.0\n");
+ printf("Content-Type: multipart/mixed; boundary=%s\n\n", MIMEseparator);
+#endif
+ }
+ for (i = 0; (url = next_url(urls)); ++i) {
+ if (outfiles)
+ config.outfile = strdup(outfiles);
+#endif
+
+ if(config.outfile && config.infile) {
+ helpf("you can't both upload and download!\n");
+ return URG_FAILED_INIT;
+ }
+
+ if (config.outfile || config.remotefile) {
+ /*
+ * We have specified a file name to store the result in, or we have
+ * decided we want to use the remote file name.
+ */
+
+ if(config.remotefile) {
+ /* Find and get the remote file name */
+ config.outfile=strstr(url, "://");
+ if(config.outfile)
+ config.outfile+=3;
+ else
+ config.outfile=url;
+ config.outfile = strrchr(config.outfile, '/');
+ if(!config.outfile || !strlen(++config.outfile)) {
+ helpf("Remote file name has no length!\n");
+ return URG_WRITE_ERROR;
+ }
+ }
+#ifdef GLOBURL
+ else /* fill '#1' ... '#9' terms from URL pattern */
+ config.outfile = match_url(config.outfile, *urls);
+#endif
+
+ if((0 == config.resume_from) && config.use_resume) {
+ /* we're told to continue where we are now, then we get the size of the
+ file as it is now and open it for append instead */
+ struct stat fileinfo;
+
+ if(0 == stat(config.outfile, &fileinfo)) {
+ /* set offset to current file size: */
+ config.resume_from = fileinfo.st_size;
+ }
+ /* else let offset remain 0 */
+ }
+
+ if(config.resume_from) {
+ /* open file for output: */
+ outs.stream=(FILE *) fopen(config.outfile, config.resume_from?"ab":"wb");
+ if (!outs.stream) {
+ helpf("Can't open '%s'!\n", config.outfile);
+ return URG_WRITE_ERROR;
+ }
+ }
+ else {
+ outs.filename = config.outfile;
+ outs.stream = NULL; /* open when needed */
+ }
+ }
+ if (config.infile) {
+ /*
+ * We have specified a file to upload
+ */
+ struct stat fileinfo;
+
+ /* If no file name part is given in the URL, we add this file name */
+ char *ptr=strstr(url, "://");
+ if(ptr)
+ ptr+=3;
+ else
+ ptr=url;
+ ptr = strrchr(ptr, '/');
+ if(!ptr || !strlen(++ptr)) {
+ /* The URL has no file name part, add the local file name. In order
+ to be able to do so, we have to create a new URL in another buffer.*/
+ urlbuffer=(char *)malloc(strlen(url) + strlen(config.infile) + 3);
+ if(!urlbuffer) {
+ helpf("out of memory\n");
+ return URG_OUT_OF_MEMORY;
+ }
+ if(ptr)
+ /* there is a trailing slash on the URL */
+ sprintf(urlbuffer, "%s%s", url, config.infile);
+ else
+        /* there is no trailing slash on the URL */
+ sprintf(urlbuffer, "%s/%s", url, config.infile);
+
+ url = urlbuffer; /* use our new URL instead! */
+ }
+
+ infd=(FILE *) fopen(config.infile, "rb");
+ if (!infd || stat(config.infile, &fileinfo)) {
+ helpf("Can't open '%s'!\n", config.infile);
+ return URG_READ_ERROR;
+ }
+ infilesize=fileinfo.st_size;
+
+ }
+ if((config.conf&CONF_UPLOAD) &&
+ config.use_resume &&
+ (0==config.resume_from)) {
+ config.resume_from = -1; /* -1 will then force get-it-yourself */
+ }
+ if(config.headerfile) {
+ /* open file for output: */
+ if(strcmp(config.headerfile,"-"))
+ {
+ headerfilep=(FILE *) fopen(config.headerfile, "wb");
+ if (!headerfilep) {
+ helpf("Can't open '%s'!\n", config.headerfile);
+ return URG_WRITE_ERROR;
+ }
+ }
+ else
+ headerfilep=stdout;
+ }
+
+ /* This was previously done in urlget, but that was wrong place to do it */
+ if(outs.stream && isatty(fileno(outs.stream)))
+    /* we send the output to a tty, and therefore we switch off the progress
+       meter right away */
+ config.conf |= CONF_NOPROGRESS;
+
+#ifdef GLOBURL
+ if (urlnum > 1) {
+ fprintf(stderr, "\n[%d/%d]: %s --> %s\n", i+1, urlnum, url, config.outfile ? config.outfile : "<stdout>");
+ if (separator) {
+#ifdef CURL_SEPARATORS
+ printf("%s%s\n", CURLseparator, url);
+#endif
+#ifdef MIME_SEPARATORS
+ printf("--%s\n", MIMEseparator);
+ printf("Content-ID: %s\n\n", url);
+#endif
+ }
+ }
+#endif
+
+ if(!config.errors)
+ config.errors = stderr;
+
+ res = curl_urlget(URGTAG_FILE, (FILE *)&outs, /* where to store */
+ URGTAG_WRITEFUNCTION, my_fwrite, /* what call to write */
+ URGTAG_INFILE, infd, /* for uploads */
+ URGTAG_INFILESIZE, infilesize, /* size of uploaded file */
+ URGTAG_URL, url, /* what to fetch */
+ URGTAG_PROXY, config.proxy, /* proxy to use */
+ URGTAG_FLAGS, config.conf, /* flags */
+ URGTAG_USERPWD, config.userpwd, /* user + passwd */
+ URGTAG_PROXYUSERPWD, config.proxyuserpwd, /* Proxy user + passwd */
+ URGTAG_RANGE, config.range, /* range of document */
+ URGTAG_ERRORBUFFER, errorbuffer,
+ URGTAG_TIMEOUT, config.timeout,
+ URGTAG_POSTFIELDS, config.postfields,
+ URGTAG_REFERER, config.referer,
+ URGTAG_USERAGENT, config.useragent,
+ URGTAG_FTPPORT, config.ftpport,
+ URGTAG_LOW_SPEED_LIMIT, config.low_speed_limit,
+ URGTAG_LOW_SPEED_TIME, config.low_speed_time,
+ URGTAG_RESUME_FROM, config.use_resume?config.resume_from:0,
+ URGTAG_COOKIE, config.cookie,
+ URGTAG_HTTPHEADER, config.headers,
+ URGTAG_HTTPPOST, config.httppost,
+ URGTAG_SSLCERT, config.cert,
+ URGTAG_SSLCERTPASSWD, config.cert_passwd,
+ URGTAG_CRLF, config.crlf,
+ URGTAG_QUOTE, config.quote,
+ URGTAG_WRITEHEADER, headerfilep,
+ URGTAG_COOKIEFILE, config.cookiefile,
+ URGTAG_SSLVERSION, config.ssl_version,
+ URGTAG_TIMECONDITION, config.timecond,
+ URGTAG_TIMEVALUE, config.condtime,
+ URGTAG_CUSTOMREQUEST, config.customrequest,
+ URGTAG_STDERR, config.errors,
+ URGTAG_DONE); /* always terminate the list of tags */
+ if((res!=URG_OK) && config.showerror)
+ fprintf(config.errors, "curl: (%d) %s\n", res, errorbuffer);
+
+ if((config.errors != stderr) &&
+ (config.errors != stdout))
+ /* it wasn't directed to stdout or stderr so close the file! */
+ fclose(config.errors);
+
+ if(urlbuffer)
+ free(urlbuffer);
+ if (config.outfile && outs.stream)
+ fclose(outs.stream);
+ if (config.infile)
+ fclose(infd);
+ if(headerfilep)
+ fclose(headerfilep);
+
+ if(config.url)
+ free(config.url);
+
+#ifdef GLOBURL
+ if(url)
+ free(url);
+ if(config.outfile && !config.remotefile)
+ free(config.outfile);
+ }
+#ifdef MIME_SEPARATORS
+ if (separator)
+ printf("--%s--\n", MIMEseparator);
+#endif
+#endif
+
+  curl_slist_free_all(config.quote); /* it checks for config.quote == NULL itself */
+
+ return(res);
+}
diff --git a/src/mkhelp.pl b/src/mkhelp.pl
new file mode 100644
index 000000000..842a42f59
--- /dev/null
+++ b/src/mkhelp.pl
@@ -0,0 +1,85 @@
+#!/usr/local/bin/perl
+
+# Yeah, I know, probably 1000 other persons already wrote a script like
+# this, but I'll tell ya:
+
+# THEY DON'T FIT ME :-)
+
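+# The plan: read the nroff-rendered man page on stdin, strip its
+# overstrike markup and page headers, append the README named on the
+# command line, and print the result as the C function hugehelp().
+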
+# Get readme file as parameter:
+$README = $ARGV[0];
+
+if($README eq "") {
+    print "usage: mkhelp.pl <README>\n";
+ exit;
+}
+
+
+push @out, " _ _ ____ _ \n";
+push @out, " Project ___| | | | _ \\| | \n";
+push @out, " / __| | | | |_) | | \n";
+push @out, " | (__| |_| | _ <| |___ \n";
+push @out, " \\___|\\___/|_| \\_\\_____|\n";
+
+$head=0;
+loop:
+while (<STDIN>) {
+ $line = $_;
+
+    # nroff marks underlined text as "_<backspace>char"; this kind should
+    # be removed first:
+    $line =~ s/_\010//g;
+
+    # then bold text, marked as "char<backspace>char":
+    $line =~ s/.\010//g;
+
+ if($line =~ /^curl/i) {
+ # cut off the page headers
+ $head=1;
+ next loop;
+ }
+
+ if($line =~ /^[ \t]*\n/) {
+ $wline++;
+ # we only make one empty line max
+ next loop;
+ }
+ if($wline) {
+ $wline = 0;
+ if(!$head) {
+ push @out, "\n";
+ }
+ $head =0;
+ }
+ push @out, $line;
+}
+push @out, "\n"; # just an extra newline
+
+open(READ, "<$README") ||
+ die "couldn't read the README infile";
+
+while(<READ>) {
+ push @out, $_;
+}
+close(READ);
+
+
+print "/* NEVER EVER edit this manually, fix the mkhelp script instead! */\n"
+;
+print "#include <stdio.h>\n";
+print "void hugehelp(void)\n";
+print "{\n";
+print "puts (\n";
+
+for(@out) {
+ chop;
+
+ $new = $_;
+
+ $new =~ s/\\/\\\\/g;
+ $new =~ s/\"/\\\"/g;
+
+ printf("\"%s\\n\"\n", $new);
+
+}
+
+print " ) ;\n}\n"
+
diff --git a/src/setup.h b/src/setup.h
new file mode 100644
index 000000000..78103f9ce
--- /dev/null
+++ b/src/setup.h
@@ -0,0 +1,91 @@
+#ifndef __SETUP_H
+#define __SETUP_H
+/*****************************************************************************
+ * _ _ ____ _
+ * Project ___| | | | _ \| |
+ * / __| | | | |_) | |
+ * | (__| |_| | _ <| |___
+ * \___|\___/|_| \_\_____|
+ *
+ * The contents of this file are subject to the Mozilla Public License
+ * Version 1.0 (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+ * License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * The Original Code is Curl.
+ *
+ * The Initial Developer of the Original Code is Daniel Stenberg.
+ *
+ * Portions created by the Initial Developer are Copyright (C) 1998.
+ * All Rights Reserved.
+ *
+ * ------------------------------------------------------------
+ * Main author:
+ * - Daniel Stenberg <Daniel.Stenberg@haxx.nu>
+ *
+ * http://curl.haxx.nu
+ *
+ * $Source$
+ * $Revision$
+ * $Date$
+ * $Author$
+ * $State$
+ * $Locker$
+ *
+ * ------------------------------------------------------------
+ ****************************************************************************/
+
+#include <stdio.h>
+
+#if !defined(WIN32) && defined(_WIN32)
+/* This _might_ be a good Borland fix. Please report whether this works or
+ not! */
+#define WIN32
+#endif
+
+#ifdef HAVE_CONFIG_H
+#include "config.h" /* the configure script results */
+#else
+#ifdef WIN32
+/* include the hand-modified win32 adjusted config.h! */
+#include "config-win32.h"
+#endif
+#endif
+
+#ifndef OS
+#define OS "unknown"
+#endif
+
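+/* strequal()/strnequal() do case-insensitive string comparison, returning
+ non-zero on a match, regardless of whether the platform spells the
+ underlying function strcasecmp() or stricmp(). */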
+#ifdef HAVE_STRCASECMP
+#define strnequal(x,y,z) !(strncasecmp)(x,y,z)
+#define strequal(x,y) !(strcasecmp)(x,y)
+
+/* this is for "-ansi -Wall -pedantic" to stop complaining! */
+extern int (strcasecmp)(const char *s1, const char *s2);
+extern int (strncasecmp)(const char *s1, const char *s2, size_t n);
+#ifndef fileno /* sunos 4 have this as a macro! */
+int fileno( FILE *stream);
+#endif
+
+#else
+#define strnequal(x,y,z) !strnicmp(x,y,z)
+#define strequal(x,y) !stricmp(x,y)
+#endif
+
+#ifdef WIN32
+#define PATH_CHAR ";"
+#define DIR_CHAR "\\"
+#define DOT_CHAR "_"
+#else
+#define PATH_CHAR ":"
+#define DIR_CHAR "/"
+#define DOT_CHAR "."
+
+#endif
+
+#endif /* __SETUP_H */
diff --git a/src/stamp-h2 b/src/stamp-h2
new file mode 100644
index 000000000..9788f7023
--- /dev/null
+++ b/src/stamp-h2
@@ -0,0 +1 @@
+timestamp
diff --git a/src/stamp-h2.in b/src/stamp-h2.in
new file mode 100644
index 000000000..9788f7023
--- /dev/null
+++ b/src/stamp-h2.in
@@ -0,0 +1 @@
+timestamp
diff --git a/src/urlglob.c b/src/urlglob.c
new file mode 100644
index 000000000..846f86c2f
--- /dev/null
+++ b/src/urlglob.c
@@ -0,0 +1,332 @@
+/*****************************************************************************
+ * _ _ ____ _
+ * Project ___| | | | _ \| |
+ * / __| | | | |_) | |
+ * | (__| |_| | _ <| |___
+ * \___|\___/|_| \_\_____|
+ *
+ * The contents of this file are subject to the Mozilla Public License
+ * Version 1.0 (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+ * License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * The Original Code is Curl.
+ *
+ * The Initial Developer of the Original Code is Daniel Stenberg.
+ *
+ * Portions created by the Initial Developer are Copyright (C) 1998.
+ * All Rights Reserved.
+ *
+ * ------------------------------------------------------------
+ * Main author:
+ * - Daniel Stenberg <Daniel.Stenberg@haxx.nu>
+ *
+ * http://curl.haxx.nu
+ *
+ * $Source$
+ * $Revision$
+ * $Date$
+ * $Author$
+ * $State$
+ * $Locker$
+ *
+ * ------------------------------------------------------------
+ ****************************************************************************/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <ctype.h>
+#include <curl/curl.h>
+#include "urlglob.h"
+
+char glob_buffer[URL_MAX_LENGTH];
+URLGlob *glob_expand;
+
+int glob_word(char*, int);
+
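+/* A globbed URL alternates literal parts with patterns: {}-sets such as
+ "{one,two}" and []-ranges such as "[a-z]" or "[001-100]". A URL like
+ "www.site{a,b}.com/[1-3].html" (just an example) thus expands to six
+ URLs, returned one at a time by repeated next_url() calls. */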
+int glob_set(char *pattern, int pos) {
+ /* processes a set expression with the point behind the opening '{'
+ ','-separated elements are collected until the next closing '}'
+ */
+ char* buf = glob_buffer;
+ URLPattern *pat;
+
+ pat = (URLPattern*)&glob_expand->pattern[glob_expand->size / 2];
+ /* patterns 0,1,2,... correspond to size=1,3,5,... */
+ pat->type = UPTSet;
+ pat->content.Set.size = 0;
+ pat->content.Set.ptr_s = 0;
+ pat->content.Set.elements = (char**)malloc(0);
+ ++glob_expand->size;
+
+ while (1) {
+ switch (*pattern) {
+ case '\0': /* URL ended while set was still open */
+ printf("error: unmatched brace at pos %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ case '{':
+ case '[': /* no nested expressions at this time */
+      printf("error: nested braces not supported at pos %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ case ',':
+ case '}': /* set element completed */
+ *buf = '\0';
+ pat->content.Set.elements = realloc(pat->content.Set.elements, (pat->content.Set.size + 1) * sizeof(char*));
+ if (!pat->content.Set.elements) {
+ printf("out of memory in set pattern\n");
+ exit(URG_OUT_OF_MEMORY);
+ }
+ pat->content.Set.elements[pat->content.Set.size] = strdup(glob_buffer);
+ ++pat->content.Set.size;
+
+ if (*pattern == '}') /* entire set pattern completed */
+ /* always check for a literal (may be "") between patterns */
+ return pat->content.Set.size * glob_word(++pattern, ++pos);
+
+ buf = glob_buffer;
+ ++pattern;
+ ++pos;
+ break;
+ case ']': /* illegal closing bracket */
+ printf("error: illegal pattern at pos %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ case '\\': /* escaped character, skip '\' */
+      if (*(pattern+1) == '\0') { /* but no escaping of '\0'! */
+ printf("error: illegal pattern at pos %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ }
+ ++pattern;
+ ++pos; /* intentional fallthrough */
+ default:
+ *buf++ = *pattern++; /* copy character to set element */
+ ++pos;
+ }
+ }
+ exit (URG_FAILED_INIT);
+}
+
+int glob_range(char *pattern, int pos) {
+ /* processes a range expression with the point behind the opening '['
+ - char range: e.g. "a-z]", "B-Q]"
+ - num range: e.g. "0-9]", "17-2000]"
+ - num range with leading zeros: e.g. "001-999]"
+ expression is checked for well-formedness and collected until the next ']'
+ */
+ URLPattern *pat;
+ char *c;
+
+ pat = (URLPattern*)&glob_expand->pattern[glob_expand->size / 2];
+ /* patterns 0,1,2,... correspond to size=1,3,5,... */
+ ++glob_expand->size;
+
+ if (isalpha((int)*pattern)) { /* character range detected */
+ pat->type = UPTCharRange;
+ if (sscanf(pattern, "%c-%c]", &pat->content.CharRange.min_c, &pat->content.CharRange.max_c) != 2 ||
+ pat->content.CharRange.min_c >= pat->content.CharRange.max_c ||
+ pat->content.CharRange.max_c - pat->content.CharRange.min_c > 'z' - 'a') {
+ /* the pattern is not well-formed */
+ printf("error: illegal pattern or range specification after pos %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ }
+ pat->content.CharRange.ptr_c = pat->content.CharRange.min_c;
+ /* always check for a literal (may be "") between patterns */
+ return (pat->content.CharRange.max_c - pat->content.CharRange.min_c + 1) *
+ glob_word(pattern + 4, pos + 4);
+ }
+ if (isdigit((int)*pattern)) { /* numeric range detected */
+ pat->type = UPTNumRange;
+ pat->content.NumRange.padlength = 0;
+ if (sscanf(pattern, "%d-%d]", &pat->content.NumRange.min_n, &pat->content.NumRange.max_n) != 2 ||
+ pat->content.NumRange.min_n >= pat->content.NumRange.max_n) {
+ /* the pattern is not well-formed */
+ printf("error: illegal pattern or range specification after pos %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ }
+ if (*pattern == '0') { /* leading zero specified */
+ c = pattern;
+ while (isdigit((int)*c++))
+ ++pat->content.NumRange.padlength; /* padding length is set for all instances
+ of this pattern */
+ }
+ pat->content.NumRange.ptr_n = pat->content.NumRange.min_n;
+ c = (char*)(strchr(pattern, ']') + 1); /* continue after next ']' */
+ /* always check for a literal (may be "") between patterns */
+ return (pat->content.NumRange.max_n - pat->content.NumRange.min_n + 1) *
+ glob_word(c, pos + (c - pattern));
+ }
+ printf("error: illegal character in range specification at pos %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+}
+
+int glob_word(char *pattern, int pos) {
+ /* processes a literal string component of a URL
+ special characters '{' and '[' branch to set/range processing functions
+ */
+ char* buf = glob_buffer;
+ int litindex;
+
+ while (*pattern != '\0' && *pattern != '{' && *pattern != '[') {
+ if (*pattern == '}' || *pattern == ']') {
+ printf("illegal character at position %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ }
+ if (*pattern == '\\') { /* escape character, skip '\' */
+ ++pattern;
+ ++pos;
+ if (*pattern == '\0') { /* but no escaping of '\0'! */
+ printf("illegal character at position %d\n", pos);
+ exit (URG_URL_MALFORMAT);
+ }
+ }
+ *buf++ = *pattern++; /* copy character to literal */
+ ++pos;
+ }
+ *buf = '\0';
+ litindex = glob_expand->size / 2;
+ /* literals 0,1,2,... correspond to size=0,2,4,... */
+ glob_expand->literal[litindex] = strdup(glob_buffer);
+ ++glob_expand->size;
+ if (*pattern == '\0')
+ return 1; /* singular URL processed */
+ if (*pattern == '{') {
+ return glob_set(++pattern, ++pos); /* process set pattern */
+ }
+ if (*pattern == '[') {
+ return glob_range(++pattern, ++pos);/* process range pattern */
+ }
+ printf("internal error\n");
+ exit (URG_FAILED_INIT);
+}
+
+int glob_url(URLGlob** glob, char* url) {
+ int urlnum; /* counts instances of a globbed pattern */
+
+ glob_expand = (URLGlob*)malloc(sizeof(URLGlob));
+ glob_expand->size = 0;
+ urlnum = glob_word(url, 1);
+ *glob = glob_expand;
+ return urlnum;
+}
+
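+/* next_url() glues the literals and the current value of every pattern
+ back together into one URL, then steps the rightmost pattern like an
+ odometer; when the leftmost pattern overflows, NULL marks the end. */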
+char *next_url(URLGlob *glob) {
+ static int beenhere = 0;
+ char *buf = glob_buffer;
+ URLPattern *pat;
+ char *lit;
+ signed int i;
+ int carry;
+
+ if (!beenhere)
+ beenhere = 1;
+ else {
+ carry = 1;
+
+ /* implement a counter over the index ranges of all patterns,
+ starting with the rightmost pattern */
+ for (i = glob->size / 2 - 1; carry && i >= 0; --i) {
+ carry = 0;
+ pat = &glob->pattern[i];
+ switch (pat->type) {
+ case UPTSet:
+ if (++pat->content.Set.ptr_s == pat->content.Set.size) {
+ pat->content.Set.ptr_s = 0;
+ carry = 1;
+ }
+ break;
+ case UPTCharRange:
+ if (++pat->content.CharRange.ptr_c > pat->content.CharRange.max_c) {
+ pat->content.CharRange.ptr_c = pat->content.CharRange.min_c;
+ carry = 1;
+ }
+ break;
+ case UPTNumRange:
+ if (++pat->content.NumRange.ptr_n > pat->content.NumRange.max_n) {
+ pat->content.NumRange.ptr_n = pat->content.NumRange.min_n;
+ carry = 1;
+ }
+ break;
+ default:
+ printf("internal error: invalid pattern type (%d)\n", pat->type);
+ exit (URG_FAILED_INIT);
+ }
+ }
+ if (carry) /* first pattern ptr has run into overflow, done! */
+ return NULL;
+ }
+
+ for (i = 0; i < glob->size; ++i) {
+ if (!(i % 2)) { /* every other term (i even) is a literal */
+ lit = glob->literal[i/2];
+ strcpy(buf, lit);
+ buf += strlen(lit);
+ }
+ else { /* the rest (i odd) are patterns */
+ pat = &glob->pattern[i/2];
+ switch(pat->type) {
+ case UPTSet:
+ strcpy(buf, pat->content.Set.elements[pat->content.Set.ptr_s]);
+ buf += strlen(pat->content.Set.elements[pat->content.Set.ptr_s]);
+ break;
+ case UPTCharRange:
+ *buf++ = pat->content.CharRange.ptr_c;
+ break;
+ case UPTNumRange:
+ buf += sprintf(buf, "%0*d", pat->content.NumRange.padlength, pat->content.NumRange.ptr_n);
+ break;
+ default:
+ printf("internal error: invalid pattern type (%d)\n", pat->type);
+ exit (URG_FAILED_INIT);
+ }
+ }
+ }
+ *buf = '\0';
+ return strdup(glob_buffer);
+}
+
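+/* match_url() expands a -o output file pattern in which "#1" ... "#9"
+ are replaced by the current value of the corresponding glob pattern. */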
+char *match_url(char *filename, URLGlob glob) {
+ char *buf = glob_buffer;
+ URLPattern pat;
+ int i;
+
+ while (*filename != '\0') {
+ if (*filename == '#') {
+ if (!isdigit((int)*++filename) ||
+ *filename == '0') { /* only '#1' ... '#9' allowed */
+ printf("illegal matching expression\n");
+ exit(URG_URL_MALFORMAT);
+ }
+ i = *filename - '1';
+ if (i + 1 > glob.size / 2) {
+ printf("match against nonexisting pattern\n");
+ exit(URG_URL_MALFORMAT);
+ }
+ pat = glob.pattern[i];
+ switch (pat.type) {
+ case UPTSet:
+ strcpy(buf, pat.content.Set.elements[pat.content.Set.ptr_s]);
+ buf += strlen(pat.content.Set.elements[pat.content.Set.ptr_s]);
+ break;
+ case UPTCharRange:
+ *buf++ = pat.content.CharRange.ptr_c;
+ break;
+ case UPTNumRange:
+ buf += sprintf(buf, "%0*d", pat.content.NumRange.padlength, pat.content.NumRange.ptr_n);
+ break;
+ default:
+ printf("internal error: invalid pattern type (%d)\n", pat.type);
+ exit (URG_FAILED_INIT);
+ }
+ ++filename;
+ }
+ else
+ *buf++ = *filename++;
+ }
+ *buf = '\0';
+ return strdup(glob_buffer);
+}
diff --git a/src/urlglob.h b/src/urlglob.h
new file mode 100644
index 000000000..dc52371ee
--- /dev/null
+++ b/src/urlglob.h
@@ -0,0 +1,74 @@
+#ifndef __URLGLOB_H
+#define __URLGLOB_H
+/*****************************************************************************
+ * _ _ ____ _
+ * Project ___| | | | _ \| |
+ * / __| | | | |_) | |
+ * | (__| |_| | _ <| |___
+ * \___|\___/|_| \_\_____|
+ *
+ * The contents of this file are subject to the Mozilla Public License
+ * Version 1.0 (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
+ * License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * The Original Code is Curl.
+ *
+ * The Initial Developer of the Original Code is Daniel Stenberg.
+ *
+ * Portions created by the Initial Developer are Copyright (C) 1998.
+ * All Rights Reserved.
+ *
+ * ------------------------------------------------------------
+ * Main author:
+ * - Daniel Stenberg <Daniel.Stenberg@haxx.nu>
+ *
+ * http://curl.haxx.nu
+ *
+ * $Source$
+ * $Revision$
+ * $Date$
+ * $Author$
+ * $State$
+ * $Locker$
+ *
+ * ------------------------------------------------------------
+ ****************************************************************************/
+typedef enum {UPTSet=1,UPTCharRange,UPTNumRange} URLPatternType;
+
+typedef struct {
+ URLPatternType type;
+ union {
+ struct {
+ char **elements;
+ short size;
+ short ptr_s;
+ } Set;
+ struct {
+ char min_c, max_c;
+ char ptr_c;
+ } CharRange;
+ struct {
+ int min_n, max_n;
+ short padlength;
+ int ptr_n;
+ } NumRange ;
+ } content;
+} URLPattern;
+
+typedef struct {
+ char* literal[10];
+ URLPattern pattern[9];
+ int size;
+} URLGlob;
+
+int glob_url(URLGlob**, char*);
+char* next_url(URLGlob*);
+char* match_url(char*, URLGlob);
+
+#endif
diff --git a/src/version.h b/src/version.h
new file mode 100644
index 000000000..65ec2d1ef
--- /dev/null
+++ b/src/version.h
@@ -0,0 +1,3 @@
+#define CURL_NAME "curl"
+#define CURL_VERSION "6.3.1"
+#define CURL_ID CURL_NAME " " CURL_VERSION " (" OS ") "