kapiaba Posted November 4, 2015

Can anyone get this to work?

#include "Fivewin.ch"

//
// File attributes
//
#define FILE_ATTRIBUTE_READONLY        1
#define FILE_ATTRIBUTE_HIDDEN          2
#define FILE_ATTRIBUTE_SYSTEM          4
#define FILE_ATTRIBUTE_DIRECTORY      16
#define FILE_ATTRIBUTE_ARCHIVE        32
#define FILE_ATTRIBUTE_NORMAL        128
#define FILE_ATTRIBUTE_TEMPORARY     256

//
// access types for InternetOpen()
//
#define INTERNET_OPEN_TYPE_PRECONFIG                   0  // use registry configuration
#define INTERNET_OPEN_TYPE_DIRECT                      1  // direct to net
#define INTERNET_OPEN_TYPE_PROXY                       3  // via named proxy
#define INTERNET_OPEN_TYPE_PRECONFIG_WITH_NO_AUTOPROXY 4  // prevent using java/script/INS

//
// manifests
//
#define INTERNET_INVALID_PORT_NUMBER    0  // use the protocol-specific default
#define INTERNET_DEFAULT_FTP_PORT      21  // default for FTP servers
#define INTERNET_DEFAULT_GOPHER_PORT   70  // "       "  gopher  "
#define INTERNET_DEFAULT_HTTP_PORT     80  // "       "  HTTP    "
#define INTERNET_DEFAULT_HTTPS_PORT   443  // "       "  HTTPS   "
#define INTERNET_DEFAULT_SOCKS_PORT  1080  // default for SOCKS firewall servers.

//
// service types for InternetConnect()
//
#define INTERNET_SERVICE_FTP    1
#define INTERNET_SERVICE_GOPHER 2
#define INTERNET_SERVICE_HTTP   3

//
// flags for FTP
//
#define INTERNET_FLAG_TRANSFER_ASCII  1
#define INTERNET_FLAG_TRANSFER_BINARY 2

FUNCTION MAIN()

   LOCAL hInternet, hConnect

   hInternet := INTERNETOPEN( "Anystring", INTERNET_OPEN_TYPE_DIRECT, 0, 0, 0 )

   hConnect  := INTERNETCONNECT( hInternet, "myftpaddress", INTERNET_INVALID_PORT_NUMBER, ;
                                 "myuserid", "mypassword", INTERNET_SERVICE_FTP, 0, 0 )

   ? FTPGETFILE( hConnect, "/emagsoftware.it/test/atest.prg", "atest.prg", 0, ;
                 FILE_ATTRIBUTE_ARCHIVE, 0, 0 )

   INTERNETCLOSEHANDLE( hConnect )
   INTERNETCLOSEHANDLE( hInternet )

RETURN NIL

#pragma BEGINDUMP

#include "windows.h"
#include "wininet.h"
#include "hbapi.h"

HB_FUNC( INTERNETOPEN )
{
   hb_retnl( ( LONG ) InternetOpen( hb_parc( 1 ), hb_parnl( 2 ), hb_parc( 3 ),
                                    hb_parc( 4 ), hb_parnl( 5 ) ) );
}

HB_FUNC( INTERNETCLOSEHANDLE )
{
   hb_retl( InternetCloseHandle( ( HINTERNET ) hb_parnl( 1 ) ) );
}

HB_FUNC( INTERNETCONNECT )
{
   hb_retnl( ( LONG ) InternetConnect( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ),
                                       ( INTERNET_PORT ) hb_parnl( 3 ), hb_parc( 4 ),
                                       hb_parc( 5 ), hb_parnl( 6 ), hb_parnl( 7 ),
                                       hb_parnl( 8 ) ) );
}

HB_FUNC( FTPGETFILE )
{
   hb_retl( FtpGetFile( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ), hb_parc( 3 ),
                        hb_parl( 4 ), hb_parnl( 5 ), hb_parnl( 6 ), hb_parnl( 7 ) ) );
}

HB_FUNC( FTPPUTFILE )
{
   hb_retl( FtpPutFile( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ), hb_parc( 3 ),
                        hb_parnl( 4 ), hb_parnl( 5 ) ) );
}

HB_FUNC( FTPDELETEFILE )
{
   hb_retl( FtpDeleteFile( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ) ) );
}

HB_FUNC( FTPCREATEDIRECTORY )
{
   hb_retl( FtpCreateDirectory( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ) ) );
}

HB_FUNC( FTPREMOVEDIRECTORY )
{
   hb_retl( FtpRemoveDirectory( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ) ) );
}

HB_FUNC( FTPFINDFIRSTFILE )
{
   hb_retnl( ( LONG ) FtpFindFirstFile( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ),
                                        ( WIN32_FIND_DATA * ) hb_parc( 3 ),
                                        hb_parnl( 4 ), hb_parnl( 5 ) ) );
}

HB_FUNC( INTERNETFINDNEXTFILE )
{
   hb_retl( InternetFindNextFile( ( HINTERNET ) hb_parnl( 1 ), hb_parc( 2 ) ) );
}

#pragma ENDDUMP
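Not the poster's code, just a minimal calling sketch built on the wrappers and #define constants above. It checks each WinINet handle before the next call, so a failed INTERNETCONNECT() (wrong host, user or password) is reported instead of crashing FTPGETFILE(). FtpDownload() and its parameter names are made up for the example, and it assumes the #pragma BEGINDUMP block compiles and wininet.lib is linked.

// Hedged sketch: reuses INTERNETOPEN / INTERNETCONNECT / FTPGETFILE and the
// constants defined in the post above.
FUNCTION FtpDownload( cHost, cUser, cPass, cRemote, cLocal )

   LOCAL hInternet, hConnect, lOk := .F.

   hInternet := INTERNETOPEN( "MyApp", INTERNET_OPEN_TYPE_DIRECT, 0, 0, 0 )

   IF ! Empty( hInternet )               // 0 means InternetOpen() failed
      hConnect := INTERNETCONNECT( hInternet, cHost, INTERNET_DEFAULT_FTP_PORT, ;
                                   cUser, cPass, INTERNET_SERVICE_FTP, 0, 0 )
      IF ! Empty( hConnect )             // 0 means the FTP logon failed
         // .F. = do not fail if the local file already exists (overwrite it)
         lOk := FTPGETFILE( hConnect, cRemote, cLocal, .F., ;
                            FILE_ATTRIBUTE_NORMAL, INTERNET_FLAG_TRANSFER_BINARY, 0 )
         INTERNETCLOSEHANDLE( hConnect )
      ENDIF
      INTERNETCLOSEHANDLE( hInternet )
   ENDIF

RETURN lOk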
gilmer Posted November 5, 2015

xHarbour has functions for FTP.
SISTEMASIT Posted November 28, 2015

I use WGET to do the download:

mCOMANDO := "wget http://site.com/arquivo.zip -b -q"
waitrun( mCOMANDO, 0 )
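Note that -b sends wget to the background, so waitrun() returns right away and the program cannot tell whether the download finished. A small hedged variant without -b, so the call blocks until wget is done; the function name and parameters are made up, and it assumes WaitRun() (FiveWin) returns the exit code of the launched process:

// Hypothetical helper, not from the post above.
FUNCTION DownloadFile( cUrl, cLocalFile )

   LOCAL cCmd := 'wget -q -O "' + cLocalFile + '" ' + cUrl   // -O names the output file
   LOCAL nRet := WaitRun( cCmd, 0 )                          // 0 = hidden window

RETURN nRet == 0                                             // wget exits with 0 on success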
aferra Posted November 30, 2015

What on earth is this WGET? Is it a Windows command?
VLNUNES Posted November 30, 2015

The "WGET" I use is a command-line application that downloads a single file or an entire site. Command line:

LOCAL cCommand1 := 'wget -c http://wwwseusite.com.br/pastado site/arquivo a baixar.com extenção -P "pasta onde vai salvar o arquivo"'
RUN ( cCommand1 )

There are other options that I do not use: http://elias.praciano.com/2013/12/o-comando-wget-em-10-exemplos/
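For reference, a cleaned-up sketch of the same idea with hypothetical placeholder paths (example.com, c:\downloads): RUN does not report wget's exit status, so a quick File() check afterwards tells whether the download actually arrived.

// Hypothetical, self-contained test; adjust the URL and folder to your case.
FUNCTION TestWget()

   LOCAL cCommand1 := 'wget -c http://www.example.com/file.zip -P "c:\downloads"'

   RUN ( cCommand1 )

   // RUN gives no exit code, so verify that the file really exists afterwards.
   IF File( "c:\downloads\file.zip" )
      ? "Download OK"
   ELSE
      ? "Download failed, check the URL and the wget output"
   ENDIF

RETURN NIL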
VLNUNES Posted November 30, 2015

Complete WGET help:

GNU Wget 1.11.4, a non-interactive network retriever.
Usage: wget [OPTION]... [URL]...

Mandatory arguments to long options are mandatory for short options too.

Startup:
  -V,  --version           display the version of Wget and exit.
  -h,  --help              print this help.
  -b,  --background        go to background after startup.
  -e,  --execute=COMMAND   execute a `.wgetrc'-style command.

Logging and input file:
  -o,  --output-file=FILE    log messages to FILE.
  -a,  --append-output=FILE  append messages to FILE.
  -d,  --debug               print lots of debugging information.
  -q,  --quiet               quiet (no output).
  -v,  --verbose             be verbose (this is the default).
  -nv, --no-verbose          turn off verboseness, without being quiet.
  -i,  --input-file=FILE     download URLs found in FILE.
  -F,  --force-html          treat input file as HTML.
  -B,  --base=URL            prepends URL to relative links in -F -i file.

Download:
  -t,  --tries=NUMBER            set number of retries to NUMBER (0 unlimits).
       --retry-connrefused       retry even if connection is refused.
  -O,  --output-document=FILE    write documents to FILE.
  -nc, --no-clobber              skip downloads that would download to existing files.
  -c,  --continue                resume getting a partially-downloaded file.
       --progress=TYPE           select progress gauge type.
  -N,  --timestamping            don't re-retrieve files unless newer than local.
  -S,  --server-response         print server response.
       --spider                  don't download anything.
  -T,  --timeout=SECONDS         set all timeout values to SECONDS.
       --dns-timeout=SECS        set the DNS lookup timeout to SECS.
       --connect-timeout=SECS    set the connect timeout to SECS.
       --read-timeout=SECS       set the read timeout to SECS.
  -w,  --wait=SECONDS            wait SECONDS between retrievals.
       --waitretry=SECONDS       wait 1..SECONDS between retries of a retrieval.
       --random-wait             wait from 0...2*WAIT secs between retrievals.
       --no-proxy                explicitly turn off proxy.
  -Q,  --quota=NUMBER            set retrieval quota to NUMBER.
       --bind-address=ADDRESS    bind to ADDRESS (hostname or IP) on local host.
       --limit-rate=RATE         limit download rate to RATE.
       --no-dns-cache            disable caching DNS lookups.
       --restrict-file-names=OS  restrict chars in file names to ones OS allows.
       --ignore-case             ignore case when matching files/directories.
       --user=USER               set both ftp and http user to USER.
       --password=PASS           set both ftp and http password to PASS.

Directories:
  -nd, --no-directories           don't create directories.
  -x,  --force-directories        force creation of directories.
  -nH, --no-host-directories      don't create host directories.
       --protocol-directories     use protocol name in directories.
  -P,  --directory-prefix=PREFIX  save files to PREFIX/...
       --cut-dirs=NUMBER          ignore NUMBER remote directory components.

HTTP options:
       --http-user=USER        set http user to USER.
       --http-password=PASS    set http password to PASS.
       --no-cache              disallow server-cached data.
  -E,  --html-extension        save HTML documents with `.html' extension.
       --ignore-length         ignore `Content-Length' header field.
       --header=STRING         insert STRING among the headers.
       --max-redirect          maximum redirections allowed per page.
       --proxy-user=USER       set USER as proxy username.
       --proxy-password=PASS   set PASS as proxy password.
       --referer=URL           include `Referer: URL' header in HTTP request.
       --save-headers          save the HTTP headers to file.
  -U,  --user-agent=AGENT      identify as AGENT instead of Wget/VERSION.
       --no-http-keep-alive    disable HTTP keep-alive (persistent connections).
       --no-cookies            don't use cookies.
       --load-cookies=FILE     load cookies from FILE before session.
       --save-cookies=FILE     save cookies to FILE after session.
       --keep-session-cookies  load and save session (non-permanent) cookies.
       --post-data=STRING      use the POST method; send STRING as the data.
       --post-file=FILE        use the POST method; send contents of FILE.
       --content-disposition   honor the Content-Disposition header when
                               choosing local file names (EXPERIMENTAL).
       --auth-no-challenge     Send Basic HTTP authentication information
                               without first waiting for the server's challenge.

HTTPS (SSL/TLS) options:
       --secure-protocol=PR      choose secure protocol, one of auto, SSLv2,
                                 SSLv3, and TLSv1.
       --no-check-certificate    don't validate the server's certificate.
       --certificate=FILE        client certificate file.
       --certificate-type=TYPE   client certificate type, PEM or DER.
       --private-key=FILE        private key file.
       --private-key-type=TYPE   private key type, PEM or DER.
       --ca-certificate=FILE     file with the bundle of CA's.
       --ca-directory=DIR        directory where hash list of CA's is stored.
       --random-file=FILE        file with random data for seeding the SSL PRNG.
       --egd-file=FILE           file naming the EGD socket with random data.

FTP options:
       --ftp-user=USER         set ftp user to USER.
       --ftp-password=PASS     set ftp password to PASS.
       --no-remove-listing     don't remove `.listing' files.
       --no-glob               turn off FTP file name globbing.
       --no-passive-ftp        disable the "passive" transfer mode.
       --retr-symlinks         when recursing, get linked-to files (not dir).
       --preserve-permissions  preserve remote file permissions.

Recursive download:
  -r,  --recursive          specify recursive download.
  -l,  --level=NUMBER       maximum recursion depth (inf or 0 for infinite).
       --delete-after       delete files locally after downloading them.
  -k,  --convert-links      make links in downloaded HTML point to local files.
  -K,  --backup-converted   before converting file X, back up as X.orig.
  -m,  --mirror             shortcut for -N -r -l inf --no-remove-listing.
  -p,  --page-requisites    get all images, etc. needed to display HTML page.
       --strict-comments    turn on strict (SGML) handling of HTML comments.

Recursive accept/reject:
  -A,  --accept=LIST               comma-separated list of accepted extensions.
  -R,  --reject=LIST               comma-separated list of rejected extensions.
  -D,  --domains=LIST              comma-separated list of accepted domains.
       --exclude-domains=LIST      comma-separated list of rejected domains.
       --follow-ftp                follow FTP links from HTML documents.
       --follow-tags=LIST          comma-separated list of followed HTML tags.
       --ignore-tags=LIST          comma-separated list of ignored HTML tags.
  -H,  --span-hosts                go to foreign hosts when recursive.
  -L,  --relative                  follow relative links only.
  -I,  --include-directories=LIST  list of allowed directories.
  -X,  --exclude-directories=LIST  list of excluded directories.
  -np, --no-parent                 don't ascend to the parent directory.

Mail bug reports and suggestions to <bug-wget@gnu.org>.
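Since the thread started with FTP, here is one possible bridge back to the original question, using the --ftp-user and --ftp-password options listed above. Host, credentials and paths are placeholders; it assumes wget.exe is reachable on the PATH and that WaitRun() (FiveWin) returns the process exit code.

// Hypothetical helper: download one file from an FTP server with wget instead of WinINet.
FUNCTION FtpGetViaWget( cHost, cUser, cPass, cRemoteFile, cLocalDir )

   LOCAL cCmd := 'wget -c --ftp-user=' + cUser + ' --ftp-password=' + cPass + ;
                 ' -P "' + cLocalDir + '" "ftp://' + cHost + '/' + cRemoteFile + '"'

RETURN WaitRun( cCmd, 0 ) == 0   // 0 = hidden window; exit code 0 means the download succeeded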