HTTP: Difference between revisions

18,195 bytes added, 3 years ago
(Undo revision 315432 by WdeCvfYlmB (talk))
(Summarily revert Revision as of 06:23, 2 May 2020 rather than trying to piecemeal revert)
Line 5:
 
There is a separate task for [[HTTPS Request]]s.
 
=={{header|8th}}==
<lang forth>
"http://www.rosettacode.org" net:get drop >s .
</lang>
 
=={{header|ABAP}}==
This works for ABAP Version 7.40 and above
<lang ABAP>
report z_http.

cl_http_client=>create_by_url(
  exporting
    url                = `http://rosettacode.org/robots.txt`
  importing
    client             = data(http_client)
  exceptions
    argument_not_found = 1
    plugin_not_active  = 2
    internal_error     = 3
    others             = 4 ).

if sy-subrc <> 0.
  data(error_message) = switch string( sy-subrc
    when 1 then `argument_not_found`
    when 2 then `plugin_not_active`
    when 3 then `internal_error`
    when 4 then `other error` ).

  write error_message.
  exit.
endif.

data(rest_http_client) = cast if_rest_client( new cl_rest_http_client( http_client ) ).

rest_http_client->get( ).

data(response_string) = rest_http_client->get_response_entity( )->get_string_data( ).

split response_string at cl_abap_char_utilities=>newline into table data(output_table).

loop at output_table assigning field-symbol(<output_line>).
  write / <output_line>.
endloop.
</lang>
 
{{out}}
 
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>
 
=={{header|ActionScript}}==
<lang actionscript>package
{
    import flash.display.Sprite;
    import flash.events.Event;
    import flash.net.*;

    public class RequestExample extends Sprite
    {
        public function RequestExample()
        {
            var loader:URLLoader = new URLLoader();
            loader.addEventListener(Event.COMPLETE, loadComplete);
            loader.load(new URLRequest("http://www.rosettacode.org"));
        }

        private function loadComplete(evt:Event):void
        {
            trace(evt.target.data);
        }
    }
}</lang>
 
=={{header|Ada}}==
{{libheader|AWS}}
<lang ada>with Ada.Text_IO; use Ada.Text_IO;

with AWS.Client;
with AWS.Response;

procedure HTTP_Request is
begin
   Put_Line (AWS.Response.Message_Body (AWS.Client.Get (URL => "http://www.rosettacode.org")));
end HTTP_Request;</lang>
 
=={{header|ALGOL 68}}==
{{works with|ALGOL 68|Revision 1 - however ''grep in string'', ''http content'' and ''str error'' are from a non-standard library}}
 
{{works with|ALGOL 68G|Any - tested with release [http://sourceforge.net/projects/algol68/files/algol68g/algol68g-1.18.0/algol68g-1.18.0-9h.tiny.el5.centos.fc11.i386.rpm/download 1.18.0-9h.tiny]}}
{{wont work with|ELLA ALGOL 68|Any (with appropriate job cards) - tested with release [http://sourceforge.net/projects/algol68/files/algol68toc/algol68toc-1.8.8d/algol68toc-1.8-8d.fc9.i386.rpm/download 1.8-8d] - due to extensive use of ''grep in string'' and ''http content''}}
 
<lang algol68>
STRING domain="rosettacode.org";
STRING page="wiki/Main_Page";

STRING re success="^HTTP/[0-9.]* 200";
STRING re result description="^HTTP/[0-9.]* [0-9]+ [a-zA-Z ]*";
STRING re doctype ="\s\s<!DOCTYPE html PUBLIC ""[^>]+"">\s+";

PROC html page = (REF STRING page) BOOL: (
  BOOL out=grep in string(re success, page, NIL, NIL) = 0;
  IF
    INT start, end;
    grep in string(re result description, page, start, end) = 0
  THEN
    page:=page[end+1:];
    IF
      grep in string(re doctype, page, start, end) = 0
    THEN
      page:=page[start+2:]
    ELSE
      print ("unknown format retrieving page")
    FI
  ELSE
    print ("unknown error retrieving page")
  FI;
  out
);

IF
  STRING reply;
  INT rc =
    http content (reply, domain, "http://"+domain+"/"+page, 0);
  rc = 0 AND html page (reply)
THEN
  print (reply)
ELSE
  print (strerror (rc))
FI
</lang>
 
=={{header|Arturo}}==
 
<lang arturo>print [download "http://google.com"]</lang>
 
{{out}}
 
<pre><!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="es"><head><meta content="Google.es permite acceder a la información mundial en castellano, catalán, gallego, euskara e inglés." name="description"><meta content="noodp" name="robots"><meta content="text/html; charset=UTF-8" http-equiv="Content-Type"><meta content="/images/branding/googleg/1x/googleg_standard_color_128dp.png" itemprop="image"><title>Google</title><script nonce="mEe5oG98axwLddedgOh1JA==">(function(){window.google={kEI:'lp2lXbjlCJGKauK8o9AB',kEXPI:'0,18167,1335579,5663,730,224,510,18,228,819,1535,1617,378,206,1017,53,173,1163,798,10,50,211,452,319,19,96,161,89,193,122,766,81,176,221,1130704,1197793,230,302939,26305,1294,12383,4855,32692,15247,867,12163,16521,363,3320,5505,2436,5948,1119,2,579,727,2431,1362,4323,4967,774,2250,4744,3118,6196,1719,1808,1976,2044,8909,5071,226,897,1119,38,920,2090,2975,2736,49,2606,315,91,2,632,3240,4191,1571,2303,2883,19,319,235,884,904,101,2024,1,370,2778,917,261,731,509,777,7,2796,887,80,601,11,14,1279,2212,202,37,286,5,1252,327,513,324,193,1466,8,48,1
 
[output truncated]
</pre>
 
=={{header|AutoHotkey}}==
<lang AutoHotkey>UrlDownloadToFile, http://rosettacode.org, url.html
Run, cmd /k type url.html
</lang>
 
=={{header|AWK}}==
{{works with|gawk}}
<lang awk>BEGIN {
  site="en.wikipedia.org"
  path="/wiki/"
  name="Rosetta_Code"

  server = "/inet/tcp/0/" site "/80"
  print "GET " path name " HTTP/1.0" |& server
  print "Host: " site |& server
  print "\r\n\r\n" |& server

  while ( (server |& getline fish) > 0 ) {
    if ( ++scale == 1 )
      ship = fish
    else
      ship = ship "\n" fish
  }
  close(server)

  print ship
}</lang>
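
Assuming the script above is saved as, say, <code>get.awk</code> (the file name is just an example), it can be run with:

<pre>
gawk -f get.awk
</pre>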
 
=={{header|BaCon}}==
<lang qbasic>' Read and display a website
'
IF AMOUNT(ARGUMENT$) = 1 THEN
    website$ = "www.basic-converter.org"
ELSE
    website$ = TOKEN$(ARGUMENT$, 2)
ENDIF

OPEN website$ & ":80" FOR NETWORK AS mynet
SEND "GET / HTTP/1.1\r\nHost: " & website$ & "\r\n\r\n" TO mynet
REPEAT
    RECEIVE dat$ FROM mynet
Line 147 ⟶ 202:
 
=={{header|Batch File}}==
<lang batch>
curl.exe -s -L http://rosettacode.org/
</lang>
 
=={{header|BBC BASIC}}==
{{works with|BBC BASIC for Windows}}
<lang bbcbasic>      SYS "LoadLibrary", "URLMON.DLL" TO urlmon%
      SYS "GetProcAddress", urlmon%, "URLDownloadToFileA" TO URLDownloadToFile
      url$ = "http://www.bbcbasic.co.uk/aboutus.html"
      file$ = @tmp$ + "rosetta.tmp"
      SYS URLDownloadToFile, 0, url$, file$, 0, 0 TO fail%
      IF fail% ERROR 100, "File download failed"
      OSCLI "TYPE """ + file$ + """"</lang>
 
=={{header|Biferno}}==
simple one-liner using httpExt and quick print $
<lang Biferno>$httpExt.ExecRemote("www.tabasoft.it")</lang>
 
=={{header|C}}==
{{libheader|libcurl}}
<lang c>
#include <stdio.h>
#include <stdlib.h>
#include <curl/curl.h>

int
main(void)
{
        CURL *curl;
        char buffer[CURL_ERROR_SIZE];

        if ((curl = curl_easy_init()) != NULL) {
                curl_easy_setopt(curl, CURLOPT_URL, "http://www.rosettacode.org/");
                curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
                curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, buffer);
                if (curl_easy_perform(curl) != CURLE_OK) {
                        fprintf(stderr, "%s\n", buffer);
                        return EXIT_FAILURE;
                }
                curl_easy_cleanup(curl);
        }
        return EXIT_SUCCESS;
}
</lang>
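
Assuming the source is saved as <code>http-get.c</code> (the file name is just an example), it can typically be compiled and run with:

<pre>
gcc http-get.c -o http-get -lcurl
./http-get
</pre>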
 
=={{header|C sharp}}==
<lang csharp>using System;
using System.Text;
using System.Net;

class Program
{
    static void Main(string[] args)
    {
        WebClient wc = new WebClient();
        string content = wc.DownloadString("http://google.com");
        Console.WriteLine(content);
    }
}</lang>
 
=={{header|C++}}==
<lang cpp>
#include <winsock2.h>
#include <ws2tcpip.h>
#include <iostream>

int main() {
    WSADATA wsaData;
    WSAStartup( MAKEWORD( 2, 2 ), &wsaData );

    addrinfo *result = NULL;
    addrinfo hints;

    ZeroMemory( &hints, sizeof( hints ) );
    hints.ai_family = AF_UNSPEC;
    hints.ai_socktype = SOCK_STREAM;
    hints.ai_protocol = IPPROTO_TCP;

    getaddrinfo( "74.125.45.100", "80", &hints, &result ); // http://www.google.com

    SOCKET s = socket( result->ai_family, result->ai_socktype, result->ai_protocol );

    connect( s, result->ai_addr, (int)result->ai_addrlen );

    freeaddrinfo( result );

    send( s, "GET / HTTP/1.0\n\n", 16, 0 );

    char buffer[512];
    int bytes;

    do {
        bytes = recv( s, buffer, 512, 0 );

        if ( bytes > 0 )
            std::cout.write(buffer, bytes);
    } while ( bytes > 0 );

    return 0;
}
</lang>
 
{{libheader|U++}}
 
<lang cpp>
#include <Web/Web.h>

using namespace Upp;

CONSOLE_APP_MAIN
{
    Cout() << HttpClient("www.rosettacode.org").ExecuteRedirect();
}
</lang>
 
=={{header|Caché ObjectScript}}==
Line 236 ⟶ 326:
<pre>
USER>Set HttpRequest=##class(%Net.HttpRequest).%New()
USER>Set HttpRequest.Server="checkip.dyndns.org"
USER>Do HttpRequest.Get("/")
USER>Do HttpRequest.HttpResponse.Data.OutputToDevice()
</pre>
Line 243 ⟶ 333:
=={{header|Clojure}}==
Using the Java API:
<lang clojure>
(defn get-http [url]
  (let [sc (java.util.Scanner.
            (.openStream (java.net.URL. url)))]
    (while (.hasNext sc)
      (println (.nextLine sc)))))
(get-http "http://www.rosettacode.org")
</lang>
Using <code>clojure.contrib.http.agent</code>:
<lang clojure>
(ns example
  (:use [clojure.contrib.http.agent :only (string http-agent)]))

(println (string (http-agent "http://www.rosettacode.org/")))
</lang>
{{works with|Clojure|1.2}}
<lang clojure>
(print (slurp "http://www.rosettacode.org/"))
</lang>
 
=={{header|COBOL}}==
 
Tested with GnuCOBOL
<lang cobol>COBOL  >>SOURCE FORMAT IS FIXED
       identification division.
       program-id. curl-rosetta.

       environment division.
       configuration section.
       repository.
           function read-url
           function all intrinsic.

       data division.
       working-storage section.

       copy "gccurlsym.cpy".

       01 web-page pic x(16777216).
       01 curl-status usage binary-long.

       01 cli pic x(7) external.
          88 helping values "-h", "-help", "help", spaces.
          88 displaying value "display".
          88 summarizing value "summary".

      *> ***************************************************************
       procedure division.
       accept cli from command-line
       if helping then
           display "./curl-rosetta [help|display|summary]"
           goback
       end-if

       move read-url("http://www.rosettacode.org", web-page)
         to curl-status

       perform check
       perform show

       goback.
      *> ***************************************************************

      *> Now testing the result, relying on the gccurlsym
      *>   GnuCOBOL Curl Symbol copy book
       check.
       if curl-status not equal zero then
           display
               curl-status " "
               CURLEMSG(curl-status) upon syserr
       end-if
       .

      *> And show the page
       show.
       if summarizing then
           display "Length: " stored-char-length(web-page)
       end-if
       if displaying then
           display trim(web-page trailing) with no advancing
       end-if
       .

       REPLACE ALSO ==:EXCEPTION-HANDLERS:== BY
       ==
      *> informational warnings and abends
       soft-exception.
         display space upon syserr
         display "--Exception Report-- " upon syserr
         display "Time of exception:   " current-date upon syserr
         display "Module:              " module-id upon syserr
         display "Module-path:         " module-path upon syserr
         display "Module-source:       " module-source upon syserr
         display "Exception-file:      " exception-file upon syserr
         display "Exception-status:    " exception-status upon syserr
         display "Exception-location:  " exception-location upon syserr
         display "Exception-statement: " exception-statement upon syserr
       .

       hard-exception.
           perform soft-exception
           stop run returning 127
       .
       ==.

       end program curl-rosetta.
      *> ***************************************************************

      *> ***************************************************************
      *>
      *> The function hiding all the curl details
      *>
      *> Purpose: Call libcURL and read into memory
      *> ***************************************************************
       identification division.
       function-id. read-url.

       environment division.
       configuration section.
       repository.
           function all intrinsic.

       data division.
       working-storage section.

       copy "gccurlsym.cpy".

       replace also ==:CALL-EXCEPTION:== by
       ==
           on exception
               perform hard-exception
       ==.

       01 curl-handle usage pointer.
       01 callback-handle usage procedure-pointer.
       01 memory-block.
          05 memory-address usage pointer sync.
          05 memory-size usage binary-long sync.
          05 running-total usage binary-long sync.
       01 curl-result usage binary-long.

       01 cli pic x(7) external.
          88 helping values "-h", "-help", "help", spaces.
          88 displaying value "display".
          88 summarizing value "summary".

       linkage section.
       01 url pic x any length.
       01 buffer pic x any length.
       01 curl-status usage binary-long.

      *> ***************************************************************
       procedure division using url buffer returning curl-status.
       if displaying or summarizing then
           display "Read: " url upon syserr
       end-if

      *> initialize libcurl, hint at missing library if need be
       call "curl_global_init" using by value CURL_GLOBAL_ALL
           on exception
               display
                   "need libcurl, link with -lcurl" upon syserr
               stop run returning 1
       end-call

      *> initialize handle
       call "curl_easy_init" returning curl-handle
           :CALL-EXCEPTION:
       end-call
       if curl-handle equal NULL then
           display "no curl handle" upon syserr
           stop run returning 1
       end-if

      *> Set the URL
       call "curl_easy_setopt" using
           by value curl-handle
           by value CURLOPT_URL
           by reference concatenate(trim(url trailing), x"00")
           :CALL-EXCEPTION:
       end-call

      *> follow all redirects
       call "curl_easy_setopt" using
           by value curl-handle
           by value CURLOPT_FOLLOWLOCATION
           by value 1
           :CALL-EXCEPTION:
       end-call

      *> set the call back to write to memory
       set callback-handle to address of entry "curl-write-callback"
       call "curl_easy_setopt" using
           by value curl-handle
           by value CURLOPT_WRITEFUNCTION
           by value callback-handle
           :CALL-EXCEPTION:
       end-call

      *> set the curl handle data handling structure
       set memory-address to address of buffer
       move length(buffer) to memory-size
       move 1 to running-total

       call "curl_easy_setopt" using
           by value curl-handle
           by value CURLOPT_WRITEDATA
           by value address of memory-block
           :CALL-EXCEPTION:
       end-call

      *> some servers demand an agent
       call "curl_easy_setopt" using
           by value curl-handle
           by value CURLOPT_USERAGENT
           by reference concatenate("libcurl-agent/1.0", x"00")
           :CALL-EXCEPTION:
       end-call

      *> let curl do all the hard work
       call "curl_easy_perform" using
           by value curl-handle
           returning curl-result
           :CALL-EXCEPTION:
       end-call

      *> the call back will handle filling ram, return the result code
       move curl-result to curl-status

      *> curl clean up, more important if testing cookies
       call "curl_easy_cleanup" using
           by value curl-handle
           returning omitted
           :CALL-EXCEPTION:
       end-call

       goback.

       :EXCEPTION-HANDLERS:

       end function read-url.
      *> ***************************************************************

      *> ***************************************************************
      *> Supporting libcurl callback
       identification division.
       program-id. curl-write-callback.

       environment division.
       configuration section.
       repository.
           function all intrinsic.

       data division.
       working-storage section.
       01 real-size usage binary-long.

      *> libcURL will pass a pointer to this structure in the callback
       01 memory-block based.
          05 memory-address usage pointer sync.
          05 memory-size usage binary-long sync.
          05 running-total usage binary-long sync.

       01 content-buffer pic x(65536) based.
       01 web-space pic x(16777216) based.
       01 left-over usage binary-long.

       linkage section.
       01 contents usage pointer.
       01 element-size usage binary-long.
       01 element-count usage binary-long.
       01 memory-structure usage pointer.

      *> ***************************************************************
       procedure division
           using
              by value contents
              by value element-size
              by value element-count
              by value memory-structure
           returning real-size.

       set address of memory-block to memory-structure
       compute real-size = element-size * element-count end-compute

      *> Fence off the end of buffer
       compute
           left-over = memory-size - running-total
       end-compute
       if left-over > 0 and < real-size then
           move left-over to real-size
       end-if

      *> if there is more buffer, and data not zero length
       if (left-over > 0) and (real-size > 1) then
           set address of content-buffer to contents
           set address of web-space to memory-address

           move content-buffer(1:real-size)
             to web-space(running-total:real-size)

           add real-size to running-total
       else
           display "curl buffer sizing problem" upon syserr
       end-if

       goback.
       end program curl-write-callback.</lang>
 
and a copybook
 
<lang cobol> *> manifest constants for libcurl
*> Usage: COPY occurlsym inside data division
*> Taken from include/curl/curl.h 2013-12-19
 
*> Functional enums
01 CURL_MAX_HTTP_HEADER CONSTANT AS 102400.
 
78 CURL_GLOBAL_ALL VALUE 3.
 
78 CURLOPT_FOLLOWLOCATION VALUE 52.
78 CURLOPT_WRITEDATA VALUE 10001.
78 CURLOPT_URL VALUE 10002.
78 CURLOPT_USERAGENT VALUE 10018.
78 CURLOPT_WRITEFUNCTION VALUE 20011.
78 CURLOPT_COOKIEFILE VALUE 10031.
78 CURLOPT_COOKIEJAR VALUE 10082.
78 CURLOPT_COOKIELIST VALUE 10135.
 
*> Informationals
78 CURLINFO_COOKIELIST VALUE 4194332.
 
*> Result codes
78 CURLE_OK VALUE 0.
*> Error codes
78 CURLE_UNSUPPORTED_PROTOCOL VALUE 1.
78 CURLE_FAILED_INIT VALUE 2.
78 CURLE_URL_MALFORMAT VALUE 3.
78 CURLE_OBSOLETE4 VALUE 4.
78 CURLE_COULDNT_RESOLVE_PROXY VALUE 5.
78 CURLE_COULDNT_RESOLVE_HOST VALUE 6.
78 CURLE_COULDNT_CONNECT VALUE 7.
78 CURLE_FTP_WEIRD_SERVER_REPLY VALUE 8.
78 CURLE_REMOTE_ACCESS_DENIED VALUE 9.
78 CURLE_OBSOLETE10 VALUE 10.
78 CURLE_FTP_WEIRD_PASS_REPLY VALUE 11.
78 CURLE_OBSOLETE12 VALUE 12.
78 CURLE_FTP_WEIRD_PASV_REPLY VALUE 13.
78 CURLE_FTP_WEIRD_227_FORMAT VALUE 14.
78 CURLE_FTP_CANT_GET_HOST VALUE 15.
78 CURLE_OBSOLETE16 VALUE 16.
78 CURLE_FTP_COULDNT_SET_TYPE VALUE 17.
78 CURLE_PARTIAL_FILE VALUE 18.
78 CURLE_FTP_COULDNT_RETR_FILE VALUE 19.
78 CURLE_OBSOLETE20 VALUE 20.
78 CURLE_QUOTE_ERROR VALUE 21.
78 CURLE_HTTP_RETURNED_ERROR VALUE 22.
78 CURLE_WRITE_ERROR VALUE 23.
78 CURLE_OBSOLETE24 VALUE 24.
78 CURLE_UPLOAD_FAILED VALUE 25.
78 CURLE_READ_ERROR VALUE 26.
78 CURLE_OUT_OF_MEMORY VALUE 27.
78 CURLE_OPERATION_TIMEDOUT VALUE 28.
78 CURLE_OBSOLETE29 VALUE 29.
78 CURLE_FTP_PORT_FAILED VALUE 30.
78 CURLE_FTP_COULDNT_USE_REST VALUE 31.
78 CURLE_OBSOLETE32 VALUE 32.
78 CURLE_RANGE_ERROR VALUE 33.
78 CURLE_HTTP_POST_ERROR VALUE 34.
78 CURLE_SSL_CONNECT_ERROR VALUE 35.
78 CURLE_BAD_DOWNLOAD_RESUME VALUE 36.
78 CURLE_FILE_COULDNT_READ_FILE VALUE 37.
78 CURLE_LDAP_CANNOT_BIND VALUE 38.
78 CURLE_LDAP_SEARCH_FAILED VALUE 39.
78 CURLE_OBSOLETE40 VALUE 40.
78 CURLE_FUNCTION_NOT_FOUND VALUE 41.
78 CURLE_ABORTED_BY_CALLBACK VALUE 42.
78 CURLE_BAD_FUNCTION_ARGUMENT VALUE 43.
78 CURLE_OBSOLETE44 VALUE 44.
78 CURLE_INTERFACE_FAILED VALUE 45.
78 CURLE_OBSOLETE46 VALUE 46.
78 CURLE_TOO_MANY_REDIRECTS VALUE 47.
78 CURLE_UNKNOWN_TELNET_OPTION VALUE 48.
78 CURLE_TELNET_OPTION_SYNTAX VALUE 49.
78 CURLE_OBSOLETE50 VALUE 50.
78 CURLE_PEER_FAILED_VERIFICATION VALUE 51.
78 CURLE_GOT_NOTHING VALUE 52.
78 CURLE_SSL_ENGINE_NOTFOUND VALUE 53.
78 CURLE_SSL_ENGINE_SETFAILED VALUE 54.
78 CURLE_SEND_ERROR VALUE 55.
78 CURLE_RECV_ERROR VALUE 56.
78 CURLE_OBSOLETE57 VALUE 57.
78 CURLE_SSL_CERTPROBLEM VALUE 58.
78 CURLE_SSL_CIPHER VALUE 59.
78 CURLE_SSL_CACERT VALUE 60.
78 CURLE_BAD_CONTENT_ENCODING VALUE 61.
78 CURLE_LDAP_INVALID_URL VALUE 62.
78 CURLE_FILESIZE_EXCEEDED VALUE 63.
78 CURLE_USE_SSL_FAILED VALUE 64.
78 CURLE_SEND_FAIL_REWIND VALUE 65.
78 CURLE_SSL_ENGINE_INITFAILED VALUE 66.
78 CURLE_LOGIN_DENIED VALUE 67.
78 CURLE_TFTP_NOTFOUND VALUE 68.
78 CURLE_TFTP_PERM VALUE 69.
78 CURLE_REMOTE_DISK_FULL VALUE 70.
78 CURLE_TFTP_ILLEGAL VALUE 71.
78 CURLE_TFTP_UNKNOWNID VALUE 72.
78 CURLE_REMOTE_FILE_EXISTS VALUE 73.
78 CURLE_TFTP_NOSUCHUSER VALUE 74.
78 CURLE_CONV_FAILED VALUE 75.
78 CURLE_CONV_REQD VALUE 76.
78 CURLE_SSL_CACERT_BADFILE VALUE 77.
78 CURLE_REMOTE_FILE_NOT_FOUND VALUE 78.
78 CURLE_SSH VALUE 79.
78 CURLE_SSL_SHUTDOWN_FAILED VALUE 80.
78 CURLE_AGAIN VALUE 81.
 
*> Error strings
01 LIBCURL_ERRORS.
02 CURLEVALUES.
03 FILLER PIC X(30) VALUE "CURLE_UNSUPPORTED_PROTOCOL ".
03 FILLER PIC X(30) VALUE "CURLE_FAILED_INIT ".
03 FILLER PIC X(30) VALUE "CURLE_URL_MALFORMAT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE4 ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_PROXY ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_HOST ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_CONNECT ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_SERVER_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_ACCESS_DENIED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE10 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASS_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE12 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASV_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_227_FORMAT ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_CANT_GET_HOST ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE16 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_SET_TYPE ".
03 FILLER PIC X(30) VALUE "CURLE_PARTIAL_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_RETR_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE20 ".
03 FILLER PIC X(30) VALUE "CURLE_QUOTE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_HTTP_RETURNED_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_WRITE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE24 ".
03 FILLER PIC X(30) VALUE "CURLE_UPLOAD_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_READ_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OUT_OF_MEMORY ".
03 FILLER PIC X(30) VALUE "CURLE_OPERATION_TIMEDOUT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE29 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_PORT_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_USE_REST ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE32 ".
03 FILLER PIC X(30) VALUE "CURLE_RANGE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_HTTP_POST_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CONNECT_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_DOWNLOAD_RESUME ".
03 FILLER PIC X(30) VALUE "CURLE_FILE_COULDNT_READ_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_CANNOT_BIND ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_SEARCH_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE40 ".
03 FILLER PIC X(30) VALUE "CURLE_FUNCTION_NOT_FOUND ".
03 FILLER PIC X(30) VALUE "CURLE_ABORTED_BY_CALLBACK ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_FUNCTION_ARGUMENT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE44 ".
03 FILLER PIC X(30) VALUE "CURLE_INTERFACE_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE46 ".
03 FILLER PIC X(30) VALUE "CURLE_TOO_MANY_REDIRECTS ".
03 FILLER PIC X(30) VALUE "CURLE_UNKNOWN_TELNET_OPTION ".
03 FILLER PIC X(30) VALUE "CURLE_TELNET_OPTION_SYNTAX ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE50 ".
03 FILLER PIC X(30) VALUE "CURLE_PEER_FAILED_VERIFICATION".
03 FILLER PIC X(30) VALUE "CURLE_GOT_NOTHING ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_NOTFOUND ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_SETFAILED ".
03 FILLER PIC X(30) VALUE "CURLE_SEND_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_RECV_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE57 ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CERTPROBLEM ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CIPHER ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_CONTENT_ENCODING ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_INVALID_URL ".
03 FILLER PIC X(30) VALUE "CURLE_FILESIZE_EXCEEDED ".
03 FILLER PIC X(30) VALUE "CURLE_USE_SSL_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_SEND_FAIL_REWIND ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_INITFAILED ".
03 FILLER PIC X(30) VALUE "CURLE_LOGIN_DENIED ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOTFOUND ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_PERM ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_DISK_FULL ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_ILLEGAL ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_UNKNOWNID ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_EXISTS ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOSUCHUSER ".
03 FILLER PIC X(30) VALUE "CURLE_CONV_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_CONV_REQD ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT_BADFILE ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_NOT_FOUND ".
03 FILLER PIC X(30) VALUE "CURLE_SSH ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_SHUTDOWN_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_AGAIN ".
01 FILLER REDEFINES LIBCURL_ERRORS.
02 CURLEMSG OCCURS 81 TIMES PIC X(30).</lang>
 
{{out}}
<pre>prompt$ ./curl-rosetta summary
Read: http://www.rosettacode.org
Length: 000024043
 
prompt$ ./curl-rosetta display
Read: http://www.rosettacode.org
<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
...</pre>
 
=={{header|ColdFusion}}==
<lang coldfusion><cfhttp url="http://www.rosettacode.org" result="result">
<cfoutput>#result.FileContent#</cfoutput>
</lang>
 
=={{header|Common Lisp}}==
CLISP provides an extension function to read http sources. Other implementations may do this differently.
{{works with|CLISP}}
<lang lisp>
(defun wget-clisp (url)
  (ext:with-http-input (stream url)
    (loop for line = (read-line stream nil nil)
          while line
          do (format t "~a~%" line))))
</lang>
{{libheader|DRAKMA}}
 
First grabbing the entire body as a string, and then pulling from a stream (as in the CLISP example).
 
<lang lisp>
(defun wget-drakma-string (url &optional (out *standard-output*))
  "Grab the body as a string, and write it to out."
  (write-string (drakma:http-request url) out))

(defun wget-drakma-stream (url &optional (out *standard-output*))
  "Grab the body as a stream, and write it to out."
  (loop with body = (drakma:http-request url :want-stream t)
        for line = (read-line body nil nil)
        while line do (write-line line out)
        finally (close body)))
</lang>
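
A usage sketch for the two helpers above (the URL is just an example):

<lang lisp>
;; Print the page body using each helper defined above.
(wget-drakma-string "http://www.rosettacode.org/")
(wget-drakma-stream "http://www.rosettacode.org/")
</lang>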
 
=={{header|Crystal}}==
<lang crystal>
require "http/client"

HTTP::Client.get("http://google.com")
</lang>
 
=={{header|D}}==
{{libheader|phobos}}
<lang D>
void main() {
    import std.stdio, std.net.curl;
    writeln(get("http://google.com"));
}
</lang>
 
{{libheader|tango}}
 
<lang D>
import tango.io.Console;
import tango.net.http.HttpGet;

void main() {
    Cout.stream.copy( (new HttpGet("http://google.com")).open );
}
</lang>
 
Or, operating more directly on the socket:
 
<lang D>
import tango.io.Console;
import tango.net.InternetAddress;
import tango.net.device.Socket;

void main() {
    auto site = new Socket;
    site.connect(new InternetAddress("google.com", 80)).write("GET / HTTP/1.0\n\n");

    Cout.stream.copy(site);
}
</lang>
 
=={{header|Dart}}==
Line 719 ⟶ 939:
<lang d>import 'dart:io';

void main(){
  var url = 'http://www.rosettacode.org';
  var client = new HttpClient();
  client.getUrl(Uri.parse(url))
    .then((HttpClientRequest request) => request.close())
    .then((HttpClientResponse response) => response.pipe(stdout));
}</lang>
 
=={{header|Delphi}}==
Simple example using the free Synapse TCP/IP library [http://www.ararat.cz/synapse/doku.php/download]
 
<lang Delphi>
program HTTP;

{$APPTYPE CONSOLE}

{$DEFINE DEBUG}

uses
  Classes,
  httpsend; // Synapse httpsend class

var
  Response: TStrings;
  HTTPObj: THTTPSend;

begin
  HTTPObj := THTTPSend.Create;
  try
    { Stringlist object to capture HTML returned
      from URL }
    Response := TStringList.Create;
    try
      if HTTPObj.HTTPMethod('GET','http://www.mgis.uk.com') then
        begin
          { Load HTTP Document into Stringlist }
          Response.LoadFromStream(HTTPObj.Document);
          { Write the response to the console window }
          Writeln(Response.Text);
        end
      else
        Writeln('Error retrieving data');

    finally
      Response.Free;
    end;

  finally
    HTTPObj.Free;
  end;

  // Keep console window open
  Readln;

end.
</lang>
 
 
Using Indy:
 
<lang Delphi>
program ShowHTTP;

{$APPTYPE CONSOLE}

uses IdHttp;

var
  s: string;
  lHTTP: TIdHTTP;
begin
  lHTTP := TIdHTTP.Create(nil);
  try
    lHTTP.HandleRedirects := True;
    s := lHTTP.Get('http://www.rosettacode.org');
    Writeln(s);
  finally
    lHTTP.Free;
  end;
end.
</lang>
 
=={{header|Dragon}}==
<lang dragon>select "http"
select "std"

http("http://www.rosettacode.org", ::echo)
</lang>
 
=={{header|E}}==
 
<lang e>
when (def t := <http://www.rosettacode.org> <- getText()) -> {
    println(t)
}
</lang>
 
=={{header|EchoLisp}}==
'''file->string''' usage: the server must allow cross-domain access, or a browser add-on like cors-everywhere must be installed to bypass cross-domain checking.
<lang scheme>
;; asynchronous call back definition
(define (success name text) (writeln 'Loaded name) (writeln text))
;;
(file->string success "http://www.google.com")
</lang>
 
=={{header|Emacs Lisp}}==
<code>url.el</code> can download HTTP. <code>url-retrieve-synchronously</code> returns a buffer containing headers and body. Caller kills the buffer when no longer required.
 
<lang Lisp>(with-current-buffer
    (url-retrieve-synchronously "http://www.rosettacode.org/")
  (goto-char (point-min))
  (search-forward "\n\n" nil t)  ;; skip headers
  (prin1 (buffer-substring (point) (point-max)))
  (kill-buffer (current-buffer)))</lang>
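
The same pattern can be wrapped in a small reusable helper; a minimal sketch (the function name is mine, not from the original):

<lang Lisp>(defun fetch-url-body (url)
  "Return the body of URL as a string, discarding the response headers."
  (with-current-buffer (url-retrieve-synchronously url)
    (goto-char (point-min))
    (search-forward "\n\n" nil t)  ;; skip headers
    (prog1 (buffer-substring (point) (point-max))
      (kill-buffer (current-buffer)))))</lang>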
 
=={{header|Erlang}}==
Line 841 ⟶ 1,091:
=={{header|F_Sharp|F#}}==
In F# we can just use the .NET library to do this, so it's the same as the [[C_sharp|C#]] example.
 
<lang fsharp>
let wget (url : string) =
    use c = new System.Net.WebClient()
    c.DownloadString(url)

printfn "%s" (wget "http://www.rosettacode.org/")
</lang>
 
However, unlike C#, F# can use an asynchronous workflow to avoid blocking any threads while waiting for a response from the server. To asynchronously download three URLs at once...
 
<lang fsharp>
open System.Net
open System.IO

let wgetAsync url =
    async { let request = WebRequest.Create (url:string)
            use! response = request.AsyncGetResponse()
            use responseStream = response.GetResponseStream()
            use reader = new StreamReader(responseStream)
            return reader.ReadToEnd() }

let urls = ["http://www.rosettacode.org/"; "http://www.yahoo.com/"; "http://www.google.com/"]
let content = urls
              |> List.map wgetAsync
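              // The diff elides the rest of this example here; a plausible completion
              // (an assumption, not the original text) runs the downloads in parallel
              // and waits for all of them:
              |> Async.Parallel
              |> Async.RunSynchronously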
Line 863 ⟶ 1,121:
=={{header|Factor}}==
<lang factor>USE: http.client
"http://www.rosettacode.org" http-get nip print</lang>
 
=={{header|Forth}}==
{{works with|GNU Forth|0.7.0}}
This works at the socket level, returning both the HTTP headers and page contents.
<lang forth>include unix/socket.fs

s" localhost" 80 open-socket
dup s\" GET / HTTP/1.0\n\n" rot write-socket
dup pad 8092 read-socket type
close-socket</lang>
 
=={{header|friendly interactive shell}}==
{{trans|UNIX Shell}}
<lang fishshell>curl -s -L http://rosettacode.org/</lang>

<lang fishshell>lynx -source http://rosettacode.org/</lang>

<lang fishshell>wget -O - -q http://rosettacode.org/</lang>

<lang fishshell>lftp -c "cat http://rosettacode.org/"</lang>
 
{{works with|BSD}}
<lang fishshell>ftp -o - http://rosettacode.org ^ /dev/null</lang>
Line 885 ⟶ 1,151:
=={{header|Frink}}==
Frink's <CODE>read[<I>URL</I>]</CODE> function works with any URL type supported by your Java Virtual Machine, and returns the results as a single string.
<lang frink>
print[read["http://frinklang.org/"]]
</lang>
 
=={{header|Gastona}}==
<lang gastona>#listix#

   <main>
      LOOP, TEXT FILE, http://www.rosettacode.org
          , BODY, @<value>
</lang>
 
=={{header|GML}}==
{{works with|Game Maker Studio}}
 
'''Any Event'''
<lang gml>get = http_get("http://www.rosettacode.org");</lang>
 
'''HTTP Event'''
<lang gml>if (ds_map_find_value(async_load,"id") == get)
{
    show_message_async(ds_map_find_value(async_load,"result"));
}</lang>
 
=={{header|Go}}==
<lang go>
package main

import (
    "io"
    "log"
    "net/http"
    "os"
)

func main() {
    r, err := http.Get("http://rosettacode.org/robots.txt")
    if err != nil {
        log.Fatalln(err)
    }
    io.Copy(os.Stdout, r.Body)
}
</lang>
 
Output:
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>
 
=={{header|Groovy}}==
<lang groovy>new URL("http://www.rosettacode.org").eachLine { println it }</lang>
 
=={{header|GUISS}}==
 
It would be more appropriate to paste to notepad:
 
<lang guiss>Start,Programs,Applications,Mozilla Firefox,Inputbox:address bar>www.rosettacode.org,Button:Go,
Click:Area:browser window,Type:[Control A],[Control C],Start,Programs,Accessories,Notepad,
Menu:Edit,Paste</lang>
 
=={{header|Halon}}==
<lang halon>echo http("http://www.rosettacode.org");</lang>
 
=={{header|Haskell}}==
Using {{libheader|HTTP}} from [http://hackage.haskell.org/packages/hackage.html HackageDB]
 
<lang haskell>
import Network.Browser
import Network.HTTP
import Network.URI

main = do
    rsp <- Network.Browser.browse $ do
        setAllowRedirects True
        setOutHandler $ const (return ())
        request $ getRequest "http://www.rosettacode.org/"
    putStrLn $ rspBody $ snd rsp
</lang>
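
A shorter variant using <code>simpleHTTP</code> from the same HTTP package is sketched below (note that, unlike the browser-based version above, it does not follow redirects):

<lang haskell>
-- Minimal sketch using simpleHTTP from the HTTP package; no redirect handling.
import Network.HTTP

main :: IO ()
main = simpleHTTP (getRequest "http://www.rosettacode.org/") >>= getResponseBody >>= putStr
</lang>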
 
== Icon and Unicon ==
==={{header|Icon}}===
<lang icon>link cfunc
procedure main(arglist)
   get(arglist[1])
end

procedure get(url)
   local f, host, port, path
   url ? {
      ="http://" | ="HTTP://"
      host := tab(upto(':/') | 0)
      if not (=":" & (port := integer(tab(upto('/'))))) then port := 80
      if pos(0) then path := "/" else path := tab(0)
   }
   write(host)
   write(path)
   f := tconnect(host, port) | stop("Unable to connect")
   writes(f, "GET ", path | "/" ," HTTP/1.0\r\n\r\n")
   while write(read(f))
end</lang>
Using it
<lang icon>
|icon req.icn http://www.rosettacode.org
</lang>
 
==={{header|Unicon}}===
Unicon provides improved socket and messaging support without the need for the external function ''cfunc'':
<lang unicon>
procedure main(arglist)
   m := open(arglist[1],"m")
   while write(read(m))
end</lang>
 
=={{header|J}}==
Using <tt>gethttp</tt> from [[Web Scraping#J|Web Scraping]]
 
<lang j>require'web/gethttp'
gethttp 'http://www.w3rosettacode.org/Home.html'</lang>
</lang>
 
=={{header|Java}}==
<lang java5>import java.util.Scanner;
import java.net.URL;

public class Main {
    public static void main(String[] args) throws Exception {
        Scanner sc = new Scanner(new URL("http://www.rosettacode.org").openStream());
        while (sc.hasNext())
            System.out.println(sc.nextLine());
    }
}</lang>
 
{{libheader|Apache Commons IO}}
 
<lang java5>
import org.apache.commons.io.IOUtils;
import java.net.URL;

public class Main {
    public static void main(String[] args) throws Exception {
        IOUtils.copy(new URL("http://www.rosettacode.org").openStream(), System.out);
    }
}</lang>
 
=={{header|JavaScript}}==
 
===Browser===
<lang JavaScript>var req = new XMLHttpRequest();
req.onload = function() {
  console.log(this.responseText);
};

req.open('get', 'http://rosettacode.org', true);
req.send()</lang>
 
Using fetch API:
<lang JavaScript>
fetch('http://rosettacode.org').then(function(response) {
return response.text();
}).then(function(myText) {
console.log(myText);
});
</lang>
 
As a repeatable function:
 
<lang JavaScript>/**
* @name _http
* @description Generic API Client using XMLHttpRequest
* @param {string} url The URI/URL to connect to
* @param {string} method The HTTP method to invoke- GET, POST, etc
* @param {function} callback Once the HTTP request has completed, responseText is passed into this function for execution
* @param {object} params Query Parameters in a JavaScript Object (Optional)
*
*/
function _http(url, method, callback, params) {
var xhr,
reqUrl;
 
xhr = new XMLHttpRequest();
xhr.onreadystatechange = function xhrProc() {
if (xhr.readyState == 4 && xhr.status == 200) {
callback(xhr.responseText);
}
};
 
 
/** If Query Parameters are present, handle them... */
if (typeof params === 'undefined') {
reqUrl = url;
} else {
switch (method) {
case 'GET':
reqUrl = url + procQueryParams(params);
break;
case 'POST':
reqUrl = url;
break;
default:
}
}
 
 
/** Send the HTTP Request */
if (reqUrl) {
xhr.open(method, reqUrl, true);
xhr.setRequestHeader("Accept", "application/json");
 
if (method === 'POST') {
xhr.send(params);
} else {
xhr.send();
}
}
 
 
/**
* @name procQueryParams
* @description Return function that converts Query Parameters from a JavaScript Object to a proper URL encoded string
* @param {object} params Query Parameters in a JavaScript Object
*
*/
function procQueryParams(params) {
return "?" + Object
.keys(params)
.map(function (key) {
return key + "=" + encodeURIComponent(params[key])
})
.join("&")
}
}</lang>
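
For illustration, a hypothetical call to the <code>_http</code> helper above might look like this (the URL and callback body are assumptions, not part of the original):

<lang JavaScript>// Fetch a page with the _http helper defined above and log the raw response text.
_http('http://rosettacode.org', 'GET', function (responseText) {
    console.log(responseText);
});</lang>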
 
Using jQuery:
 
<lang JavaScript>$.get('http://rosettacode.org', function(data) {
  console.log(data);
});</lang>
 
===Node.js===
 
With Node.js, using only the included http module.

<lang javascript>const http = require('http');

http.get('http://rosettacode.org', (resp) => {

  let data = '';

  // A chunk of data has been received.
  resp.on('data', (chunk) => {
    data += chunk;
  });

  // The whole response has been received. Print out the result.
  resp.on('end', () => {
    console.log("Data:", data);
  });

}).on("error", (err) => {
  console.log("Error: " + err.message);
});</lang>
 
=={{header|Jsish}}==
Based on Jsi_Wget that ships with Jsish.
 
<lang javascript>#!/usr/bin/env jsish
function httpGet(fileargs:array|string, conf:object=void) {
var options = { // Web client for downloading files from url
    headers : [],      // Header fields to send.
    nowait  : false,   // Just return object: caller will call update.
    onDone  : null,    // Callback when done.
    wsdebug : 0        // WebSockets debug level.
};
var self = {
address : '',
done : false,
path : '',
port : -1,
post : '', // Post file upload (UNIMPL).
scheme : 'http', // Url scheme
protocol : 'get',
url : null,
response : ''
};
 
parseOpts(self, options, conf);
if (self.port === -1)
self.port = 80;
function WsRecv(ws:userobj, id:number, str:string) {
LogDebug("LEN: "+str.length);
Line 1,043 ⟶ 1,461:
self.response += str;
}
function WsClose(ws:userobj|null, id:number) {
LogDebug("CLOSE");
Line 1,049 ⟶ 1,468:
self.onDone(id);
}
function main() {
if (self.Debug)
Line 1,073 ⟶ 1,493:
if (self.post.length)
self.protocol = 'post';
var wsopts = {
client: true,
onRecv: WsRecv,
onClose: WsClose,
debug: self.wsdebug,
rootdir: self.path,
port: self.port,
address: self.address,
protocol: self.protocol,
clientHost: self.address
};
if (self.post.length)
Line 1,108 ⟶ 1,529:
return self.response;
}
 
return main();
}
 
provide(httpGet, "0.60");
 
if (isMain())
runModule(httpGet);</lang>
 
{{out}}
<pre>prompt$ jsish
# require('httpGet')
0.6
# var page = httpGet('http://rosettacode.org/robots.txt')
variable
# page
"User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
 
"</pre>
 
=={{header|Julia}}==
<lang Julia>readurl(url) = open(readlines, download(url))
 
readurl("http://rosettacode.org/index.html")</lang>
 
=={{header|Kotlin}}==
<lang scala>// version 1.1.2

import java.net.URL
import java.io.InputStreamReader
import java.util.Scanner

fun main(args: Array<String>) {
    val url = URL("http://www.puzzlers.org/pub/wordlists/unixdict.txt")
    val isr = InputStreamReader(url.openStream())
    val sc = Scanner(isr)
    while (sc.hasNextLine()) println(sc.nextLine())
    sc.close()
}</lang>
=={{header|Lasso}}==
include_url is a wrapper for Lasso's curl datatype, however it can be achieved in several ways.
<lang Lasso>// using include_url wrapper
include_url('http://rosettacode.org/index.html')

// one line curl
curl('http://rosettacode.org/index')->result->asString

// using curl for more complex operations and feedback
local(x = curl('http://rosettacode.org/index'))
local(y = #x->result)
#y->asString</lang>
 
=={{header|LFE}}==
 
===Synchronous===
<lang lisp>(: inets start)
(case (: httpc request '"http://lfe.github.io")
  ((tuple 'ok result)
   (: io format '"Result: ~p" (list result)))
  ((tuple 'error reason)
   (: io format '"Error: ~p~n" (list reason))))</lang>
===Asynchronous===
<lang lisp>(: inets start)
(let* ((method 'get)
       (url '"http://lfe.github.io")
       (headers ())
       (request-data (tuple url headers))
       (http-options ())
       (request-options (list (tuple 'sync 'false))))
  (: httpc request method request-data http-options request-options)
  (receive
    ((tuple 'http (tuple request-id (tuple 'error reason)))
     (: io format '"Error: ~p~n" (list reason)))
    ((tuple 'http (tuple request-id result))
     (: io format '"Result: ~p~n" (list result)))))</lang>
 
=={{header|Liberty BASIC}}==
=={{header|Lingo}}==
HTTP requests based on Director's native HTTP facilities - i.e. without using a 3rd party plugin ("Xtra") - are asynchronous. A simple implementation of an HTTP GET request might look like this:
 
Parent script "SimpleHttpGet":
<lang lingo>property _netID
property _cbHandler
property _cbTarget

----------------------------------------
-- Simple HTTP GET request
----------------------------------------
on new (me, url, cbHandler, cbTarget)
  if voidP(cbTarget) then cbTarget = _movie
  me._netID = getNetText(url)
  me._cbHandler = cbHandler
  me._cbTarget = cbTarget
  _movie.actorList.add(me)
  return me
end

----------------------------------------
-- @callback
----------------------------------------
on stepFrame (me)
  if netDone(me._netID) then
    res = netTextResult(me._netID)
    err = netError(me._netID)
    _movie.actorList.deleteOne(me)
    call(me._cbHandler, me._cbTarget, res, err)
  end if
end</lang>
 
In some movie script:
<lang lingo>----------------------------------------
----------------------------------------
on getAdobeHomePage ()
  script("SimpleHttpGet").new("http://www.adobe.com/", #printResult)
end

----------------------------------------
-- @callback
----------------------------------------
on printResult (res, err)
  if err="OK" then
    put res
  else
    put "Network Error:" && err
  end if
end</lang>
 
Executed in the "Message Window" (=Director's interactive Lingo console):
<lang lingo>getAdobeHomePage()</lang>

=={{header|LiveCode}}==
Without a callback handler, the get URL method will block until complete.
<lang LiveCode>put true into libURLFollowHttpRedirects
get URL "http://httpbin.org"
put it</lang>
Non-blocking version
<lang LiveCode>on myUrlDownloadFinished
   answer "Download Complete" with "Okay"
end myUrlDownloadFinished

command getWebResource
   load URL "http://httpbin.org" with message "myUrlDownloadFinished"
end getWebResource</lang>
 
=={{header|LSL}}==
To test it yourself, rez a box on the ground and add the following as a New Script.
<lang LSL>string sURL = "http://www.RosettaCode.Org";
key kHttpRequestId;
default {
    state_entry() {
        kHttpRequestId = llHTTPRequest(sURL, [], "");
    }
    http_response(key kRequestId, integer iStatus, list lMetaData, string sBody) {
        if (kRequestId == kHttpRequestId) {
            llOwnerSay("Status="+(string)iStatus);
            integer x = 0;
            for (x=0 ; x<llGetListLength(lMetaData) ; x++) {
                llOwnerSay("llList2String(lMetaData, "+(string)x+") = "+llList2String(lMetaData, x));
            }
            list lBody = llParseString2List(sBody, ["\n"], []);
            for (x=0 ; x<llGetListLength(lBody) ; x++) {
                llOwnerSay("llList2String(lBody, "+(string)x+") = "+llList2String(lBody, x));
            }
        }
    }
}
</lang>
Output:
<pre>Status=200
llList2String(lMetaData, 0)=0
llList2String(lMetaData, 1)=2048
llList2String(lBody, 0)=<!DOCTYPE html>
llList2String(lBody, 1)=<html lang="en" dir="ltr" class="client-nojs">
llList2String(lBody, 2)=<head>
llList2String(lBody, 3)=<title>Rosetta Code</title>
llList2String(lBody, 4)=<meta charset="UTF-8" />
llList2String(lBody, 5)=<meta name="generator" content="MediaWiki 1.18.0" />
llList2String(lBody, 6)=<link rel="shortcut icon" href="/favicon.ico" />
llList2String(lBody, 7)=<link rel="search" type="application/opensearchdescription+xml" href="/mw/opensearch_desc.php" title="Rosetta Code (en)" />
llList2String(lBody, 8)=<link rel="EditURI" type="application/rsd+xml" href="http://rosettacode.org/mw/api.php?action=rsd" />
llList2String(lBody, 9)=<link rel="copyright" href="http://www.gnu.org/licenses/fdl-1.2.html" />
llList2String(lBody, 10)=<link rel="alternate" type="application/atom+xml" title="Rosetta Code Atom feed" href="/mw/index.php?title=Special:RecentChanges&amp;feed=atom" />
llList2String(lBody, 11)=<link rel="stylesheet" href="/mw/load.php?debug=false&amp;lang=en&amp;modules=mediawiki.legacy.commonPrint%2Cshared%7Cskins.vector&amp;only=styles&amp;skin=vector&amp;*" />
llList2String(lBody, 12)=<meta name="ResourceLoaderDynamicStyles" content="" />
llList2String(lBody, 13)=<link rel="stylesheet" href="/mw/load.php?debug=false&amp;lang=en&amp;modules=site&amp;only=styles&amp;skin=vector&amp;*" />
llList2String(lBody, 14)=<style>a:lang(ar),a:lang(ckb),a:lang(fa),a:lang(kk-arab),a:lang(mzn),a:lang(ps),a:lang(ur){text-decoration:none}a.new,#quickbar a.new{color:#ba0000}
... ... ... ... ... ... ... ... ... ... ... ... ... ...
</pre>
 
=={{header|Lua}}==
{{libheader|LuaSocket}}
<lang Lua>local http = require("socket.http")
local url = require("socket.url")
local page = http.request('http://www.google.com/m/search?q=' .. url.escape("lua"))
print(page)</lang>
 
=={{header|M2000 Interpreter}}==
Using With statement we can make objects properties like ReadyState as variables
(some of them as read only)
 
<lang M2000 Interpreter>
Module CheckIt {
      Declare xml "Microsoft.XMLHTTP"
      const testUrl$ = "http://www.rosettacode.org"
      With xml, "readyState" as ReadyState
      Method xml "Open", "Get", testUrl$, True  ' True means Async
      Method xml "send"
      \\ We set a thread to count time
      k=0
      Thread {
            k++
      } as TimeOut interval 100
      \\ In main thread we can check ReadyState and Mouse button
      Task.Main 100 {
            Print ReadyState
            If ReadyState=4 then exit
            if k>20 then exit ' 20*100= 2 sec
            if mouse then exit ' exit if mouse click
      }
      \\ So now we can read
      if ReadyState=4 then {
            With xml, "responseText" AS AA$
            \\ break AA$ to lines
            \\ using line breaks as CRLF
            Document BB$=AA$
            Report BB$
      }
      Declare xml Nothing
}
CheckIt
</lang>
 
=={{header|Maple}}==
In Maple 18 or later:
<lang Maple>
content := URL:-Get( "http://www.google.com/" );
</lang>

In Maple 17 or earlier:
<lang Maple>
content := HTTP:-Get( "http://www.google.com/" );
</lang>
 
=={{header|Mathematica}} / {{header|Wolfram Language}}==
<lang Mathematica>
Print[Import["http://www.google.com/webhp?complete=1&hl=en", "Source"]]
</lang>
 
=={{header|MATLAB}} / {{header|Octave}}==
[http://www.mathworks.com/help/matlab/ref/urlread.html urlread] is MATLAB's function for making URL requests.
The documentation for Octave is available here [http://octave.sourceforge.net/octave/function/urlread.html urlread].
 
In this example we initiate an HTTP request for a single random number from [http://www.random.org random.org]:
<lang MATLAB>
>> random = urlread('http://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new')
 
random =
 
61
</lang>
 
It is possible to make more complicated requests, specifically "GET" and "POST," which is explained in the [http://www.mathworks.com/help/matlab/ref/urlread.html documentation].
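
For instance, a GET request with query parameters can be sketched as follows (the URL and parameter pair are only illustrative):

<lang MATLAB>% pass 'get' and a cell array of name/value pairs to append a query string
>> s = urlread('http://httpbin.org/get', 'get', {'term', 'urlread'})</lang>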
 
=={{header|MIRC Scripting Language}}==

=={{header|Nanoquery}}==
<lang nanoquery>import http
import url
 
url = new(URL, "http://rosettacode.org/wiki/Rosetta_Code")
client = new(HTTPClient, url.getHost())
client.connect()
 
response = client.get(url.getFile())
println response.get("body")</lang>

=={{header|Nemerle}}==
<lang Nemerle>using System.Net;
using System.IO;

module HTTP
{
    Main() : void
    {
        def wc = WebClient();
        def myStream = wc.OpenRead("http://rosettacode.org");
        def sr = StreamReader(myStream);
        WriteLine(sr.ReadToEnd());
        myStream.Close()
    }
}</lang>

=={{header|NetRexx}}==
{{trans|Java}}
An implementation of the [[#Java|Java]] version shown above; demonstrating NetRexx's ability to exploit the rich Java SDK.
 
<lang NetRexx>/* NetRexx */
options replace format comments java crossref symbols binary

import java.util.Scanner
import java.net.URL

do
  rosettaUrl = "http://rosettacode.org"
  sc = Scanner(URL(rosettaUrl).openStream)
  loop while sc.hasNext
    say sc.nextLine
    end
catch ex = Exception
  ex.printStackTrace
end

return</lang>
 
=={{header|NewLisp}}==
<lang NewLisp>(get-url "http://www.rosettacode.org")</lang>
 
=={{header|Nim}}==

=={{header|Objeck}}==
<lang objeck>use HTTP;
use Collection;
 
class HttpTest {
  function : Main(args : String[]) ~ Nil {
    lines := HttpClient->New()->Get("http://rosettacode.org");
    each(i : lines) {
      lines->Get(i)->As(String)->PrintLine();
    };
  }
}</lang>
 
=={{header|Objective-C}}==
<lang objc>#import <Foundation/Foundation.h>
 
int main (int argc, const char * argv[]) {
    @autoreleasepool {

        NSError *error;
        NSURLResponse *response;
        NSData *data = [NSURLConnection sendSynchronousRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://rosettacode.org"]]
                                             returningResponse:&response
                                                         error:&error];

        NSLog(@"%@", [[NSString alloc] initWithData:data
                                           encoding:NSUTF8StringEncoding]);

    }
    return 0;
}</lang>
 
=={{header|OCaml}}==
<lang ocaml>let () =
  let url = "http://www.rosettacode.org" in
  let _,_, page_content = make_request ~url ~kind:GET () in
  print_endline page_content;
;;</lang>
 
The source code of the function ''make_request'' is [[Web_Scraping/OCaml|here]].
 
=={{header|ooRexx}}==
Got this from a friend. Needs bsf4oorexx from sourceforge.
<br>Note that rosettacode.org (as used by java and NetRexx) does not permit this access!
<lang oorexx>url=.bsf~new("java.net.URL","http://teletext.orf.at")
sc =.bsf~new("java.util.Scanner",url~openStream)
loop while sc~hasNext
say sc~nextLine
End
::requires BSF.CLS -- get Java camouflaging support</lang>
{{out}}
massaged to avoid problems.
<pre><-!DOCTYPE HTML-
..
-/html-</pre>
 
=={{header|Oz}}==
When creating a file object, it is possible to specify an URL instead of a filename:
<lang oz>
declare
  fun {GetPage Url}
     F = {New Open.file init(url:Url)}
     Contents = {F read(list:$ size:all)}
  in
     {F close}
     Contents
  end
in
  {System.showInfo {GetPage "http://www.rosettacode.org"}}
</lang>
 
{{libheader|OzHttpClient}}
 
If you need more fine-grained control of the request, you could use a custom library:
<lang oz>
declare
  [HTTPClient] = {Module.link ['x-ozlib://mesaros/net/HTTPClient.ozf']}

  fun {GetPage Url}
     Client = {New HTTPClient.urlGET
               init(inPrms(toFile:false toStrm:true)
                    httpReqPrms
                   )}
     OutParams
     HttpResponseParams
  in
     {Client getService(Url ?OutParams ?HttpResponseParams)}
     {Client closeAll(true)}
     OutParams.sOut
  end
in
  {System.showInfo {GetPage "http://www.rosettacode.org"}}
</lang>
 
=={{header|Pascal}}==
<lang pascal>{$mode objfpc}{$H+}
uses fphttpclient;
 
var
s: string;
hc: tfphttpclient;
 
begin
hc := tfphttpclient.create(nil);
try
s := hc.get('http://www.example.com')
finally
hc.free
end;
writeln(s)
end.</lang>
 
 
{{works with|Free Pascal}} {{libheader|CThreads}} {{libheader|Classes}} {{libheader|httpsend}}
<lang pascal>program http;

{$mode objfpc}{$H+}
{$APPTYPE CONSOLE}

{$DEFINE DEBUG}

uses
  {$IFDEF UNIX}
  {$IFDEF UseCThreads}
  cthreads,
  {$ENDIF}
  {$ENDIF}
  Classes,
  // Synapse httpsend class
  httpsend;

{$R *.res}

var
  Response: TStrings;
  HTTPObj: THTTPSend;

begin
  HTTPObj := THTTPSend.Create;
  try
    { Stringlist object to capture HTML returned
      from URL }
    Response := TStringList.Create;
    try
      if HTTPObj.HTTPMethod('GET','http://wiki.lazarus.freepascal.org/Synapse') then
        begin
          { Load HTTP Document into Stringlist }
          Response.LoadFromStream(HTTPObj.Document);
          { Write the response to the console window }
          Writeln(Response.Text);
        end
      else
        Writeln('Error retrieving data');

    finally
      Response.Free;
    end;

  finally
    HTTPObj.Free;
  end;

  // Keep console window open
  Readln;

end.</lang>
 
=={{header|Peloton}}==
English dialect, short form:
<lang sgml>
<@ SAYURLLIT>http://rosettacode.org/wiki/Main_Page</@>
</lang>
 
English dialect, padded variable-length form:
<lang sgml>
<# SAY URLSOURCE LITERAL>http://rosettacode.org/wiki/Main_Page</#>
</lang>
 
=={{header|Perl}}==
 
===Core example===
{{libheader|HTTP/Tiny}}
{{works with|Perl|5.14}}
{{works with|Perl/HTTP/Tiny}}
 
This sample is nearly identical to the LWP sample except that it uses HTTP::Tiny which was added to the core libraries in [[Perl/5.14]].
 
<lang perl>use strict; use warnings;
require 5.014; # check HTTP::Tiny part of core
use HTTP::Tiny;
 
print( HTTP::Tiny->new()->get( 'http://rosettacode.org')->{content} );</lang>
 
===Library examples===
 
===={{libheader|LWP}}====
{{works with|Perl/LWP}}
 
Classic LWP sample.
 
<lang perl>use LWP::Simple qw/get $ua/;
$ua->agent(undef) ; # cloudflare blocks default LWP agent
print( get("http://www.rosettacode.org") );</lang>
 
or with more error-checking
 
<lang perl>use strict;
use LWP::UserAgent;
 
my $url = 'http://www.rosettacode.org';
my $response = LWP::UserAgent->new->get( $url );
 
$response->is_success or die "Failed to GET '$url': ", $response->status_line;
 
print $response->as_string</lang>
 
=={{header|Phix}}==
<lang Phix>
curl_global_init()
atom curl = curl_easy_init()
curl_easy_setopt(curl, CURLOPT_URL, "http://rosettacode.org/robots.txt")
object res = curl_easy_perform_ex(curl)
curl_easy_cleanup(curl)
curl_global_cleanup()
puts(1,res)</lang>
{{out}}
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>
 
=={{header|PHP}}==
<lang php>
readfile("http://www.rosettacode.org");
</lang>
 
=={{header|PicoLisp}}==
<lang PicoLisp>(load "@lib/http.l")

(client "rosettacode.org" 80 NIL                       # Connect to rosettacode
   (out NIL (echo)) )                                  # Echo to standard output
</lang>
 
=={{header|Pike}}==
<lang pike>
write("%s",Protocols.HTTP.get_url_data("http://www.rosettacode.org"));
</lang>
 
=={{header|PowerShell}}==
<lang powershell>
$wc = New-Object Net.WebClient
$wc.DownloadString('http://www.rosettacode.org')
</lang>
 
=={{header|Prolog}}==
Works with SWI-Prolog and library http/http_open. (Extract from the doc).
 
<lang Prolog>
:- use_module(library( http/http_open )).

http :-
    http_open('http://www.rosettacode.org', In, []),
    copy_stream_data(In, user_output),
    close(In).
</lang>
 
=={{header|PureBasic}}==
<lang PureBasic>
InitNetwork()
OpenConsole()

tmpdir$ = GetTemporaryDirectory()
filename$ = tmpdir$ + "PB_tempfile" + Str(Random(200000)) + ".html"

If ReceiveHTTPFile("http://rosettacode.org/wiki/Main_Page", filename$)
  If ReadFile(1, filename$)
    Repeat
      PrintN(ReadString(1))
    Until Eof(1)
    Input()
    ; to prevent console from closing if on windows
    CloseFile(1)
  EndIf
  DeleteFile(filename$)
EndIf
</lang>

Another solution using general networking commands
<lang PureBasic>
InitNetwork()
OpenConsole()
id = OpenNetworkConnection("rosettacode.org", 80)
SendNetworkString(id, "GET /wiki/Main_Page HTTP/1.1" + Chr(10) + "Host: rosettacode.org" + Chr(10) + Chr(10))
Repeat
  If NetworkClientEvent(id) = 2
    a$ = Space(1000)
    ReceiveNetworkData(id, @a$, 1000)
    out$ + a$
  EndIf
Until FindString(out$, "</html>", 0)
PrintN(out$)
; next line only to prevent console from closing on Windows
Input()
</lang>
 
Of course you could use wget too.
 
=={{header|Python}}==
 
;Python 3:
Using the [http://docs.python.org/py3k/library/urllib.request.html urllib.request] module.
<lang python>
import urllib.request
print(urllib.request.urlopen("http://rosettacode.org").read())
</lang>
 
Using a more low-level [https://docs.python.org/3/library/http.client.html http.client] library.
<lang python>
from http.client import HTTPConnection
conn = HTTPConnection("example.com")
# If you need to use set_tunnel, do so here.
conn.request("GET", "/")
# Alternatively, you can use connect(), followed by the putrequest, putheader and endheaders functions.
result = conn.getresponse()
r1 = result.read() # This retrieves the entire contents.
</lang>
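
A minimal sketch of the lower-level alternative mentioned in the comment above (host and header value are only illustrative):

<lang python>
from http.client import HTTPConnection

conn = HTTPConnection("example.com")
conn.connect()                          # open the connection explicitly
conn.putrequest("GET", "/")             # start the request line
conn.putheader("Accept", "text/html")   # add any additional headers
conn.endheaders()                       # finish sending the headers
print(conn.getresponse().read())
</lang>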
 
;Python 2:
Using the [http://docs.python.org/library/urllib.html urllib] library.
<lang python>
import urllib
print urllib.urlopen("http://rosettacode.org").read()
</lang>
 
Using the [http://docs.python.org/library/urllib2.html urllib2] library.
<lang python>
import urllib2
print urllib2.urlopen("http://rosettacode.org").read()
</lang>
 
 
{{libheader|Requests}}
{{works with|Python|2.7, 3.4–3.7}}
<lang Python>
import requests
print(requests.get("http://rosettacode.org").text)
</lang>
 
=={{header|R}}==
{{libheader|RCurl}}
{{libheader|XML}}
 
First, retrieve the webpage.
 
<lang R>
library(RCurl)
webpage <- getURL("http://rosettacode.org")

#If you are linking to a page that no longer exists and need to follow the redirect, use followlocation=TRUE
webpage <- getURL("http://www.rosettacode.org", .opts=list(followlocation=TRUE))

#If you are behind a proxy server, you will need to use something like:
webpage <- getURL("http://rosettacode.org",
   .opts=list(proxy="123.123.123.123", proxyusername="domain\\username", proxypassword="mypassword", proxyport=8080))
#Don't forget that backslashes in your username or password need to be escaped!
</lang>
 
Now parse the html code into a tree and print the html.
 
<lang R>
library(XML)
pagetree <- htmlTreeParse(webpage)
pagetree$children$html
</lang>
 
=={{header|Racket}}==
<lang Racket>#lang racket

(require net/url)

(copy-port (get-pure-port (string->url "http://www.rosettacode.org")
                          #:redirections 100)
           (current-output-port))</lang>
 
=={{header|Raku}}==
{{libheader|LWP}}
Using LWP::Simple from [https://modules.raku.org/search/?q=LWP%3A%3ASimple the Raku ecosystem].
 
<lang perl6>use v6;
 
use LWP::Simple;
 
print LWP::Simple.get("http://www.rosettacode.org");</lang>
 
or, without LWP::Simple:
 
<lang perl6>use v6;
 
my $socket = IO::Socket::INET.new(host => "www.rosettacode.org",
                                  port => 80,);
$socket.print("GET / HTTP/1.0\r\n\r\n");
print $socket.recv();
$socket.close;</lang>
 
=={{header|REALbasic}}==
REALBasic provides an HTTPSocket class for handling HTTP connections. The 'Get' method of the HTTPSocket is overloaded and can download data to a file or return data as a string; in both cases a timeout argument can be passed.
<lang REALbasic>Dim sock As New HTTPSocket
Print(sock.Get("http://www.rosettacode.org", 10))  //set the timeout period to 10 seconds.
</lang>
 
=={{header|REBOL}}==
<lang REBOL>print read http://rosettacode.org</lang>
 
=={{header|REXX}}==
<lang Rexx>/* ft=rexx */
/* GET.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get.rx http://rosettacode.org */
parse arg url .
'curl' url</lang>
<lang Rexx>/* ft=rexx */
/* GET2.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get2.rx http://rosettacode.org */
parse arg url .
address system 'curl' url with output stem stuff.</lang>
<lang Rexx>/* ft=rexx */
/* GET3.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get3.rx http://rosettacode.org */
parse arg url .
address system 'curl' url with output fifo ''</lang>
 
=={{header|Ring}}==
<lang ring>
See download("http://rosettacode.org")
</lang>
 
=={{header|RLaB}}==
=={{header|Ruby}}==
The simple way loads the entire content into memory, then prints it.
 
<lang ruby>
require 'open-uri'

print open("http://rosettacode.org") {|f| f.read}
</lang>
 
If the content might be large, the better way uses FileUtils.copy_stream.
 
<lang ruby>
require 'fileutils'
require 'open-uri'

open("http://rosettacode.org/") {|f| FileUtils.copy_stream(f, $stdout)}
</lang>
 
=={{header|Run BASIC}}==
<lang runbasic>print httpget$("http://rosettacode.org/wiki/Main_Page")</lang>
 
=={{header|Rust}}==
Cargo.toml
<lang toml>
[dependencies]
hyper = "0.6"
</lang>
src/main.rs
<lang rust>
//cargo-deps: hyper="0.6"
// The above line can be used with cargo-script which makes cargo's dependency handling more convenient for small programs
extern crate hyper;

use std::io::Read;
use hyper::client::Client;

fn main() {
    let client = Client::new();
    let mut resp = client.get("http://rosettacode.org").send().unwrap();
    let mut body = String::new();
    resp.read_to_string(&mut body).unwrap();
    println!("{}", body);
}
</lang>
 
=={{header|Scala}}==
{{libheader|Scala}}
<lang scala>import scala.io.Source
 
object HttpTest extends App {
System.setProperty("http.agent", "*")
 
  Source.fromURL("http://www.rosettacode.org").getLines.foreach(println)
}</lang>
 
=={{header|Scheme}}==
{{works with|Guile}}
 
<lang scheme>
; Use the regular expression module to parse the url (included with Guile)
(use-modules (ice-9 regex))

; Set the url and parse the hostname, port, and path into variables
(define url "http://www.rosettacode.org/wiki/HTTP")
(define r (make-regexp "^(http://)?([^:/]+)(:)?(([0-9])+)?(/.*)?" regexp/icase))
(define host (match:substring (regexp-exec r url) 2))
(define port (match:substring (regexp-exec r url) 4))
(define path (match:substring (regexp-exec r url) 6))

; Set port to 80 if it wasn't set above and convert from a string to a number
(if (eq? port #f) (define port "80"))
(define port (string->number port))

; Connect to remote host on specified port
(let ((s (socket PF_INET SOCK_STREAM 0)))
  (connect s AF_INET (car (hostent:addr-list (gethostbyname host))) port)

  ; Send a HTTP request for the specified path
  (display "GET " s)
  (display path s)
  (display " HTTP/1.0\r\n\r\n" s)

  ; Display the received HTML
  (do ((c (read-char s) (read-char s))) ((eof-object? c))
    (display c)))
</lang>
{{works with|Chicken Scheme}}
Using the [http://api.call-cc.org/doc/http-client http-client] library, this is trivial.
<lang scheme>
(use http-client)
(print
  (with-input-from-request "http://google.com/"
                           #f read-string))
</lang>
 
=={{header|Seed7}}==
The library gethttp.s7i contains the function [http://seed7.sourceforge.net/libraries/gethttp.htm#getHttp%28in_string%29 getHttp],
which gets data specified by an URL using the HTTP protocol.
 
<lang seed7>$ include "seed7_05.s7i";
  include "gethttp.s7i";

const proc: main is func
  begin
    writeln(getHttp("www.rosettacode.org"));
  end func;</lang>
 
=={{header|SenseTalk}}==
<lang sensetalk>put url "http://www.rosettacode.org"</lang>
 
=={{header|Sidef}}==
<lang ruby>func get(url) {
    var lwp = (
        try   { require('LWP::UserAgent') }
        catch { warn "'LWP::UserAgent' is not installed!"; return nil }
    )
    var ua = lwp.new(agent => 'Mozilla/5.0')
    # ...
    return nil
}

print get("http://rosettacode.org")</lang>
 
=={{header|Smalltalk}}==
{{works with|Pharo}}
<lang smalltalk>
Transcript show: 'http://rosettacode.org' asUrl retrieveContents contentStream.
</lang>
 
=={{header|SNOBOL4}}==
{{works with|Macro SNOBOL4 in C}}
<lang snobol>
-include "tcp.sno"
	tcp.open(.conn, 'rosettacode.org', 'http')	:s(cont1)
	terminal = "cannot open"	:(end)
cont1	conn = "GET http://rosettacode.org/wiki/Main_Page HTTP/1.0" char(10) char(10)
while	output = conn	:s(while)
	tcp.close(.conn)
end</lang>
 
=={{header|Swift}}==
<lang Swift>import Foundation
 
let request = NSURLRequest(URL: NSURL(string: "http://rosettacode.org/")!)

// Using trailing closure
NSURLConnection.sendAsynchronousRequest(request, queue: NSOperationQueue()) {res, data, err in

    // data is binary
    if (data != nil) {
        let string = NSString(data: data!, encoding: NSUTF8StringEncoding)
        // ...
    }
}

CFRunLoopRun() // dispatch</lang>
 
=={{header|Tcl}}==
Note that the <code>http</code> package is distributed as part of Tcl.
 
<lang tcl>
package require http
set request [http::geturl "http://www.rosettacode.org"]
puts [http::data $request]
http::cleanup $request</lang>
 
=={{header|TSE SAL}}==
<lang TSE SAL>
DLL "<urlmon.dll>"
 INTEGER PROC FNUrlGetSourceApiI(
  INTEGER lpunknown,
  STRING urlS : CSTRVAL,
  STRING filenameS : CSTRVAL,
  INTEGER dword,
  INTEGER tlpbindstatuscallback
 ) : "URLDownloadToFileA"
END

// library: url: get: source <description></description> <version control></version control> <version>1.0.0.0.3</version> (filenamemacro=geturgso.s) [kn, ri, su, 13-04-2008 05:12:53]
PROC PROCUrlGetSource( STRING urlS, STRING filenameS )
 FNUrlGetSourceApiI( 0, urlS, filenameS, 0, 0 )
END

PROC Main()
 STRING s1[255] = "http://www.google.com/index.html"
 STRING s2[255] = "c:\temp\ddd.txt"
 IF ( NOT ( Ask( "url: get: source: urlS = ", s1, _EDIT_HISTORY_ ) ) AND ( Length( s1 ) > 0 ) )
  RETURN()
 ENDIF
 IF ( NOT ( AskFilename( "url: get: source: filenameS = ", s2, _DEFAULT_, _EDIT_HISTORY_ ) ) AND ( Length( s2 ) > 0 ) )
  RETURN()
 ENDIF
 PROCUrlGetSource( s1, s2 )
 EditFile( s2 )
END</lang>
 
=={{header|TUSCRIPT}}==
<lang tuscript>
$$ MODE TUSCRIPT
SET DATEN = REQUEST ("http://www.rosettacode.org")
*{daten}
</lang>
 
=={{header|UNIX Shell}}==
<lang bash>curl -s -L http://rosettacode.org/</lang>

<lang bash>lynx -source http://rosettacode.org/</lang>

<lang bash>wget -O - -q http://rosettacode.org/</lang>

<lang bash>lftp -c "cat http://rosettacode.org/"</lang>

{{works with|BSD}}
<lang bash>ftp -o - http://rosettacode.org 2>/dev/null</lang>
 
=={{header|VBScript}}==
 
Based on code at [http://itknowledgeexchange.techtarget.com/vbscript-systems-administrator/how-to-retrieve-html-web-pages-with-vbscript-via-the-microsoftxmlhttp-object/ How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object]
<lang vb>
Option Explicit

Const sURL="http://rosettacode.org/"

Dim oHTTP
Set oHTTP = CreateObject("Microsoft.XmlHTTP")

On Error Resume Next
oHTTP.Open "GET", sURL, False
' ...
    Wscript.Echo "error " & Err.Number & ": " & Err.Description
End If

Set oHTTP = Nothing
</lang>
 
=={{header|Visual Basic}}==
 
=={{header|Visual Basic .NET}}==
<lang vbnet>
Imports System.Net

Dim client As WebClient = New WebClient()
Dim content As String = client.DownloadString("http://www.google.com")
Console.WriteLine(content)
</lang>
 
=={{header|zkl}}==
File htmlGet.zkl. This uses HTTP/1.0 Protocol to avoid chunked data. Or use cURL (see https example).
<lang zkl>url := ask(0,"URL: ");
 
host := url;
dir := "/";
port := 80;
if (n := url.find("/")) {	// split the URL into host and path
   dir = url[n,*];
   host = url[0,n];
}
if (n := host.find(":")) {	// optional port
   port = host[n+1,*];
   host = host[0,n];
}

get := "GET %s HTTP/1.0\r\nHost: %s:%s\r\n\r\n".fmt(dir,host,port.toInt());
println("-->",get);
// ... (connect to the host and send the request)
data := server.read(True);
println(data.text);</lang>
zkl htmlGet.zkl rosettacode.org/wiki/HTTP
{{out}}
<pre>
-->GET /wiki/HTTP HTTP/1.0
Host: rosettacode.org:80
 
 
HTTP/1.1 200 OK
Server: cloudflare-nginx
Date: Tue, 11 Mar 2014 08:31:43 GMT
Content-Type: text/html; charset=UTF-8
Connection: close
Set-Cookie:XXX
23:50:00 GMT; path=/; domain=.rosettacode.org; HttpOnly
X-Powered-By: PHP/5.3.3-7+squeeze18
X-Content-Type-Options: nosniff
Content-Language: en
ETag: W/"rosettacode:pcache:idhash:3055-0!1!0!!en!2--20140227082903"
Vary: Accept-Encoding,Cookie
Cache-Control: s-maxage=86400, must-revalidate, max-age=0
Last-Modified: Thu, 27 Feb 2014 08:29:03 GMT
Age: 86011
X-Cache: HIT from prgmr2.rosettacode.org
X-Cache-Lookup: HIT from prgmr2.rosettacode.org:80
Via: 1.0 prgmr2.rosettacode.org (squid/3.1.6)
CF-RAY: 109665b7e92a012c-SJC
 
<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
<title>HTTP - Rosetta Code</title>
...
</pre>
 
=={{header|Zoea}}==
<lang Zoea>
program: http
input: 'https://zoea.co.uk/examples/test.txt'
output: 'hello from zoea'
</lang>
 
=={{header|Zsh}}==
<lang zsh>
zmodload zsh/net/tcp
ztcp example.com 80
fd=$REPLY
print -l -u $fd -- 'GET / HTTP/1.1' 'Host: example.com' ''
while read -u $fd -r -e -t 1; do; :; done
ztcp -c $fd
</lang>
 
{{omit from|Applesoft BASIC|No TCP/IP network support on Apple II}}
{{omit from|Brainf***}}
{{omit from|Commodore BASIC|Does not have network access}}
{{omit from|Inform 7|Does not have network access.}}
{{omit from|Integer BASIC|No TCP/IP network support on Apple II}}
{{omit from|Locomotive Basic|Does not have network access.}}
{{omit from|Lotus 123 Macro Scripting}}
{{omit from|M4}}
{{omit from|Maxima}}
{{omit from|ML/I}}
{{omit from|Openscad}}
{{omit from|PARI/GP}}
{{omit from|PostScript}}
{{omit from|Retro|Does not have network access.}}
{{omit from|SQL PL|Does not have network access}}
{{omit from|TI-83 BASIC|Does not have network access.}}
{{omit from|TI-89 BASIC|Does not have network access.}}
{{omit from|Unlambda|Does not have network access.}}
{{omit from|Yorick|Does not have network access.}}
{{omit from|ZX Spectrum Basic|Does not have network access.}}