HTTP: Difference between revisions
Summarily revert Revision as of 06:23, 2 May 2020 rather than trying to piecemeal revert
Thundergnat (talk | contribs) (Undo revision 315432 by WdeCvfYlmB (talk)) |
Thundergnat (talk | contribs) (Summarily revert Revision as of 06:23, 2 May 2020 rather than trying to piecemeal revert) |
||
Line 5:
There is a separate task for [[HTTPS Request]]s.
=={{header|8th}}==
<lang forth>
"http://www.rosettacode.org" net:get drop >s .
</lang>
=={{header|ABAP}}==
This works for ABAP Version 7.40 and above
<lang ABAP>
report z_http.
cl_http_client=>create_by_url(
exporting
url = `http://rosettacode.org/robots.txt`
importing
client = data(http_client)
exceptions
plugin_not_active = 2
internal_error = 3
others = 4 ).
if sy-subrc <> 0.
when 1 then `argument_not_found`
write error_message.
exit.
endif.
data(rest_http_client)
rest_http_client->get( ).
data(response_string) = rest_http_client->get_response_entity( )->get_string_data( ).
split response_string at cl_abap_char_utilities=>newline into table data(output_table).
loop at output_table assigning field-symbol(<output_line>).
endloop.
</lang>
{{out}}
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>
=={{header|ActionScript}}==
<lang actionscript>
package
{
import flash.display.Sprite;
import flash.events.Event;
import flash.net.*;
{
public function RequestExample()
{
var loader:URLLoader = new URLLoader();
loader.addEventListener(Event.COMPLETE, loadComplete);
loader.load(new URLRequest("http://www.
}
private function loadComplete(evt:Event):void
{
trace(evt.target.data);
}
}
}
</lang>
=={{header|Ada}}==
{{libheader|AWS}}
<lang ada>
with Ada.Text_IO; use Ada.Text_IO;
with AWS.Client;
with AWS.Response;
procedure HTTP_Request is
begin
Put_Line (AWS.Response.Message_Body (AWS.Client.Get (URL => "http://www.
end HTTP_Request;
</lang>
=={{header|ALGOL 68}}==
{{works with|ALGOL 68|Revision 1 - however ''grep in string'', ''http content'' and ''str error'' are from a non-standard library}}
{{works with|ALGOL 68G|Any - tested with release [http://sourceforge.net/projects/algol68/files/algol68g/algol68g-1.18.0/algol68g-1.18.0-9h.tiny.el5.centos.fc11.i386.rpm/download 1.18.0-9h.tiny]}}
{{wont work with|ELLA ALGOL 68|Any (with appropriate job cards) - tested with release [http://sourceforge.net/projects/algol68/files/algol68toc/algol68toc-1.8.8d/algol68toc-1.8-8d.fc9.i386.rpm/download 1.8-8d] - due to extensive use of ''grep in string'' and ''http content''}}
<lang algol68>
STRING domain="rosettacode.org";
STRING page="wiki/Main_Page";
STRING re success="^HTTP/[0-9.]* 200";
STRING re result description="^HTTP/[0-9.]* [0-9]+ [a-zA-Z ]*";
STRING re doctype ="\s\s<!DOCTYPE html PUBLIC ""[^>]+"">\s+";
PROC html page = (REF STRING page) BOOL: (
BOOL out=grep in string(re success, page, NIL, NIL) = 0;
IF INT start, end;
grep in string(re result description, page, start, end) = 0
THEN
page
IF grep in string(re doctype, page, start, end) = 0
ELSE print ("unknown format retrieving page")
FI
ELSE print ("unknown error retrieving page")
FI;
);
INT rc =
rc = 0 AND html page (reply)
ELSE print (strerror (rc))
FI
</lang>
=={{header|Arturo}}==
<lang arturo>print [download "http://google.com"]</lang>
{{out}}
<pre><!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="es"><head><meta content="Google.es permite acceder a la información mundial en castellano, catalán, gallego, euskara e inglés." name="description"><meta content="noodp" name="robots"><meta content="text/html; charset=UTF-8" http-equiv="Content-Type"><meta content="/images/branding/googleg/1x/googleg_standard_color_128dp.png" itemprop="image"><title>Google</title><script nonce="mEe5oG98axwLddedgOh1JA==">(function(){window.google={kEI:'lp2lXbjlCJGKauK8o9AB',kEXPI:'0,18167,1335579,5663,730,224,510,18,228,819,1535,1617,378,206,1017,53,173,1163,798,10,50,211,452,319,19,96,161,89,193,122,766,81,176,221,1130704,1197793,230,302939,26305,1294,12383,4855,32692,15247,867,12163,16521,363,3320,5505,2436,5948,1119,2,579,727,2431,1362,4323,4967,774,2250,4744,3118,6196,1719,1808,1976,2044,8909,5071,226,897,1119,38,920,2090,2975,2736,49,2606,315,91,2,632,3240,4191,1571,2303,2883,19,319,235,884,904,101,2024,1,370,2778,917,261,731,509,777,7,2796,887,80,601,11,14,1279,2212,202,37,286,5,1252,327,513,324,193,1466,8,48,1
[output truncated]
</pre>
=={{header|AutoHotkey}}==
<lang AutoHotkey>
Run, cmd /k type url.html
</lang>
=={{header|AWK}}==
{{works with|gawk}}
<lang awk>BEGIN {
server = "/inet/tcp/0/" site "/80"
print "\r\n\r\n" |& server
while ( (server |& getline fish) > 0 )
if ( ++scale ==
ship = ship "\n" fish
}
close(server)
print ship
}</lang>
=={{header|BaCon}}==
<lang qbasic>'
' Read and display a website
'
IF AMOUNT(ARGUMENT$) = 1 THEN
website$ = "www.basic-converter.org"
ELSE
website$ = TOKEN$(ARGUMENT$, 2)
ENDIF
OPEN website$ & ":80" FOR NETWORK AS mynet
SEND "GET /
REPEAT
RECEIVE dat$ FROM mynet
Line 147 ⟶ 202:
=={{header|Batch File}}==
<lang batch>
curl.exe -s -L http://rosettacode.org/
</lang>
=={{header|BBC BASIC}}==
{{works with|BBC BASIC for Windows}}
<lang bbcbasic> SYS "LoadLibrary", "URLMON.DLL" TO urlmon%
SYS "GetProcAddress", urlmon%, "URLDownloadToFileA" TO URLDownloadToFile
url$ = "http://www.bbcbasic.co.uk/aboutus.html"
SYS URLDownloadToFile, 0, url$, file$, 0, 0 TO fail%
IF fail% ERROR 100, "File download failed"
OSCLI "TYPE """ + file$ + """"</lang>
=={{header|Biferno}}==
simple one-liner using httpExt and quick print $
<lang Biferno>$httpExt.ExecRemote("www.tabasoft.it")</lang>
=={{header|C}}==
{{libheader|libcurl}}
<lang c>
#include <stdio.h>
#include <stdlib.h>

#include <curl/curl.h>

/* Fetch http://www.rosettacode.org/ with libcurl and write the body to
 * stdout (curl's default write behaviour).  On failure the error text
 * collected in `buffer` is printed to stderr. */
int
main(void)
{
        CURL *curl;
        char buffer[CURL_ERROR_SIZE];

        if ((curl = curl_easy_init()) != NULL) {
                curl_easy_setopt(curl, CURLOPT_URL, "http://www.rosettacode.org/");
                curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
                curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, buffer);
                if (curl_easy_perform(curl) != CURLE_OK) {
                        fprintf(stderr, "%s\n", buffer);
                        curl_easy_cleanup(curl); /* don't leak the handle on the error path */
                        return EXIT_FAILURE;     /* original had a bare "return" with no value */
                }
                curl_easy_cleanup(curl);
        }
        return EXIT_SUCCESS;
}
</lang>
=={{header|C sharp}}==
<lang csharp>
using System;
using System.Text;
using System.Net;

class Program
{
    static void Main(string[] args)
    {
        // Download the page body as a string and print it.
        // The URL literal was truncated in the diff ("http://www.) —
        // restored to the Rosetta Code site.
        WebClient wc = new WebClient();
        string content = wc.DownloadString("http://www.rosettacode.org");
        Console.WriteLine(content);
    }
}
</lang>
=={{header|C++}}==
<lang cpp>
#include <winsock2.h>
#include <ws2tcpip.h>

#include <iostream>

// Fetch http://www.google.com (by literal IP) over a raw TCP socket and
// dump the full HTTP response (headers + body) to stdout.  Windows-only.
int main() {
	WSADATA wsaData;
	WSAStartup( MAKEWORD( 2, 2 ), &wsaData );

	addrinfo *result = NULL;
	addrinfo hints;

	ZeroMemory( &hints, sizeof( hints ) );
	hints.ai_family = AF_UNSPEC;
	hints.ai_socktype = SOCK_STREAM;
	hints.ai_protocol = IPPROTO_TCP;

	getaddrinfo( "74.125.45.100", "80", &hints, &result ); // http://www.google.com

	SOCKET s = socket( result->ai_family, result->ai_socktype, result->ai_protocol );

	connect( s, result->ai_addr, (int)result->ai_addrlen );

	freeaddrinfo( result );

	// HTTP requires CRLF line endings; the original "\n\n" only worked
	// with lenient servers.
	const char request[] = "GET / HTTP/1.0\r\n\r\n";
	send( s, request, (int)(sizeof request - 1), 0 );

	char buffer[512];
	int bytes;

	do {
		bytes = recv( s, buffer, 512, 0 );

		if ( bytes > 0 )
			std::cout.write(buffer, bytes);
	} while ( bytes > 0 );

	closesocket( s ); // original leaked the socket
	WSACleanup();     // and never released winsock
	return 0;
}
</lang>
{{libheader|U++}}
<lang cpp>
#include <Web/Web.h>
using namespace Upp;
CONSOLE_APP_MAIN
{
Cout() << HttpClient("www.rosettacode.org").ExecuteRedirect();
}
</lang>
=={{header|Caché ObjectScript}}==
Line 236 ⟶ 326:
<pre>
USER>Set HttpRequest=##class(%Net.HttpRequest).%New()
USER>Set HttpRequest.Server="
USER>Do HttpRequest.Get("/
USER>Do HttpRequest.HttpResponse.Data.OutputToDevice()
</pre>
Line 243 ⟶ 333:
=={{header|Clojure}}==
Using the Java API:
<lang clojure>
(let [sc (java.util.Scanner.
(while (.hasNext
(get-http "http://www.rosettacode.org")
</lang>
Using <code>clojure.contrib.http.agent</code>:
<lang clojure>
(:use [clojure.contrib.http.agent :only (string http-agent)]))
(println (string (http-agent "http://www.rosettacode.org/")))
</lang>
{{works with|Clojure|1.2}}
<lang clojure>
(print (slurp "http://www.rosettacode.org/"))
</lang>
=={{header|COBOL}}==
Tested with GnuCOBOL
<lang cobol>COBOL >>SOURCE FORMAT IS FIXED
identification division.
program-id. curl-rosetta.
environment division.
configuration section.
function read-url
function all intrinsic.
data division.
working-storage section.
copy "gccurlsym.cpy".
01 web-page pic x(16777216).
01
01 cli pic x(7) external.
88 helping values "-h", "-help", "help", spaces.
88 displaying value "display".
88 summarizing value "summary".
*> ***************************************************************
procedure division.
accept cli from command-line
if helping then
display "./curl-rosetta [help|display|summary]"
goback
end-if
*>
*> Read a web resource into fixed ram.
*> Caller is in charge of sizing the buffer,
*> (or getting trickier with the write callback)
*> Pass URL and working-storage variable,
*> get back libcURL error code or 0 for success
move read-url("http://www.rosettacode.org", web-page)
to curl-status
perform check
perform show
goback.
*> ***************************************************************
*> Now tesing the result, relying on the gccurlsym
*> GnuCOBOL Curl Symbol copy book
if curl-status not equal zero then
display
curl-status " "
CURLEMSG(curl-status) upon syserr
*> And
show.
if summarizing then
display "Length: " stored-char-length(web-page)
end-if
if displaying then
display trim(web-page trailing) with no advancing
end-if
REPLACE ALSO ==:EXCEPTION-HANDLERS:== BY
*> informational warnings and abends
soft-exception.
display
display "--Exception Report-- " upon syserr
display "Time of exception: " current-date upon syserr
display "Module: " module-id upon syserr
display "
display "Module-source: " module-source upon syserr
display "Exception-file: " exception-file upon syserr
display
display "Exception-location: " exception-location upon syserr
display "Exception-statement: " exception-statement upon syserr
stop run returning 127
.
==.
end program curl-rosetta.
*> ***************************************************************
*> ***************************************************************
*> The function hiding all the curl details
*>
*> Purpose: Call libcURL and read into memory
*> ***************************************************************
identification division.
function-id. read-url.
environment division.
configuration section.
repository.
function all intrinsic.
data division.
working-storage section.
copy "gccurlsym.cpy".
replace also ==:CALL-EXCEPTION:== by
==
on exception
perform hard-exception
==.
01 curl-handle usage pointer.
01 callback-handle usage procedure-pointer.
01 memory-block.
05 memory-address usage pointer sync.
05 memory-size usage binary-long sync.
05 running-total usage binary-long sync.
01 curl-result usage binary-long.
01 cli pic x(7) external.
88 helping values "-h", "-help", "help", spaces.
88 displaying value "display".
88 summarizing value "summary".
linkage section.
01 url pic x any length.
01 buffer pic x any length.
01 curl-status usage binary-long.
*> ***************************************************************
procedure division using url buffer returning curl-status.
if displaying or summarizing then
display "Read: " url upon syserr
end-if
*> initialize libcurl, hint at missing library if need be
call "curl_global_init" using by value CURL_GLOBAL_ALL
on exception
display
"need libcurl, link with -lcurl" upon syserr
stop run returning 1
end-call
*> initialize handle
call "curl_easy_init" returning curl-handle
:CALL-EXCEPTION:
end-call
if curl-handle equal NULL then
display "no curl handle" upon syserr
stop run returning 1
end-if
*> Set the URL
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_URL
by reference concatenate(trim(url trailing), x"00")
:CALL-EXCEPTION:
end-call
*> follow all redirects
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_FOLLOWLOCATION
by value 1
:CALL-EXCEPTION:
end-call
*> set the call back to write to memory
set callback-handle to address of entry "curl-write-callback"
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_WRITEFUNCTION
by value callback-handle
:CALL-EXCEPTION:
end-call
*> set the curl handle data handling structure
set memory-address to address of buffer
move length(buffer) to memory-size
move 1 to running-total
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_WRITEDATA
by value address of memory-block
:CALL-EXCEPTION:
end-call
*> some servers demand an agent
call "curl_easy_setopt" using
by value curl-handle
by value CURLOPT_USERAGENT
by reference concatenate("libcurl-agent/1.0", x"00")
:CALL-EXCEPTION:
end-call
*> let curl do all the hard work
call "curl_easy_perform" using
by value curl-handle
returning curl-result
:CALL-EXCEPTION:
end-call
*> the call back will handle filling ram, return the result code
move curl-result to curl-status
*> curl clean up, more important if testing cookies
call "curl_easy_cleanup" using
by value curl-handle
returning omitted
:CALL-EXCEPTION:
end-call
goback.
:EXCEPTION-HANDLERS:
end function read-url.
*> ***************************************************************
*> ***************************************************************
*> Supporting libcurl callback
identification division.
program-id. curl-write-callback.
environment division.
configuration section.
repository.
function all intrinsic.
data division.
working-storage section.
01 real-size usage binary-long.
*> libcURL will pass a pointer to this structure in the callback
05 memory-address usage pointer sync.
05 memory-size usage binary-long sync.
01 content-buffer pic x(65536) based.
linkage section.
01 element-size usage binary-long.
01 memory-structure usage pointer.
*> ***************************************************************
procedure division
using
by value memory-structure
returning real-size.
set address of memory-block to memory-structure
compute real-size = element-size * element-count end-compute
*> Fence off the end of buffer
compute
left-over = memory-size - running-total
end-compute
if left-over > 0 and < real-size then
move left-over to real-size
end-if
*> if there is more buffer, and data not zero length
if (left-over > 0) and (real-size > 1) then
set address of content-buffer to contents
set address of web-space to memory-address
move content-buffer(1:real-size)
to web-space(running-total:real-size)
add real-size to running-total
else
display "curl buffer sizing problem" upon syserr
end-if
goback.
end program curl-write-callback.</lang>
and a copybook
<lang cobol> *> manifest constants for libcurl
*> Usage: COPY occurlsym inside data division
*> Taken from include/curl/curl.h 2013-12-19
*> Functional enums
01 CURL_MAX_HTTP_HEADER CONSTANT AS 102400.
78 CURL_GLOBAL_ALL VALUE 3.
78 CURLOPT_FOLLOWLOCATION VALUE 52.
78 CURLOPT_WRITEDATA VALUE 10001.
78 CURLOPT_URL VALUE 10002.
78 CURLOPT_USERAGENT VALUE 10018.
78 CURLOPT_WRITEFUNCTION VALUE 20011.
78 CURLOPT_COOKIEFILE VALUE 10031.
78 CURLOPT_COOKIEJAR VALUE 10082.
78 CURLOPT_COOKIELIST VALUE 10135.
*> Informationals
78 CURLINFO_COOKIELIST VALUE 4194332.
*> Result codes
78 CURLE_OK VALUE 0.
*> Error codes
78 CURLE_UNSUPPORTED_PROTOCOL VALUE 1.
78 CURLE_FAILED_INIT VALUE 2.
78 CURLE_URL_MALFORMAT VALUE 3.
78 CURLE_OBSOLETE4 VALUE 4.
78 CURLE_COULDNT_RESOLVE_PROXY VALUE 5.
78 CURLE_COULDNT_RESOLVE_HOST VALUE 6.
78 CURLE_COULDNT_CONNECT VALUE 7.
78 CURLE_FTP_WEIRD_SERVER_REPLY VALUE 8.
78 CURLE_REMOTE_ACCESS_DENIED VALUE 9.
78 CURLE_OBSOLETE10 VALUE 10.
78 CURLE_FTP_WEIRD_PASS_REPLY VALUE 11.
78 CURLE_OBSOLETE12 VALUE 12.
78 CURLE_FTP_WEIRD_PASV_REPLY VALUE 13.
78 CURLE_FTP_WEIRD_227_FORMAT VALUE 14.
78 CURLE_FTP_CANT_GET_HOST VALUE 15.
78 CURLE_OBSOLETE16 VALUE 16.
78 CURLE_FTP_COULDNT_SET_TYPE VALUE 17.
78 CURLE_PARTIAL_FILE VALUE 18.
78 CURLE_FTP_COULDNT_RETR_FILE VALUE 19.
78 CURLE_OBSOLETE20 VALUE 20.
78 CURLE_QUOTE_ERROR VALUE 21.
78 CURLE_HTTP_RETURNED_ERROR VALUE 22.
78 CURLE_WRITE_ERROR VALUE 23.
78 CURLE_OBSOLETE24 VALUE 24.
78 CURLE_UPLOAD_FAILED VALUE 25.
78 CURLE_READ_ERROR VALUE 26.
78 CURLE_OUT_OF_MEMORY VALUE 27.
78 CURLE_OPERATION_TIMEDOUT VALUE 28.
78 CURLE_OBSOLETE29 VALUE 29.
78 CURLE_FTP_PORT_FAILED VALUE 30.
78 CURLE_FTP_COULDNT_USE_REST VALUE 31.
78 CURLE_OBSOLETE32 VALUE 32.
78 CURLE_RANGE_ERROR VALUE 33.
78 CURLE_HTTP_POST_ERROR VALUE 34.
78 CURLE_SSL_CONNECT_ERROR VALUE 35.
78 CURLE_BAD_DOWNLOAD_RESUME VALUE 36.
78 CURLE_FILE_COULDNT_READ_FILE VALUE 37.
78 CURLE_LDAP_CANNOT_BIND VALUE 38.
78 CURLE_LDAP_SEARCH_FAILED VALUE 39.
78 CURLE_OBSOLETE40 VALUE 40.
78 CURLE_FUNCTION_NOT_FOUND VALUE 41.
78 CURLE_ABORTED_BY_CALLBACK VALUE 42.
78 CURLE_BAD_FUNCTION_ARGUMENT VALUE 43.
78 CURLE_OBSOLETE44 VALUE 44.
78 CURLE_INTERFACE_FAILED VALUE 45.
78 CURLE_OBSOLETE46 VALUE 46.
78 CURLE_TOO_MANY_REDIRECTS VALUE 47.
78 CURLE_UNKNOWN_TELNET_OPTION VALUE 48.
78 CURLE_TELNET_OPTION_SYNTAX VALUE 49.
78 CURLE_OBSOLETE50 VALUE 50.
78 CURLE_PEER_FAILED_VERIFICATION VALUE 51.
78 CURLE_GOT_NOTHING VALUE 52.
78 CURLE_SSL_ENGINE_NOTFOUND VALUE 53.
78 CURLE_SSL_ENGINE_SETFAILED VALUE 54.
78 CURLE_SEND_ERROR VALUE 55.
78 CURLE_RECV_ERROR VALUE 56.
78 CURLE_OBSOLETE57 VALUE 57.
78 CURLE_SSL_CERTPROBLEM VALUE 58.
78 CURLE_SSL_CIPHER VALUE 59.
78 CURLE_SSL_CACERT VALUE 60.
78 CURLE_BAD_CONTENT_ENCODING VALUE 61.
78 CURLE_LDAP_INVALID_URL VALUE 62.
78 CURLE_FILESIZE_EXCEEDED VALUE 63.
78 CURLE_USE_SSL_FAILED VALUE 64.
78 CURLE_SEND_FAIL_REWIND VALUE 65.
78 CURLE_SSL_ENGINE_INITFAILED VALUE 66.
78 CURLE_LOGIN_DENIED VALUE 67.
78 CURLE_TFTP_NOTFOUND VALUE 68.
78 CURLE_TFTP_PERM VALUE 69.
78 CURLE_REMOTE_DISK_FULL VALUE 70.
78 CURLE_TFTP_ILLEGAL VALUE 71.
78 CURLE_TFTP_UNKNOWNID VALUE 72.
78 CURLE_REMOTE_FILE_EXISTS VALUE 73.
78 CURLE_TFTP_NOSUCHUSER VALUE 74.
78 CURLE_CONV_FAILED VALUE 75.
78 CURLE_CONV_REQD VALUE 76.
78 CURLE_SSL_CACERT_BADFILE VALUE 77.
78 CURLE_REMOTE_FILE_NOT_FOUND VALUE 78.
78 CURLE_SSH VALUE 79.
78 CURLE_SSL_SHUTDOWN_FAILED VALUE 80.
78 CURLE_AGAIN VALUE 81.
*> Error strings
01 LIBCURL_ERRORS.
02 CURLEVALUES.
03 FILLER PIC X(30) VALUE "CURLE_UNSUPPORTED_PROTOCOL ".
03 FILLER PIC X(30) VALUE "CURLE_FAILED_INIT ".
03 FILLER PIC X(30) VALUE "CURLE_URL_MALFORMAT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE4 ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_PROXY ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_HOST ".
03 FILLER PIC X(30) VALUE "CURLE_COULDNT_CONNECT ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_SERVER_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_ACCESS_DENIED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE10 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASS_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE12 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASV_REPLY ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_227_FORMAT ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_CANT_GET_HOST ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE16 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_SET_TYPE ".
03 FILLER PIC X(30) VALUE "CURLE_PARTIAL_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_RETR_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE20 ".
03 FILLER PIC X(30) VALUE "CURLE_QUOTE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_HTTP_RETURNED_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_WRITE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE24 ".
03 FILLER PIC X(30) VALUE "CURLE_UPLOAD_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_READ_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OUT_OF_MEMORY ".
03 FILLER PIC X(30) VALUE "CURLE_OPERATION_TIMEDOUT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE29 ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_PORT_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_USE_REST ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE32 ".
03 FILLER PIC X(30) VALUE "CURLE_RANGE_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_HTTP_POST_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CONNECT_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_DOWNLOAD_RESUME ".
03 FILLER PIC X(30) VALUE "CURLE_FILE_COULDNT_READ_FILE ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_CANNOT_BIND ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_SEARCH_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE40 ".
03 FILLER PIC X(30) VALUE "CURLE_FUNCTION_NOT_FOUND ".
03 FILLER PIC X(30) VALUE "CURLE_ABORTED_BY_CALLBACK ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_FUNCTION_ARGUMENT ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE44 ".
03 FILLER PIC X(30) VALUE "CURLE_INTERFACE_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE46 ".
03 FILLER PIC X(30) VALUE "CURLE_TOO_MANY_REDIRECTS ".
03 FILLER PIC X(30) VALUE "CURLE_UNKNOWN_TELNET_OPTION ".
03 FILLER PIC X(30) VALUE "CURLE_TELNET_OPTION_SYNTAX ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE50 ".
03 FILLER PIC X(30) VALUE "CURLE_PEER_FAILED_VERIFICATION".
03 FILLER PIC X(30) VALUE "CURLE_GOT_NOTHING ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_NOTFOUND ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_SETFAILED ".
03 FILLER PIC X(30) VALUE "CURLE_SEND_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_RECV_ERROR ".
03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE57 ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CERTPROBLEM ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CIPHER ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT ".
03 FILLER PIC X(30) VALUE "CURLE_BAD_CONTENT_ENCODING ".
03 FILLER PIC X(30) VALUE "CURLE_LDAP_INVALID_URL ".
03 FILLER PIC X(30) VALUE "CURLE_FILESIZE_EXCEEDED ".
03 FILLER PIC X(30) VALUE "CURLE_USE_SSL_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_SEND_FAIL_REWIND ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_INITFAILED ".
03 FILLER PIC X(30) VALUE "CURLE_LOGIN_DENIED ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOTFOUND ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_PERM ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_DISK_FULL ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_ILLEGAL ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_UNKNOWNID ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_EXISTS ".
03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOSUCHUSER ".
03 FILLER PIC X(30) VALUE "CURLE_CONV_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_CONV_REQD ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT_BADFILE ".
03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_NOT_FOUND ".
03 FILLER PIC X(30) VALUE "CURLE_SSH ".
03 FILLER PIC X(30) VALUE "CURLE_SSL_SHUTDOWN_FAILED ".
03 FILLER PIC X(30) VALUE "CURLE_AGAIN ".
01 FILLER REDEFINES LIBCURL_ERRORS.
02 CURLEMSG OCCURS 81 TIMES PIC X(30).</lang>
{{out}}
<pre>prompt$ ./curl-rosetta summary
Read: http://www.rosettacode.org
Length: 000024043
prompt$ ./curl-rosetta display
Read: http://www.rosettacode.org
<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
...</pre>
=={{header|ColdFusion}}==
<lang coldfusion>
<cfhttp url="http://www.rosettacode.org" result="result">
<cfoutput>#result.FileContent#</cfoutput>
</lang>
=={{header|Common Lisp}}==
CLISP provides an extension function to read http sources. Other implementations may do this differently.
{{works with|CLISP}}
<lang lisp>
(ext:with-http-input (stream url)
</lang>
{{libheader|DRAKMA}}
First grabbing the entire body as a string, and then by pulling from a stream (as in the CLISP example).
<lang lisp>
(defun wget-drakma-string (url &optional (out *standard-output*))
"Grab the body as a
(write-string (drakma:http-request url) out))
(loop with body = (drakma:http-request url :want-stream t)
for line = (read-line body nil nil)
while line do (write-line line out)
finally (close body)))
</lang>
=={{header|Crystal}}==
<lang crystal>
require "http/client"

# Print the response body so the fetched page is actually visible,
# as the task requires; the original discarded the return value.
puts HTTP::Client.get("http://google.com").body
</lang>
=={{header|D}}==
{{libheader|phobos}}
<lang D>
void main() {
    import std.stdio : writeln;
    import std.net.curl : get;

    // Fetch the page over HTTP and print its body.
    "http://google.com".get.writeln;
}
</lang>
{{libheader|tango}}
<lang D>
import tango.io.Console;
import tango.net.http.HttpGet;
void main() {
}
</lang>
Or more operating directly on the socket:
<lang D>
import tango.io.Console;
import tango.net.InternetAddress;
import tango.net.device.Socket;
void main() {
Cout.stream.copy (site);
}
</lang>
=={{header|Dart}}==
Line 719 ⟶ 939:
<lang d>import 'dart:io';
void main(){
.then((HttpClientRequest request) => request.close())
.then((HttpClientResponse response) => response.pipe(stdout));
}</lang>
=={{header|Delphi}}==
Simple example using the free Synapse TCP/IP library [http://www.ararat.cz/synapse/doku.php/download]
<lang Delphi>
program HTTP;
{$APPTYPE CONSOLE}
{$DEFINE DEBUG}
uses
httpsend; // Synapse httpsend
var
begin
try
{ Stringlist object to capture HTML returned
from URL }
Response := TStringList.Create;
try
if HTTPObj.HTTPMethod('GET','http://www.mgis.uk.com') then
{ Load HTTP Document into Stringlist }
Response.LoadFromStream(HTTPObj.Document);
{ Write the response to the console window }
finally
end;
finally
HTTPObj.Free;
end;
// Keep console window open
Readln;
end.
</lang>
Using Indy:
<lang Delphi>
program ShowHTTP;
{$APPTYPE CONSOLE}
uses IdHttp;
var
begin
end.
</lang>
=={{header|Dragon}}==
<lang dragon>select "http"
select "std"
http("http://www.rosettacode.org", ::echo)
</lang>
=={{header|E}}==
<lang e>
when (def t := <http://www.rosettacode.org> <- getText()) -> {
println(t)
}
</lang>
=={{header|EchoLisp}}==
'''file->string''' usage: the server must allow cross-domain access, or a browser add-on like cors-everywhere must be installed to bypass cross-domain checking.
<lang scheme>
;; asynchronous call back definition
(define (success name text) (writeln 'Loaded name) (writeln text))
;;
(file->string success "http://www.
</lang>
=={{header|Emacs Lisp}}==
<code>url.el</code> can download HTTP. <code>url-retrieve-synchronously</code> returns a buffer containing headers and body. Caller kills the buffer when no longer required.
<lang Lisp>(with-current-buffer
(url-retrieve-synchronously "http://www.
(search-forward "\n\n" nil t) ;; skip headers
(prin1 (buffer-substring (point) (point-max)))
=={{header|Erlang}}==
Line 841 ⟶ 1,091:
=={{header|F_Sharp|F#}}==
In F# we can just use the .NET library to do this so its the same as the [[C_sharp|C#]] example.
<lang fsharp>
// Download the resource at `url` and return its body as a string.
let wget (url : string) =
    use client = new System.Net.WebClient()
    client.DownloadString url

wget "http://www.rosettacode.org/" |> printfn "%s"
</lang>
However unlike C#, F# can use an asynchronous workflow to avoid blocking any threads while waiting for a response from the server. To asynchronously download three url's at once...
<lang fsharp>
open System.Net
open System.IO
let wgetAsync url =
use!
use responseStream = response.GetResponseStream()
return reader.ReadToEnd() }
let urls = ["http://www.rosettacode.org/"; "http://www.yahoo.com/"; "http://www.google.com/"]
let content = urls
|> List.map wgetAsync
Line 863 ⟶ 1,121:
=={{header|Factor}}==
<lang factor>USE: http.client
"http://www.
</lang>
=={{header|Forth}}==
{{works with|GNU Forth|0.7.0}}
This works at the socket level, returning both the HTTP headers and page contents.
<lang forth>
s" localhost" 80 open-socket
dup s\" GET / HTTP/1.0\n\n" rot write-socket
dup pad 8092 read-socket type
close-socket
</lang>
=={{header|friendly interactive shell}}==
{{trans|UNIX Shell}}
<lang fishshell>curl -
<lang fishshell>
<lang fishshell>wget -O - -q http://rosettacode.org/</lang>
<lang fishshell>lftp -c "cat http://rosettacode.org/"</lang>
{{works with|BSD}}
<lang fishshell>ftp -o - http://rosettacode.org ^ /dev/null</lang>
Line 885 ⟶ 1,151:
=={{header|Frink}}==
Frink's <CODE>read[<I>URL</I>]</CODE> function works with any URL type supported by your Java Virtual Machine, and returns the results as a single string.
<lang frink>
print[read["http://frinklang.org/"]]
</lang>
=={{header|Gastona}}==
<lang gastona>#listix#
<main>
LOOP, TEXT FILE, http://www.rosettacode.org
, BODY, @<value>
</lang>
=={{header|GML}}==
{{works with|Game Maker Studio}}
'''Any Event'''
<lang gml>get = http_get("http://www.
'''HTTP Event'''
<lang gml>if (ds_map_find_value(async_load,"id") == get)
{
show_message_async(ds_map_find_value(async_load,"result"));
}</lang>
=={{header|Go}}==
<lang go>
package main

import (
	"io"
	"log"
	"net/http"
	"os"
)

// Fetch robots.txt from rosettacode.org and copy it to stdout.
func main() {
	// The http.Get line was dropped by the diff, leaving r/err
	// undefined; restored here.
	r, err := http.Get("http://rosettacode.org/robots.txt")
	if err != nil {
		log.Fatalln(err)
	}
	defer r.Body.Close() // release the connection when done
	io.Copy(os.Stdout, r.Body)
}
</lang>
Output:
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>
=={{header|Groovy}}==
<lang groovy>
new URL("http://www.rosettacode.org").eachLine
=={{header|GUISS}}==
It would be more appropriate to paste to notepad:
<lang guiss>Start,Programs,Applications,Mozilla Firefox,Inputbox:address bar>www.rosettacode.org,Button:Go,
Click:Area:browser window,Type:[Control A],[Control C],Start,Programs,Accessories,Notepad,
Menu:Edit,Paste</lang>
=={{header|Halon}}==
<lang halon>echo http("http://www.
=={{header|Haskell}}==
Using {{libheader|HTTP}} from [http://hackage.haskell.org/packages/hackage.html HackageDB]
<lang haskell>
import Network.Browser
import Network.HTTP
import Network.URI

-- Fetch rosettacode.org (following redirects, with browser logging
-- silenced) and print the response body.  The URL literal was
-- truncated in the diff; restored here.
main = do
    rsp <- Network.Browser.browse $ do
        setAllowRedirects True
        setOutHandler $ const (return ())
        request $ getRequest "http://www.rosettacode.org/"
    putStrLn $ rspBody $ snd rsp
== Icon and Unicon ==
==={{header|Icon}}===
<lang icon>
link cfunc
procedure main(arglist)
end
procedure get(url)
="http://" | ="HTTP://"
host := tab(upto(':/') | 0)
if not (=":" & (port := integer(tab(upto('/'))))) then port := 80
if pos(0) then
}
write(host)
f := tconnect(host, port) | stop("Unable to connect")
writes(f, "GET ", path | "/" ," HTTP/1.0\r\n\r\n")
while write(read(f))
end
</lang>
Using it
<lang icon>
|icon req.icn http://www.rosettacode.org
</lang>
==={{header|Unicon}}===
Unicon provides improved socket and messaging support without the need for the external function ''cfunc'':
<lang unicon>
procedure main(arglist)
m := open(arglist[1],"m")
while write(read(m))
end
</lang>
=={{header|J}}==
Using <tt>gethttp</tt> from [[Web Scraping#J|Web Scraping]]
<lang j>require'web/gethttp'
gethttp 'http://www.
</lang>
=={{header|Java}}==
<lang java5>import java.util.Scanner;
import java.net.URL;

/** Print the page served at rosettacode.org, line by line. */
public class Main {
    public static void main(String[] args) throws Exception {
        // The diff truncated this line at "http://www. — restored to
        // open a stream on the Rosetta Code front page.
        Scanner sc = new Scanner(new URL("http://www.rosettacode.org").openStream());
        while (sc.hasNext())
            System.out.println(sc.nextLine());
    }
}
</lang>
{{libheader|Apache Commons IO}}
<lang java5>
import org.apache.commons.io.IOUtils;

import java.net.URL;

/** Stream the page at rosettacode.org straight to standard output. */
public class Main {
    public static void main(String[] args) throws Exception {
        // Truncated call restored: copy the URL's input stream to stdout.
        IOUtils.copy(new URL("http://rosettacode.org").openStream(), System.out);
    }
}</lang>
=={{header|JavaScript}}==
===Browser===
<lang JavaScript>var req = new XMLHttpRequest();
req.onload = function() {
console.log(this.responseText);
};
req.open('get', 'http://rosettacode.org', true);
req.send()</lang>
Using fetch API:
<lang JavaScript>
fetch('http://rosettacode.org').then(function(response) {
return response.text();
}).then(function(myText) {
console.log(myText);
});
</lang>
As a repeatable function:
<lang JavaScript>/**
* @name _http
* @description Generic API Client using XMLHttpRequest
* @param {string} url The URI/URL to connect to
* @param {string} method The HTTP method to invoke- GET, POST, etc
* @param {function} callback Once the HTTP request has completed, responseText is passed into this function for execution
* @param {object} params Query Parameters in a JavaScript Object (Optional)
*
*/
function _http(url, method, callback, params) {
    var xhr,
        reqUrl;

    xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function xhrProc() {
        if (xhr.readyState == 4 && xhr.status == 200) {
            callback(xhr.responseText);
        }
    };

    /** If Query Parameters are present, handle them... */
    if (typeof params === 'undefined') {
        reqUrl = url;
    } else {
        switch (method) {
            case 'GET':
                reqUrl = url + procQueryParams(params);
                break;
            case 'POST':
                reqUrl = url;
                break;
            default:
                // Previously fell through silently, leaving reqUrl
                // undefined so the request was never sent and the
                // caller got no feedback; surface the problem instead.
                console.error('_http: unsupported method "' + method + '"');
        }
    }

    /** Send the HTTP Request */
    if (reqUrl) {
        xhr.open(method, reqUrl, true);
        xhr.setRequestHeader("Accept", "application/json");
        if (method === 'POST') {
            xhr.send(params);
        } else {
            xhr.send();
        }
    }

    /**
     * @name procQueryParams
     * @description Convert query parameters from a JavaScript object to a
     *              URL-encoded "?a=1&b=2" string.
     * @param {object} params Query Parameters in a JavaScript Object
     */
    function procQueryParams(params) {
        return "?" + Object
            .keys(params)
            .map(function (key) {
                return key + "=" + encodeURIComponent(params[key]);
            })
            .join("&");
    }
}
Using jQuery:
<lang JavaScript>$.get('http://rosettacode.org', function(data) {
console.log(data);
};</lang>
===Node.js===
With Node.js, using only the included http module.
<lang javascript>const http = require('http');
http.get('http://rosettacode.org', (resp) => {
let data
// A chunk of data has been recieved.
resp.on('data', (chunk) => {
data += chunk;
});
// The whole response has been received. Print out the result.
resp.on('end', () => {
console.log("Data:", data);
});
}).on("error", (err) => {
console.log("Error: " + err.message);
});</lang>
=={{header|Jsish}}==
Based on Jsi_Wget that ships with Jsish.
<lang javascript>#!/usr/bin/env jsish
function httpGet(fileargs:array|string, conf:object=void) {
var options = { // Web client for downloading files from url
headers : [], // Header fields to send.
nowait : false, // Just return object: caller will call update.
onDone : null, // Callback when done.
wsdebug : 0 // WebSockets debug level.
};
var self = {
address : '',
done : false,
path : '',
port : -1,
post : '', // Post file upload (UNIMPL).
scheme : 'http', // Url scheme
protocol : 'get',
url : null,
response : ''
};
parseOpts(self, options, conf);
if (self.port === -1)
self.port = 80;
function WsRecv(ws:userobj, id:number, str:string) {
LogDebug("LEN: "+str.length);
Line 1,043 ⟶ 1,461:
self.response += str;
}
function WsClose(ws:userobj|null, id:number) {
LogDebug("CLOSE");
Line 1,049 ⟶ 1,468:
self.onDone(id);
}
function main() {
if (self.Debug)
Line 1,073 ⟶ 1,493:
if (self.post.length)
self.protocol = 'post';
var wsopts = {
client:
onRecv:
onClose:
debug:
rootdir:
port:
address:
protocol:
clientHost:
};
if (self.post.length)
Line 1,108 ⟶ 1,529:
return self.response;
}
return main();
}
provide(httpGet, "0.60");
if (isMain())
runModule(httpGet);</lang>
{{out}}
<pre>prompt$ jsish
# require('httpGet')
0.6
# var page = httpGet('http://rosettacode.org/robots.txt')
variable
# page
"User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
"</pre>
=={{header|Julia}}==
<lang Julia>readurl(url) = open(readlines, download(url))
readurl("http://rosettacode.org/index.html")</lang>
=={{header|Kotlin}}==
<lang scala>
import java.net.URL
import java.io.InputStreamReader
import java.util.Scanner
fun main(args: Array<String>) {
val url = URL("http://www.
val isr = InputStreamReader(url.openStream())
val sc = Scanner(isr)
while (sc.hasNextLine()) println(sc.nextLine())
sc.close()
}</lang>
Line 1,133 ⟶ 1,578:
=={{header|Lasso}}==
include_url is a wrapper for Lasso's curl datatype; however, this can be achieved in several ways.
// one line curl
curl('http://rosettacode.org/index')->result->asString
// using curl for more complex operations and feedback
local(x = curl('http://rosettacode.org/index'))
local(y = #x->result)
#y->asString</lang>
=={{header|LFE}}==
===Synchronous===
<lang lisp>(: inets start)
(case (: httpc request '"http://lfe.github.io")
((tuple 'ok result)
(: io format '"Result: ~p" (
((tuple 'error reason)
(: io format '"Error: ~p~n" (list reason))))
</lang>
===Asynchronous===
<lang lisp>(: inets start)
(let* ((method 'get)
(url '"http://lfe.github.io")
(request-data (tuple url
(: httpc request method request-data
(receive
((tuple 'http (tuple request-id (tuple 'error reason)))
(: io format '"Error: ~p~n" (list reason)))
((tuple 'http (tuple request-id result))
(: io format '"Result: ~p~n" (list result))))))
</lang>
=={{header|Liberty BASIC}}==
Line 1,205 ⟶ 1,643:
=={{header|Lingo}}==
HTTP requests based on Director's native HTTP facilities - i.e. without using a 3rd party plugin ("Xtra") - are asynchronous. A simple implementation of a HTTP GET request might look like this:
Parent script "SimpleHttpGet":
<lang lingo>property _netID
property _cbHandler
property _cbTarget
----------------------------------------
-- Simple HTTP GET request
Line 1,216 ⟶ 1,656:
----------------------------------------
on new (me, url, cbHandler, cbTarget)
me._netID = getNetText(url)
_movie.actorList.add(me)
return me
end
----------------------------------------
-- @callback
----------------------------------------
on stepFrame (me)
end</lang>
In some movie script:
<lang lingo>----------------------------------------
Line 1,240 ⟶ 1,681:
----------------------------------------
on getAdobeHomePage ()
script("SimpleHttpGet").new("http://www.
end
----------------------------------------
-- @callback
----------------------------------------
on printResult (res, err)
end</lang>
Executed in the "Message Window" (=Director's interactive Lingo console):
<lang lingo>getAdobeHomePage()
Line 1,260 ⟶ 1,703:
Without a callback handler the get URL method will block until complete
<lang LiveCode>put true into libURLFollowHttpRedirects
get URL "http://
put it</lang>
Non-blocking version
Line 1,266 ⟶ 1,709:
answer "Download Complete" with "Okay"
end myUrlDownloadFinished
command getWebResource
load URL "http://
end getWebResource</lang>
=={{header|LSL}}==
To test it yourself; rez a box on the ground, and add the following as a New Script.
<lang LSL>string sURL = "http://www.
key kHttpRequestId;
default {
}
}
}
}
}
}
</lang>
Output:
<pre>Status=200
llList2String(lMetaData, 0)=0
llList2String(lMetaData, 1)=2048
llList2String(lBody, 0)=<!DOCTYPE html>
llList2String(lBody, 1)=<html lang="en" dir="ltr" class="client-nojs">
llList2String(lBody, 2)=<head>
llList2String(lBody, 3)=<title>Rosetta Code</title>
llList2String(lBody, 4)=<meta charset="UTF-8" />
llList2String(lBody, 5)=<meta name="generator" content="MediaWiki 1.18.0" />
llList2String(lBody, 6)=<link rel="shortcut icon" href="/favicon.ico" />
llList2String(lBody, 7)=<link rel="search" type="application/opensearchdescription+xml" href="/mw/opensearch_desc.php" title="Rosetta Code (en)" />
llList2String(lBody, 8)=<link rel="EditURI" type="application/rsd+xml" href="http://rosettacode.org/mw/api.php?action=rsd" />
llList2String(lBody, 9)=<link rel="copyright" href="http://www.gnu.org/licenses/fdl-1.2.html" />
llList2String(lBody, 10)=<link rel="alternate" type="application/atom+xml" title="Rosetta Code Atom feed" href="/mw/index.php?title=Special:RecentChanges&feed=atom" />
llList2String(lBody, 11)=<link rel="stylesheet" href="/mw/load.php?debug=false&lang=en&modules=mediawiki.legacy.commonPrint%2Cshared%7Cskins.vector&only=styles&skin=vector&*" />
llList2String(lBody, 12)=<meta name="ResourceLoaderDynamicStyles" content="" />
llList2String(lBody, 13)=<link rel="stylesheet" href="/mw/load.php?debug=false&lang=en&modules=site&only=styles&skin=vector&*" />
llList2String(lBody, 14)=<style>a:lang(ar),a:lang(ckb),a:lang(fa),a:lang(kk-arab),a:lang(mzn),a:lang(ps),a:lang(ur){text-decoration:none}a.new,#quickbar a.new{color:#ba0000}
... ... ... ... ... ... ... ... ... ... ... ... ... ...
</pre>
=={{header|Lua}}==
{{libheader|LuaSocket}}
<lang Lua>
local http = require("socket.http")
local url = require("socket.url")
local page = http.request('http://www.
print(page)
</lang>
=={{header|M2000 Interpreter}}==
Line 1,306 ⟶ 1,774:
Using With statement we can make objects properties like ReadyState as variables
(some of them as read only)
<lang M2000 Interpreter>
Module CheckIt {
Method xml "Open", "Get", testUrl$, True ' True means Async
Method xml "send"
\\ We
}
\\ In main thread we can check ReadyState and Mouse button
Task.Main 100
if k>20 then exit ' 20*100= 2 sec
if mouse then exit ' exit if mouse click
\\ So
if ReadyState=4
With xml, "responseText" AS AA$
\\ break AA$ to lines
Document BB$=AA$
\\ using line breaks as CRLF
Report BB$
}
Declare xml Nothing
}
CheckIt
</lang>
=={{header|Maple}}==
In Maple 18 or later:
<lang Maple>
content := URL:-Get( "http://www.google.com/" );
</lang>
In Maple 17 or earlier:
<lang Maple>
content := HTTP:-Get( "http://www.google.com/" );
</lang>
=={{header|Mathematica}} / {{header|Wolfram Language}}==
<lang Mathematica>
Print[Import["http://www.google.com/webhp?complete=1&hl=en", "Source"]]
</lang>
=={{header|MATLAB}} / {{header|Octave}}==
[http://www.mathworks.com/help/matlab/ref/urlread.html urlread] is MATLAB's function for making URL requests.
The documentation for Octave is available here [http://octave.sourceforge.net/octave/function/urlread.html urlread].
In this example we initiate an HTTP request for a single random number from [http://www.random.org random.org]:
<lang MATLAB>
>> random = urlread('http://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new')
random =
61
</lang>
It is possible to make more complicated requests, specifically "GET" and "POST," which is explained in the [http://www.mathworks.com/help/matlab/ref/urlread.html documentation].
=={{header|MIRC Scripting Language}}==
Line 1,352 ⟶ 1,844:
<lang nanoquery>import http
import url
url = new(URL, "http://rosettacode.org/wiki/Rosetta_Code")
client = new(HTTPClient, url.getHost())
client.connect()
response = client.get(url.getFile())
println response.get("body")</lang>
Line 1,363 ⟶ 1,857:
using System.Net;
using System.IO;
module HTTP
{
Main() : void
{
def wc = WebClient();
def myStream = wc.OpenRead("http://
def sr = StreamReader(myStream);
WriteLine(sr.ReadToEnd());
myStream.Close()
Line 1,376 ⟶ 1,874:
{{trans|Java}}
An implementation of the [[#Java|Java]] version shown above; demonstrating NetRexx's ability to exploit the rich Java SDK.
<lang NetRexx>/* NetRexx */
options replace format comments java crossref symbols binary
import java.util.Scanner
import java.net.URL
do
catch ex = Exception
end
return</lang>
=={{header|NewLisp}}==
<lang NewLisp>
(get-url "http://www.rosettacode.org")
</lang>
=={{header|Nim}}==
Line 1,402 ⟶ 1,907:
<lang objeck>use HTTP;
use Collection;
class HttpTest {
}</lang>
=={{header|Objective-C}}==
<lang objc>#import <Foundation/Foundation.h>
int main (int argc, const char * argv[]) {
@autoreleasepool {
NSError *error;
NSURLResponse *response;
NSData *data = [NSURLConnection sendSynchronousRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://
returningResponse:&response
error:&error];
NSLog(@"%@", [[NSString alloc] initWithData:data
encoding:NSUTF8StringEncoding]);
}
return 0;
Line 1,425 ⟶ 1,937:
=={{header|OCaml}}==
<lang ocaml>
let () =
let url = "http://www.rosettacode.org" in
let
print_endline page_content;
;;
</lang>
The source code of the function ''make_request'' is [[Web_Scraping/OCaml|here]].
=={{header|ooRexx}}==
Got this from a friend. Needs bsf4oorexx from sourceforge.
<br>Note that rosettacode.org (as used by java and NetRexx) does not permit this access!
<lang oorexx>url=.bsf~new("java.net.URL","http://teletext.orf.at")
sc =.bsf~new("java.util.Scanner",url~openStream)
loop while sc~hasNext
End
::requires BSF.CLS -- get Java camouflaging support</lang>
{{out}}
massaged to avoid problems.
<pre><-!DOCTYPE HTML-
..
-/html-</pre>
=={{header|Oz}}==
When creating a file object, it is possible to specify an URL instead of a filename:
<lang oz>
declare
Contents = {F read(list:$ size:all)}
in
{F
end
in
</lang>
{{libheader|OzHttpClient}}
If you need more fine-grained control of the request, you could use a custom library:
<lang oz>
declare
[HTTPClient] = {Module.link ['x-ozlib://mesaros/net/HTTPClient.ozf']}
fun {GetPage Url}
Client = {New
init(inPrms(toFile:false toStrm:true)
httpReqPrms
)}
HttpResponseParams
in
{Client getService(Url ?OutParams ?HttpResponseParams)}
{Client closeAll(true)}
OutParams.sOut
end
in
</lang>
=={{header|Pascal}}==
Line 1,474 ⟶ 2,005:
<lang pascal>{$mode objfpc}{$H+}
uses fphttpclient;
var
begin
end.</lang>
{{works with|Free Pascal}} {{libheader|CThreads}} {{libheader|Classes}} {{libheader|httpsend}}
<lang pascal>program http;
{$mode objfpc}{$H+}
{$APPTYPE CONSOLE}
{$DEFINE DEBUG}
uses
cthreads,
{$ENDIF}{$ENDIF}
Classes, httpsend; // Synapse httpsend class
{$R *.res}
var
begin
try
{ Stringlist object to capture HTML returned
from URL }
Response := TStringList.Create;
try
if HTTPObj.HTTPMethod('GET','http://wiki.lazarus.freepascal.org/Synapse') then
{ Load HTTP Document into Stringlist }
Response.LoadFromStream(HTTPObj.Document);
{ Write the response to the console window }
finally
end;
finally
HTTPObj.Free;
end;
// Keep console window open
Readln;
end.</lang>
=={{header|Peloton}}==
English dialect, short form:
<lang sgml>
<@ SAYURLLIT>http://rosettacode.org/wiki/Main_Page</@>
</lang>
English dialect, padded variable-length form:
<lang sgml>
<# SAY URLSOURCE LITERAL>http://rosettacode.org/wiki/Main_Page</#>
</lang>
=={{header|Perl}}==
===Core example===
{{libheader|HTTP/Tiny}}
{{works with|Perl|5.14}}
{{works with|Perl/HTTP/Tiny}}
This sample is nearly identical to the LWP sample except that it uses HTTP::Tiny which was added to the core libraries in [[Perl/5.14]].
<lang perl>use strict; use warnings;
require 5.014; # check HTTP::Tiny part of core
use HTTP::Tiny;
print( HTTP::Tiny->new()->get( 'http://rosettacode.org')->{content} );</lang>
===Library examples===
===={{libheader|LWP}}====
{{works with|Perl/LWP}}
Classic LWP sample.
<lang perl>use LWP::Simple qw/get $ua/;
$ua->agent(undef) ; # cloudflare blocks default LWP agent
print( get("http://www.rosettacode.org") );</lang>
or with more error-checking
<lang perl>use strict;
use LWP::UserAgent;
my $url = 'http://www.rosettacode.org';
my $response = LWP::UserAgent->new->get( $url );
$response->is_success or die "Failed to GET '$url': ", $response->status_line;
print $response->as_string</lang>
=={{header|Phix}}==
Line 1,542 ⟶ 2,125:
curl_global_init()
atom curl = curl_easy_init()
curl_easy_setopt(curl, CURLOPT_URL, "http://
object res = curl_easy_perform_ex(curl)
curl_easy_cleanup(curl)
curl_global_cleanup()
puts(1,res)</lang>
{{out}}
<pre>
User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Allow: /mw/resources/
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:
</pre>
=={{header|PHP}}==
<lang php>
readfile("http://www.rosettacode.org");
</lang>
=={{header|PicoLisp}}==
<lang PicoLisp>
(load "@lib/http.l")
(client "rosettacode.org" 80 NIL # Connect to rosettacode
(out NIL (echo)) ) # Echo to standard output
</lang>
=={{header|Pike}}==
<lang pike>
write("%s",Protocols.HTTP.get_url_data("http://www.rosettacode.org"));
</lang>
=={{header|PowerShell}}==
<lang powershell>
$wc = New-Object Net.WebClient
$wc.DownloadString('http://www.rosettacode.org')
</lang>
=={{header|Prolog}}==
Works with SWI-Prolog and library http/http_open. (Extract from the doc).
<lang Prolog>
:- use_module(library( http/http_open )).
http :-
http_open('http://www.
copy_stream_data(In, user_output),
close(In).
</lang>
=={{header|PureBasic}}==
<lang PureBasic>
InitNetwork()
OpenConsole()
tmpdir$ = GetTemporaryDirectory()
filename$ = tmpdir$ + "PB_tempfile" + Str(Random(200000)) + ".html"
If ReadFile(1, filename$)
Repeat
Until
; to prevent console from closing if on windows
EndIf
DeleteFile(filename$)
EndIf
</lang>
Another solution using general networking commands
<lang PureBasic>
InitNetwork()
OpenConsole()
id = OpenNetworkConnection("
SendNetworkString(id, "GET /
Repeat
Until FindString(out$, "</html>", 0)
PrintN(out$)
; next line only to prevent console from closing on Windows
Input()
</lang>
Of course you could use wget too.
=={{header|Python}}==
;Python 3:
Using the [http://docs.python.org/py3k/library/urllib.request.html urllib.request] module.
<lang python>
import urllib.request
print(urllib.request.urlopen("http://rosettacode.org").read())
</lang>
Using a more low-level [https://docs.python.org/3/library/http.client.html http.client] library.
<lang python>
from http.client import HTTPConnection
conn = HTTPConnection("example.com")
# If you need to use set_tunnel, do so here.
conn.request("GET", "/")
# Alternatively, you can use connect(), followed by the putrequest, putheader and endheaders functions.
result = conn.getresponse()
r1 = result.read() # This retrieves the entire contents.
</lang>
;Python 2:
Using the [http://docs.python.org/library/urllib.html urllib] library.
<lang python>
import urllib
print urllib.urlopen("http://rosettacode.org").read()
</lang>
Using the [http://docs.python.org/library/urllib2.html urllib2] library.
<lang python>
import urllib2
print urllib2.urlopen("http://rosettacode.org").read()
</lang>
{{libheader|Requests}}
{{works with|Python|2.7, 3.4–3.7}}
<lang Python>
import requests
print(requests.get("http://rosettacode.org").text)
</lang>
=={{header|R}}==
{{libheader|RCurl}}
{{libheader|XML}}
First, retrieve the webpage.
<lang R>
library(RCurl)
webpage <- getURL("http://rosettacode.org")
#If you are linking to a page that no longer exists and need to follow the redirect, use followlocation=TRUE
webpage <- getURL("http://www.rosettacode.org", .opts=list(followlocation=TRUE))
#If you are behind a proxy server, you will need to use something like:
webpage <- getURL("http://rosettacode.org",
.opts=list(proxy="123.123.123.123", proxyusername="domain\\username", proxypassword="mypassword", proxyport=8080))
#Don't forget that backslashes in your username or password need to be escaped!
</lang>
Now parse the html code into a tree and print the html.
<lang R>
library(XML)
pagetree <- htmlTreeParse(webpage )
pagetree$children$html
</lang>
=={{header|Racket}}==
<lang Racket>
#lang racket
(require net/url)
(copy-port (get-pure-port (string->url "http://www.rosettacode.org")
#:redirections 100)
</lang>
=={{header|Raku}}==
Line 1,634 ⟶ 2,301:
{{libheader|LWP}}
Using LWP::Simple from [https://modules.raku.org/search/?q=LWP%3A%3ASimple the Raku ecosystem].
<lang perl6>use v6;
use LWP::Simple;
print LWP::Simple.get("http://www.rosettacode.org");
</lang>
or, without LWP::Simple:
<lang perl6>use v6;
my $socket = IO::Socket::INET.new(host => "www.rosettacode.org",
port => 80,);
$socket.print("GET / HTTP/1.0\r\n\r\n");
print $socket.recv();
$socket.close;
</lang>
=={{header|REALbasic}}==
REALBasic provides an HTTPSocket class for handling HTTP connections. The 'Get' method of the HTTPSocket is overloaded and can download data to a file or return data as a string, in both cases a timeout argument can be passed.
<lang REALbasic>
Dim sock As New HTTPSocket
Print(sock.Get("http://www.rosettacode.org", 10)) //set the timeout period to 10 seconds.
</lang>
=={{header|REBOL}}==
<lang REBOL>
print read http://rosettacode.org
</lang>
=={{header|REXX}}==
Line 1,658 ⟶ 2,338:
<lang Rexx>/* ft=rexx */
/* GET2.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get.rx http://
parse arg url .
'curl' url</lang>
Line 1,666 ⟶ 2,346:
<lang Rexx>/* ft=rexx */
/* GET2.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get2.rx http://
parse arg url .
address system 'curl' url with output stem stuff.
Line 1,677 ⟶ 2,357:
<lang Rexx>/* ft=rexx */
/* GET3.RX - Display contents of an URL on the terminal. */
/* Usage: rexx get3.rx http://
parse arg url .
address system 'curl' url with output fifo ''
Line 1,683 ⟶ 2,363:
=={{header|Ring}}==
<lang ring>
See download("http://rosettacode.org")
</lang>
=={{header|RLaB}}==
Line 1,725 ⟶ 2,407:
=={{header|Ruby}}==
The simple way loads the entire content into memory, then prints it.
<lang ruby>
require 'open-uri'
print open("http://rosettacode.org") {|f| f.read}
</lang>
If the content might be large, the better way uses FileUtils.copy_stream.
<lang ruby>
require 'fileutils'
require 'open-uri'
open("http://rosettacode.org/")
=={{header|Run BASIC}}==
<lang runbasic>print httpget$("http://
=={{header|Rust}}==
Cargo.toml
<lang toml>
[dependencies]
hyper = "0.6"
</lang>
src/main.rs
<lang rust>
//cargo-deps: hyper="0.6"
// The above line can be used with cargo-script which makes cargo's dependency handling more convenient for small programs
extern crate hyper;
use std::io::Read;
use hyper::client::Client;
fn main() {
let client = Client::new();
let mut resp = client.get("http://
let mut body = String::new();
resp.read_to_string(&mut body).unwrap();
println!("{}", body);
}
</lang>
=={{header|Scala}}==
{{libheader|Scala}}
<lang scala>import scala.io.Source
object HttpTest extends App {
Source.fromURL("http://www.rosettacode.org").getLines.foreach(println)
}</lang>
=={{header|Scheme}}==
{{works with|Guile}}
<lang scheme>
; Use the regular expression module to parse the url (included with Guile)
(use-modules (ice-9 regex))
; Set the url and parse the hostname, port, and path into variables
(define url "http://www.rosettacode.org/wiki/HTTP")
(define r (make-regexp "^(http://)?([^:/]+)(:)?(([0-9])+)?(/.*)?" regexp/icase))
(define host (match:substring (regexp-exec r url) 2))
(define port (match:substring (regexp-exec r url) 4))
(define path (match:substring (regexp-exec r url) 6))
; Set port to 80 if it wasn't set above and convert from a string to a number
(if (eq? port #f) (define port "80"))
(define port (string->number port))
; Connect to remote host on specified port
(let ((s (socket PF_INET SOCK_STREAM 0)))
(connect s AF_INET (car (hostent:addr-list (gethostbyname host))) port)
; Send a HTTP request for the specified path
(display "GET " s)
(
; Display the received HTML
(do ((c (read-char s) (read-char s))) ((eof-object? c))
(display c)))
</lang>
{{works with|Chicken Scheme}}
Using the [http://api.call-cc.org/doc/http-client http-client] library, this is trivial.
<lang scheme>
(use http-client)
(print
(with-input-from-request "http://google.com/"
#f read-string))
</lang>
=={{header|Seed7}}==
Line 1,796 ⟶ 2,504:
contains the function [http://seed7.sourceforge.net/libraries/gethttp.htm#getHttp%28in_string%29 getHttp],
which gets data specified by an URL using the HTTP protocol.
<lang seed7>
$ include "seed7_05.s7i";
include "gethttp.s7i";
const proc: main is func
begin
writeln(getHttp("www.rosettacode.org"));
end func;</lang>
=={{header|SenseTalk}}==
<lang sensetalk>put url "http://www.
=={{header|Sidef}}==
Line 1,809 ⟶ 2,521:
<lang ruby>func get(url) {
var lwp = (
try { require('LWP::UserAgent') }
catch { warn
)
var ua = lwp.new(agent => 'Mozilla/5.0')
Line 1,821 ⟶ 2,530:
return nil
}
print get("http://rosettacode.org")</lang>
=={{header|Smalltalk}}==
{{works with|Pharo}}
<lang smalltalk>
Transcript show: 'http://rosettacode.org' asUrl retrieveContents contentStream.
</lang>
=={{header|SNOBOL4}}==
{{works with|Macro SNOBOL4 in C}}
<lang snobol>-include "tcp.sno"
cont1
while
end
</lang>
=={{header|Swift}}==
<lang Swift>import Foundation
let request = NSURLRequest(URL: NSURL(string: "http://rosettacode.org/")!)
// Using trailing closure
NSURLConnection.sendAsynchronousRequest(request, queue: NSOperationQueue()) {res, data, err in
// data is binary
if (data != nil) {
let string = NSString(data: data!, encoding: NSUTF8StringEncoding)
Line 1,847 ⟶ 2,564:
}
}
CFRunLoopRun() // dispatch</lang>
=={{header|Tcl}}==
Note that the <code>http</code> package is distributed as part of Tcl.
<lang tcl>
package require http
set request [http::geturl "http://www.rosettacode.org"]
puts [http::data $request]
http::cleanup $request</lang>
=={{header|TSE SAL}}==
<lang TSE SAL>
DLL "<urlmon.dll>"
INTEGER PROC FNUrlGetSourceApiI(
INTEGER lpunknown,
STRING urlS : CSTRVAL,
STRING filenameS : CSTRVAL,
INTEGER dword,
INTEGER tlpbindstatuscallback
) : "URLDownloadToFileA"
END
// library: url: get: source <description></description> <version control></version control> <version>1.0.0.0.3</version> (filenamemacro=geturgso.s) [kn, ri, su, 13-04-2008 05:12:53]
PROC PROCUrlGetSource( STRING urlS, STRING filenameS )
END
PROC Main()
IF ( NOT ( AskFilename( "url: get: source: filenameS = ", s2, _DEFAULT_, _EDIT_HISTORY_ ) ) AND ( Length( s2 ) > 0 ) ) RETURN() ENDIF
PROCUrlGetSource( s1, s2 )
EditFile( s2 )
END
</lang>
=={{header|TUSCRIPT}}==
<lang tuscript>
$$ MODE TUSCRIPT
SET DATEN = REQUEST ("http://www.rosettacode.org")
*{daten}
</lang>
=={{header|UNIX Shell}}==
<lang bash>curl -s -L http://
<lang bash>lynx -source http://
<lang bash>wget -O - -q http://
<lang bash>lftp -c "cat http://
{{works with|BSD}}
<lang bash>ftp -o - http://
=={{header|VBScript}}==
Line 1,904 ⟶ 2,628:
Based on code at [http://itknowledgeexchange.techtarget.com/vbscript-systems-administrator/how-to-retrieve-html-web-pages-with-vbscript-via-the-microsoftxmlhttp-object/ How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object]
<lang vb>
Option Explicit
Const sURL="http://rosettacode.org/"
Dim oHTTP
Set oHTTP = CreateObject("Microsoft.XmlHTTP")
On Error Resume Next
oHTTP.Open "GET", sURL, False
Line 1,916 ⟶ 2,644:
Wscript.Echo "error " & Err.Number & ": " & Err.Description
End If
Set oHTTP = Nothing
</lang>
=={{header|Visual Basic}}==
Line 1,942 ⟶ 2,672:
=={{header|Visual Basic .NET}}==
<lang vbnet>
Imports System.Net
Dim client As WebClient = New WebClient()
Dim content As String = client.DownloadString("http://www.
Console.WriteLine(content)
</lang>
=={{header|zkl}}==
File htmlGet.zkl. This uses HTTP/1.0 Protocol to avoid chunked data. Or use cURL (see https example).
<lang zkl>url := ask(0,"URL: ");
host := url;
dir := "/";
port := 80;
if (n := url.find("/")) { dir = url[n,*]; host = url[0,n]; }
if (n := host.find(":")) { port = host[n+1,*]; host = host[0,n]; }
get := "GET %s HTTP/1.0\r\nHost: %s:%s\r\n\r\n".fmt(dir,host,port.toInt());
println("-->",get);
Line 1,967 ⟶ 2,696:
data := server.read(True);
println(data.text);</lang>
zkl htmlGet.zkl rosettacode.org/wiki/HTTP
{{out}}
<pre>
-->GET /wiki/HTTP HTTP/1.0
Host: rosettacode.org:80
HTTP/1.1 200 OK
Server: cloudflare-nginx
Date: Tue, 11 Mar 2014 08:31:43 GMT
Content-Type: text/html; charset=UTF-8
Connection: close
Set-Cookie:XXX
23:50:00 GMT; path=/; domain=.rosettacode.org; HttpOnly
X-Powered-By: PHP/5.3.3-7+squeeze18
X-Content-Type-Options: nosniff
Content-Language: en
ETag: W/"rosettacode:pcache:idhash:3055-0!1!0!!en!2--20140227082903"
Vary: Accept-Encoding,Cookie
Cache-Control: s-maxage=86400, must-revalidate, max-age=0
Last-Modified: Thu, 27 Feb 2014 08:29:03 GMT
Age: 86011
X-Cache: HIT from prgmr2.rosettacode.org
X-Cache-Lookup: HIT from prgmr2.rosettacode.org:80
Via: 1.0 prgmr2.rosettacode.org (squid/3.1.6)
CF-RAY: 109665b7e92a012c-SJC
<!DOCTYPE html>
<html lang="en" dir="ltr" class="client-nojs">
<head>
<title>HTTP - Rosetta Code</title>
...
</pre>
=={{header|Zoea}}==
<lang Zoea>
program: http
input: 'https://zoea.co.uk/examples/test.txt'
output: 'hello from zoea'
</lang>
=={{header|Zsh}}==
<lang zsh>
zmodload zsh/net/tcp
ztcp example.com 80
fd=$REPLY
print -l -u $fd -- 'GET /
while read -u $fd -r -e -t 1; do; :; done
ztcp -c $fd
</lang>
{{omit from|Applesoft BASIC|No TCP/IP network support on Apple II}}
{{omit from|Brainf***}}
{{omit from|Commodore BASIC|Does not have network access}}
{{omit from|Inform 7|Does not have network access.}}
{{omit from|Integer BASIC|No TCP/IP network support on Apple II}}
{{omit from|Locomotive Basic|Does not have network access.}}
{{omit from|Lotus 123 Macro Scripting}}
{{omit from|M4}}
{{omit from|Maxima}}
{{omit from|ML/I}}
{{omit from|Openscad}}
{{omit from|PARI/GP}}
{{omit from|PostScript}}
{{omit from|Retro|Does not have network access.}}
{{omit from|SQL PL|Does not have network access}}
{{omit from|TI-83 BASIC|Does not have network access.}}
{{omit from|TI-89 BASIC|Does not have network access.}}
{{omit from|Unlambda|Does not have network access.}}
{{omit from|Yorick|Does not have network access.}}
{{omit from|ZX Spectrum Basic|Does not have network access.}}
|