HTTP

From Rosetta Code
Revision as of 12:35, 7 November 2020 by Thundergnat (talk | contribs) (Undo revision 315433 by WdeCvfYlmB (talk))
Task
HTTP
You are encouraged to solve this task according to the task description, using any language you may know.
Task

Access and print a URL's content (the located resource) to the console.

There is a separate task for HTTPS Requests.

8th

<lang 8th>"http://www.w3.org/Home.html" net:get drop >s .</lang>

ABAP

<lang ABAP>report z_http. cl_http_client => create_by_url(

   exporting
       url = `http://www.w3.org/Home.html`
   importing
       client = data(http_client)
   exceptions
       argument_not_found = 1
       plugin_not_active = 2
       internal_error = 3
       others = 4

). if sy-subrc <> 0.

   data(error_message) = switch string(
       sy-subrc
       when 1 then `argument_not_found`
       when 2 then `plugin_not_active`
       when 3 then `internal_error`
       when 4 then `other error`
   ).
   write error_message.
   exit.

endif. data(rest_http_client) = cast if_rest_client(new cl_rest_http_client(http_client)). rest_http_client -> get(). data(response_string) = rest_http_client -> get_response_entity() -> get_string_data(). split response_string at cl_abap_char_utilities => newline into table data(output_table). loop at output_table assigning field-symbol(<output_line>).

   write / <output_line>.

endloop.</lang>

ActionScript

<lang actionscript>package {

   import flash.display.Sprite;
   import flash.events.Event;
   import flash.net.*;
   public class RequestExample extends Sprite {
       public function RequestExample() {
           var loader:URLLoader = new URLLoader();
           loader.addEventListener(Event.COMPLETE, loadComplete);
           loader.load(new URLRequest("http://www.w3.org/Home.html"));
       }
       private function loadComplete(evt:Event):void {
           trace(evt.target.data);
       }
   }

}</lang>

Ada

Library: AWS

<lang ada>with Ada.Text_IO; use Ada.Text_IO; with AWS.Client; with AWS.Response; procedure HTTP_Request is begin

  Put_Line (AWS.Response.Message_Body (AWS.Client.Get (URL => "http://www.w3.org/Home.html")));

end HTTP_Request;</lang>

ALGOL 68

Works with: ALGOL 68 version Revision 1 - however grep in string, http content and str error are from a non-standard library
Works with: ALGOL 68G version Any - tested with release 1.18.0-9h.tiny

<lang algol68>STRING domain="www.w3.org"; STRING page="Home.html"; STRING re success="^HTTP/[0-9.]* 200"; STRING re result description="^HTTP/[0-9.]* [0-9]+ [a-zA-Z ]*"; STRING re doctype ="\s\s<!DOCTYPE html PUBLIC ""[^>]+"">\s+"; PROC html page = (REF STRING page) BOOL: (

   BOOL out=grep in string(re success, page, NIL, NIL) = 0;
   IF
       INT start, end;
       grep in string(re result description, page, start, end) = 0
   THEN
       page := page[end+1:];
       IF
           grep in string(re doctype, page, start, end) = 0
       THEN
           page:=page[start+2:]
       ELSE
           print ("unknown format retrieving page")
       FI
   ELSE
       print ("unknown error retrieving page")
   FI;
   out

); IF

   STRING reply;
   INT rc = http content (reply, domain, "http://"+domain+"/"+page, 0);
   rc = 0 AND html page (reply)

THEN

   print (reply)

ELSE

   print (strerror (rc))

FI </lang>

Arturo

<lang arturo>print [download "http://www.w3.org/Home.html"]</lang>

AutoHotkey

<lang AutoHotkey>UrlDownloadToFile, http://www.w3.org/Home.html, url.html Run, cmd /k type url.html</lang>

AWK

Works with: gawk

<lang awk>BEGIN {

    # Fetch http://www.w3.org/Home.html over a raw TCP socket and
    # print the full response (headers + body).
    site = "www.w3.org"
    path = "/Home.html"
    server = "/inet/tcp/0/" site "/80"
    # Request only the path; the original appended an unrelated page
    # name to it, yielding the bogus URL "/Home.htmlRosetta_Code".
    print "GET " path " HTTP/1.0" |& server
    print "Host: " site |& server
    print "\r\n\r\n" |& server
    # Accumulate the response line by line, then emit it in one go.
    while ( (server |& getline fish) > 0 ) {
        if ( ++scale == 1 )
            ship = fish
        else
            ship = ship "\n" fish
    }
    close(server)
    print ship

}</lang>

BaCon

<lang qbasic> website$ = "www.w3.org" OPEN website$ & ":80" FOR NETWORK AS mynet SEND "GET /Home.html HTTP/1.1\r\nHost: " & website$ & "\r\n\r\n" TO mynet REPEAT

   RECEIVE dat$ FROM mynet
   total$ = total$ & dat$

UNTIL ISFALSE(WAIT(mynet, 500)) CLOSE NETWORK mynet PRINT total$ </lang>

Batch File

<lang batch>curl.exe -s -L http://www.w3.org/Home.html</lang>

BBC BASIC

<lang bbcbasic>SYS "LoadLibrary", "URLMON.DLL" TO urlmon% SYS "GetProcAddress", urlmon%, "URLDownloadToFileA" TO URLDownloadToFile url$ = "http://www.w3.org/Home.html" file$ = @tmp$ + "rosetta.tmp" SYS URLDownloadToFile, 0, url$, file$, 0, 0 TO fail% IF fail% ERROR 100, "File download failed" OSCLI "TYPE """ + file$ + """"</lang>

Biferno

<lang Biferno>$httpExt.ExecRemote("www.w3.org/Home.html")</lang>

C

<lang C>#include <unistd.h>
#include <string.h>
#include <netdb.h>
#define BUF_SIZE 16

/* Fetch http://www.w3.org/Home.html over a plain TCP socket and
   copy the raw response (headers + body) to standard output. */
int sfd;
char buf[BUF_SIZE];
struct addrinfo hints;
struct addrinfo *rp;

int main() {
    const char *request =
        "GET /Home.html HTTP/1.1\r\nHost: www.w3.org\r\nConnection: close\r\n\r\n";
    ssize_t n;

    hints.ai_family = AF_INET;
    hints.ai_socktype = SOCK_STREAM;
    hints.ai_protocol = IPPROTO_TCP;
    getaddrinfo("www.w3.org", "80", &hints, &rp);
    sfd = socket(rp->ai_family, rp->ai_socktype, rp->ai_protocol);
    connect(sfd, rp->ai_addr, rp->ai_addrlen);
    /* Send exactly the request's length; the original passed 1024,
       reading far past the end of the string literal (undefined
       behavior) and sending garbage to the server. */
    write(sfd, request, strlen(request));
    /* Echo only the bytes actually read: the final chunk is usually
       shorter than BUF_SIZE, and read() returns -1 on error, which
       the original's "!= 0" test would loop on forever. */
    while ((n = read(sfd, buf, BUF_SIZE)) > 0) {
        write(STDOUT_FILENO, buf, n);
    }
    freeaddrinfo(rp);
    close(sfd);
    return 0;
}</lang>

C#

<lang csharp>using System; using System.Text; using System.Net; class Program {

   static void Main(string[] args) {
       WebClient wc = new WebClient();
       string content = wc.DownloadString("http://www.w3.org/Home.html");
       Console.WriteLine(content);
   }

}</lang>

C++

<lang cpp>#include <winsock2.h>

  1. include <ws2tcpip.h>
  2. include <iostream>

addrinfo * result; int bytes; char buffer[512]; addrinfo hints; SOCKET s; WSADATA wsaData; int main() {

   WSAStartup(MAKEWORD(2, 2), &wsaData);
   ZeroMemory(&hints, sizeof(hints));
   hints.ai_family = AF_UNSPEC;
   hints.ai_socktype = SOCK_STREAM;
   hints.ai_protocol = IPPROTO_TCP;
   getaddrinfo("www.w3.org", "80", &hints, &result);
   s = socket(result->ai_family, result->ai_socktype, result->ai_protocol);
   connect(s, result->ai_addr, (int) result->ai_addrlen);
   freeaddrinfo(result);
   send(s, "GET /Home.html HTTP/1.0\n\n", 16, 0);
   do {
       bytes = recv(s, buffer, 512, 0);
       if ( bytes > 0 )
           std::cout.write(buffer, bytes);
   } while (bytes > 0);
   return 0;

}</lang>

Library: U++

<lang cpp>#include <Web/Web.h> using namespace Upp; CONSOLE_APP_MAIN {

   Cout() << HttpClient("www.w3.org/Home.html").ExecuteRedirect();

}</lang>

Caché ObjectScript

USER>Set HttpRequest=##class(%Net.HttpRequest).%New()
USER>Set HttpRequest.Server="www.w3.org"
USER>Do HttpRequest.Get("/Home.html")
USER>Do HttpRequest.HttpResponse.Data.OutputToDevice()

Clojure

Using the Java API: <lang clojure>(

   defn get-http [url]
   (
       let [sc (java.util.Scanner.(.openStream (java.net.URL. url)))]
       (
           while (.hasNext sc) 
           (
               println (.nextLine sc)
           )
       )
   )

) (get-http "http://www.w3.org")</lang> Using clojure.contrib.http.agent: <lang clojure>(

   ns example
   (
       :use [clojure.contrib.http.agent :only (string http-agent)]
   )

) (println (string (http-agent "http://www.w3.org/")))</lang>

Works with: Clojure version 1.2

<lang clojure>(print (slurp "http://www.w3.org/"))</lang>

COBOL

Tested with GnuCOBOL <lang cobol>COBOL

identification division.

program-id. curl-write-callback.

   environment division.
   configuration section.
   repository.
   function all intrinsic.
   data division.
   working-storage section.
   01 real-size usage binary-long.
   01 memory-block based.
       05 memory-address usage pointer sync.
       05 memory-size usage binary-long sync.
       05 running-total usage binary-long sync.
   01 content-buffer pic x(65536) based.
   01 web-space pic x(16777216) based.
   01 left-over usage binary-long.
   
   linkage section.
   01 contents usage pointer.
   01 element-size usage binary-long.
   01 element-count usage binary-long.
   01 memory-structure usage pointer.
   
   procedure division using
       by value contents
       by value element-size
       by value element-count
       by value memory-structure
       returning real-size.
   
   set address of memory-block to memory-structure
   compute real-size = element-size * element-count
   end-compute
   compute left-over = memory-size - running-total
   end-compute
   if left-over > 0 and < real-size then
       move left-over to real-size
   end-if
   if (left-over > 0) and (real-size > 1) then
       set address of content-buffer to contents
       set address of web-space to memory-address
       move content-buffer(1:real-size) to web-space(running-total:real-size)
       add real-size to running-total
   else
       display "curl buffer sizing problem" upon syserr
   end-if
   goback.

end program curl-write-callback.

identification division.

function-id. read-url.

   environment division.
   configuration section.
   repository.
   function all intrinsic.
   
   data division.
   
   working-storage section.
   copy "gccurlsym.cpy".
   replace also ==:CALL-EXCEPTION:== by == on exception perform hard-exception ==.
   01 curl-handle usage pointer.
   01 callback-handle usage procedure-pointer.
   01 memory-block.
       05 memory-address usage pointer sync.
       05 memory-size usage binary-long sync.
       05 running-total usage binary-long sync.
   01 curl-result usage binary-long.
   01 cli pic x(7) external.
       88 helping values "-h", "-help", "help", spaces.
       88 displaying value "display".            
       88 summarizing value "summary". 
   
   linkage section.
   01 url pic x any length.
   01 buffer pic x any length.
   01 curl-status usage binary-long.
   
   procedure division using
       url buffer
       returning curl-status.
   
   if displaying or summarizing then
       display "Read: " url upon syserr
   end-if
   call "curl_global_init" using
       by value CURL_GLOBAL_ALL on exception
       display "need libcurl, link with -lcurl" upon syserr
       stop run returning 1
   end-call
   call "curl_easy_init"
       returning curl-handle :CALL-EXCEPTION:
   end-call
   if curl-handle equal NULL then
       display "no curl handle" upon syserr
       stop run returning 1
   end-if
   call "curl_easy_setopt" using
       by value curl-handle
       by value CURLOPT_URL
       by reference concatenate(trim(url trailing), x"00") :CALL-EXCEPTION:
   end-call
   call "curl_easy_setopt" using
       by value curl-handle
       by value CURLOPT_FOLLOWLOCATION
       by value 1 :CALL-EXCEPTION:
   end-call
   set callback-handle to address of entry "curl-write-callback"
   call "curl_easy_setopt" using
       by value curl-handle
       by value CURLOPT_WRITEFUNCTION
       by value callback-handle :CALL-EXCEPTION:
   end-call
   set memory-address to address of buffer
   move length(buffer) to memory-size
   move 1 to running-total
   call "curl_easy_setopt" using
       by value curl-handle
       by value CURLOPT_WRITEDATA
       by value address of memory-block :CALL-EXCEPTION:
   end-call
   call "curl_easy_setopt" using
       by value curl-handle
       by value CURLOPT_USERAGENT
       by reference concatenate("libcurl-agent/1.0", x"00") :CALL-EXCEPTION:
   end-call
   call "curl_easy_perform" using
       by value curl-handle
       returning curl-result :CALL-EXCEPTION:
   end-call
   move curl-result to curl-status
   call "curl_easy_cleanup" using
       by value curl-handle
       returning omitted :CALL-EXCEPTION:
   end-call
   goback.
   :EXCEPTION-HANDLERS:

end function read-url.

identification division.

program-id. curl-rosetta.

   environment division.
   
   configuration section.
   repository.
   function read-url function all intrinsic.
   
   data division.
   
   working-storage section.
   copy "gccurlsym.cpy".
   01 web-page pic x(16777216).
   01 curl-status usage binary-long.
   01 cli pic x(7) external.
       88 helping values "-h", "-help", "help", spaces.
       88 displaying value "display".            
       88 summarizing value "summary". 
   
   procedure division.
   
   accept cli from command-line
   if helping then
       display "./curl-rosetta [help|display|summary]" goback
   end-if
   move read-url("http://www.rosettacode.org", web-page) to curl-status
   perform check
       perform show goback.
   check.
   if curl-status not equal zero then
       display curl-status " " CURLEMSG(curl-status) upon syserr
   end-if.
   show.
   if summarizing then
       display "Length: " stored-char-length(web-page)
   end-if
   if displaying then
       display trim(web-page trailing) with no advancing
   end-if.
   REPLACE ALSO == :EXCEPTION-HANDLERS: == BY == soft-exception.
   display space upon syserr
   display "--Exception Report-- " upon syserr
   display "Time of exception:   " current-date upon syserr
   display "Module:              " module-id upon syserr
   display "Module-path:         " module-path upon syserr
   display "Module-source:       " module-source upon syserr
   display "Exception-file:      " exception-file upon syserr
   display "Exception-status:    " exception-status upon syserr
   display "Exception-location:  " exception-location upon syserr
   display "Exception-statement: " exception-statement upon syserr.
   hard-exception.
   perform soft-exception stop run returning 127.
   ==.

end program curl-rosetta.</lang> Copybook : <lang cobol>01 CURL_MAX_HTTP_HEADER CONSTANT AS 102400.

   78 CURL_GLOBAL_ALL VALUE 3.
   78 CURLOPT_FOLLOWLOCATION VALUE 52.
   78 CURLOPT_WRITEDATA VALUE 10001.
   78 CURLOPT_URL VALUE 10002.
   78 CURLOPT_USERAGENT VALUE 10018.
   78 CURLOPT_WRITEFUNCTION VALUE 20011.
   78 CURLOPT_COOKIEFILE VALUE 10031.
   78 CURLOPT_COOKIEJAR VALUE 10082.
   78 CURLOPT_COOKIELIST VALUE 10135.
   78 CURLINFO_COOKIELIST VALUE 4194332.
   78 CURLE_OK VALUE 0.
   78 CURLE_UNSUPPORTED_PROTOCOL VALUE 1.
   78 CURLE_FAILED_INIT VALUE 2.
   78 CURLE_URL_MALFORMAT VALUE 3.
   78 CURLE_OBSOLETE4 VALUE 4.
   78 CURLE_COULDNT_RESOLVE_PROXY VALUE 5.
   78 CURLE_COULDNT_RESOLVE_HOST VALUE 6.
   78 CURLE_COULDNT_CONNECT VALUE 7.
   78 CURLE_FTP_WEIRD_SERVER_REPLY VALUE 8.
   78 CURLE_REMOTE_ACCESS_DENIED VALUE 9.
   78 CURLE_OBSOLETE10 VALUE 10.
   78 CURLE_FTP_WEIRD_PASS_REPLY VALUE 11.
   78 CURLE_OBSOLETE12 VALUE 12.
   78 CURLE_FTP_WEIRD_PASV_REPLY VALUE 13.
   78 CURLE_FTP_WEIRD_227_FORMAT VALUE 14.
   78 CURLE_FTP_CANT_GET_HOST VALUE 15.
   78 CURLE_OBSOLETE16 VALUE 16.
   78 CURLE_FTP_COULDNT_SET_TYPE VALUE 17.
   78 CURLE_PARTIAL_FILE VALUE 18.
   78 CURLE_FTP_COULDNT_RETR_FILE VALUE 19.
   78 CURLE_OBSOLETE20 VALUE 20.
   78 CURLE_QUOTE_ERROR VALUE 21.
   78 CURLE_HTTP_RETURNED_ERROR VALUE 22.
   78 CURLE_WRITE_ERROR VALUE 23.
   78 CURLE_OBSOLETE24 VALUE 24.
   78 CURLE_UPLOAD_FAILED VALUE 25.
   78 CURLE_READ_ERROR VALUE 26.
   78 CURLE_OUT_OF_MEMORY VALUE 27.
   78 CURLE_OPERATION_TIMEDOUT VALUE 28.
   78 CURLE_OBSOLETE29 VALUE 29.
   78 CURLE_FTP_PORT_FAILED VALUE 30.
   78 CURLE_FTP_COULDNT_USE_REST VALUE 31.
   78 CURLE_OBSOLETE32 VALUE 32.
   78 CURLE_RANGE_ERROR VALUE 33.
   78 CURLE_HTTP_POST_ERROR VALUE 34.
   78 CURLE_SSL_CONNECT_ERROR VALUE 35.
   78 CURLE_BAD_DOWNLOAD_RESUME VALUE 36.
   78 CURLE_FILE_COULDNT_READ_FILE VALUE 37.
   78 CURLE_LDAP_CANNOT_BIND VALUE 38.
   78 CURLE_LDAP_SEARCH_FAILED VALUE 39.
   78 CURLE_OBSOLETE40 VALUE 40.
   78 CURLE_FUNCTION_NOT_FOUND VALUE 41.
   78 CURLE_ABORTED_BY_CALLBACK VALUE 42.
   78 CURLE_BAD_FUNCTION_ARGUMENT VALUE 43.
   78 CURLE_OBSOLETE44 VALUE 44.
   78 CURLE_INTERFACE_FAILED VALUE 45.
   78 CURLE_OBSOLETE46 VALUE 46.
   78 CURLE_TOO_MANY_REDIRECTS VALUE 47.
   78 CURLE_UNKNOWN_TELNET_OPTION VALUE 48.
   78 CURLE_TELNET_OPTION_SYNTAX VALUE 49.
   78 CURLE_OBSOLETE50 VALUE 50.
   78 CURLE_PEER_FAILED_VERIFICATION VALUE 51.
   78 CURLE_GOT_NOTHING VALUE 52.
   78 CURLE_SSL_ENGINE_NOTFOUND VALUE 53.
   78 CURLE_SSL_ENGINE_SETFAILED VALUE 54.
   78 CURLE_SEND_ERROR VALUE 55.
   78 CURLE_RECV_ERROR VALUE 56.
   78 CURLE_OBSOLETE57 VALUE 57.
   78 CURLE_SSL_CERTPROBLEM VALUE 58.
   78 CURLE_SSL_CIPHER VALUE 59.
   78 CURLE_SSL_CACERT VALUE 60.
   78 CURLE_BAD_CONTENT_ENCODING VALUE 61.
   78 CURLE_LDAP_INVALID_URL VALUE 62.
   78 CURLE_FILESIZE_EXCEEDED VALUE 63.
   78 CURLE_USE_SSL_FAILED VALUE 64.
   78 CURLE_SEND_FAIL_REWIND VALUE 65.
   78 CURLE_SSL_ENGINE_INITFAILED VALUE 66.
   78 CURLE_LOGIN_DENIED VALUE 67.
   78 CURLE_TFTP_NOTFOUND VALUE 68.
   78 CURLE_TFTP_PERM VALUE 69.
   78 CURLE_REMOTE_DISK_FULL VALUE 70.
   78 CURLE_TFTP_ILLEGAL VALUE 71.
   78 CURLE_TFTP_UNKNOWNID VALUE 72.
   78 CURLE_REMOTE_FILE_EXISTS VALUE 73.
   78 CURLE_TFTP_NOSUCHUSER VALUE 74.
   78 CURLE_CONV_FAILED VALUE 75.
   78 CURLE_CONV_REQD VALUE 76.
   78 CURLE_SSL_CACERT_BADFILE VALUE 77.
   78 CURLE_REMOTE_FILE_NOT_FOUND VALUE 78.
   78 CURLE_SSH VALUE 79.
   78 CURLE_SSL_SHUTDOWN_FAILED VALUE 80.
   78 CURLE_AGAIN VALUE 81.

01 LIBCURL_ERRORS.

   02 CURLEVALUES.
   03 FILLER PIC X(30) VALUE "CURLE_UNSUPPORTED_PROTOCOL    ".
   03 FILLER PIC X(30) VALUE "CURLE_FAILED_INIT             ".
   03 FILLER PIC X(30) VALUE "CURLE_URL_MALFORMAT           ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE4               ".
   03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_PROXY   ".
   03 FILLER PIC X(30) VALUE "CURLE_COULDNT_RESOLVE_HOST    ".
   03 FILLER PIC X(30) VALUE "CURLE_COULDNT_CONNECT         ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_SERVER_REPLY  ".
   03 FILLER PIC X(30) VALUE "CURLE_REMOTE_ACCESS_DENIED    ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE10              ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASS_REPLY    ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE12              ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_PASV_REPLY    ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_WEIRD_227_FORMAT    ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_CANT_GET_HOST       ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE16              ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_SET_TYPE    ".
   03 FILLER PIC X(30) VALUE "CURLE_PARTIAL_FILE            ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_RETR_FILE   ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE20              ".
   03 FILLER PIC X(30) VALUE "CURLE_QUOTE_ERROR             ".
   03 FILLER PIC X(30) VALUE "CURLE_HTTP_RETURNED_ERROR     ".
   03 FILLER PIC X(30) VALUE "CURLE_WRITE_ERROR             ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE24              ".
   03 FILLER PIC X(30) VALUE "CURLE_UPLOAD_FAILED           ".
   03 FILLER PIC X(30) VALUE "CURLE_READ_ERROR              ".
   03 FILLER PIC X(30) VALUE "CURLE_OUT_OF_MEMORY           ".
   03 FILLER PIC X(30) VALUE "CURLE_OPERATION_TIMEDOUT      ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE29              ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_PORT_FAILED         ".
   03 FILLER PIC X(30) VALUE "CURLE_FTP_COULDNT_USE_REST    ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE32              ".
   03 FILLER PIC X(30) VALUE "CURLE_RANGE_ERROR             ".
   03 FILLER PIC X(30) VALUE "CURLE_HTTP_POST_ERROR         ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_CONNECT_ERROR       ".
   03 FILLER PIC X(30) VALUE "CURLE_BAD_DOWNLOAD_RESUME     ".
   03 FILLER PIC X(30) VALUE "CURLE_FILE_COULDNT_READ_FILE  ".
   03 FILLER PIC X(30) VALUE "CURLE_LDAP_CANNOT_BIND        ".
   03 FILLER PIC X(30) VALUE "CURLE_LDAP_SEARCH_FAILED      ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE40              ".
   03 FILLER PIC X(30) VALUE "CURLE_FUNCTION_NOT_FOUND      ".
   03 FILLER PIC X(30) VALUE "CURLE_ABORTED_BY_CALLBACK     ".
   03 FILLER PIC X(30) VALUE "CURLE_BAD_FUNCTION_ARGUMENT   ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE44              ".
   03 FILLER PIC X(30) VALUE "CURLE_INTERFACE_FAILED        ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE46              ".
   03 FILLER PIC X(30) VALUE "CURLE_TOO_MANY_REDIRECTS      ".
   03 FILLER PIC X(30) VALUE "CURLE_UNKNOWN_TELNET_OPTION   ".
   03 FILLER PIC X(30) VALUE "CURLE_TELNET_OPTION_SYNTAX    ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE50              ".
   03 FILLER PIC X(30) VALUE "CURLE_PEER_FAILED_VERIFICATION".
   03 FILLER PIC X(30) VALUE "CURLE_GOT_NOTHING             ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_NOTFOUND     ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_SETFAILED    ".
   03 FILLER PIC X(30) VALUE "CURLE_SEND_ERROR              ".
   03 FILLER PIC X(30) VALUE "CURLE_RECV_ERROR              ".
   03 FILLER PIC X(30) VALUE "CURLE_OBSOLETE57              ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_CERTPROBLEM         ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_CIPHER              ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT              ".
   03 FILLER PIC X(30) VALUE "CURLE_BAD_CONTENT_ENCODING    ".
   03 FILLER PIC X(30) VALUE "CURLE_LDAP_INVALID_URL        ".
   03 FILLER PIC X(30) VALUE "CURLE_FILESIZE_EXCEEDED       ".
   03 FILLER PIC X(30) VALUE "CURLE_USE_SSL_FAILED          ".
   03 FILLER PIC X(30) VALUE "CURLE_SEND_FAIL_REWIND        ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_ENGINE_INITFAILED   ".
   03 FILLER PIC X(30) VALUE "CURLE_LOGIN_DENIED            ".
   03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOTFOUND           ".
   03 FILLER PIC X(30) VALUE "CURLE_TFTP_PERM               ".
   03 FILLER PIC X(30) VALUE "CURLE_REMOTE_DISK_FULL        ".
   03 FILLER PIC X(30) VALUE "CURLE_TFTP_ILLEGAL            ".
   03 FILLER PIC X(30) VALUE "CURLE_TFTP_UNKNOWNID          ".
   03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_EXISTS      ".
   03 FILLER PIC X(30) VALUE "CURLE_TFTP_NOSUCHUSER         ".
   03 FILLER PIC X(30) VALUE "CURLE_CONV_FAILED             ".
   03 FILLER PIC X(30) VALUE "CURLE_CONV_REQD               ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_CACERT_BADFILE      ".
   03 FILLER PIC X(30) VALUE "CURLE_REMOTE_FILE_NOT_FOUND   ".
   03 FILLER PIC X(30) VALUE "CURLE_SSH                     ".
   03 FILLER PIC X(30) VALUE "CURLE_SSL_SHUTDOWN_FAILED     ".
   03 FILLER PIC X(30) VALUE "CURLE_AGAIN                   ".

01 FILLER REDEFINES LIBCURL_ERRORS.

   02 CURLEMSG OCCURS 81 TIMES PIC X(30).</lang>

ColdFusion

<lang coldfusion><cfhttp url="http://www.w3.org/Home.html" result="result"> <cfoutput>#result.FileContent#</cfoutput></lang>

Common Lisp

CLISP provides an extension function to read http sources. Other implementations may do this differently.

Works with: CLISP

<lang lisp>(

   defun wget-clisp (url)
   (
       ext:with-http-input (stream url)
       (
           loop for line = (read-line stream nil nil)
           while line do (format t "~a~%" line)
       )
   )

)</lang>

Library: DRAKMA

First grabbing the entire body as a string, and then by pulling from a stream (as in the CLISP example). <lang lisp>(

   defun wget-drakma-string (url &optional (out *standard-output*))
   "Grab the body as a string, and write it to out."
   (write-string (drakma:http-request url) out)

) (

   defun wget-drakma-stream (url &optional (out *standard-output*))
   "Grab the body as a stream, and write it to out."
   (
       loop with body = (drakma:http-request url :want-stream t)
       for line = (read-line body nil nil)
       while line do (write-line line out)
       finally (close body)
   )

)</lang>

Crystal

<lang crystal>require "http/client" HTTP::Client.get("http://www.w3.org/Home.html")</lang>

D

Library: phobos

<lang D>void main() {

   import std.stdio, std.net.curl;
   writeln(get("http://www.w3.org/Home.html"));

}</lang>

Library: tango

<lang D>import tango.io.Console; import tango.net.http.HttpGet; void main() {

   Cout.stream.copy((new HttpGet("http://www.w3.org/Home.html")).open);

}</lang> Or more operating directly on the socket: <lang D>import tango.io.Console; import tango.net.InternetAddress; import tango.net.device.Socket; void main() {

   auto site = new Socket;
   site.connect (new InternetAddress("www.w3.org",80)).write ("GET /Home.html HTTP/1.0\n\n");
   Cout.stream.copy (site);

}</lang>

Dart

Using the stand-alone VM: <lang d>import 'dart:io'; void main(){

   var url = 'http://www.w3.org/Home.html';
   var client = new HttpClient();
   client.getUrl(Uri.parse(url)).then(
       (HttpClientRequest request) => request.close()
   ).then(
       (HttpClientResponse response) => response.pipe(stdout)
   );

}</lang>

Delphi

Simple example using the free Synapse TCP/IP library [1] <lang Delphi>program HTTP; {$APPTYPE CONSOLE} {$DEFINE DEBUG} uses

   Classes,
   httpsend;

var

   Response: TStrings;
   HTTPObj: THTTPSend;

begin

   HTTPObj := THTTPSend.Create;
   try
       Response := TStringList.Create;
       try
           if HTTPObj.HTTPMethod('GET','http://www.w3.org/Home.html') then
               begin
                   Response.LoadFromStream(HTTPObj.Document);
                   Writeln(Response.Text);
               end
           else
               Writeln('Error retrieving data');
       finally
           Response.Free;
       end;
   finally
       HTTPObj.Free;
   end;
   Readln;

end.</lang> Using Indy: <lang Delphi>program ShowHTTP; {$APPTYPE CONSOLE} uses IdHttp; var

   s: string;
   lHTTP: TIdHTTP;

begin

   lHTTP := TIdHTTP.Create(nil);
   try
       lHTTP.HandleRedirects := True;
       s := lHTTP.Get('http://www.w3.org/Home.html');
       Writeln(s);
   finally
       lHTTP.Free;
   end;

end.</lang>

Dragon

<lang dragon>select "http" select "std" http("http://www.w3.org/Home.html", ::echo)</lang>

E

<lang e>when (def t := <http://www.w3.org/Home.html> <- getText()) -> {

   println(t)

}</lang>

EchoLisp

file->string usage: the server must allow cross-domain access, or a browser add-on like cors-everywhere must be installed to bypass cross-domain checking. <lang scheme>;; asynchronous call back definition (define (success name text) (writeln 'Loaded name) (writeln text))

(file->string success "http://www.w3.org/Home.html")</lang>

Emacs Lisp

url.el can download HTTP. url-retrieve-synchronously returns a buffer containing headers and body. Caller kills the buffer when no longer required. <lang Lisp>(with-current-buffer

   (url-retrieve-synchronously "http://www.w3.org/Home.html")
   (goto-char (point-min))
   ;; skip headers
   (search-forward "\n\n" nil t)
   (prin1 (buffer-substring (point) (point-max)))
   (kill-buffer (current-buffer))

)</lang>

Erlang

Synchronous

<lang erlang>-module(main). -export([main/1]).

main([Url|[]]) ->

  inets:start(),
  case http:request(Url) of
      {ok, {_V, _H, Body}} -> io:fwrite("~p~n",[Body]);
      {error, Res} -> io:fwrite("~p~n", [Res])
  end.</lang>

Asynchronous

<lang erlang>-module(main). -export([main/1]). main([Url|[]]) ->

  inets:start(),
  http:request(get, {Url, [] }, [], [{sync, false}]),
  receive
      {http, {_ReqId, Res}} -> io:fwrite("~p~n",[Res]);
      _Any -> io:fwrite("Error: ~p~n",[_Any])
      after 10000 -> io:fwrite("Timed out.~n",[])
  end.</lang>

Using it <lang erlang>|escript ./req.erl http://www.w3.org/Home.html</lang>

F#

In F# we can just use the .NET library to do this so its the same as the C# example.

<lang fsharp> let wget (url : string) =

   use c = new System.Net.WebClient()
   c.DownloadString(url)

printfn "%s" (wget "http://www.rosettacode.org/") </lang>

However unlike C#, F# can use an asynchronous workflow to avoid blocking any threads while waiting for a response from the server. To asynchronously download three url's at once...

<lang fsharp> open System.Net open System.IO

let wgetAsync url =

   async { let request = WebRequest.Create (url:string)
           use! response = request.AsyncGetResponse()
           use responseStream = response.GetResponseStream()
           use reader = new StreamReader(responseStream)
           return reader.ReadToEnd() }

let urls = ["http://www.rosettacode.org/"; "http://www.yahoo.com/"; "http://www.google.com/"] let content = urls

             |> List.map wgetAsync
             |> Async.Parallel
             |> Async.RunSynchronously</lang>

Factor

<lang factor>USE: http.client "http://www.w3.org/Home.html" http-get nip print</lang>

Forth

Works with: GNU Forth version 0.7.0

This works at the socket level, returning both the HTTP headers and page contents. <lang forth>include unix/socket.fs s"www.w3.org" 80 open-socket dup s\" GET /Home.html HTTP/1.0\n\n" rot write-socket dup pad 8092 read-socket type close-socket</lang>

friendly interactive shell

Translation of: UNIX Shell

<lang fishshell>curl --silent --location http://www.w3.org/Home.html</lang> <lang fishshell>lynx -source http://www.w3.org/Home.html</lang> <lang fishshell>wget --output-document=- --quiet http://www.w3.org/Home.html</lang> <lang fishshell>lftp -c "cat http://www.w3.org/Home.html"</lang>

Works with: BSD

<lang fishshell>ftp -o - http://rosettacode.org ^ /dev/null</lang>

Frink

Frink's read[URL] function works with any URL type supported by your Java Virtual Machine, and returns the results as a single string. <lang frink>print[read["http://www.w3.org/Home.html"]]</lang>

Gastona

<lang gastona>#listix#

   <main>
       LOOP, TEXT FILE, http://www.w3.org/Home.html, BODY, @<value></lang>

GML

Works with: Game Maker Studio

Any Event <lang gml>get = http_get("http://www.w3.org/Home.html");</lang> HTTP Event <lang gml>if (ds_map_find_value(async_load,"id") == get) {

   show_message_async(ds_map_find_value(async_load,"result"));

}</lang>

Go

<lang go>package main import (

   "io"
   "net/http"
   "os"

) func main() {

   resp, _ := http.Get("http://www.w3.org/Home.html")
   io.Copy(os.Stdout, resp.Body)

}</lang>

Groovy

<lang groovy>new URL("http://www.w3.org/Home.html").eachLine {

   println it

}</lang>

GUISS

It would be more appropriate to paste to notepad: <lang guiss>Start,Programs,Applications,Mozilla Firefox,Inputbox:address bar>http://www.w3.org/Home.html,Button:Go,Click:Area:browser window,Type:[Control A],[Control C],Start,Programs,Accessories,Notepad,Menu:Edit,Paste</lang>

Halon

<lang halon>echo http("http://www.w3.org/Home.html");</lang>

Haskell

Using

Library: HTTP

from HackageDB

<lang haskell>import Network.Browser import Network.HTTP import Network.URI main = do

   rsp <- Network.Browser.browse $ do
       setAllowRedirects True
       setOutHandler $ const (return ())
       request $ getRequest "http://www.w3.org/Home.html"
   putStrLn $ rspBody $ snd rsp</lang>

Icon and Unicon

Icon

<lang icon>link cfunc procedure main(arglist)

   get(arglist[1])

end procedure get(url)

   local f, host, port, path
   url ? {
       ="http://" | ="HTTP://"
       host := tab(upto(':/') | 0)
       if not (=":" & (port := integer(tab(upto('/'))))) then
           port := 80
       if pos(0) then
           path := "/" else path := tab(0)
   }
   write(host)
   write(path)
   f := tconnect(host, port) | stop("Unable to connect")
   writes(f, "GET ", path | "/" ," HTTP/1.0\r\n\r\n")
   while write(read(f))

end</lang> Using it <lang icon>|icon req.icn http://www.w3.org/Home.html</lang>

Unicon

Unicon provides improved socket and messaging support without the need for the external function cfunc: <lang unicon>procedure main(arglist) m := open(arglist[1],"m") while write(read(m)) end</lang>

J

Using gethttp from Web Scraping <lang j>require'web/gethttp' gethttp 'http://www.w3.org/Home.html'</lang>

Java

<lang java5>import java.util.Scanner;
import java.net.URL;

public class Main {
    public static void main(String[] args) throws Exception {
        Scanner sc = new Scanner(new URL("http://www.w3.org/Home.html").openStream());
        // hasNextLine() pairs with nextLine(); hasNext() looks for a token
        // and would silently drop trailing whitespace-only lines.
        while (sc.hasNextLine())
            System.out.println(sc.nextLine());
    }
}</lang>

<lang java5>import org.apache.commons.io.IOUtils; import java.net.URL; public class Main {

   public static void main(String[] args) throws Exception {
       IOUtils.copy(new URL("http://www.w3.org/Home.html").openStream(),System.out);    	    	    		    
   }

}</lang>

JavaScript

Browser

<lang JavaScript>fetch("http://www.w3.org/Home.html").then(function (response) {

   return response.text();

}).then(function (body) {

   return body;

});</lang>

Node.js

<lang javascript>require("http").get("http://www.w3.org/Home.html", function (resp) {

   let body = "";
   resp.on("body", function (chunk) {
       body += chunk;
   });
   resp.on("end", function () {
       console.debug(body);
   });

}).on("error", function (err) {

   console.error(err.message);

});</lang>

Jsish

Based on Jsi_Wget that ships with Jsish. <lang javascript>#!/usr/bin/env jsish function httpGet(fileargs:array|string, conf:object=void) {

   var options = {
       headers: [],
       nowait: false,
       onDone: null,
       wsdebug: 0
   };
   var self = {
       address: ,
       done: false,
       path: ,
       port: -1,
       post: ,
       scheme: 'http',
       protocol: 'get',
       url: null,
       response: 
   };
   parseOpts(self, options, conf);
   if (self.port === -1)
       self.port = 80;
   function WsRecv(ws:userobj, id:number, str:string) {
       LogDebug("LEN: "+str.length);
       LogTrace("DATA", str);
       self.response += str;
   }
   function WsClose(ws:userobj|null, id:number) {
       LogDebug("CLOSE");
       self.done = true;
       if (self.onDone)
           self.onDone(id);
   }
   function main() {
       if (self.Debug)
           debugger;
       if (typeof(fileargs) === 'string')
           fileargs = [fileargs];
       if (!fileargs || fileargs.length !== 1)
           throw("expected a url arg");
       self.url = fileargs[0];
       var m = self.url.match(/^([a-zA-Z]+):\/\/([^\/]*+)(.*)$/);
       if (!m)
           throw('invalid url: '+self.url);
       self.scheme = m[1];
       self.address = m[2];
       self.path = m[3];
       var as = self.address.split(':');
       if (as.length==2) {
           self.port = parseInt(as[1]);
           self.address = as[0];
       } else  if (as.length != 1)
           throw('bad port in address: '+self.address);
       if (self.path==)
           self.path = '/index.html';
       if (self.post.length)
           self.protocol = 'post';
       var wsopts = {
           client: true,
           onRecv: WsRecv,
           onClose: WsClose,
           debug: self.wsdebug,
           rootdir: self.path,
           port: self.port,
           address: self.address,
           protocol: self.protocol,
           clientHost: self.address
       };
       if (self.post.length)
           wsopts.post = self.post;
       if (self.headers.length)
           wsopts.headers = self.headers;
       if (self.scheme === 'https') {
           if (!Interp.conf('hasOpenSSL'))
               puts('SSL is not compiled in: falling back to http:');
           else {
               if (self.port === 80)
                   wsopts.port = 441;
               wsopts.use_ssl = true;
           }
       }
       LogDebug("Starting:", conf, wsopts);
       self.ws = new WebSocket( wsopts );
       if (self.nowait)
           return self;
       while (!self.done) {
           update(200);
           LogTrace("UPDATE");
       }
       delete self.ws;
       return self.response;
   }
   return main();

} provide(httpGet, "0.60"); if (isMain())

   runModule(httpGet);</lang>

Julia

<lang Julia>readurl(url) = open(readlines, download(url)) readurl("http://www.w3.org/Home.html")</lang>

Kotlin

<lang scala>import java.net.URL import java.io.InputStreamReader import java.util.Scanner fun main(args: Array<String>) {

   val url = URL("http://www.w3.org/Home.html")
   val isr = InputStreamReader(url.openStream())
   val sc = Scanner(isr)
   while (sc.hasNextLine())
       println(sc.nextLine())
   sc.close()

}</lang>

Lasso

include_url is a wrapper for Lasso's curl datatype; however, the task can be achieved in several ways. Using the include_url wrapper. <lang Lasso>include_url('http://www.w3.org/Home.html')</lang> One line curl. <lang Lasso>curl('http://www.w3.org/Home.html')->result->asString</lang> Using curl for more complex operations and feedback. <lang Lasso>local(x = curl('http://www.w3.org/Home.html')) local(y = #x->result)

#y->asString</lang>

LFE

Synchronous

<lang lisp>(: inets start) (

   case (: httpc request '"http://www.w3.org/Home.html") (
       (tuple 'ok result)
       (: io format '"Result: ~p" (list result))
   ) (
       (tuple 'error reason) 
       (: io format '"Error: ~p~n" (list reason))
   )

)</lang>

Asynchronous

<lang lisp>(: inets start) (

   let* ( 
       (method 'get)
       (url '"http://www.w3.org/Home.html")
       (headers ())
       (request-data (tuple url headers))
       (http-options ())
       (request-options (list (tuple 'sync 'false)))
   )
   (: httpc request method request-data http-options request-options)
   (
       receive (
           (tuple 'http (tuple request-id (tuple 'error reason)))
           (: io format '"Error: ~p~n" (list reason))
       )
       (
           (tuple 'http (tuple request-id result))
           (: io format '"Result: ~p~n" (list result))
       )
   )

))</lang>

Liberty BASIC

Uses a dll call and a timer to allow time to receive the file. <lang lb> result = DownloadToFile( "http://rosettacode.org/wiki/Main_Page", "in.html") timer 2000, [on] wait [on] timer 0 if result <> 0 then print "Error downloading."

end

Function DownloadToFile( urlfile$, localfile$)

   open "URLmon" for dll as #url
   calldll #url, "URLDownloadToFileA",_
   0 as long,_         'null
   urlfile$ as ptr,_   'url to download
   localfile$ as ptr,_ 'save file name
   0 as long,_         'reserved, must be 0
   0 as long,_         'callback address, can be 0
   DownloadToFile as ulong  '0=success
   close #url

end function </lang>

Lingo

HTTP requests based on Director's native HTTP facilities - i.e. without using a 3rd party plugin ("Xtra") - are asynchronous. A simple implementation of a HTTP GET request might look like this: Parent script "SimpleHttpGet": <lang lingo>property _netID property _cbHandler property _cbTarget


-- Simple HTTP GET request -- @param {string} url -- @param {symbol} cbHandler -- @param {object} [cbTarget=_movie]


on new (me, url, cbHandler, cbTarget)

   if voidP(cbTarget) then
       cbTarget = _movie
   me._netID = getNetText(url)
   me._cbHandler = cbHandler
   me._cbTarget = cbTarget
   _movie.actorList.add(me)
   return me

end


-- @callback


on stepFrame (me)

   if netDone(me._netID) then
       res = netTextResult(me._netID)
       err = netError(me._netID)
       _movie.actorList.deleteOne(me)
       call(me._cbHandler, me._cbTarget, res, err)
   end if

end</lang> In some movie script: <lang lingo>---------------------------------------- --


on getAdobeHomePage ()

 script("SimpleHttpGet").new("http://www.w3.org/Home.html", #printResult)

end


-- @callback


on printResult (res, err)

   if err="OK" then
       put res
   else
       put "Network Error:" && err
   end if

end</lang> Executed in the "Message Window" (=Director's interactive Lingo console): <lang lingo>getAdobeHomePage() -- "<!doctype html> ...</lang>

LiveCode

Without a callback handler the get URL method will block until complete <lang LiveCode>put true into libURLFollowHttpRedirects get URL "http://www.w3.org/Home.html" put it</lang> Non-blocking version <lang LiveCode>on myUrlDownloadFinished

  answer "Download Complete" with "Okay"

end myUrlDownloadFinished command getWebResource

   load URL "http://www.w3.org/Home.html" with message "myUrlDownloadFinished"

end getWebResource</lang>

LSL

To test it yourself; rez a box on the ground, and add the following as a New Script. <lang LSL>string sURL = "http://www.w3.org/Home.html"; key kHttpRequestId; default {

   state_entry() {
       kHttpRequestId = llHTTPRequest(sURL, [], "");
   }
   http_response(key kRequestId, integer iStatus, list lMetaData, string sBody) {
       if (kRequestId == kHttpRequestId) {
           llOwnerSay("Status="+(string)iStatus);
           integer x = 0;
           for (x=0 ; x<llGetListLength(lMetaData) ; x++) {
               llOwnerSay("llList2String(lMetaData, "+(string)x+") = "+llList2String(lMetaData, x));
           }
           list lBody = llParseString2List(sBody, ["\n"], []);
           for (x=0 ; x<llGetListLength(lBody) ; x++) {
               llOwnerSay("llList2String(lBody, "+(string)x+") = "+llList2String(lBody, x));
           }
       }
   }

}</lang>

Lua

Library: LuaSocket

<lang Lua>local http = require("socket.http") local url = require("socket.url") local page = http.request('http://www.w3.org/Home.html') print(page)</lang>

M2000 Interpreter

We use Async read from Microsoft.XMLHTTP So we use Threads (duration is in millisecond) M2000 can use COM objects, using Declare, Method and With statements. Using With statement we can make objects properties like ReadyState as variables (some of them as read only) <lang M2000 Interpreter>Module CheckIt {

   Declare xml "Microsoft.XMLHTTP"
   const testUrl$ = "http://www.w3.org/Home.html"
   With xml, "readyState" as ReadyState
   Method xml "Open", "Get", testUrl$, True
   Method xml "send"
   k = 0
   Thread {
       k++
   } as TimeOut interval 100
   Task.Main 100 {
       Print ReadyState
       If ReadyState=4 then
           exit
       if k > 20 then
           exit
       if mouse then
           exit
   }
   if ReadyState = 4 then {
       With  xml, "responseText" AS AA$
       Document BB$=AA$
       Report BB$
   }
   Declare xml Nothing

} CheckIt</lang>

Maple

In Maple 18 or later: <lang Maple>content := URL:-Get("http://www.w3.org/Home.html");</lang> In Maple 17 or earlier: <lang Maple>content := HTTP:-Get("http://www.w3.org/Home.html");</lang>

Mathematica / Wolfram Language

<lang Mathematica>Print[Import["http://www.w3.org/Home.html", "Source"]]</lang>

MATLAB / Octave

<lang MATLAB>>>urlread('http://www.w3.org/Home.html')</lang>

MIRC Scripting Language

See HTTP/MIRC Scripting Language

Nanoquery

<lang nanoquery>import http import url url = new(URL, "http://www.w3.org/Home.html") client = new(HTTPClient, url.getHost()) client.connect() response = client.get(url.getFile()) println response.get("body")</lang>

Nemerle

<lang Nemerle>using System; using System.Console; using System.Net; using System.IO; module HTTP {

   Main() : void {
       def wc = WebClient();
       def myStream = wc.OpenRead("http://www.w3.org/Home.html");
       def sr = StreamReader(myStream);
       WriteLine(sr.ReadToEnd());
       myStream.Close()
   }

}</lang>

NetRexx

Translation of: Java

An implementation of the Java version shown above; demonstrating NetRexx's ability to exploit the rich Java SDK. <lang NetRexx>options replace format comments java crossref symbols binary import java.util.Scanner import java.net.URL do

   rosettaUrl = "http://www.w3.org/Home.html"
   sc = Scanner(URL(rosettaUrl).openStream)
   loop while sc.hasNext
       say sc.nextLine
   end

catch ex = Exception

   ex.printStackTrace

end return</lang>

NewLisp

<lang NewLisp>(get-url "http://www.w3.org/Home.html")</lang>

Nim

<lang nim>import httpclient

# Fetch the same URL used by every other example on this page.
var client = newHttpClient()
echo client.getContent("http://www.w3.org/Home.html")</lang>

Objeck

<lang objeck>use HTTP; use Collection; class HttpTest {

   function : Main(args : String[]) ~ Nil {
       lines := HttpClient->New()->Get("http://www.w3.org/Home.html");
       each(i : lines) {
           lines->Get(i)->As(String)->PrintLine();
       };
   }

}</lang>

Objective-C

<lang objc>#import <Foundation/Foundation.h> int main (int argc, const char * argv[]) {

   @autoreleasepool {
       NSError *error;
       NSURLResponse *response;
       NSData *data = [NSURLConnection sendSynchronousRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://www.w3.org/Home.html"]]
       returningResponse:&response error:&error];
       NSLog(@"%@", [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding]);
   }
   return 0;

}</lang>

OCaml

<lang ocaml>let () =

   let url = "http://www.w3.org/Home.html" in
   let _,_, page_content = make_request ~url ~kind:GET () in
   print_endline page_content;
</lang>

The source code of the function make_request is here.

ooRexx

Needs bsf4oorexx from sourceforge. <lang oorexx>url=.bsf~new("java.net.URL", "http://www.w3.org/Home.html") sc =.bsf~new("java.util.Scanner",url~openStream) loop while sc~hasNext

   say sc~nextLine

End

requires BSF.CLS -- get Java camouflaging support</lang>

Oz

When creating a file object, it is possible to specify an URL instead of a filename: <lang oz>declare

   fun {GetPage Url}
       F = {New Open.file init(url:Url)}
       Contents = {F read(list:$ size:all)}
   in
       {F close}
       Contents
   end

in

   {System.showInfo {GetPage "http://www.w3.org/Home.html"}}</lang>
Library: OzHttpClient

If you need more fine-grained control of the request, you could use a custom library: <lang oz>declare

   [HTTPClient] = {Module.link ['x-ozlib://mesaros/net/HTTPClient.ozf']}
   fun {GetPage Url}
       Client = {New HTTPClient.urlGET init(inPrms(toFile:false toStrm:true) httpReqPrms)}
       OutParams
       HttpResponseParams
   in
       {Client getService(Url ?OutParams ?HttpResponseParams)}
       {Client closeAll(true)}
       OutParams.sOut
   end

in

   {System.showInfo {GetPage "http://www.w3.org/Home.html"}}</lang>

Pascal

Works with: Free Pascal

Using fphttpclient <lang pascal>{$mode objfpc}{$H+} uses fphttpclient; var

   s: string;
   hc: tfphttpclient;

begin

   hc := tfphttpclient.create(nil);
   try
       s := hc.get('http://www.example.com')
   finally
       hc.free
   end;
   writeln(s)

end.</lang>

Works with: Free Pascal
Library: CThreads
Library: Classes
Library: httpsend

<lang pascal>program http; {$mode objfpc}{$H+} {$APPTYPE CONSOLE} {$DEFINE DEBUG} uses

   {$IFDEF UNIX}
   {$IFDEF UseCThreads}
   cthreads,
   {$ENDIF}
   {$ENDIF}
   Classes,
   // Synapse httpsend class    
   httpsend;

{$R *.res} var

   Response: TStrings;
   HTTPObj: THTTPSend;

begin

   HTTPObj := THTTPSend.Create;
   try
       Response := TStringList.Create;
       try
           if HTTPObj.HTTPMethod('GET','http://wiki.lazarus.freepascal.org/Synapse') then
               begin
                     Response.LoadFromStream(HTTPObj.Document);
                     Writeln(Response.Text);
               end
           else
               Writeln('Error retrieving data');
       finally
           Response.Free;
       end;
   finally
       HTTPObj.Free;
   end;
   Readln;

end.</lang>

Peloton

English dialect, short form: <lang sgml><@ SAYURLLIT>http://www.w3.org/Home.html</@></lang> English dialect, padded variable-length form: <lang sgml><# SAY URLSOURCE LITERAL>http://www.w3.org/Home.html</#></lang>

Perl

<lang perl>use HTTP::Tiny; my $response = HTTP::Tiny -> new -> get("http://www.w3.org/Home.html"); print $response -> {content};</lang>

Phix

Library: libcurl

Note that curl_easy_get_file() is better suited to multi-megabyte downloads than curl_easy_perform_ex(). <lang Phix>include builtins\libcurl.e curl_global_init() atom curl = curl_easy_init() curl_easy_setopt(curl, CURLOPT_URL, "http://www.w3.org/Home.html") object res = curl_easy_perform_ex(curl) curl_easy_cleanup(curl) curl_global_cleanup() puts(1,res)</lang>

PHP

<lang php>readfile("http://www.w3.org/Home.html");</lang>

PicoLisp

<lang PicoLisp>(load "@lib/http.l") (client "www.w3.org/Home.html" 80 NIL (out NIL (echo)))</lang>

Pike

<lang pike>write("%s", Protocols.HTTP.get_url_data("http://www.w3.org/Home.html"));</lang>

PowerShell

<lang powershell>Invoke-WebRequest -MaximumRedirection 0 -URI http://www.w3.org/Home.html</lang>

SWI-Prolog

<lang Prolog>:- use_module(library(http/http_open)). http :- http_open('http://www.w3.org/Home.html', In, []), copy_stream_data(In, user_output), close(In).</lang>

PureBasic

<lang PureBasic>InitNetwork() OpenConsole() tmpdir$ = GetTemporaryDirectory() filename$ = tmpdir$ + "PB_tempfile" + Str(Random(200000)) + ".html" If ReceiveHTTPFile("http://www.w3.org/Home.html", filename$)

   If ReadFile(1, filename$)
       Repeat
           PrintN(ReadString(1))
       Until Eof(1)
       Input()
       CloseFile(1)
   EndIf
   DeleteFile(filename$)

EndIf</lang> Using general networking commands. <lang PureBasic>InitNetwork() OpenConsole() id = OpenNetworkConnection("www.w3.org", 80) SendNetworkString(id, "GET /Home.html HTTP/1.1" + Chr(10) + "Host: www.w3.org" + Chr(10) + Chr(10)) Repeat

   If NetworkClientEvent(id) = 2
       a$ = Space(1000)
       ReceiveNetworkData(id, @a$, 1000)
       out$ + a$
   EndIf

Until FindString(out$, "</html>", 0) PrintN(out$) Input()</lang>

Python

<lang python>import http connection = http.client.HTTPConnection("www.w3.org") connection.request("GET", "/") print(connection.getresponse().read())</lang>

R

Library: RCurl
Library: XML

First, retrieve the webpage. <lang R>library(RCurl) webpage <- getURL("http://www.w3.org/Home.html") webpage <- getURL("http://www.w3.org/Home.html", .opts=list(followlocation=TRUE)) webpage <- getURL("http://www.w3.org/Home.html", .opts=list(proxy="123.123.123.123", proxyusername="domain\\username", proxypassword="mypassword", proxyport=8080))</lang> Now parse the html code into a tree and print the html. <lang R>library(XML) pagetree <- htmlTreeParse(webpage) pagetree$children$html</lang>

Racket

<lang Racket>#lang racket (require net/url) (

   copy-port (
       get-pure-port (
           string->url "http://www.w3.org/Home.html"
       )
       #:redirections 100
   )
   (current-output-port)

)</lang>

Raku

(formerly Perl 6)

Library: LWP

Using LWP::Simple from the Raku ecosystem. <lang perl6>use v6; use LWP::Simple; print LWP::Simple.get("http://www.w3.org/Home.html");</lang> or, without LWP::Simple: <lang perl6>use v6; my $socket = IO::Socket::INET.new(host => "www.w3.org", port => 80,); $socket.print("GET /Home.html HTTP/1.0\r\n\r\n"); print $socket.recv(); $socket.close;</lang>

REALbasic

REALBasic provides an HTTPSocket class for handling HTTP connections. The 'Get' method of the HTTPSocket is overloaded and can download data to a file or return data as a string, in both cases a timeout argument can be passed. <lang REALbasic>Dim sock As New HTTPSocket Print(sock.Get("http://www.w3.org/Home.html", 10))</lang>

REBOL

<lang REBOL>print read http://www.w3.org/Home.html</lang>

REXX

This script takes an URL as an argument and displays the content on the terminal. It uses the external program `curl` to perform both the acquisition of the data and the display.

<lang Rexx>/* ft=rexx */ /* GET2.RX - Display contents of an URL on the terminal. */ /* Usage: rexx get.rx http://www.w3.org/Home.html */ parse arg url . 'curl' url</lang>

A simple change to the script will redirect the output to an internal variable for internal processing. (Our "internal processing" in this case is to display it.)

<lang Rexx>/* ft=rexx */ /* GET2.RX - Display contents of an URL on the terminal. */ /* Usage: rexx get2.rx http://www.w3.org/Home.html */ parse arg url . address system 'curl' url with output stem stuff. do i = 1 to stuff.0

 say stuff.i

end</lang>

Another simple change redirects the output to another external program like a shell pipe.

<lang Rexx>/* ft=rexx */ /* GET3.RX - Display contents of an URL on the terminal. */ /* Usage: rexx get3.rx http://www.w3.org/Home.html */ parse arg url . address system 'curl' url with output fifo address system 'more' with input fifo </lang>

Ring

<lang ring>See download("http://www.w3.org/Home.html")</lang>

RLaB

RLaB supports HTTP/FTP through its Read/Write facilities, which are organized around the concept of Universal Resource Locator (URL),

protocol://address

RLaB accepts the following values for protocol:

1. file or omitted, for generic text files or files in native binary format (partially compatible with matlab binary format);
2. h5 or hdf5 for files that use Hierarhical Data Format 5 (HDF5) version 1.8.0, and later. Here protocol can be omitted while address has to end with .h5 (file extension);
3. http, https, or ftp for accessing the data and files on web- and ftp-servers;
4. tcp, for accessing sockets over tcp/ip protocol;
5. serial, for accessing serial port on Un*x type systems.

For these URLs RLaB provides an internal book-keeping: It keeps track of the open ones and, say, upon quitting, closes them and releases the internal resources it allocated for managing them.

For accessing URLs on world wide web RLaB implements the library cURL (libcurl) [2] and its "easy" interface.

This said, this is how one would download financial data for Pfizer from Yahoo [3].

<lang RLaB> // get CSV data from Yahoo for Pfizer (PFE) url="http://ichart.finance.yahoo.com/table.csv?s=PFE&a=00&b=4&c=1982&d=00&e=10&f=2010&g=d&ignore=.csv";

opt = <<>>; // opt.CURLOPT_PROXY = "your.proxy.here"; // opt.CURLOPT_PROXYPORT = YOURPROXYPORT; // opt.CURLOPT_PROXYTYPE = "http"; open(url, opt); x = readm(url); close (url); </lang>

Ruby

The simple way loads the entire content into memory, then prints it. <lang ruby>require 'open-uri' print open("http://www.w3.org/Home.html") {

   |f| f.read

}</lang> If the content might be large, the better way uses FileUtils.copy_stream. <lang ruby>require 'fileutils' require 'open-uri' open("http://www.w3.org/Home.html") {

   |f| FileUtils.copy_stream(f, $stdout)

}</lang>

Run BASIC

<lang runbasic>print httpget$("http://www.w3.org/Home.html")</lang>

Rust

Cargo.toml <lang toml>[dependencies] hyper = "0.6"</lang> src/main.rs <lang rust>//cargo-deps: hyper="0.6" // The above line can be used with cargo-script which makes cargo's dependency handling more convenient for small programs extern crate hyper; use std::io::Read; use hyper::client::Client; fn main() {

   let client = Client::new();
   let mut resp = client.get("http://www.w3.org/Home.html").send().unwrap();
   let mut body = String::new();
   resp.read_to_string(&mut body).unwrap();
   println!("{}", body);

}</lang>

Scala

Library: Scala

<lang scala>import scala.io.Source object HttpTest extends App {

   System.setProperty("http.agent", "*")
   Source.fromURL("http://www.w3.org/Home.html").getLines.foreach(println)

}</lang>

Scheme

Works with: Guile

<lang scheme>(use-modules (ice-9 regex)) (define url "http://www.w3.org/Home.html") (define r (make-regexp "^(http://)?([^:/]+)(:)?(([0-9])+)?(/.*)?" regexp/icase)) (define host (match:substring (regexp-exec r url) 2)) (define port (match:substring (regexp-exec r url) 4)) (define path (match:substring (regexp-exec r url) 6)) (if (eq? port #f) (define port "80")) (define port (string->number port)) (

   let ((s (socket PF_INET SOCK_STREAM 0)))
   (connect s AF_INET (car (hostent:addr-list (gethostbyname host))) port)
   (display "GET " s)
   (display path s)
   (display " HTTP/1.0\r\n\r\n" s)
   (
       do ((c (read-char s) (read-char s)))
       ((eof-object? c))
       (display c)
   )

)</lang>

Works with: Chicken Scheme

Using the http-client library, this is trivial. <lang scheme>(use http-client) (print (with-input-from-request "http://www.w3.org/Home.html" #f read-string))</lang>

Seed7

The gethttp.s7i library contains the function getHttp, which gets data specified by an URL using the HTTP protocol. <lang seed7>$ include "seed7_05.s7i"; include "gethttp.s7i"; const proc: main is func begin

   writeln(getHttp("www.w3.org/Home.html"));

end func;</lang>

SenseTalk

<lang sensetalk>put url "http://www.w3.org/Home.html"</lang>

Sidef

Sidef can load and use Perl modules: <lang ruby>func get(url) {

   var lwp = (
       try {
           require('LWP::UserAgent')
       } catch {
           warn "'LWP::UserAgent' is not installed!"; return nil
       }
   )
   var ua = lwp.new(agent => 'Mozilla/5.0')
   if (var resp = ua.get(url); resp.is_success) {
       return resp.decoded_content
   }
   return nil

} print get("http://www.w3.org/Home.html")</lang>

Smalltalk

Works with: Pharo

<lang smalltalk>Transcript show: 'http://www.w3.org/Home.html' asUrl retrieveContents contentStream.</lang>

SNOBOL4

Works with: Macro SNOBOL4 in C

<lang snobol>-include "tcp.sno"

   tcp.open(.conn, 'www.w3.org', 'http') :s(cont1)
   terminal = "cannot open" :(end)

cont1 conn = "GET /Home.html HTTP/1.0" char(10) char(10) while output = conn :s(while)

   tcp.close(.conn)

end</lang>

Swift

<lang Swift>import Foundation let request = NSURLRequest(URL: NSURL(string: "http://www.w3.org/Home.html")!) NSURLConnection.sendAsynchronousRequest(request, queue: NSOperationQueue()) {

   res, data, err in
   if (data != nil) {
       let string = NSString(data: data!, encoding: NSUTF8StringEncoding)
       println(string)
   }

} CFRunLoopRun()</lang>

Tcl

Note that the http package is distributed as part of Tcl. <lang tcl>package require http set request [http::geturl "http://www.w3.org/Home.html"] puts [http::data $request] http::cleanup $request</lang>

TSE SAL

<lang TSE SAL>DLL "<urlmon.dll>"

   INTEGER PROC FNUrlGetSourceApiI(
       INTEGER lpunknown,
       STRING urlS : CSTRVAL,
       STRING filenameS : CSTRVAL,
       INTEGER dword,
       INTEGER tlpbindstatuscallback
   ) : "URLDownloadToFileA"

END // library: url: get: source <description></description> <version control></version control> <version>1.0.0.0.3</version> (filenamemacro=geturgso.s) [kn, ri, su, 13-04-2008 05:12:53] PROC PROCUrlGetSource( STRING urlS, STRING filenameS )

   FNUrlGetSourceApiI( 0, urlS, filenameS, 0, 0 )

END PROC Main()

   STRING s1[255] = "http://www.google.com/index.html"
   STRING s2[255] = "c:\temp\ddd.txt"
   IF ( NOT ( Ask( "url: get: source: urlS = ", s1, _EDIT_HISTORY_ ) ) AND ( Length( s1 ) > 0 ) )
       RETURN()
   ENDIF
   IF ( NOT ( AskFilename( "url: get: source: filenameS = ", s2, _DEFAULT_, _EDIT_HISTORY_ ) ) AND ( Length( s2 ) > 0 ) ) 
       RETURN()
   ENDIF
PROCUrlGetSource( s1, s2 )
EditFile( s2 )

END</lang>

TUSCRIPT

<lang tuscript>$$ MODE TUSCRIPT SET DATEN = REQUEST ("http://www.w3.org/Home.html")

*{daten}</lang>

UNIX Shell

<lang bash>curl -s -L http://www.w3.org/Home.html</lang>

<lang bash>lynx -source http://www.w3.org/Home.html</lang>

<lang bash>wget -O - -q http://www.w3.org/Home.html</lang>

<lang bash>lftp -c "cat http://www.w3.org/Home.html"</lang>

Works with: BSD

<lang bash>ftp -o - http://www.w3.org/Home.html 2>/dev/null</lang>

VBScript

Based on code at How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object <lang vb>Option Explicit Const sURL="http://www.w3.org/Home.html" Dim oHTTP Set oHTTP = CreateObject("Microsoft.XmlHTTP") On Error Resume Next oHTTP.Open "GET", sURL, False oHTTP.Send "" If Err.Number = 0 Then

    WScript.Echo oHTTP.responseText

Else

    Wscript.Echo "error " & Err.Number & ": " & Err.Description

End If Set oHTTP = Nothing</lang>

Visual Basic

Works with: Visual Basic version 5
Works with: Visual Basic version 6
Works with: VBA version Access 97
Works with: VBA version 6.5
Works with: VBA version 7.1

<lang vb>Sub Main()
    Dim HttpReq As WinHttp.WinHttpRequest
    ' in the "references" dialog of the IDE, check
    ' "Microsoft WinHTTP Services, version 5.1" (winhttp.dll)
    Const HTTPREQUEST_PROXYSETTING_PROXY As Long = 2
' The #-prefixed conditional-compilation directives below were mangled into
' numbered-list items by the wiki rendering; restored here.
#Const USE_PROXY = 1
    Set HttpReq = New WinHttp.WinHttpRequest
    HttpReq.Open "GET", "http://rosettacode.org/robots.txt"
#If USE_PROXY Then
    HttpReq.SetProxy HTTPREQUEST_PROXYSETTING_PROXY, "my_proxy:80"
#End If
    HttpReq.SetTimeouts 1000, 1000, 1000, 1000
    HttpReq.Send
    Debug.Print HttpReq.ResponseText
End Sub</lang>

Visual Basic .NET

<lang vbnet>Imports System.Net Dim client As WebClient = New WebClient() Dim content As String = client.DownloadString("http://www.w3.org/Home.html") Console.WriteLine(content)</lang>

zkl

File htmlGet.zkl. This uses HTTP/1.0 Protocol to avoid chunked data. Or use cURL (see https example). <lang zkl>url := ask(0,"URL: "); host := url; dir := "/"; port := 80; if (n := url.find("/")) {

   dir = url[n,*];
   host = url[0,n];

} if (n := host.find(":")) {

   port = host[n+1,*];
   host = host[0,n];

} get := "GET %s HTTP/1.0\r\nHost: %s:%s\r\n\r\n".fmt(dir,host,port.toInt()); println("-->",get); server := Network.TCPClientSocket.connectTo(host,port); server.write(get); data := server.read(True); println(data.text);</lang>

Zoea

<lang Zoea>program: http

   input: 'http://www.w3.org/Home.html'
   output: 'hello from zoea'</lang>

Zsh

<lang zsh>zmodload zsh/net/tcp ztcp www.w3.org 80 fd=$REPLY print -l -u $fd -- 'GET /Home.html HTTP/1.1' 'Host: www.w3.org' while read -u $fd -r -e -t 1; do; :; done ztcp -c $fd</lang>