HTTP

Task
You are encouraged to solve this task according to the task description, using any language you may know.

Access and print a URL's content (the located resource) to the console. There is a separate task for HTTPS Requests.

ActionScript

<lang actionscript> package {

   import flash.display.Sprite;
   import flash.events.Event;
   import flash.net.*;
   public class RequestExample extends Sprite
   {
       public function RequestExample()
       {
           var loader:URLLoader = new URLLoader();
           loader.addEventListener(Event.COMPLETE, loadComplete);
           loader.load(new URLRequest("http://www.rosettacode.org"));
       }
       private function loadComplete(evt:Event):void
       {
           trace(evt.target.data);
       }
   }

} </lang>

Ada

Library: AWS

<lang ada> with Ada.Text_IO; use Ada.Text_IO;

with AWS.Client; with AWS.Response;

procedure HTTP_Request is begin

  Put_Line (AWS.Response.Message_Body (AWS.Client.Get (URL => "http://www.rosettacode.org")));

end HTTP_Request; </lang>

ALGOL 68

Works with: ALGOL 68 version Revision 1 - however grep in string, http content and str error are from a non-standard library
Works with: ALGOL 68G version Any - tested with release 1.18.0-9h.tiny


<lang algol68> STRING domain="rosettacode.org"; STRING page="wiki/Main_Page";

STRING re success="^HTTP/[0-9.]* 200"; STRING re result description="^HTTP/[0-9.]* [0-9]+ [a-zA-Z ]*"; STRING re doctype ="\s\s<!DOCTYPE html PUBLIC ""[^>]+"">\s+";

PROC html page = (REF STRING page) BOOL: (

    BOOL out=grep in string(re success, page, NIL, NIL) = 0;
    IF INT start, end;
       grep in string(re result description, page, start, end) = 0
    THEN
       page:=page[end+1:];
       IF grep in string(re doctype, page, start, end) = 0
       THEN page:=page[start+2:]
       ELSE print ("unknown format retrieving page")
       FI
    ELSE print ("unknown error retrieving page")
    FI;
    out

);

IF STRING reply;

  INT rc =
     http content (reply, domain, "http://"+domain+"/"+page, 0);
  rc = 0 AND html page (reply)

THEN print (reply) ELSE print (strerror (rc)) FI </lang>

AutoHotkey

<lang AutoHotkey> UrlDownloadToFile, http://rosettacode.org, url.html
Run, cmd /k type url.html </lang>

Batch File

<lang batch> curl.exe -s -L http://rosettacode.org/ </lang>

BBC BASIC

<lang bbcbasic> SYS "LoadLibrary", "URLMON.DLL" TO urlmon%

     SYS "GetProcAddress", urlmon%, "URLDownloadToFileA" TO URLDownloadToFile
     
     url$ = "http://www.bbcbasic.co.uk/aboutus.html"
     file$ = @tmp$ + "rosetta.tmp"
     SYS URLDownloadToFile, 0, url$, file$, 0, 0 TO fail%
     IF fail% ERROR 100, "File download failed"
     
     OSCLI "TYPE """ + file$ + """"</lang>

Biferno

A simple one-liner using httpExt and quick print ($):

<lang Biferno>$httpExt.ExecRemote("www.tabasoft.it")</lang>

C

Library: libcurl

<lang c>

#include <stdio.h>
#include <stdlib.h>
#include <curl/curl.h>

int main(void) {

       CURL *curl;
       char buffer[CURL_ERROR_SIZE];
       if ((curl = curl_easy_init()) != NULL) {
               curl_easy_setopt(curl, CURLOPT_URL, "http://www.rosettacode.org/");
               curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
               curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, buffer);
               if (curl_easy_perform(curl) != CURLE_OK) {
                       fprintf(stderr, "%s\n", buffer);
                       return EXIT_FAILURE;
               }
               curl_easy_cleanup(curl);
       }
       return EXIT_SUCCESS;

} </lang>

C++

<lang cpp>

#include <winsock2.h>
#include <ws2tcpip.h>
#include <iostream>

int main() {
    WSADATA wsaData;
    WSAStartup( MAKEWORD( 2, 2 ), &wsaData );

    addrinfo *result = NULL;
    addrinfo hints;

    ZeroMemory( &hints, sizeof( hints ) );
    hints.ai_family = AF_UNSPEC;
    hints.ai_socktype = SOCK_STREAM;
    hints.ai_protocol = IPPROTO_TCP;

    getaddrinfo( "74.125.45.100", "80", &hints, &result ); // http://www.google.com

    SOCKET s = socket( result->ai_family, result->ai_socktype, result->ai_protocol );

    connect( s, result->ai_addr, (int)result->ai_addrlen );

    freeaddrinfo( result );

    send( s, "GET / HTTP/1.0\n\n", 16, 0 );

    char buffer[512];
    int bytes;

    do {
        bytes = recv( s, buffer, 512, 0 );
        if ( bytes > 0 )
            std::cout.write(buffer, bytes);
    } while ( bytes > 0 );

    return 0;
} </lang>

Library: U++

<lang cpp>

#include <Web/Web.h>

using namespace Upp;

CONSOLE_APP_MAIN { Cout() << HttpClient("www.rosettacode.org").ExecuteRedirect(); } </lang>

C#

<lang csharp> using System;
using System.Text;
using System.Net;

class Program {

   static void Main(string[] args)
   {
       WebClient wc = new WebClient();
       string content = wc.DownloadString("http://www.google.com");
       Console.WriteLine(content);
   }

} </lang>

Caché ObjectScript

USER>Set HttpRequest=##class(%Net.HttpRequest).%New()
USER>Set HttpRequest.Server="checkip.dyndns.org"
USER>Do HttpRequest.Get("/")
USER>Do HttpRequest.HttpResponse.Data.OutputToDevice()


Clojure

Using the Java API:

<lang clojure> (defn get-http [url]
  (let [sc (java.util.Scanner.
             (.openStream (java.net.URL. url)))]
    (while (.hasNext sc)
      (println (.nextLine sc)))))

(get-http "http://www.rosettacode.org") </lang>

Using clojure.contrib.http.agent:

<lang clojure> (ns example
  (:use [clojure.contrib.http.agent :only (string http-agent)]))

(println (string (http-agent "http://www.rosettacode.org/"))) </lang>

Works with: Clojure version 1.2

<lang clojure> (print (slurp "http://www.rosettacode.org/")) </lang>


ColdFusion

<lang coldfusion>

 <cfhttp url="http://www.rosettacode.org" result="result">
 <cfoutput>#result.FileContent#</cfoutput>

</lang>

Common Lisp

CLISP provides an extension function to read http sources. Other implementations may do this differently.

Works with: CLISP

<lang lisp> (defun wget-clisp (url)

   (ext:with-http-input (stream url)
       (loop for line = (read-line stream nil nil)
           while line
           do (format t "~a~%" line))))

</lang>

Library: DRAKMA

The first example grabs the entire body as a string; the second pulls from a stream (as in the CLISP example).

<lang lisp> (defun wget-drakma-string (url &optional (out *standard-output*))

 "Grab the body as a string, and write it to out."
 (write-string (drakma:http-request url) out))

(defun wget-drakma-stream (url &optional (out *standard-output*))

 "Grab the body as a stream, and write it to out."
 (loop with body = (drakma:http-request url :want-stream t)
       for line = (read-line body nil nil)
       while line do (write-line line out)
       finally (close body)))

</lang>

D

Library: phobos

<lang D> void main() {

 import std.stdio, std.net.curl;
 writeln(get("http://google.com"));

} </lang>


Library: tango

<lang D> import tango.io.Console; import tango.net.http.HttpGet;

void main() {

 Cout.stream.copy( (new HttpGet("http://google.com")).open );

} </lang>

Or, operating more directly on the socket:

<lang D> import tango.io.Console; import tango.net.InternetAddress; import tango.net.device.Socket;

void main() {

 auto site = new Socket;
 site.connect (new InternetAddress("google.com",80)).write ("GET / HTTP/1.0\n\n");
 Cout.stream.copy (site);

} </lang>

Delphi

Simple example using the free Synapse TCP/IP library [1]

<lang Delphi> program HTTP;

{$APPTYPE CONSOLE}

{$DEFINE DEBUG}

uses

 Classes,
 httpsend; // Synapse httpsend class

var

 Response: TStrings;
 HTTPObj: THTTPSend;

begin

 HTTPObj := THTTPSend.Create;
 try
   { Stringlist object to capture HTML returned
     from URL }
   Response := TStringList.Create;
   try
     if HTTPObj.HTTPMethod('GET','http://www.mgis.uk.com') then
       begin
         { Load HTTP Document into Stringlist }
         Response.LoadFromStream(HTTPObj.Document);
         { Write the response to the console window }
         Writeln(Response.Text);
       end
       else
       Writeln('Error retrieving data');
   finally
     Response.Free;
   end;
 finally
   HTTPObj.Free;
 end;
 // Keep console window open
 Readln;

end. </lang>


Using Indy:

<lang Delphi> program ShowHTTP;

{$APPTYPE CONSOLE}

uses IdHttp;

var

 s: string;
 lHTTP: TIdHTTP;

begin

 lHTTP := TIdHTTP.Create(nil);
 try
   lHTTP.HandleRedirects := True;
   s := lHTTP.Get('http://www.rosettacode.org');
   Writeln(s);
 finally
   lHTTP.Free;
 end;

end. </lang>

E

<lang e> when (def t := <http://www.rosettacode.org> <- getText()) -> {

   println(t)

} </lang>

Erlang

Synchronous

<lang erlang> -module(main). -export([main/1]).

main([Url|[]]) ->

  inets:start(),
  case http:request(Url) of
      {ok, {_V, _H, Body}} -> io:fwrite("~p~n",[Body]);
      {error, Res} -> io:fwrite("~p~n", [Res])
  end.

</lang>

Asynchronous

<lang erlang> -module(main). -export([main/1]).

main([Url|[]]) ->

  inets:start(),
  http:request(get, {Url, [] }, [], [{sync, false}]),
  receive
      {http, {_ReqId, Res}} -> io:fwrite("~p~n",[Res]);
      _Any -> io:fwrite("Error: ~p~n",[_Any])
      after 10000 -> io:fwrite("Timed out.~n",[])
  end.

</lang>

Using it: <lang erlang> escript ./req.erl http://www.rosettacode.org </lang>

F#

In F# we can just use the .NET library to do this, so it's the same as the C# example.

<lang fsharp> let wget (url : string) =

   use c = new System.Net.WebClient()
   c.DownloadString(url)

printfn "%s" (wget "http://www.rosettacode.org/") </lang>

However, unlike C#, F# can use an asynchronous workflow to avoid blocking any threads while waiting for a response from the server. To asynchronously download three URLs at once:

<lang fsharp> open System.Net
open System.IO

let wgetAsync url =

   async { let request = WebRequest.Create (url:string)
           use! response = request.AsyncGetResponse()
           use responseStream = response.GetResponseStream()
           use reader = new StreamReader(responseStream)
           return reader.ReadToEnd() }

let urls = ["http://www.rosettacode.org/"; "http://www.yahoo.com/"; "http://www.google.com/"]

let content = urls

             |> List.map wgetAsync
             |> Async.Parallel
             |> Async.RunSynchronously</lang>

Factor

<lang factor>USE: http.client "http://www.rosettacode.org" http-get nip print </lang>

Forth

Works with: GNU Forth version 0.7.0

This works at the socket level, returning both the HTTP headers and page contents. <lang forth> include unix/socket.fs

s" localhost" 80 open-socket dup s\" GET / HTTP/1.0\n\n" rot write-socket dup pad 8092 read-socket type close-socket </lang>

friendly interactive shell

Translation of: UNIX Shell

<lang fishshell>curl -s -L http://rosettacode.org/</lang>

<lang fishshell>lynx -source http://rosettacode.org/</lang>

<lang fishshell>wget -O - -q http://rosettacode.org/</lang>

<lang fishshell>lftp -c "cat http://rosettacode.org/"</lang>

Works with: BSD

<lang fishshell>ftp -o - http://rosettacode.org ^ /dev/null</lang>

Go

<lang go> package main

import (

   "io"
   "log"
   "net/http"
   "os"

)

func main() {

   r, err := http.Get("http://rosettacode.org/robots.txt")
   if err != nil {
       log.Fatalln(err)
   }
   io.Copy(os.Stdout, r.Body)

} </lang>

Output:

User-agent: *
Allow: /mw/images/
Allow: /mw/skins/
Allow: /mw/title.png
Disallow: /w/
Disallow: /mw/
Disallow: /wiki/Special:

Groovy

<lang groovy> new URL("http://www.rosettacode.org").eachLine { println it } </lang>

GUISS

It would be more appropriate to paste to notepad:

<lang guiss>Start,Programs,Applications,Mozilla Firefox,Inputbox:address bar>www.rosettacode.org,Button:Go, Click:Area:browser window,Type:[Control A],[Control C],Start,Programs,Accessories,Notepad, Menu:Edit,Paste</lang>

Haskell

Using

Library: HTTP

from HackageDB

<lang haskell> import Network.Browser
import Network.HTTP
import Network.URI

httpreq = do
      rsp <- Network.Browser.browse $ do
                 setAllowRedirects True
                 setOutHandler $ const (return ())
                 request $ getRequest "http://www.rosettacode.org/"
      putStrLn $ rspBody $ snd rsp

</lang>

Icon and Unicon

Icon

<lang icon> link cfunc

procedure main(arglist)

  get(arglist[1])

end

procedure get(url)

  local f, host, port, path
  url ? {
        ="http://" | ="HTTP://"
        host := tab(upto(':/') | 0)
        if not (=":" & (port := integer(tab(upto('/'))))) then port := 80
        if pos(0) then path := "/" else path := tab(0)
  }
  write(host)
  write(path)
  f := tconnect(host, port) | stop("Unable to connect")
  writes(f, "GET ", path | "/" ," HTTP/1.0\r\n\r\n")
  while write(read(f))

end </lang>

Using it: <lang icon> icon req.icn http://www.rosettacode.org </lang>

Unicon

Unicon provides improved socket and messaging support without the need for the external function cfunc:

<lang unicon> procedure main(arglist)
  m := open(arglist[1],"m")
  while write(read(m))
end </lang>

J

Using gethttp from Web Scraping

<lang j> gethttp 'http://www.rosettacode.org' </lang>

Java

<lang java5> import java.util.Scanner; import java.net.URL;

public class Main {

   public static void main(String[] args) throws Exception {         
       Scanner sc = new Scanner(new URL("http://www.rosettacode.org").openStream());
       while (sc.hasNext())
           System.out.println(sc.nextLine());         
   }

} </lang>

<lang java5> import org.apache.commons.io.IOUtils; import java.net.URL;

public class Main {

   public static void main(String[] args) throws Exception {
       IOUtils.copy(new URL("http://rosettacode.org").openStream(),System.out);    	    	    		    
   }

} </lang>

JavaScript

<lang JavaScript>((function(url,callback,method,post,headers){
    // headers is an object like this {Connection:"keep-alive"}
    function looProp(object,callback){
        var a;
        for(a in object){
            if(object.hasOwnProperty(a))callback.call(object,a,object[a]);
        }
    }
    method=method||"GET";
    xhr=new XMLHttpRequest();
    xhr.open(method,url,true);
    looProp(headers,function(a,b){xhr.setRequestHeader(a,b)});
    xhr.onreadystatechange=function(){if(xhr.readyState==xhr.DONE){callback(xhr)}};
    xhr.send(post);
    return xhr;
})('http://rosettacode.org',function(xhr){console.log(xhr.response)}))</lang>

LFE

Synchronous

<lang lisp>(: inets start)
(case (: httpc request '"http://lfe.github.io")
  ((tuple 'ok result)
    (: io format '"Result: ~p" (list result)))
  ((tuple 'error reason)
    (: io format '"Error: ~p~n" (list reason))))

</lang>

Asynchronous

<lang lisp>(: inets start)
(let* ((method 'get)
       (url '"http://lfe.github.io")
       (headers ())
       (request-data (tuple url headers))
       (http-options ())
       (request-options (list (tuple 'sync 'false))))
  (: httpc request method request-data http-options request-options)
  (receive
    ((tuple 'http (tuple request-id (tuple 'error reason)))
     (: io format '"Error: ~p~n" (list reason)))
    ((tuple 'http (tuple request-id result))
     (: io format '"Result: ~p~n" (list result)))))

</lang>

Liberty BASIC

Uses a DLL call and a timer to allow time to receive the file.

<lang lb> result = DownloadToFile( "http://rosettacode.org/wiki/Main_Page", "in.html")
timer 2000, [on]
wait
[on]
timer 0
if result <> 0 then print "Error downloading."

end

Function DownloadToFile( urlfile$, localfile$)

   open "URLmon" for dll as #url
   calldll #url, "URLDownloadToFileA",_
   0 as long,_         'null
   urlfile$ as ptr,_   'url to download
   localfile$ as ptr,_ 'save file name
   0 as long,_         'reserved, must be 0
   0 as long,_         'callback address, can be 0
   DownloadToFile as ulong  '0=success
   close #url

end function </lang>

LSL

To test it yourself, rez a box on the ground and add the following as a New Script.

<lang LSL>string sURL = "http://www.RosettaCode.Org";
key kHttpRequestId;
default {
    state_entry() {
        kHttpRequestId = llHTTPRequest(sURL, [], "");
    }
    http_response(key kRequestId, integer iStatus, list lMetaData, string sBody) {
        if(kRequestId==kHttpRequestId) {
            llOwnerSay("Status="+(string)iStatus);
            integer x = 0;
            for(x=0 ; x<llGetListLength(lMetaData) ; x++) {
                llOwnerSay("llList2String(lMetaData, "+(string)x+")="+llList2String(lMetaData, x));
            }
            list lBody = llParseString2List(sBody, ["\n"], []);
            for(x=0 ; x<llGetListLength(lBody) ; x++) {
                llOwnerSay("llList2String(lBody, "+(string)x+")="+llList2String(lBody, x));
            }
        }
    }
}</lang>
Output:

Status=200
llList2String(lMetaData, 0)=0
llList2String(lMetaData, 1)=2048
llList2String(lBody, 0)=<!DOCTYPE html>
llList2String(lBody, 1)=<html lang="en" dir="ltr" class="client-nojs">
llList2String(lBody, 2)=<head>
llList2String(lBody, 3)=<title>Rosetta Code</title>
llList2String(lBody, 4)=<meta charset="UTF-8" />
llList2String(lBody, 5)=<meta name="generator" content="MediaWiki 1.18.0" />
llList2String(lBody, 6)=<link rel="shortcut icon" href="/favicon.ico" />
llList2String(lBody, 7)=<link rel="search" type="application/opensearchdescription+xml" href="/mw/opensearch_desc.php" title="Rosetta Code (en)" />
llList2String(lBody, 8)=<link rel="EditURI" type="application/rsd+xml" href="http://rosettacode.org/mw/api.php?action=rsd" />
llList2String(lBody, 9)=<link rel="copyright" href="http://www.gnu.org/licenses/fdl-1.2.html" />
llList2String(lBody, 10)=<link rel="alternate" type="application/atom+xml" title="Rosetta Code Atom feed" href="/mw/index.php?title=Special:RecentChanges&feed=atom" />
llList2String(lBody, 11)=<link rel="stylesheet" href="/mw/load.php?debug=false&lang=en&modules=mediawiki.legacy.commonPrint%2Cshared%7Cskins.vector&only=styles&skin=vector&*" />
llList2String(lBody, 12)=<meta name="ResourceLoaderDynamicStyles" content="" />
llList2String(lBody, 13)=<link rel="stylesheet" href="/mw/load.php?debug=false&lang=en&modules=site&only=styles&skin=vector&*" />
llList2String(lBody, 14)=<style>a:lang(ar),a:lang(ckb),a:lang(fa),a:lang(kk-arab),a:lang(mzn),a:lang(ps),a:lang(ur){text-decoration:none}a.new,#quickbar a.new{color:#ba0000}
...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...   ...

Lua

Library: LuaSocket

<lang Lua> local http = require("socket.http")

function url_encode(str)

 if (str) then
   str = string.gsub (str, "\n", "\r\n")
   str = string.gsub (str, "([^%w ])",
       function (c) return string.format ("%%%02X", string.byte(c)) end)
   str = string.gsub (str, " ", "+")
 end
 return str

end

function url_decode(str)

 str = string.gsub (str, "+", " ")
 str = string.gsub (str, "%%(%x%x)",
     function(h) return string.char(tonumber(h,16)) end)
 str = string.gsub (str, "\r\n", "\n")
 return str

end

local page = http.request( 'http://www.google.com/m/search?q=' .. url_encode("lua") )
print( page ) </lang>

Maple

<lang Maple> HTTP:-Get( "http://www.google.com" ); </lang>

Mathematica

<lang Mathematica> Print[Import["http://www.google.com/webhp?complete=1&hl=en", "Source"]] </lang>

MATLAB / Octave

urlread is MATLAB's function for making URL requests; Octave provides a compatible urlread function as well.

In this example we initiate an HTTP request for a single random number from random.org: <lang MATLAB> >> random = urlread('http://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new')

random =

61 </lang>

It is possible to make more complicated requests, specifically "GET" and "POST" requests, which are explained in the documentation.

MIRC Scripting Language

See HTTP/MIRC Scripting Language

Objeck

<lang objeck> use HTTP; use Collection;

class HttpTest {

 function : Main(args : String[]) ~ Nil {
   lines := HttpClient->New()->Get("http://rosettacode.org");
   each(i : lines) {
     lines->GetValue(i)->As(String)->PrintLine();
   };
 }

} </lang>

NetRexx

Translation of: Java

An implementation of the Java version shown above, demonstrating NetRexx's ability to exploit the rich Java SDK.

<lang NetRexx> /* NetRexx */ options replace format comments java crossref savelog symbols binary

import java.util.Scanner
import java.net.URL

do

 rosettaUrl = "http://www.rosettacode.org"
 sc = Scanner(URL(rosettaUrl).openStream)
 loop while sc.hasNext
   say sc.nextLine
 end

catch ex = Exception

 ex.printStackTrace

end

return </lang>

NewLisp

<lang NewLisp> (get-url "http://www.rosettacode.org") </lang>

Objective-C

<lang objc>

#import <Foundation/Foundation.h>

int main (int argc, const char * argv[]) {

   NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
   NSError        *error;
   NSURLResponse *response;
   NSData *data = [NSURLConnection sendSynchronousRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"http://rosettacode.org"]]
                                           returningResponse:&response
                                                       error:&error];
                                                    
   NSLog(@"%@", [[[NSString alloc] initWithData:data
                                         encoding:NSUTF8StringEncoding] autorelease]);
   
   [pool drain];
   return 0;

} </lang>

OCaml

<lang ocaml> let () =

 let url = "http://www.rosettacode.org" in
 let _,_, page_content = make_request ~url ~kind:GET () in
 print_endline page_content;

</lang>

The source code of the function make_request is here.
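Since make_request is defined on a separate page, the following is a minimal, self-contained sketch (an assumption for illustration, not the original make_request) that performs a plain HTTP/1.0 GET using only the standard Unix library and prints the raw response, headers included:

<lang ocaml> (* minimal HTTP GET sketch using the Unix library; compile with, e.g.,
   ocamlfind ocamlopt -package unix -linkpkg http_get.ml  (file name assumed) *)
let http_get host path =
  (* resolve the host and open a TCP connection on port 80 *)
  let addr = (Unix.gethostbyname host).Unix.h_addr_list.(0) in
  let sock = Unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
  Unix.connect sock (Unix.ADDR_INET (addr, 80));
  (* send a minimal HTTP/1.0 request *)
  let request = Printf.sprintf "GET %s HTTP/1.0\r\nHost: %s\r\n\r\n" path host in
  ignore (Unix.write_substring sock request 0 (String.length request));
  (* read until the server closes the connection *)
  let buf = Bytes.create 4096 in
  let out = Buffer.create 16384 in
  let rec loop () =
    let n = Unix.read sock buf 0 (Bytes.length buf) in
    if n > 0 then begin
      Buffer.add_string out (Bytes.sub_string buf 0 n);
      loop ()
    end
  in
  loop ();
  Unix.close sock;
  Buffer.contents out

let () = print_endline (http_get "www.rosettacode.org" "/") </lang>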

Oz

When creating a file object, it is possible to specify a URL instead of a filename:

<lang oz> declare

 fun {GetPage Url}
    F = {New Open.file init(url:Url)}
    Contents = {F read(list:$ size:all)}
 in
    {F close}
    Contents
 end

in

 {System.showInfo {GetPage "http://www.rosettacode.org"}}

</lang>

Library: OzHttpClient

If you need more fine-grained control of the request, you could use a custom library:

<lang oz> declare

 [HTTPClient] = {Module.link ['x-ozlib://mesaros/net/HTTPClient.ozf']}
 fun {GetPage Url}
     Client = {New HTTPClient.urlGET
               init(inPrms(toFile:false toStrm:true) httpReqPrms)}

    OutParams
    HttpResponseParams
 in
    {Client getService(Url ?OutParams ?HttpResponseParams)}
    {Client closeAll(true)}
    OutParams.sOut
 end

in

 {System.showInfo {GetPage "http://www.rosettacode.org"}}

</lang>

Pascal

Works with: Free_Pascal
Library: CThreads
Library: Classes
Library: httpsend

<lang pascal>program http;

{$mode objfpc}{$H+} {$APPTYPE CONSOLE}

{$DEFINE DEBUG}

uses

 {$IFDEF UNIX}{$IFDEF UseCThreads}
 cthreads,
 {$ENDIF}{$ENDIF}
 Classes, httpsend; // Synapse httpsend class

{$R *.res}

var

 Response: TStrings;
 HTTPObj: THTTPSend;

begin

 HTTPObj := THTTPSend.Create;
 try
   { Stringlist object to capture HTML returned
     from URL }
   Response := TStringList.Create;
   try
     if HTTPObj.HTTPMethod('GET','http://wiki.lazarus.freepascal.org/Synapse') then
       begin
         { Load HTTP Document into Stringlist }
         Response.LoadFromStream(HTTPObj.Document);
         { Write the response to the console window }
         Writeln(Response.Text);
       end
       else
       Writeln('Error retrieving data');
   finally
     Response.Free;
   end;
 finally
   HTTPObj.Free;
 end;
 // Keep console window open
 Readln;

end.</lang>

Perl

Library: LWP

<lang perl> use LWP::Simple; print get("http://www.rosettacode.org"); </lang>

or with more error-checking

<lang perl> use strict; use LWP::UserAgent;

my $url = 'http://www.rosettacode.org'; my $response = LWP::UserAgent->new->get( $url );

$response->is_success or die "Failed to GET '$url': ", $response->status_line;

print $response->as_string; </lang>

Perl 6

Library: LWP

<lang perl6> use v6;

# Using LWP::Simple from: git://github.com/cosimo/perl6-lwp-simple.git

use LWP::Simple;

print LWP::Simple.get("http://www.rosettacode.org"); </lang>

or, without LWP::Simple:

<lang perl6> use v6;

<lang perl6> my $socket = IO::Socket::INET.new(host => "www.rosettacode.org", port => 80);
$socket.send("GET / HTTP/1.0\r\n\r\n");
print $socket.recv();
$socket.close; </lang>

PHP

<lang php> readfile("http://www.rosettacode.org"); </lang>

PicoLisp

<lang PicoLisp> (load "@lib/http.l")

(client "rosettacode.org" 80 NIL # Connect to rosettacode

  (out NIL (echo)) )                  # Echo to standard output

</lang>

Pike

<lang pike> write("%s",Protocols.HTTP.get_url_data("http://www.rosettacode.org")); </lang>

PowerShell

<lang powershell> $wc = New-Object Net.WebClient
$wc.DownloadString('http://www.rosettacode.org') </lang>

Prolog

Works with SWI-Prolog and the library http/http_open (extract from the documentation).

<lang Prolog>

:- use_module(library( http/http_open )).

http :-
    http_open('http://www.rosettacode.org/', In, []),
    copy_stream_data(In, user_output),
    close(In). </lang>

Protium

English dialect, short form: <lang html> <@ SAYURLLIT>http://rosettacode.org/wiki/Main_Page</@> </lang>

English dialect, padded variable-length form: <lang html> <# SAY URLSOURCE LITERAL>http://rosettacode.org/wiki/Main_Page</#> </lang>

PureBasic

<lang PureBasic> InitNetwork()
OpenConsole()

tmpdir$ = GetTemporaryDirectory()
filename$ = tmpdir$ + "PB_tempfile" + Str(Random(200000)) + ".html"

If ReceiveHTTPFile("http://rosettacode.org/wiki/Main_Page", filename$)

 If ReadFile(1, filename$)
   Repeat
     PrintN(ReadString(1))
   Until Eof(1)
   Input()
   ; to prevent console from closing if on windows
   CloseFile(1)
 EndIf
 DeleteFile(filename$)

EndIf </lang>

Another solution using general networking commands:

<lang PureBasic> InitNetwork()
OpenConsole()
id = OpenNetworkConnection("rosettacode.org", 80)
SendNetworkString(id, "GET /wiki/Main_Page HTTP/1.1" + Chr(10) + "Host: rosettacode.org" + Chr(10) + Chr(10))
Repeat

 If NetworkClientEvent(id) = 2
   a$ = Space(1000)
   ReceiveNetworkData(id, @a$, 1000)
   out$ + a$
 EndIf

Until FindString(out$, "</html>", 0)
PrintN(out$)

; next line only to prevent console from closing on Windows
Input() </lang>

Of course you could use wget too.

Python

Python 3

Using the urllib.request module.

<lang python> import urllib.request
print(urllib.request.urlopen("http://rosettacode.org").read()) </lang>

Python 2

Using the urllib library.

<lang python> import urllib
print urllib.urlopen("http://rosettacode.org").read() </lang>

Using the urllib2 library.

<lang python> import urllib2
print urllib2.urlopen("http://rosettacode.org").read() </lang>

R

Library: RCurl
Library: XML

First, retrieve the webpage.

<lang R> library(RCurl)
webpage <- getURL("http://rosettacode.org")

# If you are linking to a page that no longer exists and need to follow the redirect, use followlocation=TRUE
webpage <- getURL("http://www.rosettacode.org", .opts=list(followlocation=TRUE))

# If you are behind a proxy server, you will need to use something like:
webpage <- getURL("http://rosettacode.org",
  .opts=list(proxy="123.123.123.123", proxyusername="domain\\username", proxypassword="mypassword", proxyport=8080))
# Don't forget that backslashes in your username or password need to be escaped!

</lang>

Now parse the HTML code into a tree and print it.

<lang R> library(XML)
pagetree <- htmlTreeParse(webpage)
pagetree$children$html </lang>

Racket

<lang Racket>

#lang racket

(require net/url)
(copy-port (get-pure-port (string->url "http://www.rosettacode.org")
                          #:redirections 100)
           (current-output-port))

</lang>

REALbasic

REALbasic provides an HTTPSocket class for handling HTTP connections. The 'Get' method of the HTTPSocket is overloaded and can download data to a file or return data as a string; in both cases a timeout argument can be passed. <lang REALbasic>

 Dim sock As New HTTPSocket
 Print(sock.Get("http://www.rosettacode.org", 10))  //set the timeout period to 10 seconds.

</lang>

REBOL

<lang REBOL> print read http://rosettacode.org </lang>

RLaB

RLaB supports HTTP/FTP through its Read/Write facilities, which are organized around the concept of a Uniform Resource Locator (URL):

protocol://address

RLaB accepts the following values for protocol:

1. file or omitted, for generic text files or files in native binary format (partially compatible with matlab binary format);
2. h5 or hdf5 for files that use Hierarchical Data Format 5 (HDF5) version 1.8.0 and later. Here protocol can be omitted while address has to end with .h5 (file extension);
3. http, https, or ftp for accessing the data and files on web- and ftp-servers;
4. tcp, for accessing sockets over tcp/ip protocol;
5. serial, for accessing serial port on Un*x type systems.

For these URLs RLaB provides internal book-keeping: it keeps track of the open ones and, for example upon quitting, closes them and releases the internal resources it allocated for managing them.

For accessing URLs on the world wide web, RLaB implements the cURL library (libcurl) [2] and its "easy" interface.

That said, this is how one would download financial data for Pfizer from Yahoo [3].

<lang RLaB> // get csv data from Yahoo for Pfizer (PFE)
url="http://ichart.finance.yahoo.com/table.csv?s=PFE&a=00&b=4&c=1982&d=00&e=10&f=2010&g=d&ignore=.csv";

opt = <<>>;
// opt.CURLOPT_PROXY = "your.proxy.here";
// opt.CURLOPT_PROXYPORT = YOURPROXYPORT;
// opt.CURLOPT_PROXYTYPE = "http";
open(url, opt);
x = readm(url);
close (url); </lang>

Ruby

The simple way loads the entire content into memory, then prints it.

<lang ruby> require 'open-uri'

print open("http://rosettacode.org") {|f| f.read} </lang>

If the content might be large, the better way uses FileUtils.copy_stream.

<lang ruby> require 'fileutils' require 'open-uri'

open("http://rosettacode.org/") {|f| FileUtils.copy_stream(f, $stdout)} </lang>

Run BASIC

<lang runbasic>print httpget$("http://rosettacode.org/wiki/Main_Page")</lang>

Scala

<lang scala> import scala.io._

object HttpTest {

  def main(args: Array[String]): Unit = {
     //if you are behind a firewall you can configure your proxy
     System.setProperty("http.proxySet", "true")
     System.setProperty("http.proxyHost", "0.0.0.0")
     System.setProperty("http.proxyPort", "8080")
     Source.fromURL("http://www.rosettacode.org").getLines.foreach(println)
  }

} </lang>

Scheme

Works with: Guile

<lang scheme>
; Use the regular expression module to parse the url (included with Guile)
(use-modules (ice-9 regex))

; Set the url and parse the hostname, port, and path into variables
(define url "http://www.rosettacode.org/wiki/HTTP")
(define r (make-regexp "^(http://)?([^:/]+)(:)?(([0-9])+)?(/.*)?" regexp/icase))
(define host (match:substring (regexp-exec r url) 2))
(define port (match:substring (regexp-exec r url) 4))
(define path (match:substring (regexp-exec r url) 6))

; Set port to 80 if it wasn't set above and convert from a string to a number
(if (eq? port #f) (define port "80"))
(define port (string->number port))

; Connect to remote host on specified port
(let ((s (socket PF_INET SOCK_STREAM 0)))
        (connect s AF_INET (car (hostent:addr-list (gethostbyname host))) port)
        ; Send a HTTP request for the specified path
        (display "GET " s)
        (display path s)
        (display " HTTP/1.0\r\n\r\n" s)
        ; Display the received HTML
        (do ((c (read-char s) (read-char s))) ((eof-object? c))
                (display c)))

</lang>

Seed7

The gethttp.s7i library contains the function getHttp, which gets data specified by a URL using the HTTP protocol.

<lang seed7> $ include "seed7_05.s7i";

 include "gethttp.s7i";

const proc: main is func

 begin
   writeln(getHttp("www.rosettacode.org"));
 end func;</lang>

SNOBOL4

Works with: Macro SNOBOL4 in C

<lang snobol>-include "tcp.sno"
        tcp.open(.conn,'www.rosettacode.org','http')   :s(cont1)
        terminal = "cannot open"                       :(end)
cont1   conn = "GET http://rosettacode.org/wiki/Main_Page HTTP/1.0" char(10) char(10)
while   output = conn                                  :s(while)
        tcp.close(.conn)
end </lang>

Smalltalk

Works with: Pharo

<lang smalltalk> Transcript show: 'http://rosettacode.org' asUrl retrieveContents contentStream. </lang>

Tcl

Note that the http package is distributed as part of Tcl.

<lang tcl> package require http
set request [http::geturl "http://www.rosettacode.org"]
puts [http::data $request]
http::cleanup $request</lang>

TSE SAL

<lang TSE SAL>

DLL "<urlmon.dll>"

INTEGER PROC FNUrlGetSourceApiI(
 INTEGER lpunknown,
 STRING urlS : CSTRVAL,
 STRING filenameS : CSTRVAL,
 INTEGER dword,
 INTEGER tlpbindstatuscallback

) : "URLDownloadToFileA" END

// library: url: get: source <description></description> <version control></version control> <version>1.0.0.0.3</version> (filenamemacro=geturgso.s) [kn, ri, su, 13-04-2008 05:12:53]
PROC PROCUrlGetSource( STRING urlS, STRING filenameS )

FNUrlGetSourceApiI( 0, urlS, filenameS, 0, 0 )

END

PROC Main()
 STRING s1[255] = "http://www.google.com/index.html"
 STRING s2[255] = "c:\temp\ddd.txt"
 IF ( NOT ( Ask( "url: get: source: urlS = ", s1, _EDIT_HISTORY_ ) ) AND ( Length( s1 ) > 0 ) ) RETURN() ENDIF
 IF ( NOT ( AskFilename( "url: get: source: filenameS = ", s2, _DEFAULT_, _EDIT_HISTORY_ ) ) AND ( Length( s2 ) > 0 ) ) RETURN() ENDIF

PROCUrlGetSource( s1, s2 )
EditFile( s2 )

END

</lang>

TUSCRIPT

<lang tuscript> $$ MODE TUSCRIPT
SET DATEN = REQUEST ("http://www.rosettacode.org")
*{daten}

</lang>

UNIX Shell

<lang bash>curl -s -L http://rosettacode.org/</lang>

<lang bash>lynx -source http://rosettacode.org/</lang>

<lang bash>wget -O - -q http://rosettacode.org/</lang>

<lang bash>lftp -c "cat http://rosettacode.org/"</lang>

Works with: BSD

<lang bash>ftp -o - http://rosettacode.org 2>/dev/null</lang>

VBScript

Based on code at "How to retrieve HTML web pages with VBScript via the Microsoft.XmlHttp object".

<lang vb> Option Explicit

Const sURL="http://rosettacode.org/"

Dim oHTTP
Set oHTTP = CreateObject("Microsoft.XmlHTTP")

On Error Resume Next
oHTTP.Open "GET", sURL, False
oHTTP.Send ""
If Err.Number = 0 Then

    WScript.Echo oHTTP.responseText

Else

    Wscript.Echo "error " & Err.Number & ": " & Err.Description

End If

Set oHTTP = Nothing </lang>

Visual Basic .NET

<lang vbnet> Imports System.Net

Dim client As WebClient = New WebClient()
Dim content As String = client.DownloadString("http://www.google.com")
Console.WriteLine(content) </lang>

Zsh

<lang zsh> zmodload zsh/net/tcp
ztcp example.com 80
fd=$REPLY
print -l -u $fd -- 'GET / HTTP/1.1' 'Host: example.com'
while read -u $fd -r -e -t 1; do; :; done
ztcp -c $fd </lang>
