I am fairly new to coding in golang and am struggling with the panic/recover process for a bad url request. Below is a script which queries a list of URLs and outputs responses. Occasionally a bad url is entered or a server is down and the HTTP request fails which causes a panic. I am not clear on how to recover from this and continue. I want the program to recover from the panic, document the bad url and error, and continue down the list of urls outputting the failed url and error with the rest of the normal url response data.

package main

import (
    "fmt"
    "net/http"
)

var urls = []string{
    "http://www.google.com",        //good url, 200
    "http://www.googlegoogle.com/", //bad url
    "http://www.zoogle.com",        //500 example
}

//CONCURRENT HTTP REQUESTS -------------------------------------------
func MakeRequest(url string, ch chan<- string) {
    resp, err := http.Get(url)
    if err != nil {
        fmt.Println("Error Triggered", err)
        ch <- fmt.Sprintf("err: %s", err)
    }
    ch <- fmt.Sprintf("url: %s, status: %s ", url, resp.Status) // put response into a channel
    resp.Body.Close()
}

func main() {
    output := make([][]string, 0) //define an array to hold responses

    //PANIC RECOVER------------------------------
    defer func() { //catch or finally
        if r := recover(); r != nil { //catch
            fmt.Println("Recover Triggered: ", r)
        }
    }()

    //MAKE URL REQUESTS----------------------------------------------
    for _, url := range urls {
        ch := make(chan string)                 //create a channel for each request
        go MakeRequest(url, ch)                 //make concurrent http request
        output = append(output, []string{<-ch}) //append output to an array
    }

    //PRINT OUTPUT ----------------------
    for _, value := range output {
        fmt.Println(value)
    }
}

I am looking for an output similar to:

[url: http://www.google.com, status: 200 OK ]

[url: http://www.zoogle.com, status: 500 Internal Server Error ]