How to convert *sql.Rows to typed JSON in Golang

Here is a better solution, using reflection. It handles types correctly (e.g. a string value of "true" won't erroneously be turned into a bool, and so on).

It also handles possibly-null types (only tested with MySQL - you will probably need to modify it for other drivers).

package main

import (
    "database/sql"
    "encoding/json"
    "fmt"
    "reflect"

    "github.com/go-sql-driver/mysql"
)

// Additional scan types returned by the MySQL driver. I haven't looked at
// what PostgreSQL does.

type jsonNullInt64 struct {
    sql.NullInt64
}

func (v jsonNullInt64) MarshalJSON() ([]byte, error) {
    if !v.Valid {
        return json.Marshal(nil)
    }
    return json.Marshal(v.Int64)
}

type jsonNullFloat64 struct {
    sql.NullFloat64
}

func (v jsonNullFloat64) MarshalJSON() ([]byte, error) {
    if !v.Valid {
        return json.Marshal(nil)
    }
    return json.Marshal(v.Float64)
}

type jsonNullTime struct {
    mysql.NullTime
}

func (v jsonNullTime) MarshalJSON() ([]byte, error) {
    if !v.Valid {
        return json.Marshal(nil)
    }
    return json.Marshal(v.Time)
}

var jsonNullInt64Type = reflect.TypeOf(jsonNullInt64{})
var jsonNullFloat64Type = reflect.TypeOf(jsonNullFloat64{})
var jsonNullTimeType = reflect.TypeOf(jsonNullTime{})
var nullInt64Type = reflect.TypeOf(sql.NullInt64{})
var nullFloat64Type = reflect.TypeOf(sql.NullFloat64{})
var nullTimeType = reflect.TypeOf(mysql.NullTime{})

// SQLToJSON takes an SQL result and converts it to a nice JSON form. It also
// handles possibly-null values nicely. See https://stackoverflow.com/a/52572145/265521
func SQLToJSON(rows *sql.Rows) ([]byte, error) {
    columns, err := rows.Columns()
    if err != nil {
        return nil, fmt.Errorf("Column error: %v", err)
    }

    tt, err := rows.ColumnTypes()
    if err != nil {
        return nil, fmt.Errorf("Column type error: %v", err)
    }

    types := make([]reflect.Type, len(tt))
    for i, tp := range tt {
        st := tp.ScanType()
        if st == nil {
            return nil, fmt.Errorf("Scantype is nil for column %q", columns[i])
        }
        switch st {
        case nullInt64Type:
            types[i] = jsonNullInt64Type
        case nullFloat64Type:
            types[i] = jsonNullFloat64Type
        case nullTimeType:
            types[i] = jsonNullTimeType
        default:
            types[i] = st
        }
    }

    values := make([]interface{}, len(tt))
    data := make(map[string][]interface{})

    for rows.Next() {
        for i := range values {
            values[i] = reflect.New(types[i]).Interface()
        }
        err = rows.Scan(values...)
        if err != nil {
            return nil, fmt.Errorf("Failed to scan values: %v", err)
        }
        for i, v := range values {
            data[columns[i]] = append(data[columns[i]], v)
        }
    }

    if err := rows.Err(); err != nil {
        return nil, fmt.Errorf("Rows error: %v", err)
    }

    return json.Marshal(data)
}
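
For reference, here is a minimal sketch of how SQLToJSON might be called. The DSN, the query, and the extra "log" import are placeholders I've assumed; they are not part of the answer above:

    db, err := sql.Open("mysql", "user:password@/dbname?parseTime=true")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // Hypothetical query; parseTime=true tells the driver to return
    // time.Time for DATE/DATETIME columns instead of raw []byte.
    rows, err := db.Query("SELECT id, name, created_at FROM users")
    if err != nil {
        log.Fatal(err)
    }
    defer rows.Close()

    out, err := SQLToJSON(rows)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(out))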

This is the best implementation I was able to come up with that keeps it dynamic, and it is significantly shorter than my original. Since I've seen this type of question quite a bit, I hope it helps others. I am open to other answers with a better implementation:

func (d *DbDao) makeStructJSON(queryText string, w http.ResponseWriter) error {

    // returns rows *sql.Rows
    rows, err := d.db.Query(queryText)
    if err != nil {
        return err
    }
    defer rows.Close()
    columns, err := rows.Columns()
    if err != nil {
        return err
    }

    count := len(columns)
    values := make([]interface{}, count)
    scanArgs := make([]interface{}, count)
    for i := range values {
        scanArgs[i] = &values[i]
    }

    masterData := make(map[string][]interface{})

    for rows.Next() {
        err := rows.Scan(scanArgs...)
        if err != nil {
            return err
        }
        for i, v := range values {

            // NULL columns (and drivers that hand back something other than
            // []byte) are passed through unchanged instead of panicking.
            x, ok := v.([]byte)
            if !ok {
                masterData[columns[i]] = append(masterData[columns[i]], v)
                continue
            }

            // NOTE: from the Go blog, "JSON and Go" (25 Jan 2011):
            // The json package uses map[string]interface{} and []interface{} values
            // to store arbitrary JSON objects and arrays; it will happily unmarshal
            // any valid JSON blob into a plain interface{} value. The default
            // concrete Go types are:
            //
            // bool for JSON booleans,
            // float64 for JSON numbers,
            // string for JSON strings, and
            // nil for JSON null.

            if nx, err := strconv.ParseFloat(string(x), 64); err == nil {
                masterData[columns[i]] = append(masterData[columns[i]], nx)
            } else if b, err := strconv.ParseBool(string(x)); err == nil {
                masterData[columns[i]] = append(masterData[columns[i]], b)
            } else {
                // Anything that is neither a number nor a bool is kept as a string.
                masterData[columns[i]] = append(masterData[columns[i]], string(x))
            }
        }
    }

    w.Header().Set("Content-Type", "application/json")

    return json.NewEncoder(w).Encode(masterData)
}
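
In case it helps, here is a rough sketch of how this method could be wired into an HTTP handler. The DbDao definition, DSN, route, and query are my assumptions, since they are not shown above:

    // Assumed definition: DbDao just wraps *sql.DB here.
    type DbDao struct {
        db *sql.DB
    }

    func main() {
        // Placeholder DSN; the driver is registered via a blank import:
        //   _ "github.com/go-sql-driver/mysql"
        db, err := sql.Open("mysql", "user:password@/dbname")
        if err != nil {
            log.Fatal(err)
        }
        dao := &DbDao{db: db}

        http.HandleFunc("/users", func(w http.ResponseWriter, r *http.Request) {
            // Placeholder query.
            if err := dao.makeStructJSON("SELECT * FROM users", w); err != nil {
                http.Error(w, err.Error(), http.StatusInternalServerError)
            }
        })
        log.Fatal(http.ListenAndServe(":8080", nil))
    }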

This is a much better way to do it (tested with Postgres). No reflection needed:

    columnTypes, err := rows.ColumnTypes()

    if err != nil {
        return err
    }

    count := len(columnTypes)
    finalRows := []interface{}{}

    for rows.Next() {

        scanArgs := make([]interface{}, count)

        for i, v := range columnTypes {

            switch v.DatabaseTypeName() {
            case "VARCHAR", "TEXT", "UUID", "TIMESTAMP":
                scanArgs[i] = new(sql.NullString)
            case "BOOL":
                scanArgs[i] = new(sql.NullBool)
            case "INT4":
                scanArgs[i] = new(sql.NullInt64)
            default:
                scanArgs[i] = new(sql.NullString)
            }
        }

        err := rows.Scan(scanArgs...)

        if err != nil {
            return err
        }

        masterData := map[string]interface{}{}

        for i, v := range columnTypes {

            if z, ok := (scanArgs[i]).(*sql.NullBool); ok {
                masterData[v.Name()] = z.Bool
                continue
            }

            if z, ok := (scanArgs[i]).(*sql.NullString); ok {
                masterData[v.Name()] = z.String
                continue
            }

            if z, ok := (scanArgs[i]).(*sql.NullInt64); ok {
                masterData[v.Name()] = z.Int64
                continue
            }

            if z, ok := (scanArgs[i]).(*sql.NullFloat64); ok {
                masterData[v.Name()] = z.Float64
                continue
            }

            if z, ok := (scanArgs[i]).(*sql.NullInt32); ok {
                masterData[v.Name()] = z.Int32
                continue
            }

            masterData[v.Name()] = scanArgs[i]
        }

        finalRows = append(finalRows, masterData)
    }


    z, err := json.Marshal(finalRows)
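
The snippet stops at the json.Marshal call. Assuming it lives in a handler-style function with w http.ResponseWriter in scope and an error return (my assumption, mirroring the previous answer), it could finish like this:

    if err != nil {
        return err
    }

    // Write the marshalled rows back to the client as JSON.
    w.Header().Set("Content-Type", "application/json")
    _, err = w.Write(z)
    return err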