Why selecting rowversion using an outer join never returns null

This appears to be a bug in .NET's SqlClient. The column is being returned as an empty byte[] even though it's marked as null in the result.

E.g.:

using System;
using System.Data.Odbc;
using System.Data.SqlClient;
using System.Data.SqlTypes;

namespace ConsoleApp14
{
    class Program
    {

        static void Main(string[] args)
        {

            // 1. SqlDataReader: the NULL rowversion comes back as a non-null, empty byte[]
            using (var con = new SqlConnection("Server=localhost;database=tempdb;Integrated Security=true"))
            {
                con.Open();
                var cmd = con.CreateCommand();
                cmd.CommandText = "select cast(null as rowversion) rv";
                using (var rdr = cmd.ExecuteReader())
                {
                    rdr.Read();
                    var allowDbNull = rdr.GetColumnSchema()[0].AllowDBNull;
                    var isNull = rdr.IsDBNull(0);
                    var val = rdr[0];

                    Console.WriteLine($"SqlClient: AllowDbNull {allowDbNull} IsDbNull: {isNull} {val.GetType().Name} {val}");

                }
            }


            // 2. SqlClient output parameter: the NULL is reported correctly
            using (var con = new SqlConnection("Server=localhost;database=tempdb;Integrated Security=true"))
            {
                con.Open();
                var cmd = con.CreateCommand();
                cmd.CommandText = "select @val = cast(null as rowversion) ";

                var p = cmd.Parameters.Add(new SqlParameter("@val", System.Data.SqlDbType.Timestamp));
                p.Direction = System.Data.ParameterDirection.Output;

                cmd.ExecuteNonQuery();

                SqlBinary val = (SqlBinary)p.SqlValue;
                Console.WriteLine($"SqlClient (parameter): IsDbNull: {val.IsNull} {val.GetType().Name} {val}");
            }

            // 3. ODBC: the NULL is reported correctly
            using (var con = new OdbcConnection("Driver={ODBC Driver 17 for SQL Server};Server=localhost;Trusted_Connection=yes"))
            {
                con.Open();
                var cmd = con.CreateCommand();
                cmd.CommandText = "select cast(null as rowversion) rv";
                using (var rdr = cmd.ExecuteReader())
                {
                    rdr.Read();
                    var allowDbNull = rdr.GetSchemaTable().Rows[0]["AllowDBNull"];
                    var isNull = rdr.IsDBNull(0);
                    var val = rdr[0];

                    Console.WriteLine($"ODBC:      AllowDbNull {allowDbNull} IsDbNull: {isNull} {val.GetType().Name} {val}");

                }

            }


        }
    }
}

Outputs

SqlClient: AllowDbNull True IsDbNull: False Byte[] System.Byte[]
SqlClient (parameter): IsDbNull: True SqlBinary Null
ODBC:      AllowDbNull True IsDbNull: True DBNull

I've opened an issue at https://github.com/dotnet/SqlClient/issues/255, but it's likely to be closed as WontFix. According to notes in the source:

// Dev10 Bug #479607 - this should have been the same as SqlDbType.Binary, but it's a rejected breaking change

The issue has been raised before, but the fix was rejected as a breaking change. It may get fixed in .NET Core, which is full of breaking changes anyway, and left as-is in .NET Framework.
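
Until it is fixed, one client-side mitigation is to treat the empty byte[] as null yourself, since IsDBNull can't be trusted for a rowversion column read through SqlDataReader. A rough sketch (the helper is mine, not part of SqlClient):

// Workaround sketch: SqlClient hands back a NULL rowversion as an empty byte[]
// with IsDBNull == false, so check the length as well.
static byte[] GetRowVersionOrNull(SqlDataReader rdr, int ordinal)
{
    if (rdr.IsDBNull(ordinal))
        return null;                             // normal path for other types/drivers

    var bytes = (byte[])rdr.GetValue(ordinal);
    return bytes.Length == 0 ? null : bytes;     // empty array => the value was really NULL
}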


Aside from the correct and accepted answer being "it's a bug", what you can do about it is what ypercube suggests in a comment: cast it to VARBINARY:

SELECT ft.[Id],
       CAST(ft.[RowVersion] AS varbinary(8)) AS [RowVersion],
       bt.[Id]
FROM FooTable ft
     FULL OUTER JOIN BarTable bt
     ON ft.Id = bt.Id;
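
With the cast in place, SqlClient goes down the ordinary varbinary code path, so IsDBNull behaves as expected. A quick way to check, reusing the tempdb test from the top of this answer (a sketch; the cast query is mine):

cmd.CommandText = "select cast(cast(null as rowversion) as varbinary(8)) rv";
using (var rdr = cmd.ExecuteReader())
{
    rdr.Read();
    // IsDBNull is now True and rdr[0] is DBNull.Value, matching the ODBC output above.
    Console.WriteLine($"SqlClient (varbinary cast): IsDbNull: {rdr.IsDBNull(0)} {rdr[0].GetType().Name}");
}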