# Smallest region of the plane that contains all free n-ominoes

In the interest of getting the process started, here's a quick (but not very optimal) answer.

### Pattern:

n = 8:
########
######
#####
####
###
##


Take a triangle with length n - 1, stick an extra square onto the corner, and cut off the bottom square.

### Proof that all n-ominoes fit:

Note that any n-omino can fit in a rectangle with length + width at most n + 1.

If an n-omino fits in a rectangle with length + width at most n, it fits nicely in the triangle (which is the union of all such rectangles). If it happens to use the cut-off square, transposing it will fit in the triangle.

Otherwise, we have a chain with at most one branch. We can always fit one end of the chain into the extra square (prove this with casework), and the rest fits into a rectangle with length + width at most n, reducing to the case above.

The only case where the above doesn't work is the case where we use both the extra square and the cut-off square. There's only one such n-omino (the long L), and that one fits inside the triangle transposed.

### Code (Python 2):

def f(n):
if n < 7:
return [0, 1, 2, 4, 6, 9, 12][n]
return n * (n - 1) / 2


### Table:

1:   1
2:   2
3:   4
4:   6
5:   9
6:  12
7:  21
8:  28
9:  36
10:  45
11:  55
12:  66
13:  78
14:  91
15: 105
16: 120
17: 136
18: 153
19: 171
20: 190
... more cases can be generated if necessary.


## C#, score: 1, 2, 4, 6, 9, 12, 17, 20, 26, 31, 38, 44

#

##

#..
###

.##.
####

..#..
#####
###..

##....
######
####..

..##...
.###...
#######
.#####.

..###...
..###...
..######
########

..##.....
.###.....
#######..
#########
..#####..

.###......
.####.....
.######...
##########
.########.

.###.......
.####......
.####......
.#######...
###########
.#########.

.####.......
#####.......
.#####......
############
.##########.
.########...


The output format of the program is a bit more compact.

This uses a seeded random approach, and I've optimised the seeds. I enforce a bounding box constraint which is both plausible and consistent with the known data for small values of n. If that constraint is indeed valid then

1. The output is optimal up to n=8 (by brute force validation, not included).
2. The number of optimal solutions (distinct up to symmetry) begins 1, 1, 2, 2, 2, 6, 63, 6.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;

namespace Sandbox
{
class FreePolyomino : IEquatable<FreePolyomino>
{
public static void Main()
{
for (int i = 1; i < 12; i++)
{
int seed;
switch (i) {
default: seed = 1103199029; break;
case 9: seed = 693534956; break; // 26
case 10: seed = 2005746461; break; // 31
case 11: seed = 377218946; break; // 38
case 12: seed = 1281379414; break; // 44
}

var rnd = new Random(seed);
var polys = FreePolyomino.All(i);
var minUnion = FreePolyomino.RandomMinimalUnion2(polys, rnd, i, (i + 1) >> 1);
Console.WriteLine($"{i}\t{minUnion.Weight}\t{minUnion}"); } } internal FreePolyomino(OrientedPolyomino orientation) { var orientations = new HashSet<OrientedPolyomino>(); orientations.Add(orientation); var tmp = orientation.Rot90(); orientations.Add(tmp); tmp = tmp.Rot90(); orientations.Add(tmp); tmp = tmp.Rot90(); orientations.Add(tmp); tmp = tmp.FlipV(); orientations.Add(tmp); tmp = tmp.Rot90(); orientations.Add(tmp); tmp = tmp.Rot90(); orientations.Add(tmp); tmp = tmp.Rot90(); orientations.Add(tmp); OrientedPolyominos = orientations.OrderBy(x => x).ToArray(); } public IReadOnlyList<OrientedPolyomino> OrientedPolyominos { get; private set; } public OrientedPolyomino CanonicalOrientation => OrientedPolyominos[0]; public static IEnumerable<FreePolyomino> All(int numCells) { if (numCells < 1) throw new ArgumentOutOfRangeException(nameof(numCells)); if (numCells == 1) return new FreePolyomino[] { new FreePolyomino(OrientedPolyomino.Unit) }; // We do this in two phases because identifying two equal oriented polyominos is faster than first building // free polyominos and then identifying that they're equal. var oriented = new HashSet<OrientedPolyomino>(); foreach (var smaller in All(numCells - 1)) { // We can add a cell to a side. The easiest way to do this is to add to the bottom of one of the rotations. // TODO Optimise by distinguishing the symmetries. foreach (var orientation in smaller.OrientedPolyominos) { int h = orientation.BBHeight; var bottomRow = orientation.Rows[h - 1]; for (int deltax = 0; deltax < orientation.BBWidth; deltax++) { if (((bottomRow >> deltax) & 1) == 1) oriented.Add(orientation.Union(OrientedPolyomino.Unit, deltax, h)); } } // We can add a cell in the middle, provided it connects up. var canon = smaller.CanonicalOrientation; uint prev = 0, curr = 0, next = canon.Rows[0]; for (int y = 0; y < canon.BBHeight; y++) { (prev, curr, next ) = (curr, next, y + 1 < canon.BBHeight ? 
canon.Rows[y + 1] : 0); uint valid = (prev | next | (curr << 1) | (curr >> 1)) & ~curr; for (int x = 0; x < canon.BBWidth; x++) { if (((valid >> x) & 1) == 1) oriented.Add(canon.Union(OrientedPolyomino.Unit, x, y)); } } } // Now cluster the oriented polyominos into equivalence classes under dihedral symmetry. return new HashSet<FreePolyomino>(oriented.Select(orientation => new FreePolyomino(orientation))); } internal static OrientedPolyomino RandomMinimalUnion2(IEnumerable<FreePolyomino> polys, Random rnd, int maxWidth, int maxHeight, int target = int.MaxValue) { var union = OrientedPolyomino.Unit; foreach (var poly in polys.Shuffle(rnd).ToList()) { union = poly.MinimalUnion(union, rnd, maxWidth, maxHeight); if (union.Weight > target) throw new Exception("Too heavy"); } return new FreePolyomino(union).CanonicalOrientation; } private OrientedPolyomino MinimalUnion(FreePolyomino other, Random rnd, int maxWidth, int maxHeight) { // Choose the option which does least work. return OrientedPolyominos.Count <= other.OrientedPolyominos.Count ? 
MinimalUnion(other.CanonicalOrientation, rnd, maxWidth, maxHeight) : other.MinimalUnion(CanonicalOrientation, rnd, maxWidth, maxHeight); } private OrientedPolyomino MinimalUnion(OrientedPolyomino other, Random rnd, int maxWidth, int maxHeight) { OrientedPolyomino best = default(OrientedPolyomino); int containsWeight = Math.Min(CanonicalOrientation.Weight, other.Weight); int bestWeight = int.MaxValue; int ties = 0; foreach (var orientation in OrientedPolyominos) { // Bounding boxes must overlap, but otherwise we brute force for (int dx = 1 - orientation.BBWidth; dx < other.BBWidth; dx++) { for (int dy = 1 - orientation.BBHeight; dy < other.BBHeight; dy++) { var union = other.Union(orientation, dx, dy, maxWidth, maxHeight); if (union.Rows == null) continue; if (union.Weight == containsWeight) return union; if (union.Weight < bestWeight) { best = union; bestWeight = union.Weight; ties = 1; } else if (union.Weight == bestWeight) { ties++; if (rnd.Next(ties) == 0) best = union; } } } } if (best.Rows == null) throw new Exception(); return best; } public bool Equals(FreePolyomino other) => other != null && CanonicalOrientation.Equals(other.CanonicalOrientation); public override bool Equals(object obj) => Equals(obj as FreePolyomino); public override int GetHashCode() => CanonicalOrientation.GetHashCode(); } [DebuggerDisplay("{ToString()}")] struct OrientedPolyomino : IComparable<OrientedPolyomino>, IEquatable<OrientedPolyomino> { public static readonly OrientedPolyomino Unit = new OrientedPolyomino(1); public OrientedPolyomino(params uint[] rows) { if (rows.Length == 0) throw new ArgumentException("We don't support the empty polyomino", nameof(rows)); if (rows.Any(row => row == 0) || rows.All(row => (row & 1) == 0)) throw new ArgumentException("Polyomino is not packed into the corner", nameof(rows)); var colsUsed = rows.Aggregate(0U, (accum, row) => accum | row); BBWidth = Helper.Width(colsUsed); if (colsUsed != ((1U << BBWidth) - 1)) throw new 
ArgumentException("Polyomino has empty columns", nameof(rows)); Rows = rows; } public IReadOnlyList<uint> Rows { get; private set; } public int BBWidth { get; private set; } public int BBHeight => Rows.Count; #region Dihedral symmetries public OrientedPolyomino FlipH() { int width = BBWidth; return new OrientedPolyomino(Rows.Select(x => Helper.Reverse(x, width)).ToArray()); } public OrientedPolyomino FlipV() => new OrientedPolyomino(Rows.Reverse().ToArray()); public OrientedPolyomino Rot90() { uint[] rot = new uint[BBWidth]; for (int y = 0; y < BBHeight; y++) { for (int x = 0; x < BBWidth; x++) { rot[x] |= ((Rows[y] >> x) & 1) << (BBHeight - 1 - y); } } return new OrientedPolyomino(rot); } #endregion #region Conglomeration public OrientedPolyomino Union(OrientedPolyomino other, int deltax, int deltay, int maxWidth = int.MaxValue, int maxHeight = int.MaxValue) { // NB deltax or deltay could be negative int minCol = Math.Min(0, deltax); int maxCol = Math.Max(BBWidth - 1, other.BBWidth - 1 + deltax); int width = maxCol + 1 - minCol; if (width > maxWidth) return default(OrientedPolyomino); int minRow = Math.Min(0, deltay); int maxRow = Math.Max(BBHeight - 1, other.BBHeight - 1 + deltay); int height = maxRow + 1 - minRow; if (height > maxHeight) return default(OrientedPolyomino); uint[] unionRows = new uint[height]; for (int y = 0; y < BBHeight; y++) { unionRows[(deltay < 0 ? -deltay : 0) + y] |= Rows[y] << (deltax < 0 ? -deltax : 0); } for (int y = 0; y < other.BBHeight; y++) { unionRows[(deltay < 0 ? 0 : deltay) + y] |= other.Rows[y] << (deltax < 0 ? 0 : deltax); } return new OrientedPolyomino(unionRows); } #endregion #region Identity public int CompareTo(OrientedPolyomino other) { // Favour wide-and-short orientations for the canonical one. 
if (BBHeight != other.BBHeight) return BBHeight.CompareTo(other.BBHeight); for (int i = 0; i < BBHeight; i++) { if (Rows[i] != other.Rows[i]) return Rows[i].CompareTo(other.Rows[i]); } return 0; } public bool Equals(OrientedPolyomino other) => CompareTo(other) == 0; public override int GetHashCode() => Rows.Aggregate(0, (h, row) => h * 37 + (int)row); public override bool Equals(object obj) => (obj is OrientedPolyomino other) && Equals(other); public override string ToString() { var width = BBWidth; return string.Join("_", Rows.Select(row => Helper.ToString(row, width))); } #endregion public int Weight => Rows.Sum(row => (int)Helper.Weight(row)); } static class Helper { public static int Width(uint x) { int w = 0; if ((x >> 16) != 0) { w += 16; x >>= 16; } if ((x >> 8) != 0) { w += 8; x >>= 8; } if ((x >> 4) != 0) { w += 4; x >>= 4; } if ((x >> 2) != 0) { w += 2; x >>= 2; } switch (x) { case 0: break; case 1: w++; break; case 2: case 3: w += 2; break; default: throw new Exception("Unreachable code"); } return w; } public static uint Reverse(uint x, int width) { uint rev = 0; while (width-- > 0) { rev = (rev << 1) | (x & 1); x >>= 1; } return rev; } internal static string ToString(uint x, int width) { char[] chs = new char[width]; for (int i = 0; i < width; i++) { chs[i] = (char)('0' + (x & 1)); x >>= 1; } return new string(chs); } internal static uint Weight(uint v) { // https://graphics.stanford.edu/~seander/bithacks.html v = v - ((v >> 1) & 0x55555555); v = (v & 0x33333333) + ((v >> 2) & 0x33333333); return ((v + (v >> 4) & 0xF0F0F0F) * 0x1010101) >> 24; } public static IEnumerable<T> Shuffle<T>(this IEnumerable<T> elts, Random rnd = null) { rnd = rnd ?? 
new Random(); T[] arr = elts.ToArray(); int n = arr.Length; while (n > 0) { int idx = rnd.Next(n - 1); yield return arr[idx]; arr[idx] = arr[n - 1]; arr[n - 1] = default(T); // Help GC if T is a class n--; } } } }  Online demo ## C# and SAT: 1, 2, 4, 6, 9, 12, 17, 20, 26, 31, 37, 43 If we limit the bounding box, there is a fairly obvious expression of the problem in terms of SAT: each translation of each orientation of each free polyomino is a large conjunction; for each polyomino we form a disjunction over its conjunctions; and then we require each disjunction to be true and the total number of cells used to be limited. To limit the number of cells my initial version built a full adder; then I used bitonic sort for unary counting (similar to this earlier answer but generalised); finally I settled on the approach described by Bailleux and Boufkhad in Efficient CNF encoding of Boolean cardinality constraints. I wanted to make the post self-contained, so I dug out a C# implementation of a SAT solver with a BSD licence which was state-of-the-art about 15 years ago, replaced its NIH list implementation with System.Collections.Generic.List<T> (gaining a factor of 2 in speed), golfed it from 50kB down to 31kB to fit in the 64kB post limit, and then did some aggressive work on reducing memory usage. This code can obviously be adapted to output a DIMACS file which can then be passed to more modern solvers. ## Solutions found # ## ### ..# #### .##. ..#.. ##### ..### .####. ###### .##... ....#.. ####### #####.. .####.. ######## ..###### .....### .....### ######### #######.. ..#####.. ....##... ....###.. ########## ########.. ..######.. ....####.. .....###.. ..#######.. ..######### ########### ..####..... ..####..... ..##....... ...#######.. ...######### ############ ..#####....# ..#####..... ...####.....  To find 43 for n=12 took a bit over 7.5 hours. 
### Polyomino code using MiniSAT; using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; namespace PPCG167484 { internal class SatGenerator { public static void Main() { for (int n = 1; n < 13; n++) { int width = n; int height = (n + 1) >> 1; var polys = FreePolyomino.All(n); (var solver, var unaryWeights) = Generate(polys, width, height); int previous = width * height + 1; while (true) { Stopwatch sw = new Stopwatch(); sw.Start(); if (solver.Solve()) { // The weight of the solution might be smaller than the target int weight = Enumerable.Range(0, width * height).Count(x => solver.Model[x] == Solver.l_True); Console.Write($"{n}\t<={weight}\t{sw.Elapsed.TotalSeconds:F3}s\t");
int cell = 0;
for (int y = 0; y < height; y++)
{
if (y > 0) Console.Write('_');
for (int x = 0; x < width; x++) Console.Write(solver.Model[cell++] == Solver.l_True ? '#' : '.');
}
Console.WriteLine();

// Now knock out that weight
for (int i = weight - 1; i < previous - 1; i++) solver.AddClause(~unaryWeights[i]);
previous = weight;
}
else
{
Console.WriteLine("--------");
break;
}
}
}
}

public static Tuple<Solver, Solver.Lit[]> Generate(IEnumerable<FreePolyomino> polys, int width, int height)
{
var solver = new Solver();

if (width == 12) solver.Prealloc(6037071 + 448, 72507588 + 6008); // HACK!

// Variables: 0 to width * height - 1 are the cells available to fill.
for (int i = 0; i < width * height; i++) solver.NewVar();

foreach (var poly in polys)
{
// We naturally get a DNF: each position of each orientation is a conjunction of poly.Weight variables,
// and we require any one. Therefore we add an auxiliary variable per.

var polyAuxs = new List<Solver.Lit>();
foreach (var orientation in poly.OrientedPolyominos)
{
int maxh = height;
// Optimisation: break symmetry
if (orientation.BBHeight == 1) maxh = ((height + 1) >> 1);

for (int dy = 0; dy + orientation.BBHeight <= maxh; dy++)
{
for (int dx = 0; dx + orientation.BBWidth <= width; dx++)
{
var currentAux = solver.NewVar();
for (int y = 0; y < orientation.BBHeight; y++)
{
uint tmp = orientation.Rows[y];
for (int x = 0; tmp > 0; x++, tmp >>= 1)
{
if ((tmp & 1) == 1) solver.AddClause(~currentAux, new Solver.Lit((y + dy) * width + x + dx));
}
}

}
}
}
}

var unaryWeights = _BBSum(0, width * height, solver);
return Tuple.Create(solver, unaryWeights);
}

private static Solver.Lit[] _BBSum(int from, int num, Solver solver)
{
var sum = new Solver.Lit[num];
if (num == 1) sum[0] = new Solver.Lit(from);
else
{
var left = _BBSum(from, num >> 1, solver);
var right = _BBSum(from + left.Length, num - left.Length, solver);

for (int i = 0; i < num; i++) sum[i] = solver.NewVar();
for (int alpha = 0; alpha <= left.Length; alpha++)
{
for (int beta = 0; beta <= right.Length; beta++)
{
var sigma = alpha + beta;
// C_1 = ~left[alpha-1] + ~right[beta-1] + sum[sigma-1]
if (alpha > 0 && beta > 0) solver.AddClause(~left[alpha - 1], ~right[beta - 1], sum[sigma - 1]);
else if (alpha > 0) solver.AddClause(~left[alpha - 1], sum[sigma - 1]);
else if (beta > 0) solver.AddClause(~right[beta - 1], sum[sigma - 1]);

// C_2 = left[alpha] + right[beta] + ~sum[sigma]
if (alpha < left.Length && beta < right.Length) solver.AddClause(left[alpha], right[beta], ~sum[sigma]);
else if (alpha < left.Length) solver.AddClause(left[alpha], ~sum[sigma]);
else if (beta < right.Length) solver.AddClause(right[beta], ~sum[sigma]);
}
}
}

return sum;
}
}

class FreePolyomino : IEquatable<FreePolyomino>
{
internal FreePolyomino(OrientedPolyomino orientation)
{
var orientations = new HashSet<OrientedPolyomino>();

OrientedPolyominos = orientations.OrderBy(x => x).ToArray();
}

public IReadOnlyList<OrientedPolyomino> OrientedPolyominos { get; private set; }

public OrientedPolyomino CanonicalOrientation => OrientedPolyominos[0];

public static IEnumerable<FreePolyomino> All(int numCells)
{
if (numCells < 1) throw new ArgumentOutOfRangeException(nameof(numCells));
if (numCells == 1) return new FreePolyomino[] { new FreePolyomino(OrientedPolyomino.Unit) };

// We do this in two phases because identifying two equal oriented polyominos is faster than first building
// free polyominos and then identifying that they're equal.
var oriented = new HashSet<OrientedPolyomino>();
foreach (var smaller in All(numCells - 1))
{
// We can add a cell to a side. The easiest way to do this is to add to the bottom of one of the rotations.
// TODO Optimise by distinguishing the symmetries.
foreach (var orientation in smaller.OrientedPolyominos)
{
int h = orientation.BBHeight;
var bottomRow = orientation.Rows[h - 1];
for (int deltax = 0; deltax < orientation.BBWidth; deltax++)
{
if (((bottomRow >> deltax) & 1) == 1)
{
var rows = orientation.Rows.Concat(Enumerable.Repeat(1U << deltax, 1)).ToArray();
}
}
}

// We can add a cell in the middle, provided it connects up.
var canon = smaller.CanonicalOrientation;
uint prev = 0, curr = 0, next = canon.Rows[0];
for (int y = 0; y < canon.BBHeight; y++)
{
(prev, curr, next ) = (curr, next, y + 1 < canon.BBHeight ? canon.Rows[y + 1] : 0);
uint valid = (prev | next | (curr << 1) | (curr >> 1)) & ~curr;
for (int x = 0; x < canon.BBWidth; x++)
{
if (((valid >> x) & 1) == 1)
{
var rows = canon.Rows.ToArray(); // Copy
rows[y] |= 1U << x;
}
}
}
}

// Now cluster the oriented polyominos into equivalence classes under dihedral symmetry.
return new HashSet<FreePolyomino>(oriented.Select(orientation => new FreePolyomino(orientation)));
}

public bool Equals(FreePolyomino other) => other != null && CanonicalOrientation.Equals(other.CanonicalOrientation);
public override bool Equals(object obj) => Equals(obj as FreePolyomino);
public override int GetHashCode() => CanonicalOrientation.GetHashCode();
}

[DebuggerDisplay("{ToString()}")]
struct OrientedPolyomino : IComparable<OrientedPolyomino>, IEquatable<OrientedPolyomino>
{
public static readonly OrientedPolyomino Unit = new OrientedPolyomino(1);

public OrientedPolyomino(params uint[] rows)
{
if (rows.Length == 0) throw new ArgumentException("We don't support the empty polyomino", nameof(rows));
if (rows.Any(row => row == 0) || rows.All(row => (row & 1) == 0)) throw new ArgumentException("Polyomino is not packed into the corner", nameof(rows));
var colsUsed = rows.Aggregate(0U, (accum, row) => accum | row);
BBWidth = Helper.Width(colsUsed);
if (colsUsed != ((1U << BBWidth) - 1)) throw new ArgumentException("Polyomino has empty columns", nameof(rows));
Rows = rows;
}

public IReadOnlyList<uint> Rows { get; private set; }
public int BBWidth { get; private set; }
public int BBHeight => Rows.Count;

#region Dihedral symmetries

public OrientedPolyomino FlipV() => new OrientedPolyomino(Rows.Reverse().ToArray());

public OrientedPolyomino Rot90()
{
uint[] rot = new uint[BBWidth];
for (int y = 0; y < BBHeight; y++)
{
for (int x = 0; x < BBWidth; x++)
{
rot[x] |= ((Rows[y] >> x) & 1) << (BBHeight - 1 - y);
}
}
return new OrientedPolyomino(rot);
}

#endregion

#region Identity

public int CompareTo(OrientedPolyomino other)
{
// Favour wide-and-short orientations for the canonical one.
if (BBHeight != other.BBHeight) return BBHeight.CompareTo(other.BBHeight);

for (int i = 0; i < BBHeight; i++)
{
if (Rows[i] != other.Rows[i]) return Rows[i].CompareTo(other.Rows[i]);
}

return 0;
}
public bool Equals(OrientedPolyomino other) => CompareTo(other) == 0;
public override int GetHashCode() => Rows.Aggregate(0, (h, row) => h * 37 + (int)row);
public override bool Equals(object obj) => (obj is OrientedPolyomino other) && Equals(other);
public override string ToString()
{
var width = BBWidth;
return string.Join("_", Rows.Select(row => Helper.ToString(row, width)));
}

#endregion
}

static class Helper
{
public static int Width(uint x)
{
int w = 0;
if ((x >> 16) != 0) { w += 16; x >>= 16; }
if ((x >> 8) != 0) { w += 8; x >>= 8; }
if ((x >> 4) != 0) { w += 4; x >>= 4; }
if ((x >> 2) != 0) { w += 2; x >>= 2; }
switch (x)
{
case 0: break;
case 1: w++; break;
case 2:
case 3: w += 2; break;
default: throw new Exception("Unreachable code");
}

return w;
}

internal static string ToString(uint x, int width)
{
char[] chs = new char[width];
for (int i = 0; i < width; i++)
{
chs[i] = (char)('0' + (x & 1));
x >>= 1;
}
return new string(chs);
}

internal static uint Weight(uint v)
{
// https://graphics.stanford.edu/~seander/bithacks.html
v = v - ((v >> 1) & 0x55555555);
v = (v & 0x33333333) + ((v >> 2) & 0x33333333);
return ((v + (v >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
}
}
}


### SAT solver code

/******************************************************************************************
MiniSat -- Copyright (c) 2003-2005, Niklas Een, Niklas Sorensson
MiniSatCS -- Copyright (c) 2006-2007 Michal Moskal
GolfMiniSat -- Copyright (c) 2018 Peter Taylor

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**************************************************************************************************/

using System;
using System.Diagnostics;
using System.Collections.Generic;

// NOTE! Variables are just integers. No abstraction here. They should be chosen from 0..N, so that they can be used as array indices.
using Var = System.Int32;
using System.Linq;

namespace MiniSAT
{
public static class Ext
{
private static int TargetCapacity(int size) =>
size < 65536 ? (size << 1) :
size < 1048576 ? (size + (size >> 1)) :
size + (size >> 2);

public static void Push<T>(this List<T> list, T elem)
{
// Similar to List<T>.Add but with a slower growth rate for large lists
if (list.Count == list.Capacity) list.Capacity = TargetCapacity(list.Count + 1);
}
public static void Pop<T>(this List<T> list) => list.RemoveAt(list.Count - 1);
public static T Peek<T>(this List<T> list) => list[list.Count - 1];
public static void GrowTo<T>(this List<T> list, int size, T pad)
{
if (size > list.Count)
{
// Minimise resizing
if (size > list.Capacity) list.Capacity = size;
}
}
public static void ShrinkTo<T>(this List<T> list, int size)
{
list.RemoveRange(size, list.Count - size);

int targetCap = TargetCapacity(size);
if (list.Capacity > targetCap) list.Capacity = targetCap;
}
}

public delegate bool IntLess(int i1, int i2);

public class Heap
{
IntLess Cmp;
List<int> Heap_ = new List<int>(); // heap of ints
List<int> Indices = new List<int>(); // index in Heap_

static int Left(int i) => i << 1;
static int Right(int i) => (i << 1) | 1;
static int Parent(int i) => i >> 1;

{
int x = Heap_[i];
while (Parent(i) != 0 && Cmp(x, Heap_[Parent(i)]))
{
Heap_[i] = Heap_[Parent(i)];
Indices[Heap_[i]] = i;
i = Parent(i);
}
Heap_[i] = x;
Indices[x] = i;
}

void DownHeap(int i)
{
int x = Heap_[i];
while (Left(i) < Heap_.Count)
{
int child = Right(i) < Heap_.Count && Cmp(Heap_[Right(i)], Heap_[Left(i)]) ? Right(i) : Left(i);
if (!Cmp(Heap_[child], x)) break;
Heap_[i] = Heap_[child];
Indices[Heap_[i]] = i;
i = child;
}
Heap_[i] = x;
Indices[x] = i;
}

bool Ok(int n) => n >= 0 && n < Indices.Count;

public Heap(IntLess c) { Cmp = c; Heap_.Add(-1); }

public void SetBounds(int size) { Solver.Assert(size >= 0); Indices.GrowTo(size, 0); if (size > Heap_.Capacity) Heap_.Capacity = size; }
public bool InHeap(int n) { Solver.Assert(Ok(n)); return Indices[n] != 0; }
public void Increase(int n) { Solver.Assert(Ok(n)); Solver.Assert(InHeap(n)); UpHead(Indices[n]); }
public bool IsEmpty => Heap_.Count == 1;

public void Push(int n)
{
Solver.Assert(Ok(n));
Indices[n] = Heap_.Count;
}

public int Pop()
{
int r = Heap_[1];
Heap_[1] = Heap_.Peek();
Indices[Heap_[1]] = 1;
Indices[r] = 0;
Heap_.Pop();
if (Heap_.Count > 1) DownHeap(1);
return r;
}
}

public class Solver
{
#region lbool ~= Nullable<bool>

public struct LBool
{
public static readonly LBool True = new LBool { Content = 1 };
public static readonly LBool False = new LBool { Content = -1 };
public static readonly LBool Undef = new LBool { Content = 0 };

private sbyte Content;

public static bool operator ==(LBool a, LBool b) => a.Content == b.Content;
public static bool operator !=(LBool a, LBool b) => a.Content != b.Content;
public static LBool operator ~(LBool a) => new LBool { Content = (sbyte)-a.Content };

public static implicit operator LBool(bool b) => b ? True : False;
}

public static readonly LBool l_True = LBool.True;
public static readonly LBool l_False = LBool.False;
public static readonly LBool l_Undef = LBool.Undef;

#endregion

#region Literals

const int var_Undef = -1;

public struct Lit
{
public Lit(Var var) { Index = var << 1; }

public bool Sign => (Index & 1) != 0;
public int Index { get; private set; }
public int Var => Index >> 1;

public bool SatisfiedBy(List<LBool> assignment) => assignment[Var] == (Sign ? l_False : l_True);

public static Lit operator ~(Lit p) => new Lit { Index = p.Index ^ 1 };
public static bool operator ==(Lit p, Lit q) => p.Index == q.Index;
public static bool operator !=(Lit p, Lit q) => !(p == q);

public override int GetHashCode() => Index;
public override bool Equals(object other) => other is Lit that && this == that;
public override string ToString() => (Sign ? "-" : "") + "x" + Var;
}

static public readonly Lit lit_Undef = ~new Lit(var_Undef);

#endregion

#region Clauses

public abstract class Clause
{
protected Clause(bool learnt)
{
IsLearnt = learnt;
}

public bool IsLearnt { get; private set; }
public float Activity;

public abstract int Size { get; }
public abstract Lit this[int i] { get;set; }

public abstract bool SatisfiedBy(List<LBool> assigns);

public static Clause Create(bool learnt, List<Lit> ps)
{
if (ps.Count < 2) throw new ArgumentOutOfRangeException(nameof(ps));
if (ps.Count == 2) return new BinaryClause(learnt, ps[0], ps[1]);
return new LargeClause(learnt, ps);
}
}

public class BinaryClause : Clause
{
public BinaryClause(bool learnt, Lit p0, Lit p1) : base(learnt)
{
l0 = p0; l1 = p1;
}

private Lit l0;
private Lit l1;

public override Lit this[int i]
{
get { return i == 0 ? l0 : l1; }
set { if (i == 0) l0 = value; else l1 = value; }
}

public override int Size => 2;

public override bool SatisfiedBy(List<LBool> assigns) => l0.SatisfiedBy(assigns) || l1.SatisfiedBy(assigns);
}

public class LargeClause : Clause
{
public static int[] SizeDistrib = new int[10];

internal LargeClause(bool learnt, List<Lit> ps) : base(learnt)
{
Data = ps.ToArray();

SizeDistrib[Size >= SizeDistrib.Length ? SizeDistrib.Length - 1 : Size]++;
}

public Lit[] Data { get; private set; }

public override int Size => Data.Length;

public override Lit this[int i]
{
get { return Data[i]; }
set { Data[i] = value; }
}

public override bool SatisfiedBy(List<LBool> assigns) => Data.Any(lit => lit.SatisfiedBy(assigns));

public override string ToString() => "[" + string.Join(", ", Data) + "]";
}

#endregion

#region Utilities

// Returns a random float 0 <= x < 1. Seed must never be 0.
static double Rnd(ref double seed)
{
seed *= 1389796;
int k = 2147483647;
int q = (int)(seed / k);
seed -= (double)q * k;
return seed / k;
}

[Conditional("DEBUG")]
static public void Assert(bool expr) => Check(expr);

// Just like 'assert()' but expression will be evaluated in the release version as well.
static void Check(bool expr) { if (!expr) throw new Exception("assertion violated"); }

#endregion

#region VarOrder

public class VarOrder
{
readonly List<LBool> Assigns; // Pointer to external assignment table.
readonly List<float> Activity; // Pointer to external activity table.
internal Heap Heap_;
double RandomSeed;

public VarOrder(List<LBool> ass, List<float> act)
{
Assigns = ass;
Activity = act;
Heap_ = new Heap(Lt);
RandomSeed = 91648253;
}

bool Lt(Var x, Var y) => Activity[x] > Activity[y];

public virtual void NewVar()
{
Heap_.SetBounds(Assigns.Count);
Heap_.Push(Assigns.Count - 1);
}

// Called when variable increased in activity.
public virtual void Update(Var x) { if (Heap_.InHeap(x)) Heap_.Increase(x); }

// Called when variable is unassigned and may be selected again.
public virtual void Undo(Var x) { if (!Heap_.InHeap(x)) Heap_.Push(x); }

// Selects a new, unassigned variable (or 'var_Undef' if none exists).
public virtual Lit Select(double random_var_freq)
{
// Random decision:
if (Rnd(ref RandomSeed) < random_var_freq && !Heap_.IsEmpty)
{
Var next = (Var)(Rnd(ref RandomSeed) * Assigns.Count);
if (Assigns[next] == l_Undef) return ~new Lit(next);
}

// Activity based decision:
while (!Heap_.IsEmpty)
{
Var next = Heap_.Pop();
if (Assigns[next] == l_Undef) return ~new Lit(next);
}

return lit_Undef;
}
}

#endregion

#region Solver state

public bool Ok { get; private set; } // If false, the constraints are already unsatisfiable. No part of the solver state may be used!
List<Clause> Clauses = new List<Clause>(); // List of problem clauses.
List<Clause> Learnts = new List<Clause>(); // List of learnt clauses.
double ClaInc = 1; // Amount to bump next clause with.
const double ClaDecay = 1 / 0.999; // INVERSE decay factor for clause activity: stores 1/decay.

public List<float> Activity = new List<float>(); // A heuristic measurement of the activity of a variable.
float VarInc = 1; // Amount to bump next variable with.
const float VarDecay = 1 / 0.95f; // INVERSE decay factor for variable activity: stores 1/decay. Use negative value for static variable order.
VarOrder Order; // Keeps track of the decision variable order.
const double RandomVarFreq = 0.02; // Probability of making a random (rather than activity-driven) decision.

List<List<Clause>> Watches = new List<List<Clause>>(); // 'watches[lit]' is a list of constraints watching 'lit' (will go there if literal becomes true).
public List<LBool> Assigns = new List<LBool>(); // The current assignments, indexed by variable.
public List<Lit> Trail = new List<Lit>(); // Assignment stack; stores all assignments made in the order they were made.
List<int> TrailLim = new List<int>(); // Separator indices for different decision levels in 'trail'.
List<Clause> Reason = new List<Clause>(); // 'reason[var]' is the clause that implied the variables current value, or 'null' if none.
List<int> Level = new List<int>(); // 'level[var]' is the decision level at which assignment was made.
List<int> TrailPos = new List<int>(); // 'trail_pos[var]' is the variable's position in 'trail[]'. This supersedes 'level[]' in some sense, and 'level[]' will probably be removed in future releases.
int QHead = 0; // Head of queue (as index into the trail -- no more explicit propagation queue in MiniSat).
int SimpDBAssigns = 0; // Number of top-level assignments since last execution of 'simplifyDB()'.
long SimpDBProps = 0; // Remaining number of propagations that must be made before next execution of 'simplifyDB()'.

// Temporaries (to reduce allocation overhead). Owned and reset by Analyze/AnalyzeRemovable.
List<LBool> AnalyzeSeen = new List<LBool>();
List<Lit> AnalyzeStack = new List<Lit>();
List<Lit> AnalyzeToClear = new List<Lit>();
#endregion

#region Main internal methods:

// Activity
// Bumps the activity of 'p''s variable and updates its position in the
// decision-order heap.
// NOTE(review): 'Activity' holds floats (max ~3.4e38) but the rescale
// threshold is 1e100, so the sum overflows to +infinity long before the
// threshold triggers, and VarRescaleActivity (* 1e-100f) cannot bring an
// infinite value back down. Confirm and consider lowering this threshold
// together with the rescale factor in VarRescaleActivity.
void VarBumpActivity(Lit p)
{
    if (VarDecay < 0) return; // (negative decay means static variable order -- don't bump)
    if ((Activity[p.Var] += VarInc) > 1e100) VarRescaleActivity();
    Order.Update(p.Var);
}
// Decays all variable activities geometrically by inflating the bump
// amount instead of touching every entry (VarDecay stores 1/decay;
// a negative value selects a static variable order, i.e. no decay).
void VarDecayActivity()
{
    if (VarDecay < 0) return;
    VarInc *= VarDecay;
}

// Operations on clauses
// Bumps a clause's activity; once any activity exceeds 1e20, all clause
// activities are rescaled to keep the values in floating-point range.
void ClaBumpActivity(Clause c)
{
    c.Activity += (float)ClaInc;
    if (c.Activity > 1e20) ClaRescaleActivity();
}
// Disposes of clause and removes it from watcher lists. NOTE! Low-level; does NOT change the 'clauses' and 'learnts' vector.
void Remove(Clause c)
{
    // Watcher lists are indexed by the negation of the watched literal.
    int watch0 = (~c[0]).Index;
    int watch1 = (~c[1]).Index;
    RemoveWatch(Watches[watch0], c);
    RemoveWatch(Watches[watch1], c);

    // Keep the global literal-count statistics in sync.
    if (c.IsLearnt)
        LearntsLiterals -= c.Size;
    else
        ClausesLiterals -= c.Size;
}
// A clause is "locked" while it is the reason for its first literal's
// current assignment; locked clauses must not be removed.
bool IsLocked(Clause c)
{
    return Reason[c[0].Var] == c;
}

// Current decision level: one per separator pushed onto 'TrailLim'.
int DecisionLevel { get { return TrailLim.Count; } }

#endregion

#region Public interface

// Creates an empty solver in a satisfiable ("ok") state. The variable
// order shares the 'Assigns' and 'Activity' tables with the solver.
public Solver()
{
    Ok = true;
    Order = new VarOrder(Assigns, Activity);
}

// Pre-sizes the solver's containers for an expected problem size, avoiding
// repeated reallocation while the problem is being loaded.
public void Prealloc(int numVars, int numClauses)
{
    // Per-variable tables.
    Activity.Capacity = numVars;
    AnalyzeSeen.Capacity = numVars;
    Assigns.Capacity = numVars;
    Level.Capacity = numVars;
    Reason.Capacity = numVars;
    Trail.Capacity = numVars;
    TrailPos.Capacity = numVars;

    // Per-literal table (two literals per variable) and the decision heap.
    Watches.Capacity = numVars << 1;
    Order.Heap_.SetBounds(numVars + 1);

    // Clause storage.
    Clauses.Capacity = numClauses;
}

// Helpers (semi-internal)
// The truth value of a literal under the current assignment (negated for
// sign-flipped literals).
public LBool Value(Lit p) => p.Sign ? ~Assigns[p.Var] : Assigns[p.Var];

public int nAssigns => Trail.Count;   // number of assigned variables (trail length)
public int nClauses => Clauses.Count; // number of stored problem clauses
public int nLearnts => Learnts.Count; // number of stored learnt clauses

// Statistics
public long ClausesLiterals, LearntsLiterals; // running literal totals, updated by NewClause/Remove

// Problem specification
public int nVars => Assigns.Count;
public void AddClause(params Lit[] ps) => NewClause(new List<Lit>(ps), false);

// Solving
public List<LBool> Model = new List<LBool>(); // If problem is satisfiable, this vector contains the model (if any).

#endregion

#region Operations on clauses:

// Removes duplicate literals from 'ps' (first occurrence wins, order kept)
// and detects tautologies: if the clause contains both a variable and its
// negation it is trivially satisfied and null is returned.
List<Lit> BasicClauseSimplification(List<Lit> ps)
{
    var seenByVar = new Dictionary<Var, Lit>(ps.Count);
    var result = new List<Lit>(ps.Count);

    foreach (Lit lit in ps)
    {
        if (seenByVar.TryGetValue(lit.Var, out var prior))
        {
            // Same variable seen before: either a duplicate (skip) or the
            // complementary literal (prior = ~lit), making the clause a tautology.
            if (prior != lit) return null;
        }
        else
        {
            seenByVar[lit.Var] = lit;
            result.Add(lit);
        }
    }

    return result;
}

// Adds a clause to the solver. Problem clauses (learnt == false) are
// simplified at the root level first; learnt clauses come from Analyze with
// the asserting literal at position 0 and are enqueued immediately.
// Sets Ok = false if the clause (after simplification) is empty or conflicts
// with the current root-level assignment.
void NewClause(List<Lit> ps, bool learnt)
{
    if (!Ok) return;
    Assert(ps != null);

    if (!learnt)
    {
        Assert(DecisionLevel == 0);

        // Remove duplicates and detect tautologies (x | ~x).
        ps = BasicClauseSimplification(ps);
        if (ps == null) return; // trivially satisfied

        // Drop literals already decided at the root level.
        int j = 0;
        for (int i = 0; i < ps.Count; i++)
        {
            var lit = ps[i];
            if (Level[lit.Var] == 0)
            {
                if (Value(lit) == l_True) return; // Clause already sat
                if (Value(lit) == l_False) continue; // Literal already eliminated
            }
            ps[j++] = lit;
        }
        ps.ShrinkTo(j);
    }

    // 'ps' is now the (possibly) reduced vector of literals.
    if (ps.Count == 0) Ok = false; // empty clause: problem is UNSAT
    else if (ps.Count == 1)
    {
        // Unit clause: assert the fact directly instead of storing a clause.
        if (!Enqueue(ps[0], null)) Ok = false;
    }
    else
    {
        var c = Clause.Create(learnt, ps);

        if (!learnt)
        {
            // BUGFIX: the problem clause was never stored, so 'Clauses',
            // nClauses and SimplifyDB all operated on an empty list.
            Clauses.Add(c);
            ClausesLiterals += c.Size;
        }
        else
        {
            // Put the second watch on the literal with highest decision level:
            int max_i = 1;
            int max = Level[ps[1].Var];
            for (int i = 2; i < ps.Count; i++)
                if (Level[ps[i].Var] > max)
                {
                    max = Level[ps[i].Var];
                    max_i = i;
                }
            c[1] = ps[max_i];
            c[max_i] = ps[1];

            // The learnt clause is asserting: its first literal becomes true now.
            Check(Enqueue(c[0], c));

            // Bumping:
            ClaBumpActivity(c); // (newly learnt clauses should be considered active)
            Learnts.Push(c);
            LearntsLiterals += c.Size;
        }

        // Watch clause:
        Watches[(~c[0]).Index].Push(c);
        Watches[(~c[1]).Index].Push(c);
    }
}

// Can assume everything has been propagated! (esp. the first two literals are != l_False, unless
// the clause is binary and satisfied, in which case the first literal is true)
// Root-level satisfaction check, used by SimplifyDB to garbage-collect clauses.
bool IsSatisfied(Clause c)
{
    Assert(DecisionLevel == 0);
    return c.SatisfiedBy(Assigns);
}

#endregion

#region Minor methods

static bool RemoveWatch(List<Clause> ws, Clause elem) // Pre-condition: 'elem' must exists in 'ws' OR 'ws' must be empty.
{
    // Lists cleared wholesale elsewhere are skipped here.
    if (ws.Count == 0) return false;

    // Locate the clause (asserting it is found before the end), then remove
    // it; RemoveAt slides the tail down one slot, preserving order.
    int idx = 0;
    while (ws[idx] != elem)
    {
        Assert(idx < ws.Count - 1);
        idx++;
    }
    ws.RemoveAt(idx);
    return true;
}

// Creates a new SAT variable and grows every per-variable table, returning
// the new variable's positive literal.
public Lit NewVar()
{
    int index = nVars;
    Watches.Add(new List<Clause>()); // (list for positive literal)
    Watches.Add(new List<Clause>()); // (list for negative literal)
    // BUGFIX: grow the solver-owned per-variable tables; without this,
    // Enqueue/Analyze index past the end of these lists. ('Assigns' and
    // 'Activity' are shared with 'Order' and assumed to be grown inside
    // Order.NewVar() -- TODO confirm against VarOrder.)
    Reason.Add(null);
    Level.Add(0);          // unassigned variables behave as root-level
    TrailPos.Add(-1);      // never read before the variable is assigned
    AnalyzeSeen.Add(l_Undef);
    Order.NewVar();
    return new Lit(index);
}

// Returns FALSE if immediate conflict.
// Makes 'p' a new decision: opens a decision level, then enqueues 'p'
// with no reason clause.
bool Assume(Lit p)
{
    // BUGFIX: a new decision level must be opened before the assumption is
    // enqueued; without this separator DecisionLevel never increases and
    // CancelUntil cannot undo the decision.
    TrailLim.Push(Trail.Count);
    return Enqueue(p, null);
}

// Revert to the state at given level: undo all assignments made after it,
// newest first, and shrink the trail and decision-level separators.
void CancelUntil(int level)
{
    if (DecisionLevel > level)
    {
        for (int c = Trail.Count - 1; c >= TrailLim[level]; c--)
        {
            Var x = Trail[c].Var;
            Assigns[x] = l_Undef;
            Reason[x] = null;
            Order.Undo(x); // variable becomes eligible for decisions again
        }
        Trail.RemoveRange(TrailLim[level], Trail.Count - TrailLim[level]);
        TrailLim.ShrinkTo(level);
        // BUGFIX: the propagation queue head must not point past the
        // shortened trail, or Propagate would skip (or index past) facts.
        QHead = Trail.Count;
    }
}

#endregion

#region Major methods:

// Analyzes the conflict clause 'confl' and produces a learnt clause in
// 'out_learnt', with the asserting literal at position 0 (the loop runs
// until a single literal of the current level remains -- first UIP).
// Returns the backtrack level. Pre-condition: 'out_learnt' is empty.
int Analyze(Clause confl, List<Lit> out_learnt)
{
    List<LBool> seen = AnalyzeSeen; // per-variable visited marks (l_True = seen)
    int pathC = 0;                  // literals of the current level still to resolve
    Lit p = lit_Undef;

    // Generate conflict clause
    out_learnt.Push(lit_Undef); // (placeholder for the asserting literal)
    var out_btlevel = 0;
    int index = Trail.Count - 1;
    do
    {
        Assert(confl != null); // (otherwise should be UIP)

        if (confl.IsLearnt) ClaBumpActivity(confl);

        // Visit the antecedent's literals; position 0 is the implied literal
        // and is skipped, except for the very first (conflicting) clause.
        for (int j = (p == lit_Undef) ? 0 : 1; j < confl.Size; j++)
        {
            Lit q = confl[j];
            var v = q.Var;
            if (seen[v] == l_Undef && Level[v] > 0)
            {
                VarBumpActivity(q);
                seen[v] = l_True;
                // Current-level literals are resolved away; earlier-level
                // literals go straight into the learnt clause.
                if (Level[v] == DecisionLevel) pathC++;
                else
                {
                    out_learnt.Push(q);
                    out_btlevel = Math.Max(out_btlevel, Level[v]);
                }
            }
        }

        // Select next clause to look at: walk the trail backwards to the
        // most recently assigned 'seen' variable.
        while (seen[Trail[index--].Var] == l_Undef) ;
        p = Trail[index + 1];
        confl = Reason[p.Var];
        seen[p.Var] = l_Undef;
        pathC--;
    } while (pathC > 0);
    out_learnt[0] = ~p; // the asserting literal

    // Conflict clause minimization: drop literals implied by the others.
    {
        uint min_level = 0;
        for (int i = 1; i < out_learnt.Count; i++) min_level |= (uint)(1 << (Level[out_learnt[i].Var] & 31)); // (maintain an abstraction of levels involved in conflict)

        AnalyzeToClear.Clear();
        int j = 1;
        for (int i = 1; i < out_learnt.Count; i++)
            if (Reason[out_learnt[i].Var] == null || !AnalyzeRemovable(out_learnt[i], min_level)) out_learnt[j++] = out_learnt[i];

        // Clean up
        for (int jj = 0; jj < out_learnt.Count; jj++) seen[out_learnt[jj].Var] = l_Undef;
        for (int jj = 0; jj < AnalyzeToClear.Count; jj++) seen[AnalyzeToClear[jj].Var] = l_Undef; // ('seen[]' is now cleared)

        out_learnt.ShrinkTo(j);
    }

    return out_btlevel;
}

// Check if 'p_' can be removed from the learnt clause, i.e. whether it is
// implied by the remaining literals. 'min_level' is a bitmask abstraction of
// the decision levels present in the conflict, used to abort early when
// visiting literals at a level that cannot possibly be removed.
bool AnalyzeRemovable(Lit p_, uint min_level)
{
    Assert(Reason[p_.Var] != null);
    // BUGFIX: seed the DFS work-stack with 'p_'. The stack was never
    // initialised before the loop, so the search either never ran or consumed
    // stale entries left by a previous call that returned false -- making
    // literals look removable when they are not.
    AnalyzeStack.Clear();
    AnalyzeStack.Push(p_);
    int top = AnalyzeToClear.Count; // marker so a failed search can roll back its marks
    while (AnalyzeStack.Count > 0)
    {
        Clause c = Reason[AnalyzeStack.Peek().Var];
        Assert(c != null);
        AnalyzeStack.Pop();
        // Every antecedent literal must itself be implied (or be at level 0 /
        // already marked) for 'p_' to be redundant.
        for (int i = 1; i < c.Size; i++)
        {
            Lit p = c[i];
            if (AnalyzeSeen[p.Var] == l_Undef && Level[p.Var] != 0)
            {
                if (Reason[p.Var] != null && ((1 << (Level[p.Var] & 31)) & min_level) != 0)
                {
                    AnalyzeSeen[p.Var] = l_True;
                    AnalyzeStack.Push(p);
                    AnalyzeToClear.Push(p);
                }
                else
                {
                    // Hit a decision literal or a level outside the conflict:
                    // not removable. Undo the marks made by this call only.
                    for (int j = top; j < AnalyzeToClear.Count; j++) AnalyzeSeen[AnalyzeToClear[j].Var] = l_Undef;
                    AnalyzeToClear.ShrinkTo(top);
                    return false;
                }
            }
        }
    }

    AnalyzeToClear.Push(p_);
    return true;
}

// Asserts the fact 'p' and records it on the trail for propagation.
// 'from' is the clause that implied the fact (null for decisions and
// root-level facts). Returns false on an immediate conflict (the literal
// is already false); returns true without re-recording if already true.
bool Enqueue(Lit p, Clause from)
{
    if (Value(p) != l_Undef) return Value(p) == l_True;

    Var x = p.Var;
    Assigns[x] = !p.Sign; // assign so that Value(p) == l_True
    Level[x] = DecisionLevel;
    TrailPos[x] = Trail.Count;
    Reason[x] = from;
    // BUGFIX: record the fact on the trail; without this it is never seen by
    // Propagate (which reads Trail[QHead++]) nor undone by CancelUntil.
    Trail.Push(p);
    return true;
}

// Propagates all enqueued facts (everything on the trail from QHead onward).
// If a conflict arises, the conflicting clause is returned and the queue
// head is fast-forwarded past the trail; otherwise null is returned.
Clause Propagate()
{
    Clause confl = null;
    // BUGFIX: propagation must drain the whole queue. The loop header was
    // missing, so only a single literal was processed per call -- and
    // Trail[QHead++] was indexed even when the queue was empty.
    while (QHead < Trail.Count)
    {
        SimpDBProps--;

        Lit p = Trail[QHead++]; // 'p' is enqueued fact to propagate.
        List<Clause> ws = Watches[p.Index];
        int i, j, end;
        for (i = j = 0, end = ws.Count; i != end;)
        {
            Clause c = ws[i++];
            // Make sure the false literal is data[1]
            Lit false_lit = ~p;
            if (c[0] == false_lit) { c[0] = c[1]; c[1] = false_lit; }

            Assert(c[1] == false_lit);

            // If 0th watch is true, then clause is already satisfied.
            Lit first = c[0];
            LBool val = Value(first);
            if (val == l_True) ws[j++] = c;
            else
            {
                // Look for new watch
                for (int k = 2; k < c.Size; k++)
                    if (Value(c[k]) != l_False)
                    {
                        c[1] = c[k]; c[k] = false_lit;
                        Watches[(~c[1]).Index].Push(c);
                        goto FoundWatch;
                    }

                // Did not find watch -- clause is unit under assignment
                ws[j++] = c;
                if (!Enqueue(first, c))
                {
                    if (DecisionLevel == 0) Ok = false;
                    confl = c;
                    // BUGFIX: stop the outer loop -- further propagation after
                    // a conflict is meaningless.
                    QHead = Trail.Count;
                    while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
                }
                FoundWatch:;
            }
        }
        ws.ShrinkTo(j);
    }

    return confl;
}

// Removes roughly half of the learnt clauses: binary clauses and "locked"
// clauses (currently the reason for an assignment) are always kept, and
// clauses in the better-sorted half survive unless their activity fell
// below 'extra_lim'.
void ReduceDB()
{
    double extra_lim = ClaInc / Learnts.Count; // Remove any clause below this activity

    // Sort so that the least useful clauses come first: non-binary clauses
    // with low activity sort before binary / higher-activity ones.
    // NOTE(review): this comparison never returns 0 and is not a strict weak
    // ordering; List<T>.Sort's comparer contract is technically violated --
    // confirm it cannot throw "comparer ... inconsistent results" here.
    Learnts.Sort((x, y) => x.Size > 2 && (y.Size == 2 || x.Activity < y.Activity) ? -1 : 1);

    int i, j;
    // First half (least useful): delete unless binary or locked.
    for (i = j = 0; i < Learnts.Count / 2; i++)
    {
        if (Learnts[i].Size > 2 && !IsLocked(Learnts[i])) Remove(Learnts[i]);
        else Learnts[j++] = Learnts[i];
    }
    // Second half: additionally require the activity to be below the limit.
    for (; i < Learnts.Count; i++)
    {
        if (Learnts[i].Size > 2 && !IsLocked(Learnts[i]) && Learnts[i].Activity < extra_lim) Remove(Learnts[i]);
        else Learnts[j++] = Learnts[i];
    }
    Learnts.ShrinkTo(j);
}

// Top-level simplification of the constraint database: propagates pending
// root-level facts (setting Ok = false on conflict) and removes satisfied
// clauses. Throttled so it only runs after new root assignments and after
// enough propagations have happened. Must be called at decision level 0.
void SimplifyDB()
{
    if (!Ok) return;
    Assert(DecisionLevel == 0);

    if (Propagate() != null) { Ok = false; return; }
    if (nAssigns == SimpDBAssigns || SimpDBProps > 0) return; // (nothing has changed or performed a simplification too recently)

    // Clear watcher lists of literals assigned at the root level since the
    // last simplification.
    for (int i = SimpDBAssigns; i < nAssigns; i++)
    {
        Lit p = Trail[i];
        Watches[p.Index].Clear();
        Watches[(~p).Index].Clear();
    }

    // Remove satisfied clauses (both problem and learnt, except locked ones):
    for (int type = 0; type < 2; type++)
    {
        List<Clause> cs = type != 0 ? Learnts : Clauses;
        int j = 0;
        for (int i = 0; i < cs.Count; i++)
        {
            if (!IsLocked(cs[i]) && IsSatisfied(cs[i])) Remove(cs[i]);
            else cs[j++] = cs[i];
        }
        cs.ShrinkTo(j);
    }

    SimpDBAssigns = nAssigns;
    SimpDBProps = ClausesLiterals + LearntsLiterals; // defer next run until this many propagations
}

// Searches for a model, allowing at most 'nof_conflicts' conflicts and
// about 'nof_learnts' learnt clauses (negative = no limit).
// Returns l_True when a model is found (copied into 'Model'), l_False when
// the problem is unsatisfiable, and l_Undef when the conflict budget is
// exhausted (the caller restarts with larger limits).
LBool Search(int nof_conflicts, int nof_learnts)
{
    if (!Ok) return l_False;
    Assert(0 == DecisionLevel);

    int conflictC = 0;
    Model.Clear();

    while (true)
    {
        Clause confl = Propagate();
        if (confl != null)
        {
            // CONFLICT
            conflictC++;
            var learnt_clause = new List<Lit>();
            if (DecisionLevel == 0) return l_False; // Contradiction found
            CancelUntil(Analyze(confl, learnt_clause));
            NewClause(learnt_clause, true);
            if (learnt_clause.Count == 1) Level[learnt_clause[0].Var] = 0; // (unit facts learnt here stay at the root level)
            VarDecayActivity();
            // BUGFIX: decay clause activities as well; 'ClaDecay' (storing
            // 1/decay) was otherwise never used.
            ClaInc *= ClaDecay;
        }
        else
        {
            // NO CONFLICT
            if (nof_conflicts >= 0 && conflictC >= nof_conflicts)
            {
                // Reached bound on number of conflicts
                CancelUntil(0);
                return l_Undef;
            }

            // Simplify the set of problem clauses
            if (DecisionLevel == 0) { SimplifyDB(); if (!Ok) return l_False; }

            // Reduce the set of learnt clauses
            if (nof_learnts >= 0 && Learnts.Count - nAssigns >= nof_learnts) ReduceDB();

            // New variable decision
            Lit next = Order.Select(RandomVarFreq);

            if (next == lit_Undef)
            {
                // Model found: copy the assignment BEFORE backtracking undoes it.
                Model.Clear();
                Model.Capacity = nVars;
                // BUGFIX: 'Model' was never populated, so callers reading it
                // after a successful Solve() always saw an empty list.
                for (int i = 0; i < nVars; i++) Model.Add(Assigns[i]);
                CancelUntil(0);
                return l_True;
            }

            Check(Assume(next));
        }
    }
}

// Divides all variable activities (and the bump amount) by 1e100 to keep
// the values in range.
// NOTE(review): 'Activity' holds floats, so any value that has already
// overflowed to +infinity remains infinite after scaling -- see the
// threshold used in VarBumpActivity.
void VarRescaleActivity()
{
    for (int i = 0; i < nVars; i++) Activity[i] *= 1e-100f;
    VarInc *= 1e-100f;
}

// Scales every learnt clause's activity (and the bump amount) down by 1e20
// to keep the values within floating-point range.
void ClaRescaleActivity()
{
    foreach (Clause learnt in Learnts)
        learnt.Activity *= 1e-20f;
    ClaInc *= 1e-20;
}

// Top-level driver: restart-based search with geometrically growing limits
// on the number of conflicts and of retained learnt clauses.
// Returns true iff a model was found (available in 'Model').
public bool Solve()
{
    SimplifyDB();
    Assert(DecisionLevel == 0);

    double maxConflicts = 100;
    double maxLearnts = nClauses / 3;
    for (;;)
    {
        LBool status = Search((int)maxConflicts, (int)maxLearnts);
        if (status != l_Undef)
        {
            CancelUntil(0);
            return Ok;
        }
        // Restart with relaxed limits.
        maxConflicts *= 1.5;
        maxLearnts *= 1.1;
    }
}

#endregion
}
}


### Optimality

The code above keeps reducing the target size until it finds an unsatisfiable constraint, so it guarantees that the output is optimal (under the bounding box assumption) up to and including \$n=11\$. However, it runs out of memory (3GB for a 32-bit process or 4GB for a 64-bit process) with \$n=12\$ after producing a region with weight 43.

To run the search for \$n=12\$ to completion I found it necessary to reduce the memory usage considerably, special-casing binary clauses and not keeping empty watch lists. However, because this changes the order in which clauses are considered, it also changes the results, so I present the change as a patch and leave the list of solutions above untouched.

--- MiniSAT.cs.old
+++ MiniSAT.cs
@@ -346,6 +346,7 @@ namespace MiniSAT
const double RandomVarFreq = 0.02;

List<List<Clause>> Watches = new List<List<Clause>>(); // 'watches[lit]' is a list of constraints watching 'lit' (will go there if literal becomes true).
+        List<List<Lit>> BinaryWatches = new List<List<Lit>>();
public List<LBool> Assigns = new List<LBool>(); // The current assignments.
public List<Lit> Trail = new List<Lit>(); // Assignment stack; stores all assigments made in the order they were made.
List<int> TrailLim = new List<int>(); // Separator indices for different decision levels in 'trail'.
@@ -381,7 +382,9 @@ namespace MiniSAT
void Remove(Clause c)
{
RemoveWatch(Watches[(~c[0]).Index], c);
+            if (Watches[(~c[0]).Index] != null && Watches[(~c[0]).Index].Count == 0) Watches[(~c[0]).Index] = null;
RemoveWatch(Watches[(~c[1]).Index], c);
+            if (Watches[(~c[1]).Index] != null && Watches[(~c[1]).Index].Count == 0) Watches[(~c[1]).Index] = null;

if (c.IsLearnt) LearntsLiterals -= c.Size;
else ClausesLiterals -= c.Size;
@@ -408,6 +411,7 @@ namespace MiniSAT
Level.Capacity = numVars;
Reason.Capacity = numVars;
Watches.Capacity = numVars << 1;
+            BinaryWatches.Capacity = numVars << 1;
Order.Heap_.SetBounds(numVars + 1);
Trail.Capacity = numVars;
TrailPos.Capacity = numVars;
@@ -500,7 +504,7 @@ namespace MiniSAT

if (!learnt)
{
-                    Clauses.Add(c);
+                    if (c.Size > 2) Clauses.Add(c);
ClausesLiterals += c.Size;
}
else
@@ -526,8 +530,20 @@ namespace MiniSAT
}

// Watch clause:
-                Watches[(~c[0]).Index].Push(c);
-                Watches[(~c[1]).Index].Push(c);
+                if (c.Size == 2 && !learnt)
+                {
+                    if (BinaryWatches[(~c[0]).Index] == null) BinaryWatches[(~c[0]).Index] = new List<Lit>();
+                    BinaryWatches[(~c[0]).Index].Push(c[1]);
+                    if (BinaryWatches[(~c[1]).Index] == null) BinaryWatches[(~c[1]).Index] = new List<Lit>();
+                    BinaryWatches[(~c[1]).Index].Push(c[0]);
+                }
+                else
+                {
+                    if (Watches[(~c[0]).Index] == null) Watches[(~c[0]).Index] = new List<Clause>();
+                    Watches[(~c[0]).Index].Push(c);
+                    if (Watches[(~c[1]).Index] == null) Watches[(~c[1]).Index] = new List<Clause>();
+                    Watches[(~c[1]).Index].Push(c);
+                }
}
}

@@ -545,7 +561,7 @@ namespace MiniSAT

static bool RemoveWatch(List<Clause> ws, Clause elem) // Pre-condition: 'elem' must exists in 'ws' OR 'ws' must be empty.
{
-            if (ws.Count == 0) return false; // (skip lists that are already cleared)
+            if (ws == null || ws.Count == 0) return false; // (skip lists that are already cleared)
int j = 0;
for (; ws[j] != elem; j++) Assert(j < ws.Count - 1);
for (; j < ws.Count - 1; j++) ws[j] = ws[j + 1];
@@ -556,8 +572,10 @@ namespace MiniSAT
public Lit NewVar()
{
int index = nVars;
-            Watches.Add(new List<Clause>()); // (list for positive literal)
-            Watches.Add(new List<Clause>()); // (list for negative literal)
+            Watches.Add(null); // (list for positive literal)
+            Watches.Add(null); // (list for negative literal)
+            BinaryWatches.Add(null);
+            BinaryWatches.Add(null);
@@ -716,45 +734,85 @@ namespace MiniSAT
SimpDBProps--;

Lit p = Trail[QHead++]; // 'p' is enqueued fact to propagate.
-                List<Clause> ws = Watches[p.Index];
-                int i, j, end;
-                for (i = j = 0, end = ws.Count; i != end;)
{
-                    Clause c = ws[i++];
-                    // Make sure the false literal is data[1]
-                    Lit false_lit = ~p;
-                    if (c[0] == false_lit) { c[0] = c[1]; c[1] = false_lit; }
+                    List<Clause> ws = Watches[p.Index];
+                    if (ws != null)
+                    {
+                        int i, j, end;
+                        for (i = j = 0, end = ws.Count; i != end;)
+                        {
+                            Clause c = ws[i++];
+                            // Make sure the false literal is data[1]
+                            Lit false_lit = ~p;
+                            if (c[0] == false_lit) { c[0] = c[1]; c[1] = false_lit; }

-                    Assert(c[1] == false_lit);
+                            Assert(c[1] == false_lit);

-                    // If 0th watch is true, then clause is already satisfied.
-                    Lit first = c[0];
-                    LBool val = Value(first);
-                    if (val == l_True) ws[j++] = c;
-                    else
-                    {
-                        // Look for new watch
-                        for (int k = 2; k < c.Size; k++)
-                            if (Value(c[k]) != l_False)
+                            // If 0th watch is true, then clause is already satisfied.
+                            Lit first = c[0];
+                            LBool val = Value(first);
+                            if (val == l_True) ws[j++] = c;
+                            else
{
-                                c[1] = c[k]; c[k] = false_lit;
-                                Watches[(~c[1]).Index].Push(c);
-                                goto FoundWatch;
+                                // Look for new watch
+                                for (int k = 2; k < c.Size; k++)
+                                    if (Value(c[k]) != l_False)
+                                    {
+                                        c[1] = c[k]; c[k] = false_lit;
+                                        if (Watches[(~c[1]).Index] == null) Watches[(~c[1]).Index] = new List<Clause>();
+                                        Watches[(~c[1]).Index].Push(c);
+                                        goto FoundWatch;
+                                    }
+
+                                // Did not find watch -- clause is unit under assignment
+                                ws[j++] = c;
+                                if (!Enqueue(first, c))
+                                {
+                                    if (DecisionLevel == 0) Ok = false;
+                                    confl = c;
+                                    while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
+                                }
+                                FoundWatch:;
}
+                        }

-                        // Did not find watch -- clause is unit under assignment
-                        ws[j++] = c;
-                        if (!Enqueue(first, c))
+                        if (j == 0) Watches[p.Index] = null;
+                        else ws.ShrinkTo(j);
+                    }
+                }
+                // TODO BinaryWatches
+                {
+                    List<Lit> ws = BinaryWatches[p.Index];
+                    if (ws != null)
+                    {
+                        int i, j, end;
+                        for (i = j = 0, end = ws.Count; i != end;)
{
-                            if (DecisionLevel == 0) Ok = false;
-                            confl = c;
-                            while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
+                            var first = ws[i++];
+
+                            // If 0th watch is true, then clause is already satisfied.
+                            LBool val = Value(first);
+                            if (val == l_True) ws[j++] = first;
+                            else
+                            {
+                                // Did not find watch -- clause is unit under assignment
+                                ws[j++] = first;
+                                var c = new BinaryClause(false, first, ~p); // Needed for consistency of interface
+                                if (!Enqueue(first, c))
+                                {
+                                    if (DecisionLevel == 0) Ok = false;
+                                    confl = c;
+                                    while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
+                                }
+                            }
}
-                        FoundWatch:;
+
+                        if (j == 0) BinaryWatches[p.Index] = null;
+                        else ws.ShrinkTo(j);
}
}
-                ws.ShrinkTo(j);
}

return confl;
@@ -792,8 +850,10 @@ namespace MiniSAT
for (int i = SimpDBAssigns; i < nAssigns; i++)
{
Lit p = Trail[i];
-                Watches[p.Index].Clear();
-                Watches[(~p).Index].Clear();
+                Watches[p.Index] = null;
+                Watches[(~p).Index] = null;
+                BinaryWatches[p.Index] = null;
+                BinaryWatches[(~p).Index] = null;
}

// Remove satisfied clauses:


### Distinct solutions

Counting solutions to a SAT problem is straightforward, if sometimes slow: you find a solution, add a new clause which directly rules it out, and run again. Here it's easy to generate the equivalence class of solutions under the symmetries of the rectangle, so the following code suffices to generate all distinct solutions.

                // Force it to the known optimal weight
for (int i = optimal[n]; i < unaryWeights.Length; i++) solver.AddClause(~unaryWeights[i]);
while (solver.Solve())
{
var rows = new uint[height];
int cell = 0;
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
if (solver.Model[cell++] == Solver.l_True) rows[y] |= 1U << x;
}
}
var poly = new FreePolyomino(new OrientedPolyomino(rows));
Console.WriteLine(poly.CanonicalOrientation);

foreach (var orientation in poly.OrientedPolyominos)
{
if (orientation.BBWidth != width || orientation.BBHeight != height) continue;

// Exclude it
List<Solver.Lit> soln = new List<Solver.Lit>(previous);
cell = 0;
for (int y = 0; y < height; y++)
{
uint row = orientation.Rows[y];
for (int x = 0; x < width; x++, cell++)
{
if ((row & 1) == 1) soln.Add(~new Solver.Lit(cell));
row >>= 1;
}
}
}
}


This may be useful for people to generate or test hypotheses about the "typical" structure which can guide searches for higher \$n\$.

100_111
010_111

0110_1111
1100_1111

01000_11111_01110
00100_11111_11100

011000_111111_011110
110000_111111_111100
011000_011110_111111
001100_111100_111111
110000_111100_111111
001100_111111_111100

0010000_0111100_0111110_1111111
0001000_1111000_1111111_1111100
0001000_0111000_1111111_1111110
0100000_1111000_1111111_1111100
0100000_1111000_1111100_1111111
0001000_0111000_1111110_1111111
0001000_0111110_1111111_1111000
0100000_1111000_0111110_1111111
0001000_1111100_1111111_1111000
1100000_1110000_1111100_1111111
0100000_1111111_1111100_0111100
0011000_0111000_0111110_1111111
1010000_1110000_1111100_1111111
0011000_1110000_1111100_1111111
0010100_0011100_1111100_1111111
0011000_1011100_1111111_1111000
0100000_1111111_0111110_0111100
1100000_1110000_1111111_1111100
0100000_1111100_1111111_0111100
1010000_1110000_1111111_1111100
1110000_0110000_1111111_0111110
0110000_1110000_1111100_1111111
0110000_1110000_1111111_1011110
0111000_0011000_1111111_0011111
0011100_0011000_1111111_0011111
1000100_1111111_0011110_0111100
0010000_1111111_0011111_0011110
0011000_0111000_1111111_0101111
0011000_0011101_1111111_0001111
0101000_0111000_0111110_1111111
0001000_0111100_1111100_1111111
1000100_1111111_0111100_0111100
0110000_1110000_1111111_1111100
1010000_1110000_1111111_0111110
0101000_0111000_1111111_0011111
0001000_1111111_1111100_1111000
0110000_0111010_1111111_0011110
0011000_0001110_0111110_1111111
0010000_1111111_0111110_0011110
0101000_0111000_1111111_0111110
0010000_1111100_1111111_0011110
0010000_0011100_1111111_1111110
0110000_0111000_1111111_0111110
0010000_0011100_1111111_0111111
0011000_0111010_1111111_0011110
0110000_1110100_1111111_0111100
0110000_0011100_1111100_1111111
0001000_0111100_1111111_1111100
0010000_0111100_1111111_1011110
0011000_0011100_1111111_1111100
0110000_0111000_0111110_1111111
0011000_0011100_1111100_1111111
0011000_0011100_0011111_1111111
0010000_0111100_0011111_1111111
0010000_0011100_1111110_1111111
0011000_0111000_1111111_0111110
0010000_0111100_1111111_0111110
0010000_0011110_1111111_0111110
0010000_0111100_1111111_0011111
0011000_0011100_1111111_0011111
0010100_0011100_1111111_1111100
0010000_0111101_1111111_0011110
0010000_0111110_1111111_0011110

01110000_01110000_11111111_01111110
11100000_11100000_11111100_11111111
00111000_00111000_00111111_11111111
11100000_11100000_11111111_11111100
00111000_00111000_11111111_00111111
01110000_01110000_01111110_11111111

011000000_111000000_111111111_111111100_011111000
001110000_000110000_001111100_001111111_111111111
000110000_001110000_111111111_001111111_000111110
001100000_011100000_111111111_011111110_001111100
001100000_011100000_111111100_111111111_001111100
011100000_001100000_011111000_011111110_111111111
000110000_000111000_001111111_111111111_000011111
000110000_000111000_111111100_111111111_111110000
000110000_001110000_011111110_111111111_000111110
001100000_001110000_011111110_111111111_000111110

1110000000_1111000000_1111110000_1111111100_1111111111
1110000000_1111000000_1111110000_1111111111_1111111100
1111000000_0111000000_0111111000_1111111111_0111111110
0011100000_0011110000_0011111100_0011111111_1111111111
0111000000_0111100000_0111111000_0111111110_1111111111
0111000000_0111100000_0111111000_1111111111_0111111110
0111000000_1111000000_0111111000_1111111111_1111111001
0111000000_1111000000_0111111000_1111111111_1111111010
0011100000_0011110000_0011111100_1111111111_0011111111
0111100000_0011100000_0011111100_1111111111_0011111111
0011100000_0111100000_0011111100_1111111111_0111111101