# Advent of Code 2023 - Day 12

I tried to use combinatorics to solve today’s puzzles but failed (my brain burned out). In the end I just used brute force with memoization.

`Task.async_stream` turned out to be very helpful. It both lets me handle each line of input concurrently and allows me to abuse process dictionaries.

``````defmodule AoC2023.Day12 do

# `input` for both parts are things like
#
# [
#   {"???.###", [1,1,3]},
#   {".??..??...###.", [1,1,3]},
#   ...
# ]

@spec part1([{String.t(), [pos_integer()]}]) :: non_neg_integer()
def part1(input) do
  input
  # NOTE(review): the `Task.async_stream` line was lost in extraction —
  # reconstructed here from the orphaned `end, ordered: false)` below.
  # Each line is counted concurrently; every task runs in its own process,
  # so `aux/3`'s process-dictionary memoization is naturally isolated.
  |> Task.async_stream(fn {springs, counts} ->
    aux(springs, ".", counts)
  end, ordered: false)
  # Task.async_stream yields {:ok, result} tuples; unwrap the result.
  |> Stream.map(&elem(&1, 1))
  |> Enum.sum()
end

@spec part2([{String.t(), [pos_integer()]}]) :: non_neg_integer()
def part2(input) do
  # Part 2 "unfolds" each record five-fold: the spring strings are joined
  # with "?", the count lists are simply repeated. Part 1 does the rest.
  unfolded =
    for {springs, counts} <- input do
      {
        Enum.join(List.duplicate(springs, 5), "?"),
        List.flatten(List.duplicate(counts, 5))
      }
    end

  part1(unfolded)
end

# Counts the possible arrangements of `springs` given the remaining group
# `counts`. `previous_spring` tracks the character just consumed so the
# clauses know whether a damaged group is currently open. Branching
# states are cached in the process dictionary via `memoized/2`.
@spec aux(
springs :: String.t(),
previous_spring :: String.t(),
counts :: [pos_integer()]
) :: non_neg_integer()
# End of line, no counts left: one valid arrangement.
defp aux("", _, []), do: 1

# End of line, last group exactly consumed: valid.
defp aux("", _, [0]), do: 1

# End of line with unconsumed groups: dead end.
defp aux("", _, _), do: 0

# A "#" but no group to put it in: dead end.
defp aux("#" <> _, _, []), do: 0

# A "#" right after a group was fully consumed: dead end.
defp aux("#" <> _, _, [0 | _]), do: 0

# A "#" in an open (or fresh) group: consume one from its count.
defp aux("#" <> rest, _, [h | t]), do: aux(rest, "#", [h - 1 | t])

# A "." with no groups left: keep scanning.
defp aux("." <> rest, _, []), do: aux(rest, ".", [])

# A "." closing a fully consumed group: drop the zero count.
defp aux("." <> rest, "#", [0 | t]), do: aux(rest, ".", t)

# A "." interrupting a partially consumed group: dead end.
defp aux("." <> _, "#", [_ | _]), do: 0

# A "." after a ".": nothing changes.
defp aux("." <> rest, ".", counts), do: aux(rest, ".", counts)

# "?" after "#" with no counts left: it can only be a ".".
defp aux("?" <> rest, "#", []), do: aux(rest, ".", [])

# "?" after "#" with the group consumed: it can only be a ".".
defp aux("?" <> rest, "#", [0 | t]), do: aux(rest, ".", t)

# "?" after "#" with the group still open: it can only be a "#".
defp aux("?" <> rest, "#", [h | t]), do: aux(rest, "#", [h - 1 | t])

# "?" after "." with no counts left: it can only be a ".".
defp aux("?" <> rest, ".", []), do: aux(rest, ".", [])

# "?" after "." with the group consumed: it can only be a ".".
defp aux("?" <> rest, ".", [0 | t]), do: aux(rest, ".", t)

# The only genuine branch: "?" after "." with an open count can go either
# way. The result depends only on {rest, counts}, so memoize on that pair.
defp aux("?" <> rest, ".", [h | t]) do
memoized({rest, [h | t]}, fn ->
aux(rest, "#", [h - 1 | t]) + aux(rest, ".", [h | t])
end)
end

# Returns the value cached in the process dictionary under `key`,
# or computes it with `fun`, stores it, and returns it.
defp memoized(key, fun) do
  case Process.get(key) do
    nil ->
      result = fun.()
      Process.put(key, result)
      result

    cached ->
      cached
  end
end
end
``````
2 Likes

I spent most time to get part 1 to work; I solved part 2 by adding memoization:

2 Likes

@Aetherus I love your process dictionary trick. I just set up an ETS table for each process. I wonder though if process dictionaries are O(1) or O(log N) like other maps in Elixir?

My solution (not very happy with how it turned out, feels like way too much code):

Runs in around 0.2 seconds.

2 Likes

I don’t know whether it’s O(1) or O(log N) either. According to my benchmark (not for this puzzle), process dictionary is faster than ETS, which in turn is faster than a plain map, and Agent is the slowest.

3 Likes

Just tried changing my solution to use process dictionaries instead — besides a much cleaner solution, it seems that this version is indeed around 10% faster than my old ETS-based approach. Never used process dictionaries in this way, thanks for sharing your solution! Learned a new thing today.

2 Likes

Well I split the springs into groups (so `??..??.#?` becomes `["??", "??", "#?"]`), and then I ran my (originally non-memoized) fit-counter, which I then memoized with a dumb `Map` once I encountered part 2, passing it up and down the recursive function.

2 Likes

Today was tricky, I got the naive solution but optimizing for part 2 was not easy:

1 Like

AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA…

Ok so after hours and hours failing, I looked a bit online and found some tips to implement a fast solution.

The problem is that it was only in mutable imperative languages, so I had a hard time finding my own approach in Elixir. And it still could be better.

I could not have made it alone, I was too lost.

But anyway, this solution takes 100 ms for part 2, which is satisfying: https://github.com/lud/adventofcode/blob/main/lib/solutions/2023/day12.ex

2 Likes

I tried a pure functional approach. Here’s my heavily commented code:

``````defmodule AoC2023.Day12.FunctionalMemoization do

# `input` for both parts are things like
#
# [
#   {"#.#.###", [1,1,3]},
#   {".#...#....###.", [1,1,3]},
#   ...
# ]

@spec part1([{String.t(), [pos_integer()]}]) :: non_neg_integer()
def part1(input) do
  input
  # NOTE(review): the `Task.async_stream` line was lost in extraction —
  # reconstructed from the orphaned `end, ordered: false)` below.
  # Each task threads its own memo map through `aux/5`, which returns
  # a {count, memo} pair.
  # Starting `previous_spring` at "." behaves as if a "." were prepended
  # to the line, so a leading "?" needs no special case.
  |> Task.async_stream(fn {springs, counters} ->
    aux(springs, 0, ".", counters, %{})
  end, ordered: false)
  # Unwrap {:ok, result} from Task.async_stream…
  |> Stream.map(&elem(&1, 1))
  # …then take the count out of the {count, memo} pair.
  |> Stream.map(&elem(&1, 0))
  |> Enum.sum()
end

@spec part2([{String.t(), [pos_integer()]}]) :: non_neg_integer()
def part2(input) do
  # Unfold every record five-fold (springs joined with "?", counters
  # repeated), then reuse part 1 on the expanded input.
  unfolded =
    for {springs, counters} <- input do
      {
        Enum.join(List.duplicate(springs, 5), "?"),
        List.flatten(List.duplicate(counters, 5))
      }
    end

  part1(unfolded)
end

# Pure-functional variant of the memoized arrangement counter: the memo
# map is threaded through every call and returned alongside the count.
@spec aux(
springs :: String.t(),
index,
previous_spring :: String.t(),
counters,
memo
) :: {total_count, memo}
when index: non_neg_integer(),
counters: [non_neg_integer()],
total_count: non_neg_integer(),
memo: %{optional({index, counters}) => total_count}

# When we reached the end of a line,
# and all counters are consumed,
# we found a solution.
defp aux("", _, _, [], memo), do: {1, memo}

# When we reached the end of a line,
# and the last counter reaches 0,
# we found a solution.
defp aux("", _, _, [0], memo), do: {1, memo}

# When we reached the end of a line,
# and there is at least one non-zero counter,
# that's an invalid situation so no solution.
defp aux("", _, _, _, memo), do: {0, memo}

# When the current spring is broken,
# and there is no counter left,
# that's an invalid situation so no solution.
defp aux("#" <> _, _, _, [], memo), do: {0, memo}

# When the current spring is broken,
# and the current counter reaches 0,
# that's an invalid situation so no solution.
defp aux("#" <> _, _, _, [0 | _], memo), do: {0, memo}

# When the current spring is broken,
# and the current counter is not 0,
# decrement the counter and recursively find solutions.
defp aux("#" <> rest, i, _, [h | t], memo), do: aux(rest, i + 1, "#", [h - 1 | t], memo)

# When the current spring is good,
# and there's no counter left,
# try rest of the line and see if all the springs left are good.
defp aux("." <> rest, i, _, [], memo), do: aux(rest, i + 1, ".", [], memo)

# When the current spring is good,
# and the previous spring is bad,
# and the current counter reaches 0,
# we can discard that 0 and recursively find solutions.
defp aux("." <> rest, i, "#", [0 | t], memo), do: aux(rest, i + 1, ".", t, memo)

# When the current spring is good,
# and the previous spring is bad,
# and the current counter is not 0,
# that's an invalid situation so no solution.
defp aux("." <> _, _, "#", [_ | _], memo), do: {0, memo}

# When the current spring is good,
# and the previous spring is also good,
# the counters do not matter here:
# just check the rest of the springs.
defp aux("." <> rest, i, ".", counters, memo), do: aux(rest, i + 1, ".", counters, memo)

# When the current spring is unknown,
# and the previous spring is broken,
# and there's no counter left,
# then the current spring has to be good.
# We still need to check the rest of the springs.
defp aux("?" <> rest, i, "#", [], memo), do: aux(rest, i + 1, ".", [], memo)

# When the current spring is unknown,
# and the previous spring is broken,
# and the current counter reaches 0,
# then the current spring has to be good.
# We still need to check the rest of the springs.
# The zero-counter is no longer useful so we discard it.
defp aux("?" <> rest, i, "#", [0 | t], memo), do: aux(rest, i + 1, ".", t, memo)

# When the current spring is unknown,
# and the previous spring is bad,
# and the current counter is not 0,
# then the current spring has to be broken.
# We decrement that counter and check the rest of the springs.
defp aux("?" <> rest, i, "#", [h | t], memo), do: aux(rest, i + 1, "#", [h - 1 | t], memo)

# When the current spring is unknown,
# and the previous spring is good,
# and there's no counter left,
# then the current spring has to be good.
# We still need to check the rest of the springs.
defp aux("?" <> rest, i, ".", [], memo), do: aux(rest, i + 1, ".", [], memo)

# When the current spring is unknown,
# and the previous spring is good,
# and the current counter reaches 0,
# then the current spring must be good.
# Discard the 0 counter as usual,
# and check the rest of the springs.
defp aux("?" <> rest, i, ".", [0 | t], memo), do: aux(rest, i + 1, ".", t, memo)

# When the current spring is unknown,
# and the previous spring is good,
# and the current counter is not 0,
# then the current spring can be either good or bad.
# Try both possibilities.
# The count from position `i` onward depends only on {i, remaining
# counters}, so that pair is the memo key.
defp aux("?" <> rest, i, ".", [h | t], memo) do
memoized(memo, {i, [h | t]}, fn ->
{a, memo} = aux(rest, i + 1, "#", [h - 1 | t], memo)
{b, memo} = aux(rest, i + 1, ".", [h | t], memo)
{a + b, memo}
end)
end

# Pure-functional memoization: look `key` up in the memo map and only run
# `fun` (which itself returns {result, memo}) on a miss. Always returns
# {result, possibly-updated memo}.
defp memoized(memo, key, fun) do
  case Map.get(memo, key) do
    nil ->
      {result, memo} = fun.()
      {result, Map.put(memo, key, result)}

    cached ->
      {cached, memo}
  end
end
end
``````
1 Like

Nice !

So your memo cache is just the current group index and the remaining group counters.

I tried a memoized solution but I could not get it right. I had either no cache hits or wrong results

I think that if you map each possibility at each step, and sum the identical states’ counts, then that would be equivalent to my solution.

1 Like

I still need to try your solution in a debugging way to fully understand it.

Here’s what I think about dynamic programming. It’s just brute force with some sort of caching. When we talk about caching, we know each entry needs a key. I think it doesn’t matter what the key is, as long as it’s consistent. If the keys are continuous non-negative integers or tuples of integers, in imperative languages we usually use an array or a 2D array or a 3D array as our store. But we don’t have to restrict ourselves to that kind of caching mechanism. We can use maps, GenServers, ETS tables, process dictionaries, or even databases as the cache store as long as it provides a fast way of lookup for an exact match, and what the type of the keys are just doesn’t matter.

4 Likes

My beginner’s solution, Day12 part 1. Hot Springs
It took a minute, but it returned the correct answer.

``````defmodule Day12 do

# Part 1: sum the arrangement counts of every parsed record.
def part1(input) do
  input
  |> parse()
  |> Enum.map(&possible_arrangements/1)
  |> Enum.sum()
end

# Generates every concrete assignment for the unknown springs and counts
# the ones whose damaged-group sizes match `numbers`.
# NOTE(review): dropped the leftover debug `tap(&IO.puts(&1))`, and folded
# filter+count into `Enum.count/2`.
def possible_arrangements({springs, numbers}) do
  springs
  |> possibilities()
  |> Enum.count(&valid?(&1, numbers))
end

# Expands a list of spring atoms into every concrete candidate list,
# replacing each :unknown with both :damaged and :operational.
#
# The optional accumulator keeps its original meaning (a list of prefix
# candidates in left-to-right order). Internally candidates are built by
# O(1) prepending instead of the original O(n) `++ [x]` append per step,
# and reversed once at the end.
def possibilities(springs, possible \\ [[]])

def possibilities(springs, possible) do
  possible
  |> Enum.map(&Enum.reverse/1)
  |> then(&build(springs, &1))
  |> Enum.map(&Enum.reverse/1)
end

# Walk the springs, growing each reversed candidate at the head.
defp build([], acc), do: acc

defp build([:unknown | springs], acc) do
  damaged = Enum.map(acc, &[:damaged | &1])
  operational = Enum.map(acc, &[:operational | &1])
  build(springs, damaged ++ operational)
end

defp build([:damaged | springs], acc) do
  build(springs, Enum.map(acc, &[:damaged | &1]))
end

defp build([:operational | springs], acc) do
  build(springs, Enum.map(acc, &[:operational | &1]))
end

# A candidate is valid when the lengths of its consecutive non-operational
# runs equal the expected group sizes.
def valid?(springs, numbers) do
  runs =
    springs
    |> Enum.chunk_by(& &1)
    |> Enum.reject(fn chunk -> Enum.all?(chunk, &(&1 == :operational)) end)

  Enum.map(runs, &length/1) == numbers
end

# One record per line of the raw input.
def parse(raw_records) do
  for line <- String.split(raw_records, "\n"), do: parse_line(line)
end

# A line looks like "???.### 1,1,3": the spring map, then the group sizes.
def parse_line(raw_line) do
  [raw_chars, raw_numbers] = String.split(raw_line)

  springs =
    for char <- String.graphemes(raw_chars) do
      case char do
        "." -> :operational
        "#" -> :damaged
        "?" -> :unknown
      end
    end

  numbers =
    raw_numbers
    |> String.split(",")
    |> Enum.map(&String.to_integer/1)

  {springs, numbers}
end

end
``````
2 Likes

I solved it with Python first (was out and only computer I had was my wife’s who only had Python and R).

Today I had some free time and I wondered how a 1:1 Python to Elixir conversion would look like so I did:

I used Aja (perhaps I didn’t need to?) and also used `Process` for caching.

I don’t have the Python version committed due to it being on a different computer but I do have screenshot of it

I wish something like pythons cache decorator existed in Elixir. I looked into memoized but maybe I was using it wrong (it took forever). I will certainly scroll up and learn from you all.

Also Python takes 3 seconds vs Elixir’s 13 seconds. Same algorithm (literally mapped code). So I must be doing something wrong around the caching process (no pun intended) here.

Update: Ignore what I said before, I’m an idiot.

1 Like

I was not sure how to approach this, so I just did the dumb-guy brute-force method for part one. It takes about nine seconds to run on part one when I spawn a process per line. Unfortunately this dumb-guy approach doesn’t work on part 2. I quickly OOM and it stops running. I’ll be looking at solutions here to learn how others are doing this.

``````defmodule SpringParse do
# Concatenates `n` copies of `enum` into a single flat list.
defp enum_repeat(enum, n) do
  Enum.flat_map(1..n, fn _ -> enum end)
end

# Lengths of the "#" runs, i.e. the damaged-group sizes of a rendered line.
def into_spring_count(str) do
  for group <- String.split(str, "."), group != "" do
    String.length(group)
  end
end

# Enumerates every concrete spring line obtainable by replacing each "?"
# in `s` with "." or "#" (2^k candidates for k question marks).
def generate(s) do
chars = s |> String.graphemes()

# q_map maps a "?"'s position in the string to its ordinal among the
# "?" characters, so bit j of a candidate number controls the j-th "?".
q_map =
for {{i, _}, j} <-
(for {q, i} <- chars |> Stream.with_index(), q == "?" do
{i, q}
end)
|> Stream.with_index(),
into: %{} do
{i, j}
end

possibilities = chars |> Stream.filter(&(&1 == "?")) |> Enum.count()

# Interpret each p as a bitmask over the "?" positions:
# bit clear -> ".", bit set -> "#".
for p <- 0..(2 ** possibilities - 1) do
for {l, i} <- chars |> Stream.with_index() do
if l == "?" do
case Bitwise.band(2 ** Map.get(q_map, i), p) do
0 -> "."
n when n > 0 -> "#"
end
else
l
end
end
end
|> Stream.map(&Enum.join(&1, ""))
end

# Each non-empty line is "pattern counts"; counts become integers.
def parse(input) do
  input
  |> String.split("\n")
  |> Stream.reject(&(&1 === ""))
  |> Stream.map(fn line ->
    [pattern, raw_counts] = String.split(line, " ")
    counts = raw_counts |> String.split(",") |> Enum.map(&String.to_integer/1)
    [pattern, counts]
  end)
end

# Like parse/1, but applies the part-2 unfolding: the pattern is repeated
# five times joined by "?", and the counts are repeated five times.
def parse_2(input) do
  input
  |> String.split("\n")
  |> Stream.reject(&(&1 == ""))
  |> Stream.map(fn line ->
    [pattern, raw_counts] = String.split(line, " ")
    counts = raw_counts |> String.split(",") |> Stream.map(&String.to_integer/1)

    [
      enum_repeat([pattern], 5) |> Enum.join("?"),
      enum_repeat(counts, 5)
    ]
  end)
end

# Brute force: enumerate every candidate line and count those whose
# damaged-group sizes match the expectation.
def count_solutions(pattern, expectation) do
  pattern
  |> SpringParse.generate()
  |> Stream.map(&SpringParse.into_spring_count/1)
  |> Enum.count(&(&1 == expectation))
end

# Part 1: add up the solution count of every parsed line.
def solve(input) do
  input
  |> parse()
  |> Enum.map(fn [pattern, expectation] -> count_solutions(pattern, expectation) end)
  |> Enum.sum()
end
end
``````

with the parallelization code

``````defmodule SprintParseWorker do
  # NOTE(review): this snippet was garbled in extraction — the head of the
  # receive loop and the `spawn` wrapper were lost. They are reconstructed
  # below from the surviving fragments (`loop_receive(n - 1, acc + m)`,
  # the `send(parent, ...)` call, and `|> Enum.count()`).

  # Waits for `n` result messages and sums them into `acc`.
  defp loop_receive(n, acc) do
    case n do
      0 ->
        acc

      _ ->
        receive do
          m ->
            loop_receive(n - 1, acc + m)
        end
    end
  end

  # Spawns one process per input line; each sends its solution count back
  # to the parent, which sums them with loop_receive/2.
  def solve_in_parallel(i) do
    parent = self()

    u = SpringParse.parse(i)

    pid_count =
      for [pattern, expectation] <- u do
        spawn(fn ->
          send(parent, SpringParse.count_solutions(pattern, expectation))
        end)
      end
      |> Enum.count()

    loop_receive(pid_count, 0)
  end
end
``````
1 Like

I thought my part 1 was a slightly clever early terminating brute force search. Works pretty well but for part 2 it is way too slow on the real data. Not even sure how I can speed it up beyond running it on a machine with more than four cores to get more parallel.

``````defmodule Day12 do
@moduledoc """
Day12 AoC Solutions
"""

alias AocToolbox.Input

# Example input from the puzzle description.
def input(:test),
do: """
???.### 1,1,3
.??..??...?##. 1,1,3
?#?#?#?#?#?#?#? 1,3,1,6
????.#...#... 4,1,1
????.######..#####. 1,6,5
?###???????? 3,2,1
"""

# Extra edge-case lines used while debugging.
def input(:test2),
do: """
?###? 3
?###?????? 3,2
??????? 2,1
?###???????? 3,2,1
"""

# The real puzzle input, stored next to this module.
def input(:real), do: Input.load(__DIR__ <> "/input.txt")

# Entry point: solve(part, :test | :test2 | :real).
def solve(1, mode) do
__MODULE__.Part1.solve(input(mode))
end

def solve(2, mode) do
__MODULE__.Part2.solve(input(mode))
end

defmodule Part1 do
# Parses the input, then adds up every record's arrangement count.
def solve(input) do
  for record <- parse(input), reduce: 0 do
    total -> total + bfs(record)
  end
end

# Delegates to the toolbox parser; the records are the second tuple element.
def parse(input) do
  elem(Input.Parser.parser(input), 1)
end

# Counts the valid arrangements of one parsed record `[line, spec]`.
def bfs([line, spec]) do
do_bfs(line, spec)
end

# `last` accumulates the chars of the damaged group currently being built;
# [:no_hash_tag] marks "a group was just closed, the next char must not
# extend it".
defp do_bfs(line, spec, last \\ [])
# End of line with every group satisfied: one valid arrangement.
defp do_bfs([], [], _), do: 1
# End of line with groups left over: dead end.
defp do_bfs([], _spec, _), do: 0

# No groups left: valid only if no "#" remains in the line.
defp do_bfs(line, [], _) do
if(Enum.any?(line, fn i -> i == ?# end)) do
0
else
1
end
end

# A "?" with no open group: try both "#" and "." concurrently.
# NOTE(review): the `i -> i` fallthrough would pass non-{:ok, _} results
# straight into Enum.sum/1; presumably unreachable with timeout: :infinity.
defp do_bfs([?? | rest] = _line, spec, []) do
# + do_bfs(rest, spec, [?.])
Task.async_stream([?#, ?.], fn i -> do_bfs(rest, spec, [i]) end, timeout: :infinity)
|> Stream.map(fn
{:ok, i} -> i
i -> i
end)
|> Enum.sum()
end

# A concrete char with no open group: start a fresh chunk with it.
defp do_bfs([next | rest] = _line, spec, []) do
do_bfs(rest, spec, [next])
end

# Main step: look at the next char and the last char of the open chunk.
defp do_bfs([next | rest] = line, [next_chunk | rest_chunks] = spec, [last | _] = chunk) do
# Number of "#" collected in the chunk built so far.
chunk_count = Enum.filter(chunk, fn i -> i == ?# end) |> Enum.count()

case {next, last} do
# "?" while a "#" group is open: either close it with "." now, or keep
# it growing with "#" (only if the expected size allows; taking the
# group to exactly its size consumes the spec entry).
{??, ?#} ->
do_bfs([?. | rest], spec, chunk) +
case next_chunk do
n when n == chunk_count + 1 -> do_bfs(rest, rest_chunks, [:no_hash_tag])
n when n > chunk_count + 1 -> do_bfs([?# | rest], spec, chunk)
_ -> 0
end

# "?" in any other context: try both interpretations.
{??, _} ->
do_bfs([?# | rest], spec, chunk) + do_bfs([?. | rest], spec, chunk)

# "#" extending a "#" run: completes the group, overflows it, or grows it.
{?#, ?#} ->
case next_chunk do
n when n == chunk_count + 1 ->
do_bfs(rest, rest_chunks, [:no_hash_tag])

^chunk_count ->
0

_n ->
do_bfs(rest, spec, [next | chunk])
end

# "#" immediately after a just-closed group: not allowed.
{?#, :no_hash_tag} ->
0

# "#" after ".": starts a new group; a size-1 group completes at once.
{?#, ?.} ->
if next_chunk == 1 do
do_bfs(rest, rest_chunks, [:no_hash_tag])
else
do_bfs(rest, spec, [next])
end

# "." closing an undersized group: dead end.
{?., ?#} when chunk_count < next_chunk ->
0

# "." closing an exactly-sized group: consume the spec entry.
{?., ?#} when chunk_count == next_chunk ->
do_bfs(rest, rest_chunks, [next])

# "." closing an oversized group: dead end.
{?., ?#} ->
0

# "." after ".": nothing changes.
{?., ?.} ->
do_bfs(rest, spec, chunk)

# "." after a just-closed group: back to a plain "." state.
{?., :no_hash_tag} ->
do_bfs(rest, spec, [?.])
end
end
end

defmodule Part2 do
# Part 2: unfold each record five-fold, then count arrangements per record
# concurrently with Part 1's search.
def solve(input) do
  input
  |> parse()
  |> Enum.map(&unfold/1)
  # NOTE(review): this async step was lost in extraction; without it the
  # Stream.map below would receive bare records instead of {:ok, count}.
  |> Task.async_stream(&Part1.bfs/1, timeout: :infinity)
  |> Stream.map(fn {:ok, i} -> i end)
  |> Enum.sum()
end

# Part 2 reuses Part 1's parser.
defp parse(input), do: Part1.parse(input)

# Part-2 unfolding: five copies of the spring chars separated by '?',
# and the group spec repeated five times.
defp unfold([springs, specs]) do
  unfolded_springs =
    springs
    |> List.duplicate(5)
    |> Enum.intersperse([??])
    |> List.flatten()

  unfolded_specs =
    specs
    |> List.duplicate(5)
    |> List.flatten()

  [unfolded_springs, unfolded_specs]
end
end
end
``````
1 Like

Uggh. Finally got the caching solution to work after thoroughly reading every solution I could find. Ultimately @woojiahao’s was the one that seemed to fit with my existing solution the easiest.

``````defmodule Day12 do
@moduledoc """
Day12 AoC Solutions
"""

alias AocToolbox.Input

# Example input from the puzzle description.
def input(:test),
do: """
???.### 1,1,3
.??..??...?##. 1,1,3
?#?#?#?#?#?#?#? 1,3,1,6
????.#...#... 4,1,1
????.######..#####. 1,6,5
?###???????? 3,2,1
"""

# Extra edge-case lines used while debugging.
def input(:test2),
do: """
?###? 3
?###?????? 3,2
??????? 2,1
?###???????? 3,2,1
"""

# The real puzzle input, stored next to this module.
def input(:real), do: Input.load(__DIR__ <> "/input.txt")

# Entry point: solve(part, :test | :test2 | :real).
def solve(1, mode) do
__MODULE__.Part1.solve(input(mode))
end

def solve(2, mode) do
__MODULE__.Part2.solve(input(mode))
end

defmodule Part1 do
# Parses the input, then adds up every record's arrangement count.
def solve(input) do
  for record <- parse(input), reduce: 0 do
    total -> total + bfs(record)
  end
end

# Delegates to the toolbox parser; the records are the second tuple element.
def parse(input) do
  elem(Input.Parser.parser(input), 1)
end

# Counts the arrangements for one record, memoizing sub-results in a
# per-call named ETS table.
def bfs([line, spec], table_name \\ :cached) do
  :ets.new(table_name, [:set, :protected, :named_table])

  # Delete the named table even if do_bfs raises; otherwise the table
  # would leak and the next call with the same name would crash in
  # :ets.new/2 (named tables must be unique).
  try do
    do_bfs(line, spec, table_name)
  after
    :ets.delete(table_name)
  end
end

# Same search as the uncached version, with every branching state looked
# up in / inserted into the ETS table keyed by {line, spec, chunk}.
# `last` holds the chars of the damaged group currently being built;
# [:no_hash_tag] marks "a group was just closed".
defp do_bfs(line, spec, last \\ [], table_name)
# End of line with every group satisfied: one valid arrangement.
defp do_bfs([], [], _, _), do: 1
# End of line with groups left over: dead end.
defp do_bfs([], _spec, _, _), do: 0

# No groups left: valid only if no "#" remains in the line.
defp do_bfs(line, [], _, _) do
if(Enum.any?(line, fn i -> i == ?# end)) do
0
else
1
end
end

# A "?" with no open group: branch into "#" and ".", caching the result.
defp do_bfs([?? | rest] = line, spec, [] = chunk, table_name) do
key = {line, spec, chunk}
cached = :ets.lookup(table_name, key)

if length(cached) > 0 do
[{_, sub_count} | _] = cached
sub_count
else
count = do_bfs(rest, spec, [?#], table_name) + do_bfs(rest, spec, [?.], table_name)
:ets.insert(table_name, {key, count})
count
end
end

# A concrete char with no open group: start a fresh chunk with it.
defp do_bfs([next | rest] = _line, spec, [], table_name) do
do_bfs(rest, spec, [next], table_name)
end

# Main step: consult the cache, otherwise branch on the next char and the
# last char of the open chunk, then memoize the result.
defp do_bfs(
[next | rest] = line,
[next_chunk | rest_chunks] = spec,
[last | _] = chunk,
table_name
) do
key = {line, spec, chunk}
cached = :ets.lookup(table_name, key)

if length(cached) > 0 do
[{_, sub_count} | _] = cached
sub_count
else
# Number of "#" collected in the chunk built so far.
chunk_count = Enum.filter(chunk, fn i -> i == ?# end) |> Enum.count()

count =
case {next, last} do
# "?" while a "#" group is open: close it with "." now, or keep it
# growing with "#" if the expected size allows.
{??, ?#} ->
do_bfs([?. | rest], spec, chunk, table_name) +
case next_chunk do
n when n == chunk_count + 1 ->
do_bfs(rest, rest_chunks, [:no_hash_tag], table_name)

n when n > chunk_count + 1 ->
do_bfs([?# | rest], spec, chunk, table_name)

_ ->
0
end

# "?" in any other context: try both interpretations.
{??, _} ->
do_bfs([?# | rest], spec, chunk, table_name) +
do_bfs([?. | rest], spec, chunk, table_name)

# "#" extending a "#" run: completes, overflows, or grows the group.
{?#, ?#} ->
case next_chunk do
n when n == chunk_count + 1 ->
do_bfs(rest, rest_chunks, [:no_hash_tag], table_name)

^chunk_count ->
0

_n ->
do_bfs(rest, spec, [next | chunk], table_name)
end

# "#" immediately after a just-closed group: not allowed.
{?#, :no_hash_tag} ->
0

# "#" after ".": starts a new group; a size-1 group completes at once.
{?#, ?.} ->
if next_chunk == 1 do
do_bfs(rest, rest_chunks, [:no_hash_tag], table_name)
else
do_bfs(rest, spec, [next], table_name)
end

# "." closing an undersized group: dead end.
{?., ?#} when chunk_count < next_chunk ->
0

# "." closing an exactly-sized group: consume the spec entry.
{?., ?#} when chunk_count == next_chunk ->
do_bfs(rest, rest_chunks, [next], table_name)

# "." closing an oversized group: dead end.
{?., ?#} ->
0

# "." after ".": nothing changes.
{?., ?.} ->
do_bfs(rest, spec, chunk, table_name)

# "." after a just-closed group: back to a plain "." state.
{?., :no_hash_tag} ->
do_bfs(rest, spec, [?.], table_name)
end

:ets.insert(table_name, {key, count})
count
end
end
end

defmodule Part2 do
# Part 2: unfold each record five-fold, then run the cached search on each
# record concurrently, with a uniquely named ETS cache per record.
def solve(input) do
  input
  |> parse()
  |> Stream.map(&unfold/1)
  |> Stream.with_index()
  # NOTE(review): the `Task.async_stream(` line was lost in extraction —
  # reconstructed from the orphaned `fn {line, ndx} -> ... end,` below.
  # String.to_atom/1 creates one atom per input line; bounded here, but
  # atoms are never garbage-collected, so beware with unbounded input.
  |> Task.async_stream(
    fn {line, ndx} -> Day12.Part1.bfs(line, String.to_atom("cache_#{ndx}")) end,
    timeout: :infinity
  )
  |> Stream.map(fn {:ok, i} -> i end)
  |> Enum.sum()
end

# Part 2 reuses Part 1's parser.
defp parse(input), do: Part1.parse(input)

# Part-2 unfolding: five copies of the spring chars separated by '?',
# and the group spec repeated five times.
defp unfold([springs, specs]) do
  unfolded_springs =
    springs
    |> List.duplicate(5)
    |> Enum.intersperse([??])
    |> List.flatten()

  unfolded_specs =
    specs
    |> List.duplicate(5)
    |> List.flatten()

  [unfolded_springs, unfolded_specs]
end
end
end
``````

Maybe `Nebulex.Caching.Decorators` will work for you?

https://hexdocs.pm/nebulex/Nebulex.Caching.Decorators.html

1 Like