# Advent of Code 2023 - Day 2

That was a pleasant one.

``````defmodule AdventOfCode.Day02 do
# Day 2: parse each "Game N: ..." line with NimbleParsec, then check which
# games fit in a given bag (part 1) or find each game's minimal bag (part 2).
defmodule Parser do
import NimbleParsec

# "Game <1-3 digit id>: " — the literal label and separator are discarded,
# only the integer id is emitted.
game_id =
string("Game ")
|> ignore()
|> integer(min: 1, max: 3)
|> ignore(string(": "))

# One draw such as "3 blue, 4 red": 1..3 "<count> <color>" pairs, each with
# an optional ", " separator consumed; the flat results are folded into a
# %{color => count} map by cube_set_reducer/1, and a trailing "; " between
# draws is consumed if present.
cube_set =
times(
integer(min: 1, max: 3)
|> ignore(ascii_char([?\s]))
|> choice([string("red"), string("green"), string("blue")])
|> ignore(optional(ascii_char([?,])))
|> ignore(optional(ascii_char([?\s]))),
min: 1,
max: 3
)
|> reduce(:cube_set_reducer)
|> ignore(optional(string("; ")))

cube_sets = repeat(cube_set)

# A full line parses to [id, [draw_map, ...]]; the trailing newline is eaten.
game =
game_id
|> wrap(cube_sets)
|> ignore(optional(ascii_char([?\n])))
|> wrap()

games = repeat(game)

# Entry points — parse!/2 below picks one by name.
defparsec(:game_id, game_id)
defparsec(:cube_set, cube_set)
defparsec(:cube_sets, cube_sets)
defparsec(:game, game)
defparsec(:games, games)

# Turns the flat [count, color, count, color, ...] output of one draw into
# a %{color => count} map (each chunk is [count, color], emitted as {color, count}).
def cube_set_reducer(x) do
for [k, v] <- Enum.chunk_every(x, 2), do: {v, k}, into: %{}
end
end

# Runs the named parsec and asserts the whole input was consumed
# (the pattern requires an empty rest binary).
def parse!(input, parsec \\ :games) do
{:ok, result, "", %{}, _, _} = apply(Parser, parsec, [input])
result
end

# Sum of the ids of the games playable with `bag` (%{color => available}).
# NOTE(review): with the default empty bag every game is rejected, because
# game_possible?/2 treats a color missing from the bag as exceeded — callers
# are expected to pass the actual bag limits.
def part1(input, bag \\ %{}) do
input
|> parse!()
|> Enum.filter(&game_possible?(&1, bag))
|> Enum.map(fn [game_id, _] -> game_id end)
|> Enum.sum()
end

# Sum over all games of the "power" (product of counts) of the minimal bag.
def part2(input) do
input
|> parse!()
|> Enum.map(&minimal_bag/1)
|> Enum.map(&power/1)
|> Enum.sum()
end

# Product of the per-color counts of a bag.
def power(minimal_bag) do
minimal_bag
|> Map.values()
|> Enum.reduce(1, &(&1 * &2))
end

# Smallest bag that makes a game possible: per-color maximum over all draws.
def minimal_bag([_game_id, cube_sets]) do
Enum.reduce(cube_sets, %{"red" => 0, "green" => 0, "blue" => 0}, fn cube_set, acc ->
Enum.map(acc, fn {k, v} ->
case cube_set[k] do
new when is_integer(new) and new > v -> {k, new}
_ -> {k, v}
end
end)
|> Enum.into(%{})
end)
end

# A game is possible when no draw asks for more of any color than the bag
# holds; a color absent from the bag counts as unavailable.
def game_possible?([_game_id, cube_sets], bag) do
any_exceeding? =
Enum.find(cube_sets, fn cube_set ->
Enum.any?(cube_set, fn {k, v} -> is_nil(bag[k]) or bag[k] < v end)
end)

!any_exceeding?
end
end

``````
3 Likes

My solution using Livebook.

I avoided Regex this time and did a lot of String splitting.

After solving part 2 I adjusted part 1 to also work with the aggregated “max cubes needed”.

2 Likes

I overkilled it with yecc

Here’s the content my `aoc2023_day2_parser.yrl` file:

``````Terminals  game  num  red  green  blue  ':'  ','  ';'.

Nonterminals  color  games  one_game  set  sets.

Rootsymbol  games.

%% "<num> <color-word>" -> {ColorAtom, Count}; element(3, Token) reads the
%% value slot of a {Category, Metadata, Value} token from the lexer.
color -> num red : {red, element(3, '\$1')}.
color -> num green : {green, element(3, '\$1')}.
color -> num blue : {blue, element(3, '\$1')}.

%% A set is a comma-separated, non-empty list of color entries.
set -> color : ['\$1'].
set -> color ',' set : ['\$1' | '\$3'].

%% Sets within one game are separated by semicolons.
sets -> set : ['\$1'].
sets -> set ';' sets : ['\$1' | '\$3'].

%% "game <num> ':' <sets>" -> {GameId, Sets}.
one_game -> game num ':' sets : {element(3, '\$2'), '\$4'}.

%% The root value: the list of all games, in input order.
games -> one_game : ['\$1'].
games -> one_game games : ['\$1' | '\$2'].
``````

FYI,

The parser generated by `yecc` with the `.yrl` file expects a series of tokens, not the text input, so we need another program to transform the text into tokens.

A token is either `{category_name(), metadata(), value()}` or `{category_name(), metadata()}` if that category contains only 1 value.

• `@type category_name() :: atom()`
• `@type metadata() :: term()`
• `@type value() :: term()`

The first line of the `.yrl` file declares what terminal category names will appear in this file. The name of a terminal category is just the `category_name()` part of a token.

The second line of the `.yrl` file declares what non-terminal categories will be defined in this file. A non-terminal category is a category that is composed of one or more terminal or non-terminal categories. We’ll see this later.

The third line tells the parser generator which category will be the root. The value of the root is the top-level value that’ll be returned by the parser.

The remaining lines define the non-terminal categories. The syntax of these lines is

``````NonterminalCategoryName -> Syntax : Reducer.
``````

The things that appear in the `Syntax` part are all category names (terminal or non-terminal).

A `Reducer` is just a piece of Erlang code that converts the categories in the `Syntax` part to a value (any Erlang term you want). The `'\$n'` in a `Reducer` part refers to the value of the n-th (1-based) category in the `Syntax` part.

Here is my lexer (a program that converts the text input to a series of tokens):

``````defmodule AoC2023.Day2.Lexer do
  @moduledoc """
  Turns the raw puzzle text into the token list consumed by the yecc parser.

  Tokens are `{tag, metadata}` or `{:num, metadata, value}`, and the stream
  is terminated with the `:"\$end"` token that yecc expects.
  """

  def tokenize(input), do: do_tokenize(input, [])

  # End of input: append the terminator and restore chronological order.
  defp do_tokenize("", tokens), do: Enum.reverse([{:"\$end", []} | tokens])

  # Literal keywords and punctuation, one clause each.
  defp do_tokenize("Game" <> remaining, tokens), do: do_tokenize(remaining, [{:game, []} | tokens])
  defp do_tokenize("red" <> remaining, tokens), do: do_tokenize(remaining, [{:red, []} | tokens])
  defp do_tokenize("green" <> remaining, tokens), do: do_tokenize(remaining, [{:green, []} | tokens])
  defp do_tokenize("blue" <> remaining, tokens), do: do_tokenize(remaining, [{:blue, []} | tokens])
  defp do_tokenize("," <> remaining, tokens), do: do_tokenize(remaining, [{:",", []} | tokens])
  defp do_tokenize(";" <> remaining, tokens), do: do_tokenize(remaining, [{:";", []} | tokens])
  defp do_tokenize(":" <> remaining, tokens), do: do_tokenize(remaining, [{:":", []} | tokens])

  # A digit arriving while a number token sits at the head extends that number.
  defp do_tokenize(<<digit, remaining::binary>>, [{:num, _, value} | tokens])
       when digit in ?0..?9,
       do: do_tokenize(remaining, [{:num, [], value * 10 + digit - ?0} | tokens])

  # A digit after anything else starts a fresh number token.
  defp do_tokenize(<<digit, remaining::binary>>, tokens) when digit in ?0..?9,
    do: do_tokenize(remaining, [{:num, [], digit - ?0} | tokens])

  # Everything else (spaces, newlines) is skipped.
  defp do_tokenize(<<_skipped, remaining::binary>>, tokens), do: do_tokenize(remaining, tokens)
end
``````

The tokens it produces are like

``````[
{:game, []},
{:num, [], 1},
{:":", []},
{:num, [], 1},
{:blue, []},
{:";", []},
{:num, [], 4},
{:green, []},
{:",", []},
{:num, [], 5},
{:blue, []},
...
]
``````

Whole solution:

1 Like

import AOC
import String, only: [split: 2, to_integer: 1]
import Enum, only: [map: 2, max: 1, product: 1, reduce: 3, sum: 1, zip_with: 2]

``````aoc 2023, 2 do
  # "Game <id>: <hands>" -> {id, [hand, ...]} where each hand is [r, g, b].
  def parse_game(line) do
    ["Game " <> id, hands] = String.split(line, ": ")
    parsed = hands |> String.split("; ") |> Enum.map(&parse_hand/1)
    {String.to_integer(id), parsed}
  end

  # One hand ("1 red, 2 blue") becomes a fixed [red, green, blue] triple.
  def parse_hand(hand) do
    hand
    |> String.split(", ")
    |> Enum.reduce([0, 0, 0], &parse_count/2)
  end

  # Folds a single "<n> <color>" entry into the running [r, g, b] triple.
  def parse_count(count, [red, green, blue]) do
    case String.split(count, " ") do
      [n, "red"] -> [String.to_integer(n), green, blue]
      [n, "green"] -> [red, String.to_integer(n), blue]
      [n, "blue"] -> [red, green, String.to_integer(n)]
    end
  end

  # Parses every line, maps it through `f`, and sums the results.
  def common(input, f) do
    input
    |> String.split("\n")
    |> Enum.map(fn line -> line |> parse_game() |> f.() end)
    |> Enum.sum()
  end

  def p1(input) do
    common(input, fn {id, games} ->
      # Per-colour maximum over all hands = minimum needed; compare to the bag.
      [r, g, b] = Enum.zip_with(games, &Enum.max/1)
      if r <= 12 and g <= 13 and b <= 14, do: id, else: 0
    end)
  end

  def p2(input) do
    common(input, fn {_id, hands} ->
      hands |> Enum.zip_with(&Enum.max/1) |> Enum.product()
    end)
  end
end
``````
1 Like

My solution — I was also lucky to be able to just reuse the code for the second task

``````defmodule Puzzle2 do
  @moduledoc """
  Day 2 helpers: regex-based per-line parsing into per-colour maxima.
  """

  # Extracts the game id as %{"game_id" => "<digits>"}.
  def get_game_id(line) do
    Regex.named_captures(~r/(?:Game )(?<game_id>\d+)/, line)
  end

  # Scans the line for every "<n> green|blue|red" occurrence. With
  # `capture: :all_names` each match is a [blues, greens, reds] triple
  # (names sorted alphabetically); unmatched alternatives come back as "".
  def find_number_of_cubes(line) do
    Regex.scan(~r/(?:(?<greens>\d+) green)|(?:(?<blues>\d+) blue)|(?:(?<reds>\d+) red)/, line,
      capture: :all_names
    )
  end

  # Maximum of the non-empty counts, or 0 when the colour never appears.
  def get_max_cube_count(counts) do
    numbers = for count <- counts, count != "", do: String.to_integer(count)
    Enum.max([0 | numbers])
  end

  # Per-colour maximum seen in one line: %{blue: _, green: _, red: _}.
  # The index picks the colour's column out of each [blues, greens, reds] row.
  def find_max_per_color(cubes) do
    for {color, index} <- [blue: 0, green: 1, red: 2], into: %{} do
      column = Enum.map(cubes, &Enum.at(&1, index))
      {color, get_max_cube_count(column)}
    end
  end

  # %{blue: _, green: _, red: _, "game_id" => _} for one input line.
  def build_game_info(line) do
    line
    |> find_number_of_cubes()
    |> find_max_per_color()
    |> Map.merge(get_game_id(line))
  end

  # True when the game never needs more cubes of any colour than available.
  def game_is_possible?(game, avail) do
    Enum.all?([:blue, :red, :green], fn color ->
      Map.fetch!(game, color) <= Map.fetch!(avail, color)
    end)
  end
end

# Bag limits for part 1. NOTE(review): `puzzle_data` is presumably bound
# earlier in the Livebook — it is not visible in this snippet.
cubes = %{:green => 13, :red => 12, :blue => 14}
# Part 1: sum the ids of all games that fit within `cubes`.
puzzle_data
|> String.split("\n", trim: true)
|> Enum.map(&Puzzle2.build_game_info/1)
|> Enum.filter(&Puzzle2.game_is_possible?(&1, cubes))
|> Enum.reduce(0, fn %{"game_id" => gid}, acc -> acc + String.to_integer(gid) end)
|> dbg()

# Part 2: sum of each game's power (product of the per-colour maxima).
puzzle_data
|> String.split("\n", trim: true)
|> Enum.map(&Puzzle2.build_game_info/1)
|> Enum.reduce(0, fn %{:green => green, :red => red, :blue => blue}, acc ->
acc + green * red * blue
end)
|> dbg()
``````
2 Likes

Here is my take on Day 2. My focus was on parsing the text input and processing the lists of maps. `Enum.all?/2` and `Map.merge/3` helped keep things under control (mainly).

``````defmodule Day2 do
  @moduledoc """
  Advent of Code 2023, day 2: parse games and check them against a bag.
  """

  # Bag contents that part 1 checks each game against.
  @test_bag %{"red" => 12, "green" => 13, "blue" => 14}

  # Sum of the ids of the games that are possible with @test_bag.
  # FIX: the original pipeline started with `|> parse_input()` and no
  # left-hand value, which does not compile; read the file at `path` and
  # feed its lines in, which is the shape parse_input/1 expects.
  def part_1(path \\ "day2/sample.txt") do
    path
    |> read_lines()
    |> parse_input()
    |> sum_possible_games()
  end

  # Sum over all games of the product of the minimal per-colour cube counts.
  # FIX: same missing pipeline head as part_1.
  def part_2(path \\ "day2/sample.txt") do
    path
    |> read_lines()
    |> parse_input()
    |> sum_minimum_cubes()
  end

  # Reads `path` and returns its non-empty lines.
  defp read_lines(path) do
    path
    |> File.read!()
    |> String.split("\n", trim: true)
  end

  # "Game <id>: <sets>" lines -> [{id, [%{color => count}, ...]}, ...].
  defp parse_input(input) do
    input
    |> Enum.map(fn row ->
      String.split(row, ": ")
    end)
    |> Enum.map(fn [raw_id, raw_sets] ->
      id = String.split(raw_id, " ") |> List.last() |> String.to_integer()

      sets =
        raw_sets
        |> String.split("; ")
        |> Enum.map(fn game ->
          String.split(game, ", ")
        end)
        |> Enum.map(fn game ->
          game
          |> Enum.map(fn x -> String.split(x, " ") end)
          |> Enum.map(&List.to_tuple/1)
          |> Map.new(fn {val, key} -> {key, String.to_integer(val)} end)
        end)

      {id, sets}
    end)
  end

  defp sum_possible_games(input) do
    input
    |> Enum.filter(&possible_game?/1)
    |> Enum.map(fn {id, _} -> id end)
    |> Enum.sum()
  end

  # FIX: previously returned `id`/`false` via a redundant `case`; a
  # ?-function should return a boolean, and Enum.filter/2 only needs
  # truthiness anyway, so behavior for the caller is unchanged.
  defp possible_game?({_id, sets}) do
    Enum.all?(sets, fn set ->
      Enum.all?(set, fn {color, count} ->
        Map.has_key?(@test_bag, color) && @test_bag[color] >= count
      end)
    end)
  end

  defp sum_minimum_cubes(input) do
    input
    |> Enum.map(&calculate_smallest_cube_count_for_game/1)
    |> Enum.map(&Enum.product/1)
    |> Enum.sum()
  end

  # Per-colour maximum across all sets = minimal bag; returned as a list of
  # counts ready for Enum.product/1.
  defp calculate_smallest_cube_count_for_game({_id, sets}) do
    sets
    |> Enum.reduce(%{"red" => 0, "green" => 0, "blue" => 0}, fn set, acc ->
      Map.merge(acc, set, fn _k, v1, v2 -> Enum.max([v1, v2]) end)
    end)
    |> Map.values()
  end
end
``````
3 Likes

My code here. The only notable thing was I decided to force myself to parse manually doing nothing but pattern matching on binaries. Painful and pointless, but also was kind of a fun challenge. Only worked because I knew the limits of the input size. It would fail if any round included any cubes of more than 99 or if there were more than 100 games. Since I knew neither of those conditions applied it was fine.

Here’s the parsing:

``````def parse(input) do
  # Parse every line, then flatten each single-entry %{game_id => nested}
  # map produced by parse_game/1 into a {game_id, rounds} pair.
  input
  |> Day2.input()
  |> Input.lines()
  |> Enum.map(&parse_game/1)
  |> Map.new(fn game_map ->
    [game_id] = Map.keys(game_map)
    [nested] = Map.values(game_map)
    {game_id, un_nest(nested)}
  end)
end

# Strip the "Game " label and keep parsing the id.
defp parse_game(<<"Game ", rest::binary>>), do: parse_game(rest)

# Two-digit game id; 49..57 and 48..57 are the ASCII ranges ?1..?9 and
# ?0..?9 (leading digit may not be zero).
defp parse_game(<<i, j, ": ", rest::binary>>) when i in 49..57 and j in 48..57 do
k = (i - 48) * 10 + (j - 48)
Map.put(%{}, k, parse_game(rest))
end

# Single-digit game id.
defp parse_game(<<i, ": ", rest::binary>>) when i in 49..57,
do: %{(i - 48) => parse_game(rest)}

# Hard-coded special case: the input is known to contain exactly 100 games.
defp parse_game(<<"100: ", rest::binary>>), do: %{100 => parse_game(rest)}

# Single-digit cube counts, one clause per colour.
defp parse_game(<<i, " blue", rest::binary>>) when i in 49..57 do
[{"blue", i - 48} | parse_game(rest)]
end

defp parse_game(<<i, " red", rest::binary>>) when i in 49..57 do
[{"red", i - 48} | parse_game(rest)]
end

defp parse_game(<<i, " green", rest::binary>>) when i in 49..57 do
[{"green", i - 48} | parse_game(rest)]
end

# Two-digit cube counts (the author notes counts above 99 would break this).
defp parse_game(<<i, j, " blue", rest::binary>>) when i in 49..57 and j in 48..57 do
[{"blue", (i - 48) * 10 + (j - 48)} | parse_game(rest)]
end

defp parse_game(<<i, j, " red", rest::binary>>) when i in 49..57 and j in 48..57 do
[{"red", (i - 48) * 10 + (j - 48)} | parse_game(rest)]
end

defp parse_game(<<i, j, " green", rest::binary>>) when i in 49..57 and j in 48..57 do
[{"green", (i - 48) * 10 + (j - 48)} | parse_game(rest)]
end

# ", " separates counts within a round: skip it.
defp parse_game(<<", ", rest::binary>>), do: parse_game(rest)
# "; " starts a new round: nest the remainder one list deeper (see un_nest).
defp parse_game(<<"; ", rest::binary>>), do: [parse_game(rest)]
defp parse_game(""), do: []

# Flattens the parser's nested [pair, ..., [deeper]] structure into a list
# of %{color => count} maps, one per round. Because deeper rounds are
# emitted first (`un_nest(inner) ++ [round]`), rounds come out deepest-first.
defp un_nest(nested, round \\ %{})

# Nothing left: emit the round accumulated so far.
defp un_nest([], round), do: [round]

# A single remaining list element is the nested tail holding later rounds.
defp un_nest([inner], round) when is_list(inner), do: un_nest(inner) ++ [round]

# Fold a {color, count} pair into the current round's map.
defp un_nest([{color, count} | rest], round), do: un_nest(rest, Map.put(round, color, count))
``````
4 Likes

A bit late. I'm trying to learn Elixir through this year's Advent of Code, coming mostly from Ruby. Here is my solution.

``````  def part1(input) do
    input
    |> String.split("\n", trim: true)
    |> Enum.filter(fn line ->
      # A game is possible when no colour's maximum exceeds the bag contents.
      match?(%{green: g, red: r, blue: b} when g <= 13 and r <= 12 and b <= 14, bag_max(line))
    end)
    |> Enum.map(fn line ->
      [_full, id] = Regex.run(~r/Game (\d+)/, line)
      String.to_integer(id)
    end)
    |> Enum.sum()
  end

def part2(input) do
  input
  |> String.split("\n", trim: true)
  |> Enum.map(fn line ->
    # Power of a game: product of the per-colour maxima.
    maxima = bag_max(line)
    maxima[:green] * maxima[:red] * maxima[:blue]
  end)
  |> Enum.sum()
end

# Per-colour maximum drawn in one line, e.g. %{blue: 6, red: 4, green: 2};
# colours that never appear are simply absent from the result.
def bag_max(line) do
  ~r/(\d+) (blue|red|green)/
  |> Regex.scan(line)
  |> Enum.group_by(
    fn [_full, _n, color] -> String.to_atom(color) end,
    fn [_full, n, _color] -> String.to_integer(n) end
  )
  |> Map.new(fn {color, counts} -> {color, Enum.max(counts)} end)
end
``````
3 Likes

I’ve used Regex.named_captures too in order to parse each game.

``````  defp parse_game(game) do
    # Splitting on ":" and ";" yields the "Game <id>" chunk followed by one
    # chunk per set.
    ["Game " <> id | sets] = String.split(game, [":", ";"])

    parsed_sets =
      Enum.map(sets, fn set ->
        # For each colour pull "<n> <colour>" out of the set (0 when the
        # colour is absent), producing a {red, blue, green} tuple.
        counts =
          for color <- ["red", "blue", "green"] do
            case Regex.named_captures(~r/(?<#{color}>[0-9]*) #{color}/, set) do
              nil -> 0
              %{^color => count} -> String.to_integer(count)
            end
          end

        List.to_tuple(counts)
      end)

    {String.to_integer(id), parsed_sets}
  end
``````

Most of the time on this was spent parsing with `String.split/3`. Regex probably would have been faster. The rest of the logic was trivial

``````defp game_possible?({_game_number, sets}) do
  # Every draw in every set must stay within the configured @limits.
  Enum.all?(sets, fn set ->
    Enum.all?(set, fn {color, count} -> count <= @limits[color] end)
  end)
end

defp minimum_necessary_set({_game_number, sets}) do
  # Per-colour maximum across all sets = smallest bag making the game possible.
  Enum.reduce(sets, %{}, fn set, seen ->
    Map.merge(set, seen, fn _color, a, b -> max(a, b) end)
  end)
end
``````

Similar to @bjorng, I wanted to try out NimbleParsec for fun. Looks like we had fairly different parser definitions, but the “post processing” bits seemed similar.
I also included an example of how I might take a similar approach with regex, as I saw several others do.

1 Like