For top N when N is a variable, we can also use :gb_sets or build a priority queue to do the job.
I was going to suggest that, but if I remember correctly it does not allow duplicate values. Elves with equal calories were not explicitly forbidden from being in the top 3, so that could lead to a bad result. You'd have to build the queue including the index from the original input, like `{calories, ndx}`. Tuples get sorted element by element, so the gb_set would still give you the right top N; you just have to extract the calories from the tuples afterwards.
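For illustration, a minimal sketch of that tuple trick (assuming `sums` is a hypothetical list of per-elf calorie totals), keeping only the 3 largest entries in the gb_set:
top3_sum =
  sums
  |> Enum.with_index()
  |> Enum.reduce(:gb_sets.new(), fn {calories, ndx}, set ->
    set = :gb_sets.add({calories, ndx}, set)
    # keep only the 3 largest; equal calorie values stay distinct thanks to the index
    if :gb_sets.size(set) > 3, do: elem(:gb_sets.take_smallest(set), 1), else: set
  end)
  |> :gb_sets.to_list()
  |> Enum.map(fn {calories, _ndx} -> calories end)
  |> Enum.sum()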
The solution I initially had (the one that got me the stars) was with :gb_sets, but then I realized: what if the top 3 have dupes in them? One other option was to use :gb_trees and use a counter as the value, incrementing it when the key already exists (not sure how that would have turned out, but it seemed okay in my head), but I was too tired, so I scrapped that and did the good old sort-and-take. Also, performance didn't seem to matter much between those two. However, performance did matter a little when parsing on \n\n vs. a reducer with cons-swapping.
Anyways, here's the solution: advent_of_code/day_01.ex at master · code-shoily/advent_of_code · GitHub
Also: nice seeing you again, @Aetherus! Hope to learn from your solutions again this year.
I wonder if we can design a heap that only contains 3 elements, pass all the calorie sums through it, and at the end just sum up the heap's elements. This could be slightly more space efficient, but with the amount of data given it shouldn't improve speed much. (Although something tells me we are getting a massive version of the calorie data soon.)
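A quick sketch of that bounded idea, using a plain sorted 3-element list in place of a real heap (again assuming a hypothetical `sums` list of per-elf totals):
sums
|> Enum.reduce([], fn sum, top3 ->
  # keep only the 3 largest sums seen so far
  [sum | top3] |> Enum.sort() |> Enum.take(-3)
end)
|> Enum.sum()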
I've built my own priority queue in order to solve LeetCode problems:
defmodule PriorityQueue do
@moduledoc """
A stateless priority queue implementation based on a pairing heap.
"""
# serial numbers break ties between equal keys, so equal-priority items pop in insertion (FIFO) order
@initial_serial -Bitwise.bsl(1, 64)
defstruct size: 0,
serial: @initial_serial,
heap: nil,
key_fun: nil
@typep serial :: integer()
@typep pairing_heap(key, value) :: nil | {{key, serial()}, value, [pairing_heap(key, value)]}
@opaque t(key, value) :: %__MODULE__{
size: non_neg_integer,
serial: serial(),
heap: pairing_heap(key, value),
key_fun: (value -> key)
}
@doc """
Create an empty priority queue.
"""
@spec new((value -> key)) :: t(key, value) when key: any(), value: any()
def new(key_fun \\ nil), do: %__MODULE__{key_fun: key_fun || (& &1)}
@doc """
Get the size of a priority queue.
"""
@spec size(t(any, any)) :: non_neg_integer()
def size(%__MODULE__{size: size}), do: size
@doc """
Put an item into a priority queue.
Returns a new priority queue.
"""
@spec push(t(key, value), value) :: t(key, value) when key: any(), value: any()
def push(%__MODULE__{size: size, heap: heap, key_fun: key_fun, serial: serial} = pq, value) do
%{pq | size: size + 1, heap: meld(heap, to_heap(value, key_fun, serial)), serial: serial + 1}
end
@doc """
Get and delete the smallest item in the priority queue.
If the queue is empty, returns `{:error, :empty}`,
otherwise returns `{:ok, smallest_item, new_priority_queue}`.
"""
@spec pop(t(key, value)) :: {:ok, value, t(key, value)} | {:error, :empty}
when key: any(), value: any()
def pop(%__MODULE__{size: 0}), do: {:error, :empty}
def pop(%__MODULE__{size: size, heap: {_key, value, sub_heaps}} = pq) do
{:ok, value, %{pq | size: size - 1, heap: pair(sub_heaps)}}
end
@doc """
Get the smallest element in the priority queue.
"""
@spec top(t(key, value)) :: {:ok, value} | {:error, :empty}
when key: any(), value: any()
def top(%__MODULE__{size: 0}), do: {:error, :empty}
def top(%__MODULE__{heap: {_, value, _}}), do: {:ok, value}
@doc """
Merge two priority queues. Crashes if the given priority queues have different key functions.
"""
@spec merge!(t(key, value), t(key, value)) :: t(key, value)
when key: any(), value: any()
def merge!(%__MODULE__{key_fun: f} = pq1, %__MODULE__{key_fun: f} = pq2) do
%__MODULE__{
key_fun: f,
serial: max(pq1.serial, pq2.serial),
size: pq1.size + pq2.size,
heap: meld(pq1.heap, pq2.heap)
}
end
def merge!(%__MODULE__{}, %__MODULE__{}) do
raise ArgumentError, "Can't merge priority queues with different key functions."
end
defp to_heap(value, key_fun, serial) do
{{key_fun.(value), serial}, value, []}
end
@spec meld(pairing_heap(key, value), pairing_heap(key, value)) :: pairing_heap(key, value)
when key: any(), value: any()
# the heap whose root key is smaller becomes the new root, absorbing the other as a child
defp meld(nil, heap), do: heap
defp meld(heap, nil), do: heap
defp meld({k1, v1, sub1}, {k2, _v2, _sub2} = heap2) when k1 < k2, do: {k1, v1, [heap2 | sub1]}
defp meld(heap1, {k2, v2, sub2}), do: {k2, v2, [heap1 | sub2]}
@spec pair([pairing_heap(key, value)]) :: pairing_heap(key, value)
when key: any(), value: any()
# merge the root's children pairwise (pairing-heap delete-min)
defp pair([]), do: nil
defp pair([node]), do: node
defp pair([node1, node2 | rest]), do: meld(meld(node2, node1), pair(rest))
end
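A possible usage sketch along the lines of the 3-element heap idea above (assuming `calorie_sums` is a hypothetical list of per-elf totals): push every sum and pop the smallest whenever the queue grows past 3, so it never holds more than 3 elements.
top3 =
  Enum.reduce(calorie_sums, PriorityQueue.new(), fn sum, pq ->
    pq = PriorityQueue.push(pq, sum)
    # drop the smallest once more than 3 sums are in the queue
    if PriorityQueue.size(pq) > 3 do
      {:ok, _smallest, pq} = PriorityQueue.pop(pq)
      pq
    else
      pq
    end
  end)
{:ok, a, rest} = PriorityQueue.pop(top3)
{:ok, b, rest} = PriorityQueue.pop(rest)
{:ok, c, _rest} = PriorityQueue.pop(rest)
IO.puts(a + b + c)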
My first go at AoC in Elixir:
{:ok, data} = File.read("input_day1.txt")
data
|> String.split("\n")
|> Enum.chunk_by(&(String.length(&1) == 0))
|> Enum.filter(fn el -> case el do
[""] -> false
_ -> true
end
end)
|> Enum.map(fn sums -> Enum.map(sums, &String.to_integer/1) end)
|> Enum.map(&Enum.sum/1)
|> Enum.sort(&(&1 >= &2))
|> Enum.take(3)
|> Enum.sum
Not very fond of the anonymous function in the `Enum.filter` line.
A bit cleaner way of doing it:
Enum.filter(fn
[""] -> false
_ -> true
end)
or maybe just Enum.reject(& &1 == [""])
Using streams.
part 1
start = System.monotonic_time(:microsecond)
File.stream!("input.txt")
|> Stream.transform([], fn
"\n", acc -> {[acc], []}
line, acc -> {[], [(line |> String.trim_trailing() |> String.to_integer()) | acc]}
end)
|> Stream.map(&Enum.sum/1)
|> Enum.max()
|> tap(fn sum -> IO.puts "The maximum is #{sum}" end)
elapsed = System.monotonic_time(:microsecond) - start
IO.puts "Job done in #{elapsed} µs"
part 1 with the elf number
File.stream!("input.txt")
|> Stream.transform([], fn
"\n", acc -> {[acc], []}
line, acc -> {[], [(line |> String.trim_trailing() |> String.to_integer()) | acc]}
end)
|> Stream.map(&Enum.sum/1)
|> Stream.with_index(1)
|> Enum.max_by(fn {v,_i} -> v end)
|> tap(fn {v,i} -> IO.puts "The maximum is #{v} with elf number #{i}" end)
part 2
start = System.monotonic_time(:microsecond)
File.stream!("input.txt")
|> Stream.transform([], fn
"\n", acc -> {[acc], []}
line, acc -> {[], [(line |> String.trim_trailing() |> String.to_integer()) | acc]}
end)
|> Stream.map(&Enum.sum/1)
|> Enum.sort(:desc)
|> Enum.take(3)
|> Enum.sum()
|> tap(fn sum -> IO.puts "The maximum is #{sum}" end)
elapsed = System.monotonic_time(:microsecond) - start
IO.puts "Job done in #{elapsed} µs"
part 2 with the elves numbers
File.stream!("input.txt")
|> Stream.transform([], fn
"\n", acc -> {[acc], []}
line, acc -> {[], [(line |> String.trim_trailing() |> String.to_integer()) | acc]}
end)
|> Stream.map(&Enum.sum/1)
|> Stream.with_index(1)
|> Enum.sort_by(fn {v,_i} -> v end, :desc)
|> Enum.take(3)
|> Enum.reduce({0,[]}, fn {v,i}, {sum,lst} -> {sum+v, [i | lst]} end)
|> tap(fn {sum, lst} -> IO.puts "The maximum is #{sum} with elves number #{inspect(lst)}" end)
I'm fairly new to Elixir and functional programming, and I would love to get feedback on my solution. I have to admit it took me a while to figure out how Enum.chunk_while/4 works; I have never used a similar function in another language, even after many years of object-oriented programming.
defmodule AdventOfCode2022.Day1 do
@moduledoc """
https://adventofcode.com/2022/day/1
"""
def part_1 do
elves()
|> Enum.take(1)
end
def part_2 do
elves()
|> Enum.take(3)
|> Enum.sum()
end
defp elves do
"lib/advent_of_code2022/day1-input.txt"
|> File.stream!() # Read the input file line by line
|> Enum.map(&String.trim/1) # Remove all leading and trailing whitespaces
|> Enum.chunk_while([], &chunk_func/2, &after_func/1) # Create a list of nested lists containing the calories of every elf
|> Enum.map(&Enum.sum/1) # Calculate the sum of calories for every elf
|> Enum.sort(:desc) # Sort the elves based on their calories total
end
defp chunk_func(_separator = "", accumulator), do: { :cont, accumulator, [] }
defp chunk_func(calories, accumulator), do: { :cont, [ String.to_integer(calories) | accumulator ] }
defp after_func([]), do: { :cont, [] }
defp after_func(accumulator), do: { :cont, accumulator, [] }
end
Looks good to me! These are things I thought were interesting when I did day 1, exploring other stdlib things. Less about "improving" your existing solution, since it looks good!
- You could merge `Enum.sum` into your chunk function by changing your accumulator to 0, so that `[String.to_integer(calories) | accumulator]` turns into `String.to_integer(calories) + accumulator`. That saves you one Enum pass (sketched below).
- For part 1, you can avoid sorting by using `Enum.max` (others have mentioned top-k algorithms above for part 2).
- Since you're already using `File.stream!`, you might try converting some of your `Enum` calls to their respective `Stream` calls. If you run a profiler, you might notice differences in time, CPU, and especially memory used (more of a difference with the full input).
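For the first point, the merged version might look roughly like this (a sketch replacing the chunk_func/after_func above with a running total as the accumulator, so the separate Enum.map(&Enum.sum/1) pass goes away):
defp chunk_func(_separator = "", total), do: {:cont, total, 0}
defp chunk_func(calories, total), do: {:cont, String.to_integer(calories) + total}
defp after_func(0), do: {:cont, 0}
defp after_func(total), do: {:cont, total, 0}
The pipeline would then call Enum.chunk_while(0, &chunk_func/2, &after_func/1) and drop the Enum.map(&Enum.sum/1) line.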
Thank you very much
i = File.read!("input")
|> String.split("\n\n")
|> Enum.map(fn s -> s
|> String.split("\n")
|> Enum.map(&String.to_integer/1)
|> Enum.sum()
end)
i |> Enum.max()
|> IO.inspect(label: :part1)
i |> Enum.sort(:desc)
|> Enum.take(3)
|> Enum.sum()
|> IO.inspect(label: :part2)