From f3542c49ceb566aa5f937438b6d9b991acd6dc94 Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Sat, 28 Jul 2018 19:06:25 -0300 Subject: [PATCH 01/14] Rewrite Log internals --- lib/event/dispatcher.ex | 2 +- lib/event/loggable/flow.ex | 102 ++++-- lib/log/action/log.ex | 81 ++--- lib/log/event/log.ex | 85 +++-- lib/log/internal/log.ex | 225 ++++++++------ lib/log/model/log.ex | 209 ++++++++----- lib/log/model/log_touch.ex | 48 --- lib/log/model/log_type.ex | 172 +++++++++++ lib/log/model/log_type/macros.ex | 262 ++++++++++++++++ lib/log/model/revision.ex | 104 +++---- lib/log/public/index.ex | 43 +-- lib/network/event/connection.ex | 14 +- lib/notification/model/code.ex | 3 +- lib/server/event/server.ex | 8 +- lib/software/event/file.ex | 38 ++- lib/software/event/log_forge/log_create.ex | 31 -- lib/software/event/log_forge/log_edit.ex | 31 -- lib/software/event/virus.ex | 15 +- .../software_type/log_forge/process_type.ex | 221 ------------- lib/universe/bank/internal/bank_transfer.ex | 5 +- mix.exs | 1 - .../migrations/20180719044155_log_rewrite.exs | 35 +++ test/event/loggable/flow_test.exs | 49 ++- test/features/file/transfer_test.exs | 53 ++-- test/log/action/log_test.exs | 222 ++++++------- test/log/event/handler/log_test.exs | 139 ++++----- test/log/event/log_test.exs | 32 +- test/log/internal/log_test.exs | 182 +++++++++++ test/log/public/index_test.exs | 16 +- test/log/query/log_test.exs | 204 ++++++------ test/network/event/connection_test.exs | 29 +- test/software/event/file_test.exs | 33 +- test/software/event/virus_test.exs | 28 +- .../log_forge/process_type_test.exs | 292 ------------------ test/support/event/setup/log.ex | 10 +- test/support/log/factory.ex | 77 ----- test/support/log/helper.ex | 63 ++++ test/support/log/macros.ex | 99 ++---- test/support/log/setup.ex | 92 +++--- test/support/log/setup/log_type.ex | 76 +++++ 40 files changed, 1794 insertions(+), 1637 deletions(-) delete mode 100644 lib/log/model/log_touch.ex create mode 100644 
lib/log/model/log_type.ex create mode 100644 lib/log/model/log_type/macros.ex delete mode 100644 lib/software/event/log_forge/log_create.ex delete mode 100644 lib/software/event/log_forge/log_edit.ex delete mode 100644 lib/software/model/software_type/log_forge/process_type.ex create mode 100644 priv/repo/log/migrations/20180719044155_log_rewrite.exs create mode 100644 test/log/internal/log_test.exs delete mode 100644 test/software/model/software_type/log_forge/process_type_test.exs delete mode 100644 test/support/log/factory.ex create mode 100644 test/support/log/setup/log_type.ex diff --git a/lib/event/dispatcher.ex b/lib/event/dispatcher.ex index 92442adf..0ed7ba04 100644 --- a/lib/event/dispatcher.ex +++ b/lib/event/dispatcher.ex @@ -146,7 +146,7 @@ defmodule Helix.Event.Dispatcher do # All event LogEvent.Log.Created event LogEvent.Log.Deleted - event LogEvent.Log.Modified + # event LogEvent.Log.Modified ############################################################################## # Process events diff --git a/lib/event/loggable/flow.ex b/lib/event/loggable/flow.ex index cd99a89c..7a6c8698 100644 --- a/lib/event/loggable/flow.ex +++ b/lib/event/loggable/flow.ex @@ -24,6 +24,8 @@ defmodule Helix.Event.Loggable.Flow do alias Helix.Event alias Helix.Event.Loggable.Utils, as: LoggableUtils alias Helix.Entity.Model.Entity + alias Helix.Log.Model.Log + alias Helix.Log.Model.LogType alias Helix.Log.Action.Log, as: LogAction alias Helix.Network.Model.Bounce alias Helix.Network.Model.Network @@ -31,10 +33,8 @@ defmodule Helix.Event.Loggable.Flow do alias Helix.Network.Query.Bounce, as: BounceQuery alias Helix.Server.Model.Server - @type log_entry :: - {Server.id, Entity.id, log_msg} - - @type log_msg :: String.t + @typep log_entry :: + {Server.id, Entity.id, Log.info} @doc """ Top-level macro for events wanting to implement the Loggable protocol. 
@@ -66,13 +66,12 @@ defmodule Helix.Event.Loggable.Flow do """ defmacro log_map(map) do quote do - map = unquote(map) + unquote(map) # Put default values (if not specified) - map |> Map.put_new(:network_id, nil) |> Map.put_new(:endpoint_id, nil) - |> Map.put_new(:msg_endpoint, nil) + |> Map.put_new(:data_both, %{}) |> Map.put_new(:opts, %{}) end end @@ -119,8 +118,11 @@ defmodule Helix.Event.Loggable.Flow do gateway_id: gateway_id, endpoint_id: endpoint_id, network_id: network_id, - msg_gateway: msg_gateway, - msg_endpoint: msg_endpoint, + type_gateway: type_gateway, + data_gateway: data_gateway, + type_endpoint: type_endpoint, + data_endpoint: data_endpoint, + data_both: data_both, opts: opts }) do @@ -147,13 +149,23 @@ defmodule Helix.Event.Loggable.Flow do end |> customize_last_ip(opts) - msg_gateway = String.replace(msg_gateway, "$first_ip", first_ip) - msg_endpoint = String.replace(msg_endpoint, "$last_ip", last_ip) + data_gateway = + data_gateway + |> replace_ips(first_ip, last_ip) + |> Map.merge(data_both) + + data_endpoint = + data_endpoint + |> replace_ips(first_ip, last_ip) + |> Map.merge(data_both) - log_gateway = build_entry(gateway_id, entity_id, msg_gateway) - log_endpoint = build_entry(endpoint_id, entity_id, msg_endpoint) + log_gateway = {type_gateway, LogType.new(type_gateway, data_gateway)} + log_endpoint = {type_endpoint, LogType.new(type_endpoint, data_endpoint)} - bounce_logs = + entry_gateway = build_entry(gateway_id, entity_id, log_gateway) + entry_endpoint = build_entry(endpoint_id, entity_id, log_endpoint) + + bounce_entries = if skip_bounce? 
do [] else @@ -161,11 +173,12 @@ defmodule Helix.Event.Loggable.Flow do bounce, {gateway_id, network_id, gateway_ip}, {endpoint_id, network_id, endpoint_ip}, - entity_id + entity_id, + network_id ) end - [log_gateway, log_endpoint, bounce_logs] |> List.flatten() + [entry_gateway, entry_endpoint, bounce_entries] |> List.flatten() end @doc """ @@ -178,10 +191,13 @@ defmodule Helix.Event.Loggable.Flow do event: _, server_id: server_id, entity_id: entity_id, - msg_server: msg_server + type: type, + data: data }) do - [build_entry(server_id, entity_id, msg_server)] + log_type = LogType.new(type, data) + + [build_entry(server_id, entity_id, {type, log_type})] end @doc """ @@ -249,7 +265,7 @@ defmodule Helix.Event.Loggable.Flow do defdelegate format_ip(ip), to: LoggableUtils - @spec build_entry(Server.id, Entity.id, log_msg) :: + @spec build_entry(Server.id, Entity.id, Log.info) :: log_entry @doc """ Returns data required to insert the log @@ -263,16 +279,23 @@ defmodule Helix.Event.Loggable.Flow do Messages follow the format "Connection bounced from hop (n-1) to (n+1)" """ - def build_bounce_entries(nil, _, _, _), + def build_bounce_entries(nil, _, _, _, _), do: [] - def build_bounce_entries(bounce_id = %Bounce.ID{}, gateway, endpoint, entity) do + def build_bounce_entries( + bounce_id = %Bounce.ID{}, gateway, endpoint, entity, network + ) do bounce_id |> BounceQuery.fetch() - |> build_bounce_entries(gateway, endpoint, entity) + |> build_bounce_entries(gateway, endpoint, entity, network) end + def build_bounce_entries( - bounce = %Bounce{}, gateway = {_, _, _}, endpoint = {_, _, _}, entity_id) - do + bounce = %Bounce{}, + gateway = {_, _, _}, + endpoint = {_, _, _}, + entity_id, + network_id + ) do full_path = [gateway | bounce.links] ++ [endpoint] length_hop = length(full_path) @@ -298,8 +321,11 @@ defmodule Helix.Event.Loggable.Flow do {_, _, ip_prev} = bounce_map[idx - 1] {_, _, ip_next} = bounce_map[idx + 1] - msg = "Connection bounced from #{ip_prev} to #{ip_next}" 
- entry = build_entry(server_id, entity_id, msg) + data = %{ip_prev: ip_prev, ip_next: ip_next, network_id: network_id} + # TODO + log_type = {:connection_bounced, LogType.new(:connection_bounced, data)} + + entry = build_entry(server_id, entity_id, log_type) {idx + 1, acc ++ [entry]} end @@ -321,8 +347,8 @@ defmodule Helix.Event.Loggable.Flow do do: save([log_entry]) def save(logs) do logs - |> Enum.map(fn {server_id, entity_id, msg} -> - {:ok, _, events} = LogAction.create(server_id, entity_id, msg) + |> Enum.map(fn {server_id, entity_id, log_type} -> + {:ok, _, events} = LogAction.create(server_id, entity_id, log_type) events end) |> List.flatten() @@ -378,4 +404,24 @@ defmodule Helix.Event.Loggable.Flow do do: censor_ip(ip) defp customize_last_ip(ip, _), do: ip + + defp replace_ips(params, first_ip, last_ip) do + params + |> Enum.reduce([], fn {k, v}, acc -> + new_v = + case v do + "$first_ip" -> + first_ip + + "$last_ip" -> + last_ip + + _ -> + v + end + + [{k, new_v} | acc] + end) + |> Enum.into(%{}) + end end diff --git a/lib/log/action/log.ex b/lib/log/action/log.ex index 8132871e..063d86d7 100644 --- a/lib/log/action/log.ex +++ b/lib/log/action/log.ex @@ -25,73 +25,56 @@ defmodule Helix.Log.Action.Log do alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent - @spec create(Server.idt, Entity.idt, String.t, pos_integer | nil) :: + @spec create(Server.id, Entity.id, Log.info, pos_integer | nil) :: {:ok, Log.t, [LogCreatedEvent.t]} - | {:error, Ecto.Changeset.t} + | :error @doc """ - Creates a new log linked to `entity` on `server` with `message` as content. + Creates a new log linked to `entity` on `server` with `log_info` as content. 
""" - def create(server, entity, message, forge \\ nil) do - with {:ok, log} <- LogInternal.create(server, entity, message, forge) do - event = LogCreatedEvent.new(log) + def create(server_id, entity_id, log_info, forge_version \\ nil) do + case LogInternal.create(server_id, entity_id, log_info, forge_version) do + {:ok, log} -> + {:ok, log, [LogCreatedEvent.new(log)]} - {:ok, log, [event]} + {:error, _} -> + :error end end - @spec revise(Log.t, Entity.idt, String.t, pos_integer) :: - {:ok, Log.t, [LogModifiedEvent.t]} - | {:error, Ecto.Changeset.t} + @spec revise(Log.t, Entity.id, Log.info, pos_integer) :: + {:ok, Log.t, [LogRevisedEvent.t]} + | :error @doc """ - Adds a revision over `log`. - - ### Params - - `entity` is the the entity that is doing the revision. - - `message` is the new log's content. - - `forge_version` is the version of log forger used to make this revision. - - ### Examples - - iex> revise(%Log{}, %Entity{}, "empty log", 100) - {:ok, %Log{message: "empty log"}, [%LogModifiedEvent{}]} + Adds a revision to the given `log`. """ - def revise(log, entity, message, forge_version) do - with \ - {:ok, log} <- LogInternal.revise(log, entity, message, forge_version) - do - event = LogModifiedEvent.new(log) - - {:ok, log, [event]} + def revise(log = %Log{}, entity_id, log_info, forge_version) do + case LogInternal.revise(log, entity_id, log_info, forge_version) do + {:ok, log} -> + event = LogRevisedEvent.new(log) + {:ok, log, [event]} + + {:error, _} -> + :error end end @spec recover(Log.t) :: - {:ok, :recovered, [LogModifiedEvent.t]} - | {:ok, :deleted, [LogDeletedEvent.t]} - | {:error, :original_revision} + {:ok, :destroyed, [LogDeletedEvent.t]} + | {:ok, :original, []} + | {:ok, :recovered, [LogRecoveredEvent.t]} @doc """ - Recovers `log` to a previous revision. - - ### Notes - - If the log is in its original state and it is not a forged log, the - operation will fail with `{:error, :original_revision}`. 
- - If the log is in its original state and it is forged, it will be deleted, - returning `{:ok, :deleted, [Helix.Event.t]}`. - - Otherwise the revision will be deleted and the log will be updated to use - the last revision's message, returning `{:ok, :recovered, [Helix.Event.t]}`. + Attempts to recover the given `log`. """ - def recover(log) do + def recover(log = %Log{}) do case LogInternal.recover(log) do - {:ok, :deleted} -> - event = LogDeletedEvent.new(log) - {:ok, :deleted, [event]} + :destroyed -> + {:ok, :destroyed, [LogDeletedEvent.new(log)]} - {:ok, :recovered} -> - event = LogModifiedEvent.new(log) - {:ok, :recovered, [event]} + {:original, _} -> + {:ok, :original, []} - {:error, :original_revision} -> - {:error, :original_revision} + {:recovered, new_log} -> + {:ok, :recovered, [LogRecoveredEvent.new(new_log)]} end end end diff --git a/lib/log/event/log.ex b/lib/log/event/log.ex index 99a1e788..0f0a4cf2 100644 --- a/lib/log/event/log.ex +++ b/lib/log/event/log.ex @@ -29,17 +29,12 @@ defmodule Helix.Log.Event.Log do publish do - alias HELL.ClientUtils + alias Helix.Log.Public.Index, as: LogIndex @event :log_created def generate_payload(event, _socket) do - data = %{ - log_id: to_string(event.log.log_id), - server_id: to_string(event.server_id), - timestamp: ClientUtils.to_timestamp(event.log.creation_time), - message: event.log.message - } + data = LogIndex.render_log(event.log) {:ok, data} end @@ -49,55 +44,55 @@ defmodule Helix.Log.Event.Log do end end - event Modified do - @moduledoc """ - LogModifiedEvent is fired when an existing log has changed (revised) or - has been recovered. + # event Modified do + # @moduledoc """ + # LogModifiedEvent is fired when an existing log has changed (revised) or + # has been recovered. - TODO: we'll probably want to create a LogRecovered event instead. - """ + # TODO: we'll probably want to create a LogRecovered event instead. 
+ # """ - alias Helix.Server.Model.Server - alias Helix.Log.Model.Log + # alias Helix.Server.Model.Server + # alias Helix.Log.Model.Log - @type t :: - %__MODULE__{ - log: Log.t, - server_id: Server.id - } + # @type t :: + # %__MODULE__{ + # log: Log.t, + # server_id: Server.id + # } - event_struct [:server_id, :log] + # event_struct [:server_id, :log] - @spec new(Log.t) :: - t - def new(log = %Log{}) do - %__MODULE__{ - log: log, - server_id: log.server_id - } - end + # @spec new(Log.t) :: + # t + # def new(log = %Log{}) do + # %__MODULE__{ + # log: log, + # server_id: log.server_id + # } + # end - publish do + # publish do - alias HELL.ClientUtils + # alias HELL.ClientUtils - @event :log_modified + # @event :log_modified - def generate_payload(event, _socket) do - data = %{ - log_id: to_string(event.log.log_id), - server_id: to_string(event.server_id), - timestamp: ClientUtils.to_timestamp(event.log.creation_time), - message: event.log.message - } + # def generate_payload(event, _socket) do + # data = %{ + # log_id: to_string(event.log.log_id), + # server_id: to_string(event.server_id), + # timestamp: ClientUtils.to_timestamp(event.log.creation_time), + # message: event.log.message + # } - {:ok, data} - end + # {:ok, data} + # end - def whom_to_publish(event), - do: %{server: event.server_id} - end - end + # def whom_to_publish(event), + # do: %{server: event.server_id} + # end + # end event Deleted do @moduledoc """ diff --git a/lib/log/internal/log.ex b/lib/log/internal/log.ex index 43baacfc..beda268f 100644 --- a/lib/log/internal/log.ex +++ b/lib/log/internal/log.ex @@ -4,130 +4,169 @@ defmodule Helix.Log.Internal.Log do alias Helix.Entity.Model.Entity alias Helix.Server.Model.Server alias Helix.Log.Model.Log - alias Helix.Log.Model.LogTouch alias Helix.Log.Model.Revision alias Helix.Log.Repo @spec fetch(Log.id) :: Log.t | nil - def fetch(log_id), - do: Repo.get(Log, log_id) + def fetch(log_id) do + log_id + |> Log.Query.by_id() + |> 
Log.Query.include_revision() + |> Repo.one() + end - @spec get_logs_on_server(Server.idt) :: + @spec fetch_for_update(Log.id) :: + Log.t + | nil + def fetch_for_update(log_id) do + unless Repo.in_transaction?(), + do: "Transaction required in order to acquire lock" + + log_id + |> Log.Query.by_id() + |> Log.Query.include_revision() + |> Log.Query.lock_for_update() + |> Repo.one() + end + + @spec fetch_revision(Log.t, integer) :: + Revision.t + | nil + def fetch_revision(log = %Log{}, offset) do + log.log_id + |> Revision.Query.by_log() + |> Revision.Query.by_revision(log.revision_id + offset) + |> Repo.one() + end + + @spec get_logs_on_server(Server.id, pos_integer) :: [Log.t] - def get_logs_on_server(server) do - server + def get_logs_on_server(server_id, count \\ 20) do + server_id |> Log.Query.by_server() - # TODO: Use id's timestamp - |> Log.Query.order_by_newest() + |> Log.Query.include_revision() + |> Log.Query.only(count) + |> Log.Order.by_id() |> Repo.all() end - @spec get_logs_from_entity_on_server(Server.idt, Entity.idt) :: + @spec paginate_logs_on_server(Server.id, Log.id, pos_integer) :: [Log.t] - def get_logs_from_entity_on_server(server, entity) do - server + def paginate_logs_on_server(server_id, last_log_id, count \\ 20) do + server_id |> Log.Query.by_server() - # TODO: Use id's timestamp - |> Log.Query.order_by_newest() - |> Log.Query.edited_by_entity(entity) + |> Log.Query.paginate_after_log(last_log_id) + |> Log.Query.include_revision() + |> Log.Query.only(count) + |> Log.Order.by_id() |> Repo.all() end - @spec create(Server.idt, Entity.idt, String.t, pos_integer | nil) :: + @spec create(Server.id, Entity.id, Log.info, pos_integer | nil) :: {:ok, Log.t} - | {:error, Ecto.Changeset.t} - def create(server, entity, message, forge_version \\ nil) do - params = %{ - server_id: server, - entity_id: entity, - message: message, - forge_version: forge_version - } - - changeset = Log.create_changeset(params) - - Repo.transaction fn -> - with \ - {:ok, log} 
<- Repo.insert(changeset), - {:ok, _} <- touch_log(log, entity) - do - log - else - {:error, changeset} -> - Repo.rollback(changeset) - end - end + | {:error, Log.changeset} + @doc """ + Creates a new Log entry (along with its underlying Revision). + + The newly created Log may be natural (i.e. as a reaction to a game event) or + artificial (i.e. crafted by the player). The `forge_version` defines whether + it is natural (nil) or artificial (non-nil). + """ + def create(server_id, entity_id, log_info, forge_version \\ nil) do + {log_type, log_data} = log_info + + log_params = %{server_id: server_id} + revision_params = + %{ + entity_id: entity_id, + forge_version: forge_version, + type: log_type, + data: Map.from_struct(log_data) + } + + log_params + |> Log.create_changeset(revision_params) + |> Repo.insert() end - @spec revise(Log.t, Entity.idt, String.t, pos_integer) :: + @spec revise(Log.t, Entity.id, Log.info, pos_integer) :: {:ok, Log.t} - | {:error, Ecto.Changeset.t} - def revise(log, entity, message, forge_version) do - revision = Revision.create(log, entity, message, forge_version) - + | {:error, Log.changeset} + @doc """ + Stacks up a new revision for the given `log`. It inserts a new Revision row + and updates the existing Log row to point to the new `revision_id`. 
+ """ + def revise(log = %Log{}, entity_id, {log_type, log_data}, forge_version) do Repo.transaction fn -> + log = fetch_for_update(log.log_id) + + params = + %{ + entity_id: entity_id, + forge_version: forge_version, + type: log_type, + data: Map.from_struct(log_data) + } + + {log_changeset, revision_changeset} = Log.add_revision(log, params) + with \ - {:ok, revision} <- Repo.insert(revision), - {:ok, _} <- touch_log(log, entity) + {:ok, log} <- Repo.update(log_changeset), + {:ok, _} <- Repo.insert(revision_changeset) do - revision.log + log else - {:error, changeset} -> - Repo.rollback(changeset) + _ -> + Repo.rollback(:internal) end end end @spec recover(Log.t) :: - {:ok, :deleted | :recovered} - | {:error, :original_revision} - def recover(log) do - Repo.transaction fn -> - query = - log - |> Revision.Query.by_log() - |> Revision.Query.last(2) - - case Repo.all(query) do - [%{forge_version: nil}] -> - Repo.rollback(:original_revision) - - [_] -> - # Forged log, should be deleted - Repo.delete!(log) - - :deleted - - [old, %{message: m}] -> - Repo.delete!(old) - - log - |> Log.update_changeset(%{message: m}) - |> Repo.update!() - - :recovered + :destroyed + | {:original, Log.t} + | {:recovered, Log.t} + @doc """ + Attempts to recover the given `log` to the previous version. + + If the log is already at the original server, it will either `:destroy` it if + it's an artificial log, or return it as is if it's a natural one. + + Otherwise, if there exists a previous revision, it will pop the current one + and point to the previous one, effectively deleting the current revision. + """ + def recover(log = %Log{}) do + trans_result = + Repo.transaction fn -> + log = fetch_for_update(log.log_id) + previous_revision = fetch_revision(log, -1) + + case Log.recover_revision(log, previous_revision) do + # We are attempting to recover a natural log. It should never be + # destroyed, so we just return it as is. 
+ {:original, :natural} -> + {:original, log} + + # We are dealing with an artificial log that is already on its last + # revision. According to the game mechanics, this log should be + # destroyed + {:original, :artificial} -> + Repo.delete!(log) + :destroyed + + # It may either be a natural or artificial log, but we don't care, as + # for both cases we need to recover to the `previous_revision`. + {:recover, changeset} -> + with {:ok, recovered_log} <- Repo.update(changeset) do + {:recovered, recovered_log} + end + end end - end - end - @spec count_revisions_of_entity(Log.t, Entity.idt) :: - non_neg_integer - def count_revisions_of_entity(log, entity) do - log - |> Revision.Query.by_log() - |> Revision.Query.select_count() - |> Revision.Query.by_entity(entity) - |> Repo.one() - end - - @spec touch_log(Log.t, Entity.idt) :: - {:ok, LogTouch.t} - | {:error, Ecto.Changeset.t} - defp touch_log(log, entity) do - log - |> LogTouch.create(entity) - |> Repo.insert(on_conflict: :nothing) + with {:ok, result} <- trans_result do + result + end end end diff --git a/lib/log/model/log.ex b/lib/log/model/log.ex index 03990325..f0b7f6f5 100644 --- a/lib/log/model/log.ex +++ b/lib/log/model/log.ex @@ -9,51 +9,51 @@ defmodule Helix.Log.Model.Log do alias Ecto.Changeset alias Helix.Entity.Model.Entity alias Helix.Server.Model.Server + alias Helix.Log.Model.LogType alias Helix.Log.Model.Revision + alias __MODULE__ - @type message :: String.t - - @type t :: %__MODULE__{ - log_id: id, - server_id: Server.id, - entity_id: Entity.id, - message: message, - crypto_version: pos_integer | nil, - revisions: term, - creation_time: DateTime.t - } - - @type creation_params :: %{ - :server_id => Server.idtb, - :entity_id => Entity.idtb, - :message => String.t, - optional(:crypto_version) => pos_integer | nil, - optional(:forge_version) => pos_integer | nil, - optional(atom) => any - } - - @type update_params :: %{ - optional(:crypto_version) => pos_integer | nil, - optional(:message) => String.t 
- } - - @creation_fields ~w/server_id entity_id message/a - @update_fields ~w/message crypto_version/a - - @required_fields ~w/server_id entity_id message/a + @type t :: + %__MODULE__{ + log_id: id, + revision_id: Revision.id, + server_id: Server.id, + creation_time: DateTime.t, + revision: nil | Revision.t + } + @type changeset :: %Changeset{data: %__MODULE__{}} + + @type type :: LogType.type + @type data :: LogType.data + @type info :: {type, data} + + @type creation_params :: + %{ + server_id: Server.id, + entity_id: Entity.id + } + + @creation_fields [:server_id] + @required_fields [:server_id, :revision_id, :creation_time, :log_id] + + @primary_key false schema "logs" do field :log_id, ID, primary_key: true + field :revision_id, :integer field :server_id, Server.ID - field :entity_id, Entity.ID - - field :message, :string - field :crypto_version, :integer + # Stores the exact moment the log was created. This value is immutable! Even + # if several revisions occurred at a later time, the `creation_time` of the + # log object remains unchanged. 
field :creation_time, :utc_datetime + field :revision, :map, + virtual: true, + default: nil + has_many :revisions, Revision, foreign_key: :log_id, references: :log_id, @@ -61,28 +61,74 @@ defmodule Helix.Log.Model.Log do on_replace: :delete end - @spec create_changeset(creation_params) :: - Changeset.t - def create_changeset(params) do - heritage = build_heritage(params) - revision = Revision.changeset(%Revision{}, params) - revision_time = get_change(revision, :creation_time) + @spec create_changeset(creation_params, Revision.creation_params) :: + changeset + def create_changeset(params, revision_params) do + log_id = + params + |> build_heritage() + |> ID.generate(:log) + + revision_changeset = Revision.create_changeset(log_id, 1, revision_params) + revision = apply_changes(revision_changeset) %__MODULE__{} |> cast(params, @creation_fields) + |> put_change(:creation_time, revision.creation_time) + |> put_change(:log_id, log_id) + |> put_change(:revision_id, 1) + |> put_assoc(:revisions, [revision_changeset]) + |> put_change(:revision, revision) |> validate_required(@required_fields) - |> put_assoc(:revisions, [revision]) - |> put_change(:creation_time, revision_time) - |> put_pk(heritage, :log) end - @spec update_changeset(t | Changeset.t, update_params) :: - Changeset.t - def update_changeset(struct, params) do - struct - |> cast(params, @update_fields) - |> validate_required(@required_fields) - |> validate_number(:crypto_version, greater_than_or_equal_to: 0) + @spec add_revision(t, Revision.creation_params) :: + {changeset, Revision.changeset} + def add_revision(log = %Log{}, revision_params) do + next_revision_id = log.revision_id + 1 + + revision_changeset = + Revision.create_changeset(log.log_id, next_revision_id, revision_params) + + log_changeset = + log + |> change() + |> put_change(:revision_id, next_revision_id) + |> put_change(:revision, apply_changes(revision_changeset)) + + {log_changeset, revision_changeset} + end + + @spec recover_revision(Log.t, 
previous_revision :: Revision.t | nil) :: + {:original, :natural | :artificial} + | {:recover, changeset} + @doc """ + Based on the previous revision (which may not exist) and the current revision, + figure out whether we are dealing with a natural or artificial log, and + whether it should be recovered, destroyed or kept as is. + + In case it's not clear: + + If a previous revision exists on the stack, it doesn't matter if the log is + artificial or natural, we should recover it anyway. + + If a previous revision does not exist, we look at the current revision (which + is, necessarily, the original one). If it has a `forger_version`, then the + first revision was created through a `LogForgeProcess`, and it's an artificial + log. Otherwise it's natural. + """ + def recover_revision(%Log{revision: %{forge_version: nil}}, nil), + do: {:original, :natural} + def recover_revision(_, nil), + do: {:original, :artificial} + def recover_revision(log = %Log{}, previous_revision = %Revision{}) do + changeset = + log + |> change() + |> put_change(:revision_id, previous_revision.revision_id) + |> put_change(:revision, previous_revision) + + {:recover, changeset} end @spec build_heritage(creation_params) :: @@ -92,37 +138,62 @@ defmodule Helix.Log.Model.Log do query do - alias Helix.Entity.Model.Entity alias Helix.Server.Model.Server alias Helix.Log.Model.Log - alias Helix.Log.Model.LogTouch + alias Helix.Log.Model.Revision + + @spec by_id(Queryable.t, Log.id) :: + Queryable.t + def by_id(query \\ Log, log_id), + do: where(query, [l], l.log_id == ^log_id) - @spec by_id(Queryable.t, Log.idtb) :: + @spec by_server(Queryable.t, Server.id) :: Queryable.t - def by_id(query \\ Log, id), - do: where(query, [l], l.log_id == ^id) + def by_server(query \\ Log, server_id), + do: where(query, [l], l.server_id == ^server_id) - @spec edited_by_entity(Queryable.t, Entity.idtb) :: + @spec include_revision(Queryable.t) :: Queryable.t - def edited_by_entity(query \\ Log, id) do - query - |> 
join(:inner, [l], lt in LogTouch, lt.log_id == l.log_id) - |> where([l, ..., lt], lt.entity_id == ^id) + @doc """ + Joins the Log.Revision table and includes the revision data with the result. + """ + def include_revision(query) do + from l in query, + inner_join: lr in Revision, + on: l.log_id == lr.log_id and l.revision_id == lr.revision_id, + select: %Log{l | revision: lr} end - @spec by_server(Queryable.t, Server.idtb) :: + @spec paginate_after_log(Queryable.t, Log.id) :: Queryable.t - def by_server(query \\ Log, id), - do: where(query, [l], l.server_id == ^id) + @doc """ + Returns only logs that are older than the given `log_id`. + """ + def paginate_after_log(query, log_id), + do: where(query, [l], l.log_id < ^log_id) - @spec by_message(Queryable.t, String.t) :: + @spec only(Queryable.t, pos_integer) :: Queryable.t - def by_message(query \\ Log, message), - do: where(query, [l], like(l.message, ^message)) + @doc """ + Limits the resulting set to `total` rows. + """ + def only(query, total), + do: limit(query, ^total) + + @spec lock_for_update(Queryable.t) :: + Queryable.t + def lock_for_update(query), + do: lock(query, "FOR UPDATE") + end + + order do - @spec order_by_newest(Queryable.t) :: + @spec by_id(Queryable.t) :: Queryable.t - def order_by_newest(query), - do: order_by(query, [l], desc: l.creation_time) + @doc """ + Orders the resulting set by the log id (newest first). + """ + def by_id(query), + do: order_by(query, [l], desc: l.log_id) end end diff --git a/lib/log/model/log_touch.ex b/lib/log/model/log_touch.ex deleted file mode 100644 index 8d2375d9..00000000 --- a/lib/log/model/log_touch.ex +++ /dev/null @@ -1,48 +0,0 @@ -defmodule Helix.Log.Model.LogTouch do - @moduledoc """ - Links entities to logs. - - This model caches the relationship of entities that edited (or created) a log. - - Does so to allow the client to display all logs that a certain user edited at - some point of history (even if their revision was removed). 
- - This record is opaque and should only be used on the `Helix.Log` domain to - mark logs as touched by a certain entity. - """ - - use Ecto.Schema - - import Ecto.Changeset - - alias Ecto.Changeset - alias Helix.Entity.Model.Entity - alias Helix.Log.Model.Log - - @type t :: %__MODULE__{ - log_id: Log.id, - entity_id: Entity.id, - log: term - } - - @primary_key false - schema "log_touches" do - field :log_id, Log.ID, - primary_key: true - field :entity_id, Entity.ID, - primary_key: true - - belongs_to :log, Log, - references: :log_id, - foreign_key: :log_id, - define_field: false - end - - @spec create(Log.t, Entity.idtb) :: - Changeset.t - def create(log, entity) do - %__MODULE__{} - |> cast(%{entity_id: entity}, [:entity_id]) - |> put_assoc(:log, log) - end -end diff --git a/lib/log/model/log_type.ex b/lib/log/model/log_type.ex new file mode 100644 index 00000000..0741effc --- /dev/null +++ b/lib/log/model/log_type.ex @@ -0,0 +1,172 @@ +defmodule Helix.Log.Model.LogType do + + use Helix.Log.Model.LogType.Macros + + @type type :: + :local_login + | :remote_login_gateway + | :remote_login_endpoint + | :connection_bounced + + @type data :: struct | map + + log :local_login, 0 do + @moduledoc """ + `LocalLoginLog` is the log displayed when the player logs into his own + server. + + Localhost logged in + """ + + data_struct [] + + gen0() + end + + log :remote_login_gateway, 1 do + @moduledoc """ + `RemoteLoginGatewayLog` is shown when a player logged into another server, + from the gateway perspective. + + localhost logged into $ip as root + """ + + data_struct [:network_id, :ip] + + gen2({:network_id, :network}, {:ip, :ip}) + end + + log :remote_login_endpoint, 2 do + @moduledoc """ + `RemoteLoginEndpointLog` is shown when a player logged into another server, + from the endpoint perspective. 
+ + $ip logged in as root + """ + + data_struct [:network_id, :ip] + + gen2({:network_id, :network}, {:ip, :ip}) + end + + log :connection_bounced, 3 do + @moduledoc """ + `ConnectionBouncedLog` is the log displayed on intermediary hops (aka + bounces) of a connection. + + Connection bounced from $ip_prev to $ip_next + """ + + data_struct [:network_id, :ip_prev, :ip_next] + + gen3({:network_id, :network}, {:ip_prev, :ip}, {:ip_next, :ip}) + end + + log :file_download_gateway, 4 do + @moduledoc """ + `FileDownloadGateway` is the log displayed on the player who just downloaded + a file. + + localhost downloaded file $file_name from $first_ip + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end + + log :file_download_endpoint, 5 do + @moduledoc """ + `FileDownloadEndpoint` is the log displayed on the target server (endpoint) + that just had a file downloaded from it. + + $last_ip downloaded file $file_name from localhost + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end + + log :file_upload_gateway, 6 do + @moduledoc """ + `FileUploadGateway` is the log displayed on the player who just uploaded a + file. + + localhost uploaded file $file_name to $first_ip + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end + + log :file_upload_endpoint, 7 do + @moduledoc """ + `FileUploadEndpoint` is the log displayed on the target server (endpoint) + that just had a file uploaded to it. 
+ + $last_ip uploaded file $file_name to localhost + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end + + log :pftp_file_download_gateway, 8 do + @moduledoc """ + `PFTPFileDownloadedGateway` is the log displayed on the gateway of the + player who just finished downloading a file from a PFTP server. + + Bounces are skipped. + + localhost downloaded $file_name from public FTP server at $endpoint_ip + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end + + log :pftp_file_download_endpoint, 9 do + @moduledoc """ + `PFTPFileDownloadedEndpoint` is the log displayed on the endpoint server + that a player just finished downloading a file from. + + Bounces are skipped. Gateway IP is censored. + + $censored_gateway_ip downloaded $file_name from local public FTP server + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end + + log :virus_installed_gateway, 10 do + @moduledoc """ + `VirusInstalledGateway` is the log displayed on the gateway of the player + who just installed a virus on someone. + + localhost installed virus $file_name at $first_ip + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end + + log :virus_installed_endpoint, 11 do + @moduledoc """ + `VirusInstalledEndpoint` is the log displayed on the endpoint server which + the player just installed a virus on. 
+ + $last_ip installed virus $file_name at localhost + """ + + data_struct [:file_name, :ip, :network_id] + + gen3({:file_name, :file_name}, {:ip, :ip}, {:network_id, :network}) + end +end diff --git a/lib/log/model/log_type/macros.ex b/lib/log/model/log_type/macros.ex new file mode 100644 index 00000000..97e3aca5 --- /dev/null +++ b/lib/log/model/log_type/macros.ex @@ -0,0 +1,262 @@ +defmodule Helix.Log.Model.LogType.Macros do + + import HELL.Macros + + alias HELL.Utils + alias Helix.Network.Model.Network + + defmacro __using__(_) do + quote do + + import unquote(__MODULE__) + + Module.register_attribute( + __MODULE__, + :logs, + accumulate: true, persist: :false + ) + + @before_compile unquote(__MODULE__) + + end + end + + defmacro __before_compile__(_env) do + quote do + + import EctoEnum + + defenum LogEnum, @logs + + # @spec exists?(term) :: + # boolean + def exists?(log) do + Enum.any?(@logs, fn {valid_log, _} -> valid_log == log end) + end + + def new(type, data_params) do + type + |> get_type_module() + |> apply(:new, [data_params]) + end + + end + end + + defmacro log(name, enum_id, do: block) do + module_name = + __CALLER__.module + |> Module.concat(get_safe_name(name)) + + quote do + + Module.put_attribute( + __MODULE__, + :logs, + {unquote(name), unquote(enum_id)} + ) + + defmodule unquote(module_name) do + @moduledoc false + + @log_type unquote(name) + + unquote(block) + end + + end + end + + defmacro data_struct(keys) do + quote do + + @enforce_keys unquote(keys) + defstruct unquote(keys) + + end + end + + defmacro new(args, do: block) do + quote do + + @doc false + def new(unquote(args)) do + unquote(block) + end + + end + end + + @doc """ + Generates the boilerplate for a n-field log type. + + The `data_struct` construct is skipped on purpose; you have to explicitly + define it for documentation purposes. + + Example for n=2, i.e. 
`gen2({:network_id, :network}, {:ip, :ip})`: + + new(%{network_id: network_id, ip: ip}) do + %__MODULE__{ + network_id: network_id, + ip: ip + } + end + + parse(unsafe) do + %__MODULE__{ + network_id: validate(:network, unsafe["network_id"]), + ip: validate(:ip, unsafe["ip"]) + } + end + """ + defmacro gen0, + do: do_gen0() + defmacro gen2(p1, p2), + do: do_gen2(p1, p2) + defmacro gen3(p1, p2, p3), + do: do_gen3(p1, p2, p3) + + defmacro parse(args, do: block) do + quote do + + @doc false + def parse(unquote(args)) do + try do + {:ok, unquote(block)} + rescue + RuntimeError -> + :error + end + end + + end + end + + def validate(field_type, field_value) when is_atom(field_type) do + fun = Utils.concat_atom(:validate_, field_type) + + __MODULE__ + |> apply(fun, [field_value]) + |> handle_validate() + end + + def validate(validator, field_value) when is_function(validator) do + field_value + |> validator.() + |> handle_validate() + end + + defp handle_validate({:error, _}), + do: raise "bad" + defp handle_validate(:error), + do: raise "bad" + defp handle_validate({:ok, value}), + do: value + defp handle_validate(value), + do: value + + def validate_network(entry) when not is_binary(entry), + do: :error + def validate_network(entry), + do: Network.ID.cast(entry) + + def validate_file_name(entry) when not is_binary(entry), + do: :error + def validate_file_name(entry), + do: entry + + def validate_ip(ip) do + ip + end + + def get_type_module(type) do + __MODULE__ + |> Module.split() + |> List.replace_at(-1, get_safe_name(type)) + |> Module.concat() + end + + docp """ + Generates a "safe name" for the log type. + + This "safe name" is Capitalized and does not contain any un_der_scor_es. 
+ """ + defp get_safe_name(type) do + type + |> to_string() + |> String.capitalize() + |> String.replace("_", "") + end + + ############################################################################## + # N-field generators + ############################################################################## + + defp do_gen0 do + quote do + + new(%{}) do + %__MODULE__{} + end + + parse(_) do + %__MODULE__{} + end + + end + end + + defp do_gen2({f1, v_f1}, {f2, v_f2}) do + str_f1 = to_string(f1) + str_f2 = to_string(f2) + + quote do + + new(%{unquote(f1) => local_f1, unquote(f2) => local_f2}) do + %__MODULE__{ + unquote(f1) => local_f1, + unquote(f2) => local_f2 + } + end + + parse(unsafe) do + %__MODULE__{ + unquote(f1) => + validate(unquote(v_f1), Map.get(unsafe, unquote(str_f1))), + unquote(f2) => + validate(unquote(v_f2), Map.get(unsafe, unquote(str_f2))) + } + end + + end + end + + defp do_gen3({f1, v_f1}, {f2, v_f2}, {f3, v_f3}) do + str_f1 = to_string(f1) + str_f2 = to_string(f2) + str_f3 = to_string(f3) + + quote do + + new(%{unquote(f1) => l_f1, unquote(f2) => l_f2, unquote(f3) => l_f3}) do + %__MODULE__{ + unquote(f1) => l_f1, + unquote(f2) => l_f2, + unquote(f3) => l_f3 + } + end + + parse(unsafe) do + %__MODULE__{ + unquote(f1) => + validate(unquote(v_f1), Map.get(unsafe, unquote(str_f1))), + unquote(f2) => + validate(unquote(v_f2), Map.get(unsafe, unquote(str_f2))), + unquote(f3) => + validate(unquote(v_f3), Map.get(unsafe, unquote(str_f3))) + } + end + + end + end +end diff --git a/lib/log/model/revision.ex b/lib/log/model/revision.ex index 79e4c123..5362caf7 100644 --- a/lib/log/model/revision.ex +++ b/lib/log/model/revision.ex @@ -7,7 +7,6 @@ defmodule Helix.Log.Model.Revision do """ use Ecto.Schema - use HELL.ID, field: :revision_id import Ecto.Changeset import HELL.Ecto.Macros @@ -15,35 +14,50 @@ defmodule Helix.Log.Model.Revision do alias Ecto.Changeset alias Helix.Entity.Model.Entity alias Helix.Log.Model.Log + alias Helix.Log.Model.LogType.LogEnum 
@type t :: %__MODULE__{ - revision_id: id, log_id: Log.id, + revision_id: id, entity_id: Entity.id, - message: String.t, - forge_version: pos_integer | nil, - log: term, + type: Log.type, + data: Log.data, creation_time: DateTime.t, } + @type changeset :: %Changeset{data: %__MODULE__{}} + @type creation_params :: %{ - :entity_id => Entity.idtb, - :message => String.t, - optional(:forge_version) => pos_integer | nil, - optional(atom) => any + entity_id: Entity.id, + type: Log.type, + data: Log.data, + forge_version: pos_integer | nil, } - @creation_fields ~w/entity_id message forge_version/a + @typedoc """ + `Revision.id` is a sequential, non-negative integer that acts as a counter. + Each revision increments it, and if a LogRecoveryProcess removes a revision, + it decrements. + """ + @type id :: non_neg_integer + + @creation_fields [:entity_id, :type, :data, :forge_version, :revision_id] + @required_fields [:log_id, :entity_id, :type, :data, :revision_id] - schema "revisions" do - field :revision_id, ID, + @primary_key false + schema "log_revisions" do + field :log_id, Log.ID, + primary_key: true + field :revision_id, :integer, primary_key: true - field :log_id, Log.ID field :entity_id, Entity.ID - field :message, :string - field :forge_version, :integer + field :type, LogEnum + field :data, :map + + field :forge_version, :integer, + default: nil field :creation_time, :utc_datetime @@ -53,66 +67,30 @@ defmodule Helix.Log.Model.Revision do define_field: false end - @spec create(Log.t, Entity.idtb, String.t, pos_integer | nil) :: - Changeset.t - def create(log, entity, message, forge \\ nil) do - params = %{ - entity_id: entity, - message: message, - forge_version: forge - } - - log = Log.update_changeset(log, %{message: message}) - + @spec create_changeset(Log.id, id, creation_params) :: + changeset + def create_changeset(log_id, revision_id, params) do %__MODULE__{} - |> changeset(params) - |> put_assoc(:log, log) - |> put_pk(%{}, {:log, :revision}) # Correct 
heritage is TODO - end - - @spec changeset(%__MODULE__{} | Changeset.t, creation_params) :: - Changeset.t - def changeset(struct, params) do - struct |> cast(params, @creation_fields) - |> validate_required([:entity_id, :message]) - |> validate_number(:forge_version, greater_than: 0) + |> put_change(:log_id, log_id) + |> put_change(:revision_id, revision_id) |> put_change(:creation_time, DateTime.utc_now()) - |> put_pk(%{}, {:log, :revision}) # Correct heritage is TODO + |> validate_required(@required_fields) end query do - alias Helix.Entity.Model.Entity alias Helix.Log.Model.Log alias Helix.Log.Model.Revision - @spec by_id(Queryable.t, Revision.idtb) :: - Queryable.t - def by_id(query \\ Revision, id), - do: where(query, [r], r.revision_id == ^id) - - @spec by_log(Queryable.t, Log.idtb) :: - Queryable.t - def by_log(query \\ Revision, id), - do: where(query, [r], r.log_id == ^id) - - @spec by_entity(Queryable.t, Entity.idtb) :: - Queryable.t - def by_entity(query \\ Revision, id), - do: where(query, [r], r.entity_id == ^id) - - @spec select_count(Queryable.t) :: + @spec by_log(Queryable.t, Log.id) :: Queryable.t - def select_count(query \\ Revision), - do: select(query, [r], count(r.revision_id)) + def by_log(query \\ Revision, log_id), + do: where(query, [lr], lr.log_id == ^log_id) - @spec last(Queryable.t, non_neg_integer) :: + @spec by_revision(Queryable.t, Revision.id) :: Queryable.t - def last(query, n) do - query - |> order_by([r], desc: r.creation_time) - |> limit(^n) - end + def by_revision(query, revision_id), + do: where(query, [lr], lr.revision_id == ^revision_id) end end diff --git a/lib/log/public/index.ex b/lib/log/public/index.ex index ace495ec..f60d587b 100644 --- a/lib/log/public/index.ex +++ b/lib/log/public/index.ex @@ -1,15 +1,23 @@ defmodule Helix.Log.Public.Index do alias HELL.ClientUtils + alias HELL.HETypes + alias HELL.Utils alias Helix.Server.Model.Server alias Helix.Log.Model.Log alias Helix.Log.Query.Log, as: LogQuery - @type index :: 
- [%{log_id: Log.id, message: Log.message, timestamp: DateTime.t}] + @type index :: [Log.t] - @type rendered_index :: - [%{log_id: String.t, message: String.t, timestamp: String.t}] + @type rendered_index :: [rendered_log] + + @typep rendered_log :: + %{ + log_id: String.t, + type: String.t, + data: map, + timestamp: HETypes.client_timestamp + } @spec index(Server.id) :: index @@ -23,13 +31,6 @@ defmodule Helix.Log.Public.Index do def index(server_id) do server_id |> LogQuery.get_logs_on_server() - |> Enum.map(fn log -> - %{ - log_id: log.log_id, - message: log.message, - timestamp: log.creation_time - } - end) end @spec render_index(index) :: @@ -37,13 +38,17 @@ defmodule Helix.Log.Public.Index do @doc """ Top-level renderer for `index/1` """ - def render_index(index) do - Enum.map(index, fn log -> - %{ - log_id: to_string(log.log_id), - message: log.message, - timestamp: ClientUtils.to_timestamp(log.timestamp) - } - end) + def render_index(index), + do: Enum.map(index, &render_log/1) + + @spec render_log(Log.t) :: + rendered_log + def render_log(log = %Log{}) do + %{ + log_id: to_string(log.log_id), + type: to_string(log.revision.type), + data: Utils.stringify_map(log.revision.data), + timestamp: ClientUtils.to_timestamp(log.creation_time) + } end end diff --git a/lib/network/event/connection.ex b/lib/network/event/connection.ex index 549985a0..5e847a4b 100644 --- a/lib/network/event/connection.ex +++ b/lib/network/event/connection.ex @@ -33,20 +33,24 @@ defmodule Helix.Network.Event.Connection do loggable do + @doc """ + Gateway: "localhost logged into $first_ip as root" + Endpoint: "$last_ip logged in as root" + """ log(event = %__MODULE__{type: :ssh}) do entity = EntityQuery.fetch_by_server(event.tunnel.gateway_id) - msg_gateway = "localhost logged into $first_ip" - msg_endpoint = "$last_ip logged in as root" - log_map %{ event: event, entity_id: entity.entity_id, gateway_id: event.tunnel.gateway_id, endpoint_id: event.tunnel.target_id, network_id: 
event.tunnel.network_id, - msg_gateway: msg_gateway, - msg_endpoint: msg_endpoint + type_gateway: :remote_login_gateway, + data_gateway: %{ip: "$first_ip"}, + type_endpoint: :remote_login_endpoint, + data_endpoint: %{ip: "$last_ip"}, + data_both: %{network_id: event.tunnel.network_id} } end end diff --git a/lib/notification/model/code.ex b/lib/notification/model/code.ex index 2e2644af..e5942ec8 100644 --- a/lib/notification/model/code.ex +++ b/lib/notification/model/code.ex @@ -21,8 +21,7 @@ defmodule Helix.Notification.Model.Code do Module.register_attribute( __MODULE__, :codes, - accumulate: true, - persist: :false + accumulate: true, persist: :false ) @before_compile unquote(__MODULE__) diff --git a/lib/server/event/server.ex b/lib/server/event/server.ex index 2a4b1f05..32622f2e 100644 --- a/lib/server/event/server.ex +++ b/lib/server/event/server.ex @@ -39,14 +39,16 @@ defmodule Helix.Server.Event.Server do loggable do + @doc """ + localhost logged in + """ log(event = %__MODULE__{join_type: :local}) do - msg = "Localhost logged in" - log_map %{ event: event, server_id: event.server_id, entity_id: event.entity_id, - msg_server: msg + type: :local_login, + data: %{} } end diff --git a/lib/software/event/file.ex b/lib/software/event/file.ex index 1e347a67..a766539f 100644 --- a/lib/software/event/file.ex +++ b/lib/software/event/file.ex @@ -198,40 +198,41 @@ defmodule Helix.Software.Event.File do log(event = %{connection_type: :public_ftp}) do file = get_file_name(event.file) - msg_gateway = - "localhost downloaded file #{file} from Public FTP server $first_ip" - msg_endpoint = - "$last_ip downloaded file #{file} from localhost Public FTP" - log_map %{ event: event, entity_id: event.entity_id, gateway_id: event.to_server_id, endpoint_id: event.from_server_id, network_id: event.network_id, - msg_gateway: msg_gateway, - msg_endpoint: msg_endpoint, + type_gateway: :pftp_file_download_gateway, + data_gateway: %{ip: "$first_ip"}, + type_endpoint: 
:pftp_file_download_endpoint, + data_endpoint: %{ip: "$last_ip"}, + data_both: %{network_id: event.network_id, file_name: file}, opts: %{skip_bounce: true, censor_last: true} } end @doc """ Generates a log entry when a File has been downloaded from a server. + + Gateway: "localhost downloaded file $file_name from $first_ip" + Endpoint: "$last_ip downloaded file $file_name from localhost" """ log(event = %{connection_type: :ftp}) do file_name = get_file_name(event.file) - msg_gateway = "localhost downloaded file #{file_name} from $first_ip" - msg_endpoint = "$last_ip downloaded file #{file_name} from localhost" - log_map %{ event: event, entity_id: event.entity_id, gateway_id: event.to_server_id, endpoint_id: event.from_server_id, network_id: event.network_id, - msg_gateway: msg_gateway, - msg_endpoint: msg_endpoint + type_gateway: :file_download_gateway, + data_gateway: %{ip: "$first_ip"}, + type_endpoint: :file_download_endpoint, + data_endpoint: %{ip: "$last_ip"}, + data_both: %{network_id: event.network_id, file_name: file_name} } end end @@ -405,21 +406,24 @@ defmodule Helix.Software.Event.File do loggable do @doc """ Generates a log entry when a File has been uploaded to a server. 
+ + Gateway: "localhost uploaded file $file_name to $first_ip" + Endpoint: "$last_ip uploaded file $file_name to localhost" """ log(event) do file_name = get_file_name(event.file) - msg_gateway = "localhost uploaded file #{file_name} to $first_ip" - msg_endpoint = "$last_ip uploaded file #{file_name} to localhost" - log_map %{ event: event, entity_id: event.entity_id, gateway_id: event.from_server_id, endpoint_id: event.to_server_id, network_id: event.network_id, - msg_gateway: msg_gateway, - msg_endpoint: msg_endpoint + type_gateway: :file_upload_gateway, + data_gateway: %{ip: "$first_ip"}, + type_endpoint: :file_upload_endpoint, + data_endpoint: %{ip: "$last_ip"}, + data_both: %{network_id: event.network_id, file_name: file_name} } end end diff --git a/lib/software/event/log_forge/log_create.ex b/lib/software/event/log_forge/log_create.ex deleted file mode 100644 index 438856ef..00000000 --- a/lib/software/event/log_forge/log_create.ex +++ /dev/null @@ -1,31 +0,0 @@ -defmodule Helix.Software.Event.LogForge.LogCreate do - - import Helix.Event - - event Processed do - - alias Helix.Entity.Model.Entity - alias Helix.Server.Model.Server - alias Helix.Software.Model.SoftwareType.LogForge, as: LogForgeProcess - - @type t :: %__MODULE__{ - target_id: Server.id, - entity_id: Entity.id, - message: String.t, - version: pos_integer - } - - event_struct [:target_id, :entity_id, :message, :version] - - @spec new(LogForgeProcess.t) :: - t - def new(data = %LogForgeProcess{operation: :create}) do - %__MODULE__{ - target_id: data.target_id, - entity_id: data.entity_id, - message: data.message, - version: data.version - } - end - end -end diff --git a/lib/software/event/log_forge/log_edit.ex b/lib/software/event/log_forge/log_edit.ex deleted file mode 100644 index 4e325615..00000000 --- a/lib/software/event/log_forge/log_edit.ex +++ /dev/null @@ -1,31 +0,0 @@ -defmodule Helix.Software.Event.LogForge.LogEdit do - - import Helix.Event - - event Processed do - - alias 
Helix.Entity.Model.Entity - alias Helix.Log.Model.Log - alias Helix.Software.Model.SoftwareType.LogForge, as: LogForgeProcess - - @type t :: %__MODULE__{ - target_log_id: Log.id, - entity_id: Entity.id, - message: String.t, - version: pos_integer - } - - event_struct [:target_log_id, :entity_id, :message, :version] - - @spec new(LogForgeProcess.t) :: - t - def new(data = %LogForgeProcess{operation: :edit}) do - %__MODULE__{ - target_log_id: data.target_log_id, - entity_id: data.entity_id, - message: data.message, - version: data.version - } - end - end -end diff --git a/lib/software/event/virus.ex b/lib/software/event/virus.ex index bce38c1f..e1454dca 100644 --- a/lib/software/event/virus.ex +++ b/lib/software/event/virus.ex @@ -120,22 +120,25 @@ defmodule Helix.Software.Event.Virus do loggable do + @doc """ + Gateway: "localhost installed virus $file_name at $first_ip" + Endpoint: "$last_ip installed virus $file_name at localhost" + """ log(event) do process = get_process(event) - file_name = get_file_name(event.file) - msg_gateway = "localhost installed virus #{file_name} at $first_ip" - msg_endpoint = "$last_ip installed virus #{file_name} at localhost" - log_map %{ event: event, entity_id: event.entity_id, gateway_id: process.gateway_id, endpoint_id: process.target_id, network_id: process.network_id, - msg_gateway: msg_gateway, - msg_endpoint: msg_endpoint + type_gateway: :virus_installed_gateway, + data_gateway: %{ip: "$first_ip"}, + type_endpoint: :virus_installed_endpoint, + data_endpoint: %{ip: "$last_ip"}, + data_both: %{network_id: process.network_id, file_name: file_name} } end diff --git a/lib/software/model/software_type/log_forge/process_type.ex b/lib/software/model/software_type/log_forge/process_type.ex deleted file mode 100644 index 043cee51..00000000 --- a/lib/software/model/software_type/log_forge/process_type.ex +++ /dev/null @@ -1,221 +0,0 @@ -# TODO: This whole module needs to be rewritten with the new Process interface. 
-defmodule Helix.Software.Model.SoftwareType.LogForge do - - use Ecto.Schema - - import Ecto.Changeset - - alias Ecto.Changeset - alias HELL.Constant - alias Helix.Entity.Model.Entity - alias Helix.Log.Model.Log - alias Helix.Server.Model.Server - alias Helix.Software.Model.File - - # TODO: Remove `entity_id` and `version` when `Balance` module is implemented - @type t :: %__MODULE__{ - target_log_id: Log.id | nil, - target_id: Server.id | nil, - entity_id: Entity.id, - operation: :edit | :create, - message: String.t, - version: pos_integer - } - - @type create_params :: - %{ - :entity_id => Entity.idtb, - :operation => :edit | :create, - :message => String.t, - optional(:target_id) => Server.idtb, - optional(:target_log_id) => Log.idtb, - optional(atom) => any - } - - @edit_revision_cost 10_000 - @edit_version_cost 150 - @create_version_cost 400 - - @primary_key false - embedded_schema do - field :target_log_id, Log.ID - field :entity_id, Entity.ID - field :target_id, Server.ID - - field :operation, Constant - - field :message, :string - field :version, :integer - end - - @spec create(File.t, create_params) :: - {:ok, t} - | {:error, Changeset.t} - def create(file, params) do - %__MODULE__{} - |> cast(params, [:entity_id, :operation, :message]) - |> validate_required([:entity_id, :operation]) - |> validate_inclusion(:operation, [:edit, :create]) - |> cast_modules(file, params) - |> format_return() - end - - @spec edit_objective(t, Log.t, non_neg_integer) :: - %{cpu: pos_integer} - def edit_objective(data = %{operation: :edit}, target_log, revision_count) do - revision_cost = if data.entity_id == target_log.entity_id do - factorial(revision_count) * @edit_revision_cost - else - factorial(revision_count + 1) * @edit_revision_cost - end - - version_cost = data.version * @edit_version_cost - - %{cpu: revision_cost + version_cost} - end - - @spec create_objective(t) :: - %{cpu: pos_integer} - def create_objective(data = %{operation: :create}) do - %{cpu: data.version * 
@create_version_cost} - end - - @spec cast_modules(Changeset.t, File.t, create_params) :: - Changeset.t - defp cast_modules(changeset, file, params) do - case get_change(changeset, :operation) do - :create -> - changeset - |> cast(%{version: file.modules.log_create.version}, [:version]) - |> cast(params, [:target_id]) - |> validate_required([:target_id, :version]) - |> validate_number(:version, greater_than: 0) - :edit -> - changeset - |> cast(%{version: file.modules.log_edit.version}, [:version]) - |> cast(params, [:target_log_id]) - |> validate_required([:target_log_id, :version]) - |> validate_number(:version, greater_than: 0) - _ -> - # Changeset should already be invalid - changeset - end - end - - @spec format_return(Changeset.t) :: - {:ok, t} - | {:error, Changeset.t} - defp format_return(changeset = %{valid?: true}), - do: {:ok, apply_changes(changeset)} - defp format_return(changeset), - do: {:error, changeset} - - @spec factorial(non_neg_integer) :: - non_neg_integer - defp factorial(n), - do: Enum.reduce(1..n, &(&1 * &2)) - - defimpl Helix.Process.Model.Processable do - - alias Ecto.Changeset - alias Helix.Entity.Model.Entity - alias Helix.Log.Model.Log - alias Helix.Server.Model.Server - alias Helix.Software.Model.SoftwareType.LogForge, as: LogForgeProcess - alias Helix.Software.Event.LogForge.LogEdit.Processed, - as: LogEditProcessedEvent - alias Helix.Software.Event.LogForge.LogCreate.Processed, - as: LogCreateProcessedEvent - - @ram_base_factor 5 - @ram_sqrt_factor 50 - - def dynamic_resources(_), - do: [:cpu] - - def minimum(%{version: v}), - do: %{ - paused: %{ - ram: v * @ram_base_factor + trunc(:math.sqrt(v) * @ram_sqrt_factor) - }, - running: %{ - ram: v * @ram_base_factor + trunc(:math.sqrt(v) * @ram_sqrt_factor) - } - } - - def kill(_, _, _), - do: {:delete, []} - - def complete(data, _process) do - event = conclusion_event(data) - - {:delete, [event]} - end - - def source_connection_closed(_, _, _) do - {:delete, []} - end - - def 
target_connection_closed(_, _, _) do - {:delete, []} - end - - def state_change(_, process, _, _), - do: {process, []} - - def conclusion(data, process), - do: state_change(data, process, :running, :complete) - - defp conclusion_event(data = %{operation: :edit}), - do: LogEditProcessedEvent.new(data) - - defp conclusion_event(data = %{operation: :create}), - do: LogCreateProcessedEvent.new(data) - - def after_read_hook(data) do - target_log_id = data.target_log_id && Log.ID.cast!(data.target_log_id) - - %LogForgeProcess{ - entity_id: Entity.ID.cast!(data.entity_id), - target_log_id: target_log_id, - target_id: Server.ID.cast!(data.target_id), - operation: String.to_existing_atom(data.operation), - message: data.message, - version: data.version - } - end - end - - defimpl Helix.Process.Public.View.ProcessViewable do - - alias Helix.Log.Model.Log - alias Helix.Process.Model.Process - alias Helix.Process.Public.View.Process, as: ProcessView - alias Helix.Process.Public.View.Process.Helper, as: ProcessViewHelper - - @type data :: - %{ - optional(:target_log_id) => String.t - } - - def get_scope(data, process, server, entity), - do: ProcessViewHelper.get_default_scope(data, process, server, entity) - - @spec render(term, Process.t, ProcessView.scopes) :: - {ProcessView.full_process | ProcessView.partial_process, data} - def render(data, process, scope) do - base = take_data_from_process(process, scope) - complement = take_complement_from_data(data, scope) - - {base, complement} - end - - defp take_complement_from_data(data = %_{operation: :edit}, _), - do: %{target_log_id: to_string(data.target_log_id)} - defp take_complement_from_data(%_{operation: :create}, _), - do: %{} - - defp take_data_from_process(process, scope), - do: ProcessViewHelper.default_process_render(process, scope) - end -end diff --git a/lib/universe/bank/internal/bank_transfer.ex b/lib/universe/bank/internal/bank_transfer.ex index 230e007e..d45a1544 100644 --- 
a/lib/universe/bank/internal/bank_transfer.ex +++ b/lib/universe/bank/internal/bank_transfer.ex @@ -21,9 +21,8 @@ defmodule Helix.Universe.Bank.Internal.BankTransfer do a transaction. """ def fetch_for_update(transfer_id) do - unless Repo.in_transaction?() do - raise "Transaction required in order to acquire lock" - end + unless Repo.in_transaction?(), + do: raise "Transaction required in order to acquire lock" transfer_id |> BankTransfer.Query.by_id() diff --git a/mix.exs b/mix.exs index 00efef92..6bcf1244 100644 --- a/mix.exs +++ b/mix.exs @@ -71,7 +71,6 @@ defmodule Helix.Mixfile do {:entropy_string, "~> 1.3"}, {:burette, git: "https://github.com/HackerExperience/burette"}, - {:ex_machina, "~> 2.1", only: :test}, {:earmark, "~> 1.2.4", only: :dev}, {:ex_doc, "~> 0.18.1", only: :dev}, {:inch_ex, "~> 0.5.6", only: [:dev, :test]}, diff --git a/priv/repo/log/migrations/20180719044155_log_rewrite.exs b/priv/repo/log/migrations/20180719044155_log_rewrite.exs new file mode 100644 index 00000000..0067ed80 --- /dev/null +++ b/priv/repo/log/migrations/20180719044155_log_rewrite.exs @@ -0,0 +1,35 @@ +defmodule Helix.Log.Repo.Migrations.LogRewrite do + use Ecto.Migration + + def change do + drop table(:log_touches) + drop table(:revisions) + drop table(:logs) + + create table(:logs, primary_key: false) do + add :log_id, :inet, primary_key: true + + add :revision_id, :integer, null: false + add :server_id, :inet, null: false + + add :creation_time, :utc_datetime, null: false + end + create index(:logs, [:server_id, :creation_time]) + + create table(:log_revisions, primary_key: false) do + add :log_id, + references(:logs, column: :log_id, type: :inet, on_delete: :delete_all), + primary_key: true + + add :revision_id, :integer, primary_key: true + + add :type, :integer, null: false + add :data, :jsonb, null: false + + add :entity_id, :inet, null: false + add :forge_version, :integer + + add :creation_time, :utc_datetime, null: false + end + end +end diff --git 
a/test/event/loggable/flow_test.exs b/test/event/loggable/flow_test.exs index 2bc583a7..76a0b41b 100644 --- a/test/event/loggable/flow_test.exs +++ b/test/event/loggable/flow_test.exs @@ -6,6 +6,7 @@ defmodule Helix.Event.Loggable.FlowTest do alias Helix.Event.Loggable.Flow, as: LoggableFlow alias Helix.Test.Entity.Helper, as: EntityHelper + alias Helix.Test.Log.Helper, as: LogHelper alias Helix.Test.Network.Helper, as: NetworkHelper alias Helix.Test.Network.Setup, as: NetworkSetup alias Helix.Test.Server.Helper, as: ServerHelper @@ -18,35 +19,51 @@ defmodule Helix.Event.Loggable.FlowTest do end end - describe "build_bounce_entries/2" do + describe "build_bounce_entries/5" do test "creates bounce log entries in the correct order" do entity_id = EntityHelper.id() - gateway = {ServerHelper.id(), "::", "1.2.3.4"} - link1 = {ServerHelper.id(), "::", "1.1.1.1"} - link2 = {ServerHelper.id(), "::", "1.1.1.2"} - link3 = {ServerHelper.id(), "::", "1.1.1.3"} - target = {ServerHelper.id(), "::", "4.3.2.1"} + network_id = NetworkHelper.internet_id() + + gateway = {ServerHelper.id(), network_id, "1.2.3.4"} + link1 = {ServerHelper.id(), network_id, "1.1.1.1"} + link2 = {ServerHelper.id(), network_id, "1.1.1.2"} + link3 = {ServerHelper.id(), network_id, "1.1.1.3"} + target = {ServerHelper.id(), network_id, "4.3.2.1"} {bounce, _} = NetworkSetup.Bounce.bounce(links: [link1, link2, link3]) [entry1, entry2, entry3] = - LoggableFlow.build_bounce_entries(bounce, gateway, target, entity_id) + LoggableFlow.build_bounce_entries( + bounce, gateway, target, entity_id, network_id + ) # `link1` says bounce comes from `gateway` and goes to `link2` assert elem(entry1, 0) == elem(link1, 0) assert elem(entry1, 1) == entity_id - assert String.contains?(elem(entry1, 2), "from 1.2.3.4 to 1.1.1.2") + {log_type, log_data} = elem(entry1, 2) + assert log_type == :connection_bounced + assert log_data.ip_prev == "1.2.3.4" + assert log_data.ip_next == "1.1.1.2" + assert log_data.network_id == network_id # 
`link2` says bounce comes from `link1` and goes to `link3` assert elem(entry2, 0) == elem(link2, 0) assert elem(entry2, 1) == entity_id - assert String.contains?(elem(entry2, 2), "from 1.1.1.1 to 1.1.1.3") + {log_type, log_data} = elem(entry2, 2) + assert log_type == :connection_bounced + assert log_data.ip_prev == "1.1.1.1" + assert log_data.ip_next == "1.1.1.3" + assert log_data.network_id == network_id # `link3` says bounce comes from `link2` and goes to `target` assert elem(entry3, 0) == elem(link3, 0) assert elem(entry3, 1) == entity_id - assert String.contains?(elem(entry3, 2), "from 1.1.1.2 to 4.3.2.1") + {log_type, log_data} = elem(entry3, 2) + assert log_type == :connection_bounced + assert log_data.ip_prev == "1.1.1.2" + assert log_data.ip_next == "4.3.2.1" + assert log_data.network_id == network_id end end @@ -54,17 +71,21 @@ defmodule Helix.Event.Loggable.FlowTest do test "logs are saved" do {server, %{entity: entity}} = ServerSetup.server() - msg = "foobar" - entry = LoggableFlow.build_entry(server.server_id, entity.entity_id, msg) + log_info = {log_type, log_data} = LogHelper.log_info() + + entry = + LoggableFlow.build_entry(server.server_id, entity.entity_id, log_info) # Saves the entry; returns LogCreatedEvent assert [event] = LoggableFlow.save(entry) assert event.__struct__ == Helix.Log.Event.Log.Created + # Log is stored correctly on the server [log] = LogQuery.get_logs_on_server(server) - assert log.message == msg - assert log.entity_id == entity.entity_id assert log.server_id == server.server_id + assert log.revision.entity_id == entity.entity_id + assert log.revision.type == log_type + assert log.revision.data == Map.from_struct(log_data) end test "performs a noop on empty list" do diff --git a/test/features/file/transfer_test.exs b/test/features/file/transfer_test.exs index 1c7c5902..061cbe87 100644 --- a/test/features/file/transfer_test.exs +++ b/test/features/file/transfer_test.exs @@ -7,7 +7,6 @@ defmodule 
Helix.Test.Features.File.TransferTest do import Helix.Test.Channel.Macros import Helix.Test.Log.Macros - alias Helix.Log.Query.Log, as: LogQuery alias Helix.Process.Query.Process, as: ProcessQuery alias Helix.Software.Model.File alias Helix.Software.Query.File, as: FileQuery @@ -15,6 +14,7 @@ defmodule Helix.Test.Features.File.TransferTest do alias HELL.TestHelper.Random alias Helix.Test.Account.Helper, as: AccountHelper alias Helix.Test.Channel.Setup, as: ChannelSetup + alias Helix.Test.Log.Helper, as: LogHelper alias Helix.Test.Network.Setup, as: NetworkSetup alias Helix.Test.Process.TOPHelper alias Helix.Test.Software.Helper, as: SoftwareHelper @@ -118,25 +118,22 @@ defmodule Helix.Test.Features.File.TransferTest do # Now let's check the log generation - # Log on gateway (`gateway` downloaded from ``) - assert [log_gateway | _] = LogQuery.get_logs_on_server(gateway.server_id) - assert_log \ - log_gateway, gateway.server_id, entity.entity_id, - "localhost downloaded", - contains: [dl_file.name] + log_gateway = LogHelper.get_last_log(gateway, :file_download_gateway) + + file_name = LogHelper.log_file_name(dl_file) + + assert_log log_gateway, gateway.server_id, entity.entity_id, + :file_download_gateway, %{file_name: file_name} # Verify logging worked correctly within the bounce nodes - assert_bounce \ - bounce, gateway, destination, entity, - rejects: [dl_file.name, "download"] + assert_bounce bounce, gateway, destination, entity + + log_destination = + LogHelper.get_last_log(destination, :file_download_endpoint) # Log on destination (`` downloaded file at `destination`) - assert [log_destination | _] = - LogQuery.get_logs_on_server(destination.server_id) - assert_log \ - log_destination, destination.server_id, entity.entity_id, - "from localhost", - contains: [dl_file.name] + assert_log log_destination, destination.server_id, entity.entity_id, + :file_download_endpoint, %{file_name: file_name} # TODO: #388 Underlying connection(s) were removed @@ -218,27 +215,21 
@@ defmodule Helix.Test.Features.File.TransferTest do assert file_added_event.data.file.id == to_string(new_file.file_id) # Now let's check the log generation - LogQuery.get_logs_on_server(gateway.server_id) + file_name = LogHelper.log_file_name(up_file) # Log on gateway (`gateway` uploaded to ``) - assert [log_gateway | _] = LogQuery.get_logs_on_server(gateway.server_id) - assert_log \ - log_gateway, gateway.server_id, entity.entity_id, - "localhost uploaded", - contains: [up_file.name] + log_gateway = LogHelper.get_last_log(gateway, :file_upload_gateway) + assert_log log_gateway, gateway.server_id, entity.entity_id, + :file_upload_gateway, %{file_name: file_name} # Verify logging worked correctly within the bounce nodes - assert_bounce \ - bounce, gateway, destination, entity, - rejects: [up_file.name, "upload"] + assert_bounce bounce, gateway, destination, entity # Log on destination (`` uploaded file at `destination`) - assert [log_destination | _] = - LogQuery.get_logs_on_server(destination.server_id) - assert_log \ - log_destination, destination.server_id, entity.entity_id, - "to localhost", - contains: [up_file.name] + log_destination = + LogHelper.get_last_log(destination, :file_upload_endpoint) + assert_log log_destination, destination.server_id, entity.entity_id, + :file_upload_endpoint, %{file_name: file_name} # TODO: #388 Underlying connection(s) were removed diff --git a/test/log/action/log_test.exs b/test/log/action/log_test.exs index dd874beb..a1376c7b 100644 --- a/test/log/action/log_test.exs +++ b/test/log/action/log_test.exs @@ -1,112 +1,112 @@ -defmodule Helix.Log.Action.LogTest do +# defmodule Helix.Log.Action.LogTest do - use Helix.Test.Case.Integration - - alias Helix.Log.Action.Log, as: LogAction - alias Helix.Log.Model.Log - alias Helix.Log.Event.Log.Created, as: LogCreatedEvent - alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent - alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent - alias Helix.Log.Query.Log, as: LogQuery - alias 
Helix.Log.Repo - - alias Helix.Test.Log.Factory, as: LogFactory - - alias Helix.Test.Entity.Helper, as: EntityHelper - alias Helix.Test.Server.Helper, as: ServerHelper - - describe "create/3" do - test "succeeds with valid input" do - server_id = ServerHelper.id() - entity_id = EntityHelper.id() - message = "They are taking the hobbits to Isengard" - - assert {:ok, _, _} = LogAction.create(server_id, entity_id, message) - end - - test "returns LogCreated event" do - server_id = ServerHelper.id() - entity_id = EntityHelper.id() - message = "Just as expected" - - result = LogAction.create(server_id, entity_id, message) - assert {:ok, _, [%LogCreatedEvent{}]} = result - end - end - - describe "revise/4" do - test "overrides log message" do - log = LogFactory.insert(:log) - entity = EntityHelper.id() - message = "É nois que voa, bruxão!" - forge_version = Enum.random(1..999) - - assert {:ok, _, _} = LogAction.revise(log, entity, message, forge_version) - assert %{message: ^message} = LogQuery.fetch(log.log_id) - end - - test "returns LogModified event" do - log = LogFactory.insert(:log) - entity = EntityHelper.id() - message = "Don't dead, open inside" - - result = LogAction.revise(log, entity, message, 1) - assert {:ok, _, [%LogModifiedEvent{}]} = result - end - end - - describe "recover/1" do - test "recovers log to the last message" do - log = LogFactory.insert(:log) - entity = EntityHelper.id() - - message0 = log.message - message1 = "A monad is a monoid in the category of the endofunctors" - message2 = "A commit a day keeps the PM away" - - LogAction.revise(log, entity, message1, 1) - log = LogQuery.fetch(log.log_id) - assert %{message: ^message1} = log - - LogAction.revise(log, entity, message2, 2) - log = LogQuery.fetch(log.log_id) - assert %{message: ^message2} = log - - assert {:ok, :recovered, _} = LogAction.recover(log) - assert %{message: ^message1} = LogQuery.fetch(log.log_id) - - assert {:ok, :recovered, _} = LogAction.recover(log) - assert %{message: 
^message0} = LogQuery.fetch(log.log_id) - end - - test "returns LogModified event when a message is recovered" do - log = LogFactory.insert(:log) - entity = EntityHelper.id() - message = "nullPointerException" - - LogAction.revise(log, entity, message, 1) - - assert {:ok, :recovered, [%LogModifiedEvent{}]} = LogAction.recover(log) - end - - test "returns error when log is original" do - log = LogFactory.insert(:log) - - assert {:error, :original_revision} == LogAction.recover(log) - end - - test "deletes log if it was forged" do - log = LogFactory.insert(:log, forge_version: 1) - - assert Repo.get(Log, log.log_id) - assert {:ok, :deleted, _} = LogAction.recover(log) - refute Repo.get(Log, log.log_id) - end - - test "returns LogDeleted event when forged log is deleted" do - log = LogFactory.insert(:log, forge_version: 1) - - assert {:ok, :deleted, [%LogDeletedEvent{}]} = LogAction.recover(log) - end - end -end +# use Helix.Test.Case.Integration + +# alias Helix.Log.Action.Log, as: LogAction +# alias Helix.Log.Model.Log +# alias Helix.Log.Event.Log.Created, as: LogCreatedEvent +# alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent +# alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent +# alias Helix.Log.Query.Log, as: LogQuery +# alias Helix.Log.Repo + +# alias Helix.Test.Log.Factory, as: LogFactory + +# alias Helix.Test.Entity.Helper, as: EntityHelper +# alias Helix.Test.Server.Helper, as: ServerHelper + +# describe "create/3" do +# test "succeeds with valid input" do +# server_id = ServerHelper.id() +# entity_id = EntityHelper.id() +# message = "They are taking the hobbits to Isengard" + +# assert {:ok, _, _} = LogAction.create(server_id, entity_id, message) +# end + +# test "returns LogCreated event" do +# server_id = ServerHelper.id() +# entity_id = EntityHelper.id() +# message = "Just as expected" + +# result = LogAction.create(server_id, entity_id, message) +# assert {:ok, _, [%LogCreatedEvent{}]} = result +# end +# end + +# describe "revise/4" do +# 
test "overrides log message" do +# log = LogFactory.insert(:log) +# entity = EntityHelper.id() +# message = "É nois que voa, bruxão!" +# forge_version = Enum.random(1..999) + +# assert {:ok, _, _} = LogAction.revise(log, entity, message, forge_version) +# assert %{message: ^message} = LogQuery.fetch(log.log_id) +# end + +# test "returns LogModified event" do +# log = LogFactory.insert(:log) +# entity = EntityHelper.id() +# message = "Don't dead, open inside" + +# result = LogAction.revise(log, entity, message, 1) +# assert {:ok, _, [%LogModifiedEvent{}]} = result +# end +# end + +# describe "recover/1" do +# test "recovers log to the last message" do +# log = LogFactory.insert(:log) +# entity = EntityHelper.id() + +# message0 = log.message +# message1 = "A monad is a monoid in the category of the endofunctors" +# message2 = "A commit a day keeps the PM away" + +# LogAction.revise(log, entity, message1, 1) +# log = LogQuery.fetch(log.log_id) +# assert %{message: ^message1} = log + +# LogAction.revise(log, entity, message2, 2) +# log = LogQuery.fetch(log.log_id) +# assert %{message: ^message2} = log + +# assert {:ok, :recovered, _} = LogAction.recover(log) +# assert %{message: ^message1} = LogQuery.fetch(log.log_id) + +# assert {:ok, :recovered, _} = LogAction.recover(log) +# assert %{message: ^message0} = LogQuery.fetch(log.log_id) +# end + +# test "returns LogModified event when a message is recovered" do +# log = LogFactory.insert(:log) +# entity = EntityHelper.id() +# message = "nullPointerException" + +# LogAction.revise(log, entity, message, 1) + +# assert {:ok, :recovered, [%LogModifiedEvent{}]} = LogAction.recover(log) +# end + +# test "returns error when log is original" do +# log = LogFactory.insert(:log) + +# assert {:error, :original_revision} == LogAction.recover(log) +# end + +# test "deletes log if it was forged" do +# log = LogFactory.insert(:log, forge_version: 1) + +# assert Repo.get(Log, log.log_id) +# assert {:ok, :deleted, _} = 
LogAction.recover(log) +# refute Repo.get(Log, log.log_id) +# end + +# test "returns LogDeleted event when forged log is deleted" do +# log = LogFactory.insert(:log, forge_version: 1) + +# assert {:ok, :deleted, [%LogDeletedEvent{}]} = LogAction.recover(log) +# end +# end +# end diff --git a/test/log/event/handler/log_test.exs b/test/log/event/handler/log_test.exs index ae53f8fa..ed945811 100644 --- a/test/log/event/handler/log_test.exs +++ b/test/log/event/handler/log_test.exs @@ -18,7 +18,8 @@ defmodule Helix.Log.Event.Handler.LogTest do alias Helix.Test.Network.Setup, as: NetworkSetup alias Helix.Test.Server.Helper, as: ServerHelper alias Helix.Test.Server.Setup, as: ServerSetup - alias Helix.Test.Log.Factory, as: LogFactory + # alias Helix.Test.Log.Factory, as: LogFactory + alias Helix.Test.Log.Helper, as: LogHelper describe "handle_event/1" do test "follows the LoggableFlow" do @@ -32,25 +33,22 @@ defmodule Helix.Log.Event.Handler.LogTest do # Simulates the handler receiving the event assert :ok == LogHandler.handle_event(event) + file_name = LogHelper.log_file_name(event.file) + # Now we verify that the corresponding log has been saved on the relevant # places. 
[log_gateway] = LogQuery.get_logs_on_server(event.to_server_id) - assert_log \ - log_gateway, - event.to_server_id, - event.entity_id, - "localhost downloaded" + assert_log log_gateway, event.to_server_id, event.entity_id, + :file_download_gateway, %{file_name: file_name} [log_destination] = LogQuery.get_logs_on_server(event.from_server_id) - assert_log \ - log_destination, - event.from_server_id, - event.entity_id, - "from localhost" + assert_log log_destination, event.from_server_id, event.entity_id, + :file_download_endpoint, %{file_name: file_name} end test "creates logs on intermediary nodes (bounces)" do {bounce, _} = NetworkSetup.Bounce.bounce(total: 4) + [ {l1_server_id, _, l1_ip}, {l2_server_id, _, l2_ip}, @@ -62,6 +60,8 @@ defmodule Helix.Log.Event.Handler.LogTest do EventSetup.Software.file_downloaded() |> Event.set_bounce(bounce) + file_name = LogHelper.log_file_name(event.file) + gateway_ip = ServerHelper.get_ip(event.to_server_id) endpoint_ip = ServerHelper.get_ip(event.from_server_id) @@ -71,36 +71,32 @@ defmodule Helix.Log.Event.Handler.LogTest do # Now we verify that the corresponding log has been saved on the relevant # places. 
[log_gateway] = LogQuery.get_logs_on_server(event.to_server_id) - assert_log \ - log_gateway, event.to_server_id, event.entity_id, "localhost downloaded" + assert_log log_gateway, event.to_server_id, event.entity_id, + :file_download_gateway, %{file_name: file_name, ip: l1_ip} # log on `l1` tells connection was bounced from `gateway` to `l2` [log_bounce1] = LogQuery.get_logs_on_server(l1_server_id) - assert_log \ - log_bounce1, l1_server_id, event.entity_id, - "Connection bounced", contains: "from #{gateway_ip} to #{l2_ip}" + assert_log log_bounce1, l1_server_id, event.entity_id, + :connection_bounced, %{ip_prev: gateway_ip, ip_next: l2_ip} # log on `l2` tells connection was bounced from `l1` to `l3` [log_bounce2] = LogQuery.get_logs_on_server(l2_server_id) - assert_log \ - log_bounce2, l2_server_id, event.entity_id, - "Connection bounced", contains: "from #{l1_ip} to #{l3_ip}" + assert_log log_bounce2, l2_server_id, event.entity_id, + :connection_bounced, %{ip_prev: l1_ip, ip_next: l3_ip} # log on `l3` tells connection was bounced from `l2` to `l4` [log_bounce3] = LogQuery.get_logs_on_server(l3_server_id) - assert_log \ - log_bounce3, l3_server_id, event.entity_id, - "Connection bounced", contains: "from #{l2_ip} to #{l4_ip}" + assert_log log_bounce3, l3_server_id, event.entity_id, + :connection_bounced, %{ip_prev: l2_ip, ip_next: l4_ip} # log on `l4` tells connection was bounced from `l3` to `endpoint` [log_bounce4] = LogQuery.get_logs_on_server(l4_server_id) - assert_log \ - log_bounce4, l4_server_id, event.entity_id, - "Connection bounced", contains: "from #{l3_ip} to #{endpoint_ip}" + assert_log log_bounce4, l4_server_id, event.entity_id, + :connection_bounced, %{ip_prev: l3_ip, ip_next: endpoint_ip} [log_destination] = LogQuery.get_logs_on_server(event.from_server_id) - assert_log \ - log_destination, event.from_server_id, event.entity_id, "from localhost" + assert_log log_destination, event.from_server_id, event.entity_id, + :file_download_endpoint, 
%{file_name: file_name, ip: l4_ip} end test "works on single-node log ('offline log')" do @@ -113,52 +109,51 @@ defmodule Helix.Log.Event.Handler.LogTest do assert :ok == LogHandler.handle_event(event) [log_server] = LogQuery.get_logs_on_server(event.server_id) - assert_log \ - log_server, event.server_id, event.entity_id, - "Localhost logged in" - end - end - - describe "log_forge_conclusion/1 for LogForge.Edit" do - test "adds revision to target log" do - target_log = LogFactory.insert(:log) - {entity, _} = EntitySetup.entity() - message = "I just got hidden" - - event = %LogForgeEditComplete{ - target_log_id: target_log.log_id, - entity_id: entity.entity_id, - message: message, - version: 100 - } - - revisions_before = LogQuery.count_revisions_of_entity(target_log, entity) - LogHandler.log_forge_conclusion(event) - revisions_after = LogQuery.count_revisions_of_entity(target_log, entity) - target_log = LogQuery.fetch(target_log.log_id) - - assert revisions_after == revisions_before + 1 - assert message == target_log.message + assert_log log_server, event.server_id, event.entity_id, + :local_login, %{} end end - describe "log_forge_conclusion/1 for LogForge.Create" do - test "creates specified log on target server" do - {server, %{entity: entity}} = ServerSetup.server() - - message = "Mess with the best, die like the rest" - - event = %LogForgeCreateComplete{ - entity_id: entity.entity_id, - target_id: server.server_id, - message: message, - version: 456 - } - - LogHandler.log_forge_conclusion(event) - - assert [log] = LogQuery.get_logs_on_server(server) - assert [%{forge_version: 456}] = Repo.preload(log, :revisions).revisions - end - end + # describe "log_forge_conclusion/1 for LogForge.Edit" do + # test "adds revision to target log" do + # target_log = LogFactory.insert(:log) + # {entity, _} = EntitySetup.entity() + # message = "I just got hidden" + + # event = %LogForgeEditComplete{ + # target_log_id: target_log.log_id, + # entity_id: entity.entity_id, + # 
message: message, + # version: 100 + # } + + # revisions_before = LogQuery.count_revisions_of_entity(target_log, entity) + # LogHandler.log_forge_conclusion(event) + # revisions_after = LogQuery.count_revisions_of_entity(target_log, entity) + # target_log = LogQuery.fetch(target_log.log_id) + + # assert revisions_after == revisions_before + 1 + # assert message == target_log.message + # end + # end + + # describe "log_forge_conclusion/1 for LogForge.Create" do + # test "creates specified log on target server" do + # {server, %{entity: entity}} = ServerSetup.server() + + # message = "Mess with the best, die like the rest" + + # event = %LogForgeCreateComplete{ + # entity_id: entity.entity_id, + # target_id: server.server_id, + # message: message, + # version: 456 + # } + + # LogHandler.log_forge_conclusion(event) + + # assert [log] = LogQuery.get_logs_on_server(server) + # assert [%{forge_version: 456}] = Repo.preload(log, :revisions).revisions + # end + # end end diff --git a/test/log/event/log_test.exs b/test/log/event/log_test.exs index 357cacd2..afd3c6af 100644 --- a/test/log/event/log_test.exs +++ b/test/log/event/log_test.exs @@ -18,8 +18,8 @@ defmodule Helix.Log.Event.LogTest do # Returned payload and json-friendly assert data.log_id == to_string(event.log.log_id) - assert data.message == event.log.message - assert data.server_id == to_string(event.log.server_id) + assert data.type == to_string(event.log.revision.type) + assert data.data assert is_float(data.timestamp) # Returned event is correct @@ -33,23 +33,23 @@ defmodule Helix.Log.Event.LogTest do end end - describe "LogModifiedEvent" do - test "Publishable.generate_payload/2" do - event = EventSetup.Log.modified() + # describe "LogModifiedEvent" do + # test "Publishable.generate_payload/2" do + # event = EventSetup.Log.modified() - # Generates the payload - assert {:ok, data} = Publishable.generate_payload(event, @mocked_socket) + # # Generates the payload + # assert {:ok, data} = 
Publishable.generate_payload(event, @mocked_socket) - # Returned payload is json-friendly - assert data.log_id == to_string(event.log.log_id) - assert data.message == event.log.message - assert data.server_id == to_string(event.log.server_id) - assert is_float(data.timestamp) + # # Returned payload is json-friendly + # assert data.log_id == to_string(event.log.log_id) + # assert data.message == event.log.message + # assert data.server_id == to_string(event.log.server_id) + # assert is_float(data.timestamp) - # Returned event is correct - assert "log_modified" == Publishable.get_event_name(event) - end - end + # # Returned event is correct + # assert "log_modified" == Publishable.get_event_name(event) + # end + # end describe "LogDeletedEvent" do test "Publishable.generate_payload/2" do diff --git a/test/log/internal/log_test.exs b/test/log/internal/log_test.exs new file mode 100644 index 00000000..fbd2d78a --- /dev/null +++ b/test/log/internal/log_test.exs @@ -0,0 +1,182 @@ +defmodule Helix.Log.Internal.LogTest do + + use Helix.Test.Case.Integration + + alias Helix.Log.Internal.Log, as: LogInternal + + alias Helix.Test.Entity.Helper, as: EntityHelper + alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Software.Helper, as: SoftwareHelper + alias Helix.Test.Log.Helper, as: LogHelper + alias Helix.Test.Log.Setup, as: LogSetup + + describe "create/4" do + test "creates natural log (without forge_version)" do + server_id = ServerHelper.id() + entity_id = EntityHelper.id() + log_info = {log_type, log_data} = LogHelper.log_info() + + assert {:ok, log} = LogInternal.create(server_id, entity_id, log_info) + + # Current revision maps to the correct data + assert log.revision_id == 1 + assert log.revision.type == log_type + assert log.revision.data == Map.from_struct(log_data) + assert log.revision.entity_id == entity_id + refute log.revision.forge_version + + # And some other not-so-important verifications + assert log.revision.creation_time == 
log.creation_time + end + + test "creates artificial log (with forge_version)" do + server_id = ServerHelper.id() + entity_id = EntityHelper.id() + log_info = {log_type, log_data} = LogHelper.log_info() + forger_version = SoftwareHelper.random_version() + + assert {:ok, log} = + LogInternal.create(server_id, entity_id, log_info, forger_version) + + # Current revision maps to the correct data + assert log.revision_id == 1 + assert log.revision.type == log_type + assert log.revision.data == Map.from_struct(log_data) + assert log.revision.entity_id == entity_id + assert log.revision.forge_version == forger_version + + # And some other not-so-important verifications + assert log.revision.creation_time == log.creation_time + end + end + + describe "revise/4" do + test "adds a revision to the log entry" do + log = LogSetup.log!() + + new_entity_id_1 = EntityHelper.id() + new_info_1 = {new_type_1, new_data_1} = LogHelper.log_info() + new_version_1 = 50 + + # Add a revision (second overall) + assert {:ok, new_log_1} = + LogInternal.revise(log, new_entity_id_1, new_info_1, new_version_1) + + # The new log `revision_id` points to another revision + assert new_log_1.revision_id == 2 + + # The returned log's `revision` has changed + assert new_log_1.revision.entity_id == new_entity_id_1 + assert new_log_1.revision.type == new_type_1 + assert new_log_1.revision.data == Map.from_struct(new_data_1) + assert new_log_1.revision.forge_version == new_version_1 + + # But stuff that shouldn't change hasn't changed + assert new_log_1.creation_time == log.creation_time + + new_entity_id_2 = EntityHelper.id() + new_info_2 = {new_type_2, new_data_2} = LogHelper.log_info() + new_version_2 = 100 + + # Add yet another revision (third overall) + assert {:ok, new_log_2} = + LogInternal.revise( + new_log_1, new_entity_id_2, new_info_2, new_version_2 + ) + + # The new log `revision_id` points to another revision + assert new_log_2.revision_id == 3 + + # The returned log's `revision` has changed + 
assert new_log_2.revision.entity_id == new_entity_id_2 + assert new_log_2.revision.type == new_type_2 + assert new_log_2.revision.data == Map.from_struct(new_data_2) + assert new_log_2.revision.forge_version == new_version_2 + + # But stuff that shouldn't change hasn't changed + assert new_log_2.creation_time == log.creation_time + end + end + + describe "recover/1" do + test "returns own log when last revision of natural log" do + log = LogSetup.log!() + + # When recovering the last revision of a natural log, we must return the + # log itself. + assert {:original, same_log} = LogInternal.recover(log) + + # We can't assert `same_log == log` because of some Ecto internals... + assert same_log.log_id == log.log_id + assert same_log.revision_id == log.revision_id + + # Log was not deleted + assert LogInternal.fetch(log.log_id) + end + + test "destroys log when last revision of artificial log" do + log = LogSetup.log!(forge_version: 50) + + assert :destroyed == LogInternal.recover(log) + + # Log was deleted + refute LogInternal.fetch(log.log_id) + end + + test "recovers natural log with multiple revisions" do + log = LogSetup.log!(revisions: 3) + + [_rev3, rev2, rev1] = LogHelper.get_all_revisions(log.log_id) + + # Recover the first (originally 3 revisions) + assert {:recovered, new_log1} = LogInternal.recover(log) + + # Popped last revision from stack + assert new_log1.revision_id == 2 + assert new_log1.revision.type == rev2.type + assert new_log1.revision.data == rev2.data + assert new_log1.revision.entity_id == rev2.entity_id + assert new_log1.revision.forge_version == rev2.forge_version + + # Without changing stuff that shouldn't be changed + assert new_log1.creation_time == log.creation_time + + # Recover the second (originally 3 revisions) + assert {:recovered, new_log2} = LogInternal.recover(new_log1) + + # Popped last revision from stack + assert new_log2.revision_id == 1 + assert new_log2.revision.type == rev1.type + assert new_log2.revision.data == rev1.data 
+ assert new_log2.revision.entity_id == rev1.entity_id + assert new_log2.revision.forge_version == rev1.forge_version + + # Without changing stuff that shouldn't be changed + assert new_log2.creation_time == log.creation_time + + # Can't recover any further, as we are currently at the original revision + assert {:original, new_log2} == LogInternal.recover(new_log2) + end + + test "recovers artificial log with multiple revisions" do + log = LogSetup.log!(revisions: 2, forge_version: 20) + + [_rev2, rev1] = LogHelper.get_all_revisions(log.log_id) + + assert {:recovered, new_log1} = LogInternal.recover(log) + + # Popped last revision from stack + assert new_log1.revision_id == 1 + assert new_log1.revision.type == rev1.type + assert new_log1.revision.data == rev1.data + assert new_log1.revision.entity_id == rev1.entity_id + assert new_log1.revision.forge_version == rev1.forge_version + + # Without changing stuff that shouldn't be changed + assert new_log1.creation_time == log.creation_time + + # And when attempting to recover once again, the artificial log is deleted + assert :destroyed == LogInternal.recover(new_log1) + end + end +end diff --git a/test/log/public/index_test.exs b/test/log/public/index_test.exs index 0cdb8418..d39b8ba0 100644 --- a/test/log/public/index_test.exs +++ b/test/log/public/index_test.exs @@ -11,18 +11,18 @@ defmodule Helix.Log.Public.IndexTest do test "indexes correctly" do {server, _} = ServerSetup.server() - log1 = LogSetup.log!([server_id: server.server_id, own_log: true]) - log2 = LogSetup.log!([server_id: server.server_id, own_log: true]) + log1 = LogSetup.log!(server_id: server.server_id, own_log: true) + log2 = LogSetup.log!(server_id: server.server_id, own_log: true) index = LogIndex.index(server.server_id) result_log1 = Enum.find(index, &(&1.log_id == log1.log_id)) - assert result_log1.message == log1.message - assert result_log1.timestamp == log1.creation_time + assert result_log1.revision.type == log1.revision.type + assert 
result_log1.creation_time == log1.creation_time result_log2 = Enum.find(index, &(&1.log_id == log2.log_id)) - assert result_log2.message == log2.message - assert result_log2.timestamp == log2.creation_time + assert result_log2.revision.type == log2.revision.type + assert result_log2.creation_time == log2.creation_time end end @@ -30,8 +30,8 @@ defmodule Helix.Log.Public.IndexTest do test "returns JSON friendly output" do {server, _} = ServerSetup.server() - log1 = LogSetup.log!([server_id: server.server_id, own_log: true]) - log2 = LogSetup.log!([server_id: server.server_id, own_log: true]) + log1 = LogSetup.log!(server_id: server.server_id, own_log: true) + log2 = LogSetup.log!(server_id: server.server_id, own_log: true) index = LogIndex.index(server.server_id) rendered = LogIndex.render_index(index) diff --git a/test/log/query/log_test.exs b/test/log/query/log_test.exs index 71b54813..deaace36 100644 --- a/test/log/query/log_test.exs +++ b/test/log/query/log_test.exs @@ -1,102 +1,102 @@ -defmodule Helix.Log.Query.LogTest do - - use Helix.Test.Case.Integration - - alias Helix.Log.Action.Log, as: LogAction - alias Helix.Log.Query.Log, as: LogQuery - - alias Helix.Test.Log.Factory, as: Factory - - alias Helix.Test.Entity.Helper, as: EntityHelper - alias Helix.Test.Server.Helper, as: ServerHelper - - describe "get_logs_on_server/1" do - # Well, i think that the function name might be a bit obvious, eh ? 
- test "returns logs that belongs to a server" do - # Random logs on other servers - Enum.each(1..5, fn _ -> Factory.insert(:log) end) - - server = ServerHelper.id() - expected = - Enum.map(1..5, fn _ -> - Factory.insert(:log, server_id: server) - end) - |> Enum.map(&(&1.log_id)) - |> MapSet.new() - - fetched = - server - |> LogQuery.get_logs_on_server() - |> Enum.map(&(&1.log_id)) - |> MapSet.new() - - assert MapSet.equal?(expected, fetched) - end - end - - describe "get_logs_from_entity_on_server/2" do - test "returns logs that were created by the entity" do - server = ServerHelper.id() - entity = EntityHelper.id() - - create_log = fn params -> - defaults = %{ - server_id: ServerHelper.id(), - entity_id: EntityHelper.id(), - message: "Default message" - } - p = Map.merge(defaults, params) - - {:ok, log, _} = LogAction.create(p.server_id, p.entity_id, p.message) - log - end - - # Random logs that were not created by the entity - Enum.each(1..5, fn _ -> create_log.(%{server_id: server}) end) - - entity_params = %{server_id: server, entity_id: entity} - expected = - 1..5 - |> Enum.map(fn _ -> create_log.(entity_params) end) - |> Enum.map(&(&1.log_id)) - |> MapSet.new() - - fetched = - server - |> LogQuery.get_logs_from_entity_on_server(entity) - |> Enum.map(&(&1.log_id)) - |> MapSet.new() - - assert MapSet.equal?(expected, fetched) - end - - test "returns logs that were touched by entity" do - server = ServerHelper.id() - entity = EntityHelper.id() - - # Random logs that were not touched by the entity - Enum.each(1..5, fn _ -> - Factory.insert(:log, server_id: server) - end) - - expected = - Enum.map(1..5, fn _ -> - Factory.insert(:log, server_id: server) - end) - |> Enum.map(fn log -> - LogAction.revise(log, entity, "touched", 1) - log - end) - |> Enum.map(&(&1.log_id)) - |> MapSet.new() - - fetched = - server - |> LogQuery.get_logs_from_entity_on_server(entity) - |> Enum.map(&(&1.log_id)) - |> MapSet.new() - - assert MapSet.equal?(expected, fetched) - end - end 
-end +# defmodule Helix.Log.Query.LogTest do + +# use Helix.Test.Case.Integration + +# alias Helix.Log.Action.Log, as: LogAction +# alias Helix.Log.Query.Log, as: LogQuery + +# alias Helix.Test.Log.Factory, as: Factory + +# alias Helix.Test.Entity.Helper, as: EntityHelper +# alias Helix.Test.Server.Helper, as: ServerHelper + +# describe "get_logs_on_server/1" do +# # Well, i think that the function name might be a bit obvious, eh ? +# test "returns logs that belongs to a server" do +# # Random logs on other servers +# Enum.each(1..5, fn _ -> Factory.insert(:log) end) + +# server = ServerHelper.id() +# expected = +# Enum.map(1..5, fn _ -> +# Factory.insert(:log, server_id: server) +# end) +# |> Enum.map(&(&1.log_id)) +# |> MapSet.new() + +# fetched = +# server +# |> LogQuery.get_logs_on_server() +# |> Enum.map(&(&1.log_id)) +# |> MapSet.new() + +# assert MapSet.equal?(expected, fetched) +# end +# end + +# describe "get_logs_from_entity_on_server/2" do +# test "returns logs that were created by the entity" do +# server = ServerHelper.id() +# entity = EntityHelper.id() + +# create_log = fn params -> +# defaults = %{ +# server_id: ServerHelper.id(), +# entity_id: EntityHelper.id(), +# message: "Default message" +# } +# p = Map.merge(defaults, params) + +# {:ok, log, _} = LogAction.create(p.server_id, p.entity_id, p.message) +# log +# end + +# # Random logs that were not created by the entity +# Enum.each(1..5, fn _ -> create_log.(%{server_id: server}) end) + +# entity_params = %{server_id: server, entity_id: entity} +# expected = +# 1..5 +# |> Enum.map(fn _ -> create_log.(entity_params) end) +# |> Enum.map(&(&1.log_id)) +# |> MapSet.new() + +# fetched = +# server +# |> LogQuery.get_logs_from_entity_on_server(entity) +# |> Enum.map(&(&1.log_id)) +# |> MapSet.new() + +# assert MapSet.equal?(expected, fetched) +# end + +# test "returns logs that were touched by entity" do +# server = ServerHelper.id() +# entity = EntityHelper.id() + +# # Random logs that were not touched 
by the entity +# Enum.each(1..5, fn _ -> +# Factory.insert(:log, server_id: server) +# end) + +# expected = +# Enum.map(1..5, fn _ -> +# Factory.insert(:log, server_id: server) +# end) +# |> Enum.map(fn log -> +# LogAction.revise(log, entity, "touched", 1) +# log +# end) +# |> Enum.map(&(&1.log_id)) +# |> MapSet.new() + +# fetched = +# server +# |> LogQuery.get_logs_from_entity_on_server(entity) +# |> Enum.map(&(&1.log_id)) +# |> MapSet.new() + +# assert MapSet.equal?(expected, fetched) +# end +# end +# end diff --git a/test/network/event/connection_test.exs b/test/network/event/connection_test.exs index 8b2d7bae..82019679 100644 --- a/test/network/event/connection_test.exs +++ b/test/network/event/connection_test.exs @@ -25,18 +25,12 @@ defmodule Helix.Network.Event.ConnectionTest do target_ip = ServerHelper.get_ip(event.tunnel.target_id) [log_source] = LogQuery.get_logs_on_server(event.tunnel.gateway_id) - assert_log \ - log_source, event.tunnel.gateway_id, entity_id, - "localhost logged into", - contains: target_ip, - rejects: gateway_ip + assert_log log_source, event.tunnel.gateway_id, entity_id, + :remote_login_gateway, %{ip: target_ip} [log_target] = LogQuery.get_logs_on_server(event.tunnel.target_id) - assert_log \ - log_target, event.tunnel.target_id, entity_id, - "logged in as", - contains: gateway_ip, - rejects: target_ip + assert_log log_target, event.tunnel.target_id, entity_id, + :remote_login_endpoint, %{ip: gateway_ip} end test "new log is created when ssh connection is started (with bounce)" do @@ -51,25 +45,18 @@ defmodule Helix.Network.Event.ConnectionTest do entity_id = ServerHelper.get_owner(event.tunnel.gateway_id).entity_id - gateway_ip = ServerHelper.get_ip(event.tunnel.gateway_id) - target_ip = ServerHelper.get_ip(event.tunnel.target_id) - # First log does not contain the target ip (it uses the bounce ip instead) [log_source] = LogQuery.get_logs_on_server(event.tunnel.gateway_id) - assert_log \ - log_source, event.tunnel.gateway_id, 
entity_id, - "localhost logged into", - rejects: target_ip + assert_log log_source, event.tunnel.gateway_id, entity_id, + :remote_login_gateway, %{} assert_bounce \ bounce, event.tunnel.gateway_id, event.tunnel.target_id, entity_id # Last log does not contain the source ip (it uses the bounce ip instead) [log_target] = LogQuery.get_logs_on_server(event.tunnel.target_id) - assert_log \ - log_target, event.tunnel.target_id, entity_id, - "logged in as", - rejects: gateway_ip + assert_log log_target, event.tunnel.target_id, entity_id, + :remote_login_endpoint, %{} end end end diff --git a/test/software/event/file_test.exs b/test/software/event/file_test.exs index 845eabc6..2831fa78 100644 --- a/test/software/event/file_test.exs +++ b/test/software/event/file_test.exs @@ -8,6 +8,7 @@ defmodule Helix.Software.Event.File.DownloadedTest do alias Helix.Log.Query.Log, as: LogQuery alias Helix.Test.Event.Setup, as: EventSetup + alias Helix.Test.Log.Helper, as: LogHelper alias Helix.Test.Server.Helper, as: ServerHelper describe "event reactions" do @@ -18,24 +19,19 @@ defmodule Helix.Software.Event.File.DownloadedTest do # Simulates the Event being dispatched to the listening handlers Event.emit(event) + file_name = LogHelper.log_file_name(event.file) gateway_ip = ServerHelper.get_ip(event.to_server_id) destination_ip = ServerHelper.get_ip(event.from_server_id) # Log saved on transfer source [log_gateway] = LogQuery.get_logs_on_server(event.to_server_id) - assert_log \ - log_gateway, event.to_server_id, event.entity_id, - "localhost downloaded", - contains: destination_ip, - rejects: [gateway_ip, "Public FTP"] + assert_log log_gateway, event.to_server_id, event.entity_id, + :file_download_gateway, %{file_name: file_name, ip: destination_ip} # Log saved on transfer target [log_destination] = LogQuery.get_logs_on_server(event.from_server_id) - assert_log \ - log_destination, event.from_server_id, event.entity_id, - "from localhost", - contains: gateway_ip, - rejects: 
[destination_ip, "Public FTP"] + assert_log log_destination, event.from_server_id, event.entity_id, + :file_download_endpoint, %{file_name: file_name, ip: gateway_ip} end # Context: gateway is downloading from destination @@ -45,24 +41,19 @@ defmodule Helix.Software.Event.File.DownloadedTest do # Simulates the Event being dispatched to the listening handlers Event.emit(event) - gateway_ip = ServerHelper.get_ip(event.to_server_id) + file_name = LogHelper.log_file_name(event.file) + gateway_ip = ServerHelper.get_ip(event.to_server_id) |> censor_ip destination_ip = ServerHelper.get_ip(event.from_server_id) # Log saved on transfer source (gateway) [log_gateway] = LogQuery.get_logs_on_server(event.to_server_id) - assert_log \ - log_gateway, event.to_server_id, event.entity_id, - "localhost downloaded", - contains: [destination_ip, "Public FTP"], - rejects: gateway_ip + assert_log log_gateway, event.to_server_id, event.entity_id, + :pftp_file_download_gateway, %{file_name: file_name, ip: destination_ip} # Log saved on transfer target [log_destination] = LogQuery.get_logs_on_server(event.from_server_id) - assert_log \ - log_destination, event.from_server_id, event.entity_id, - "from localhost Public FTP", - contains: [censor_ip(gateway_ip)], - rejects: [destination_ip, gateway_ip] + assert_log log_destination, event.from_server_id, event.entity_id, + :pftp_file_download_endpoint, %{file_name: file_name, ip: gateway_ip} end end end diff --git a/test/software/event/virus_test.exs b/test/software/event/virus_test.exs index b39e4ba8..ae722a9b 100644 --- a/test/software/event/virus_test.exs +++ b/test/software/event/virus_test.exs @@ -4,16 +4,13 @@ defmodule Helix.Software.Event.Virus.InstalledTest do import Helix.Test.Log.Macros - alias Helix.Log.Query.Log, as: LogQuery - alias Helix.Test.Event.Helper, as: EventHelper alias Helix.Test.Event.Setup, as: EventSetup - alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Log.Helper, as: LogHelper describe "event 
reactions" do test "logs are created" do - {event, %{virus: virus}} = - EventSetup.Software.file_install_processed(:virus) + {event, _} = EventSetup.Software.file_install_processed(:virus) EventHelper.emit(event) @@ -23,26 +20,19 @@ defmodule Helix.Software.Event.Virus.InstalledTest do gateway_id = process.gateway_id target_id = process.target_id - gateway_ip = ServerHelper.get_ip(gateway_id) - target_ip = ServerHelper.get_ip(target_id) + file_name = LogHelper.log_file_name(event.file) # Log saved on attacker (gateway) - [log_gateway | _] = LogQuery.get_logs_on_server(gateway_id) - assert_log \ - log_gateway, gateway_id, entity_id, - "localhost installed virus", - contains: [virus.name], - rejects: [gateway_ip, target_ip] + log_gateway = LogHelper.get_last_log(gateway_id, :virus_installed_gateway) + assert_log log_gateway, gateway_id, entity_id, + :virus_installed_gateway, %{file_name: file_name} assert_bounce process.bounce_id, gateway_id, target_id, entity_id # Log saved on victim (target) - [log_target | _] = LogQuery.get_logs_on_server(target_id) - assert_log \ - log_target, target_id, entity_id, - "at localhost", - contains: ["installed virus", virus.name], - rejects: [target_ip, target_ip] + log_target = LogHelper.get_last_log(target_id, :virus_installed_endpoint) + assert_log log_target, target_id, entity_id, + :virus_installed_endpoint, %{file_name: file_name} end end end diff --git a/test/software/model/software_type/log_forge/process_type_test.exs b/test/software/model/software_type/log_forge/process_type_test.exs deleted file mode 100644 index c4942701..00000000 --- a/test/software/model/software_type/log_forge/process_type_test.exs +++ /dev/null @@ -1,292 +0,0 @@ -defmodule Helix.Software.Model.SoftwareType.LogForgeTest do - - use Helix.Test.Case.Integration - - alias Ecto.Changeset - alias Helix.Log.Model.Log - alias Helix.Entity.Model.Entity - alias Helix.Process.Model.Processable - alias Helix.Process.Public.View.Process, as: ProcessView - alias 
Helix.Server.Model.Server - alias Helix.Software.Model.SoftwareType.LogForge - - alias Helix.Test.Entity.Helper, as: EntityHelper - alias Helix.Test.Log.Helper, as: LogHelper - alias Helix.Test.Server.Helper, as: ServerHelper - alias Helix.Test.Process.Helper, as: ProcessHelper - alias Helix.Test.Process.Setup, as: ProcessSetup - alias Helix.Test.Process.TOPHelper - alias Helix.Test.Process.View.Helper, as: ProcessViewHelper - alias Helix.Test.Software.Setup, as: SoftwareSetup - - defp forger_file do - SoftwareSetup.file!(type: :log_forger) - end - - describe "create/2" do - test "returns changeset if invalid" do - assert {:error, changeset} = LogForge.create(forger_file(), %{}) - assert %Changeset{valid?: false} = changeset - end - - test "requires operation and entity_id" do - expected_errors = [:operation, :entity_id] - - assert {:error, changeset} = LogForge.create(forger_file(), %{}) - errors = Keyword.keys(changeset.errors) - assert Enum.sort(expected_errors) == Enum.sort(errors) - end - - test "requires target_log_id when operation is edit" do - params = %{message: "", operation: :edit} - - expected_errors = [:target_log_id] - - assert {:error, changeset} = LogForge.create(forger_file(), params) - errors = Keyword.keys(changeset.errors) - assert Enum.all?(expected_errors, &(&1 in errors)) - end - - test "requires target_id when operation is create" do - params = %{message: "", operation: :create} - - expected_errors = [:target_id] - - assert {:error, changeset} = LogForge.create(forger_file(), params) - errors = Keyword.keys(changeset.errors) - assert Enum.all?(expected_errors, &(&1 in errors)) - end - - test "accepts binary input" do - params_edit = %{ - "target_log_id" => to_string(LogHelper.id()), - "message" => "WAKE ME UP INSIDE (can't wake up)", - "operation" => :edit, - "entity_id" => to_string(EntityHelper.id()) - } - params_create = %{ - "target_id" => to_string(ServerHelper.id()), - "message" => "A weapon to surpass Datal Gear", - "operation" => 
:create, - "entity_id" => to_string(EntityHelper.id()) - } - - assert {:ok, %LogForge{}} = LogForge.create(forger_file(), params_edit) - assert {:ok, %LogForge{}} = LogForge.create(forger_file(), params_create) - end - - test "accepts native erlang term entries" do - params_edit = %{ - target_log_id: LogHelper.id(), - message: "Oh yeah", - operation: :edit, - entity_id: EntityHelper.id() - } - params_create = %{ - target_id: ServerHelper.id(), - message: "Oh noes", - operation: :create, - entity_id: EntityHelper.id() - } - - assert {:ok, %LogForge{}} = LogForge.create(forger_file(), params_edit) - assert {:ok, %LogForge{}} = LogForge.create(forger_file(), params_create) - end - end - - describe "edit_objective/3" do - test "returns a higher objective when the revision count is bigger" do - process_data = %LogForge{ - target_log_id: LogHelper.id(), - entity_id: EntityHelper.id(), - operation: :edit, - message: "ring ring ring banana phone", - version: 100 - } - log = %Log{ - log_id: process_data.target_log_id, - server_id: ServerHelper.id(), - entity_id: EntityHelper.id(), - message: "" - } - - rev1 = LogForge.edit_objective(process_data, log, 1) - rev2 = LogForge.edit_objective(process_data, log, 2) - rev3 = LogForge.edit_objective(process_data, log, 3) - rev4 = LogForge.edit_objective(process_data, log, 20) - - assert rev2 > rev1 - assert rev3 > rev2 - assert rev4 > rev3 - end - - test "returns a higher objective the higher the forger version is" do - data = %LogForge{ - target_log_id: LogHelper.id(), - entity_id: EntityHelper.id(), - operation: :edit, - message: "Okay robot", - version: 100 - } - log = %Log{ - log_id: data.target_log_id, - server_id: ServerHelper.id(), - entity_id: EntityHelper.id(), - message: "" - } - - rev1 = LogForge.edit_objective(data, log, 1) - rev2 = LogForge.edit_objective(%{data| version: 200}, log, 1) - rev3 = LogForge.edit_objective(%{data| version: 300}, log, 1) - rev4 = LogForge.edit_objective(%{data| version: 999}, log, 1) - - 
assert rev2 > rev1 - assert rev3 > rev2 - assert rev4 > rev3 - end - - test "ignores first revision that created the log" do - process_data = %LogForge{ - target_log_id: LogHelper.id(), - entity_id: EntityHelper.id(), - operation: :edit, - message: "ring ring ring banana phone", - version: 100 - } - same_entity_log = %Log{ - log_id: process_data.target_log_id, - server_id: ServerHelper.id(), - entity_id: process_data.entity_id, - message: "" - } - diferent_entity_log = %{same_entity_log| entity_id: EntityHelper.id()} - - x = LogForge.edit_objective(process_data, same_entity_log, 3) - y = LogForge.edit_objective(process_data, diferent_entity_log, 3) - - # This is because on `x`, because the entity that started the log_forge - # process is the same that originally created the log (and when a log is - # created it starts with one revision), so we should ignore that first - # revision (ie: instead of using `3` as the revision count, we consider - # `3 - 1`) - assert y > x - end - end - - describe "create_objective/1" do - test "returns a higher objective the higher the forger version is" do - data = %LogForge{ - target_id: ServerHelper.id(), - entity_id: EntityHelper.id(), - operation: :create, - message: "Digital style", - version: 100 - } - - rev1 = LogForge.create_objective(data) - rev2 = LogForge.create_objective(%{data| version: 200}) - rev3 = LogForge.create_objective(%{data| version: 300}) - rev4 = LogForge.create_objective(%{data| version: 999}) - - assert rev2 > rev1 - assert rev3 > rev2 - assert rev4 > rev3 - end - end - - describe "ProcessView.render/4 for edit operation" do - test "both partial and full processes returns target_log_id" do - {process, %{target_entity_id: victim_entity}} = log_forger_process(:edit) - data = process.data - - victim_server = process.target_id - attacker_entity = process.source_entity_id - - # Victim rendering Log process on her own server. Partial access. 
- victim_view = - ProcessView.render(data, process, victim_server, victim_entity) - - # Attacker rendering Log process on Victim server. Full access. - attacker_view = - ProcessView.render(data, process, victim_server, attacker_entity) - - ProcessViewHelper.assert_keys(victim_view, :partial, &pview_edit_data/1) - ProcessViewHelper.assert_keys(attacker_view, :full, &pview_edit_data/1) - - assert victim_view.data.target_log_id - assert is_binary(victim_view.data.target_log_id) - assert attacker_view.data.target_log_id == to_string(data.target_log_id) - - TOPHelper.top_stop(process.gateway_id) - end - - defp pview_edit_data(_) do - [:target_log_id] - |> Enum.sort() - end - end - - describe "ProcessView.render/4 for create operation" do - test "both partial and full process adds no complement" do - {process, _} = log_forger_process(:create) - data = process.data - - attacker_server = process.gateway_id - victim_server = process.target_id - attacker_entity = process.source_entity_id - third_entity = EntityHelper.id() - - # Third-party rendering Log process on victim. Partial access. - third_view = - ProcessView.render(data, process, victim_server, third_entity) - - # Attacker who started the Log process, on his own server. Full access. 
- attacker_view = - ProcessView.render(data, process, attacker_server, attacker_entity) - - ProcessViewHelper.assert_keys(attacker_view, :full) - ProcessViewHelper.assert_keys(third_view, :partial) - - TOPHelper.top_stop(process.gateway_id) - end - end - - describe "after_read_hook/1" do - test "serializes to the internal representation" do - {process_create, _} = log_forger_process(:create) - {process_edit, _} = log_forger_process(:edit) - - db_create = ProcessHelper.raw_get(process_create.process_id) - db_edit = ProcessHelper.raw_get(process_edit.process_id) - - serialized_create = Processable.after_read_hook(db_create.data) - serialized_edit = Processable.after_read_hook(db_edit.data) - - # Create process has `target_log_id` equals nil - refute serialized_create.target_log_id - assert %Entity.ID{} = serialized_create.entity_id - assert %Server.ID{} = serialized_create.target_id - assert serialized_create.operation == :create - assert serialized_create.message - assert serialized_create.version - - # Edit has valid `target_log_id` - assert %Entity.ID{} = serialized_edit.entity_id - assert %Log.ID{} = serialized_edit.target_log_id - assert %Server.ID{} = serialized_edit.target_id - assert serialized_edit.operation == :edit - assert serialized_edit.message - assert serialized_edit.version - - TOPHelper.top_stop() - end - end - - defp log_forger_process(operation) do - ProcessSetup.process( - fake_server: true, - type: :forge, - data: [operation: operation] - ) - end -end diff --git a/test/support/event/setup/log.ex b/test/support/event/setup/log.ex index 2623991d..c6eea44d 100644 --- a/test/support/event/setup/log.ex +++ b/test/support/event/setup/log.ex @@ -2,7 +2,7 @@ defmodule Helix.Test.Event.Setup.Log do alias Helix.Log.Event.Log.Created, as: LogCreatedEvent alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent - alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent + # alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent alias Helix.Log.Model.Log 
alias Helix.Test.Log.Setup, as: LogSetup @@ -13,11 +13,11 @@ defmodule Helix.Test.Event.Setup.Log do def created(log = %Log{}), do: LogCreatedEvent.new(log) - def modified, - do: modified(generate_fake_log()) + # def modified, + # do: modified(generate_fake_log()) - def modified(log = %Log{}), - do: LogModifiedEvent.new(log) + # def modified(log = %Log{}), + # do: LogModifiedEvent.new(log) def deleted, do: deleted(generate_fake_log()) diff --git a/test/support/log/factory.ex b/test/support/log/factory.ex deleted file mode 100644 index b6c4c1ab..00000000 --- a/test/support/log/factory.ex +++ /dev/null @@ -1,77 +0,0 @@ -defmodule Helix.Test.Log.Factory do - - alias Ecto.Changeset - alias Helix.Log.Model.Log - alias Helix.Log.Repo - - alias Helix.Test.Entity.Helper, as: EntityHelper - alias Helix.Test.Server.Helper, as: ServerHelper - - @type thing :: :log - - @spec build(thing, map | Keyword.t) :: - struct - def build(thing, params \\ %{}) do - thing - |> changeset(params) - |> ensure_valid_changeset() - |> Changeset.apply_changes() - end - - @spec build_list(pos_integer, thing, map | Keyword.t) :: - [struct, ...] - def build_list(n, thing, params \\ %{}) when n >= 1 do - for _ <- 1..n, - do: build(thing, params) - end - - @spec insert(thing, map | Keyword.t) :: - struct - def insert(thing, params \\ %{}) do - thing - |> changeset(params) - |> Repo.insert!() - end - - @spec insert_list(pos_integer, thing, map | Keyword.t) :: - [struct, ...] 
- def insert_list(n, thing, params \\ %{}) when n >= 1 do - for _ <- 1..n, - do: insert(thing, params) - end - - @spec params_for(thing) :: - map - defp params_for(:log) do - %{ - server_id: ServerHelper.id(), - entity_id: EntityHelper.id, - message: "TODO: Use a generator for nice messages" - } - end - @spec fabricate_changeset(thing, map) :: - Changeset.t - defp fabricate_changeset(:log, params), - do: Log.create_changeset(params) - - @spec changeset(thing, map | Keyword.t) :: - Changeset.t - defp changeset(thing, params) do - attrs = - thing - |> params_for() - |> Map.merge(to_map(params)) - - fabricate_changeset(thing, attrs) - end - - defp to_map(x = %{}), - do: x - defp to_map(x) when is_list(x), - do: :maps.from_list(x) - - defp ensure_valid_changeset(cs = %Changeset{valid?: true}), - do: cs - defp ensure_valid_changeset(cs), - do: raise "invalid changeset generated on factory: #{inspect cs}" -end diff --git a/test/support/log/helper.ex b/test/support/log/helper.ex index e4479213..7c3f87e9 100644 --- a/test/support/log/helper.ex +++ b/test/support/log/helper.ex @@ -1,6 +1,16 @@ defmodule Helix.Test.Log.Helper do + import Ecto.Query + + alias Helix.Event.Loggable.Utils, as: LoggableUtils + alias Helix.Server.Model.Server alias Helix.Log.Model.Log + alias Helix.Log.Model.LogType + alias Helix.Log.Model.Revision + alias Helix.Log.Query.Log, as: LogQuery + alias Helix.Log.Repo, as: LogRepo + + alias Helix.Test.Log.Setup.LogType, as: LogTypeSetup @doc """ Given a log, returns the expected format of a public view. @@ -21,4 +31,57 @@ defmodule Helix.Test.Log.Helper do def id, do: Log.ID.generate(%{}, :log) + + @doc """ + Returns a random `Log.info`. + """ + def log_info(opts \\ []), + do: LogTypeSetup.log_info(opts) + + def log_file_name(file), + do: LoggableUtils.get_file_name(file) + + @doc """ + This method should be used when the tester wants to fetch the most recent log + of type `log_type` that was created on `server_id`. 
One might wonder, then, + why not use `LogQuery.get_logs_on_server/1`. Here it goes: + + Logs are ordered by ID. The MSB of a Log ID heritage corresponds to the server + it resides. Then, we have 30 bits dedicated to the time the log was created, + and finally 36 bits that are random, innate to the log itself (if this + paragraph made no sense, then please read docs at `lib/id/id.ex`). + + Notice that, as a result, all logs created on the same server, at the same + time hash (i.e. same second) have their order determined by the 36 random bits + of the object, and as such their order is non-deterministic. + + That's a problem for testing, as several logs will be created at the same + second. In real life, however, this is unlikely to happen, and if it does - + (multiple logs are created on the same server at the same second) - whichever + order we get is fine. + + So, to sum it up, `get_last_log/2` retrieves the most recent `log_type` on + `server_id`, making this query deterministic. + """ + def get_last_log(server = %Server{}, log_type), + do: get_last_log(server.server_id, log_type) + def get_last_log(server_id = %Server.ID{}, log_type) do + server_id + |> LogQuery.get_logs_on_server() + |> Enum.find(fn log -> log.revision.type == log_type end) + end + + @doc """ + Returns all existing revisions for the given log. 
+ """ + def get_all_revisions(log = %Log{}), + do: get_all_revisions(log.log_id) + def get_all_revisions(log_id = %Log.ID{}) do + query = + from lr in Revision, + where: lr.log_id == ^log_id, + order_by: [desc: lr.revision_id] + + LogRepo.all(query) + end end diff --git a/test/support/log/macros.ex b/test/support/log/macros.ex index b92c00b5..a86b8163 100644 --- a/test/support/log/macros.ex +++ b/test/support/log/macros.ex @@ -1,15 +1,14 @@ defmodule Helix.Test.Log.Macros do - alias HELL.Utils alias Helix.Event.Loggable.Utils, as: LoggableUtils alias Helix.Entity.Model.Entity - alias Helix.Log.Query.Log, as: LogQuery alias Helix.Network.Model.Bounce alias Helix.Network.Query.Bounce, as: BounceQuery alias Helix.Server.Model.Server alias Helix.Test.Network.Helper, as: NetworkHelper alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Log.Helper, as: LogHelper @internet_id NetworkHelper.internet_id() @@ -18,35 +17,17 @@ defmodule Helix.Test.Log.Macros do `fragment` is a mandatory parameter, it's an excerpt of the log that must exist on the log content. 
- - Opts: - - contains: List of words/terms that should be present on the log message - - rejects: List of words/terms that must not be present on the log message """ - defmacro assert_log(log, s_id, e_id, fragment, opts \\ quote(do: [])) do - if Keyword.has_key?(opts, :contain), - do: raise "It's `contains`, not `contain`" - if Keyword.has_key?(opts, :reject), - do: raise "It's `rejects`, not `reject`" - - contains = Keyword.get(opts, :contains, []) |> Utils.ensure_list() - rejects = Keyword.get(opts, :rejects, []) |> Utils.ensure_list() - + defmacro assert_log(log, s_id, e_id, type, data) do quote do - # Cut some slack for the callers and handle nested lists - contains = unquote(contains) |> List.flatten() - rejects = unquote(rejects) |> List.flatten() - assert unquote(log).server_id == unquote(s_id) - assert unquote(log).entity_id == unquote(e_id) - assert unquote(log).message =~ unquote(fragment) + assert unquote(log).revision.entity_id == unquote(e_id) - Enum.each(contains, fn term -> - assert unquote(log).message =~ term - end) - Enum.each(rejects, fn term -> - refute unquote(log).message =~ term + assert unquote(log).revision.type == unquote(type) + + Enum.each(unquote(data), fn {key, value} -> + assert Map.fetch!(unquote(log).revision.data, to_string(key)) == value end) end @@ -62,28 +43,13 @@ defmodule Helix.Test.Log.Macros do Helper to assert the logs were correctly generated within the bounce chain. It will check for the std log message "Connection bounced from (n-1) to (n+1)" - - Opts: - - rejects: Values that must not be contained within the bounce message. It - automatically includes the `gateway_ip` and the `endpoint_ip` on the reject - list (if applicable). Useful for rejecting extra stuff, like the log action - (e.g. 
"download", "upload") or custom data (like the file name, version etc) """ - defmacro assert_bounce(bounce, gateway, endpoint, entity, opts \\ quote(do: [])) do - quote location: :keep do - { - links, - gateway_data = {_, _, gateway_ip}, - endpoint_data = {_, _, endpoint_ip}, - entity_id, - {extra_rejects, _opts} - } = verify_bounce_params( - unquote(bounce), - unquote(gateway), - unquote(endpoint), - unquote(entity), - unquote(opts) - ) + defmacro assert_bounce(bounce, gateway, endpoint, entity) do + quote location: :keep, generated: true do + {links, gateway_data, endpoint_data, entity_id} = + verify_bounce_params( + unquote(bounce), unquote(gateway), unquote(endpoint), unquote(entity) + ) bounce_map = [gateway_data | links] ++ [endpoint_data] @@ -96,58 +62,49 @@ defmodule Helix.Test.Log.Macros do links |> Enum.reduce(1, fn link = {server_id, _, _}, idx -> - # Unless we are on the first bounce, `gateway_ip` must not show up - first_ip = idx >= 2 && gateway_ip || [] - - # Unless we are on the last bounce, `endpoint_ip` must not show up - last_ip = idx <= length_bounce - 1 && endpoint_ip || [] - {_, _, ip_prev} = bounce_map[idx - 1] {_, _, ip_next} = bounce_map[idx + 1] - assert [log_bounce | _] = LogQuery.get_logs_on_server(server_id) - assert_log \ - log_bounce, server_id, entity_id, - "Connection bounced", - contains: ["from #{ip_prev} to #{ip_next}"], - rejects: [first_ip, last_ip, extra_rejects] + log_bounce = LogHelper.get_last_log(server_id, :connection_bounced) + assert_log log_bounce, server_id, entity_id, + :connection_bounced, %{ip_prev: ip_prev, ip_next: ip_next} idx + 1 end) end end - def verify_bounce_params(bounce, gat, endp, ent, opts, net_id \\ @internet_id) + def verify_bounce_params(bounce, gat, endp, ent, net_id \\ @internet_id) def verify_bounce_params( - bounce_id = %Bounce.ID{}, gateway, endpoint, entity, opts, network_id) + bounce_id = %Bounce.ID{}, gateway, endpoint, entity, network_id) do verify_bounce_params( - 
BounceQuery.fetch(bounce_id), gateway, endpoint, entity, opts, network_id + BounceQuery.fetch(bounce_id), gateway, endpoint, entity, network_id ) end def verify_bounce_params( - bounce, gateway = %Server{}, endpoint_id, entity_id, opts, network_id) + bounce, gateway = %Server{}, endpoint_id, entity_id, network_id) do verify_bounce_params( - bounce, gateway.server_id, endpoint_id, entity_id, opts, network_id + bounce, gateway.server_id, endpoint_id, entity_id, network_id ) end def verify_bounce_params( - bounce, gateway, endpoint = %Server{}, entity, opts, network_id) + bounce, gateway, endpoint = %Server{}, entity, network_id) do verify_bounce_params( - bounce, gateway, endpoint.server_id, entity, opts, network_id + bounce, gateway, endpoint.server_id, entity, network_id ) end def verify_bounce_params( - bounce, gateway_id, endpoint_id, entity = %Entity{}, opts, network_id) + bounce, gateway_id, endpoint_id, entity = %Entity{}, network_id) do verify_bounce_params( - bounce, gateway_id, endpoint_id, entity.entity_id, opts, network_id + bounce, gateway_id, endpoint_id, entity.entity_id, network_id ) end @@ -156,20 +113,16 @@ defmodule Helix.Test.Log.Macros do gateway_id = %Server.ID{}, endpoint_id = %Server.ID{}, entity_id = %Entity.ID{}, - opts, network_id) do gateway_ip = ServerHelper.get_ip(gateway_id, network_id) endpoint_ip = ServerHelper.get_ip(endpoint_id, network_id) - extra_rejects = Keyword.get(opts, :rejects, []) - { bounce.links, {gateway_id, network_id, gateway_ip}, {endpoint_id, network_id, endpoint_ip}, - entity_id, - {extra_rejects, opts} + entity_id } end end diff --git a/test/support/log/setup.ex b/test/support/log/setup.ex index f6777651..76c7dcfb 100644 --- a/test/support/log/setup.ex +++ b/test/support/log/setup.ex @@ -1,29 +1,43 @@ defmodule Helix.Test.Log.Setup do + alias Ecto.Changeset alias Helix.Server.Query.Server, as: ServerQuery alias Helix.Entity.Query.Entity, as: EntityQuery alias Helix.Log.Model.Log alias Helix.Log.Internal.Log, as: 
LogInternal + alias Helix.Log.Repo, as: LogRepo alias Helix.Test.Entity.Helper, as: EntityHelper alias Helix.Test.Entity.Setup, as: EntitySetup alias Helix.Test.Server.Setup, as: ServerSetup + alias Helix.Test.Software.Helper, as: SoftwareHelper alias Helix.Test.Log.Helper, as: LogHelper @doc """ See doc on `fake_log/1` """ def log(opts \\ []) do - {_, related = %{params: params}} = fake_log(opts) - {:ok, inserted} = - LogInternal.create( - params.server_id, - params.entity_id, - params.message, - params.forge_version - ) - - {inserted, related} + {_, related = %{changeset: changeset}} = fake_log(opts) + {:ok, log} = LogRepo.insert(changeset) + + log = + if related.revisions > 1 do + 1..(related.revisions - 1) + |> Enum.reduce(log, fn _, acc -> + entity_id = EntityHelper.id() + log_info = LogHelper.log_info() + forge_version = SoftwareHelper.random_version() + + {:ok, new_log} = + LogInternal.revise(acc, entity_id, log_info, forge_version) + + new_log + end) + else + log + end + + {log, related} end def log!(opts \\ []) do @@ -34,45 +48,45 @@ defmodule Helix.Test.Log.Setup do @doc """ - server_id: Server which that log belongs to. - entity_id: Entity which that log belongs to. - - message: Log message. + - type: Underlying log type. Defaults to random type. + - data_opts: Opts that will be used to generate the underlying log data. - forge_version: Set the forge version. Defaults to nil. - - fake_server: Whether the Server that hosts the log should be generated. - Defaults to false. + - real_server: Whether the Server that hosts the log should be real. Defaults + to false. - fake_entity: Whether the Entity that owns the log should be generated. Defaults to true. - own_log: Whether the generated log should belong to entity who owns that server. Defaults to false. + - revisions: How many revisions the log should have. Defaults to 1. 
Related: Log.creation_params, Server.t, Entity.id, message :: String.t """ def fake_log(opts \\ []) do - if opts[:own_log] == true and opts[:fake_server] == true do - raise "Can't set both `own_log` and `fake_server`" - end - # Makes credo happy... - {server, entity_id, message, forge_version} = fake_log_get_data(opts) + {server, entity_id, {type, data}, forge_version} = fake_log_get_data(opts) + + params = %{server_id: server.server_id} - params = %{ - server_id: server.server_id, + revision_params = %{ entity_id: entity_id, - message: message, - forge_version: forge_version + forge_version: forge_version, + type: type, + data: Map.from_struct(data) } - changeset = Log.create_changeset(params) - log = - changeset - |> Ecto.Changeset.apply_changes() - |> Map.replace(:creation_time, DateTime.utc_now()) - |> Map.replace(:log_id, LogHelper.id()) + changeset = Log.create_changeset(params, revision_params) + + log = Changeset.apply_changes(changeset) related = %{ params: params, + revision_params: revision_params, server: server, entity_id: entity_id, - message: message, - changeset: changeset + type: type, + data: data, + changeset: changeset, + revisions: Keyword.get(opts, :revisions, 1) } {log, related} @@ -82,10 +96,9 @@ defmodule Helix.Test.Log.Setup do {server, server_owner} = cond do # User asked for fake server - opts[:fake_server] -> - {server, _} = ServerSetup.fake_server() - - {server, nil} + opts[:real_server] -> + {server, %{entity: entity}} = ServerSetup.server() + {server, entity} # User specified a server_id (must exist on the DB) opts[:server_id] -> @@ -96,9 +109,8 @@ defmodule Helix.Test.Log.Setup do # All else: generate a real server true -> - {server, %{entity: entity}} = ServerSetup.server() - - {server, entity} + {server, _} = ServerSetup.fake_server() + {server, nil} end entity_id = @@ -120,9 +132,9 @@ defmodule Helix.Test.Log.Setup do EntityHelper.id() end - message = Access.get(opts, :message, LogHelper.random_message()) - forge_version = 
Access.get(opts, :forge_version, nil) + log_info = LogHelper.log_info(opts) + forge_version = Keyword.get(opts, :forge_version, nil) - {server, entity_id, message, forge_version} + {server, entity_id, log_info, forge_version} end end diff --git a/test/support/log/setup/log_type.ex b/test/support/log/setup/log_type.ex new file mode 100644 index 00000000..011f7f9b --- /dev/null +++ b/test/support/log/setup/log_type.ex @@ -0,0 +1,76 @@ +defmodule Helix.Test.Log.Setup.LogType do + + alias Helix.Log.Model.LogType + + alias HELL.TestHelper.Random + alias Helix.Test.Network.Helper, as: NetworkHelper + + @internet_id NetworkHelper.internet_id() + + @doc """ + Returns a random, valid log type. + """ + def random_type, + do: Enum.random(custom_types()) + + @doc """ + Generates a Log.info based on the given opts. + + Opts: + - type: What log type to use. Defaults to random one. + - data_opts: What data_opts to pass to `data/2`. If not specified, the log + data will be fake (generated randomly). + """ + def log_info(opts) do + type = Keyword.get(opts, :type, random_type()) + data = data(type, opts[:data_opts] || []) + + {type, LogType.new(type, data)} + end + + @doc """ + Returns the corresponding data to the given log type + """ + def data(:local_login, _data_opts), + do: %{} + + def data(:remote_login_gateway, data_opts) do + %{ + network_id: Keyword.get(data_opts, :network_id, @internet_id), + ip: Keyword.get(data_opts, :ip, Random.ipv4()) + } + end + + def data(:remote_login_endpoint, data_opts), + do: data(:remote_login_gateway, data_opts) + + def data(:connection_bounced, data_opts) do + %{ + ip_prev: Keyword.get(data_opts, :ip_prev, Random.ipv4()), + ip_next: Keyword.get(data_opts, :ip_next, Random.ipv4()), + network_id: Keyword.get(data_opts, :network_id, @internet_id) + } + end + + def data(:file_download_gateway, data_opts) do + %{ + file_name: Keyword.get(data_opts, :file_name, Random.string(max: 8)), + ip: Keyword.get(data_opts, :ip, Random.ipv4()), + network_id: 
Keyword.get(data_opts, :network_id, @internet_id) + } + end + + def data(:file_download_endpoint, data_opts), + do: data(:file_download_gateway, data_opts) + + defp custom_types do + [ + :local_login, + :remote_login_gateway, + :remote_login_endpoint, + :connection_bounced, + :file_download_gateway, + :file_download_endpoint + ] + end +end From 2a770e83ea8d9d239989826dcea15b22d0c91ed1 Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Fri, 3 Aug 2018 22:15:10 -0300 Subject: [PATCH 02/14] Add LogForgeProcess --- lib/log/event/forge.ex | 52 ++++++ lib/log/process/forge.ex | 167 ++++++++++++++++++ lib/process/executable.ex | 49 +++-- lib/process/model/process.ex | 11 +- lib/software/process/cracker/bruteforce.ex | 18 +- lib/software/process/cracker/overflow.ex | 19 +- lib/software/process/file/install.ex | 19 +- lib/software/process/file/transfer.ex | 27 ++- lib/software/process/virus/collect.ex | 28 ++- .../process/bank/account/password_reveal.ex | 16 +- lib/universe/bank/process/bank/transfer.ex | 11 +- .../20180729045339_add_target_log.exs | 11 ++ .../action/flow/software/log_forger_test.exs | 80 --------- test/support/process/fake_process.ex | 14 +- test/support/process/setup.ex | 24 ++- test/support/process/setup/data.ex | 76 +++++--- 16 files changed, 392 insertions(+), 230 deletions(-) create mode 100644 lib/log/event/forge.ex create mode 100644 lib/log/process/forge.ex create mode 100644 priv/repo/process/migrations/20180729045339_add_target_log.exs delete mode 100644 test/software/action/flow/software/log_forger_test.exs diff --git a/lib/log/event/forge.ex b/lib/log/event/forge.ex new file mode 100644 index 00000000..9cf899ed --- /dev/null +++ b/lib/log/event/forge.ex @@ -0,0 +1,52 @@ +defmodule Helix.Log.Event.Forge do + + import Helix.Event + + event Processed do + @moduledoc """ + `LogForgeProcessedEvent` is fired when the underlying LogForgeProcess has + achieved its objective and finished executing. 
+ """ + + alias Helix.Process.Model.Process + alias Helix.Log.Model.Log + alias Helix.Log.Process.Forge, as: LogForgeProcess + + @type t :: + %__MODULE__{ + action: LogForgeProcess.action, + server_id: Server.id, + entity_id: Entity.id, + target_log_id: Log.id | nil, + log_info: Log.info, + forger_version: pos_integer + } + + event_struct [ + :action, + :server_id, + :entity_id, + :target_log_id, + :log_info, + :forger_version + ] + + @spec new(Process.t, LogForgeProcess.t) :: + t + def new(process = %Process{}, data = %LogForgeProcess{}) do + %__MODULE__{ + action: get_action(process), + server_id: process.target_id, + entity_id: process.source_entity_id, + target_log_id: process.tgt_log_id, + log_info: {data.log_type, data.log_data}, + forger_version: data.forger_version + } + end + + defp get_action(%Process{type: :log_forge_create}), + do: :create + defp get_action(%Process{type: :log_forge_edit}), + do: :edit + end +end diff --git a/lib/log/process/forge.ex b/lib/log/process/forge.ex new file mode 100644 index 00000000..4d9152dd --- /dev/null +++ b/lib/log/process/forge.ex @@ -0,0 +1,167 @@ +import Helix.Process + +process Helix.Log.Process.Forge do + @moduledoc """ + `LogForgeProcess` is launched when the user wants to edit an existing log, or + create a new one from scratch. 
+ """ + + alias Helix.Software.Model.File + alias Helix.Log.Model.Log + alias __MODULE__, as: LogForgeProcess + + process_struct [:log_type, :log_data, :forger_version] + + @type t :: + %__MODULE__{ + log_type: Log.type, + log_data: Log.data, + forger_version: pos_integer + } + + @type action :: :create | :edit + @type process_type :: :log_forge_create | :log_forge_edit + + @type creation_params :: %{log_info: Log.info} + + @type executable_meta :: + %{ + forger: File.t_of_type(:log_forger), + action: LogForgeProcess.action, + log: Log.t | nil, + ssh: Connection.t | nil + } + + @type resources_params :: + %{ + action: action, + log: Log.t | nil, + forger: File.t_of_type(:log_forger) + } + + @type resources :: + %{ + objective: objective, + static: map, + l_dynamic: [:cpu], + r_dynamic: [] + } + + @type objective :: %{cpu: resource_usage} + + @spec new(creation_params, executable_meta) :: + t + def new( + %{log_info: {log_type, log_data}}, + %{action: action, forger: forger = %File{type: :log_forger}} + ) do + %__MODULE__{ + log_type: log_type, + log_data: log_data, + forger_version: get_forger_version(forger, action) + } + end + + @spec resources(resources_params) :: + resources + def resources(params), + do: get_resources params + + @spec get_forger_version(File.t, action) :: + pos_integer + defp get_forger_version(forger = %File{}, :create), + do: forger.modules.log_create.version + defp get_forger_version(forger = %File{}, :edit), + do: forger.modules.log_edit.version + + processable do + + alias HELL.MapUtils + alias Helix.Log.Event.Forge.Processed, as: LogForgeProcessedEvent + + on_completion(process, data) do + event = LogForgeProcessedEvent.new(process, data) + + {:delete, [event]} + end + + @doc false + def after_read_hook(data) do + %LogForgeProcess{ + log_type: String.to_existing_atom(data.log_type), + log_data: MapUtils.atomize_keys(data.log_data), + forger_version: data.forger_version + } + end + end + + resourceable do + + @type params :: 
LogForgeProcess.resources_params + @type factors :: + %{ + :forger => %{version: FileFactor.fact_version}, + optional(:log) => Log.t + } + + get_factors(_) do + + end + + # get_factors(%{action: :edit, log: log, forger: forger}) do + # factor FileFactor, %{file: forger}, + # only: [:version], as: :forger + # factor LogFactor, %{log: log}, + # only: [:total_revisions] + # end + + # get_factors(%{action: :create, forger: forger}) do + # factor FileFactor, %{file: forger}, + # only: [:version], as: :forger + # end + + # TODO: time resource (for minimum duration) + + cpu(%{action: :edit}) do + f.forger.version.log_edit * f.log.revisions.total + end + + cpu(%{action: :create}) do + f.forger.version.log_create + end + + dynamic do + [:cpu] + end + + static do + %{ + paused: %{ram: 100}, + running: %{ram: 200} + } + end + end + + executable do + + resources(_gateway, _target, _params, meta) do + %{ + log: meta.log, + forger: meta.forger, + action: meta.action + } + end + + source_file(_gateway, _target, _params, %{forger: forger}) do + forger.file_id + end + + source_connection(_gateway, _target, _params, %{ssh: ssh}) do + ssh.connection_id + end + + target_log(_gateway, _target, _params, %{action: :edit, log: log}) do + log.log_id + end + end +end diff --git a/lib/process/executable.ex b/lib/process/executable.ex index 6b3b8709..573cde6a 100644 --- a/lib/process/executable.ex +++ b/lib/process/executable.ex @@ -43,14 +43,14 @@ defmodule Helix.Process.Executable do """ defp handlers(process) do quote do - @spec get_process_data(params) :: + @spec get_process_data(params, meta) :: %{data: unquote(process).t} docp """ Retrieves the `process_data`, according to how it was defined at the Process' `new/1`. Subset of the full process params. 
""" - defp get_process_data(params) do - data = call_process(:new, params) + defp get_process_data(params, meta) do + data = call_process(:new, [params, meta]) %{data: data} end @@ -130,8 +130,10 @@ defmodule Helix.Process.Executable do """ defp call_process(function), do: apply(unquote(process), function, []) + defp call_process(function, param) when not is_list(param), + do: apply(unquote(process), function, [param]) defp call_process(function, params), - do: apply(unquote(process), function, [params]) + do: apply(unquote(process), function, params) @spec setup_connection( Server.t, @@ -257,6 +259,9 @@ defmodule Helix.Process.Executable do import HELL.Macros import Helix.Process.Executable + @type params :: unquote(process).creation_params + @type meta :: unquote(process).executable_meta + @type executable_error :: {:error, :resources} | {:error, :internal} @@ -277,7 +282,6 @@ defmodule Helix.Process.Executable do @spec get_bounce_id(Server.t, Server.t, params, meta) :: %{bounce_id: Bounce.id | nil} - @doc false defp get_bounce_id(_, _, _, %{bounce: bounce = %Bounce{}}), do: %{bounce_id: bounce.bounce_id} defp get_bounce_id(_, _, _, %{bounce: bounce_id = %Bounce.ID{}}), @@ -288,7 +292,6 @@ defmodule Helix.Process.Executable do @spec get_source_connection(Server.t, Server.t, params, meta) :: {:create, Connection.type} | nil - @doc false defp get_source_connection(_, _, _, _), do: nil @@ -296,19 +299,16 @@ defmodule Helix.Process.Executable do {:create, Connection.type} | :same_origin | nil - @doc false defp get_target_connection(_, _, _, _), do: nil @spec get_source_file(Server.t, Server.t, params, meta) :: %{src_file_id: File.id | nil} - @doc false defp get_source_file(_, _, _, _), do: %{src_file_id: nil} @spec get_target_file(Server.t, Server.t, params, meta) :: %{tgt_file_id: File.id | nil} - @doc false defp get_target_file(_, _, _, _), do: %{tgt_file_id: nil} @@ -317,7 +317,6 @@ defmodule Helix.Process.Executable do src_atm_id: Server.t | nil, src_acc_number: 
BankAccount.account | nil } - @doc false defp get_source_bank_account(_, _, _, _), do: %{src_atm_id: nil, src_acc_number: nil} @@ -326,15 +325,18 @@ defmodule Helix.Process.Executable do tgt_atm_id: Server.t | nil, tgt_acc_number: BankAccount.account | nil } - @doc false defp get_target_bank_account(_, _, _, _), do: %{tgt_atm_id: nil, tgt_acc_number: nil} @spec get_target_process(Server.t, Server.t, params, meta) :: %{tgt_process_id: Process.t | nil} - @doc false defp get_target_process(_, _, _, _), do: %{tgt_process_id: nil} + + @spec get_target_log(Server.t, Server.t, params, meta) :: + %{tgt_log_id: Process.t | nil} + defp get_target_log(_, _, _, _), + do: %{tgt_log_id: nil} end end end @@ -354,13 +356,14 @@ defmodule Helix.Process.Executable do Executes the process. """ def execute(unquote_splicing(args), relay) do - process_data = get_process_data(unquote(params)) + process_data = get_process_data(unquote(params), unquote(meta)) resources = get_resources(unquote_splicing(args)) source_file = get_source_file(unquote_splicing(args)) target_file = get_target_file(unquote_splicing(args)) source_bank_account = get_source_bank_account(unquote_splicing(args)) target_bank_account = get_target_bank_account(unquote_splicing(args)) target_process = get_target_process(unquote_splicing(args)) + target_log = get_target_log(unquote_splicing(args)) bounce_id = get_bounce_id(unquote_splicing(args)) ownership = get_ownership(unquote_splicing(args)) process_type = get_process_type(unquote(meta)) @@ -374,6 +377,7 @@ defmodule Helix.Process.Executable do |> Map.merge(source_bank_account) |> Map.merge(target_bank_account) |> Map.merge(target_process) + |> Map.merge(target_log) |> Map.merge(bounce_id) |> Map.merge(ownership) |> Map.merge(process_type) @@ -578,6 +582,24 @@ defmodule Helix.Process.Executable do end end + @doc """ + Returns the process' `tgt_log_id`, as defined on the `target_log` section of + the Process.Executable. 
+ """ + defmacro target_log(gateway, target, params, meta, do: block) do + args = [gateway, target, params, meta] + + quote do + + defp get_target_log(unquote_splicing(args)) do + log_id = unquote(block) + + %{tgt_log_id: log_id} + end + + end + end + @doc """ Returns information about the resource usage of that process, including: @@ -593,7 +615,6 @@ defmodule Helix.Process.Executable do @spec get_resources(Server.t, Server.t, params, meta) :: unquote(process).resources - @doc false defp get_resources(unquote_splicing(args)) do params = unquote(block) diff --git a/lib/process/model/process.ex b/lib/process/model/process.ex index f24d277a..306ee0f6 100644 --- a/lib/process/model/process.ex +++ b/lib/process/model/process.ex @@ -21,6 +21,7 @@ defmodule Helix.Process.Model.Process do alias HELL.MapUtils alias HELL.NaiveStruct alias Helix.Entity.Model.Entity + alias Helix.Log.Model.Log alias Helix.Network.Model.Bounce alias Helix.Network.Model.Connection alias Helix.Network.Model.Network @@ -50,6 +51,7 @@ defmodule Helix.Process.Model.Process do tgt_atm_id: Server.id | nil, tgt_acc_number: BankAccount.account | nil, tgt_process_id: Process.id | nil, + tgt_log_id: Log.id | nil, priority: term, l_allocated: Process.Resources.t | nil, r_allocated: Process.Resources.t | nil, @@ -80,7 +82,8 @@ defmodule Helix.Process.Model.Process do | :virus_collect | :bank_reveal_password | :wire_transfer - | :log_forger + | :log_forge_create + | :log_forge_edit @typedoc """ List of signals a process may receive during its lifetime. 
@@ -225,6 +228,7 @@ defmodule Helix.Process.Model.Process do :tgt_atm_id => Server.id | nil, :tgt_acc_number => BankAccount.account | nil, :tgt_process_id => Process.id | nil, + :tgt_log_id => Log.id | nil, :objective => map, :l_dynamic => dynamic, :r_dynamic => dynamic, @@ -248,6 +252,7 @@ defmodule Helix.Process.Model.Process do :tgt_atm_id, :tgt_acc_number, :tgt_process_id, + :tgt_log_id, :objective, :static, :l_dynamic, @@ -359,6 +364,10 @@ defmodule Helix.Process.Model.Process do field :tgt_process_id, Process.ID, default: nil + # Which log (if any) is the target of this process + field :tgt_log_id, Log.ID, + default: nil + ### Helix.Process required data # Data used by the specific implementation for side-effects generation diff --git a/lib/software/process/cracker/bruteforce.ex b/lib/software/process/cracker/bruteforce.ex index b31e2188..089afe24 100644 --- a/lib/software/process/cracker/bruteforce.ex +++ b/lib/software/process/cracker/bruteforce.ex @@ -24,6 +24,11 @@ process Helix.Software.Process.Cracker.Bruteforce do target_server_ip: Network.ip } + @type executable_meta :: + %{ + cracker: File.t + } + @type objective :: %{cpu: resource_usage} @type resources :: @@ -40,9 +45,9 @@ process Helix.Software.Process.Cracker.Bruteforce do hasher: File.t_of_type(:hasher) | nil } - @spec new(creation_params) :: + @spec new(creation_params, executable_meta) :: t - def new(%{target_server_ip: ip}) do + def new(%{target_server_ip: ip}, _) do %__MODULE__{ target_server_ip: ip } @@ -141,17 +146,8 @@ process Helix.Software.Process.Cracker.Bruteforce do Defines how a BruteforceProcess should be executed. 
""" - alias Helix.Software.Process.Cracker.Bruteforce, as: BruteforceProcess alias Helix.Software.Query.File, as: FileQuery - @type params :: BruteforceProcess.creation_params - - @type meta :: - %{ - :cracker => File.t, - optional(atom) => term - } - resources(_, target, _, %{cracker: cracker}) do hasher = FileQuery.fetch_best(target, :password) diff --git a/lib/software/process/cracker/overflow.ex b/lib/software/process/cracker/overflow.ex index 83542edc..1d6d2999 100644 --- a/lib/software/process/cracker/overflow.ex +++ b/lib/software/process/cracker/overflow.ex @@ -11,6 +11,11 @@ process Helix.Software.Process.Cracker.Overflow do @type creation_params :: %{} + @type executable_meta :: + %{ + cracker: File.t + } + @type objective :: %{cpu: resource_usage} @type resources :: @@ -28,9 +33,9 @@ process Helix.Software.Process.Cracker.Overflow do @process_type :cracker_overflow - @spec new(creation_params) :: + @spec new(creation_params, executable_meta) :: t - def new(_), + def new(_, _), do: %__MODULE__{} @spec resources(resources_params) :: @@ -89,16 +94,6 @@ process Helix.Software.Process.Cracker.Overflow do executable do - alias Helix.Software.Model.File - alias Helix.Software.Process.Cracker.Overflow, as: OverflowProcess - - @type params :: OverflowProcess.creation_params - @type meta :: - %{ - :cracker => File.t, - optional(atom) => term - } - resources(_, _, _, %{cracker: cracker}) do %{cracker: cracker} end diff --git a/lib/software/process/file/install.ex b/lib/software/process/file/install.ex index 9a4d1e09..6c9224c0 100644 --- a/lib/software/process/file/install.ex +++ b/lib/software/process/file/install.ex @@ -33,6 +33,11 @@ process Helix.Software.Process.File.Install do } @type creation_params :: %{backend: backend} + @type executable_meta :: + %{ + file: File.t, + type: process_type + } @type objective :: %{cpu: resource_usage} @@ -42,9 +47,9 @@ process Helix.Software.Process.File.Install do backend: backend } - @spec new(creation_params) :: + @spec 
new(creation_params, executable_meta) :: t - def new(%{backend: backend}) do + def new(%{backend: backend}, _meta) do %__MODULE__{ backend: backend } @@ -112,16 +117,6 @@ process Helix.Software.Process.File.Install do executable do - @type params :: FileInstallProcess.creation_params - - @type meta :: - %{ - file: File.t, - type: process_type - } - - @typep process_type :: FileInstallProcess.process_type - resources(_gateway, _target, %{backend: backend}, %{file: file}) do %{ file: file, diff --git a/lib/software/process/file/transfer.ex b/lib/software/process/file/transfer.ex index c8be3a6f..a5821813 100644 --- a/lib/software/process/file/transfer.ex +++ b/lib/software/process/file/transfer.ex @@ -11,6 +11,7 @@ process Helix.Software.Process.File.Transfer do file is being transferred, is already present on the standard process data. """ + alias Helix.Network.Model.Bounce alias Helix.Network.Model.Network alias Helix.Software.Model.File alias Helix.Software.Model.Storage @@ -45,15 +46,22 @@ process Helix.Software.Process.File.Transfer do destination_storage_id: Storage.id } + @type executable_meta :: %{ + file: File.t, + type: process_type, + network_id: Network.id, + bounce: Bounce.idt | nil + } + @type resources_params :: %{ type: transfer_type, file: File.t, network_id: Network.id } - @spec new(creation_params) :: + @spec new(creation_params, executable_meta) :: t - def new(params = %{destination_storage_id: %Storage.ID{}}) do + def new(params = %{destination_storage_id: %Storage.ID{}}, _) do %__MODULE__{ type: params.type, destination_storage_id: params.destination_storage_id, @@ -198,21 +206,6 @@ process Helix.Software.Process.File.Transfer do Defines how FileTransferProcess should be executed. 
""" - alias Helix.Network.Model.Bounce - alias Helix.Network.Model.Network - alias Helix.Software.Model.File - alias Helix.Software.Process.File.Transfer, as: FileTransferProcess - - @type params :: FileTransferProcess.creation_params - - @type meta :: - %{ - file: File.t, - type: FileTransferProcess.process_type, - network_id: Network.id, - bounce: Bounce.idt | nil - } - resources(_, _, params, meta) do %{ type: params.type, diff --git a/lib/software/process/virus/collect.ex b/lib/software/process/virus/collect.ex index f8e0e52c..6d7d2b44 100644 --- a/lib/software/process/virus/collect.ex +++ b/lib/software/process/virus/collect.ex @@ -12,7 +12,10 @@ process Helix.Software.Process.Virus.Collect do is performed by `VirusHandler` once `VirusCollectProcessedEvent` is fired. """ + alias Helix.Network.Model.Bounce + alias Helix.Network.Model.Network alias Helix.Universe.Bank.Model.BankAccount + alias Helix.Software.Model.File process_struct [:wallet] @@ -40,11 +43,18 @@ process Helix.Software.Process.Virus.Collect do bank_account: BankAccount.t | nil } + @type executable_meta :: + %{ + virus: File.t, + network_id: Network.id, + bounce: Bounce.idt | nil + } + @type resources_params :: map - @spec new(creation_params) :: + @spec new(creation_params, executable_meta) :: t - def new(%{wallet: wallet}) do + def new(%{wallet: wallet}, _) do %__MODULE__{ wallet: wallet } @@ -96,20 +106,6 @@ process Helix.Software.Process.Virus.Collect do executable do - alias Helix.Network.Model.Bounce - alias Helix.Network.Model.Network - alias Helix.Software.Model.File - alias Helix.Software.Process.Virus.Collect, as: VirusCollectProcess - - @type params :: VirusCollectProcess.creation_params - - @type meta :: - %{ - virus: File.t, - network_id: Network.id, - bounce: Bounce.idt | nil - } - resources(_, _, _params, _meta) do %{} end diff --git a/lib/universe/bank/process/bank/account/password_reveal.ex b/lib/universe/bank/process/bank/account/password_reveal.ex index dbb9a2f7..e157c4bd 
100644 --- a/lib/universe/bank/process/bank/account/password_reveal.ex +++ b/lib/universe/bank/process/bank/account/password_reveal.ex @@ -23,6 +23,8 @@ process Helix.Universe.Bank.Process.Bank.Account.RevealPassword do account: BankAccount.t } + @type executable_meta :: map + @type objective :: %{cpu: resource_usage} @type resources :: %{ @@ -37,9 +39,9 @@ process Helix.Universe.Bank.Process.Bank.Account.RevealPassword do account: BankAccount.t } - @spec new(creation_params) :: + @spec new(creation_params, executable_meta) :: t - def new(%{token_id: token_id, account: account = %BankAccount{}}) do + def new(%{token_id: token_id, account: account = %BankAccount{}}, _) do %__MODULE__{ token_id: token_id, atm_id: account.atm_id, @@ -86,16 +88,6 @@ process Helix.Universe.Bank.Process.Bank.Account.RevealPassword do executable do - alias Helix.Universe.Bank.Process.Bank.Account.RevealPassword, - as: RevealPasswordProcess - - @type params :: RevealPasswordProcess.creation_params - - @type meta :: - %{ - optional(atom) => term - } - resources(_, _, %{account: account}, _) do %{account: account} end diff --git a/lib/universe/bank/process/bank/transfer.ex b/lib/universe/bank/process/bank/transfer.ex index 166b125e..45278bc7 100644 --- a/lib/universe/bank/process/bank/transfer.ex +++ b/lib/universe/bank/process/bank/transfer.ex @@ -19,6 +19,8 @@ process Helix.Universe.Bank.Process.Bank.Transfer do transfer: BankTransfer.t } + @type executable_meta :: map + @type objective :: %{cpu: resource_usage} @type resources :: %{ @@ -33,9 +35,9 @@ process Helix.Universe.Bank.Process.Bank.Transfer do transfer: BankTransfer.t } - @spec new(creation_params) :: + @spec new(creation_params, executable_meta) :: t - def new(%{transfer: transfer = %BankTransfer{}}) do + def new(%{transfer: transfer = %BankTransfer{}}, _) do %__MODULE__{ transfer_id: transfer.transfer_id, amount: transfer.amount @@ -99,11 +101,6 @@ process Helix.Universe.Bank.Process.Bank.Transfer do executable do - alias 
Helix.Universe.Bank.Process.Bank.Transfer, as: BankTransferProcess - - @type params :: BankTransferProcess.creation_params - @type meta :: term - resources(_gateway, _atm, %{transfer: transfer}, _meta) do %{transfer: transfer} end diff --git a/priv/repo/process/migrations/20180729045339_add_target_log.exs b/priv/repo/process/migrations/20180729045339_add_target_log.exs new file mode 100644 index 00000000..5971ad6d --- /dev/null +++ b/priv/repo/process/migrations/20180729045339_add_target_log.exs @@ -0,0 +1,11 @@ +defmodule Helix.Process.Repo.Migrations.AddTargetLog do + use Ecto.Migration + + def change do + alter table(:processes, primary_key: false) do + add :tgt_log_id, :inet + end + + create index(:processes, [:tgt_log_id], where: "tgt_log_id IS NOT NULL") + end +end diff --git a/test/software/action/flow/software/log_forger_test.exs b/test/software/action/flow/software/log_forger_test.exs deleted file mode 100644 index 733e4033..00000000 --- a/test/software/action/flow/software/log_forger_test.exs +++ /dev/null @@ -1,80 +0,0 @@ -# defmodule Helix.Software.Action.Flow.Software.LogForgerTest do - -# use Helix.Test.Case.Integration - -# alias Helix.Log.Action.Log, as: LogAction -# alias Helix.Log.Model.Log -# alias Helix.Software.Action.Flow.Software.LogForger, as: LogForgerFlow -# alias Helix.Software.Model.SoftwareType.LogForge - -# alias Helix.Test.Process.TOPHelper -# alias Helix.Test.Server.Setup, as: ServerSetup -# alias Helix.Test.Software.Helper, as: SoftwareHelper -# alias Helix.Test.Software.Setup, as: SoftwareSetup - -# describe "execute/3 for 'create' operation" do -# test "fails if target log doesn't exist" do -# {server, %{entity: entity}} = ServerSetup.server() - -# storage_id = SoftwareHelper.get_storage_id(server) -# {file, _} = SoftwareSetup.file(type: :log_forger, storage_id: storage_id) - -# params = %{ -# target_log_id: Log.ID.generate(), -# message: "I say hey hey", -# operation: :edit, -# entity_id: entity.entity_id -# } - -# result = 
LogForgerFlow.execute(file, server.server_id, params) -# assert {:error, {:log, :notfound}} == result -# end - -# test "starts log_forger process on success" do -# {server, %{entity: entity}} = ServerSetup.server() - -# storage_id = SoftwareHelper.get_storage_id(server) -# {file, _} = SoftwareSetup.file(type: :log_forger, storage_id: storage_id) - -# {:ok, log, _} = -# LogAction.create(server, entity.entity_id, "Root logged in") - -# params = %{ -# target_log_id: log.log_id, -# message: "", -# operation: :edit, -# entity_id: entity.entity_id -# } - -# result = LogForgerFlow.execute(file, server.server_id, params) -# assert {:ok, process} = result -# assert %LogForge{} = process.data -# assert "log_forger" == process.type - -# TOPHelper.top_stop(server) -# end -# end - -# describe "log_forger 'create' operation" do -# test "starts log_forger process on success" do -# {server, %{entity: entity}} = ServerSetup.server() - -# storage_id = SoftwareHelper.get_storage_id(server) -# {file, _} = SoftwareSetup.file(type: :log_forger, storage_id: storage_id) - -# params = %{ -# target_id: server, -# message: "", -# operation: :create, -# entity_id: entity.entity_id -# } - -# result = LogForgerFlow.execute(file, server.server_id, params) -# assert {:ok, process} = result -# assert %LogForge{} = process.data -# assert "log_forger" == process.type - -# TOPHelper.top_stop(server) -# end -# end -# end diff --git a/test/support/process/fake_process.ex b/test/support/process/fake_process.ex index 8dfb1752..82fbf7d4 100644 --- a/test/support/process/fake_process.ex +++ b/test/support/process/fake_process.ex @@ -8,13 +8,16 @@ defmodule Helix.Test.Process do process_struct [:file_id] + @type creation_params :: term + @type executable_meta :: term + def new do %__MODULE__{ file_id: Random.number() } end - def new(%{file_id: file_id}) do + def new(%{file_id: file_id}, _) do %__MODULE__{ file_id: file_id } @@ -57,9 +60,6 @@ defmodule Helix.Test.Process do executable do - @type params :: 
term - @type meta :: term - resources(_, _, _, _) do %{} end @@ -70,6 +70,9 @@ defmodule Helix.Test.Process do process_struct [:foo] + @type creation_params :: term + @type executable_meta :: term + def new do %__MODULE__{ foo: :bar @@ -103,9 +106,6 @@ defmodule Helix.Test.Process do executable do - @type params :: term - @type meta :: term - resources(_, _, _, _) do %{} end diff --git a/test/support/process/setup.ex b/test/support/process/setup.ex index c7a97b6c..e738ca01 100644 --- a/test/support/process/setup.ex +++ b/test/support/process/setup.ex @@ -9,7 +9,7 @@ defmodule Helix.Test.Process.Setup do alias Helix.Test.Process.Data.Setup, as: ProcessDataSetup alias Helix.Test.Process.Helper, as: ProcessHelper - @internet NetworkHelper.internet_id() + @internet_id NetworkHelper.internet_id() def process(opts \\ []) do {_, related = %{params: params}} = fake_process(opts) @@ -31,6 +31,7 @@ defmodule Helix.Test.Process.Setup do - network_id: - src_connection_id: - tgt_connection_id: + - tgt_log_id: - single_server: - type: Set process type. If not specified, a random one is generated. - data: Data for that specific process type. Ignored if `type` is not set. 
@@ -38,17 +39,6 @@ defmodule Helix.Test.Process.Setup do Related: source_entity_id :: Entity.id, target_entity_id :: Entity.id """ def fake_process(opts \\ []) do - tmp_check = fn key -> - if opts[key] do - raise "#{inspect key} no longer used" - end - end - - tmp_check.(:file_id) - tmp_check.(:target_file_id) - tmp_check.(:connection_id) - tmp_check.(:target_connection_id) - gateway_id = Keyword.get(opts, :gateway_id, ServerHelper.id()) source_entity_id = Keyword.get(opts, :entity_id, EntityHelper.id()) {target_id, target_entity_id} = @@ -65,7 +55,8 @@ defmodule Helix.Test.Process.Setup do tgt_file_id = Keyword.get(opts, :tgt_file_id, nil) src_connection_id = Keyword.get(opts, :src_connection_id, nil) tgt_connection_id = Keyword.get(opts, :tgt_connection_id, nil) - network_id = Keyword.get(opts, :network_id, @internet) + tgt_log_id = Keyword.get(opts, :tgt_log_id, nil) + network_id = Keyword.get(opts, :network_id, @internet_id) meta = %{ source_entity_id: source_entity_id, @@ -76,6 +67,7 @@ defmodule Helix.Test.Process.Setup do tgt_file_id: tgt_file_id, src_connection_id: src_connection_id, tgt_connection_id: tgt_connection_id, + tgt_log_id: tgt_log_id, network_id: network_id } @@ -104,6 +96,7 @@ defmodule Helix.Test.Process.Setup do network_id: meta.network_id, src_connection_id: meta.src_connection_id, tgt_connection_id: meta.tgt_connection_id, + tgt_log_id: meta.tgt_log_id, static: static, l_limit: l_limit, r_limit: r_limit, @@ -126,4 +119,9 @@ defmodule Helix.Test.Process.Setup do {process, related} end + + def fake_process!(opts) do + {process, _} = fake_process(opts) + process + end end diff --git a/test/support/process/setup/data.ex b/test/support/process/setup/data.ex index 4d73166c..60410d7d 100644 --- a/test/support/process/setup/data.ex +++ b/test/support/process/setup/data.ex @@ -16,8 +16,8 @@ defmodule Helix.Test.Process.Data.Setup do """ # Processes + alias Helix.Log.Process.Forge, as: LogForgeProcess alias Helix.Software.Process.Cracker.Bruteforce, 
as: CrackerBruteforce - alias Helix.Software.Model.SoftwareType.LogForge alias Helix.Software.Process.File.Transfer, as: FileTransferProcess alias Helix.Software.Process.File.Install, as: FileInstallProcess @@ -142,15 +142,17 @@ defmodule Helix.Test.Process.Data.Setup do cond do data_opts[:target_server_ip] -> data_opts[:target_server_ip] + data_opts[:real_ip] -> raise "todo" + true -> Random.ipv4() end src_file_id = meta.src_file_id || SoftwareHelper.id() - data = CrackerBruteforce.new(%{target_server_ip: target_server_ip}) + data = CrackerBruteforce.new(%{target_server_ip: target_server_ip}, %{}) meta = %{meta| src_file_id: src_file_id} @@ -172,7 +174,7 @@ defmodule Helix.Test.Process.Data.Setup do src_connection_id = meta.src_connection_id || NetworkHelper.connection_id() tgt_file_id = meta.tgt_file_id || SoftwareHelper.id() - data = FileInstallProcess.new(%{backend: :virus}) + data = FileInstallProcess.new(%{backend: :virus}, %{}) meta = meta @@ -191,38 +193,48 @@ defmodule Helix.Test.Process.Data.Setup do end @doc """ - Opts for forge: - - operation: :edit | :create. Defaults to :edit. - - target_log_id: Which log to edit. Won't generate a real one. - - message: Revision message. - + `:log_forge` process will randomly select either `edit` or `create` operation. + """ + def custom(:log_forge, data_opts, meta) do + [:log_forge_edit, :log_forge_create] + |> Enum.random() + |> custom(data_opts, meta) + end + + @doc """ + Opts for `log_forge_edit:` + - tgt_log_id: Which log to edit. Won't generate a real one. + - log_info: Tuple with log type and data + - forger_version: Set forger version. Defaults to 100. + All others are automatically derived from process meta data. """ - def custom(:forge, data_opts, meta) do + def custom(:log_forge_edit, data_opts, meta), + do: custom_log_forge({:log_forge_edit, :edit}, data_opts, meta) + + @doc """ + Opts for `log_forge_create`: + - log_info: Tuple with log type and data. + - forger_version: Set forger version. 
Defaults to 100. + """ + def custom(:log_forge_create, data_opts, meta), + do: custom_log_forge({:log_forge_create, :create}, data_opts, meta) + + defp custom_log_forge({process_type, action}, data_opts, meta) do target_id = meta.target_id - target_log_id = Keyword.get(data_opts, :target_log_id, LogHelper.id()) entity_id = meta.source_entity_id - operation = Keyword.get(data_opts, :operation, :edit) - message = LogHelper.random_message() - version = 100 + version = Keyword.get(data_opts, :forger_version, 100) src_file_id = meta.src_file_id || SoftwareHelper.id() + {log_type, log_data} = + Keyword.get(data_opts, :log_info, LogHelper.log_info()) data = - %LogForge{ - target_id: target_id, - entity_id: entity_id, - operation: operation, - message: message, - version: version + %LogForgeProcess{ + log_type: log_type, + log_data: log_data, + forger_version: version } - data = - if operation == :edit do - Map.merge(data, %{target_log_id: target_log_id}) - else - data - end - resources = %{ l_dynamic: [:cpu], @@ -233,13 +245,21 @@ defmodule Helix.Test.Process.Data.Setup do meta = %{meta| src_file_id: src_file_id} - {:log_forger, data, meta, resources} + meta = + if action == :edit do + %{meta| tgt_log_id: meta.tgt_log_id || LogHelper.id()} + else + meta + end + + {process_type, data, meta, resources} end defp custom_implementations do [ :bruteforce, - :forge, + :log_forge_edit, + :log_forge_create, :file_download, :file_upload, :install_virus From 2b43835285f755f09e3f4851e68189bdb60c0eef Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Fri, 3 Aug 2018 22:16:45 -0300 Subject: [PATCH 03/14] Add handlers for LogForgeProcessedEvent --- lib/balance/software.ex | 1 + lib/event/dispatcher.ex | 13 +--- lib/event/loggable/flow.ex | 5 +- lib/event/loggable/utils.ex | 2 +- lib/id/domain.ex | 17 +++-- lib/log/action/log.ex | 21 +----- lib/log/event/handler/log.ex | 50 ++++++++----- lib/log/event/log.ex | 98 +++++++++++------------- lib/log/process/recover.ex | 0 
lib/software/model/file/module.ex | 6 +- test/log/event/handler/log_test.exs | 112 ++++++++++++++++------------ test/log/event/log_test.exs | 29 ++++--- test/support/event/setup/log.ex | 37 ++++++--- test/support/macros.ex | 21 ++++++ 14 files changed, 223 insertions(+), 189 deletions(-) create mode 100644 lib/log/process/recover.ex diff --git a/lib/balance/software.ex b/lib/balance/software.ex index 59d33e1b..c7401a28 100644 --- a/lib/balance/software.ex +++ b/lib/balance/software.ex @@ -29,5 +29,6 @@ balance Software do |> mul(modules.overflow.version) bruteforce + overflow + # |> float_div(2) end end diff --git a/lib/event/dispatcher.ex b/lib/event/dispatcher.ex index 0ed7ba04..48cb24aa 100644 --- a/lib/event/dispatcher.ex +++ b/lib/event/dispatcher.ex @@ -144,9 +144,10 @@ defmodule Helix.Event.Dispatcher do ############################################################################## # All + event LogEvent.Forge.Processed event LogEvent.Log.Created event LogEvent.Log.Deleted - # event LogEvent.Log.Modified + event LogEvent.Log.Revised ############################################################################## # Process events @@ -209,8 +210,6 @@ defmodule Helix.Event.Dispatcher do event SoftwareEvent.File.Transfer.Processed event SoftwareEvent.Firewall.Started event SoftwareEvent.Firewall.Stopped - event SoftwareEvent.LogForge.LogCreate.Processed - event SoftwareEvent.LogForge.LogEdit.Processed event SoftwareEvent.Virus.Collect.Processed event SoftwareEvent.Virus.Collected event SoftwareEvent.Virus.Installed @@ -249,14 +248,6 @@ defmodule Helix.Event.Dispatcher do ProcessHandler.Cracker, :firewall_stopped - event SoftwareEvent.LogForge.LogCreate.Processed, - LogHandler.Log, - :log_forge_conclusion - - event SoftwareEvent.LogForge.LogEdit.Processed, - LogHandler.Log, - :log_forge_conclusion - event SoftwareEvent.Virus.Collect.Processed, SoftwareHandler.Virus, :handle_collect diff --git a/lib/event/loggable/flow.ex b/lib/event/loggable/flow.ex index 
7a6c8698..9ecd83d6 100644 --- a/lib/event/loggable/flow.ex +++ b/lib/event/loggable/flow.ex @@ -322,10 +322,9 @@ defmodule Helix.Event.Loggable.Flow do {_, _, ip_next} = bounce_map[idx + 1] data = %{ip_prev: ip_prev, ip_next: ip_next, network_id: network_id} - # TODO - log_type = {:connection_bounced, LogType.new(:connection_bounced, data)} + log_info = {:connection_bounced, LogType.new(:connection_bounced, data)} - entry = build_entry(server_id, entity_id, log_type) + entry = build_entry(server_id, entity_id, log_info) {idx + 1, acc ++ [entry]} end diff --git a/lib/event/loggable/utils.ex b/lib/event/loggable/utils.ex index 0f38a7a9..f68ae168 100644 --- a/lib/event/loggable/utils.ex +++ b/lib/event/loggable/utils.ex @@ -39,7 +39,7 @@ defmodule Helix.Event.Loggable.Utils do end end) |> elem(0) - |> Enum.reverse + |> Enum.reverse() |> List.to_string() end diff --git a/lib/id/domain.ex b/lib/id/domain.ex index 8ce5e00c..d7c25554 100644 --- a/lib/id/domain.ex +++ b/lib/id/domain.ex @@ -26,10 +26,10 @@ defmodule Helix.ID.Domain do crc_bruteforce: 23 crc_overflow: 33 install_virus: 43 - log_forger: 53 - virus_collect: 63 - wire_transfer: 73 - bank_reveal_pass: 83 + virus_collect: 53 + wire_transfer: 63 + bank_reveal_pass: 73 + log_forge: 83 reserved_until: F3 reserved_until: F4 @@ -154,10 +154,11 @@ defmodule Helix.ID.Domain do {{:process, :cracker_bruteforce}, 0x23}, {{:process, :cracker_overflow}, 0x33}, {{:process, :install_virus}, 0x43}, - {{:process, :log_forger}, 0x53}, - {{:process, :virus_collect}, 0x63}, - {{:process, :wire_transfer}, 0x73}, - {{:process, :bank_reveal_password}, 0x83}, + {{:process, :virus_collect}, 0x53}, + {{:process, :wire_transfer}, 0x63}, + {{:process, :bank_reveal_password}, 0x73}, + {{:process, :log_forge_create}, 0x83}, + {{:process, :log_forge_edit}, 0x83}, {{:server, :desktop}, 0x05}, {{:server, :mobile}, 0x15}, {{:server, :npc}, 0x25}, diff --git a/lib/log/action/log.ex b/lib/log/action/log.ex index 063d86d7..e8ed9407 100644 --- 
a/lib/log/action/log.ex +++ b/lib/log/action/log.ex @@ -1,20 +1,4 @@ defmodule Helix.Log.Action.Log do - @moduledoc """ - Functions to work with in-game logs. - - An in-game log is a record registering an action done by an entity on a - server. - - It can be forged and recovered to a previous state. - - Its forging mechanics is implemented as an stack where the last revision is - the currently displayed message and to see the original log all revisions must - be recovered. - - Note that on Log context _forging_ and _revising_ are used interchangeably and - means changing the current message of a log by adding a (forged) revision to - it's stack. - """ alias Helix.Entity.Model.Entity alias Helix.Server.Model.Server @@ -23,13 +7,16 @@ defmodule Helix.Log.Action.Log do alias Helix.Log.Event.Log.Created, as: LogCreatedEvent alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent - alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent + alias Helix.Log.Event.Log.Revised, as: LogRevisedEvent @spec create(Server.id, Entity.id, Log.info, pos_integer | nil) :: {:ok, Log.t, [LogCreatedEvent.t]} | :error @doc """ Creates a new log linked to `entity` on `server` with `log_info` as content. + + This log may be natural (created automatically by the game as a result to a + player's action) or artificial (explicitly created using LogForger.Edit). 
""" def create(server_id, entity_id, log_info, forge_version \\ nil) do case LogInternal.create(server_id, entity_id, log_info, forge_version) do diff --git a/lib/log/event/handler/log.ex b/lib/log/event/handler/log.ex index f4a6897d..e726cd86 100644 --- a/lib/log/event/handler/log.ex +++ b/lib/log/event/handler/log.ex @@ -2,14 +2,12 @@ defmodule Helix.Log.Event.Handler.Log do @moduledoc false alias Helix.Event - alias Helix.Log.Action.Log, as: LogAction alias Helix.Event.Loggable + alias Helix.Log.Action.Log, as: LogAction + alias Helix.Log.Model.Log alias Helix.Log.Query.Log, as: LogQuery - alias Helix.Software.Event.LogForge.LogCreate.Processed, - as: LogForgeCreateComplete - alias Helix.Software.Event.LogForge.LogEdit.Processed, - as: LogForgeEditComplete + alias Helix.Log.Event.Forge.Processed, as: LogForgeProcessedEvent @doc """ Generic event handler for all Helix events. If the event implement the @@ -28,24 +26,36 @@ defmodule Helix.Log.Event.Handler.Log do end @doc """ - Forges a revision onto a log or creates a fake new log - """ - def log_forge_conclusion(event = %LogForgeEditComplete{}) do - {:ok, _, events} = - event.target_log_id - |> LogQuery.fetch() - |> LogAction.revise(event.entity_id, event.message, event.version) + Handler called right after a `LogForgeProcess` has completed. It will then + either create a new log out of thin air, or edit an existing log. - Event.emit(events, from: event) + Emits: `LogCreatedEvent`, `LogRevisedEvent` + """ + def log_forge_processed(event = %LogForgeProcessedEvent{action: :create}) do + # `action` is `:create`, so we'll create a new log out of thin air! 
+ result = + LogAction.create( + event.server_id, event.entity_id, event.log_info, event.forger_version + ) + + with {:ok, _, events} <- result do + Event.emit(events) + end end - def log_forge_conclusion(event = %LogForgeCreateComplete{}) do - {:ok, _, events} = LogAction.create( - event.target_id, - event.entity_id, - event.message, - event.version) + def log_forge_processed(event = %LogForgeProcessedEvent{action: :edit}) do + # `action` is `:edit`, so we'll stack up a revision on an existing log + revise = fn log -> + LogAction.revise( + log, event.entity_id, event.log_info, event.forger_version + ) + end - Event.emit(events, from: event) + with \ + log = %Log{} <- LogQuery.fetch(event.target_log_id), + {:ok, _, events} <- revise.(log) + do + Event.emit(events) + end end end diff --git a/lib/log/event/log.ex b/lib/log/event/log.ex index 0f0a4cf2..c6d18c17 100644 --- a/lib/log/event/log.ex +++ b/lib/log/event/log.ex @@ -5,25 +5,27 @@ defmodule Helix.Log.Event.Log do event Created do @moduledoc """ LogCreatedEvent is fired when a brand new log entry is added to the server. + + The newly created log may be either natural (automatically created by the + game) or artificial (explicitly created by the player through LogForge + mechanics). Either way, the receiving end of the event (Client) DOES NOT + know whether the log is natural or artificial. """ - alias Helix.Server.Model.Server alias Helix.Log.Model.Log @type t :: %__MODULE__{ - log: Log.t, - server_id: Server.id + log: Log.t } - event_struct [:server_id, :log] + event_struct [:log] @spec new(Log.t) :: t def new(log = %Log{}) do %__MODULE__{ - log: log, - server_id: log.server_id + log: log } end @@ -40,59 +42,51 @@ defmodule Helix.Log.Event.Log do end def whom_to_publish(event), - do: %{server: event.server_id} + do: %{server: event.log.server_id} end end - # event Modified do - # @moduledoc """ - # LogModifiedEvent is fired when an existing log has changed (revised) or - # has been recovered. 
+ event Revised do + @moduledoc """ + `LogRevisedEvent` is fired when an existing log had a revision added to it. - # TODO: we'll probably want to create a LogRecovered event instead. - # """ + The revision may be stacked up on a natural or artificial log - the log + origin is transparent to the Client. + """ - # alias Helix.Server.Model.Server - # alias Helix.Log.Model.Log + alias Helix.Log.Model.Log - # @type t :: - # %__MODULE__{ - # log: Log.t, - # server_id: Server.id - # } + event_struct [:log] - # event_struct [:server_id, :log] + @type t :: + %__MODULE__{ + log: Log.t + } - # @spec new(Log.t) :: - # t - # def new(log = %Log{}) do - # %__MODULE__{ - # log: log, - # server_id: log.server_id - # } - # end + @spec new(Log.t) :: + t + def new(log = %Log{}) do + %__MODULE__{ + log: log + } + end - # publish do + publish do - # alias HELL.ClientUtils + alias Helix.Log.Public.Index, as: LogIndex - # @event :log_modified + @event :log_revised - # def generate_payload(event, _socket) do - # data = %{ - # log_id: to_string(event.log.log_id), - # server_id: to_string(event.server_id), - # timestamp: ClientUtils.to_timestamp(event.log.creation_time), - # message: event.log.message - # } + def generate_payload(event, _socket) do + data = LogIndex.render_log(event.log) - # {:ok, data} - # end + {:ok, data} + end - # def whom_to_publish(event), - # do: %{server: event.server_id} - # end - # end + def whom_to_publish(event), + do: %{server: event.log.server_id} + end + end event Deleted do @moduledoc """ @@ -100,23 +94,20 @@ defmodule Helix.Log.Event.Log do revision, leading to the log deletion. 
""" - alias Helix.Server.Model.Server alias Helix.Log.Model.Log @type t :: %__MODULE__{ - log: Log.t, - server_id: Server.id + log: Log.t } - event_struct [:server_id, :log] + event_struct [:log] @spec new(Log.t) :: t def new(log = %Log{}) do %__MODULE__{ - log: log, - server_id: log.server_id + log: log } end @@ -126,15 +117,14 @@ defmodule Helix.Log.Event.Log do def generate_payload(event, _socket) do data = %{ - log_id: to_string(event.log.log_id), - server_id: to_string(event.server_id) + log_id: to_string(event.log.log_id) } {:ok, data} end def whom_to_publish(event), - do: %{server: event.server_id} + do: %{server: event.log.server_id} end end end diff --git a/lib/log/process/recover.ex b/lib/log/process/recover.ex new file mode 100644 index 00000000..e69de29b diff --git a/lib/software/model/file/module.ex b/lib/software/model/file/module.ex index 9da62ecd..ae3fe84b 100644 --- a/lib/software/model/file/module.ex +++ b/lib/software/model/file/module.ex @@ -14,6 +14,7 @@ defmodule Helix.Software.Model.File.Module do use Ecto.Schema import Ecto.Changeset + import HELL.Ecto.Macros alias Ecto.Changeset alias HELL.Constant @@ -129,11 +130,8 @@ defmodule Helix.Software.Model.File.Module do end end - defmodule Query do + query do - import Ecto.Query - - alias Ecto.Queryable alias HELL.Constant alias Helix.Software.Model.File diff --git a/test/log/event/handler/log_test.exs b/test/log/event/handler/log_test.exs index ed945811..69164799 100644 --- a/test/log/event/handler/log_test.exs +++ b/test/log/event/handler/log_test.exs @@ -2,13 +2,10 @@ defmodule Helix.Log.Event.Handler.LogTest do use Helix.Test.Case.Integration + import Helix.Test.Macros import Helix.Test.Log.Macros alias Helix.Event - alias Helix.Software.Event.LogForge.LogEdit.Processed, - as: LogForgeEditComplete - alias Helix.Software.Event.LogForge.LogCreate.Processed, - as: LogForgeCreateComplete alias Helix.Log.Event.Handler.Log, as: LogHandler alias Helix.Log.Query.Log, as: LogQuery alias Helix.Log.Repo 
@@ -16,10 +13,11 @@ defmodule Helix.Log.Event.Handler.LogTest do alias Helix.Test.Event.Setup, as: EventSetup alias Helix.Test.Entity.Setup, as: EntitySetup alias Helix.Test.Network.Setup, as: NetworkSetup + alias Helix.Test.Process.Setup, as: ProcessSetup alias Helix.Test.Server.Helper, as: ServerHelper alias Helix.Test.Server.Setup, as: ServerSetup - # alias Helix.Test.Log.Factory, as: LogFactory alias Helix.Test.Log.Helper, as: LogHelper + alias Helix.Test.Log.Setup, as: LogSetup describe "handle_event/1" do test "follows the LoggableFlow" do @@ -114,46 +112,66 @@ defmodule Helix.Log.Event.Handler.LogTest do end end - # describe "log_forge_conclusion/1 for LogForge.Edit" do - # test "adds revision to target log" do - # target_log = LogFactory.insert(:log) - # {entity, _} = EntitySetup.entity() - # message = "I just got hidden" - - # event = %LogForgeEditComplete{ - # target_log_id: target_log.log_id, - # entity_id: entity.entity_id, - # message: message, - # version: 100 - # } - - # revisions_before = LogQuery.count_revisions_of_entity(target_log, entity) - # LogHandler.log_forge_conclusion(event) - # revisions_after = LogQuery.count_revisions_of_entity(target_log, entity) - # target_log = LogQuery.fetch(target_log.log_id) - - # assert revisions_after == revisions_before + 1 - # assert message == target_log.message - # end - # end - - # describe "log_forge_conclusion/1 for LogForge.Create" do - # test "creates specified log on target server" do - # {server, %{entity: entity}} = ServerSetup.server() - - # message = "Mess with the best, die like the rest" - - # event = %LogForgeCreateComplete{ - # entity_id: entity.entity_id, - # target_id: server.server_id, - # message: message, - # version: 456 - # } - - # LogHandler.log_forge_conclusion(event) - - # assert [log] = LogQuery.get_logs_on_server(server) - # assert [%{forge_version: 456}] = Repo.preload(log, :revisions).revisions - # end - # end + describe "log_forge_processed/1 for LogForge.Edit" do + test "adds a 
revision to the target log" do + log = LogSetup.log!() + process = + ProcessSetup.fake_process!( + type: :log_forge_edit, + tgt_log_id: log.log_id, + data: [forger_version: 50] + ) + event = EventSetup.Log.forge_processed(process: process) + + # Sanity check: we are editing the log we've created + assert event.target_log_id == process.tgt_log_id + assert event.action == :edit + + log_before = LogQuery.fetch(log.log_id) + + # Simulate handling of the event + LogHandler.log_forge_processed(event) + + log_after = LogQuery.fetch(log.log_id) + + # `log_after` had a revision added to it. + assert log_after.revision_id == log_before.revision_id + 1 + assert log_after.revision != log_before.revision + + # `log_after` revision is exactly the one specified at `event`/`process` + assert log_after.revision.type == process.data.log_type + assert_map_str log_after.revision.data, + Map.from_struct(process.data.log_data) + assert log_after.revision.forge_version == 50 + end + end + + describe "log_forge_processed/1 for LogForge.Create" do + test "creates a new log" do + process = + ProcessSetup.fake_process!( + type: :log_forge_create, data: [forger_version: 50] + ) + event = EventSetup.Log.forge_processed(process: process) + + # Sanity check: we are creating a new log + refute event.target_log_id + assert event.action == :create + + # Initially, the process (target) server has no logs + assert [] == LogQuery.get_logs_on_server(process.target_id) + + # Simulate handling of the event + LogHandler.log_forge_processed(event) + + # Now the process server has a new log + assert [log] = LogQuery.get_logs_on_server(process.target_id) + + # And the new log has exactly the data described at `event`/`process` + assert log.revision_id == 1 + assert log.revision.type == process.data.log_type + assert_map_str log.revision.data, Map.from_struct(process.data.log_data) + assert log.revision.forge_version == 50 + end + end end diff --git a/test/log/event/log_test.exs b/test/log/event/log_test.exs 
index afd3c6af..2144425d 100644 --- a/test/log/event/log_test.exs +++ b/test/log/event/log_test.exs @@ -33,23 +33,23 @@ defmodule Helix.Log.Event.LogTest do end end - # describe "LogModifiedEvent" do - # test "Publishable.generate_payload/2" do - # event = EventSetup.Log.modified() + describe "LogRevisedEvent" do + test "Publishable.generate_payload/2" do + event = EventSetup.Log.revised() - # # Generates the payload - # assert {:ok, data} = Publishable.generate_payload(event, @mocked_socket) + # Generates the payload + assert {:ok, data} = Publishable.generate_payload(event, @mocked_socket) - # # Returned payload is json-friendly - # assert data.log_id == to_string(event.log.log_id) - # assert data.message == event.log.message - # assert data.server_id == to_string(event.log.server_id) - # assert is_float(data.timestamp) + # Returned payload is json-friendly + assert data.log_id == to_string(event.log.log_id) + assert data.type == to_string(event.log.revision.type) + assert data.data + assert is_float(data.timestamp) - # # Returned event is correct - # assert "log_modified" == Publishable.get_event_name(event) - # end - # end + # Returned event is correct + assert "log_revised" == Publishable.get_event_name(event) + end + end describe "LogDeletedEvent" do test "Publishable.generate_payload/2" do @@ -60,7 +60,6 @@ defmodule Helix.Log.Event.LogTest do # Returned payload is json-friendly assert data.log_id == to_string(event.log.log_id) - assert data.server_id == to_string(event.log.server_id) # Returned event is correct assert "log_deleted" == Publishable.get_event_name(event) diff --git a/test/support/event/setup/log.ex b/test/support/event/setup/log.ex index c6eea44d..4a163303 100644 --- a/test/support/event/setup/log.ex +++ b/test/support/event/setup/log.ex @@ -1,27 +1,46 @@ defmodule Helix.Test.Event.Setup.Log do + alias Helix.Log.Model.Log + + alias Helix.Log.Event.Forge.Processed, as: LogForgeProcessedEvent + alias Helix.Log.Event.Log.Created, as: 
LogCreatedEvent alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent - # alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent - alias Helix.Log.Model.Log + alias Helix.Log.Event.Log.Revised, as: LogRevisedEvent alias Helix.Test.Log.Setup, as: LogSetup + alias Helix.Test.Process.Setup, as: ProcessSetup def created, do: created(generate_fake_log()) - def created(log = %Log{}), do: LogCreatedEvent.new(log) - # def modified, - # do: modified(generate_fake_log()) - - # def modified(log = %Log{}), - # do: LogModifiedEvent.new(log) + def revised, + do: revised(generate_fake_log()) + def revised(log = %Log{}), + do: LogRevisedEvent.new(log) + + @doc """ + Opts: + - process: Source process. + - process_type: `:log_forge_edit` or `:log_forge_created`. If not set, a + random type will be selected + """ + def forge_processed(opts) do + process = + if opts[:process] do + opts[:process] + else + process_type = Keyword.get(opts, :process_type, :log_forge) + ProcessSetup.fake_process!(type: process_type) + end + + LogForgeProcessedEvent.new(process, process.data) + end def deleted, do: deleted(generate_fake_log()) - def deleted(log = %Log{}), do: LogDeletedEvent.new(log) diff --git a/test/support/macros.ex b/test/support/macros.ex index e5c1b3bc..4320670d 100644 --- a/test/support/macros.ex +++ b/test/support/macros.ex @@ -1,5 +1,8 @@ defmodule Helix.Test.Macros do + alias HELL.Utils + alias HELL.MapUtils + defmacro assert_between(a, range_min, range_max) do quote bind_quoted: binding() do assert a >= range_min @@ -14,6 +17,24 @@ defmodule Helix.Test.Macros do end end + @doc """ + Ensures two maps are identical, ignoring type differences. Especially useful + for cases where there's no practical distinction between Helix internal format + (e.g. structs or IDs) and plain maps. 
+ + Example: + + %{foo: :bar} == %{"foo" => "bar"} # True + """ + defmacro assert_map_str(a, b) do + quote do + a = unquote(a) |> Utils.stringify_map() |> MapUtils.atomize_keys() + b = unquote(b) |> Utils.stringify_map() |> MapUtils.atomize_keys() + + assert a == b + end + end + defmacro timeout(severity \\ quote(do: _)) do env = System.get_env("HELIX_TEST_ENV") env_multiplier = get_env_multiplier(env) From d0cefe245d5491d211363bae1a439ad6b43da052 Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Sat, 4 Aug 2018 02:03:22 -0300 Subject: [PATCH 04/14] Add LogHenforcer and LogForgeHenforcer --- lib/log/henforcer/log.ex | 46 +++++++++++++ lib/log/henforcer/log/forge.ex | 98 +++++++++++++++++++++++++++ test/log/henforcer/log/forge_test.exs | 81 ++++++++++++++++++++++ test/log/henforcer/log_test.exs | 43 ++++++++++++ test/support/software/setup.ex | 32 ++++++--- 5 files changed, 291 insertions(+), 9 deletions(-) create mode 100644 lib/log/henforcer/log.ex create mode 100644 lib/log/henforcer/log/forge.ex create mode 100644 test/log/henforcer/log/forge_test.exs create mode 100644 test/log/henforcer/log_test.exs diff --git a/lib/log/henforcer/log.ex b/lib/log/henforcer/log.ex new file mode 100644 index 00000000..10325ade --- /dev/null +++ b/lib/log/henforcer/log.ex @@ -0,0 +1,46 @@ +defmodule Helix.Log.Henforcer.Log do + + import Helix.Henforcer + + alias Helix.Server.Model.Server + alias Helix.Log.Model.Log + alias Helix.Log.Query.Log, as: LogQuery + + @type log_exists_relay :: %{log: Log.t} + @type log_exists_relay_partial :: %{} + @type log_exists_error :: + {false, {:log, :not_found}, log_exists_relay_partial} + + @spec log_exists?(Log.id) :: + {true, log_exists_relay} + | log_exists_error + @doc """ + Henforces that the given `log_id` exists on the database. 
+ """ + def log_exists?(log_id = %Log.ID{}) do + with log = %Log{} <- LogQuery.fetch(log_id) do + reply_ok(%{log: log}) + else + _ -> + reply_error({:log, :not_found}) + end + end + + @type belongs_to_server_relay :: %{} + @type belongs_to_server_relay_partial :: %{} + @type belongs_to_server_error :: + {false, {:log, :not_belongs}, belongs_to_server_relay_partial} + + @spec belongs_to_server?(Log.t, Server.idt) :: + {true, belongs_to_server_relay} + | belongs_to_server_error + @doc """ + Henforces that the given log belongs to the given server. + """ + def belongs_to_server?(%Log{server_id: s}, %Server{server_id: s}), + do: reply_ok() + def belongs_to_server?(%Log{server_id: s}, s = %Server.ID{}), + do: reply_ok() + def belongs_to_server?(%Log{}, _), + do: reply_error({:log, :not_belongs}) +end diff --git a/lib/log/henforcer/log/forge.ex b/lib/log/henforcer/log/forge.ex new file mode 100644 index 00000000..fbbc8576 --- /dev/null +++ b/lib/log/henforcer/log/forge.ex @@ -0,0 +1,98 @@ +defmodule Helix.Log.Henforcer.Log.Forge do + + import Helix.Henforcer + + alias Helix.Server.Model.Server + alias Helix.Server.Henforcer.Server, as: ServerHenforcer + alias Helix.Software.Henforcer.File, as: FileHenforcer + alias Helix.Log.Model.Log + alias Helix.Log.Henforcer.Log, as: LogHenforcer + + @type can_edit_relay :: %{log: Log.t, gateway: Server.t, forger: File.t} + @type can_edit_relay_partial :: map + @type can_edit_error :: + LogHenforcer.log_exists_error + | LogHenforcer.belongs_to_server_error + | ServerHenforcer.server_exists_error + | exists_forger_error + + @spec can_edit?(Log.id, Server.id, Server.id) :: + {true, can_edit_relay} + | can_edit_error + @doc """ + Henforces that the player can edit the given `log_id`. + + Among other things, it makes sure the log exists and the player have a valid + LogForger on her gateway. 
+ """ + def can_edit?(log_id = %Log.ID{}, gateway_id, target_id) do + with \ + {true, r1} <- LogHenforcer.log_exists?(log_id), + log = r1.log, + + {true, _} <- LogHenforcer.belongs_to_server?(log, target_id), + + {true, r2} <- ServerHenforcer.server_exists?(gateway_id), + r2 = replace(r2, :server, :gateway), + gateway = r2.gateway, + + {true, r3} <- exists_forger?(:log_edit, gateway), + r3 = replace(r3, :file, :forger, only: true) + do + [r1, r2, r3] + |> relay() + |> reply_ok() + end + end + + @type can_create_relay :: %{gateway: Server.t, forger: File.t} + @type can_create_relay_partial :: map + @type can_create_error :: + ServerHenforcer.server_exists_error + | exists_forger_error + + @spec can_create?(Server.id) :: + {true, can_create_relay} + | can_create_error + @doc """ + Henforces that the player can create a log. Basically it ensures the player + have a valid LogForger. + """ + def can_create?(gateway_id) do + with \ + {true, r1} <- ServerHenforcer.server_exists?(gateway_id), + r1 = replace(r1, :server, :gateway), + gateway = r1.gateway, + {true, r2} <- exists_forger?(:log_create, gateway), + r2 = replace(r2, :file, :forger, only: true) + do + [r1, r2] + |> relay() + |> reply_ok() + end + end + + @type exists_forger_relay :: FileHenforcer.exists_software_module_relay + @type exists_forger_relay_partial :: + FileHenforcer.exists_software_module_relay_partial + @type exists_forger_error :: + {false, {:forger, :not_found}, exists_forger_relay_partial} + + @spec exists_forger?(:log_create | :log_edit, Server.t) :: + {true, exists_forger_relay} + | exists_forger_error + @doc """ + Ensures that exists a Forger file on `server`, sorting the result by `module` + (either `:log_edit` or `:log_create` in this context). + + It's simply a wrapper over `FileHenforcer.exists_software_module?` used to + generate a more meaningful error message ("forger_not_found") instead of + "module_not_found". 
+ """ + def exists_forger?(module, server = %Server{}) do + henforce_else( + FileHenforcer.exists_software_module?(module, server), + {:forger, :not_found} + ) + end +end diff --git a/test/log/henforcer/log/forge_test.exs b/test/log/henforcer/log/forge_test.exs new file mode 100644 index 00000000..da3f45f8 --- /dev/null +++ b/test/log/henforcer/log/forge_test.exs @@ -0,0 +1,81 @@ +defmodule Helix.Log.Henforcer.Log.ForgeTest do + + use Helix.Test.Case.Integration + + import Helix.Test.Henforcer.Macros + + alias Helix.Log.Henforcer.Log.Forge, as: LogForgeHenforcer + + alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Server.Setup, as: ServerSetup + alias Helix.Test.Software.Setup, as: SoftwareSetup + alias Helix.Test.Log.Setup, as: LogSetup + + describe "can_edit?/3" do + test "accepts when everything is OK" do + {log, _} = LogSetup.log() + + gateway = ServerSetup.server!() + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + assert {true, relay} = + LogForgeHenforcer.can_edit?( + log.log_id, gateway.server_id, log.server_id + ) + + assert_relay relay, [:log, :forger, :gateway] + + assert relay.log.log_id == log.log_id + assert relay.gateway == gateway + assert relay.forger == forger + end + + test "rejects when player does not have a LogForger software" do + {log, _} = LogSetup.log() + gateway = ServerSetup.server!() + + assert {false, reason, _} = + LogForgeHenforcer.can_edit?( + log.log_id, gateway.server_id, log.server_id + ) + + assert reason == {:forger, :not_found} + end + + test "rejects when log does not belong to the given target (server)" do + {log, _} = LogSetup.log() + + gateway = ServerSetup.server!() + SoftwareSetup.log_forger(server_id: gateway.server_id) + + assert {false, reason, _} = + LogForgeHenforcer.can_edit?( + log.log_id, gateway.server_id, ServerHelper.id() + ) + + assert reason == {:log, :not_belongs} + end + end + + describe "can_create?/1" do + test "accepts when everything is ok" do + gateway = 
ServerSetup.server!() + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + assert {true, relay} = LogForgeHenforcer.can_create?(gateway.server_id) + + assert_relay relay, [:gateway, :forger] + + assert relay.gateway == gateway + assert relay.forger == forger + end + + test "rejects when player does not have a LogForger" do + gateway = ServerSetup.server!() + + assert {false, reason, _} = + LogForgeHenforcer.can_create?(gateway.server_id) + assert reason == {:forger, :not_found} + end + end +end diff --git a/test/log/henforcer/log_test.exs b/test/log/henforcer/log_test.exs new file mode 100644 index 00000000..d7415c26 --- /dev/null +++ b/test/log/henforcer/log_test.exs @@ -0,0 +1,43 @@ +defmodule Helix.Log.Henforcer.LogTest do + + use Helix.Test.Case.Integration + + import Helix.Test.Henforcer.Macros + + alias Helix.Log.Henforcer.Log, as: LogHenforcer + + alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Log.Helper, as: LogHelper + alias Helix.Test.Log.Setup, as: LogSetup + + describe "log_exists?/1" do + test "accepts when log exists" do + {log, _} = LogSetup.log() + + assert {true, relay} = LogHenforcer.log_exists?(log.log_id) + + assert_relay relay, [:log] + end + + test "rejects when log doesn't exists" do + assert {false, reason, _} = LogHenforcer.log_exists?(LogHelper.id()) + assert reason == {:log, :not_found} + end + end + + describe "belongs_to_server?/2" do + test "accepts when log belongs to server" do + {log, _} = LogSetup.log() + + assert {true, %{}} == LogHenforcer.belongs_to_server?(log, log.server_id) + end + + test "rejects when log does not belong to server" do + {fake_log, _} = LogSetup.fake_log() + + assert {false, reason, _} = + LogHenforcer.belongs_to_server?(fake_log, ServerHelper.id()) + assert reason == {:log, :not_belongs} + end + end +end diff --git a/test/support/software/setup.ex b/test/support/software/setup.ex index ae19c4ca..71cec596 100644 --- a/test/support/software/setup.ex +++ 
b/test/support/software/setup.ex @@ -213,24 +213,38 @@ defmodule Helix.Test.Software.Setup do @doc """ - bruteforce: set bruteforce module version. Defaults to random. - overflow: set overflow module version. Defaults to random. + Remaining opts are passed to `file/1` """ + def cracker!(opts \\ []), + do: cracker(opts) |> elem(0) def cracker(opts \\ []) do - bruteforce = Access.get(opts, :bruteforce, SoftwareHelper.random_version()) - overflow = Access.get(opts, :overflow, SoftwareHelper.random_version()) - - version_map = %{ - bruteforce: bruteforce, - overflow: overflow - } + bruteforce = Keyword.get(opts, :bruteforce, SoftwareHelper.random_version()) + overflow = Keyword.get(opts, :overflow, SoftwareHelper.random_version()) + version_map = %{bruteforce: bruteforce, overflow: overflow} modules = SoftwareHelper.generate_module(:cracker, version_map) file(opts ++ [type: :cracker, modules: modules]) end - def cracker!(opts \\ []), - do: cracker(opts) |> elem(0) + @doc """ + - log_create: set `log_create` module version. Defaults to random. + - log_edit: set `log_edit` module version. Defaults to random. 
+ + Remaining opts are passed to `file/1` + """ + def log_forger!(opts \\ []), + do: log_forger(opts) |> elem(0) + def log_forger(opts \\ []) do + log_create = Keyword.get(opts, :log_create, SoftwareHelper.random_version()) + log_edit = Keyword.get(opts, :log_edit, SoftwareHelper.random_version()) + + version_map = %{log_create: log_create, log_edit: log_edit} + modules = SoftwareHelper.generate_module(:log_forger, version_map) + + file(opts ++ [type: :log_forger, modules: modules]) + end @doc """ Opts are passed to `file/1` From 148aaf513b2fc3e85f3d15c6327493e6ee03fe0f Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Sun, 12 Aug 2018 23:25:35 -0300 Subject: [PATCH 05/14] Remove old/unused Log-related files --- .../action/flow/software/log_forger.ex | 81 ------------- test/log/action/log_test.exs | 112 ------------------ test/log/model/log_test.exs | 41 ------- 3 files changed, 234 deletions(-) delete mode 100644 lib/software/action/flow/software/log_forger.ex delete mode 100644 test/log/action/log_test.exs delete mode 100644 test/log/model/log_test.exs diff --git a/lib/software/action/flow/software/log_forger.ex b/lib/software/action/flow/software/log_forger.ex deleted file mode 100644 index b9f05157..00000000 --- a/lib/software/action/flow/software/log_forger.ex +++ /dev/null @@ -1,81 +0,0 @@ -# TODO: Superseded by Process.Executable. Rewrite. Use Bruteforce as example. 
-# defmodule Helix.Software.Action.Flow.Software.LogForger do - -# import HELF.Flow - -# alias Helix.Event -# alias Helix.Log.Query.Log, as: LogQuery -# alias Helix.Process.Model.Process -# alias Helix.Process.Action.Process, as: ProcessAction -# alias Helix.Server.Model.Server -# alias Helix.Software.Model.File -# alias Helix.Software.Model.SoftwareType.LogForge - -# @type params :: LogForge.create_params - -# @type on_execute_error :: -# ProcessAction.on_create_error -# | {:error, {:log, :notfound}} -# | {:error, Ecto.Changeset.t} - -# @spec execute(File.t_of_type(:log_forger), Server.id, params) :: -# {:ok, Process.t} -# | on_execute_error -# def execute(file, server, params) do -# flowing do -# with \ -# {:ok, data} <- prepare(file, params), -# {:ok, process_params} <- process_params(file, server, data), -# {:ok, process, events} <- ProcessAction.create(process_params), -# on_success(fn -> Event.emit(events) end) -# do -# {:ok, process} -# end -# end -# end - -# @spec prepare(File.t_of_type(:log_forger), params) :: -# {:ok, LogForge.t} -# | {:error, Ecto.Changeset.t} -# defp prepare(file, params), -# do: LogForge.create(file, params) - -# @spec process_params(File.t_of_type(:log_forger), Server.id, LogForge.t) :: -# {:ok, Process.create_params} -# | {:error, {:log, :notfound}} -# defp process_params(file, server_id, data = %{operation: :edit}) do -# with \ -# log_id = data.target_log_id, -# log = %{} <- LogQuery.fetch(log_id) || {:error, {:log, :notfound}} -# do -# revision_count = LogQuery.count_revisions_of_entity(log, data.entity_id) -# objective = LogForge.edit_objective(data, log, revision_count) - -# process_params = %{ -# gateway_id: server_id, -# target_id: log.server_id, -# file_id: file.file_id, -# objective: objective, -# process_data: data, -# process_type: "log_forger" -# } - -# {:ok, process_params} -# end -# end - -# defp process_params(file, server, data = %{operation: :create}) do -# objective = LogForge.create_objective(data) - -# 
process_params = %{ -# gateway_id: server, -# target_id: data.target_id, -# file_id: file.file_id, -# objective: objective, -# process_data: data, -# process_type: "log_forger" -# } - -# {:ok, process_params} -# end -# end diff --git a/test/log/action/log_test.exs b/test/log/action/log_test.exs deleted file mode 100644 index a1376c7b..00000000 --- a/test/log/action/log_test.exs +++ /dev/null @@ -1,112 +0,0 @@ -# defmodule Helix.Log.Action.LogTest do - -# use Helix.Test.Case.Integration - -# alias Helix.Log.Action.Log, as: LogAction -# alias Helix.Log.Model.Log -# alias Helix.Log.Event.Log.Created, as: LogCreatedEvent -# alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent -# alias Helix.Log.Event.Log.Modified, as: LogModifiedEvent -# alias Helix.Log.Query.Log, as: LogQuery -# alias Helix.Log.Repo - -# alias Helix.Test.Log.Factory, as: LogFactory - -# alias Helix.Test.Entity.Helper, as: EntityHelper -# alias Helix.Test.Server.Helper, as: ServerHelper - -# describe "create/3" do -# test "succeeds with valid input" do -# server_id = ServerHelper.id() -# entity_id = EntityHelper.id() -# message = "They are taking the hobbits to Isengard" - -# assert {:ok, _, _} = LogAction.create(server_id, entity_id, message) -# end - -# test "returns LogCreated event" do -# server_id = ServerHelper.id() -# entity_id = EntityHelper.id() -# message = "Just as expected" - -# result = LogAction.create(server_id, entity_id, message) -# assert {:ok, _, [%LogCreatedEvent{}]} = result -# end -# end - -# describe "revise/4" do -# test "overrides log message" do -# log = LogFactory.insert(:log) -# entity = EntityHelper.id() -# message = "É nois que voa, bruxão!" 
-# forge_version = Enum.random(1..999) - -# assert {:ok, _, _} = LogAction.revise(log, entity, message, forge_version) -# assert %{message: ^message} = LogQuery.fetch(log.log_id) -# end - -# test "returns LogModified event" do -# log = LogFactory.insert(:log) -# entity = EntityHelper.id() -# message = "Don't dead, open inside" - -# result = LogAction.revise(log, entity, message, 1) -# assert {:ok, _, [%LogModifiedEvent{}]} = result -# end -# end - -# describe "recover/1" do -# test "recovers log to the last message" do -# log = LogFactory.insert(:log) -# entity = EntityHelper.id() - -# message0 = log.message -# message1 = "A monad is a monoid in the category of the endofunctors" -# message2 = "A commit a day keeps the PM away" - -# LogAction.revise(log, entity, message1, 1) -# log = LogQuery.fetch(log.log_id) -# assert %{message: ^message1} = log - -# LogAction.revise(log, entity, message2, 2) -# log = LogQuery.fetch(log.log_id) -# assert %{message: ^message2} = log - -# assert {:ok, :recovered, _} = LogAction.recover(log) -# assert %{message: ^message1} = LogQuery.fetch(log.log_id) - -# assert {:ok, :recovered, _} = LogAction.recover(log) -# assert %{message: ^message0} = LogQuery.fetch(log.log_id) -# end - -# test "returns LogModified event when a message is recovered" do -# log = LogFactory.insert(:log) -# entity = EntityHelper.id() -# message = "nullPointerException" - -# LogAction.revise(log, entity, message, 1) - -# assert {:ok, :recovered, [%LogModifiedEvent{}]} = LogAction.recover(log) -# end - -# test "returns error when log is original" do -# log = LogFactory.insert(:log) - -# assert {:error, :original_revision} == LogAction.recover(log) -# end - -# test "deletes log if it was forged" do -# log = LogFactory.insert(:log, forge_version: 1) - -# assert Repo.get(Log, log.log_id) -# assert {:ok, :deleted, _} = LogAction.recover(log) -# refute Repo.get(Log, log.log_id) -# end - -# test "returns LogDeleted event when forged log is deleted" do -# log = 
LogFactory.insert(:log, forge_version: 1) - -# assert {:ok, :deleted, [%LogDeletedEvent{}]} = LogAction.recover(log) -# end -# end -# end diff --git a/test/log/model/log_test.exs b/test/log/model/log_test.exs deleted file mode 100644 index 0c02454b..00000000 --- a/test/log/model/log_test.exs +++ /dev/null @@ -1,41 +0,0 @@ -defmodule Helix.Log.Model.LogTest do - - use ExUnit.Case, async: true - - alias Ecto.Changeset - alias Helix.Log.Model.Log - - alias Helix.Test.Entity.Helper, as: EntityHelper - alias Helix.Test.Server.Helper, as: ServerHelper - - @moduletag :unit - - describe "log creation" do - test "adds timestamp" do - params = - %{ - server_id: ServerHelper.id(), - entity_id: EntityHelper.id(), - message: "wut" - } - - changeset = Log.create_changeset(params) - assert changeset.valid? - - log = Changeset.apply_changes(changeset) - - assert log.message == params.message - assert log.server_id == params.server_id - assert log.entity_id == params.entity_id - assert %DateTime{} = log.creation_time - assert log.revisions - - [revision] = log.revisions - - assert revision.entity_id == params.entity_id - assert revision.message == params.message - refute revision.forge_version - assert revision.creation_time == log.creation_time - end - end -end From d35e78ab60037ceabc98d39753ab3422aa150e4b Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Sun, 12 Aug 2018 23:28:47 -0300 Subject: [PATCH 06/14] Add LogFactor --- lib/factor/client.ex | 2 - lib/factor/factor.ex | 32 ++++++++++++++- lib/log/factor/log.ex | 75 ++++++++++++++++++++++++++++++++++++ lib/log/internal/log.ex | 8 ++++ lib/log/process/forge.ex | 33 ++++++++-------- lib/log/query/log.ex | 6 +-- test/log/factor/log_test.exs | 66 +++++++++++++++++++++++++++++++ 7 files changed, 198 insertions(+), 24 deletions(-) create mode 100644 lib/log/factor/log.ex create mode 100644 test/log/factor/log_test.exs diff --git a/lib/factor/client.ex b/lib/factor/client.ex index df4befd3..3318d134 100644 --- 
a/lib/factor/client.ex +++ b/lib/factor/client.ex @@ -24,8 +24,6 @@ defmodule Helix.Factor.Client do defmacro get_factors(params, do: block) do quote do - @spec get_factors(map) :: - map def get_factors(unquote(params)) do var!(relay) = %{} var!(factors) = %{} diff --git a/lib/factor/factor.ex b/lib/factor/factor.ex index f0bde36a..d8f36260 100644 --- a/lib/factor/factor.ex +++ b/lib/factor/factor.ex @@ -326,6 +326,13 @@ defmodule Helix.Factor do |> String.downcase() |> String.to_atom() + # Generates typespecs for each fact + for key <- keys do + quote do + @spec unquote(key)(params, relay) :: {unquote(key), relay} + end + end + quote do @doc """ @@ -381,8 +388,6 @@ defmodule Helix.Factor do fname = :"fact_#{name}" quote do - @spec unquote(fname)(term, term) :: - {unquote(fname), term} def unquote(fname)(unquote(params), var!(relay) = unquote(relay)) do unquote(block) end @@ -521,6 +526,29 @@ defmodule Helix.Factor do end end + @doc """ + Used when `fact/3` was called without the required relay(s). It will append + the `append_relay` to the current relay, and execute the fact again. 
+ """ + defmacro set_relay(params, current_relay, append_relay) do + {fname, _} = __CALLER__.function + + quote do + + # `set_relay` called again within the same fact; probably an infinite loop + if unquote(current_relay)[:__loop_checker] == unquote(fname), + do: raise "infinite loop detected" + + new_relay = + unquote(current_relay) + |> Map.merge(unquote(append_relay)) + |> Map.merge(%{__loop_checker: unquote(fname)}) + + apply(__MODULE__, unquote(fname), [unquote(params), new_relay]) + + end + end + @spec get_child_module(parent :: atom, child_name :: atom) :: child_module :: atom docp """ diff --git a/lib/log/factor/log.ex b/lib/log/factor/log.ex new file mode 100644 index 00000000..0966729d --- /dev/null +++ b/lib/log/factor/log.ex @@ -0,0 +1,75 @@ +import Helix.Factor + +factor Helix.Log.Factor.Log do + + alias Helix.Log.Model.Log + alias __MODULE__.Revisions, as: RevisionsFactor + + @type factor :: + %__MODULE__{ + revisions: RevisionsFactor.factor + } + + @type params :: RevisionsFactor.params + @type relay :: RevisionsFactor.relay + + @type fact_revisions :: RevisionsFactor.factor + + factor_struct [:revisions] + + child :revisions + + assembly do + get_fact :revisions + end + + factor Revisions do + + alias Helix.Entity.Model.Entity + alias Helix.Log.Model.Log + alias Helix.Log.Query.Log, as: LogQuery + + @type factor :: + %__MODULE__{ + total: fact_total, + from_entity: fact_from_entity + } + + @type params :: + %{log: Log.t, entity_id: Entity.id} + | %{log: Log.t} + + @type relay :: + %{revisions: pos_integer} + | %{} + + @type fact_total :: pos_integer + @type fact_from_entity :: non_neg_integer + + factor_struct [:from_entity, :total] + + fact(:total, _, %{revisions: revisions}) do + set_fact length(revisions) + end + + fact(:total, params = %{log: log}, relay) do + set_relay params, relay, %{revisions: get_revisions(log)} + end + + fact(:from_entity, %{entity_id: entity_id}, %{revisions: revs}) do + set_fact Enum.count(revs, &(&1.entity_id == 
entity_id)) + end + + fact(:from_entity, params = %{log: log, entity_id: _}, relay) do + set_relay params, relay, %{revisions: get_revisions(log)} + end + + assembly do + get_fact :total + get_fact :from_entity + end + + defp get_revisions(log = %Log{}), + do: LogQuery.fetch_revisions(log) + end +end diff --git a/lib/log/internal/log.ex b/lib/log/internal/log.ex index beda268f..2b832071 100644 --- a/lib/log/internal/log.ex +++ b/lib/log/internal/log.ex @@ -41,6 +41,14 @@ defmodule Helix.Log.Internal.Log do |> Repo.one() end + @spec fetch_revisions(Log.t) :: + [Revision.t] + def fetch_revisions(log = %Log{}) do + log.log_id + |> Revision.Query.by_log() + |> Repo.all() + end + @spec get_logs_on_server(Server.id, pos_integer) :: [Log.t] def get_logs_on_server(server_id, count \\ 20) do diff --git a/lib/log/process/forge.ex b/lib/log/process/forge.ex index 4d9152dd..36668333 100644 --- a/lib/log/process/forge.ex +++ b/lib/log/process/forge.ex @@ -97,37 +97,36 @@ process Helix.Log.Process.Forge do resourceable do + alias Helix.Software.Factor.File, as: FileFactor + alias Helix.Log.Factor.Log, as: LogFactor + @type params :: LogForgeProcess.resources_params @type factors :: %{ :forger => %{version: FileFactor.fact_version}, - optional(:log) => Log.t + optional(:log) => LogFactor.fact_revisions } - get_factors(_) do - + get_factors(params = %{action: :edit}) do + factor FileFactor, %{file: params.forger}, + only: [:version], as: :forger + factor LogFactor, %{log: params.log, entity_id: params.entity_id}, + only: [:revisions] end - # get_factors(%{action: :edit, log: log, forger: forger}) do - # factor FileFactor, %{file: forger}, - # only: [:version], as: :forger - # factor LogFactor, %{log: log}, - # only: [:total_revisions] - # end - - # get_factors(%{action: :create, forger: forger}) do - # factor FileFactor, %{file: forger}, - # only: [:version], as: :forger - # end + get_factors(params = %{action: :create}) do + factor FileFactor, %{file: params.forger}, + only: 
[:version], as: :forger + end - # TODO: time resource (for minimum duration) + # TODO: time resource (for minimum duration) #364 cpu(%{action: :edit}) do - f.forger.version.log_edit * f.log.revisions.total + f.forger.version.log_edit * (1 + f.log.revisions.from_entity) + 5000 end cpu(%{action: :create}) do - f.forger.version.log_create + f.forger.version.log_create + 5000 end dynamic do diff --git a/lib/log/query/log.ex b/lib/log/query/log.ex index 2ccd06ca..07dbfeb9 100644 --- a/lib/log/query/log.ex +++ b/lib/log/query/log.ex @@ -8,15 +8,15 @@ defmodule Helix.Log.Query.Log do alias Helix.Log.Internal.Log, as: LogInternal alias Helix.Log.Model.Log - @spec fetch(Log.id) :: - Log.t - | nil @doc """ Fetches a log """ defdelegate fetch(id), to: LogInternal + defdelegate fetch_revisions(log), + to: LogInternal + @spec get_logs_on_server(Server.idt) :: [Log.t] @doc """ diff --git a/test/log/factor/log_test.exs b/test/log/factor/log_test.exs new file mode 100644 index 00000000..7e720261 --- /dev/null +++ b/test/log/factor/log_test.exs @@ -0,0 +1,66 @@ +defmodule Helix.Log.Factor.LogTest do + + use Helix.Test.Case.Integration + + import Helix.Test.Factor.Macros + + alias Helix.Log.Factor.Log, as: LogFactor + alias Helix.Log.Internal.Log, as: LogInternal + + alias Helix.Test.Entity.Helper, as: EntityHelper + alias Helix.Test.Log.Helper, as: LogHelper + alias Helix.Test.Log.Setup, as: LogSetup + + describe "LogFactor.Revisions" do + test "fact: :from_entity" do + log = LogSetup.log!() + entity_id = EntityHelper.id() + info = LogHelper.log_info() + version = 50 + + params = %{log: log, entity_id: entity_id} + + {fact, relay} = get_fact(LogFactor.Revisions, :from_entity, params) + + assert fact == 0 + assert length(relay.revisions) == 1 + + # Add a revision from `entity_id` + LogInternal.revise(log, entity_id, info, version) + + {fact, relay} = get_fact(LogFactor.Revisions, :from_entity, params) + + assert fact == 1 + assert length(relay.revisions) == 2 + + # Add a revision 
from some other entity + LogInternal.revise(log, EntityHelper.id(), info, version) + + {fact, relay} = get_fact(LogFactor.Revisions, :from_entity, params) + + assert fact == 1 + assert length(relay.revisions) == 3 + end + + test "fact: :total" do + log = LogSetup.log!() + info = LogHelper.log_info() + version = 50 + + params = %{log: log} + + {fact, relay} = get_fact(LogFactor.Revisions, :total, params) + + assert fact == 1 + assert length(relay.revisions) == 1 + + # Add a revision from `entity_id` + LogInternal.revise(log, EntityHelper.id(), info, version) + + {fact, relay} = get_fact(LogFactor.Revisions, :total, params) + + assert fact == 2 + assert length(relay.revisions) == 2 + end + end +end From e2ab25b19ea8fc58f0ea57f497a7b7addfd4dca9 Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Sun, 12 Aug 2018 23:31:01 -0300 Subject: [PATCH 07/14] Add LogForgeRequest for Create and Edit operations --- lib/event/dispatcher.ex | 5 + lib/event/loggable/flow.ex | 2 +- lib/event/notificable/notificable.ex | 1 + lib/log/action/flow/forge.ex | 99 ++++ lib/log/event/forge.ex | 2 + lib/log/event/log.ex | 29 ++ lib/log/henforcer/log/forge.ex | 5 +- lib/log/model/log.ex | 17 +- lib/log/model/log_type/macros.ex | 103 ++-- lib/log/process/forge.ex | 65 ++- lib/log/public/forge.ex | 10 + lib/log/websocket/requests/forge.ex | 147 ++++++ .../event/handler/notification.ex | 2 + lib/notification/model/code/server.ex | 54 +++ lib/process/executable.ex | 24 +- lib/process/model/process.ex | 6 +- lib/process/model/top/allocator.ex | 6 +- lib/process/process.ex | 2 +- lib/process/public/index.ex | 2 +- lib/process/public/view/helper.ex | 19 +- lib/server/websocket/channel/server.ex | 34 ++ lib/server/websocket/channel/server/join.ex | 1 - .../websocket/requests/motherboard_update.ex | 4 +- lib/software/action/flow/file.ex | 2 +- lib/software/process/cracker/bruteforce.ex | 5 +- lib/software/process/file/install.ex | 13 +- lib/software/public/file.ex | 7 +- 
.../websocket/requests/file/install.ex | 1 + test/event/loggable/flow_test.exs | 4 +- test/features/file/transfer_test.exs | 3 +- test/features/log/forge_test.exs | 445 ++++++++++++++++++ test/log/event/handler/log_test.exs | 3 - test/log/websocket/requests/forge_test.exs | 424 +++++++++++++++++ test/support/channel/setup.ex | 27 +- test/support/log/helper.ex | 1 - test/support/log/setup.ex | 24 +- test/support/macros.ex | 16 +- test/support/process/setup/data.ex | 2 - 38 files changed, 1502 insertions(+), 114 deletions(-) create mode 100644 lib/log/action/flow/forge.ex create mode 100644 lib/log/public/forge.ex create mode 100644 lib/log/websocket/requests/forge.ex create mode 100644 test/features/log/forge_test.exs create mode 100644 test/log/websocket/requests/forge_test.exs diff --git a/lib/event/dispatcher.ex b/lib/event/dispatcher.ex index 48cb24aa..2638befa 100644 --- a/lib/event/dispatcher.ex +++ b/lib/event/dispatcher.ex @@ -149,6 +149,11 @@ defmodule Helix.Event.Dispatcher do event LogEvent.Log.Deleted event LogEvent.Log.Revised + # Custom handlers + event LogEvent.Forge.Processed, + LogHandler.Log, + :log_forge_processed + ############################################################################## # Process events ############################################################################## diff --git a/lib/event/loggable/flow.ex b/lib/event/loggable/flow.ex index 9ecd83d6..a8accb98 100644 --- a/lib/event/loggable/flow.ex +++ b/lib/event/loggable/flow.ex @@ -33,7 +33,7 @@ defmodule Helix.Event.Loggable.Flow do alias Helix.Network.Query.Bounce, as: BounceQuery alias Helix.Server.Model.Server - @typep log_entry :: + @type log_entry :: {Server.id, Entity.id, Log.info} @doc """ diff --git a/lib/event/notificable/notificable.ex b/lib/event/notificable/notificable.ex index f0314d78..4b43fd86 100644 --- a/lib/event/notificable/notificable.ex +++ b/lib/event/notificable/notificable.ex @@ -12,6 +12,7 @@ defprotocol Helix.Event.Notificable do | %{account_id: 
Account.id, server_id: Server.id} | %{account_id: Entity.id, server_id: Server.id} | Server.id + | :no_one @spec get_notification_info(Event.t) :: {Notification.class, Notification.code} diff --git a/lib/log/action/flow/forge.ex b/lib/log/action/flow/forge.ex new file mode 100644 index 00000000..8564f195 --- /dev/null +++ b/lib/log/action/flow/forge.ex @@ -0,0 +1,99 @@ +# credo:disable-for-this-file Credo.Check.Refactor.FunctionArity +defmodule Helix.Log.Action.Flow.Forge do + + alias Helix.Event + alias Helix.Entity.Model.Entity + alias Helix.Network.Model.Connection + alias Helix.Network.Model.Tunnel + alias Helix.Server.Model.Server + alias Helix.Software.Model.File + alias Helix.Log.Model.Log + + alias Helix.Log.Process.Forge, as: LogForgeProcess + + @spec create( + Server.t, + Server.t, + Log.info, + File.t, + {Tunnel.t, Connection.ssh} | nil, + Event.relay + ) :: + term + def create( + gateway = %Server{}, + endpoint = %Server{}, + log_info, + forger = %File{software_type: :log_forger}, + conn, + relay + ) do + start_process(gateway, endpoint, nil, log_info, forger, nil, conn, relay) + end + + @spec edit( + Server.t, + Server.t, + Log.t, + Log.info, + File.t, + Entity.id, + {Tunnel.t, Connection.ssh} | nil, + Event.relay + ) :: + term + def edit( + gateway = %Server{}, + endpoint = %Server{}, + log = %Log{}, + log_info, + forger = %File{software_type: :log_forger}, + entity_id = %Entity.ID{}, + conn, + relay + ) do + start_process( + gateway, endpoint, log, log_info, forger, entity_id, conn, relay + ) + end + + defp start_process( + gateway = %Server{}, + endpoint = %Server{}, + log, + log_info, + forger = %File{software_type: :log_forger}, + entity_id, + conn_info, + relay + ) do + action = + if is_nil(log) do + :create + else + :edit + end + + {network_id, ssh} = + if is_nil(conn_info) do + {nil, nil} + else + {tunnel, ssh} = conn_info + {tunnel.network_id, ssh} + end + + params = %{log_info: log_info} + + meta = + %{ + forger: forger, + log: log, + action: 
action, + ssh: ssh, + entity_id: entity_id, + network_id: network_id + } + + LogForgeProcess.execute(gateway, endpoint, params, meta, relay) + end +end diff --git a/lib/log/event/forge.ex b/lib/log/event/forge.ex index 9cf899ed..97ca448a 100644 --- a/lib/log/event/forge.ex +++ b/lib/log/event/forge.ex @@ -8,7 +8,9 @@ defmodule Helix.Log.Event.Forge do achieved its objective and finished executing. """ + alias Helix.Entity.Model.Entity alias Helix.Process.Model.Process + alias Helix.Server.Model.Server alias Helix.Log.Model.Log alias Helix.Log.Process.Forge, as: LogForgeProcess diff --git a/lib/log/event/log.ex b/lib/log/event/log.ex index c6d18c17..a7c1f7a0 100644 --- a/lib/log/event/log.ex +++ b/lib/log/event/log.ex @@ -44,6 +44,23 @@ defmodule Helix.Log.Event.Log do def whom_to_publish(event), do: %{server: event.log.server_id} end + + notification do + @moduledoc """ + When the created log is artificial a notification is sent to the player. + """ + + @class :server + @code :log_created + + def whom_to_notify(%_{log: log}) do + if Log.is_artificial?(log) do + %{account_id: log.revision.entity_id, server_id: log.server_id} + else + :no_one + end + end + end end event Revised do @@ -86,6 +103,18 @@ defmodule Helix.Log.Event.Log do def whom_to_publish(event), do: %{server: event.log.server_id} end + + notification do + @moduledoc """ + When the created log is artificial a notification is sent to the player. 
+ """ + + @class :server + @code :log_revised + + def whom_to_notify(%_{log: log}), + do: %{account_id: log.revision.entity_id, server_id: log.server_id} + end end event Deleted do diff --git a/lib/log/henforcer/log/forge.ex b/lib/log/henforcer/log/forge.ex index fbbc8576..f9330888 100644 --- a/lib/log/henforcer/log/forge.ex +++ b/lib/log/henforcer/log/forge.ex @@ -2,11 +2,12 @@ defmodule Helix.Log.Henforcer.Log.Forge do import Helix.Henforcer - alias Helix.Server.Model.Server alias Helix.Server.Henforcer.Server, as: ServerHenforcer + alias Helix.Server.Model.Server alias Helix.Software.Henforcer.File, as: FileHenforcer - alias Helix.Log.Model.Log + alias Helix.Software.Model.File alias Helix.Log.Henforcer.Log, as: LogHenforcer + alias Helix.Log.Model.Log @type can_edit_relay :: %{log: Log.t, gateway: Server.t, forger: File.t} @type can_edit_relay_partial :: map diff --git a/lib/log/model/log.ex b/lib/log/model/log.ex index f0b7f6f5..97cbe6e6 100644 --- a/lib/log/model/log.ex +++ b/lib/log/model/log.ex @@ -7,7 +7,6 @@ defmodule Helix.Log.Model.Log do import HELL.Ecto.Macros alias Ecto.Changeset - alias Helix.Entity.Model.Entity alias Helix.Server.Model.Server alias Helix.Log.Model.LogType alias Helix.Log.Model.Revision @@ -28,11 +27,7 @@ defmodule Helix.Log.Model.Log do @type data :: LogType.data @type info :: {type, data} - @type creation_params :: - %{ - server_id: Server.id, - entity_id: Entity.id - } + @type creation_params :: %{server_id: Server.id} @creation_fields [:server_id] @required_fields [:server_id, :revision_id, :creation_time, :log_id] @@ -131,6 +126,16 @@ defmodule Helix.Log.Model.Log do {:recover, changeset} end + @spec is_artificial?(Log.t) :: + boolean + @doc """ + Returns whether the log is artificial or not. 
+ """ + def is_artificial?(%Log{revision: %{forge_version: nil}}), + do: false + def is_artificial?(%Log{}), + do: true + @spec build_heritage(creation_params) :: Helix.ID.heritage defp build_heritage(params), diff --git a/lib/log/model/log_type/macros.ex b/lib/log/model/log_type/macros.ex index 97e3aca5..ce6d5193 100644 --- a/lib/log/model/log_type/macros.ex +++ b/lib/log/model/log_type/macros.ex @@ -28,21 +28,44 @@ defmodule Helix.Log.Model.LogType.Macros do defenum LogEnum, @logs - # @spec exists?(term) :: - # boolean - def exists?(log) do - Enum.any?(@logs, fn {valid_log, _} -> valid_log == log end) - end - - def new(type, data_params) do + @spec exists?(atom) :: + boolean + def exists?(log_type), + do: Enum.any?(@logs, fn {valid_type, _} -> valid_type == log_type end) + + @spec new(type, map) :: + struct + @doc """ + Creates a new struct for the given log `type`. + """ + def new(type, data_params), + do: dispatch(type, :new, data_params) + + @spec parse(type, map) :: + {:ok, struct} + | :error + @doc """ + Attempts to parse the potentially unsafe input into a valid LogData. + """ + def parse(type, unsafe_data_params), + do: dispatch(type, :parse, unsafe_data_params) + + @spec dispatch(type, atom, term) :: + term + defp dispatch(type, method, param) when not is_list(param), + do: dispatch(type, method, [param]) + defp dispatch(type, method, params) do type |> get_type_module() - |> apply(:new, [data_params]) + |> apply(method, params) end end end + @doc """ + Top-level macro used to define a LogType and its underlying LogData. + """ defmacro log(name, enum_id, do: block) do module_name = __CALLER__.module @@ -67,6 +90,9 @@ defmodule Helix.Log.Model.LogType.Macros do end end + @doc """ + Converts the module into a LogData struct. + """ defmacro data_struct(keys) do quote do @@ -76,17 +102,48 @@ defmodule Helix.Log.Model.LogType.Macros do end end - defmacro new(args, do: block) do + @doc """ + Creates a new LogData from the given `data` map. 
+ """ + defmacro new(data, do: block) do quote do @doc false - def new(unquote(args)) do + def new(unquote(data)) do unquote(block) end end end + @doc """ + Attempts to parse the given unsafe input into a valid LogData. + """ + defmacro parse(data, do: block) do + quote do + + @spec parse(term) :: + {:ok, data :: struct} + | :error + @doc false + def parse(map = unquote(data)) when is_map(map) do + try do + {:ok, unquote(block)} + rescue + RuntimeError -> + :error + + KeyError -> + :error + end + end + + def parse(not_map) when not is_map(not_map), + do: :error + + end + end + @doc """ Generates the boilerplate for a n-field log type. @@ -116,22 +173,6 @@ defmodule Helix.Log.Model.LogType.Macros do defmacro gen3(p1, p2, p3), do: do_gen3(p1, p2, p3) - defmacro parse(args, do: block) do - quote do - - @doc false - def parse(unquote(args)) do - try do - {:ok, unquote(block)} - rescue - RuntimeError -> - :error - end - end - - end - end - def validate(field_type, field_value) when is_atom(field_type) do fun = Utils.concat_atom(:validate_, field_type) @@ -222,9 +263,9 @@ defmodule Helix.Log.Model.LogType.Macros do parse(unsafe) do %__MODULE__{ unquote(f1) => - validate(unquote(v_f1), Map.get(unsafe, unquote(str_f1))), + validate(unquote(v_f1), Map.fetch!(unsafe, unquote(str_f1))), unquote(f2) => - validate(unquote(v_f2), Map.get(unsafe, unquote(str_f2))) + validate(unquote(v_f2), Map.fetch!(unsafe, unquote(str_f2))) } end @@ -249,11 +290,11 @@ defmodule Helix.Log.Model.LogType.Macros do parse(unsafe) do %__MODULE__{ unquote(f1) => - validate(unquote(v_f1), Map.get(unsafe, unquote(str_f1))), + validate(unquote(v_f1), Map.fetch!(unsafe, unquote(str_f1))), unquote(f2) => - validate(unquote(v_f2), Map.get(unsafe, unquote(str_f2))), + validate(unquote(v_f2), Map.fetch!(unsafe, unquote(str_f2))), unquote(f3) => - validate(unquote(v_f3), Map.get(unsafe, unquote(str_f3))) + validate(unquote(v_f3), Map.fetch!(unsafe, unquote(str_f3))) } end diff --git a/lib/log/process/forge.ex 
b/lib/log/process/forge.ex index 36668333..250fcd56 100644 --- a/lib/log/process/forge.ex +++ b/lib/log/process/forge.ex @@ -6,6 +6,9 @@ process Helix.Log.Process.Forge do create a new one from scratch. """ + alias Helix.Entity.Model.Entity + alias Helix.Network.Model.Connection + alias Helix.Network.Model.Network alias Helix.Software.Model.File alias Helix.Log.Model.Log alias __MODULE__, as: LogForgeProcess @@ -29,14 +32,17 @@ process Helix.Log.Process.Forge do forger: File.t_of_type(:log_forger), action: LogForgeProcess.action, log: Log.t | nil, - ssh: Connection.t | nil + ssh: Connection.t | nil, + entity_id: Entity.id | nil, + network_id: Network.id | nil } @type resources_params :: %{ action: action, log: Log.t | nil, - forger: File.t_of_type(:log_forger) + forger: File.t_of_type(:log_forger), + entity_id: Entity.id | nil } @type resources :: @@ -53,7 +59,7 @@ process Helix.Log.Process.Forge do t def new( %{log_info: {log_type, log_data}}, - %{action: action, forger: forger = %File{type: :log_forger}} + %{action: action, forger: forger = %File{software_type: :log_forger}} ) do %__MODULE__{ log_type: log_type, @@ -62,6 +68,13 @@ process Helix.Log.Process.Forge do } end + @spec get_process_type(creation_params, executable_meta) :: + process_type + def get_process_type(_, %{action: :create}), + do: :log_forge_create + def get_process_type(_, %{action: :edit}), + do: :log_forge_edit + @spec resources(resources_params) :: resources def resources(params), @@ -76,7 +89,8 @@ process Helix.Log.Process.Forge do processable do - alias HELL.MapUtils + alias Helix.Log.Model.LogType + alias Helix.Log.Event.Forge.Processed, as: LogForgeProcessedEvent on_completion(process, data) do @@ -87,9 +101,11 @@ process Helix.Log.Process.Forge do @doc false def after_read_hook(data) do + log_type = String.to_existing_atom(data.log_type) + %LogForgeProcess{ - log_type: String.to_existing_atom(data.log_type), - log_data: MapUtils.atomize_keys(data.log_data), + log_type: log_type, + 
log_data: LogType.parse(log_type, data.log_data) |> elem(1), forger_version: data.forger_version } end @@ -107,16 +123,11 @@ process Helix.Log.Process.Forge do optional(:log) => LogFactor.fact_revisions } - get_factors(params = %{action: :edit}) do + get_factors(params) do factor FileFactor, %{file: params.forger}, only: [:version], as: :forger factor LogFactor, %{log: params.log, entity_id: params.entity_id}, - only: [:revisions] - end - - get_factors(params = %{action: :create}) do - factor FileFactor, %{file: params.forger}, - only: [:version], as: :forger + if: params.action == :edit, only: [:revisions], as: :log end # TODO: time resource (for minimum duration) #364 @@ -143,11 +154,14 @@ process Helix.Log.Process.Forge do executable do + import HELL.Macros + resources(_gateway, _target, _params, meta) do %{ log: meta.log, forger: meta.forger, - action: meta.action + action: meta.action, + entity_id: meta.entity_id } end @@ -155,12 +169,31 @@ process Helix.Log.Process.Forge do forger.file_id end - source_connection(_gateway, _target, _params, %{ssh: ssh}) do - ssh.connection_id + docp """ + The LogForgeProcess have a `source_connection` when the player is forging a + log on a remote server. + + However, if the operation is local, there is no `source_connection`. + """ + source_connection(_, _, _, %{ssh: ssh = %Connection{}}) do + ssh end + docp """ + When editing an existing log, we have a valid `target_log` entry. + + If, however, we are creating a new log, there is no such entry, as the + soon-to-be-created log does not exist yet! 
+ """ target_log(_gateway, _target, _params, %{action: :edit, log: log}) do log.log_id end end + + process_viewable do + + @type data :: %{} + + render_empty_data() + end end diff --git a/lib/log/public/forge.ex b/lib/log/public/forge.ex new file mode 100644 index 00000000..d90a6228 --- /dev/null +++ b/lib/log/public/forge.ex @@ -0,0 +1,10 @@ +defmodule Helix.Log.Public.Forge do + + alias Helix.Log.Action.Flow.Forge, as: ForgeFlow + + defdelegate create(gateway, endpoint, log_info, forger, conn, relay), + to: ForgeFlow + + defdelegate edit(gtw, endpoint, log, log_info, forger, entity, conn, relay), + to: ForgeFlow +end diff --git a/lib/log/websocket/requests/forge.ex b/lib/log/websocket/requests/forge.ex new file mode 100644 index 00000000..1ec0ae11 --- /dev/null +++ b/lib/log/websocket/requests/forge.ex @@ -0,0 +1,147 @@ +import Helix.Websocket.Request + +request Helix.Log.Websocket.Requests.Forge do + @moduledoc """ + `LogForgeRequest` is called when the player wants to forge a log. The forge + operation may either edit an existing log or create a new one. 
+ """ + + import HELL.Macros + + alias Helix.Server.Query.Server, as: ServerQuery + alias Helix.Log.Henforcer.Log.Forge, as: LogForgeHenforcer + alias Helix.Log.Model.Log + alias Helix.Log.Model.LogType + alias Helix.Log.Public.Forge, as: ForgePublic + + def check_params(request, socket) do + case request.unsafe["action"] do + "create" -> + check_params_create(request, socket) + + "edit" -> + check_params_edit(request, socket) + + _ -> + reply_error(request, "bad_action") + end + end + + defp check_params_create(request, _socket) do + with \ + {:ok, log_info} <- + cast_log_info(request.unsafe["log_type"], request.unsafe["log_data"]) + do + params = %{action: :create, log_info: log_info} + + update_params(request, params, reply: true) + else + {:error, reason} -> + reply_error(request, reason) + end + end + + defp check_params_edit(request, _socket) do + with \ + {:ok, log_id} <- Log.ID.cast(request.unsafe["log_id"]), + {:ok, log_info} <- + cast_log_info(request.unsafe["log_type"], request.unsafe["log_data"]) + do + params = %{action: :edit, log_id: log_id, log_info: log_info} + + update_params(request, params, reply: true) + else + {:error, reason} -> + reply_error(request, reason) + + _ -> + bad_request(request) + end + end + + def check_permissions(request = %{params: %{action: :create}}, socket) do + gateway_id = socket.assigns.gateway.server_id + + case LogForgeHenforcer.can_create?(gateway_id) do + {true, relay} -> + meta = %{gateway: relay.gateway, forger: relay.forger} + update_meta(request, meta, reply: true) + + {false, reason, _} -> + reply_error(request, reason) + end + end + + def check_permissions(request = %{params: %{action: :edit}}, socket) do + log_id = request.params.log_id + gateway_id = socket.assigns.gateway.server_id + target_id = socket.assigns.destination.server_id + + case LogForgeHenforcer.can_edit?(log_id, gateway_id, target_id) do + {true, relay} -> + meta = %{gateway: relay.gateway, forger: relay.forger, log: relay.log} + 
update_meta(request, meta, reply: true) + + {false, reason, _} -> + reply_error(request, reason) + end + end + + def handle_request(request, socket) do + log_info = request.params.log_info + forger = request.meta.forger + gateway = request.meta.gateway + relay = request.relay + + {target, conn_info} = + if socket.assigns.meta.access == :local do + {gateway, nil} + else + { + ServerQuery.fetch(socket.assigns.destination.server_id), + {socket.assigns.tunnel, socket.assigns.ssh} + } + end + + hespawn fn -> + if request.params.action == :create do + ForgePublic.create( + gateway, target, log_info, forger, conn_info, relay + ) + else + entity_id = socket.assigns.entity_id + log = request.meta.log + + ForgePublic.edit( + gateway, target, log, log_info, forger, entity_id, conn_info, relay + ) + end + end + + reply_ok(request) + end + + @spec cast_log_info(String.t, map) :: + {:ok, Log.info} + | {:error, :bad_log_type | :bad_log_data} + defp cast_log_info(unsafe_log_type, unsafe_log_data) do + with \ + {:ok, log_type} <- cast_existing_atom(unsafe_log_type), + true <- LogType.exists?(log_type) || {:error, :log_type}, + {:ok, log_data} <- LogType.parse(log_type, unsafe_log_data) + do + {:ok, {log_type, log_data}} + else + {:error, :atom_not_found} -> + {:error, :bad_log_type} + + {:error, :log_type} -> + {:error, :bad_log_type} + + :error -> + {:error, :bad_log_data} + end + end + + render_empty() +end diff --git a/lib/notification/event/handler/notification.ex b/lib/notification/event/handler/notification.ex index f9c57777..83f129e9 100644 --- a/lib/notification/event/handler/notification.ex +++ b/lib/notification/event/handler/notification.ex @@ -54,6 +54,8 @@ defmodule Helix.Notification.Event.Handler.Notification do Notice that the param sent to `get_id_map/2` isn't necessarily the same data returned by `Notificable.whom_to_notify/1`; it may be altered. 
""" + defp get_target_ids(_, :no_one), + do: [] defp get_target_ids(:account, account_id), do: [Notification.get_id_map(:account, account_id)] defp get_target_ids(:server, %{account_id: account_id, server_id: server_id}), diff --git a/lib/notification/model/code/server.ex b/lib/notification/model/code/server.ex index 108714f0..8fa08966 100644 --- a/lib/notification/model/code/server.ex +++ b/lib/notification/model/code/server.ex @@ -33,4 +33,58 @@ defmodule Helix.Notification.Model.Code.Server do def render_data(data), do: data end + + code :log_created, 4 do + @moduledoc """ + `LogCreatedNotification` notifies the player that the LogForge.Create + operation has finished successfully + """ + + alias Helix.Log.Model.Log + + @doc false + def generate_data(event) do + %{ + log_id: to_string(event.log.log_id) + } + end + + @doc false + def after_read_hook(data) do + %{ + log_id: Log.ID.cast!(data.log_id) + } + end + + @doc false + def render_data(data), + do: data + end + + code :log_revised, 5 do + @moduledoc """ + `LogRevisedNotification` notifies the player that the LogForge.Edit + operation has finished successfully + """ + + alias Helix.Log.Model.Log + + @doc false + def generate_data(event) do + %{ + log_id: to_string(event.log.log_id) + } + end + + @doc false + def after_read_hook(data) do + %{ + log_id: Log.ID.cast!(data.log_id) + } + end + + @doc false + def render_data(data), + do: data + end end diff --git a/lib/process/executable.ex b/lib/process/executable.ex index 573cde6a..c4efcbd1 100644 --- a/lib/process/executable.ex +++ b/lib/process/executable.ex @@ -73,15 +73,15 @@ defmodule Helix.Process.Executable do } end - @spec get_process_type(term) :: + @spec get_process_type(params, meta) :: %{type: Process.type} docp """ Returns the `process_type` parameter, a subset of the full process params. 
""" - defp get_process_type(%{type: process_type}), + defp get_process_type(_, %{type: process_type}), do: %{type: process_type} - defp get_process_type(_) do - process_type = call_process(:get_process_type) + defp get_process_type(params, meta) do + process_type = call_process(:get_process_type, [params, meta]) %{type: process_type} end @@ -90,7 +90,7 @@ defmodule Helix.Process.Executable do docp """ Returns the `network_id` parameter, a subset of the full process params. """ - defp get_network_id(%{network_id: network_id}), + defp get_network_id(%{network_id: network_id = %Network.ID{}}), do: %{network_id: network_id} defp get_network_id(_), do: %{network_id: nil} @@ -280,23 +280,25 @@ defmodule Helix.Process.Executable do # Defaults: in case these functions were not defined, we assume the # process is not interested on this (optional) data. - @spec get_bounce_id(Server.t, Server.t, params, meta) :: + @spec get_bounce_id(Bounce.idt | nil) :: %{bounce_id: Bounce.id | nil} - defp get_bounce_id(_, _, _, %{bounce: bounce = %Bounce{}}), + defp get_bounce_id(bounce = %Bounce{}), do: %{bounce_id: bounce.bounce_id} - defp get_bounce_id(_, _, _, %{bounce: bounce_id = %Bounce.ID{}}), + defp get_bounce_id(bounce_id = %Bounce.ID{}), do: %{bounce_id: bounce_id} - defp get_bounce_id(_, _, _, _), + defp get_bounce_id(nil), do: %{bounce_id: nil} @spec get_source_connection(Server.t, Server.t, params, meta) :: {:create, Connection.type} + | Connection.idt | nil defp get_source_connection(_, _, _, _), do: nil @spec get_target_connection(Server.t, Server.t, params, meta) :: {:create, Connection.type} + | Connection.idt | :same_origin | nil defp get_target_connection(_, _, _, _), @@ -356,6 +358,7 @@ defmodule Helix.Process.Executable do Executes the process. 
""" def execute(unquote_splicing(args), relay) do + process_type = get_process_type(unquote(params), unquote(meta)) process_data = get_process_data(unquote(params), unquote(meta)) resources = get_resources(unquote_splicing(args)) source_file = get_source_file(unquote_splicing(args)) @@ -364,9 +367,8 @@ defmodule Helix.Process.Executable do target_bank_account = get_target_bank_account(unquote_splicing(args)) target_process = get_target_process(unquote_splicing(args)) target_log = get_target_log(unquote_splicing(args)) - bounce_id = get_bounce_id(unquote_splicing(args)) + bounce_id = get_bounce_id(unquote(meta)[:bounce]) ownership = get_ownership(unquote_splicing(args)) - process_type = get_process_type(unquote(meta)) network_id = get_network_id(unquote(meta)) partial = diff --git a/lib/process/model/process.ex b/lib/process/model/process.ex index 306ee0f6..20f21375 100644 --- a/lib/process/model/process.ex +++ b/lib/process/model/process.ex @@ -95,7 +95,7 @@ defmodule Helix.Process.Model.Process do ## SIGTERM Process reached its objective. Handled by Processable's `on_completion`. - This callback must be implemented by the process. + This callback MUST be implemented by the process. Note that, despite the name, it has no similarities with UNIX's SIGTERM. @@ -323,6 +323,10 @@ defmodule Helix.Process.Model.Process do default: nil # Which bounce (if any) is this process bound to + # The `bounce_id` information will be used after the process completes, when + # it generates the underlying action log, creating `connection_bounced` logs + # on all hops within the bounce. + # If the process does not generate a log, this field may be ignored. 
field :bounce_id, Bounce.ID, default: nil diff --git a/lib/process/model/top/allocator.ex b/lib/process/model/top/allocator.ex index 341c9566..5d0c8ea5 100644 --- a/lib/process/model/top/allocator.ex +++ b/lib/process/model/top/allocator.ex @@ -203,10 +203,10 @@ defmodule Helix.Process.Model.TOP.Allocator do defp identify_origin(server_id, processes) do Enum.map(processes, fn process -> local? = - if process.target_id == server_id do - false - else + if process.gateway_id == server_id do true + else + false end %{process| local?: local?} diff --git a/lib/process/process.ex b/lib/process/process.ex index f435fd19..a8c7debe 100644 --- a/lib/process/process.ex +++ b/lib/process/process.ex @@ -35,7 +35,7 @@ defmodule Helix.Process do @doc """ Returns the process type. """ - def get_process_type, + def get_process_type(_, _), do: @process_type end diff --git a/lib/process/public/index.ex b/lib/process/public/index.ex index 34b975f5..0a8de791 100644 --- a/lib/process/public/index.ex +++ b/lib/process/public/index.ex @@ -5,7 +5,7 @@ defmodule Helix.Process.Public.Index do alias Helix.Process.Public.View.Process, as: ProcessView alias Helix.Process.Query.Process, as: ProcessQuery - @type index :: [map] + @type index :: [ProcessView.full_process | ProcessView.partial_process] @spec index(Server.id, Entity.id) :: index diff --git a/lib/process/public/view/helper.ex b/lib/process/public/view/helper.ex index 3c8c3c29..bbde3835 100644 --- a/lib/process/public/view/helper.ex +++ b/lib/process/public/view/helper.ex @@ -34,6 +34,7 @@ defmodule Helix.Process.Public.View.Process.Helper do Map.merge(common, %{access: partial}) end + def default_process_render(process, :full) do source_connection_id = process.src_connection_id && to_string(process.src_connection_id) @@ -46,7 +47,17 @@ defmodule Helix.Process.Public.View.Process.Helper do # OPTIMIZE: Possibly cache `origin_ip` and `target_ip` on the Process.t # It's used on several other places and must be queried every time it's # 
displayed. - origin_ip = ServerQuery.get_ip(process.gateway_id, process.network_id) + # TODO: Snippet below seems hacky and smelly + origin_ip = + if process.network_id do + ServerQuery.get_ip(process.gateway_id, process.network_id) || "Unknown" + else + if process.gateway_id == process.target_id do + "localhost" + else + "Unknown" + end + end full = %{ origin_ip: origin_ip, @@ -103,13 +114,13 @@ defmodule Helix.Process.Public.View.Process.Helper do """ defp build_file(nil, _), do: nil + defp build_file(file_id, :partial), + do: build_file_common(file_id) defp build_file(file_id, :full) do file_id |> build_file_common() |> Map.put(:id, to_string(file_id)) end - defp build_file(file_id, :partial), - do: build_file_common(file_id) docp """ It's possible that a file related to a process has been deleted and the @@ -160,6 +171,8 @@ defmodule Helix.Process.Public.View.Process.Helper do @spec get_target_ip(Process.t) :: String.t + defp get_target_ip(%Process{network_id: nil}), + do: "localhost" defp get_target_ip(process = %Process{}) do case CacheQuery.from_server_get_nips(process.target_id) do {:ok, nips} -> diff --git a/lib/server/websocket/channel/server.ex b/lib/server/websocket/channel/server.ex index 9351120d..68ba0fcc 100644 --- a/lib/server/websocket/channel/server.ex +++ b/lib/server/websocket/channel/server.ex @@ -13,6 +13,8 @@ channel Helix.Server.Websocket.Channel.Server do alias Helix.Server.State.Websocket.Channel, as: ServerWebsocketChannelState + alias Helix.Log.Websocket.Requests.Forge, as: LogForgeRequest + alias Helix.Network.Websocket.Requests.Browse, as: BrowseRequest alias Helix.Software.Websocket.Requests.Cracker.Bruteforce, @@ -137,6 +139,38 @@ channel Helix.Server.Websocket.Channel.Server do """ topic "config.check", ConfigCheckRequest + @doc """ + Starts a LogForgeProcess. When forging, the player may want to edit an + existing log, or create a brand new log. + + Params (create): + - *log_type: Type of the desired log revision. 
+ - *log_data: Data of the desired log revision. + - *action: Explicitly set action to "create". + + Params (edit): + - *log_id: ID of the log that will be edited. + - *log_type: Type of the desired log revision. + - *log_data: Data of the desired log revision. + - *action: Explicitly set action to "edit". + + Errors: + + Input validation: + - "bad_action" - Action is neither "edit" or "create". + - "bad_log_type" - The given `log_type` is not valid. + - "bad_log_data" - The given `log_data` is not valid for the `log_type`. + + Henforcer: + - "forger_not_found" - Player does not have a valid LogForger file. + - "log_not_found" (edit) - The given log ID was not found. + - "log_not_belongs" (edit) - Attempting to edit a log that does not belong to + the open channel. + + - base errors + """ + topic "log.forge", LogForgeRequest + @doc """ Updates the player's motherboard. May be used to attach, detach or update the mobo components. diff --git a/lib/server/websocket/channel/server/join.ex b/lib/server/websocket/channel/server/join.ex index 62508f90..351633fa 100644 --- a/lib/server/websocket/channel/server/join.ex +++ b/lib/server/websocket/channel/server/join.ex @@ -214,7 +214,6 @@ join Helix.Server.Websocket.Channel.Server.Join do socket = socket - |> assign.(:access, :local) |> assign.(:gateway, gateway_data) |> assign.(:destination, gateway_data) |> assign.(:meta, build_meta(request)) diff --git a/lib/server/websocket/requests/motherboard_update.ex b/lib/server/websocket/requests/motherboard_update.ex index f0812f21..5f37d005 100644 --- a/lib/server/websocket/requests/motherboard_update.ex +++ b/lib/server/websocket/requests/motherboard_update.ex @@ -20,7 +20,7 @@ request Helix.Server.Websocket.Requests.MotherboardUpdate do end end - def check_detach(request, socket) do + defp check_detach(request, socket) do with \ true <- socket.assigns.meta.access == :local || :bad_src do @@ -34,7 +34,7 @@ request Helix.Server.Websocket.Requests.MotherboardUpdate do end end - 
def check_update(request, socket) do + defp check_update(request, socket) do with \ true <- socket.assigns.meta.access == :local || :bad_src, {:ok, mobo_id} <- Component.ID.cast(request.unsafe["motherboard_id"]), diff --git a/lib/software/action/flow/file.ex b/lib/software/action/flow/file.ex index 6219c773..908304ba 100644 --- a/lib/software/action/flow/file.ex +++ b/lib/software/action/flow/file.ex @@ -33,7 +33,7 @@ defmodule Helix.Software.Action.Flow.File do @typep relay :: Event.relay - @spec execute_file(executable, Server.t, Server.t, params, meta, relay) :: + @spec execute_file(executable, Server.t, Server.t, params, meta | term, relay) :: {:ok, Process.t} | executable_errors | {:error, :not_executable} diff --git a/lib/software/process/cracker/bruteforce.ex b/lib/software/process/cracker/bruteforce.ex index 089afe24..00f849d6 100644 --- a/lib/software/process/cracker/bruteforce.ex +++ b/lib/software/process/cracker/bruteforce.ex @@ -7,6 +7,7 @@ process Helix.Software.Process.Cracker.Bruteforce do `target_id`). """ + alias Helix.Network.Model.Bounce alias Helix.Network.Model.Network alias Helix.Software.Model.File @@ -26,7 +27,9 @@ process Helix.Software.Process.Cracker.Bruteforce do @type executable_meta :: %{ - cracker: File.t + cracker: File.t, + network_id: Network.id, + bounce: Bounce.t | nil } @type objective :: %{cpu: resource_usage} diff --git a/lib/software/process/file/install.ex b/lib/software/process/file/install.ex index 6c9224c0..d723e9f5 100644 --- a/lib/software/process/file/install.ex +++ b/lib/software/process/file/install.ex @@ -8,6 +8,9 @@ process Helix.Software.Process.File.Install do the process finishes, as well as how much resources it should take, etc. 
""" + alias Helix.Network.Model.Connection + alias Helix.Network.Model.Network + alias Helix.Network.Model.Tunnel alias Helix.Software.Model.File alias __MODULE__, as: FileInstallProcess @@ -36,7 +39,9 @@ process Helix.Software.Process.File.Install do @type executable_meta :: %{ file: File.t, - type: process_type + network_id: Network.id, + bounce: Tunnel.bounce_id, + ssh: Connection.ssh } @type objective :: %{cpu: resource_usage} @@ -70,9 +75,9 @@ process Helix.Software.Process.File.Install do def get_backend(%File{}), do: :virus - @spec get_process_type(backend) :: + @spec get_process_type(creation_params, executable_meta) :: process_type - def get_process_type(:virus), + def get_process_type(%{backend: :virus}, _), do: :install_virus processable do @@ -125,7 +130,7 @@ process Helix.Software.Process.File.Install do end source_connection(_gateway, _target, _params, %{ssh: ssh}) do - ssh.connection_id + ssh end target_file(_gateway, _target, _params, %{file: file}) do diff --git a/lib/software/public/file.ex b/lib/software/public/file.ex index 613a5073..6dd0503e 100644 --- a/lib/software/public/file.ex +++ b/lib/software/public/file.ex @@ -80,7 +80,7 @@ defmodule Helix.Software.Public.File do gateway :: Server.t, target :: Server.t, target_nip :: {Network.id, Network.ip}, - term, + Tunnel.bounce, relay) :: {:ok, Process.t} @@ -126,18 +126,17 @@ defmodule Helix.Software.Public.File do Installs a generic file using FileInstallProcess with the given `backend`. 
""" def install(file = %File{}, gateway, target, backend, {tunnel, ssh}, relay) do - process_type = FileInstallProcess.get_process_type(backend) - params = %{backend: backend} meta = %{ file: file, - type: process_type, network_id: tunnel.network_id, bounce: tunnel.bounce_id, ssh: ssh } + process_type = FileInstallProcess.get_process_type(params, meta) + install_process = ProcessQuery.get_custom( process_type, gateway.server_id, %{tgt_file_id: file.file_id} diff --git a/lib/software/websocket/requests/file/install.ex b/lib/software/websocket/requests/file/install.ex index 60bc59d8..6ac22a6f 100644 --- a/lib/software/websocket/requests/file/install.ex +++ b/lib/software/websocket/requests/file/install.ex @@ -60,6 +60,7 @@ request Helix.Software.Websocket.Requests.File.Install do end end + # TODO: This does not support local installations def handle_request(request, socket) do file = request.meta.file gateway = request.meta.gateway diff --git a/test/event/loggable/flow_test.exs b/test/event/loggable/flow_test.exs index 76a0b41b..6c58cf5f 100644 --- a/test/event/loggable/flow_test.exs +++ b/test/event/loggable/flow_test.exs @@ -2,6 +2,8 @@ defmodule Helix.Event.Loggable.FlowTest do use Helix.Test.Case.Integration + import Helix.Test.Macros + alias Helix.Log.Query.Log, as: LogQuery alias Helix.Event.Loggable.Flow, as: LoggableFlow @@ -85,7 +87,7 @@ defmodule Helix.Event.Loggable.FlowTest do assert log.server_id == server.server_id assert log.revision.entity_id == entity.entity_id assert log.revision.type == log_type - assert log.revision.data == Map.from_struct(log_data) + assert_map_str log.revision.data, Map.from_struct(log_data) end test "performs a noop on empty list" do diff --git a/test/features/file/transfer_test.exs b/test/features/file/transfer_test.exs index 061cbe87..bfe502ec 100644 --- a/test/features/file/transfer_test.exs +++ b/test/features/file/transfer_test.exs @@ -37,9 +37,8 @@ defmodule Helix.Test.Features.File.TransferTest do socket: socket ) - 
account_id = AccountHelper.cast_from_entity(entity.entity_id) - # Connect to gateway channel too, so we can receive gateway publications + account_id = AccountHelper.cast_from_entity(entity.entity_id) ChannelSetup.join_server(socket: socket, own_server: true) # Connect to account channel, so we can receive notifications diff --git a/test/features/log/forge_test.exs b/test/features/log/forge_test.exs new file mode 100644 index 00000000..d7cb3088 --- /dev/null +++ b/test/features/log/forge_test.exs @@ -0,0 +1,445 @@ +defmodule Helix.Test.Features.Log.Forge do + + use Helix.Test.Case.Integration + + import Phoenix.ChannelTest + import Helix.Test.Case.ID + import Helix.Test.Channel.Macros + import Helix.Test.Macros + + alias Helix.Log.Model.Log + alias Helix.Log.Query.Log, as: LogQuery + alias Helix.Process.Model.Process + alias Helix.Process.Query.Process, as: ProcessQuery + + alias Helix.Test.Account.Helper, as: AccountHelper + alias Helix.Test.Channel.Setup, as: ChannelSetup + alias Helix.Test.Channel.Request.Helper, as: RequestHelper + alias Helix.Test.Log.Helper, as: LogHelper + alias Helix.Test.Log.Setup, as: LogSetup + alias Helix.Test.Network.Helper, as: NetworkHelper + alias Helix.Test.Process.TOPHelper + alias Helix.Test.Software.Setup, as: SoftwareSetup + + @internet_id NetworkHelper.internet_id() + + @moduletag :feature + + describe "log forge" do + test "LogForge.Create life cycle (local)" do + {socket, %{entity: entity, server: gateway}} = + ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, _} = + ChannelSetup.join_server( + gateway_id: gateway.server_id, own_server: true, socket: socket + ) + + # Connect to account channel, so we can receive notifications + account_id = AccountHelper.cast_from_entity(entity.entity_id) + ChannelSetup.join_account(socket: socket, account_id: account_id) + + # Prepare request params + log_info = {log_type, log_data} = LogHelper.log_info() + {req_log_type, req_log_data} = request_log_info(log_info) + 
request_id = RequestHelper.id() + + params = + %{ + "action" => "create", + "log_type" => req_log_type, + "log_data" => req_log_data, + "request_id" => request_id + } + + # We'll attempt to create a log at localhost. This should fail because we + # do not have a LogForger! + ref = push socket, "log.forge", params + assert_reply ref, :error, response, timeout(:fast) + + assert response.data.message == "forger_not_found" + assert response.meta.request_id == request_id + + # Let's create the forger and try again... + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + # It worked! + ref = push socket, "log.forge", params + assert_reply ref, :ok, _, timeout(:slow) + + [process_created_event] = wait_events [:process_created] + + assert process_created_event.data.type == "log_forge_create" + assert process_created_event.meta.request_id == request_id + + process = + process_created_event.data.process_id + |> Process.ID.cast!() + |> ProcessQuery.fetch() + + # Make sure the process was created correctly + assert process.type == :log_forge_create + assert process.data.forger_version == forger.modules.log_create.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, log_data + + assert process.gateway_id == gateway.server_id + assert process.target_id == gateway.server_id + assert process.source_entity_id == entity.entity_id + assert process.src_file_id == forger.file_id + + # local process; no connection info + refute process.network_id + refute process.src_connection_id + refute process.bounce_id + + # Simulate completion of the software + TOPHelper.force_completion(process) + + [log_created_event, notification_added_event] = + wait_events [:log_created, :notification_added] + + # Local server receives information about the newly created log + assert log_created_event.data.type == to_string(log_type) + assert_map_str log_created_event.data.data, log_data + + # The newly created log is sitting there at the server + log_id 
= log_created_event.data.log_id |> Log.ID.cast!() + log = LogQuery.fetch(log_id) + + assert log.revision_id == 1 + assert log.server_id == gateway.server_id + assert log.revision.entity_id == entity.entity_id + assert log.revision.forge_version == forger.modules.log_create.version + assert log.revision.type == log_type + assert_map_str log.revision.data, log_data + + # Client received the log notification + assert notification_added_event.data.class == :server + assert notification_added_event.data.code == :log_created + assert notification_added_event.data.data.log_id == to_string(log.log_id) + + TOPHelper.top_stop(gateway) + end + + test "LogForge.Create life cycle (remote)" do + {socket, %{entity: entity, server: gateway}} = + ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, %{gateway: gateway, destination: destination}} = + ChannelSetup.join_server(gateway_id: gateway.server_id, socket: socket) + + # Connect to account channel, so we can receive notifications + account_id = AccountHelper.cast_from_entity(entity.entity_id) + ChannelSetup.join_account(socket: socket, account_id: account_id) + + # Prepare request params + log_info = {log_type, log_data} = LogHelper.log_info() + {req_log_type, req_log_data} = request_log_info(log_info) + request_id = RequestHelper.id() + + # Prepare required stuff + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + params = + %{ + "action" => "create", + "log_type" => req_log_type, + "log_data" => req_log_data, + "request_id" => request_id + } + + # It worked! 
+ ref = push socket, "log.forge", params + assert_reply ref, :ok, _, timeout(:slow) + + [process_created_event] = wait_events [:process_created] + + assert process_created_event.data.type == "log_forge_create" + assert process_created_event.meta.request_id == request_id + + process = + process_created_event.data.process_id + |> Process.ID.cast!() + |> ProcessQuery.fetch() + + # Make sure the process was created correctly + assert process.type == :log_forge_create + assert process.data.forger_version == forger.modules.log_create.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, log_data + + assert process.gateway_id == gateway.server_id + assert process.target_id == destination.server_id + assert process.source_entity_id == entity.entity_id + assert process.src_file_id == forger.file_id + + # remote process; has (some) connection info + assert process.network_id == @internet_id + assert process.src_connection_id == socket.assigns.ssh.connection_id + + # While there may exist a bounce (on the SSH connection), we can safely + # ignore it, as creating a log won't generate another log, and the bounce + # information on the process is only used for log generation. 
+ refute process.bounce_id + + # Simulate completion of the software + TOPHelper.force_completion(process) + + [log_created_event, notification_added_event] = + wait_events [:log_created, :notification_added] + + # Local server receives information about the newly created log + assert log_created_event.data.type == to_string(log_type) + assert_map_str log_created_event.data.data, log_data + + # The newly created log is sitting there at the server + log_id = log_created_event.data.log_id |> Log.ID.cast!() + log = LogQuery.fetch(log_id) + + assert log.revision_id == 1 + assert log.server_id == destination.server_id + assert log.revision.entity_id == entity.entity_id + assert log.revision.forge_version == forger.modules.log_create.version + assert log.revision.type == log_type + assert_map_str log.revision.data, log_data + + # Client received the log notification + assert notification_added_event.data.class == :server + assert notification_added_event.data.code == :log_created + assert notification_added_event.data.data.log_id == to_string(log.log_id) + + TOPHelper.top_stop(gateway) + end + + test "LogForge.Edit life cycle (local)" do + {socket, %{entity: entity, server: gateway}} = + ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, _} = + ChannelSetup.join_server( + gateway_id: gateway.server_id, own_server: true, socket: socket + ) + + # Connect to account channel, so we can receive notifications + account_id = AccountHelper.cast_from_entity(entity.entity_id) + ChannelSetup.join_account(socket: socket, account_id: account_id) + + # Prepare request params + log_info = {log_type, log_data} = LogHelper.log_info() + {req_log_type, req_log_data} = request_log_info(log_info) + request_id = RequestHelper.id() + + # Prepare required stuff + old_log = LogSetup.log!(server_id: gateway.server_id) + + params = + %{ + "action" => "edit", + "log_id" => to_string(old_log.log_id), + "log_type" => req_log_type, + "log_data" => req_log_data, + "request_id" => 
request_id + } + + # We'll attempt to edit a log at localhost. This should fail because we do + # not have a LogForger! + ref = push socket, "log.forge", params + assert_reply ref, :error, response, timeout(:fast) + + assert response.data.message == "forger_not_found" + assert response.meta.request_id == request_id + + # Let's create the forger and try again... + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + # It worked! + ref = push socket, "log.forge", params + assert_reply ref, :ok, _, timeout(:slow) + + [process_created_event] = wait_events [:process_created] + + assert process_created_event.data.type == "log_forge_edit" + assert process_created_event.meta.request_id == request_id + + process = + process_created_event.data.process_id + |> Process.ID.cast!() + |> ProcessQuery.fetch() + + # Make sure the process was created correctly + assert process.type == :log_forge_edit + assert process.data.forger_version == forger.modules.log_edit.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, log_data + + assert process.gateway_id == gateway.server_id + assert process.target_id == gateway.server_id + assert process.source_entity_id == entity.entity_id + assert process.src_file_id == forger.file_id + assert process.tgt_log_id == old_log.log_id + + # local process; no connection info + refute process.network_id + refute process.src_connection_id + refute process.bounce_id + + # Simulate completion of the software + TOPHelper.force_completion(process) + + [log_revised_event, notification_added_event] = + wait_events [:log_revised, :notification_added] + + # Local server receives information about the newly revised log + assert log_revised_event.data.type == to_string(log_type) + assert_map_str log_revised_event.data.data, log_data + + # The newly revised log is sitting there at the server + new_log = LogQuery.fetch(old_log.log_id) + + assert new_log.revision_id == 2 + assert new_log.server_id == 
gateway.server_id + assert new_log.revision.entity_id == entity.entity_id + assert new_log.revision.forge_version == forger.modules.log_edit.version + assert new_log.revision.type == log_type + assert_map_str new_log.revision.data, log_data + + # Client received the log notification + assert notification_added_event.data.class == :server + assert notification_added_event.data.code == :log_revised + assert_id notification_added_event.data.data.log_id, new_log.log_id + + TOPHelper.top_stop(gateway) + end + + test "LogForge.Edit life cycle (remote)" do + {socket, %{entity: entity, server: gateway}} = + ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, %{gateway: gateway, destination: destination}} = + ChannelSetup.join_server( + gateway_id: gateway.server_id, socket: socket + ) + + # Connect to account channel, so we can receive notifications + account_id = AccountHelper.cast_from_entity(entity.entity_id) + ChannelSetup.join_account(socket: socket, account_id: account_id) + + # Prepare request params + log_info = {log_type, log_data} = LogHelper.log_info() + {req_log_type, req_log_data} = request_log_info(log_info) + request_id = RequestHelper.id() + + # Prepare required stuff + old_log = LogSetup.log!(server_id: destination.server_id) + + params = + %{ + "action" => "edit", + "log_id" => to_string(old_log.log_id), + "log_type" => req_log_type, + "log_data" => req_log_data, + "request_id" => request_id + } + + # We'll attempt to edit a log at localhost. This should fail because we do + # not have a LogForger! + ref = push socket, "log.forge", params + assert_reply ref, :error, response, timeout(:fast) + + assert response.data.message == "forger_not_found" + assert response.meta.request_id == request_id + + # Let's create the forger and try again... + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + # Now we'll attempt to edit a log that belongs to another server! 
+ bad_log = LogSetup.log!() + + bad_params = %{params| "log_id" => to_string(bad_log.log_id)} + + ref = push socket, "log.forge", bad_params + assert_reply ref, :error, response, timeout(:fast) + + assert response.data.message == "log_not_belongs" + + # Let's try again, now with the valid log and with a forger + ref = push socket, "log.forge", params + assert_reply ref, :ok, _, timeout(:slow) + + # It worked! + [process_created_event] = wait_events [:process_created] + + assert process_created_event.data.type == "log_forge_edit" + assert process_created_event.meta.request_id == request_id + + process = + process_created_event.data.process_id + |> Process.ID.cast!() + |> ProcessQuery.fetch() + + # Make sure the process was created correctly + assert process.type == :log_forge_edit + assert process.data.forger_version == forger.modules.log_edit.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, log_data + + assert process.gateway_id == gateway.server_id + assert process.target_id == destination.server_id + assert process.source_entity_id == entity.entity_id + assert process.src_file_id == forger.file_id + assert process.tgt_log_id == old_log.log_id + + # local process; no connection info + assert process.network_id == @internet_id + assert process.src_connection_id == socket.assigns.ssh.connection_id + + # See remark on test `LogForge.create (remote)` + refute process.bounce_id + + # Simulate completion of the software + TOPHelper.force_completion(process) + + [log_revised_event, notification_added_event] = + wait_events [:log_revised, :notification_added] + + # Local server receives information about the newly revised log + assert log_revised_event.data.type == to_string(log_type) + assert_map_str log_revised_event.data.data, log_data + + # The newly revised log is sitting there at the server + new_log = LogQuery.fetch(old_log.log_id) + + assert new_log.revision_id == 2 + assert new_log.server_id == destination.server_id + 
assert new_log.revision.entity_id == entity.entity_id + assert new_log.revision.forge_version == forger.modules.log_edit.version + assert new_log.revision.type == log_type + assert_map_str new_log.revision.data, log_data + + # Client received the log notification + assert notification_added_event.data.class == :server + assert notification_added_event.data.code == :log_revised + assert_id notification_added_event.data.data.log_id, new_log.log_id + + TOPHelper.top_stop(gateway) + end + + defp request_log_info({log_type, log_data}) do + # Phoenix input has this format: %{"map" => "string"} + stringified_log_data = + log_data + |> Map.from_struct() + |> Enum.reduce([], fn {k, v}, acc -> + [{to_string(k), to_string(v)} | acc] + end) + |> Enum.into(%{}) + + {to_string(log_type), stringified_log_data} + end + end +end diff --git a/test/log/event/handler/log_test.exs b/test/log/event/handler/log_test.exs index 69164799..ce40b9d7 100644 --- a/test/log/event/handler/log_test.exs +++ b/test/log/event/handler/log_test.exs @@ -8,14 +8,11 @@ defmodule Helix.Log.Event.Handler.LogTest do alias Helix.Event alias Helix.Log.Event.Handler.Log, as: LogHandler alias Helix.Log.Query.Log, as: LogQuery - alias Helix.Log.Repo alias Helix.Test.Event.Setup, as: EventSetup - alias Helix.Test.Entity.Setup, as: EntitySetup alias Helix.Test.Network.Setup, as: NetworkSetup alias Helix.Test.Process.Setup, as: ProcessSetup alias Helix.Test.Server.Helper, as: ServerHelper - alias Helix.Test.Server.Setup, as: ServerSetup alias Helix.Test.Log.Helper, as: LogHelper alias Helix.Test.Log.Setup, as: LogSetup diff --git a/test/log/websocket/requests/forge_test.exs b/test/log/websocket/requests/forge_test.exs new file mode 100644 index 00000000..3c7b9ce6 --- /dev/null +++ b/test/log/websocket/requests/forge_test.exs @@ -0,0 +1,424 @@ +defmodule Helix.Log.Websocket.Requests.ForgeTest do + + use Helix.Test.Case.Integration + + import Helix.Test.Macros + + alias Helix.Websocket.Requestable + alias 
Helix.Process.Query.Process, as: ProcessQuery + alias Helix.Log.Websocket.Requests.Forge, as: LogForgeRequest + + alias Helix.Test.Channel.Request.Helper, as: RequestHelper + alias Helix.Test.Channel.Setup, as: ChannelSetup + alias Helix.Test.Process.TOPHelper + alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Server.Setup, as: ServerSetup + alias Helix.Test.Software.Setup, as: SoftwareSetup + alias Helix.Test.Log.Helper, as: LogHelper + alias Helix.Test.Log.Setup, as: LogSetup + + @mock_socket ChannelSetup.mock_server_socket() + + describe "LogForgeRequest.check_params/2" do + test "validates expected data (create)" do + log_info = LogHelper.log_info() + {req_log_type, req_log_data} = request_log_info(log_info) + + params = %{ + "action" => "create", + "log_type" => req_log_type, + "log_data" => req_log_data + } + + request = LogForgeRequest.new(params) + + assert {:ok, request} = Requestable.check_params(request, @mock_socket) + + assert request.params.action == :create + assert request.params.log_info == log_info + end + + test "validates expected data (edit)" do + log_id = LogHelper.id() + log_info = LogHelper.log_info() + {req_log_type, req_log_data} = request_log_info(log_info) + + params = %{ + "action" => "edit", + "log_id" => to_string(log_id), + "log_type" => req_log_type, + "log_data" => req_log_data + } + + request = LogForgeRequest.new(params) + + assert {:ok, request} = Requestable.check_params(request, @mock_socket) + + assert request.params.action == :edit + assert request.params.log_id == log_id + assert request.params.log_info == log_info + end + + test "rejects when log_info is invalid" do + p_base = %{"action" => "create"} + + p0 = + %{ + "log_type" => "invalid_type", + "log_data" => %{} + } |> Map.merge(p_base) + p1 = + %{ + "log_type" => "error", + "log_data" => "string" + } |> Map.merge(p_base) + + # missing entries + p2 = + %{ + "log_type" => "connection_bounced", + "log_data" => %{"ip_prev" => "1.2.3.4", "network_id" => 
"::"} + } |> Map.merge(p_base) + + # invalid data type + p3 = + %{ + "log_type" => "connection_bounced", + "log_data" => nil + } |> Map.merge(p_base) + + # missing `log_data` + p4 = + %{ + "log_type" => "connection_bounced", + } |> Map.merge(p_base) + + r0 = LogForgeRequest.new(p0) + r1 = LogForgeRequest.new(p1) + r2 = LogForgeRequest.new(p2) + r3 = LogForgeRequest.new(p3) + r4 = LogForgeRequest.new(p4) + + assert {:error, data0, _} = Requestable.check_params(r0, @mock_socket) + assert {:error, data1, _} = Requestable.check_params(r1, @mock_socket) + assert {:error, data2, _} = Requestable.check_params(r2, @mock_socket) + assert {:error, data3, _} = Requestable.check_params(r3, @mock_socket) + assert {:error, data4, _} = Requestable.check_params(r4, @mock_socket) + + assert data0.message == "bad_log_type" + assert data0 == data1 + + assert data2.message == "bad_log_data" + assert data3 == data2 + assert data4 == data3 + end + + test "rejects when `log_id` is missing or invalid" do + {req_log_type, req_log_data} = request_log_info() + + p_base = %{ + "action" => "edit", + "log_type" => req_log_type, + "log_data" => req_log_data + } + + p_invalid = %{"log_id" => "w00t"} |> Map.merge(p_base) + p_missing = p_base + + r_invalid = LogForgeRequest.new(p_invalid) + r_missing = LogForgeRequest.new(p_missing) + + assert {:error, data1, _} = + Requestable.check_params(r_missing, @mock_socket) + assert {:error, data2, _} = + Requestable.check_params(r_invalid, @mock_socket) + + assert data1.message == "bad_request" + assert data2 == data1 + end + + test "rejects when action is missing or invalid" do + {req_log_type, req_log_data} = request_log_info() + + p_base = %{ + "log_type" => req_log_type, + "log_data" => req_log_data + } + + p_invalid = %{"action" => "asdf"} |> Map.merge(p_base) + p_missing = p_base + + r_invalid = LogForgeRequest.new(p_invalid) + r_missing = LogForgeRequest.new(p_missing) + + assert {:error, data1, _} = + Requestable.check_params(r_missing, 
@mock_socket) + assert {:error, data2, _} = + Requestable.check_params(r_invalid, @mock_socket) + + assert data1.message == "bad_action" + assert data2 == data1 + end + end + + describe "LogForgeRequest.check_permissions/2" do + test "accepts when everything is OK" do + gateway = ServerSetup.server!() + target_id = ServerHelper.id() + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + {req_log_type, req_log_data} = request_log_info() + + params = + %{ + "action" => Enum.random(["create", "edit"]), + "log_type" => req_log_type, + "log_data" => req_log_data + } + + params = + if params["action"] == "edit" do + log = LogSetup.log!(server_id: target_id) + Map.put(params, "log_id", to_string(log.log_id)) + else + params + end + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, destination_id: target_id + ) + + request = LogForgeRequest.new(params) + + {:ok, request} = Requestable.check_params(request, socket) + assert {:ok, request} = Requestable.check_permissions(request, socket) + + assert request.meta.forger == forger + assert request.meta.gateway == gateway + + if request.params.action == :edit do + assert to_string(request.meta.log.log_id) == params["log_id"] + assert request.meta.log.server_id == target_id + end + end + + test "rejects when player does not have a forger" do + gateway = ServerSetup.server!() + target_id = ServerHelper.id() + + {req_log_type, req_log_data} = request_log_info() + + params = + %{ + "action" => Enum.random(["create", "edit"]), + "log_type" => req_log_type, + "log_data" => req_log_data + } + + params = + if params["action"] == "edit" do + log = LogSetup.log!(server_id: target_id) + Map.put(params, "log_id", to_string(log.log_id)) + else + params + end + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, destination_id: target_id + ) + + request = LogForgeRequest.new(params) + + {:ok, request} = Requestable.check_params(request, socket) + assert {:error, reason, 
_} = + Requestable.check_permissions(request, socket) + + assert reason.message == "forger_not_found" + end + + test "rejects when attempting to edit another server's log (edit)" do + gateway = ServerSetup.server!() + + {req_log_type, req_log_data} = request_log_info() + + log = LogSetup.log!() + + params = + %{ + "action" => "edit", + "log_id" => to_string(log.log_id), + "log_type" => req_log_type, + "log_data" => req_log_data + } + + # socket's `destination` is different from `log.server_id` + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, destination_id: ServerHelper.id() + ) + + request = LogForgeRequest.new(params) + + {:ok, request} = Requestable.check_params(request, socket) + assert {:error, reason, _} = + Requestable.check_permissions(request, socket) + + assert reason.message == "log_not_belongs" + end + end + + describe "handle_request/2" do + test "starts the process (create, local)" do + log_info = {log_type, log_data} = LogHelper.log_info() + + gateway = ServerSetup.server!() + + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, own_server: true + ) + + params = %{log_info: log_info, action: :create} + meta = %{forger: forger, gateway: gateway} + + request = RequestHelper.mock_request(LogForgeRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_forge_create + assert process.gateway_id == process.target_id + assert process.src_file_id == forger.file_id + refute process.src_connection_id + assert process.data.forger_version == forger.modules.log_create.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, Map.from_struct(log_data) + + TOPHelper.top_stop(gateway) + end + + test "starts the process (create, remote)" do + log_info = {log_type, 
log_data} = LogHelper.log_info() + + gateway = ServerSetup.server!() + target = ServerSetup.server!() + + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, + destination_id: target.server_id, + real_connection?: true + ) + + params = %{log_info: log_info, action: :create} + meta = %{forger: forger, gateway: gateway} + + request = RequestHelper.mock_request(LogForgeRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_forge_create + assert process.target_id == target.server_id + assert process.src_file_id == forger.file_id + assert process.src_connection_id == socket.assigns.ssh.connection_id + assert process.data.forger_version == forger.modules.log_create.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, Map.from_struct(log_data) + + TOPHelper.top_stop(gateway) + end + + test "starts the process (edit, local)" do + log_info = {log_type, log_data} = LogHelper.log_info() + + gateway = ServerSetup.server!() + + log = LogSetup.log!(server_id: gateway.server_id) + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, own_server: true + ) + + params = %{log_info: log_info, action: :edit} + meta = %{log: log, forger: forger, gateway: gateway} + + request = RequestHelper.mock_request(LogForgeRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_forge_edit + assert process.gateway_id == process.target_id + assert process.src_file_id == forger.file_id + refute process.src_connection_id + assert process.data.forger_version == 
forger.modules.log_edit.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, Map.from_struct(log_data) + + TOPHelper.top_stop(gateway) + end + + test "starts the process (edit, remote)" do + log_info = {log_type, log_data} = LogHelper.log_info() + + gateway = ServerSetup.server!() + target = ServerSetup.server!() + + log = LogSetup.log!(server_id: gateway.server_id) + forger = SoftwareSetup.log_forger!(server_id: gateway.server_id) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, + destination_id: target.server_id, + real_connection?: true + ) + + params = %{log_info: log_info, action: :edit} + meta = %{log: log, forger: forger, gateway: gateway} + + request = RequestHelper.mock_request(LogForgeRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_forge_edit + assert process.target_id == target.server_id + assert process.src_file_id == forger.file_id + assert process.src_connection_id == socket.assigns.ssh.connection_id + assert process.data.forger_version == forger.modules.log_edit.version + assert process.data.log_type == log_type + assert_map_str process.data.log_data, Map.from_struct(log_data) + + TOPHelper.top_stop(gateway) + end + end + + defp request_log_info, + do: LogHelper.log_info() |> request_log_info() + defp request_log_info({log_type, log_data}) do + # Phoenix input has this format: %{"map" => "string"} + stringified_log_data = + log_data + |> Map.from_struct() + |> Enum.reduce([], fn {k, v}, acc -> + [{to_string(k), to_string(v)} | acc] + end) + |> Enum.into(%{}) + + {to_string(log_type), stringified_log_data} + end +end diff --git a/test/support/channel/setup.ex b/test/support/channel/setup.ex index 3be71c04..8350ba9f 100644 --- a/test/support/channel/setup.ex +++ b/test/support/channel/setup.ex @@ -19,6 +19,7 @@ defmodule 
Helix.Test.Channel.Setup do alias Helix.Test.Cache.Helper, as: CacheHelper alias Helix.Test.Entity.Helper, as: EntityHelper alias Helix.Test.Network.Helper, as: NetworkHelper + alias Helix.Test.Network.Setup, as: NetworkSetup alias Helix.Test.Server.Helper, as: ServerHelper alias Helix.Test.Server.Setup, as: ServerSetup alias Helix.Test.Software.Setup, as: SoftwareSetup @@ -311,6 +312,8 @@ defmodule Helix.Test.Channel.Setup do - own_server: Force socket to represent own server channel. Defaults to false. - counter: Defaults to 0. - connect_opts: Opts that will be relayed to the `mock_connection_socket` + - real_connection?: Whether to create the underlying SSH connection (and + tunnel). Defaults to false. """ def mock_server_socket(opts \\ []) do gateway_id = Access.get(opts, :gateway_id, ServerHelper.id()) @@ -348,6 +351,26 @@ defmodule Helix.Test.Channel.Setup do counter: counter } + if not is_nil(opts[:real_connection?]) and access == :local, + do: raise "Can't create SSH connection on :local socket" + + {ssh, tunnel} = + if opts[:real_connection?] 
do + tunnel_opts = + [ + gateway_id: gateway_id, + target_id: destination_id, + fake_servers: true + ] + + {connection, %{tunnel: tunnel}} = + NetworkSetup.connection(type: :ssh, tunnel_opts: tunnel_opts) + + {connection, tunnel} + else + {nil, nil} + end + server_assigns = %{ gateway: %{ server_id: gateway_id, @@ -359,7 +382,9 @@ defmodule Helix.Test.Channel.Setup do ip: destination_ip, entity_id: destination_entity_id }, - meta: meta + meta: meta, + ssh: ssh, + tunnel: tunnel } assigns = diff --git a/test/support/log/helper.ex b/test/support/log/helper.ex index 7c3f87e9..fc730ff7 100644 --- a/test/support/log/helper.ex +++ b/test/support/log/helper.ex @@ -5,7 +5,6 @@ defmodule Helix.Test.Log.Helper do alias Helix.Event.Loggable.Utils, as: LoggableUtils alias Helix.Server.Model.Server alias Helix.Log.Model.Log - alias Helix.Log.Model.LogType alias Helix.Log.Model.Revision alias Helix.Log.Query.Log, as: LogQuery alias Helix.Log.Repo, as: LogRepo diff --git a/test/support/log/setup.ex b/test/support/log/setup.ex index 76c7dcfb..886fae07 100644 --- a/test/support/log/setup.ex +++ b/test/support/log/setup.ex @@ -1,7 +1,6 @@ defmodule Helix.Test.Log.Setup do alias Ecto.Changeset - alias Helix.Server.Query.Server, as: ServerQuery alias Helix.Entity.Query.Entity, as: EntityQuery alias Helix.Log.Model.Log alias Helix.Log.Internal.Log, as: LogInternal @@ -9,6 +8,7 @@ defmodule Helix.Test.Log.Setup do alias Helix.Test.Entity.Helper, as: EntityHelper alias Helix.Test.Entity.Setup, as: EntitySetup + alias Helix.Test.Server.Helper, as: ServerHelper alias Helix.Test.Server.Setup, as: ServerSetup alias Helix.Test.Software.Helper, as: SoftwareHelper alias Helix.Test.Log.Helper, as: LogHelper @@ -63,9 +63,10 @@ defmodule Helix.Test.Log.Setup do """ def fake_log(opts \\ []) do # Makes credo happy... 
- {server, entity_id, {type, data}, forge_version} = fake_log_get_data(opts) + {server_id, entity_id, {type, data}, forge_version} = + fake_log_get_data(opts) - params = %{server_id: server.server_id} + params = %{server_id: server_id} revision_params = %{ entity_id: entity_id, @@ -81,7 +82,6 @@ defmodule Helix.Test.Log.Setup do related = %{ params: params, revision_params: revision_params, - server: server, entity_id: entity_id, type: type, data: data, @@ -93,24 +93,24 @@ defmodule Helix.Test.Log.Setup do end defp fake_log_get_data(opts) do - {server, server_owner} = + {server_id, server_owner} = cond do # User asked for fake server opts[:real_server] -> {server, %{entity: entity}} = ServerSetup.server() - {server, entity} + {server.server_id, entity} # User specified a server_id (must exist on the DB) opts[:server_id] -> - server = ServerQuery.fetch(opts[:server_id]) - entity = EntityQuery.fetch_by_server(opts[:server_id]) + entity_id = + EntityQuery.fetch_by_server(opts[:server_id]) + || EntityHelper.id() - {server, entity} + {opts[:server_id], entity_id} # All else: generate a real server true -> - {server, _} = ServerSetup.fake_server() - {server, nil} + {ServerHelper.id(), nil} end entity_id = @@ -135,6 +135,6 @@ defmodule Helix.Test.Log.Setup do log_info = LogHelper.log_info(opts) forge_version = Keyword.get(opts, :forge_version, nil) - {server, entity_id, log_info, forge_version} + {server_id, entity_id, log_info, forge_version} end end diff --git a/test/support/macros.ex b/test/support/macros.ex index 4320670d..e3c3057b 100644 --- a/test/support/macros.ex +++ b/test/support/macros.ex @@ -24,12 +24,22 @@ defmodule Helix.Test.Macros do Example: - %{foo: :bar} == %{"foo" => "bar"} # True + %Struct{foo: :bar} == %{"foo" => "bar"} # True """ defmacro assert_map_str(a, b) do quote do - a = unquote(a) |> Utils.stringify_map() |> MapUtils.atomize_keys() - b = unquote(b) |> Utils.stringify_map() |> MapUtils.atomize_keys() + cast_map = fn map -> + if 
Map.has_key?(map, :__struct__) do + Map.from_struct(map) + else + map + end + |> Utils.stringify_map() + |> MapUtils.atomize_keys() + end + + a = cast_map.(unquote(a)) + b = cast_map.(unquote(b)) assert a == b end diff --git a/test/support/process/setup/data.ex b/test/support/process/setup/data.ex index 60410d7d..6584079d 100644 --- a/test/support/process/setup/data.ex +++ b/test/support/process/setup/data.ex @@ -221,8 +221,6 @@ defmodule Helix.Test.Process.Data.Setup do do: custom_log_forge({:log_forge_create, :create}, data_opts, meta) defp custom_log_forge({process_type, action}, data_opts, meta) do - target_id = meta.target_id - entity_id = meta.source_entity_id version = Keyword.get(data_opts, :forger_version, 100) src_file_id = meta.src_file_id || SoftwareHelper.id() {log_type, log_data} = From e2f508935873e87001e98e53894a30c431838ef9 Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Thu, 16 Aug 2018 08:03:32 -0300 Subject: [PATCH 08/14] Add LogRecoverProcess --- lib/event/dispatcher.ex | 7 +- lib/hell/hack.ex | 1 + lib/id/domain.ex | 3 + lib/log/event/handler/log.ex | 36 +- lib/log/event/recover.ex | 56 +++ lib/log/factor/log.ex | 16 +- lib/log/model/log.ex | 22 +- lib/log/process/forge.ex | 18 +- lib/log/process/recover.ex | 329 ++++++++++++++++++ lib/process/action/flow/process.ex | 2 +- lib/process/action/process.ex | 16 + lib/process/action/top.ex | 2 +- lib/process/event/handler/process.ex | 16 + lib/process/executable.ex | 124 ++++--- lib/process/internal/process.ex | 24 +- lib/process/model/process.ex | 46 +++ lib/process/model/processable.ex | 25 ++ lib/process/processable.ex | 34 +- .../software_type/firewall/process_type.ex | 4 + lib/software/process/cracker/bruteforce.ex | 8 +- lib/software/process/cracker/overflow.ex | 12 +- lib/software/process/file/install.ex | 8 +- lib/software/process/file/transfer.ex | 8 +- lib/software/process/virus/collect.ex | 12 +- .../process/bank/account/password_reveal.ex | 4 +- 
lib/universe/bank/process/bank/transfer.ex | 6 +- test/log/process/recover_test.exs | 216 ++++++++++++ test/support/log/setup.ex | 3 + test/support/process/fake_process.ex | 8 +- test/support/software/setup.ex | 22 +- 30 files changed, 979 insertions(+), 109 deletions(-) create mode 100644 lib/log/event/recover.ex create mode 100644 test/log/process/recover_test.exs diff --git a/lib/event/dispatcher.ex b/lib/event/dispatcher.ex index 2638befa..960d0fa6 100644 --- a/lib/event/dispatcher.ex +++ b/lib/event/dispatcher.ex @@ -145,6 +145,7 @@ defmodule Helix.Event.Dispatcher do # All event LogEvent.Forge.Processed + event LogEvent.Recover.Processed event LogEvent.Log.Created event LogEvent.Log.Deleted event LogEvent.Log.Revised @@ -152,7 +153,11 @@ defmodule Helix.Event.Dispatcher do # Custom handlers event LogEvent.Forge.Processed, LogHandler.Log, - :log_forge_processed + :forge_processed + + event LogEvent.Recover.Processed, + LogHandler.Log, + :recover_processed ############################################################################## # Process events diff --git a/lib/hell/hack.ex b/lib/hell/hack.ex index 4bfbef69..e8d57b56 100644 --- a/lib/hell/hack.ex +++ b/lib/hell/hack.ex @@ -50,6 +50,7 @@ defmodule HELL.Hack.Experience do "Elixir.Helix.Process.Model.Processable" => [ {:kill, 3}, {:complete, 2}, + {:retarget, 2}, {:source_connection_closed, 3}, {:target_connection_closed, 3}, {:after_read_hook, 1} diff --git a/lib/id/domain.ex b/lib/id/domain.ex index d7c25554..8b67acd0 100644 --- a/lib/id/domain.ex +++ b/lib/id/domain.ex @@ -30,6 +30,7 @@ defmodule Helix.ID.Domain do wire_transfer: 63 bank_reveal_pass: 73 log_forge: 83 + log_recover: 93 reserved_until: F3 reserved_until: F4 @@ -159,6 +160,8 @@ defmodule Helix.ID.Domain do {{:process, :bank_reveal_password}, 0x73}, {{:process, :log_forge_create}, 0x83}, {{:process, :log_forge_edit}, 0x83}, + {{:process, :log_recover_global}, 0x93}, + {{:process, :log_recover_custom}, 0x93}, {{:server, :desktop}, 0x05}, 
{{:server, :mobile}, 0x15}, {{:server, :npc}, 0x25}, diff --git a/lib/log/event/handler/log.ex b/lib/log/event/handler/log.ex index e726cd86..97e94510 100644 --- a/lib/log/event/handler/log.ex +++ b/lib/log/event/handler/log.ex @@ -3,11 +3,13 @@ defmodule Helix.Log.Event.Handler.Log do alias Helix.Event alias Helix.Event.Loggable + alias Helix.Process.Action.Flow.Process, as: ProcessFlow alias Helix.Log.Action.Log, as: LogAction alias Helix.Log.Model.Log alias Helix.Log.Query.Log, as: LogQuery alias Helix.Log.Event.Forge.Processed, as: LogForgeProcessedEvent + alias Helix.Log.Event.Recover.Processed, as: LogRecoverProcessedEvent @doc """ Generic event handler for all Helix events. If the event implement the @@ -31,7 +33,7 @@ defmodule Helix.Log.Event.Handler.Log do Emits: `LogCreatedEvent`, `LogRevisedEvent` """ - def log_forge_processed(event = %LogForgeProcessedEvent{action: :create}) do + def forge_processed(event = %LogForgeProcessedEvent{action: :create}) do # `action` is `:create`, so we'll create a new log out of thin air! result = LogAction.create( @@ -43,7 +45,7 @@ defmodule Helix.Log.Event.Handler.Log do end end - def log_forge_processed(event = %LogForgeProcessedEvent{action: :edit}) do + def forge_processed(event = %LogForgeProcessedEvent{action: :edit}) do # `action` is `:edit`, so we'll stack up a revision on an existing log revise = fn log -> LogAction.revise( @@ -58,4 +60,34 @@ defmodule Helix.Log.Event.Handler.Log do Event.emit(events) end end + + @doc """ + Handler called right after a `LogRecoverProcess` has completed. We check + whether the log it was working on (if any) has any revisions we can pop out of + the stack. + + If the `target_log_id` is nil, it means the process have been working on a + log that is already on its original state, so there's nothing we can do other + than send a SIGRETARGET signal to the process. + + Otherwise, we pop the revision out and send the SIGRETARGET signal. 
+  """
+  def recover_processed(event = %LogRecoverProcessedEvent{target_log_id: nil}),
+    do: sigretarget(event)
+  def recover_processed(event = %LogRecoverProcessedEvent{target_log_id: _}) do
+    with \
+      log = %Log{} <- LogQuery.fetch(event.target_log_id),
+      {:ok, _, events} <- LogAction.recover(log)
+    do
+      Event.emit(events)
+    end
+
+    sigretarget(event)
+  end
+
+  defp sigretarget(event = %LogRecoverProcessedEvent{}) do
+    event
+    |> Event.get_process()
+    |> ProcessFlow.signal(:SIGRETARGET)
+  end
 end
diff --git a/lib/log/event/recover.ex b/lib/log/event/recover.ex
new file mode 100644
index 00000000..6e9df134
--- /dev/null
+++ b/lib/log/event/recover.ex
@@ -0,0 +1,56 @@
+defmodule Helix.Log.Event.Recover do
+
+  import Helix.Event
+
+  event Processed do
+    @moduledoc """
+    `LogRecoverProcessedEvent` is fired when the underlying LogRecoverProcess
+    has achieved its objective and finished executing, thus popping the last
+    revision from the Log.
+    """
+
+    alias Helix.Entity.Model.Entity
+    alias Helix.Process.Model.Process
+    alias Helix.Server.Model.Server
+    alias Helix.Log.Model.Log
+    alias Helix.Log.Process.Recover, as: LogRecoverProcess
+
+    @type t ::
+      %__MODULE__{
+        method: LogRecoverProcess.method,
+        server_id: Server.id,
+        entity_id: Entity.id,
+        target_log_id: Log.id | nil,
+        recover_version: pos_integer
+      }
+
+    event_struct [
+      :method,
+      :server_id,
+      :entity_id,
+      :target_log_id,
+      :recover_version
+    ]
+
+    @spec new(Process.t, LogRecoverProcess.t) ::
+      t
+    def new(process = %Process{}, data = %LogRecoverProcess{}) do
+      %__MODULE__{
+        method: get_method(process),
+        server_id: process.target_id,
+        entity_id: process.source_entity_id,
+        target_log_id: process.tgt_log_id,
+        recover_version: data.recover_version
+      }
+
+      # Later on, after we pop out the revision from the stack, we'll send a
+      # SIGRETARGET signal to the process, so it can keep working on another log
+      |> put_process(process)
+    end
+
+    defp get_method(%Process{type: :log_recover_global}),
+      do: :global
+ defp get_method(%Process{type: :log_recover_custom}), + do: :custom + end +end diff --git a/lib/log/factor/log.ex b/lib/log/factor/log.ex index 0966729d..9ec471fc 100644 --- a/lib/log/factor/log.ex +++ b/lib/log/factor/log.ex @@ -32,7 +32,8 @@ factor Helix.Log.Factor.Log do @type factor :: %__MODULE__{ total: fact_total, - from_entity: fact_from_entity + from_entity: fact_from_entity, + extra: fact_extra } @type params :: @@ -45,8 +46,9 @@ factor Helix.Log.Factor.Log do @type fact_total :: pos_integer @type fact_from_entity :: non_neg_integer + @type fact_extra :: non_neg_integer - factor_struct [:from_entity, :total] + factor_struct [:from_entity, :total, :extra] fact(:total, _, %{revisions: revisions}) do set_fact length(revisions) @@ -64,9 +66,19 @@ factor Helix.Log.Factor.Log do set_relay params, relay, %{revisions: get_revisions(log)} end + @doc """ + Counts how many additional revisions are there on top of the original one. + + See docs at `Log.count_extra_revisions/1` for more details. + """ + fact(:extra, %{log: log = %Log{}}, _) do + set_fact Log.count_extra_revisions(log) + end + assembly do get_fact :total get_fact :from_entity + get_fact :extra end defp get_revisions(log = %Log{}), diff --git a/lib/log/model/log.ex b/lib/log/model/log.ex index 97cbe6e6..432ce7c6 100644 --- a/lib/log/model/log.ex +++ b/lib/log/model/log.ex @@ -126,7 +126,7 @@ defmodule Helix.Log.Model.Log do {:recover, changeset} end - @spec is_artificial?(Log.t) :: + @spec is_artificial?(t) :: boolean @doc """ Returns whether the log is artificial or not. @@ -136,6 +136,26 @@ defmodule Helix.Log.Model.Log do def is_artificial?(%Log{}), do: true + @spec count_extra_revisions(t) :: + non_neg_integer + @doc """ + Counts how many revisions are there on top of the original one. + + The "original" revision for an artificial log is considered to be an empty + log, so when an artificial log is at `revision_id=1`, there still is one extra + revision on top of the original. 
+
+  On the other hand, natural logs at `revision_id=1` are already at the
+  original one.
+  """
+  def count_extra_revisions(log = %Log{}) do
+    if is_artificial?(log) do
+      log.revision_id
+    else
+      log.revision_id - 1
+    end
+  end
+
   @spec build_heritage(creation_params) ::
     Helix.ID.heritage
   defp build_heritage(params),
diff --git a/lib/log/process/forge.ex b/lib/log/process/forge.ex
index 250fcd56..cf3cfbfa 100644
--- a/lib/log/process/forge.ex
+++ b/lib/log/process/forge.ex
@@ -33,8 +33,8 @@ process Helix.Log.Process.Forge do
         action: LogForgeProcess.action,
         log: Log.t | nil,
         ssh: Connection.t | nil,
-        entity_id: Entity.id | nil,
-        network_id: Network.id | nil
+        network_id: Network.id | nil,
+        entity_id: Entity.id | nil
       }
 
   @type resources_params ::
@@ -125,9 +125,9 @@ process Helix.Log.Process.Forge do
     get_factors(params) do
 
       factor FileFactor, %{file: params.forger},
-        only: [:version], as: :forger
+        only: :version, as: :forger
       factor LogFactor, %{log: params.log, entity_id: params.entity_id},
-        if: params.action == :edit, only: [:revisions], as: :log
+        if: params.action == :edit, only: :revisions, as: :log
     end
 
   # TODO: time resource (for minimum duration) #364
@@ -156,7 +156,9 @@ process Helix.Log.Process.Forge do
 
     import HELL.Macros
 
-    resources(_gateway, _target, _params, meta) do
+    @type custom :: %{}
+
+    resources(_gateway, _target, _params, meta, _) do
       %{
         log: meta.log,
         forger: meta.forger,
@@ -165,7 +167,7 @@ process Helix.Log.Process.Forge do
       }
     end
 
-    source_file(_gateway, _target, _params, %{forger: forger}) do
+    source_file(_gateway, _target, _params, %{forger: forger}, _) do
       forger.file_id
     end
 
@@ -175,7 +177,7 @@ process Helix.Log.Process.Forge do
 
     However, if the operation is local, there is no `source_connection`.
""" - source_connection(_, _, _, %{ssh: ssh = %Connection{}}) do + source_connection(_, _, _, %{ssh: ssh = %Connection{}}, _) do ssh end @@ -185,7 +187,7 @@ process Helix.Log.Process.Forge do If, however, we are creating a new log, there is no such entry, as the soon-to-be-created log does not exist yet! """ - target_log(_gateway, _target, _params, %{action: :edit, log: log}) do + target_log(_gateway, _target, _params, %{action: :edit, log: log}, _) do log.log_id end end diff --git a/lib/log/process/recover.ex b/lib/log/process/recover.ex index e69de29b..ce95d4dd 100644 --- a/lib/log/process/recover.ex +++ b/lib/log/process/recover.ex @@ -0,0 +1,329 @@ +import Helix.Process + +process Helix.Log.Process.Recover do + @moduledoc """ + `LogRecoverProcess` is launched when the user wants to recover one or more + logs. Recovery may happen in two ways: `global` or `custom`. + + - Global: This recovery method acts like a scanner. The LogRecoverProcess will + scan the server's logs for any log that may have revisions, and then work on + whichever log it chose. + + - Custom: This recovery method is a little bit more direct. The player chooses + a log and then the LogRecoverProcess will work on that log. + + In both methods, the process will run in a recursive fashion: once a revision + is found, it will send a `SIGRETARGET` and the process will find a new target. + `global` processes might choose a different log to recover, while `custom` + processes will keep working on the same log. + + The `custom` method is a bit faster than `global`, but it may work on a log + that has no revisions. It also uses more CPU than `global`. + + If `custom` is executing on a log that is already on its original revision, + the process will keep working infinitely. The user does not know whether the + target log is on its final revision or not. + + If `global` is working on a server that have all its logs on their original + revision, it will keep working infinitely. 
The user does not know whether the + target log is on its final revision or not. + """ + + alias Helix.Entity.Model.Entity + alias Helix.Network.Model.Connection + alias Helix.Network.Model.Network + alias Helix.Server.Model.Server + alias Helix.Software.Model.File + alias Helix.Log.Model.Log + alias Helix.Log.Query.Log, as: LogQuery + alias __MODULE__, as: LogRecoverProcess + + process_struct [:recover_version] + + @type t :: + %__MODULE__{ + recover_version: pos_integer + } + + @type method :: :global | :custom + @type process_type :: :log_recover_global | :log_recover_custom + + @type creation_params :: %{} + + @type executable_meta :: + %{ + recover: File.t_of_type(:log_recover), + method: LogRecoverProcess.method, + log: Log.t | nil, + ssh: Connection.t | nil, + network_id: Network.id | nil, + entity_id: Entity.id + } + + @type resources_params :: + %{ + method: method, + log: Log.t | nil, + recover: File.t_of_type(:log_recover) | nil, + recover_version: pos_integer | nil, + entity_id: Entity.id + } + + @type resources :: + %{ + objective: objective, + static: map, + l_dynamic: [:cpu], + r_dynamic: [] + } + + @type objective :: %{cpu: resource_usage} + + @spec new(creation_params, executable_meta) :: + t + def new(%{}, %{recover: recover = %File{software_type: :log_recover}}) do + %__MODULE__{ + recover_version: recover.modules.log_recover.version + } + end + + @spec get_process_type(creation_params, executable_meta) :: + process_type + def get_process_type(_, %{method: :global}), + do: :log_recover_global + def get_process_type(_, %{method: :custom}), + do: :log_recover_custom + + @spec resources(resources_params) :: + resources + def resources(params), + do: get_resources params + + @spec find_next_target(Server.idt) :: + Log.t + | nil + @doc """ + Selects the next log that we should work on. Only called on `global` method. 
+
+  IMPROVE: If there are other recover processes from the attacker, make sure
+  to select a different log, allowing users to run multiple "threads" of the
+  LogRecoverProcess.
+  """
+  def find_next_target(server) do
+    recoverable_logs =
+      server
+      |> LogQuery.get_logs_on_server()
+      |> Enum.filter(&(Log.count_extra_revisions(&1) >= 1))
+
+    if Enum.empty?(recoverable_logs) do
+      nil
+    else
+      Enum.random(recoverable_logs)
+    end
+  end
+
+  processable do
+
+    alias Helix.Log.Event.Recover.Processed, as: LogRecoverProcessedEvent
+
+    on_completion(process, data) do
+      event = LogRecoverProcessedEvent.new(process, data)
+
+      # We can't send a SIGRETARGET now because if we do so, it might fetch the
+      # existing Log while the freshly recovered revision isn't removed from it
+      # yet. So, to fix this race condition, we first process the log recovery
+      # (by handling `LogRecoverProcessedEvent`), and only then we send the
+      # SIGRETARGET to this process.
+      {:noop, [event]}
+    end
+
+    on_retarget(process, _data) do
+      {new_log, method} =
+        # On `log_recover_global`, we must select a new log on each iteration
+        if process.type == :log_recover_global do
+          log = LogRecoverProcess.find_next_target(process.target_id)
+          {log, :global}
+
+        # On `log_recover_custom` we always work at the same log
+        else
+          log = LogQuery.fetch(process.tgt_log_id)
+          {log, :custom}
+        end
+
+      params =
+        %{
+          method: method,
+          log: new_log,
+          recover: nil,
+          recover_version: process.data.recover_version,
+          entity_id: process.source_entity_id
+        }
+
+      new_resources = LogRecoverProcess.resources(params)
+      new_objects = %{tgt_log_id: new_log && new_log.log_id || nil}
+
+      changes = Map.merge(new_resources, new_objects)
+
+      {{:retarget, changes}, []}
+    end
+
+    # TODO \/: React to log deletion.
+ # on_target_log_deleted(process, data) do + # action = + # if process.type == :log_recover_global do + # :retarget + # else + # {:SIGKILL, :log_deleted} + # end + + # {action, []} + # end + end + + resourceable do + + alias Helix.Software.Factor.File, as: FileFactor + alias Helix.Log.Factor.Log, as: LogFactor + + @type params :: LogRecoverProcess.resources_params + @type factors :: + %{ + optional(:recover) => %{version: FileFactor.fact_version}, + :log => LogFactor.fact_revisions + } + + get_factors(%{log: nil}) do + %{log: nil} + end + + get_factors(params) do + factor FileFactor, %{file: params.recover}, + if: not is_nil(params.recover), only: :version, as: :recover + factor LogFactor, %{log: params.log, entity_id: params.entity_id}, + only: :revisions, as: :log + + factors = + if is_nil(params.recover) do + Map.put( + factors, + :recover, + %{version: %{log_recover: params.recover_version}} + ) + else + factors + end + end + + # TODO: time resource (for minimum duration) #364 + + # `log` may be nil iff `method = :global`, when there are no logs that can + # be recovered. This means infinite work! 
+    cpu(%{log: nil}) do
+      999_999_999_999
+    end
+
+    cpu(%{method: method}) do
+      multiplier =
+        if method == :custom,
+          do: 500,
+          else: 1000
+
+      if f.log.revisions.extra == 0 do
+        999_999_999_999
+      else
+        (f.log.revisions.total * multiplier) / f.recover.version.log_recover
+      end
+    end
+
+    dynamic do
+      [:cpu]
+    end
+
+    static do
+      %{
+        paused: %{ram: 100},
+        running: %{ram: 200}
+      }
+    end
+  end
+
+  executable do
+
+    import HELL.Macros
+
+    @type custom :: %{log: Log.t | nil}
+
+    custom(_, target, _params, meta) do
+      log =
+        # If it's a `global` recovery, we'll randomly select a recoverable log
+        if meta.method == :global do
+          LogRecoverProcess.find_next_target(target)
+
+        # But if it's a `custom` recovery, the user already selected a log
+        else
+          meta.log
+        end
+
+      %{log: log}
+    end
+
+    resources(_gateway, _, _params, meta, custom) do
+      log =
+        if meta.method == :global do
+          custom.log
+        else
+          meta.log
+        end
+
+      %{
+        log: log,
+        recover: meta.recover,
+        method: meta.method,
+        entity_id: meta.entity_id
+      }
+    end
+
+    source_file(_gateway, _target, _params, %{recover: recover}, _) do
+      recover.file_id
+    end
+
+    docp """
+    The LogRecoverProcess has a `source_connection` when the player is
+    recovering a log on a remote server.
+
+    However, if the operation is local, there is no `source_connection`.
+    """
+    source_connection(_, _, _, %{ssh: ssh = %Connection{}}, _) do
+      ssh
+    end
+
+    docp """
+    `custom` log is nil => there are no recoverable logs on the server.
+    """
+    target_log(_gateway, _target, _params, %{method: :global}, %{log: nil}) do
+      nil
+    end
+
+    docp """
+    `custom` log is not nil => a random recoverable log was selected, and that
+    is the one we'll work on during this process iteration.
+    """
+    target_log(_gateway, _target, _params, %{method: :global}, %{log: log}) do
+      log.log_id
+    end
+
+    docp """
+    Method is `custom`, so the player already chose which log we'll work on.
+ """ + target_log(_gateway, _target, _params, %{method: :custom, log: log}, _) do + log.log_id + end + end + + process_viewable do + + @type data :: %{} + + render_empty_data() + end +end diff --git a/lib/process/action/flow/process.ex b/lib/process/action/flow/process.ex index 60523147..f2379902 100644 --- a/lib/process/action/flow/process.ex +++ b/lib/process/action/flow/process.ex @@ -14,7 +14,7 @@ defmodule Helix.Process.Action.Flow.Process do Emits ProcessSignaledEvent and any other event defined at the Processable callback. """ - def signal(process = %Process{}, signal, params) do + def signal(process = %Process{}, signal, params \\ %{}) do flowing do with {:ok, events} <- ProcessAction.signal(process, signal, params) do Event.emit(events) diff --git a/lib/process/action/process.ex b/lib/process/action/process.ex index 7483dc17..a0deedff 100644 --- a/lib/process/action/process.ex +++ b/lib/process/action/process.ex @@ -60,6 +60,19 @@ defmodule Helix.Process.Action.Process do {:ok, [event]} end + @spec retarget(Process.t, Process.retarget_changes) :: + {:ok, list} + @doc """ + Retargets a process. + + Modifies the process target and/or objectives according to `changes`. 
+ """ + def retarget(process = %Process{}, changes) do + ProcessInternal.retarget(process, changes) + + {:ok, []} + end + # def pause(process = %Process{}) do # ProcessInternal.pause(process) @@ -104,6 +117,9 @@ defmodule Helix.Process.Action.Process do # defp signal_handler(:SIGPRIO, process, %{priority: priority}), # do: Processable.priority(process.data, process, priority) + defp signal_handler(:SIGRETARGET, process, _), + do: Processable.retarget(process.data, process) + defp signal_handler(:SIGSRCCONND, process, %{connection: connection}), do: Processable.source_connection_closed(process.data, process, connection) diff --git a/lib/process/action/top.ex b/lib/process/action/top.ex index 1b16e65e..1644718e 100644 --- a/lib/process/action/top.ex +++ b/lib/process/action/top.ex @@ -115,7 +115,7 @@ defmodule Helix.Process.Action.TOP do # This organization is useful because we can only forecast local processes. # (A process may be completed only on its local server; so the remote # processes here that are not being forecast will be forecast during *their* - # server's TOP recalque, which should happen shortly). + # server's TOP recalque, which should happen soon). local_processes = Enum.filter(processes, &(&1.local? == true)) remote_processes = Enum.filter(processes, &(&1.local? 
== false)) diff --git a/lib/process/event/handler/process.ex b/lib/process/event/handler/process.ex index 570533d4..6c0e80b3 100644 --- a/lib/process/event/handler/process.ex +++ b/lib/process/event/handler/process.ex @@ -21,6 +21,8 @@ defmodule Helix.Process.Event.Handler.Process do |> Event.emit(from: event) end + # Actions + @spec action_handler(Processable.action, Process.t, Process.signal_params) :: [Event.t] defp action_handler(:delete, process, %{reason: reason}) do @@ -29,12 +31,26 @@ defmodule Helix.Process.Event.Handler.Process do events end + defp action_handler({:retarget, changes}, process, _) do + {:ok, events} = ProcessAction.retarget(process, changes) + + events + end + + # Signals + defp action_handler({:SIGKILL, reason}, process, _) do {:ok, events} = ProcessAction.signal(process, :SIGKILL, %{reason: reason}) events end + defp action_handler(:SIGRETARGET, process, _) do + {:ok, events} = ProcessAction.signal(process, :SIGRETARGET) + + events + end + defp action_handler(:noop, _, _), do: [] diff --git a/lib/process/executable.ex b/lib/process/executable.ex index c4efcbd1..44e8a2e9 100644 --- a/lib/process/executable.ex +++ b/lib/process/executable.ex @@ -54,7 +54,7 @@ defmodule Helix.Process.Executable do %{data: data} end - @spec get_ownership(Server.t, Server.t, params, meta) :: + @spec get_ownership(Server.t, Server.t, params, meta, custom) :: %{ gateway_id: Server.id, target_id: Server.id, @@ -64,8 +64,9 @@ defmodule Helix.Process.Executable do Infers ownership information about the process, which is a subset of the full process params. 
""" - defp get_ownership(gateway, target, params, meta) do + defp get_ownership(gateway, target, params, meta, _custom) do entity = EntityQuery.fetch_by_server(gateway.server_id) + %{ gateway_id: gateway.server_id, target_id: target.server_id, @@ -280,6 +281,11 @@ defmodule Helix.Process.Executable do # Defaults: in case these functions were not defined, we assume the # process is not interested on this (optional) data. + @spec pre_hook_custom(Server.t, Server.t, params, meta) :: + custom + defp pre_hook_custom(_, _, _, _), + do: %{} + @spec get_bounce_id(Bounce.idt | nil) :: %{bounce_id: Bounce.id | nil} defp get_bounce_id(bounce = %Bounce{}), @@ -289,55 +295,55 @@ defmodule Helix.Process.Executable do defp get_bounce_id(nil), do: %{bounce_id: nil} - @spec get_source_connection(Server.t, Server.t, params, meta) :: + @spec get_source_connection(Server.t, Server.t, params, meta, custom) :: {:create, Connection.type} | Connection.idt | nil - defp get_source_connection(_, _, _, _), + defp get_source_connection(_, _, _, _, _), do: nil - @spec get_target_connection(Server.t, Server.t, params, meta) :: + @spec get_target_connection(Server.t, Server.t, params, meta, custom) :: {:create, Connection.type} | Connection.idt | :same_origin | nil - defp get_target_connection(_, _, _, _), + defp get_target_connection(_, _, _, _, _), do: nil - @spec get_source_file(Server.t, Server.t, params, meta) :: + @spec get_source_file(Server.t, Server.t, params, meta, custom) :: %{src_file_id: File.id | nil} - defp get_source_file(_, _, _, _), + defp get_source_file(_, _, _, _, _), do: %{src_file_id: nil} - @spec get_target_file(Server.t, Server.t, params, meta) :: + @spec get_target_file(Server.t, Server.t, params, meta, custom) :: %{tgt_file_id: File.id | nil} - defp get_target_file(_, _, _, _), + defp get_target_file(_, _, _, _, _), do: %{tgt_file_id: nil} - @spec get_source_bank_account(Server.t, Server.t, params, meta) :: + @spec get_source_bank_account(Server.t, Server.t, params, 
meta, custom) :: %{ src_atm_id: Server.t | nil, src_acc_number: BankAccount.account | nil } - defp get_source_bank_account(_, _, _, _), + defp get_source_bank_account(_, _, _, _, _), do: %{src_atm_id: nil, src_acc_number: nil} - @spec get_target_bank_account(Server.t, Server.t, params, meta) :: + @spec get_target_bank_account(Server.t, Server.t, params, meta, custom) :: %{ tgt_atm_id: Server.t | nil, tgt_acc_number: BankAccount.account | nil } - defp get_target_bank_account(_, _, _, _), + defp get_target_bank_account(_, _, _, _, _), do: %{tgt_atm_id: nil, tgt_acc_number: nil} - @spec get_target_process(Server.t, Server.t, params, meta) :: + @spec get_target_process(Server.t, Server.t, params, meta, custom) :: %{tgt_process_id: Process.t | nil} - defp get_target_process(_, _, _, _), + defp get_target_process(_, _, _, _, _), do: %{tgt_process_id: nil} - @spec get_target_log(Server.t, Server.t, params, meta) :: + @spec get_target_log(Server.t, Server.t, params, meta, custom) :: %{tgt_log_id: Process.t | nil} - defp get_target_log(_, _, _, _), + defp get_target_log(_, _, _, _, _), do: %{tgt_log_id: nil} end end @@ -358,17 +364,19 @@ defmodule Helix.Process.Executable do Executes the process. 
""" def execute(unquote_splicing(args), relay) do + custom = pre_hook_custom(unquote_splicing(args)) + process_type = get_process_type(unquote(params), unquote(meta)) process_data = get_process_data(unquote(params), unquote(meta)) - resources = get_resources(unquote_splicing(args)) - source_file = get_source_file(unquote_splicing(args)) - target_file = get_target_file(unquote_splicing(args)) - source_bank_account = get_source_bank_account(unquote_splicing(args)) - target_bank_account = get_target_bank_account(unquote_splicing(args)) - target_process = get_target_process(unquote_splicing(args)) - target_log = get_target_log(unquote_splicing(args)) + resources = get_resources(unquote_splicing(args), custom) + source_file = get_source_file(unquote_splicing(args), custom) + target_file = get_target_file(unquote_splicing(args), custom) + src_bank_acc = get_source_bank_account(unquote_splicing(args), custom) + tgt_bank_acc = get_target_bank_account(unquote_splicing(args), custom) + target_process = get_target_process(unquote_splicing(args), custom) + target_log = get_target_log(unquote_splicing(args), custom) bounce_id = get_bounce_id(unquote(meta)[:bounce]) - ownership = get_ownership(unquote_splicing(args)) + ownership = get_ownership(unquote_splicing(args), custom) network_id = get_network_id(unquote(meta)) partial = @@ -376,8 +384,8 @@ defmodule Helix.Process.Executable do |> Map.merge(resources) |> Map.merge(source_file) |> Map.merge(target_file) - |> Map.merge(source_bank_account) - |> Map.merge(target_bank_account) + |> Map.merge(src_bank_acc) + |> Map.merge(tgt_bank_acc) |> Map.merge(target_process) |> Map.merge(target_log) |> Map.merge(bounce_id) @@ -385,8 +393,10 @@ defmodule Helix.Process.Executable do |> Map.merge(process_type) |> Map.merge(network_id) - source_connection_info = get_source_connection(unquote_splicing(args)) - target_connection_info = get_target_connection(unquote_splicing(args)) + source_connection_info = + 
get_source_connection(unquote_splicing(args), custom) + target_connection_info = + get_target_connection(unquote_splicing(args), custom) flowing do with \ @@ -435,14 +445,30 @@ defmodule Helix.Process.Executable do end end + @doc """ + Creates a `custom` map that will be sent to all executable callbacks. Called + as a pre-hook to Executable. + """ + defmacro custom(gateway, target, params, meta, do: block) do + args = [gateway, target, params, meta] + + quote do + + defp pre_hook_custom(unquote_splicing(args)) do + unquote(block) + end + + end + end + @doc """ Returns the raw result of the Executable's `source_connection` section. It will be later interpreted by `setup_connection`, which will make sense whether a new connection should be created, and what the `src_connection_id` should be set to. """ - defmacro source_connection(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro source_connection(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -462,8 +488,8 @@ defmodule Helix.Process.Executable do If `:same_origin` is returned, the process will target the same connection that originated it. """ - defmacro target_connection(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro target_connection(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -478,8 +504,8 @@ defmodule Helix.Process.Executable do Returns the process' `src_file_id`, as defined on the `source_file` section of the Process.Executable. 
""" - defmacro source_file(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro source_file(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -496,8 +522,8 @@ defmodule Helix.Process.Executable do Returns the process' `tgt_file_id`, as defined on the `target_file` section of the Process.Executable. """ - defmacro target_file(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro target_file(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -514,8 +540,8 @@ defmodule Helix.Process.Executable do Returns the process' `src_atm_id` and `src_acc_number`, as defined on the `source_bank_account` section of the Process.Executable """ - defmacro source_bank_account(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro source_bank_account(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -537,8 +563,8 @@ defmodule Helix.Process.Executable do Returns the process' `tgt_atm_id` and `tgt_acc_number`, as defined on the `target_bank_account` section of the Process.Executable """ - defmacro target_bank_account(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro target_bank_account(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -570,8 +596,8 @@ defmodule Helix.Process.Executable do Returns the process' `tgt_process_id`, as defined on the `target_process` section of the Process.Executable. 
""" - defmacro target_process(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro target_process(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -588,8 +614,8 @@ defmodule Helix.Process.Executable do Returns the process' `tgt_log_id`, as defined on the `target_log` section of the Process.Executable. """ - defmacro target_log(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro target_log(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] quote do @@ -609,13 +635,13 @@ defmodule Helix.Process.Executable do - which resources can be allocated dynamically - what are the statically allocated resources """ - defmacro resources(gateway, target, params, meta, do: block) do - args = [gateway, target, params, meta] + defmacro resources(gateway, target, params, meta, custom, do: block) do + args = [gateway, target, params, meta, custom] process = get_process(__CALLER__) quote do - @spec get_resources(Server.t, Server.t, params, meta) :: + @spec get_resources(Server.t, Server.t, params, meta, custom) :: unquote(process).resources defp get_resources(unquote_splicing(args)) do params = unquote(block) diff --git a/lib/process/internal/process.ex b/lib/process/internal/process.ex index 365ed7e9..a0f843d6 100644 --- a/lib/process/internal/process.ex +++ b/lib/process/internal/process.ex @@ -5,15 +5,6 @@ defmodule Helix.Process.Internal.Process do alias Helix.Process.Model.Process alias Helix.Process.Repo - @spec create(Process.creation_params) :: - {:ok, Process.t} - | {:error, Process.changeset} - def create(params) do - params - |> Process.create_changeset() - |> Repo.insert() - end - @spec fetch(Process.id) :: Process.t | nil @@ -23,6 +14,15 @@ defmodule Helix.Process.Internal.Process do end end + @spec create(Process.creation_params) :: + {:ok, Process.t} + | {:error, 
Process.changeset} + def create(params) do + params + |> Process.create_changeset() + |> Repo.insert() + end + @spec get_processes_on_server(Server.idt) :: [Process.t] @doc """ @@ -79,6 +79,12 @@ defmodule Helix.Process.Internal.Process do end) end + def retarget(process, changes) do + process + |> Process.retarget(changes) + |> Repo.update() + end + @spec delete(Process.t) :: :ok @doc """ diff --git a/lib/process/model/process.ex b/lib/process/model/process.ex index 20f21375..ef3c9240 100644 --- a/lib/process/model/process.ex +++ b/lib/process/model/process.ex @@ -60,6 +60,7 @@ defmodule Helix.Process.Model.Process do l_reserved: Process.Resources.t, r_reserved: Process.Resources.t, last_checkpoint_time: DateTime.t, + objective: map, static: static, l_dynamic: dynamic, r_dynamic: dynamic, @@ -84,6 +85,8 @@ defmodule Helix.Process.Model.Process do | :wire_transfer | :log_forge_create | :log_forge_edit + | :log_recover_global + | :log_recover_custom @typedoc """ List of signals a process may receive during its lifetime. @@ -122,6 +125,17 @@ defmodule Helix.Process.Model.Process do Default action is to resume the process. + ## SIGRETARGET + + Signal sent when the process finished prior execution and is now looking for + a new target to work on. + + Keep in mind that, when using `SIGRETARGET` on recursive processes, you might + want the signal to be sent only after the side-effect of the process has been + properly processed. As an example, see `LogRecoverProcess`. + + Default action is to ignore the signal. + ## SIGPRIO Signal sent when the user changed the priority of the process. @@ -179,6 +193,7 @@ defmodule Helix.Process.Model.Process do | :SIGKILL | :SIGSTOP | :SIGCONT + | :SIGRETARGET | :SIGPRIO | :SIGSRCCONND | :SIGTGTCONND @@ -195,6 +210,7 @@ defmodule Helix.Process.Model.Process do | %{priority: term} | %{connection: Connection.t} | %{file: File.t} + | %{} @typedoc """ Valid reasons for which a Process may be killed. 
@@ -209,6 +225,11 @@ defmodule Helix.Process.Model.Process do | :src_bank_acc_closed | :tgt_bank_acc_closed + @typedoc """ + Return type for `retarget` changes. + """ + @type retarget_changes :: map + @type changeset :: %Changeset{data: %__MODULE__{}} @type creation_params :: %{ @@ -473,6 +494,31 @@ defmodule Helix.Process.Model.Process do |> put_pk(heritage, {:process, params.type}) end + @retarget_fields [ + # A retarget may change the process' resources, listed below + :static, + :l_dynamic, + :r_dynamic, + :objective, + + # It may also change some objects (add as needed) + :tgt_log_id + ] + + @spec retarget(t, retarget_changes :: map) :: + changeset + @doc """ + Updates the process according to the retarget changes. It also empties any + amount of previous work (`processed`). + """ + def retarget(process = %Process{}, changes) do + process + |> change() + |> cast(changes, @retarget_fields) + |> put_change(:processed, %{}) + |> validate_required(@required_fields) + end + @spec format(raw_process :: t) :: t @doc """ diff --git a/lib/process/model/processable.ex b/lib/process/model/processable.ex index aa1a2cea..7672c25d 100644 --- a/lib/process/model/processable.ex +++ b/lib/process/model/processable.ex @@ -50,8 +50,15 @@ defprotocol Helix.Process.Model.Processable do ## :restart + Resets any work the process may have done, and starts from scratch. + Not implemented yet. + ## :retarget + + Modify the target of a process, potentially changing its resources and/or + relevant objects. Commonly used with recursive processes. + ## {:SIGKILL, } Sends a SIGKILL to itself, with the given reason as a parameter. @@ -61,6 +68,13 @@ defprotocol Helix.Process.Model.Processable do Later on, the process *might* be killed. Depends on how it implements the `on_kill` callback. + ## :SIGRETARGET + + Sends a SIGRETARGET to itself + + Later on, the process *might* change. Depends on how it implements the + `on_retarget` callback. + ## :noop Makes a lot of nada. 
@@ -73,7 +87,9 @@ defprotocol Helix.Process.Model.Processable do | :resume | :renice | :restart + | {:retarget, Process.retarget_changes} | {:SIGKILL, Process.kill_reason} + | :SIGRETARGET | :noop @spec complete(t, Process.t) :: @@ -90,6 +106,15 @@ defprotocol Helix.Process.Model.Processable do """ def kill(data, process, reason) + @spec retarget(t, Process.t) :: + {action, [Event.t]} + @doc """ + Called when the process receives a SIGRETARGET, meaning the process finished + its previous objective and is now looking for something else to do. Commonly + used on recursive processes. + """ + def retarget(data, process) + @spec source_connection_closed(t, Process.t, Connection.t) :: {action, [Event.t]} @doc """ diff --git a/lib/process/processable.ex b/lib/process/processable.ex index 213c5b91..060c6da9 100644 --- a/lib/process/processable.ex +++ b/lib/process/processable.ex @@ -33,6 +33,10 @@ defmodule Helix.Process.Processable do {:delete, []} end + on_retarget(_process, _data) do + {:noop, []} + end + on_source_connection_closed(_process, _data, _connection) do {{:SIGKILL, :src_connection_closed}, []} end @@ -100,6 +104,24 @@ defmodule Helix.Process.Processable do end end + @doc """ + Called when the process receives a SIGRETARGET. + + Defines what should happen when the process is asked to look for a new target. + + Default behaviour is to ignore the signal. + """ + defmacro on_retarget(process, data, do: block) do + quote do + + def retarget(unquote(data), p = unquote(process)) do + unquote(block) + |> add_fingerprint(p) + end + + end + end + @doc """ Called when the process receives a SIGSRCCONND. 
@@ -111,10 +133,8 @@ defmodule Helix.Process.Processable do quote do def source_connection_closed( - unquote(data), - p = unquote(process), - unquote(connection)) - do + unquote(data), p = unquote(process), unquote(connection) + ) do unquote(block) |> add_fingerprint(p) end @@ -133,10 +153,8 @@ defmodule Helix.Process.Processable do quote do def target_connection_closed( - unquote(data), - p = unquote(process), - unquote(connection)) - do + unquote(data), p = unquote(process), unquote(connection) + ) do unquote(block) |> add_fingerprint(p) end diff --git a/lib/software/model/software_type/firewall/process_type.ex b/lib/software/model/software_type/firewall/process_type.ex index 537d631a..3f2bdd76 100644 --- a/lib/software/model/software_type/firewall/process_type.ex +++ b/lib/software/model/software_type/firewall/process_type.ex @@ -30,6 +30,10 @@ defmodule Helix.Software.Model.SoftwareType.Firewall.Passive do {:delete, [event]} end + def retarget(_, _) do + {:noop, []} + end + def source_connection_closed(_, _, _) do {:delete, []} end diff --git a/lib/software/process/cracker/bruteforce.ex b/lib/software/process/cracker/bruteforce.ex index 00f849d6..2e13f164 100644 --- a/lib/software/process/cracker/bruteforce.ex +++ b/lib/software/process/cracker/bruteforce.ex @@ -149,9 +149,11 @@ process Helix.Software.Process.Cracker.Bruteforce do Defines how a BruteforceProcess should be executed. 
""" + @type custom :: %{} + alias Helix.Software.Query.File, as: FileQuery - resources(_, target, _, %{cracker: cracker}) do + resources(_, target, _, %{cracker: cracker}, _) do hasher = FileQuery.fetch_best(target, :password) %{ @@ -160,11 +162,11 @@ process Helix.Software.Process.Cracker.Bruteforce do } end - source_file(_gateway, _target, _params, %{cracker: cracker}) do + source_file(_gateway, _target, _params, %{cracker: cracker}, _) do cracker.file_id end - source_connection(_gateway, _target, _params, _meta) do + source_connection(_gateway, _target, _params, _meta, _) do {:create, :cracker_bruteforce} end end diff --git a/lib/software/process/cracker/overflow.ex b/lib/software/process/cracker/overflow.ex index 1d6d2999..61c01ab3 100644 --- a/lib/software/process/cracker/overflow.ex +++ b/lib/software/process/cracker/overflow.ex @@ -94,23 +94,25 @@ process Helix.Software.Process.Cracker.Overflow do executable do - resources(_, _, _, %{cracker: cracker}) do + @type custom :: %{} + + resources(_, _, _, %{cracker: cracker}, _) do %{cracker: cracker} end - source_file(_gateway, _target, _params, %{cracker: cracker}) do + source_file(_gateway, _target, _params, %{cracker: cracker}, _) do cracker.file_id end - source_connection(_, _, _, %{ssh: ssh}) do + source_connection(_, _, _, %{ssh: ssh}, _) do ssh.connection_id end - target_connection(_, _, _, %{connection: connection}) do + target_connection(_, _, _, %{connection: connection}, _) do connection.connection_id end - target_process(_, _, _, %{process: process}) do + target_process(_, _, _, %{process: process}, _) do process.process_id end end diff --git a/lib/software/process/file/install.ex b/lib/software/process/file/install.ex index d723e9f5..716d438a 100644 --- a/lib/software/process/file/install.ex +++ b/lib/software/process/file/install.ex @@ -122,18 +122,20 @@ process Helix.Software.Process.File.Install do executable do - resources(_gateway, _target, %{backend: backend}, %{file: file}) do + @type custom 
:: %{} + + resources(_gateway, _target, %{backend: backend}, %{file: file}, _) do %{ file: file, backend: backend } end - source_connection(_gateway, _target, _params, %{ssh: ssh}) do + source_connection(_gateway, _target, _params, %{ssh: ssh}, _) do ssh end - target_file(_gateway, _target, _params, %{file: file}) do + target_file(_gateway, _target, _params, %{file: file}, _) do file.file_id end end diff --git a/lib/software/process/file/transfer.ex b/lib/software/process/file/transfer.ex index a5821813..b1bdbdfd 100644 --- a/lib/software/process/file/transfer.ex +++ b/lib/software/process/file/transfer.ex @@ -206,7 +206,9 @@ process Helix.Software.Process.File.Transfer do Defines how FileTransferProcess should be executed. """ - resources(_, _, params, meta) do + @type custom :: %{} + + resources(_, _, params, meta, _) do %{ type: params.type, file: meta.file, @@ -214,11 +216,11 @@ process Helix.Software.Process.File.Transfer do } end - target_file(_gateway, _target, _params, %{file: file}) do + target_file(_gateway, _target, _params, %{file: file}, _) do file.file_id end - source_connection(_gateway, _target, params, _) do + source_connection(_gateway, _target, params, _, _) do {:create, params.connection_type} end end diff --git a/lib/software/process/virus/collect.ex b/lib/software/process/virus/collect.ex index 6d7d2b44..fa7c2697 100644 --- a/lib/software/process/virus/collect.ex +++ b/lib/software/process/virus/collect.ex @@ -106,25 +106,27 @@ process Helix.Software.Process.Virus.Collect do executable do - resources(_, _, _params, _meta) do + @type custom :: %{} + + resources(_, _, _params, _meta, _) do %{} end - source_file(_, _, _, %{virus: virus}) do + source_file(_, _, _, %{virus: virus}, _) do virus.file_id end - source_connection(_, _, _, _) do + source_connection(_, _, _, _, _) do {:create, :virus_collect} end # There's no bank account when collecting the earnings of a `miner` virus - target_bank_account(_, _, _, %{virus: %{software_type: 
:virus_miner}}) do + target_bank_account(_, _, _, %{virus: %{software_type: :virus_miner}}, _) do nil end # For any other virus, there must always have a bank account - target_bank_account(_, _, %{bank_account: bank_acc}, _) do + target_bank_account(_, _, %{bank_account: bank_acc}, _, _) do bank_acc end end diff --git a/lib/universe/bank/process/bank/account/password_reveal.ex b/lib/universe/bank/process/bank/account/password_reveal.ex index e157c4bd..d80bbe5a 100644 --- a/lib/universe/bank/process/bank/account/password_reveal.ex +++ b/lib/universe/bank/process/bank/account/password_reveal.ex @@ -88,7 +88,9 @@ process Helix.Universe.Bank.Process.Bank.Account.RevealPassword do executable do - resources(_, _, %{account: account}, _) do + @type custom :: %{} + + resources(_, _, %{account: account}, _, _) do %{account: account} end end diff --git a/lib/universe/bank/process/bank/transfer.ex b/lib/universe/bank/process/bank/transfer.ex index 45278bc7..2dd89c4e 100644 --- a/lib/universe/bank/process/bank/transfer.ex +++ b/lib/universe/bank/process/bank/transfer.ex @@ -101,11 +101,13 @@ process Helix.Universe.Bank.Process.Bank.Transfer do executable do - resources(_gateway, _atm, %{transfer: transfer}, _meta) do + @type custom :: %{} + + resources(_gateway, _atm, %{transfer: transfer}, _meta, _) do %{transfer: transfer} end - source_connection(_gateway, _atm, _, _) do + source_connection(_gateway, _atm, _, _, _) do {:create, :wire_transfer} end end diff --git a/test/log/process/recover_test.exs b/test/log/process/recover_test.exs new file mode 100644 index 00000000..f8047e81 --- /dev/null +++ b/test/log/process/recover_test.exs @@ -0,0 +1,216 @@ +defmodule Helix.Log.Process.RecoverTest do + + use Helix.Test.Case.Integration + + alias Helix.Process.Model.Processable + alias Helix.Log.Process.Recover, as: LogRecoverProcess + + alias Helix.Test.Event.Helper, as: EventHelper + alias Helix.Test.Process.TOPHelper + alias Helix.Test.Server.Helper, as: ServerHelper + alias 
Helix.Test.Server.Setup, as: ServerSetup + alias Helix.Test.Software.Setup, as: SoftwareSetup + alias Helix.Test.Log.Setup, as: LogSetup + + @relay nil + + describe "LogRecoverProcess.find_next_target/1" do + test "returns `nil` when no logs are recoverable" do + server_id = ServerHelper.id() + + # None logs on the server + refute LogRecoverProcess.find_next_target(server_id) + + # Add a log, not recoverable + LogSetup.log!(server_id: server_id) + + refute LogRecoverProcess.find_next_target(server_id) + end + + test "selects a log when recoverable logs exist" do + server_id = ServerHelper.id() + + # Not recoverable + LogSetup.log!(server_id: server_id) + + # Recoverable + log = LogSetup.log!(server_id: server_id, forge_version: 50) + + assert log.log_id == LogRecoverProcess.find_next_target(server_id).log_id + + # Another server... + server_id2 = ServerHelper.id() + + # Recoverable + log = LogSetup.log!(server_id: server_id2, revisions: 2) + + assert log.log_id == LogRecoverProcess.find_next_target(server_id2).log_id + end + end + + describe "Process.Executable" do + test "starts the LogRecoverProcess (global) when everything is OK" do + {gateway, %{entity: entity}} = ServerSetup.server() + + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + _log1 = LogSetup.log!(server_id: gateway.server_id) + + params = %{} + + meta = + %{ + recover: recover, + method: :global, + log: nil, + ssh: nil, + network_id: nil, + entity_id: entity.entity_id + } + + # First we'll start the process on a server without recoverable logs + assert {:ok, process} = + LogRecoverProcess.execute(gateway, gateway, params, meta, @relay) + + # No log was selected; runs forever + refute process.tgt_log_id + assert process.objective.cpu == 999_999_999_999 + + assert process.type == :log_recover_global + assert process.data.recover_version == recover.modules.log_recover.version + + # This log is recoverable + log2 = LogSetup.log!(server_id: gateway.server_id, revisions: 2) + + # Now 
we'll restart the process. `log2` should be automatically selected + assert {:ok, process} = + LogRecoverProcess.execute(gateway, gateway, params, meta, @relay) + + assert process.tgt_log_id == log2.log_id + + TOPHelper.top_stop(gateway) + end + + test "starts the LogRecoverProcess (custom) when everything is OK" do + {gateway, %{entity: entity}} = ServerSetup.server() + + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # This is the log we'll attempt to recover + log = LogSetup.log!(server_id: gateway.server_id) + + params = %{} + + meta = + %{ + recover: recover, + method: :custom, + log: log, + ssh: nil, + network_id: nil, + entity_id: entity.entity_id + } + + assert {:ok, process} = + LogRecoverProcess.execute(gateway, gateway, params, meta, @relay) + + assert process.type == :log_recover_custom + assert process.tgt_log_id == log.log_id + + # This log is unrecoverable (already original revision), so process should + # run "forever" + assert process.objective.cpu == 999_999_999_999 + assert process.data.recover_version == recover.modules.log_recover.version + + TOPHelper.top_stop(gateway) + end + end + + describe "Process.Processable" do + test "on_retarget/2 (global)" do + {gateway, %{entity: entity}} = ServerSetup.server() + + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # The server has two logs that are recoverable. Note that once one of them + # gets recovered, necessarily the other one must be selected. 
+ LogSetup.log!(server_id: gateway.server_id, revisions: 2) + LogSetup.log!(server_id: gateway.server_id, revisions: 2) + + params = %{} + + meta = + %{ + recover: recover, + method: :global, + log: nil, + ssh: nil, + network_id: nil, + entity_id: entity.entity_id + } + + assert {:ok, process} = + LogRecoverProcess.execute(gateway, gateway, params, meta, @relay) + + # Simulate completion + assert {:noop, event} = Processable.complete(process.data, process) + + # Let LogHandler handle the `LogRecoverProcessedEvent` + EventHelper.emit(event) + + assert {{:retarget, changes}, _} = + Processable.retarget(process.data, process) + + # `retarget` selected a different log + refute changes.tgt_log_id == process.tgt_log_id + + # And the objective to this new log isn't infinity, as it is recoverable + refute changes.objective.cpu == 999_999_999_999 + + TOPHelper.top_stop(gateway) + end + + test "on_retarget/2 (custom)" do + {gateway, %{entity: entity}} = ServerSetup.server() + + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # This is the log we'll attempt to recover + log = LogSetup.log!(server_id: gateway.server_id, revisions: 2) + + # This is another log that is recoverable but should be ignored by the + # process + LogSetup.log!(server_id: gateway.server_id, revisions: 2) + + params = %{} + + meta = + %{ + recover: recover, + method: :custom, + log: log, + ssh: nil, + network_id: nil, + entity_id: entity.entity_id + } + + assert {:ok, process} = + LogRecoverProcess.execute(gateway, gateway, params, meta, @relay) + + # Simulate completion + assert {:noop, event} = Processable.complete(process.data, process) + + # Let LogHandler handle the `LogRecoverProcessedEvent` + EventHelper.emit(event) + + assert {{:retarget, changes}, _} = + Processable.retarget(process.data, process) + + # Infinity CPU objective because we've recovered the last additional + # revision of `log`. Now it is currently at the original revision. 
+ assert changes.objective.cpu == 999_999_999_999 + assert changes.tgt_log_id == log.log_id + + TOPHelper.top_stop(gateway) + end + end +end diff --git a/test/support/log/setup.ex b/test/support/log/setup.ex index 886fae07..ffe570ad 100644 --- a/test/support/log/setup.ex +++ b/test/support/log/setup.ex @@ -62,6 +62,9 @@ defmodule Helix.Test.Log.Setup do Related: Log.creation_params, Server.t, Entity.id, message :: String.t """ def fake_log(opts \\ []) do + if opts[:forger_version], + do: raise "It's `forge_version`" + # Makes credo happy... {server_id, entity_id, {type, data}, forge_version} = fake_log_get_data(opts) diff --git a/test/support/process/fake_process.ex b/test/support/process/fake_process.ex index 82fbf7d4..75f372bc 100644 --- a/test/support/process/fake_process.ex +++ b/test/support/process/fake_process.ex @@ -60,7 +60,9 @@ defmodule Helix.Test.Process do executable do - resources(_, _, _, _) do + @type custom :: %{} + + resources(_, _, _, _, _) do %{} end end @@ -106,7 +108,9 @@ defmodule Helix.Test.Process do executable do - resources(_, _, _, _) do + @type custom :: %{} + + resources(_, _, _, _, _) do %{} end end diff --git a/test/support/software/setup.ex b/test/support/software/setup.ex index 71cec596..81f709b8 100644 --- a/test/support/software/setup.ex +++ b/test/support/software/setup.ex @@ -247,14 +247,30 @@ defmodule Helix.Test.Software.Setup do end @doc """ - Opts are passed to `file/1` + - log_recover: set `log_recover` module version. 
Defaults to random + + Remaining opts are passed to `file/1` """ - def virus(opts \\ []) do - file(opts ++ [type: :virus_spyware]) + def log_recover!(opts \\ []), + do: log_recover(opts) |> elem(0) + def log_recover(opts \\ []) do + log_recover = + Keyword.get(opts, :log_recover, SoftwareHelper.random_version()) + + version_map = %{log_recover: log_recover} + modules = SoftwareHelper.generate_module(:log_recover, version_map) + + file(opts ++ [type: :log_recover, modules: modules]) end + @doc """ + Opts are passed to `file/1` + """ def virus!(opts \\ []), do: virus(opts) |> elem(0) + def virus(opts \\ []) do + file(opts ++ [type: :virus_spyware]) + end @doc """ Generates a non-executable file From 241fb6ced5f111418c5a5d80e85c9d59e0d98963 Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Sun, 19 Aug 2018 18:46:45 -0300 Subject: [PATCH 09/14] Complete handling of LogRecoverProcessedEvent --- events.json | 8 +- lib/event/dispatcher.ex | 3 +- lib/log/action/log.ex | 15 ++-- lib/log/event/handler/log.ex | 10 +-- lib/log/event/log.ex | 89 ++++++++++++++++++-- lib/notification/model/code/server.ex | 56 ++++++++++++ lib/process/model/process/resources/utils.ex | 6 +- lib/server/websocket/channel/server.ex | 32 ++++++- test/features/process/progress_test.exs | 2 +- test/log/event/handler/log_test.exs | 33 +++++++- test/log/event/log_test.exs | 6 +- test/support/event/setup/log.ex | 32 +++++-- test/support/process/setup.ex | 2 + test/support/process/setup/data.ex | 41 +++++++++ 14 files changed, 293 insertions(+), 42 deletions(-) diff --git a/events.json b/events.json index df390bce..4f10d7a0 100644 --- a/events.json +++ b/events.json @@ -73,7 +73,7 @@ "emits": [ "Log.Created", "Log.Modified", - "Log.Deleted" + "Log.Destroyed" ] }, "Process.Process":{ @@ -176,7 +176,7 @@ "publishable": [ "Log.Created", "Log.Modified", - "Log.Deleted", + "Log.Destroyed", "Network.Bounce.Created", "Network.Bounce.CreateFailed", "Network.Bounce.Removed", @@ -188,7 +188,7 @@ 
"Process.Completed", "Process.Killed", "File.Added", - "File.Deleted", + "File.Destroyed", "File.Uploaded", "Story.Email.Sent", "Story.Reply.Sent", @@ -207,7 +207,7 @@ "DownloadCracker": { "filters": [ "File.Downloaded", - "File.Deleted", + "File.Destroyed", "Process.Created" ], "emits": [ diff --git a/lib/event/dispatcher.ex b/lib/event/dispatcher.ex index 960d0fa6..69f867ca 100644 --- a/lib/event/dispatcher.ex +++ b/lib/event/dispatcher.ex @@ -147,7 +147,8 @@ defmodule Helix.Event.Dispatcher do event LogEvent.Forge.Processed event LogEvent.Recover.Processed event LogEvent.Log.Created - event LogEvent.Log.Deleted + event LogEvent.Log.Destroyed + event LogEvent.Log.Recovered event LogEvent.Log.Revised # Custom handlers diff --git a/lib/log/action/log.ex b/lib/log/action/log.ex index e8ed9407..e62de247 100644 --- a/lib/log/action/log.ex +++ b/lib/log/action/log.ex @@ -6,7 +6,8 @@ defmodule Helix.Log.Action.Log do alias Helix.Log.Model.Log alias Helix.Log.Event.Log.Created, as: LogCreatedEvent - alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent + alias Helix.Log.Event.Log.Destroyed, as: LogDestroyedEvent + alias Helix.Log.Event.Log.Recovered, as: LogRecoveredEvent alias Helix.Log.Event.Log.Revised, as: LogRevisedEvent @spec create(Server.id, Entity.id, Log.info, pos_integer | nil) :: @@ -15,7 +16,7 @@ defmodule Helix.Log.Action.Log do @doc """ Creates a new log linked to `entity` on `server` with `log_info` as content. - This log may be natural (created automatically by the game as a result to a + This log may be natural (created automatically by the game as a result of a player's action) or artificial (explicitly created using LogForger.Edit). 
""" def create(server_id, entity_id, log_info, forge_version \\ nil) do @@ -45,23 +46,23 @@ defmodule Helix.Log.Action.Log do end end - @spec recover(Log.t) :: - {:ok, :destroyed, [LogDeletedEvent.t]} + @spec recover(Log.t, Entity.id) :: + {:ok, :destroyed, [LogDestroyedEvent.t]} | {:ok, :original, []} | {:ok, :recovered, [LogRecoveredEvent.t]} @doc """ Attempts to recover the given `log`. """ - def recover(log = %Log{}) do + def recover(log = %Log{}, entity_id = %Entity.ID{}) do case LogInternal.recover(log) do :destroyed -> - {:ok, :destroyed, [LogDeletedEvent.new(log)]} + {:ok, :destroyed, [LogDestroyedEvent.new(log, entity_id)]} {:original, _} -> {:ok, :original, []} {:recovered, new_log} -> - {:ok, :recovered, [LogRecoveredEvent.new(new_log)]} + {:ok, :recovered, [LogRecoveredEvent.new(new_log, entity_id)]} end end end diff --git a/lib/log/event/handler/log.ex b/lib/log/event/handler/log.ex index 97e94510..356af2c8 100644 --- a/lib/log/event/handler/log.ex +++ b/lib/log/event/handler/log.ex @@ -41,7 +41,7 @@ defmodule Helix.Log.Event.Handler.Log do ) with {:ok, _, events} <- result do - Event.emit(events) + Event.emit(events, from: event) end end @@ -57,7 +57,7 @@ defmodule Helix.Log.Event.Handler.Log do log = %Log{} <- LogQuery.fetch(event.target_log_id), {:ok, _, events} <- revise.(log) do - Event.emit(events) + Event.emit(events, from: event) end end @@ -74,12 +74,12 @@ defmodule Helix.Log.Event.Handler.Log do """ def recover_processed(event = %LogRecoverProcessedEvent{target_log_id: nil}), do: sigretarget(event) - def recover_processed(event = %LogRecoverProcessedEvent{target_log_id: _) do + def recover_processed(event = %LogRecoverProcessedEvent{target_log_id: _}) do with \ log = %Log{} <- LogQuery.fetch(event.target_log_id), - {:ok, _, events} <- LogAction.recover(log) + {:ok, _, events} <- LogAction.recover(log, event.entity_id) do - Event.emit(events) + Event.emit(events, from: event) end sigretarget(event) diff --git a/lib/log/event/log.ex 
b/lib/log/event/log.ex index a7c1f7a0..f2eccb9b 100644 --- a/lib/log/event/log.ex +++ b/lib/log/event/log.ex @@ -117,32 +117,90 @@ defmodule Helix.Log.Event.Log do end end - event Deleted do + event Recovered do @moduledoc """ - LogDeletedEvent is fired when a forged log is recovered beyond its original - revision, leading to the log deletion. + LogRecoveredEvent is fired when a forged log has a revision popped out of + its stack, and a new revision is discovered. """ + alias Helix.Entity.Model.Entity alias Helix.Log.Model.Log @type t :: + %__MODULE__{ + log: Log.t, + entity_id: Entity.id + } + + event_struct [:log, :entity_id] + + @spec new(Log.t, Entity.id) :: + t + def new(log = %Log{}, entity_id) do %__MODULE__{ - log: Log.t + log: log, + entity_id: entity_id } + end - event_struct [:log] + publish do - @spec new(Log.t) :: + @event :log_recovered + + def generate_payload(event, _socket) do + data = %{ + log_id: to_string(event.log.log_id) + } + + {:ok, data} + end + + def whom_to_publish(event), + do: %{server: event.log.server_id} + end + + notification do + @moduledoc """ + Notify the user when the process finishes and the log got recovered. + """ + + @class :server + @code :log_recovered + + def whom_to_notify(event), + do: %{account_id: event.entity_id, server_id: event.log.server_id} + end + end + + event Destroyed do + @moduledoc """ + LogDestroyedEvent is fired when a forged log is recovered beyond its + original revision, leading to the log deletion. 
+ """ + + alias Helix.Entity.Model.Entity + alias Helix.Log.Model.Log + + @type t :: + %__MODULE__{ + log: Log.t, + entity_id: Entity.id + } + + event_struct [:log, :entity_id] + + @spec new(Log.t, Entity.id) :: t - def new(log = %Log{}) do + def new(log = %Log{}, entity_id) do %__MODULE__{ - log: log + log: log, + entity_id: entity_id } end publish do - @event :log_deleted + @event :log_destroyed def generate_payload(event, _socket) do data = %{ @@ -155,5 +213,18 @@ defmodule Helix.Log.Event.Log do def whom_to_publish(event), do: %{server: event.log.server_id} end + + notification do + @moduledoc """ + Notify the user when the process finishes and the artificial log is + destroyed. + """ + + @class :server + @code :log_destroyed + + def whom_to_notify(event), + do: %{account_id: event.entity_id, server_id: event.log.server_id} + end end end diff --git a/lib/notification/model/code/server.ex b/lib/notification/model/code/server.ex index 8fa08966..a6f9d940 100644 --- a/lib/notification/model/code/server.ex +++ b/lib/notification/model/code/server.ex @@ -2,6 +2,7 @@ defmodule Helix.Notification.Model.Code.Server do use Helix.Notification.Model.Code + # TODO: Macrify code :file_downloaded, 0 do @moduledoc """ `FileDownloadedNotification` notifies the player that their download has @@ -87,4 +88,59 @@ defmodule Helix.Notification.Model.Code.Server do def render_data(data), do: data end + + code :log_recovered, 6 do + @moduledoc """ + `LogRecoveredNotification` notifies the player that the LogRecover operation + has finished successfully, and it popped out of the stack a new revision. 
+ """ + + alias Helix.Log.Model.Log + + @doc false + def generate_data(event) do + %{ + log_id: to_string(event.log.log_id) + } + end + + @doc false + def after_read_hook(data) do + %{ + log_id: Log.ID.cast!(data.log_id) + } + end + + @doc false + def render_data(data), + do: data + end + + code :log_destroyed, 7 do + @moduledoc """ + `LogDestroyedNotification` notifies the player that the LogRecover operation + has finished successfully, and it recovered beyond the original revision of + a forged (artificial) log, leading to its deletion. + """ + + alias Helix.Log.Model.Log + + @doc false + def generate_data(event) do + %{ + log_id: to_string(event.log.log_id) + } + end + + @doc false + def after_read_hook(data) do + %{ + log_id: Log.ID.cast!(data.log_id) + } + end + + @doc false + def render_data(data), + do: data + end end diff --git a/lib/process/model/process/resources/utils.ex b/lib/process/model/process/resources/utils.ex index 9b3cf76f..6c56c5d7 100644 --- a/lib/process/model/process/resources/utils.ex +++ b/lib/process/model/process/resources/utils.ex @@ -3,9 +3,9 @@ defmodule Helix.Process.Model.Process.Resources.Utils do alias Helix.Network.Model.Network @spec format_network(Network.idtb, term) :: - {Network.id, term} + {Network.id, term} def format_network(key = %Network.ID{}, value), do: {key, value} - def format_network(key, value), - do: {Network.ID.cast!(key), value} + def format_network(key, value), + do: {Network.ID.cast!(key), value} end diff --git a/lib/server/websocket/channel/server.ex b/lib/server/websocket/channel/server.ex index 68ba0fcc..c5f61a28 100644 --- a/lib/server/websocket/channel/server.ex +++ b/lib/server/websocket/channel/server.ex @@ -14,6 +14,7 @@ channel Helix.Server.Websocket.Channel.Server do alias Helix.Server.State.Websocket.Channel, as: ServerWebsocketChannelState alias Helix.Log.Websocket.Requests.Forge, as: LogForgeRequest + alias Helix.Log.Websocket.Requests.Recover, as: LogRecoverRequest alias 
Helix.Network.Websocket.Requests.Browse, as: BrowseRequest @@ -162,7 +163,7 @@ channel Helix.Server.Websocket.Channel.Server do - "bad_log_data" - The given `log_data` is not valid for the `log_type`. Henforcer: - - "forger_not_found" - Player does not have a valid LogForger file. + - "forger_not_found" - Player does not have a valid LogForger software. - "log_not_found" (edit) - The given log ID was not found. - "log_not_belongs" (edit) - Attempting to edit a log that does not belong to the open channel. @@ -171,6 +172,35 @@ channel Helix.Server.Websocket.Channel.Server do """ topic "log.forge", LogForgeRequest + @doc """ + Starts a LogRecoverProcess. When recovering, the player may either start the + process using the `global` method or the `custom` method. + + The `global` method scans all logs on the server, randomly selects a + recoverable log and starts working on it. The `custom` method works on a + specific log defined by the user. + + Params (global): + - *method: Explicitly set method to "global" + + Params (custom): + - *log_id: ID of the log that will be recovered. + - *method: Explicitly set method to "custom" + + Errors: + + Henforcer: + - "recover_not_found" - Player does not have a valid LogRecover software. + - "log_not_found" (custom) - The given log ID was not found. + - "log_not_belongs" (custom) - Attempting to recover a log that does not + belong to the open channel. + + Input Validation: + - "bad_method" - Method is neither "global" or "custom" + + base errors + """ + topic "log.recover", LogRecoverRequest + @doc """ Updates the player's motherboard. May be used to attach, detach or update the mobo components. 
diff --git a/test/features/process/progress_test.exs b/test/features/process/progress_test.exs index a5c0b01d..21284905 100644 --- a/test/features/process/progress_test.exs +++ b/test/features/process/progress_test.exs @@ -87,7 +87,7 @@ defmodule Helix.Test.Features.Process.Progress do if System.get_env("HELIX_TEST_ENV") == "jenkins" do 0.20 # Almost useless test else - 0.05 + 0.06 end end end diff --git a/test/log/event/handler/log_test.exs b/test/log/event/handler/log_test.exs index ce40b9d7..89c7e550 100644 --- a/test/log/event/handler/log_test.exs +++ b/test/log/event/handler/log_test.exs @@ -127,7 +127,7 @@ defmodule Helix.Log.Event.Handler.LogTest do log_before = LogQuery.fetch(log.log_id) # Simulate handling of the event - LogHandler.log_forge_processed(event) + LogHandler.forge_processed(event) log_after = LogQuery.fetch(log.log_id) @@ -159,7 +159,7 @@ defmodule Helix.Log.Event.Handler.LogTest do assert [] == LogQuery.get_logs_on_server(process.target_id) # Simulate handling of the event - LogHandler.log_forge_processed(event) + LogHandler.forge_processed(event) # Now the process server has a new log assert [log] = LogQuery.get_logs_on_server(process.target_id) @@ -171,4 +171,33 @@ defmodule Helix.Log.Event.Handler.LogTest do assert log.revision.forge_version == 50 end end + + describe "log_recover_processed/1" do + test "pops out revision from stack" do + server_id = ServerHelper.id() + log = LogSetup.log!(server_id: server_id, revisions: 2) + + process = + ProcessSetup.process!( + target_id: server_id, + type: :log_recover_custom, + tgt_log_id: log.log_id, + data: [recover_version: 50] + ) + + event = EventSetup.Log.recover_processed(process: process) + + assert [old_log] = LogQuery.get_logs_on_server(process.target_id) + + LogHandler.recover_processed(event) + + assert [new_log] = LogQuery.get_logs_on_server(process.target_id) + + # Old log was at revision 2... 
+ assert old_log.revision_id == 2 + + # But new log is at revision 1 + assert new_log.revision_id == 1 + end + end end diff --git a/test/log/event/log_test.exs b/test/log/event/log_test.exs index 2144425d..c03a78d9 100644 --- a/test/log/event/log_test.exs +++ b/test/log/event/log_test.exs @@ -51,9 +51,9 @@ defmodule Helix.Log.Event.LogTest do end end - describe "LogDeletedEvent" do + describe "LogDestroyedEvent" do test "Publishable.generate_payload/2" do - event = EventSetup.Log.deleted() + event = EventSetup.Log.destroyed() # Generates the payload assert {:ok, data} = Publishable.generate_payload(event, @mocked_socket) @@ -62,7 +62,7 @@ defmodule Helix.Log.Event.LogTest do assert data.log_id == to_string(event.log.log_id) # Returned event is correct - assert "log_deleted" == Publishable.get_event_name(event) + assert "log_destroyed" == Publishable.get_event_name(event) end end end diff --git a/test/support/event/setup/log.ex b/test/support/event/setup/log.ex index 4a163303..fc6f59b9 100644 --- a/test/support/event/setup/log.ex +++ b/test/support/event/setup/log.ex @@ -3,11 +3,13 @@ defmodule Helix.Test.Event.Setup.Log do alias Helix.Log.Model.Log alias Helix.Log.Event.Forge.Processed, as: LogForgeProcessedEvent + alias Helix.Log.Event.Recover.Processed, as: LogRecoverProcessedEvent alias Helix.Log.Event.Log.Created, as: LogCreatedEvent - alias Helix.Log.Event.Log.Deleted, as: LogDeletedEvent + alias Helix.Log.Event.Log.Destroyed, as: LogDestroyedEvent alias Helix.Log.Event.Log.Revised, as: LogRevisedEvent + alias Helix.Test.Entity.Helper, as: EntityHelper alias Helix.Test.Log.Setup, as: LogSetup alias Helix.Test.Process.Setup, as: ProcessSetup @@ -23,7 +25,7 @@ defmodule Helix.Test.Event.Setup.Log do @doc """ Opts: - - process: Source process. + - process: Source process (optional). - process_type: `:log_forge_edit` or `:log_forge_created`. 
If not set, a random type will be selected """ @@ -39,10 +41,28 @@ defmodule Helix.Test.Event.Setup.Log do LogForgeProcessedEvent.new(process, process.data) end - def deleted, - do: deleted(generate_fake_log()) - def deleted(log = %Log{}), - do: LogDeletedEvent.new(log) + @doc """ + Opts: + - process: Source process (optional). + - process_type: `:log_recover_custom` or `:log_recover_global`. If not set, a + random type will be selected. + """ + def recover_processed(opts) do + process = + if opts[:process] do + opts[:process] + else + process_type = Keyword.get(opts, :process_type, :log_recover) + ProcessSetup.fake_process!(type: process_type) + end + + LogRecoverProcessedEvent.new(process, process.data) + end + + def destroyed, + do: destroyed(generate_fake_log(), EntityHelper.id()) + def destroyed(log = %Log{}, entity_id), + do: LogDestroyedEvent.new(log, entity_id) defp generate_fake_log do {log, _} = LogSetup.fake_log() diff --git a/test/support/process/setup.ex b/test/support/process/setup.ex index e738ca01..bfccc929 100644 --- a/test/support/process/setup.ex +++ b/test/support/process/setup.ex @@ -11,6 +11,8 @@ defmodule Helix.Test.Process.Setup do @internet_id NetworkHelper.internet_id() + def process!(opts \\ []), + do: process(opts) |> elem(0) def process(opts \\ []) do {_, related = %{params: params}} = fake_process(opts) {:ok, inserted} = ProcessInternal.create(params) diff --git a/test/support/process/setup/data.ex b/test/support/process/setup/data.ex index 6584079d..34fc6aee 100644 --- a/test/support/process/setup/data.ex +++ b/test/support/process/setup/data.ex @@ -17,6 +17,7 @@ defmodule Helix.Test.Process.Data.Setup do # Processes alias Helix.Log.Process.Forge, as: LogForgeProcess + alias Helix.Log.Process.Recover, as: LogRecoverProcess alias Helix.Software.Process.Cracker.Bruteforce, as: CrackerBruteforce alias Helix.Software.Process.File.Transfer, as: FileTransferProcess alias Helix.Software.Process.File.Install, as: FileInstallProcess @@ -253,11 
+254,51 @@ defmodule Helix.Test.Process.Data.Setup do {process_type, data, meta, resources} end + def custom(:log_recover, data_opts, meta) do + [:log_recover_custom, :log_recover_global] + |> Enum.random() + |> custom(data_opts, meta) + end + + def custom(:log_recover_custom, data_opts, meta), + do: custom_log_recover({:log_recover_custom, :custom}, data_opts, meta) + + def custom(:log_recover_global, data_opts, meta), + do: custom_log_recover({:log_recover_global, :global}, data_opts, meta) + + defp custom_log_recover({process_type, method}, data_opts, meta) do + version = + Keyword.get(data_opts, :recover_version, SoftwareHelper.random_version()) + + src_file_id = meta.src_file_id || SoftwareHelper.id() + tgt_log_id = meta.tgt_log_id || LogHelper.id() + + data = %LogRecoverProcess{recover_version: version} + + resources = + %{ + l_dynamic: [:cpu], + r_dynamic: [], + static: TOPHelper.Resources.random_static(), + objective: TOPHelper.Resources.objective(cpu: 500) + } + + meta = + %{meta | + tgt_log_id: tgt_log_id, + src_file_id: src_file_id + } + + {process_type, data, meta, resources} + end + defp custom_implementations do [ :bruteforce, :log_forge_edit, :log_forge_create, + :log_recover_custom, + :log_recover_global, :file_download, :file_upload, :install_virus From 2a215ea48357228f2aade84d894c793ea3c46bd3 Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Wed, 29 Aug 2018 18:46:55 -0300 Subject: [PATCH 10/14] Add LogRecoverRequest --- lib/log/action/flow/recover.ex | 96 ++++++ lib/log/event/log.ex | 6 +- lib/log/henforcer/log/recover.ex | 104 ++++++ lib/log/process/recover.ex | 8 +- lib/log/public/recover.ex | 10 + lib/log/websocket/requests/recover.ex | 120 +++++++ test/features/log/recover_test.exs | 305 ++++++++++++++++++ test/log/event/log_test.exs | 18 ++ test/log/henforcer/log/recover_test.exs | 82 +++++ test/log/websocket/requests/recover_test.exs | 317 +++++++++++++++++++ test/support/event/setup/log.ex | 6 + test/support/process/helper/top.ex | 4 +- 
test/support/process/setup/data.ex | 26 +- 13 files changed, 1081 insertions(+), 21 deletions(-) create mode 100644 lib/log/action/flow/recover.ex create mode 100644 lib/log/henforcer/log/recover.ex create mode 100644 lib/log/public/recover.ex create mode 100644 lib/log/websocket/requests/recover.ex create mode 100644 test/features/log/recover_test.exs create mode 100644 test/log/henforcer/log/recover_test.exs create mode 100644 test/log/websocket/requests/recover_test.exs diff --git a/lib/log/action/flow/recover.ex b/lib/log/action/flow/recover.ex new file mode 100644 index 00000000..17239175 --- /dev/null +++ b/lib/log/action/flow/recover.ex @@ -0,0 +1,96 @@ +# credo:disable-for-this-file Credo.Check.Refactor.FunctionArity +defmodule Helix.Log.Action.Flow.Recover do + + alias Helix.Event + alias Helix.Entity.Model.Entity + alias Helix.Network.Model.Connection + alias Helix.Network.Model.Tunnel + alias Helix.Server.Model.Server + alias Helix.Software.Model.File + alias Helix.Log.Model.Log + + alias Helix.Log.Process.Recover, as: LogRecoverProcess + + @spec global( + Server.t, + Server.t, + File.t, + Entity.id, + {Tunnel.t, Connection.ssh} | nil, + Event.relay + ) :: + term + def global( + gateway = %Server{}, + endpoint = %Server{}, + recover = %File{software_type: :log_recover}, + entity_id = %Entity.ID{}, + conn, + relay + ) do + start_process(gateway, endpoint, nil, recover, entity_id, conn, relay) + end + + @spec custom( + Server.t, + Server.t, + Log.t, + File.t, + Entity.id, + {Tunnel.t, Connection.ssh} | nil, + Event.relay + ) :: + term + def custom( + gateway = %Server{}, + endpoint = %Server{}, + log = %Log{}, + recover = %File{software_type: :log_recover}, + entity_id = %Entity.ID{}, + conn, + relay + ) do + start_process( + gateway, endpoint, log, recover, entity_id, conn, relay + ) + end + + defp start_process( + gateway = %Server{}, + endpoint = %Server{}, + log, + recover = %File{software_type: :log_recover}, + entity_id = %Entity.ID{}, + conn_info, + 
relay + ) do + method = + if is_nil(log) do + :global + else + :custom + end + + {network_id, ssh} = + if is_nil(conn_info) do + {nil, nil} + else + {tunnel, ssh} = conn_info + {tunnel.network_id, ssh} + end + + params = %{} + + meta = + %{ + recover: recover, + log: log, + method: method, + ssh: ssh, + entity_id: entity_id, + network_id: network_id + } + + LogRecoverProcess.execute(gateway, endpoint, params, meta, relay) + end +end diff --git a/lib/log/event/log.ex b/lib/log/event/log.ex index f2eccb9b..987ba2f7 100644 --- a/lib/log/event/log.ex +++ b/lib/log/event/log.ex @@ -145,12 +145,12 @@ defmodule Helix.Log.Event.Log do publish do + alias Helix.Log.Public.Index, as: LogIndex + @event :log_recovered def generate_payload(event, _socket) do - data = %{ - log_id: to_string(event.log.log_id) - } + data = LogIndex.render_log(event.log) {:ok, data} end diff --git a/lib/log/henforcer/log/recover.ex b/lib/log/henforcer/log/recover.ex new file mode 100644 index 00000000..bff04f00 --- /dev/null +++ b/lib/log/henforcer/log/recover.ex @@ -0,0 +1,104 @@ +defmodule Helix.Log.Henforcer.Log.Recover do + + import Helix.Henforcer + + alias Helix.Server.Henforcer.Server, as: ServerHenforcer + alias Helix.Server.Model.Server + alias Helix.Software.Henforcer.File, as: FileHenforcer + alias Helix.Software.Model.File + alias Helix.Log.Henforcer.Log, as: LogHenforcer + alias Helix.Log.Model.Log + + @type can_recover_global_relay :: %{gateway: Server.t, recover: File.t} + @type can_recover_global_relay_partial :: map + @type can_recover_global_error :: + ServerHenforcer.server_exists_error + | exists_recover_error + + @spec can_recover_global?(Server.id) :: + {true, can_recover_global_relay} + | can_recover_global_error + @doc """ + Henforces that the player can start a global LogRecover process. 
+ + In order to recover globally, all a user needs to have is: + - SSH access to the server + - a valid LogRecover file on his gateway filesystem + """ + def can_recover_global?(gateway_id) do + with \ + {true, r1} <- ServerHenforcer.server_exists?(gateway_id), + r1 = replace(r1, :server, :gateway), + gateway = r1.gateway, + {true, r2} <- exists_recover?(gateway), + r2 = replace(r2, :file, :recover, only: true) + do + [r1, r2] + |> relay() + |> reply_ok() + end + end + + @type can_recover_custom_relay :: + %{log: Log.t, gateway: Server.t, recover: File.t} + @type can_recover_custom_relay_partial :: map + @type can_recover_custom_error :: + LogHenforcer.log_exists_error + | LogHenforcer.belongs_to_server_error + | ServerHenforcer.server_exists_error + | exists_recover_error + + @spec can_recover_custom?(Log.id, Server.id, Server.id) :: + {true, can_recover_custom_relay} + | can_recover_custom_error + @doc """ + Henforces that the player can start a custom LogRecover process. + + In order to recover a specific (custom) log, the player must have the same + requirements of a `global` recover (SSH access and valid LogRecover file), and + also the given `log_id` must exist, and it must exist on the target server. 
+ """ + def can_recover_custom?(log_id = %Log.ID{}, gateway_id, target_id) do + with \ + {true, r1} <- LogHenforcer.log_exists?(log_id), + log = r1.log, + + {true, _} <- LogHenforcer.belongs_to_server?(log, target_id), + + {true, r2} <- ServerHenforcer.server_exists?(gateway_id), + r2 = replace(r2, :server, :gateway), + gateway = r2.gateway, + + {true, r3} <- exists_recover?(gateway), + r3 = replace(r3, :file, :recover, only: true) + do + [r1, r2, r3] + |> relay() + |> reply_ok() + end + end + + @type exists_recover_relay :: FileHenforcer.exists_software_module_relay + @type exists_recover_relay_partial :: + FileHenforcer.exists_software_module_relay_partial + @type exists_recover_error :: + {false, {:recover, :not_found}, exists_recover_relay_partial} + + @spec exists_recover?(Server.t) :: + {true, exists_recover_relay} + | exists_recover_error + @doc """ + Ensures that exists a Recover file on `server`, sorting the result by `module` + (only `:log_recover` in this context). + + It's simply a wrapper over `FileHenforcer.exists_software_module?` used to + generate a more meaningful error message ("recover_not_found") instead of + "module_not_found". + """ + def exists_recover?(server = %Server{}) do + henforce_else( + FileHenforcer.exists_software_module?(:log_recover, server), + {:recover, :not_found} + ) + end +end diff --git a/lib/log/process/recover.ex b/lib/log/process/recover.ex index ce95d4dd..9c0bff8d 100644 --- a/lib/log/process/recover.ex +++ b/lib/log/process/recover.ex @@ -139,12 +139,12 @@ process Helix.Log.Process.Recover do on_retarget(process, _data) do {new_log, method} = - # On `log_recover_global`, we must select a new log on each iteration + # On `log_recover_global`, we must select a new log on each iteration. if process.type == :log_recover_global do log = LogRecoverProcess.find_next_target(process.target_id) {log, :global} - # On `log_recover_custom` we always work at the same log + # On `log_recover_custom` we always work at the same log. 
May be `nil`. else log = LogQuery.fetch(process.tgt_log_id) {log, :custom} @@ -231,7 +231,9 @@ process Helix.Log.Process.Recover do if f.log.revisions.extra == 0 do 999_999_999_999 else - (f.log.revisions.total * multiplier) / f.recover.version.log_recover + t = (f.log.revisions.total * multiplier) / f.recover.version.log_recover + + t + 5000 end end diff --git a/lib/log/public/recover.ex b/lib/log/public/recover.ex new file mode 100644 index 00000000..a3c28cd5 --- /dev/null +++ b/lib/log/public/recover.ex @@ -0,0 +1,10 @@ +defmodule Helix.Log.Public.Recover do + + alias Helix.Log.Action.Flow.Recover, as: RecoverFlow + + defdelegate global(gateway, endpoint, recover, entity, conn_info, relay), + to: RecoverFlow + + defdelegate custom(gateway, endpoint, log, recover, entity, conn_info, relay), + to: RecoverFlow +end diff --git a/lib/log/websocket/requests/recover.ex b/lib/log/websocket/requests/recover.ex new file mode 100644 index 00000000..4bfcdca1 --- /dev/null +++ b/lib/log/websocket/requests/recover.ex @@ -0,0 +1,120 @@ +import Helix.Websocket.Request + +request Helix.Log.Websocket.Requests.Recover do + @moduledoc """ + `LogRecoverRequest` is called when the player wants to recover a log. It may + either be a `global` recovery, in which case a recoverable log is randomly + selected from all logs within the server, or it may be a `custom` recovery, + in which case a specific log to be recovered is defined by the player. 
+ """ + + import HELL.Macros + + alias Helix.Server.Query.Server, as: ServerQuery + alias Helix.Log.Henforcer.Log.Recover, as: LogRecoverHenforcer + alias Helix.Log.Model.Log + alias Helix.Log.Public.Recover, as: RecoverPublic + + def check_params(request, socket) do + case request.unsafe["method"] do + "global" -> + check_params_global(request, socket) + + "custom" -> + check_params_custom(request, socket) + + _ -> + reply_error(request, "bad_method") + end + end + + defp check_params_global(request, _socket) do + with \ + true <- is_nil(request.unsafe["log_id"]) + do + update_params(request, %{method: :global}, reply: true) + else + _ -> + bad_request(request) + end + end + + defp check_params_custom(request, _socket) do + with \ + {:ok, log_id} <- Log.ID.cast(request.unsafe["log_id"]) + do + params = %{method: :custom, log_id: log_id} + + update_params(request, params, reply: true) + else + _ -> + bad_request(request) + end + end + + def check_permissions(request = %{params: %{method: :global}}, socket) do + gateway_id = socket.assigns.gateway.server_id + + case LogRecoverHenforcer.can_recover_global?(gateway_id) do + {true, relay} -> + meta = %{gateway: relay.gateway, recover: relay.recover} + update_meta(request, meta, reply: true) + + {false, reason, _} -> + reply_error(request, reason) + end + end + + def check_permissions(request = %{params: %{method: :custom}}, socket) do + log_id = request.params.log_id + gateway_id = socket.assigns.gateway.server_id + target_id = socket.assigns.destination.server_id + + can_recover? = + LogRecoverHenforcer.can_recover_custom?(log_id, gateway_id, target_id) + + case can_recover? 
do + {true, relay} -> + meta = %{gateway: relay.gateway, recover: relay.recover, log: relay.log} + update_meta(request, meta, reply: true) + + {false, reason, _} -> + reply_error(request, reason) + end + end + + def handle_request(request, socket) do + entity_id = socket.assigns.entity_id + recover = request.meta.recover + gateway = request.meta.gateway + relay = request.relay + + {target, conn_info} = + if socket.assigns.meta.access == :local do + {gateway, nil} + else + { + ServerQuery.fetch(socket.assigns.destination.server_id), + {socket.assigns.tunnel, socket.assigns.ssh} + } + end + + hespawn fn -> + if request.params.method == :global do + RecoverPublic.global( + gateway, target, recover, entity_id, conn_info, relay + ) + else + log = request.meta.log + + RecoverPublic.custom( + gateway, target, log, recover, entity_id, conn_info, relay + ) + end + end + + reply_ok(request) + end + + render_empty() +end diff --git a/test/features/log/recover_test.exs b/test/features/log/recover_test.exs new file mode 100644 index 00000000..0f723d70 --- /dev/null +++ b/test/features/log/recover_test.exs @@ -0,0 +1,305 @@ +defmodule Helix.Test.Features.Log.Recover do + + use Helix.Test.Case.Integration + + import Phoenix.ChannelTest + import Helix.Test.Channel.Macros + import Helix.Test.Macros + + alias Helix.Log.Query.Log, as: LogQuery + alias Helix.Process.Model.Process + alias Helix.Process.Query.Process, as: ProcessQuery + + alias Helix.Test.Account.Helper, as: AccountHelper + alias Helix.Test.Channel.Setup, as: ChannelSetup + alias Helix.Test.Channel.Request.Helper, as: RequestHelper + alias Helix.Test.Log.Setup, as: LogSetup + alias Helix.Test.Network.Helper, as: NetworkHelper + alias Helix.Test.Process.TOPHelper + alias Helix.Test.Software.Setup, as: SoftwareSetup + + @internet_id NetworkHelper.internet_id() + + @moduletag :feature + + describe "log recover" do + test "LogRecover.Global life cycle (local, natural)" do + {socket, %{entity: entity, server: gateway}} = + 
ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, _} = + ChannelSetup.join_server( + gateway_id: gateway.server_id, own_server: true, socket: socket + ) + + # Connect to account channel, so we can receive notifications + account_id = AccountHelper.cast_from_entity(entity.entity_id) + ChannelSetup.join_account(socket: socket, account_id: account_id) + + # Logs that will be worked on (one of them is recoverable) + LogSetup.log!(server_id: gateway.server_id) + log = LogSetup.log!(server_id: gateway.server_id, revisions: 2) + + # Prepare request params + request_id = RequestHelper.id() + + params = + %{ + "method" => "global", + "request_id" => request_id + } + + # We'll attempt to recover a log at localhost. This should fail because we + # do not have a LogRecover! + ref = push socket, "log.recover", params + assert_reply ref, :error, response, timeout(:fast) + + assert response.data.message == "recover_not_found" + assert response.meta.request_id == request_id + + # Let's create the recover and try again... + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # It worked! 
+ ref = push socket, "log.recover", params + assert_reply ref, :ok, _, timeout(:slow) + + [process_created_event] = wait_events [:process_created] + + assert process_created_event.data.type == "log_recover_global" + assert process_created_event.meta.request_id == request_id + + process = + process_created_event.data.process_id + |> Process.ID.cast!() + |> ProcessQuery.fetch() + + # Make sure the process was created correctly + assert process.type == :log_recover_global + assert process.data.recover_version == recover.modules.log_recover.version + assert process.tgt_log_id == log.log_id + + assert process.gateway_id == gateway.server_id + assert process.target_id == gateway.server_id + assert process.source_entity_id == entity.entity_id + assert process.src_file_id == recover.file_id + + # local process; no connection info + refute process.network_id + refute process.src_connection_id + refute process.bounce_id + + # Simulate completion of the software + TOPHelper.force_completion(process) + + [log_recovered_event, notification_added_event] = + wait_events [:log_recovered, :notification_added] + + [original_revision, _fake_revision] = LogQuery.fetch_revisions(log) + + # Local server receives information about the newly recovered log + assert log_recovered_event.data.type == to_string(original_revision.type) + assert_map_str log_recovered_event.data.data, original_revision.data + + # The recently recovered log has changed its last revision + new_log = LogQuery.fetch(log.log_id) + + assert new_log.revision_id == 1 + assert new_log.server_id == gateway.server_id + assert new_log.revision.type == original_revision.type + assert_map_str new_log.revision.data, original_revision.data + + # Client received the log notification + assert notification_added_event.data.class == :server + assert notification_added_event.data.code == :log_recovered + assert notification_added_event.data.data.log_id == to_string(log.log_id) + + # LogRecoverProcess is recursive, so it should still 
be working. + new_process = ProcessQuery.fetch(process.process_id) + + # It's not working on any log, as there aren't any recoverable logs now + refute new_process.tgt_log_id + + TOPHelper.top_stop(gateway) + end + + test "LogRecover.Custom life cycle (local, artificial)" do + {socket, %{entity: entity, server: gateway}} = + ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, _} = + ChannelSetup.join_server( + gateway_id: gateway.server_id, own_server: true, socket: socket + ) + + # Connect to account channel, so we can receive notifications + account_id = AccountHelper.cast_from_entity(entity.entity_id) + ChannelSetup.join_account(socket: socket, account_id: account_id) + + # Log that will be worked on + log = LogSetup.log!(server_id: gateway.server_id, forge_version: 50) + + # Create LogRecover software + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # Prepare request params + request_id = RequestHelper.id() + + params = + %{ + "method" => "custom", + "log_id" => to_string(log.log_id), + "request_id" => request_id + } + + # It worked! 
+ ref = push socket, "log.recover", params + assert_reply ref, :ok, _, timeout(:slow) + + [process_created_event] = wait_events [:process_created] + + assert process_created_event.data.type == "log_recover_custom" + assert process_created_event.meta.request_id == request_id + + process = + process_created_event.data.process_id + |> Process.ID.cast!() + |> ProcessQuery.fetch() + + # Make sure the process was created correctly + assert process.type == :log_recover_custom + assert process.data.recover_version == recover.modules.log_recover.version + assert process.tgt_log_id == log.log_id + + assert process.gateway_id == gateway.server_id + assert process.target_id == gateway.server_id + assert process.source_entity_id == entity.entity_id + assert process.src_file_id == recover.file_id + + # local process; no connection info + refute process.network_id + refute process.src_connection_id + refute process.bounce_id + + # Simulate completion of the software + TOPHelper.force_completion(process) + + [log_destroyed_event, notification_added_event] = + wait_events [:log_destroyed, :notification_added] + + # Local server receives information about the destroyed log + assert log_destroyed_event.data.log_id == to_string(log.log_id) + + # Destroyed log no longer exists + refute LogQuery.fetch(log.log_id) + + # Client received the log notification + assert notification_added_event.data.class == :server + assert notification_added_event.data.code == :log_destroyed + assert notification_added_event.data.data.log_id == to_string(log.log_id) + + # LogRecoverProcess is recursive, so it should still be working. 
+ new_process = ProcessQuery.fetch(process.process_id) + + # It's not working on any log, as there aren't any recoverable logs now + refute new_process.tgt_log_id + + TOPHelper.top_stop(gateway) + end + + test "LogRecover.Custom life cycle (remote, natural)" do + {socket, %{entity: entity, server: gateway}} = + ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, %{gateway: gateway, destination: destination}} = + ChannelSetup.join_server(gateway_id: gateway.server_id, socket: socket) + + # Connect to account channel, so we can receive notifications + account_id = AccountHelper.cast_from_entity(entity.entity_id) + ChannelSetup.join_account(socket: socket, account_id: account_id) + + # Relevant logs (two of them are recoverable) + LogSetup.log!(server_id: destination.server_id, revisions: 2) + log = LogSetup.log!(server_id: destination.server_id, revisions: 2) + + # LogRecover that will be used + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # Prepare request params + request_id = RequestHelper.id() + + params = + %{ + "method" => "custom", + "log_id" => to_string(log.log_id), + "request_id" => request_id + } + + ref = push socket, "log.recover", params + assert_reply ref, :ok, _, timeout(:slow) + + [process_created_event] = wait_events [:process_created] + + assert process_created_event.data.type == "log_recover_custom" + assert process_created_event.meta.request_id == request_id + + process = + process_created_event.data.process_id + |> Process.ID.cast!() + |> ProcessQuery.fetch() + + # Make sure the process was created correctly + assert process.type == :log_recover_custom + assert process.data.recover_version == recover.modules.log_recover.version + assert process.tgt_log_id == log.log_id + + assert process.gateway_id == gateway.server_id + assert process.target_id == destination.server_id + assert process.source_entity_id == entity.entity_id + assert process.src_file_id == recover.file_id + + # remote process; has 
connection info + assert process.network_id == @internet_id + assert process.src_connection_id == socket.assigns.ssh.connection_id + + # Simulate completion of the software + TOPHelper.force_completion(process) + + [log_recovered_event, notification_added_event] = + wait_events [:log_recovered, :notification_added] + + [original_revision, _fake_revision] = LogQuery.fetch_revisions(log) + + # Local server receives information about the newly recovered log + assert log_recovered_event.data.type == to_string(original_revision.type) + assert_map_str log_recovered_event.data.data, original_revision.data + + # The recently recovered log has changed its last revision + new_log = LogQuery.fetch(log.log_id) + + assert new_log.revision_id == 1 + assert new_log.server_id == destination.server_id + assert new_log.revision.type == original_revision.type + assert_map_str new_log.revision.data, original_revision.data + + # Client received the log notification + assert notification_added_event.data.class == :server + assert notification_added_event.data.code == :log_recovered + assert notification_added_event.data.data.log_id == to_string(log.log_id) + + # LogRecoverProcess is recursive, so it should still be working. + new_process = ProcessQuery.fetch(process.process_id) + + # It's working on `log`, since it's a Custom process... 
+ assert new_process.tgt_log_id == log.log_id + + # But it will "never" complete, as this is the original revision + assert new_process.time_left > 999_999_999 + + TOPHelper.top_stop(gateway) + end + end +end diff --git a/test/log/event/log_test.exs b/test/log/event/log_test.exs index c03a78d9..0ddee65d 100644 --- a/test/log/event/log_test.exs +++ b/test/log/event/log_test.exs @@ -51,6 +51,24 @@ defmodule Helix.Log.Event.LogTest do end end + describe "LogRecoveredEvent" do + test "Publishable.generate_payload/2" do + event = EventSetup.Log.recovered() + + # Generates the payload + assert {:ok, data} = Publishable.generate_payload(event, @mocked_socket) + + # Returned payload is json-friendly + assert data.log_id == to_string(event.log.log_id) + assert data.type == to_string(event.log.revision.type) + assert data.data + assert is_float(data.timestamp) + + # Returned event is correct + assert "log_recovered" == Publishable.get_event_name(event) + end + end + describe "LogDestroyedEvent" do test "Publishable.generate_payload/2" do event = EventSetup.Log.destroyed() diff --git a/test/log/henforcer/log/recover_test.exs b/test/log/henforcer/log/recover_test.exs new file mode 100644 index 00000000..54600d40 --- /dev/null +++ b/test/log/henforcer/log/recover_test.exs @@ -0,0 +1,82 @@ +defmodule Helix.Log.Henforcer.Log.RecoverTest do + + use Helix.Test.Case.Integration + + import Helix.Test.Henforcer.Macros + + alias Helix.Log.Henforcer.Log.Recover, as: LogRecoverHenforcer + + alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Server.Setup, as: ServerSetup + alias Helix.Test.Software.Setup, as: SoftwareSetup + alias Helix.Test.Log.Setup, as: LogSetup + + describe "can_recover_custom?/3" do + test "accepts when everything is OK" do + {log, _} = LogSetup.log() + + gateway = ServerSetup.server!() + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + assert {true, relay} = + LogRecoverHenforcer.can_recover_custom?( + log.log_id, 
gateway.server_id, log.server_id + ) + + assert_relay relay, [:log, :recover, :gateway] + + assert relay.log.log_id == log.log_id + assert relay.gateway == gateway + assert relay.recover == recover + end + + test "rejects when player does not have a LogRecover software" do + {log, _} = LogSetup.log() + gateway = ServerSetup.server!() + + assert {false, reason, _} = + LogRecoverHenforcer.can_recover_custom?( + log.log_id, gateway.server_id, log.server_id + ) + + assert reason == {:recover, :not_found} + end + + test "rejects when log does not belong to the given target (server)" do + {log, _} = LogSetup.log() + + gateway = ServerSetup.server!() + SoftwareSetup.log_recover(server_id: gateway.server_id) + + assert {false, reason, _} = + LogRecoverHenforcer.can_recover_custom?( + log.log_id, gateway.server_id, ServerHelper.id() + ) + + assert reason == {:log, :not_belongs} + end + end + + describe "can_recover_global?/1" do + test "accepts when everything is ok" do + gateway = ServerSetup.server!() + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + assert {true, relay} = + LogRecoverHenforcer.can_recover_global?(gateway.server_id) + + assert_relay relay, [:gateway, :recover] + + assert relay.gateway == gateway + assert relay.recover == recover + end + + test "rejects when player does not have a LogRecover" do + gateway = ServerSetup.server!() + + assert {false, reason, _} = + LogRecoverHenforcer.can_recover_global?(gateway.server_id) + assert reason == {:recover, :not_found} + end + end +end diff --git a/test/log/websocket/requests/recover_test.exs b/test/log/websocket/requests/recover_test.exs new file mode 100644 index 00000000..4f85219a --- /dev/null +++ b/test/log/websocket/requests/recover_test.exs @@ -0,0 +1,317 @@ +defmodule Helix.Log.Websocket.Requests.RecoverTest do + + use Helix.Test.Case.Integration + + alias Helix.Websocket.Requestable + alias Helix.Process.Query.Process, as: ProcessQuery + alias Helix.Log.Websocket.Requests.Recover, 
as: LogRecoverRequest + + alias Helix.Test.Channel.Request.Helper, as: RequestHelper + alias Helix.Test.Channel.Setup, as: ChannelSetup + alias Helix.Test.Process.TOPHelper + alias Helix.Test.Server.Helper, as: ServerHelper + alias Helix.Test.Server.Setup, as: ServerSetup + alias Helix.Test.Software.Setup, as: SoftwareSetup + alias Helix.Test.Log.Helper, as: LogHelper + alias Helix.Test.Log.Setup, as: LogSetup + + @mock_socket ChannelSetup.mock_server_socket() + + describe "LogRecoverRequest.check_params/2" do + test "validates expected data (global)" do + params = %{"method" => "global"} + + request = LogRecoverRequest.new(params) + + assert {:ok, request} = Requestable.check_params(request, @mock_socket) + + assert request.params.method == :global + end + + test "validates expected data (custom)" do + log_id = LogHelper.id() + + params = + %{ + "method" => "custom", + "log_id" => to_string(log_id) + } + + request = LogRecoverRequest.new(params) + + assert {:ok, request} = Requestable.check_params(request, @mock_socket) + + assert request.params.method == :custom + assert request.params.log_id == log_id + end + + test "rejects when `log_id` is missing or invalid" do + p_base = %{"method" => "custom"} + + p0 = %{"log_id" => "abcd"} |> Map.merge(p_base) + p1 = %{"log_id" => nil} |> Map.merge(p_base) + p2 = %{} |> Map.merge(p_base) + + req0 = LogRecoverRequest.new(p0) + req1 = LogRecoverRequest.new(p1) + req2 = LogRecoverRequest.new(p2) + + assert {:error, reason0, _} = Requestable.check_params(req0, @mock_socket) + assert {:error, reason1, _} = Requestable.check_params(req1, @mock_socket) + assert {:error, reason2, _} = Requestable.check_params(req2, @mock_socket) + + assert reason0.message == "bad_request" + assert reason1 == reason0 + assert reason2 == reason1 + end + + test "rejects when `method` is wrong" do + p0 = %{"method" => "invalid"} + p1 = %{"method" => nil} + p2 = %{"method" => "custom"} + + req0 = LogRecoverRequest.new(p0) + req1 = 
LogRecoverRequest.new(p1) + req2 = LogRecoverRequest.new(p2) + + assert {:error, reason0, _} = Requestable.check_params(req0, @mock_socket) + assert {:error, reason1, _} = Requestable.check_params(req1, @mock_socket) + assert {:error, reason2, _} = Requestable.check_params(req2, @mock_socket) + + assert reason0.message == "bad_method" + assert reason1 == reason0 + assert reason2.message == "bad_request" + end + end + + describe "LogRecoverRequest.check_permissions/2" do + test "accepts when everything is OK" do + gateway = ServerSetup.server!() + target_id = ServerHelper.id() + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + params = %{"method" => Enum.random(["global", "custom"])} + + params = + if params["method"] == "custom" do + log = LogSetup.log!(server_id: target_id) + Map.put(params, "log_id", to_string(log.log_id)) + else + params + end + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, destination_id: target_id + ) + + request = LogRecoverRequest.new(params) + + {:ok, request} = Requestable.check_params(request, socket) + assert {:ok, request} = Requestable.check_permissions(request, socket) + + assert request.meta.recover == recover + assert request.meta.gateway == gateway + + if request.params.method == :custom do + assert to_string(request.meta.log.log_id) == params["log_id"] + assert request.meta.log.server_id == target_id + end + + end + + test "rejects when player does not have a recover" do + gateway = ServerSetup.server!() + target_id = ServerHelper.id() + + params = %{"method" => Enum.random(["global", "custom"])} + + params = + if params["method"] == "custom" do + log = LogSetup.log!(server_id: target_id) + Map.put(params, "log_id", to_string(log.log_id)) + else + params + end + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, destination_id: target_id + ) + + request = LogRecoverRequest.new(params) + + {:ok, request} = Requestable.check_params(request, socket) + 
assert {:error, reason, _} = + Requestable.check_permissions(request, socket) + + assert reason.message == "recover_not_found" + end + + test "rejects when attempting to recover another server's log (custom)" do + gateway = ServerSetup.server!() + log = LogSetup.log!() + + params = + %{ + "method" => "custom", + "log_id" => to_string(log.log_id), + } + + # socket's `destination` is different from `log.server_id` + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, destination_id: ServerHelper.id() + ) + + request = LogRecoverRequest.new(params) + + {:ok, request} = Requestable.check_params(request, socket) + assert {:error, reason, _} = + Requestable.check_permissions(request, socket) + + assert reason.message == "log_not_belongs" + end + end + + describe "LogRecoverRequest.handle_request/2" do + test "starts the process (global, local, no recoverable logs)" do + gateway = ServerSetup.server!() + + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, own_server: true + ) + + params = %{method: :global} + meta = %{recover: recover, gateway: gateway} + + request = RequestHelper.mock_request(LogRecoverRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_recover_global + assert process.gateway_id == process.target_id + assert process.src_file_id == recover.file_id + refute process.src_connection_id + assert process.data.recover_version == recover.modules.log_recover.version + + # No log being recovered because there are no recoverable logs on server + refute process.tgt_log_id + + TOPHelper.top_stop(gateway) + end + + test "starts the process (global, remote, with recoverable logs)" do + gateway = ServerSetup.server!() + target = ServerSetup.server!() + + recover = 
SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # Add a recoverable log to the target server + log = LogSetup.log!(server_id: target.server_id, revisions: 2) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, + destination_id: target.server_id, + real_connection?: true + ) + + params = %{method: :global} + meta = %{recover: recover, gateway: gateway} + + request = RequestHelper.mock_request(LogRecoverRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_recover_global + assert process.target_id == target.server_id + assert process.src_file_id == recover.file_id + assert process.src_connection_id == socket.assigns.ssh.connection_id + assert process.data.recover_version == recover.modules.log_recover.version + + # There's a target log because there's a recoverable log! + assert process.tgt_log_id == log.log_id + + TOPHelper.top_stop(gateway) + end + + test "starts the process (custom, local)" do + gateway = ServerSetup.server!() + + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # Add a recoverable log on the target server (which is the gateway) + log = LogSetup.log!(server_id: gateway.server_id, forge_version: 50) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, own_server: true + ) + + params = %{method: :custom, log_id: log.log_id} + meta = %{recover: recover, gateway: gateway, log: log} + + request = RequestHelper.mock_request(LogRecoverRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_recover_custom + assert process.gateway_id == process.target_id + assert process.src_file_id == recover.file_id + refute process.src_connection_id + assert 
process.data.recover_version == recover.modules.log_recover.version + + # Process is targeting the correct log + assert process.tgt_log_id == log.log_id + + TOPHelper.top_stop(gateway) + end + + test "starts the process (custom, remote)" do + gateway = ServerSetup.server!() + target = ServerSetup.server!() + + recover = SoftwareSetup.log_recover!(server_id: gateway.server_id) + + # Add a recoverable log to the target server + log = LogSetup.log!(server_id: target.server_id, revisions: 2) + + socket = + ChannelSetup.mock_server_socket( + gateway_id: gateway.server_id, + destination_id: target.server_id, + real_connection?: true + ) + + params = %{method: :local, log_id: log.log_id} + meta = %{recover: recover, gateway: gateway, log: log} + + request = RequestHelper.mock_request(LogRecoverRequest, params, meta) + + assert {:ok, _request} = Requestable.handle_request(request, socket) + + assert [process] = ProcessQuery.get_processes_on_server(gateway.server_id) + + assert process.type == :log_recover_custom + assert process.target_id == target.server_id + assert process.src_file_id == recover.file_id + assert process.src_connection_id == socket.assigns.ssh.connection_id + assert process.data.recover_version == recover.modules.log_recover.version + + # Targets the correct log + assert process.tgt_log_id == log.log_id + + TOPHelper.top_stop(gateway) + end + end +end diff --git a/test/support/event/setup/log.ex b/test/support/event/setup/log.ex index fc6f59b9..e3947877 100644 --- a/test/support/event/setup/log.ex +++ b/test/support/event/setup/log.ex @@ -7,6 +7,7 @@ defmodule Helix.Test.Event.Setup.Log do alias Helix.Log.Event.Log.Created, as: LogCreatedEvent alias Helix.Log.Event.Log.Destroyed, as: LogDestroyedEvent + alias Helix.Log.Event.Log.Recovered, as: LogRecoveredEvent alias Helix.Log.Event.Log.Revised, as: LogRevisedEvent alias Helix.Test.Entity.Helper, as: EntityHelper @@ -23,6 +24,11 @@ defmodule Helix.Test.Event.Setup.Log do def revised(log = %Log{}), do: 
LogRevisedEvent.new(log) + def recovered, + do: recovered(generate_fake_log(), EntityHelper.id()) + def recovered(log = %Log{}, entity_id), + do: LogRecoveredEvent.new(log, entity_id) + @doc """ Opts: - process: Source process (optional). diff --git a/test/support/process/helper/top.ex b/test/support/process/helper/top.ex index 77c22e10..e1c89c8c 100644 --- a/test/support/process/helper/top.ex +++ b/test/support/process/helper/top.ex @@ -46,8 +46,8 @@ defmodule Helix.Test.Process.TOPHelper do |> ProcessRepo.update() # Force a recalque on the server - if opts[:from] do - TOPAction.recalque(process, opts[:from]) + if opts[:source] do + TOPAction.recalque(process, source: opts[:source]) else TOPAction.recalque(process) end diff --git a/test/support/process/setup/data.ex b/test/support/process/setup/data.ex index 34fc6aee..3ee36e57 100644 --- a/test/support/process/setup/data.ex +++ b/test/support/process/setup/data.ex @@ -221,6 +221,18 @@ defmodule Helix.Test.Process.Data.Setup do def custom(:log_forge_create, data_opts, meta), do: custom_log_forge({:log_forge_create, :create}, data_opts, meta) + def custom(:log_recover, data_opts, meta) do + [:log_recover_custom, :log_recover_global] + |> Enum.random() + |> custom(data_opts, meta) + end + + def custom(:log_recover_custom, data_opts, meta), + do: custom_log_recover({:log_recover_custom, :custom}, data_opts, meta) + + def custom(:log_recover_global, data_opts, meta), + do: custom_log_recover({:log_recover_global, :global}, data_opts, meta) + defp custom_log_forge({process_type, action}, data_opts, meta) do version = Keyword.get(data_opts, :forger_version, 100) src_file_id = meta.src_file_id || SoftwareHelper.id() @@ -254,19 +266,7 @@ defmodule Helix.Test.Process.Data.Setup do {process_type, data, meta, resources} end - def custom(:log_recover, data_opts, meta) do - [:log_recover_custom, :log_recover_global] - |> Enum.random() - |> custom(data_opts, meta) - end - - def custom(:log_recover_custom, data_opts, meta), - do: 
custom_log_recover({:log_recover_custom, :custom}, data_opts, meta) - - def custom(:log_recover_global, data_opts, meta), - do: custom_log_recover({:log_recover_global, :global}, data_opts, meta) - - defp custom_log_recover({process_type, method}, data_opts, meta) do + defp custom_log_recover({process_type, _method}, data_opts, meta) do version = Keyword.get(data_opts, :recover_version, SoftwareHelper.random_version()) From db59ff9eea1ba0119fc54813057243fe40733a3c Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Thu, 30 Aug 2018 00:34:01 -0300 Subject: [PATCH 11/14] Add Processable signals revised/recovered/destroyed Logs --- lib/event/dispatcher.ex | 12 ++ lib/hell/hack.ex | 3 + lib/log/process/forge.ex | 8 ++ lib/log/process/recover.ex | 36 ++++-- lib/process/action/process.ex | 9 ++ lib/process/event/handler/top.ex | 36 +++++- lib/process/internal/process.ex | 10 ++ lib/process/model/process.ex | 63 +++++++++-- lib/process/model/processable.ex | 27 +++++ lib/process/processable.ex | 51 +++++++++ lib/process/query/process.ex | 9 ++ .../software_type/firewall/process_type.ex | 12 ++ test/log/process/recover_test.exs | 56 +++++++++ test/support/hell/random.ex | 7 ++ test/support/log/helper.ex | 6 + test/support/log/setup.ex | 107 ++++++++++++++++++ 16 files changed, 428 insertions(+), 24 deletions(-) diff --git a/lib/event/dispatcher.ex b/lib/event/dispatcher.ex index 69f867ca..e4651220 100644 --- a/lib/event/dispatcher.ex +++ b/lib/event/dispatcher.ex @@ -160,6 +160,18 @@ defmodule Helix.Event.Dispatcher do LogHandler.Log, :recover_processed + event LogEvent.Log.Revised, + ProcessHandler.TOP, + :object_handler + + event LogEvent.Log.Recovered, + ProcessHandler.TOP, + :object_handler + + event LogEvent.Log.Destroyed, + ProcessHandler.TOP, + :object_handler + ############################################################################## # Process events ############################################################################## diff --git a/lib/hell/hack.ex 
b/lib/hell/hack.ex index e8d57b56..162236ad 100644 --- a/lib/hell/hack.ex +++ b/lib/hell/hack.ex @@ -53,6 +53,9 @@ defmodule HELL.Hack.Experience do {:retarget, 2}, {:source_connection_closed, 3}, {:target_connection_closed, 3}, + {:target_log_revised, 3}, + {:target_log_recovered, 3}, + {:target_log_destroyed, 3}, {:after_read_hook, 1} ], "Elixir.Helix.Process.Public.View.ProcessViewable" => [ diff --git a/lib/log/process/forge.ex b/lib/log/process/forge.ex index cf3cfbfa..cf2407bc 100644 --- a/lib/log/process/forge.ex +++ b/lib/log/process/forge.ex @@ -99,6 +99,14 @@ process Helix.Log.Process.Forge do {:delete, [event]} end + @doc """ + If the Log currently being forged was destroyed, the process should be + killed and the user notified. + """ + on_target_log_destroyed(_process, _data, _log) do + {{:SIGKILL, :tgt_log_deleted}, []} + end + @doc false def after_read_hook(data) do log_type = String.to_existing_atom(data.log_type) diff --git a/lib/log/process/recover.ex b/lib/log/process/recover.ex index 9c0bff8d..bf5d6b79 100644 --- a/lib/log/process/recover.ex +++ b/lib/log/process/recover.ex @@ -137,6 +137,11 @@ process Helix.Log.Process.Recover do {:noop, [event]} end + @doc """ + When a `:retarget` is requested, we'll find a new target for the process. If + it's a `Global` process, a random log is selected on each iteration. If it's + a `Custom` process, however, the same log is always selected. + """ on_retarget(process, _data) do {new_log, method} = # On `log_recover_global`, we must select a new log on each iteration. @@ -167,17 +172,26 @@ process Helix.Log.Process.Recover do {{:retarget, changes}, []} end - # TODO \/: React to log deletion. - # on_target_log_deleted(process, data) do - # action = - # if process.type == :log_recover_global do - # :retarget - # else - # {:SIGKILL, :log_deleted} - # end - - # {action, []} - # end + @doc """ + If the Log currently being recovered was recovered by someone else, + automatically `:retarget` the process. 
+ """ + on_target_log_recovered(_process, _data, _log) do + {:SIGRETARGET, []} + end + + @doc """ + If the Log currently being recovered was destroyed, `:retarget` if it's a + global process, otherwise the custom process should be killed and the user + notified. + """ + on_target_log_destroyed(%{type: :log_recover_global}, _data, _log) do + {:SIGRETARGET, []} + end + + on_target_log_destroyed(%{type: :log_recover_custom}, _data, _log) do + {{:SIGKILL, :tgt_log_deleted}, []} + end end resourceable do diff --git a/lib/process/action/process.ex b/lib/process/action/process.ex index a0deedff..48231d6f 100644 --- a/lib/process/action/process.ex +++ b/lib/process/action/process.ex @@ -126,6 +126,15 @@ defmodule Helix.Process.Action.Process do defp signal_handler(:SIGTGTCONND, process, %{connection: connection}), do: Processable.target_connection_closed(process.data, process, connection) + defp signal_handler(:SIG_TGT_LOG_REVISED, process, %{log: log}), + do: Processable.target_log_revised(process.data, process, log) + + defp signal_handler(:SIG_TGT_LOG_RECOVERED, process, %{log: log}), + do: Processable.target_log_recovered(process.data, process, log) + + defp signal_handler(:SIG_TGT_LOG_DESTROYED, process, %{log: log}), + do: Processable.target_log_destroyed(process.data, process, log) + # defp signal_handler(:SIGSRCFILED, process, %{file: file}), # do: Processable.file_deleted(process.data, process, file) diff --git a/lib/process/event/handler/top.ex b/lib/process/event/handler/top.ex index 9de92226..a462e969 100644 --- a/lib/process/event/handler/top.ex +++ b/lib/process/event/handler/top.ex @@ -4,12 +4,16 @@ defmodule Helix.Process.Event.Handler.TOP do import HELL.Macros alias Helix.Event - alias Helix.Network.Event.Connection.Closed, as: ConnectionClosedEvent alias Helix.Process.Action.Flow.Process, as: ProcessFlow alias Helix.Process.Action.TOP, as: TOPAction alias Helix.Process.Model.Process alias Helix.Process.Query.Process, as: ProcessQuery + alias 
Helix.Network.Event.Connection.Closed, as: ConnectionClosedEvent + alias Helix.Log.Event.Log.Revised, as: LogRevisedEvent + alias Helix.Log.Event.Log.Recovered, as: LogRecoveredEvent + alias Helix.Log.Event.Log.Destroyed, as: LogDestroyedEvent + alias Helix.Process.Event.Process.Created, as: ProcessCreatedEvent alias Helix.Process.Event.TOP.BringMeToLife, as: TOPBringMeToLifeEvent @@ -124,6 +128,36 @@ defmodule Helix.Process.Event.Handler.TOP do |> Enum.each(&ProcessFlow.signal(&1, :SIGTGTCONND, signal_param)) end + def object_handler(event = %LogRevisedEvent{}) do + signal_param = %{log: event.log} + + # Send SIG_TGT_LOG_REVISED for processes that are targeting such log + event.log.log_id + |> ProcessQuery.get_processes_targeting_log() + |> filter_self_message(event) + |> Enum.each(&ProcessFlow.signal(&1, :SIG_TGT_LOG_REVISED, signal_param)) + end + + def object_handler(event = %LogRecoveredEvent{}) do + signal_param = %{log: event.log} + + # Send SIG_TGT_LOG_RECOVERED for processes that are targeting such log + event.log.log_id + |> ProcessQuery.get_processes_targeting_log() + |> filter_self_message(event) + |> Enum.each(&ProcessFlow.signal(&1, :SIG_TGT_LOG_RECOVERED, signal_param)) + end + + def object_handler(event = %LogDestroyedEvent{}) do + signal_param = %{log: event.log} + + # Send SIG_TGT_LOG_DESTROYED for processes that are targeting such log + event.log.log_id + |> ProcessQuery.get_processes_targeting_log() + |> filter_self_message(event) + |> Enum.each(&ProcessFlow.signal(&1, :SIG_TGT_LOG_DESTROYED, signal_param)) + end + docp """ `filter_self_message/2` filters events related to element changes (connection closed, file deleted, log deleted) that were inflicted by the process itself.
diff --git a/lib/process/internal/process.ex b/lib/process/internal/process.ex index a0f843d6..49d72a6c 100644 --- a/lib/process/internal/process.ex +++ b/lib/process/internal/process.ex @@ -1,5 +1,6 @@ defmodule Helix.Process.Internal.Process do + alias Helix.Log.Model.Log alias Helix.Network.Model.Connection alias Helix.Server.Model.Server alias Helix.Process.Model.Process @@ -65,6 +66,15 @@ defmodule Helix.Process.Internal.Process do |> Enum.map(&Process.format/1) end + @spec get_processes_targeting_log(Log.id) :: + [Process.t] + def get_processes_targeting_log(log_id) do + log_id + |> Process.Query.by_target_log() + |> Repo.all() + |> Enum.map(&Process.format/1) + end + @spec batch_update([Process.t]) :: term @doc """ diff --git a/lib/process/model/process.ex b/lib/process/model/process.ex index ef3c9240..e85fdb2e 100644 --- a/lib/process/model/process.ex +++ b/lib/process/model/process.ex @@ -187,6 +187,28 @@ defmodule Helix.Process.Model.Process do Signal sent when the bank account the process is targeting was closed. Default action is to send itself a SIGKILL with `:tgt_bank_acc_closed` reason. + + ## SIG_TGT_LOG_REVISED + + Signal sent when the log the process is targeting was revised (i.e. a new + revision was added). + + Default action is to ignore the signal. + + ## SIG_TGT_LOG_RECOVERED + + Signal sent when the log the process is targeting was recovered (i.e. a + revision was removed from the stack). + + Default action is to ignore the signal. + + ## SIG_TGT_LOG_DESTROYED + + Signal sent when the log the process is targeting was destroyed (i.e. it was + an artificial log that got recovered beyond the original revision, hence it no + longer exists). + + Default action is to ignore the signal.
""" @type signal :: :SIGTERM @@ -201,6 +223,9 @@ defmodule Helix.Process.Model.Process do | :SIGTGTFILED | :SIGSRCBANKACCD | :SIGTGTBANKACCD + | :SIG_TGT_LOG_REVISED + | :SIG_TGT_LOG_RECOVERED + | :SIG_TGT_LOG_DESTROYED @typedoc """ Valid params for each type of signal. @@ -210,6 +235,7 @@ defmodule Helix.Process.Model.Process do | %{priority: term} | %{connection: Connection.t} | %{file: File.t} + | %{log: Log.t} | %{} @typedoc """ @@ -224,6 +250,7 @@ defmodule Helix.Process.Model.Process do | :tgt_file_deleted | :src_bank_acc_closed | :tgt_bank_acc_closed + | :tgt_log_destroyed @typedoc """ Return type for `retarget` changes. @@ -295,6 +322,17 @@ defmodule Helix.Process.Model.Process do :priority ] + @retarget_fields [ + # A retarget may change the process' resources, listed below + :static, + :l_dynamic, + :r_dynamic, + :objective, + + # It may also change some objects (add as needed) + :tgt_log_id + ] + @type state :: :waiting_allocation | :running @@ -494,17 +532,6 @@ defmodule Helix.Process.Model.Process do |> put_pk(heritage, {:process, params.type}) end - @retarget_fields [ - # A retarget may change the process' resources, listed below - :static, - :l_dynamic, - :r_dynamic, - :objective, - - # It may also change some objects (add as needed) - :tgt_log_id - ] - @spec retarget(t, retarget_changes :: map) :: changeset @doc """ @@ -512,6 +539,12 @@ defmodule Helix.Process.Model.Process do amount of previous work (`processed`). """ def retarget(process = %Process{}, changes) do + # TODO: Potential bug: retarget directly changes the process, but does not + # force it to be re-scheduled. Specifically, it does not modify the last + # checkpoint date. This may cause issues. + # Possible solution: change here \/ `last_checkpoint_time` and force + # TOP recalque. 
+ process |> change() |> cast(changes, @retarget_fields) @@ -770,10 +803,11 @@ defmodule Helix.Process.Model.Process do query do - alias Helix.Software.Model.File + alias Helix.Log.Model.Log alias Helix.Network.Model.Connection alias Helix.Network.Model.Network alias Helix.Server.Model.Server + alias Helix.Software.Model.File @spec by_id(Queryable.t, Process.idtb) :: Queryable.t @@ -823,6 +857,11 @@ defmodule Helix.Process.Model.Process do def by_target_connection(query \\ Process, id), do: where(query, [p], p.tgt_connection_id == ^id) + @spec by_target_log(Queryable.t, Log.id) :: + Queryable.t + def by_target_log(query \\ Process, id), + do: where(query, [p], p.tgt_log_id == ^id) + @spec by_target_process(Queryable.t, Process.id) :: Queryable.t def by_target_process(query \\ Process, id), diff --git a/lib/process/model/processable.ex b/lib/process/model/processable.ex index 7672c25d..6f6084f0 100644 --- a/lib/process/model/processable.ex +++ b/lib/process/model/processable.ex @@ -11,6 +11,7 @@ defprotocol Helix.Process.Model.Processable do """ alias Helix.Event + alias Helix.Log.Model.Log alias Helix.Network.Model.Connection alias Helix.Process.Model.Process @@ -133,6 +134,32 @@ defprotocol Helix.Process.Model.Processable do """ def target_connection_closed(data, process, connection) + @spec target_log_revised(t, Process.t, Log.t) :: + {action, [Event.t]} + @doc """ + Called when the process receives a SIG_TGT_LOG_REVISED, meaning the log that + process is targeting has been revised. Also receives the newly revised log. + """ + def target_log_revised(data, process, log) + + @spec target_log_recovered(t, Process.t, Log.t) :: + {action, [Event.t]} + @doc """ + Called when the process receives a SIG_TGT_LOG_RECOVERED, meaning the log that + process is targeting has been recovered. Also receives the newly recovered + log. 
+ """ + def target_log_recovered(data, process, log) + + @spec target_log_destroyed(t, Process.t, Log.t) :: + {action, [Event.t]} + @doc """ + Called when the process receives a SIG_TGT_LOG_DESTROYED, meaning the log that + process is targeting has been destroyed. Also receives the newly destroyed + log. + """ + def target_log_destroyed(data, process, log) + @spec after_read_hook(term) :: t @doc """ diff --git a/lib/process/processable.ex b/lib/process/processable.ex index 060c6da9..fe1b90b9 100644 --- a/lib/process/processable.ex +++ b/lib/process/processable.ex @@ -45,6 +45,18 @@ defmodule Helix.Process.Processable do {{:SIGKILL, :tgt_connection_closed}, []} end + on_target_log_revised(_process, _data, _log) do + {:noop, []} + end + + on_target_log_recovered(_process, _data, _log) do + {:noop, []} + end + + on_target_log_destroyed(_process, _data, _log) do + {:noop, []} + end + @doc false def after_read_hook(data), do: data @@ -161,4 +173,43 @@ defmodule Helix.Process.Processable do end end + + defmacro on_target_log_revised(process, data, log, do: block) do + quote do + + def target_log_revised( + unquote(data), p = unquote(process), unquote(log) + ) do + unquote(block) + |> add_fingerprint(p) + end + + end + end + + defmacro on_target_log_recovered(process, data, log, do: block) do + quote do + + def target_log_recovered( + unquote(data), p = unquote(process), unquote(log) + ) do + unquote(block) + |> add_fingerprint(p) + end + + end + end + + defmacro on_target_log_destroyed(process, data, log, do: block) do + quote do + + def target_log_destroyed( + unquote(data), p = unquote(process), unquote(log) + ) do + unquote(block) + |> add_fingerprint(p) + end + + end + end end diff --git a/lib/process/query/process.ex b/lib/process/query/process.ex index 9b7e12fd..5b61ef4d 100644 --- a/lib/process/query/process.ex +++ b/lib/process/query/process.ex @@ -2,6 +2,7 @@ defmodule Helix.Process.Query.Process do import __MODULE__.Macros + alias Helix.Log.Model.Log alias 
Helix.Network.Model.Connection alias Helix.Server.Model.Server alias Helix.Software.Model.File @@ -75,6 +76,14 @@ defmodule Helix.Process.Query.Process do defdelegate get_processes_targeting_connection(connection), to: ProcessInternal + @spec get_processes_targeting_log(Log.idt) :: + [Process.t] + @doc """ + Returns a list of processes that are targeting `log`. + """ + defdelegate get_processes_targeting_log(log), + to: ProcessInternal + @spec get_custom(Process.type, Server.idt, meta :: map) :: [Process.t] | nil diff --git a/lib/software/model/software_type/firewall/process_type.ex b/lib/software/model/software_type/firewall/process_type.ex index 3f2bdd76..f45f5d73 100644 --- a/lib/software/model/software_type/firewall/process_type.ex +++ b/lib/software/model/software_type/firewall/process_type.ex @@ -42,6 +42,18 @@ defmodule Helix.Software.Model.SoftwareType.Firewall.Passive do {:delete, []} end + def target_log_revised(_, _, _) do + {:noop, []} + end + + def target_log_recovered(_, _, _) do + {:noop, []} + end + + def target_log_destroyed(_, _, _) do + {:noop, []} + end + def after_read_hook(data), do: data end diff --git a/test/log/process/recover_test.exs b/test/log/process/recover_test.exs index f8047e81..90c3d62e 100644 --- a/test/log/process/recover_test.exs +++ b/test/log/process/recover_test.exs @@ -3,13 +3,17 @@ defmodule Helix.Log.Process.RecoverTest do use Helix.Test.Case.Integration alias Helix.Process.Model.Processable + alias Helix.Process.Query.Process, as: ProcessQuery alias Helix.Log.Process.Recover, as: LogRecoverProcess + alias Helix.Log.Query.Log, as: LogQuery alias Helix.Test.Event.Helper, as: EventHelper + alias Helix.Test.Event.Setup, as: EventSetup alias Helix.Test.Process.TOPHelper alias Helix.Test.Server.Helper, as: ServerHelper alias Helix.Test.Server.Setup, as: ServerSetup alias Helix.Test.Software.Setup, as: SoftwareSetup + alias Helix.Test.Log.Helper, as: LogHelper alias Helix.Test.Log.Setup, as: LogSetup @relay nil @@ -212,5 +216,57 
@@ defmodule Helix.Log.Process.RecoverTest do TOPHelper.top_stop(gateway) end + + test "on_target_log: retargets either process when log is recovered" do + {{:ok, process1}, %{gateway: gateway, entity_id: entity_id}} = + LogSetup.recover_flow(method: :global, local?: true) + + # TODO: How to ensure it was retargeted? + end + + test "on_target_log: retargets `global` process when log is destroyed" do + # `process` is recovering `log` at `gateway` + {{:ok, process1}, %{gateway: gateway, entity_id: entity_id}} = + LogSetup.recover_flow(method: :global, local?: true) + + # Refetch the process so it contains allocation data + process1 = ProcessQuery.fetch(process1.process_id) + + # This is the `log` that the process is currently targeting + log = LogQuery.fetch(process1.tgt_log_id) + + # Now we'll gladly destroy it with LogDestroyedEvent + # (And *actually* destroy the Log. Needed for test sequence below) + LogHelper.delete(log) + + log + |> EventSetup.Log.destroyed(entity_id) + |> EventHelper.emit() + + # Process still exists + process2 = ProcessQuery.fetch(process1.process_id) + + # The new process exists but isn't working on any log, because there was + # only one recoverable log on `gateway` and it was destroyed. Retargeted. 
+ refute process2.tgt_log_id + + TOPHelper.top_stop(gateway) + end + + test "on_target_log: deletes `custom` process when log is destroyed" do + # `process` is recovering `log` at `gateway` + {{:ok, process}, %{gateway: gateway, logs: [log], entity_id: entity_id}} = + LogSetup.recover_flow(method: :custom, local?: true) + + # Now we'll gladly destroy it with LogDestroyedEvent + log + |> EventSetup.Log.destroyed(entity_id) + |> EventHelper.emit() + + # Process no longer exists + refute ProcessQuery.fetch(process.process_id) + + TOPHelper.top_stop(gateway) + end end end diff --git a/test/support/hell/random.ex b/test/support/hell/random.ex index d7f7524d..985d22ad 100644 --- a/test/support/hell/random.ex +++ b/test/support/hell/random.ex @@ -206,6 +206,13 @@ defmodule HELL.TestHelper.Random do |> String.downcase() end + @doc """ + Randomly returns `true` or `false`. + """ + def boolean do + Enum.random([true, false]) + end + docp """ Fetches a random letter from an alphabet """ diff --git a/test/support/log/helper.ex b/test/support/log/helper.ex index fc730ff7..2b60e329 100644 --- a/test/support/log/helper.ex +++ b/test/support/log/helper.ex @@ -83,4 +83,10 @@ defmodule Helix.Test.Log.Helper do LogRepo.all(query) end + + @doc """ + Directly deletes a log on the DB. Use with caution! 
+ """ + def delete(log = %Log{}), + do: LogRepo.delete(log) end diff --git a/test/support/log/setup.ex b/test/support/log/setup.ex index ffe570ad..7befe896 100644 --- a/test/support/log/setup.ex +++ b/test/support/log/setup.ex @@ -6,11 +6,13 @@ defmodule Helix.Test.Log.Setup do alias Helix.Log.Internal.Log, as: LogInternal alias Helix.Log.Repo, as: LogRepo + alias HELL.TestHelper.Random alias Helix.Test.Entity.Helper, as: EntityHelper alias Helix.Test.Entity.Setup, as: EntitySetup alias Helix.Test.Server.Helper, as: ServerHelper alias Helix.Test.Server.Setup, as: ServerSetup alias Helix.Test.Software.Helper, as: SoftwareHelper + alias Helix.Test.Software.Setup, as: SoftwareSetup alias Helix.Test.Log.Helper, as: LogHelper @doc """ @@ -140,4 +142,109 @@ defmodule Helix.Test.Log.Setup do {server_id, entity_id, log_info, forge_version} end + + @relay nil + + alias Helix.Log.Action.Flow.Recover, as: LogRecoverFlow + + @doc """ + Starts a LogRecoverProcess. + + Opts: + - *method: Whether the process is `global` or `custom`. Random otherwise. + - local?: Whether the process is recovering a log from the same gateway. + Defaults to random. + - gateway: Set gateway server. Type: `Server.t`. + - endpoint: Set endpoint server. Type: `Server.t`. + - recover: Set recover software. Type: `File.t`. + - entity_id: Set entity who is performing action. Defaults to *random* entity. + - conn_info: Conn info ({tunnel, connection}). Defaults to `nil` (local). + - log: Log to be recovered. Only valid to `custom`. + """ + def recover_flow(opts \\ []) do + method = Keyword.get(opts, :method, Enum.random([:global, :custom])) + + if not is_nil(opts[:conn_info]) and opts[:local?] == false, + do: raise("Can't set both `conn_info` and `local?`. ") + + if method == :global and not is_nil(opts[:log]), + do: raise("Can't use `global` method with custom `log`") + + local? = + cond do + not is_nil(opts[:conn_info]) -> + false + + not is_nil(opts[:local?]) -> + opts[:local?] 
+ + true -> + Random.boolean() + end + + gateway = Keyword.get(opts, :gateway, ServerSetup.server!()) + entity_id = Keyword.get(opts, :entity_id, EntityHelper.id()) + + endpoint = + if local? do + gateway + else + Keyword.get(opts, :endpoint, ServerSetup.server!()) + end + + recover = + Keyword.get( + opts, :recover, SoftwareSetup.log_recover!(server_id: gateway.server_id) + ) + + conn_info = + if local? do + nil + else + Keyword.get(opts, :conn_info, raise("Remote conn_info is TODO")) + end + + logs = + if method == :global do + total = Keyword.get(opts, :total_logs, 1) + + 0..(total - 1) + |> Enum.reduce([], fn _, acc -> + [log!(server_id: endpoint.server_id, revisions: 2) | acc] + end) + else + log = + Keyword.get( + opts, :log, log!(server_id: endpoint.server_id, revisions: 2) + ) + + [log] + end + + result = + if method == :global do + LogRecoverFlow.global( + gateway, endpoint, recover, entity_id, conn_info, @relay + ) + else + log = Enum.random(logs) + + LogRecoverFlow.custom( + gateway, endpoint, log, recover, entity_id, conn_info, @relay + ) + end + + related = + %{ + gateway: gateway, + endpoint: endpoint, + logs: logs, + entity_id: entity_id, + method: method, + recover: recover, + conn_info: conn_info + } + + {result, related} + end end From 275b5d62d42b18741b6fb530cf1fb6879a9cc3eb Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Thu, 30 Aug 2018 03:27:12 -0300 Subject: [PATCH 12/14] Force retarget (SIGRETARGET) to reset last checkpoint --- lib/process/internal/process.ex | 7 +++ lib/process/model/process.ex | 14 +++--- lib/process/model/top/scheduler.ex | 31 +++++++------ lib/server/websocket/channel/server.ex | 5 +-- test/log/process/recover_test.exs | 25 ++++++++++- test/process/event/handler/process_test.exs | 49 +++++++++++++++++++++ test/process/event/handler/top_test.exs | 2 +- test/support/event/setup/process.ex | 4 ++ 8 files changed, 107 insertions(+), 30 deletions(-) create mode 100644 test/process/event/handler/process_test.exs diff --git 
a/lib/process/internal/process.ex b/lib/process/internal/process.ex index 49d72a6c..bd242eaf 100644 --- a/lib/process/internal/process.ex +++ b/lib/process/internal/process.ex @@ -89,6 +89,13 @@ defmodule Helix.Process.Internal.Process do end) end + @spec retarget(Process.t, changes :: map) :: + {:ok, Process.t} + | {:error, Process.changeset} + @doc """ + Retargets a process, modifying resources objectives and objects as defined on + the Process' Processable, which demanded these changes. + """ def retarget(process, changes) do process |> Process.retarget(changes) diff --git a/lib/process/model/process.ex b/lib/process/model/process.ex index e85fdb2e..3f80e828 100644 --- a/lib/process/model/process.ex +++ b/lib/process/model/process.ex @@ -329,6 +329,9 @@ defmodule Helix.Process.Model.Process do :r_dynamic, :objective, + # Retarget must always reset the `last_checkpoint_time` + :last_checkpoint_time, + # It may also change some objects (add as needed) :tgt_log_id ] @@ -535,20 +538,15 @@ defmodule Helix.Process.Model.Process do @spec retarget(t, retarget_changes :: map) :: changeset @doc """ - Updates the process according to the retarget changes. It also empties any - amount of previous work (`processed`). + Updates the process according to the retarget changes. It also resets any + previous work (`processed`) and checkpoints (`last_checkpoint_time`). """ def retarget(process = %Process{}, changes) do - # TODO: Potential bug: retarget directly changes the process, but does not - # force it to be re-scheduled. Specifically, it does not modify the last - # checkpoint date. This may cause issues. - # Possible solution: change here \/ `last_checkpoint_time` and force - # TOP recalque. 
- process |> change() |> cast(changes, @retarget_fields) |> put_change(:processed, %{}) + |> put_change(:last_checkpoint_time, DateTime.utc_now()) |> validate_required(@required_fields) end diff --git a/lib/process/model/top/scheduler.ex b/lib/process/model/top/scheduler.ex index 4ae3a44e..5600207a 100644 --- a/lib/process/model/top/scheduler.ex +++ b/lib/process/model/top/scheduler.ex @@ -21,6 +21,21 @@ defmodule Helix.Process.Model.TOP.Scheduler do running: [Process.t] } + @spec estimate_completion(Process.t) :: + {Process.t, Process.time_left | -1 | :infinity} + @doc """ + `estimate_completion/1` will, as the name says, estimate how long it will take + for the process to reach its current objectives. + + It may return `:infinity` if the process is paused or does not have allocated + resources to it; and `-1` if the process is already completed. + """ + def estimate_completion(process) do + process + |> simulate() + |> seconds_for_completion() + end + @spec simulate(Process.t) :: {:completed, Process.t} | {:running, Process.t} @@ -43,7 +58,6 @@ defmodule Helix.Process.Model.TOP.Scheduler do will modify the process state to `:running`. This is done because `simulate/1` (and all methods on `TOP.Scheduler`) are called from `TOP.Action`, after the process was allocated. So it's safe to update the Process state to `:running`. - """ def simulate(process = %{state: :paused}), do: {:paused, process} @@ -136,21 +150,6 @@ defmodule Helix.Process.Model.TOP.Scheduler do end) end - @spec estimate_completion(Process.t) :: - {Process.t, Process.time_left | -1 | :infinity} - @doc """ - `estimate_completion/1` will, as the name says, estimate how long it will take - for the process to reach its current objectives. - - It may return `:infinity` if the process is paused or do not have allocated - resources to it; and `-1` if the process is already completed.
- """ - def estimate_completion(process) do - process - |> simulate() - |> seconds_for_completion() - end - @spec checkpoint(Process.t) :: {true, Process.changeset} | false diff --git a/lib/server/websocket/channel/server.ex b/lib/server/websocket/channel/server.ex index c5f61a28..472cea99 100644 --- a/lib/server/websocket/channel/server.ex +++ b/lib/server/websocket/channel/server.ex @@ -488,10 +488,7 @@ channel Helix.Server.Websocket.Channel.Server do ip = socket.assigns.destination.ip ServerWebsocketChannelState.leave( - entity_id, - server_id, - {network_id, ip}, - counter + entity_id, server_id, {network_id, ip}, counter ) end end diff --git a/test/log/process/recover_test.exs b/test/log/process/recover_test.exs index 90c3d62e..4b36f406 100644 --- a/test/log/process/recover_test.exs +++ b/test/log/process/recover_test.exs @@ -221,7 +221,27 @@ defmodule Helix.Log.Process.RecoverTest do {{:ok, process1}, %{gateway: gateway, entity_id: entity_id}} = LogSetup.recover_flow(method: :global, local?: true) - # TODO: How to ensure it was retargeted? + # Refetch the process so it contains allocation data + process1 = ProcessQuery.fetch(process1.process_id) + + # This is the `log` that the process is currently targeting + log = LogQuery.fetch(process1.tgt_log_id) + + # Now we'll recover it with LogRecoveredEvent + log + |> EventSetup.Log.recovered(entity_id) + |> EventHelper.emit() + + # Process still exists + process2 = ProcessQuery.fetch(process1.process_id) + + # The new process exists and is working on whatever log it selected. + assert process2.process_id + + # `last_checkpoint_time` has changed! So it was retargeted. + refute process1.last_checkpoint_time == process2.last_checkpoint_time + + TOPHelper.top_stop(gateway) end test "on_target_log: retargets `global` process when log is destroyed" do @@ -250,6 +270,9 @@ defmodule Helix.Log.Process.RecoverTest do # only one recoverable log on `gateway` and it was destroyed. Retargeted. 
refute process2.tgt_log_id + # Another proof of retarget: `last_checkpoint_time` has changed. + refute process1.last_checkpoint_time == process2.last_checkpoint_time + TOPHelper.top_stop(gateway) end diff --git a/test/process/event/handler/process_test.exs b/test/process/event/handler/process_test.exs new file mode 100644 index 00000000..25becbf3 --- /dev/null +++ b/test/process/event/handler/process_test.exs @@ -0,0 +1,49 @@ +defmodule Helix.Process.Event.Handler.ProcessTest do + + use Helix.Test.Case.Integration + + alias Helix.Log.Query.Log, as: LogQuery + alias Helix.Process.Query.Process, as: ProcessQuery + + alias Helix.Test.Event.Helper, as: EventHelper + alias Helix.Test.Event.Setup, as: EventSetup + alias Helix.Test.Log.Setup, as: LogSetup + alias Helix.Test.Process.TOPHelper + + describe "ProcessSignaledEvent" do + test ":retarget - modifies process target, objectives" do + # `process1` is recovering `log` at `gateway` + {{:ok, process1}, %{gateway: gateway}} = + LogSetup.recover_flow(method: :global, local?: true) + + # Wait 100ms to give some working time for the process + :timer.sleep(100) + + # Refetch the process so it contains allocation data + process1 = ProcessQuery.fetch(process1.process_id) + + # This is the `log` that the process is currently targeting + LogQuery.fetch(process1.tgt_log_id) + + # An empty map indicates nothing will actually change on the process, but + # the underlying `processed` and `last_checkpoint_time` should be reset. 
+ changes = %{} + + # Force retarget + process1 + |> EventSetup.Process.signaled(:SIGRETARGET, {:retarget, changes}, %{}) + |> EventHelper.emit() + + process2 = ProcessQuery.fetch(process1.process_id) + + # Retarget changed the process' `last_checkpoint_time` + refute process1.last_checkpoint_time == process2.last_checkpoint_time + + # All else being equal, `process2` takes longer to complete + assert process2.time_left > process1.time_left + assert process2.percentage < process1.percentage + + TOPHelper.top_stop(gateway) + end + end +end diff --git a/test/process/event/handler/top_test.exs b/test/process/event/handler/top_test.exs index ea1829cf..6c2cf637 100644 --- a/test/process/event/handler/top_test.exs +++ b/test/process/event/handler/top_test.exs @@ -8,10 +8,10 @@ defmodule Helix.Process.Event.Handler.TOPTest do alias Helix.Test.Event.Setup, as: EventSetup alias Helix.Test.Network.Setup, as: NetworkSetup + alias Helix.Test.Server.Setup, as: ServerSetup alias Helix.Test.Process.FakeDefaultProcess alias Helix.Test.Process.Setup, as: ProcessSetup alias Helix.Test.Process.TOPHelper - alias Helix.Test.Server.Setup, as: ServerSetup test "process is killed when its originating connection is closed" do {connection, _} = NetworkSetup.fake_connection() diff --git a/test/support/event/setup/process.ex b/test/support/event/setup/process.ex index e8321733..5053aca5 100644 --- a/test/support/event/setup/process.ex +++ b/test/support/event/setup/process.ex @@ -1,6 +1,7 @@ defmodule Helix.Test.Event.Setup.Process do alias Helix.Process.Event.Process.Created, as: ProcessCreatedEvent + alias Helix.Process.Event.Process.Signaled, as: ProcessSignaledEvent alias HELL.TestHelper.Random alias Helix.Test.Process.Setup, as: ProcessSetup @@ -17,4 +18,7 @@ defmodule Helix.Test.Event.Setup.Process do target_ip: Random.ipv4() } end + + def signaled(process, signal, action, params), + do: ProcessSignaledEvent.new(signal, process, action, params) end From 
353b97c56214ff6c3ad7d1e3a731c6f9623c91bf Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Fri, 31 Aug 2018 11:26:27 -0300 Subject: [PATCH 13/14] Use a nicer name pattern for Process signals --- lib/log/event/handler/log.ex | 6 ++-- lib/log/event/recover.ex | 2 +- lib/log/process/recover.ex | 10 +++--- lib/process/action/process.ex | 10 +++--- lib/process/event/handler/process.ex | 4 +-- lib/process/event/handler/top.ex | 10 +++--- lib/process/model/process.ex | 40 +++++++-------------- lib/process/model/processable.ex | 12 +++---- lib/process/processable.ex | 6 ++-- test/process/event/handler/process_test.exs | 2 +- 10 files changed, 44 insertions(+), 58 deletions(-) diff --git a/lib/log/event/handler/log.ex b/lib/log/event/handler/log.ex index 356af2c8..d0984c05 100644 --- a/lib/log/event/handler/log.ex +++ b/lib/log/event/handler/log.ex @@ -68,9 +68,9 @@ defmodule Helix.Log.Event.Handler.Log do If the `target_log_id` is nil, it means the process have been working on a log that is already on its original state, so there's nothing we can do other - than send a SIGRETARGET signal to the process. + than send a SIG_RETARGET signal to the process. - Otherwise, we pop the revision out and send the SIGRETARGET signal. + Otherwise, we pop the revision out and send the SIG_RETARGET signal. 
""" def recover_processed(event = %LogRecoverProcessedEvent{target_log_id: nil}), do: sigretarget(event) @@ -88,6 +88,6 @@ defmodule Helix.Log.Event.Handler.Log do defp sigretarget(event = %LogRecoverProcessedEvent{}) do event |> Event.get_process() - |> ProcessFlow.signal(:SIGRETARGET) + |> ProcessFlow.signal(:SIG_RETARGET) end end diff --git a/lib/log/event/recover.ex b/lib/log/event/recover.ex index 6e9df134..cb0d9023 100644 --- a/lib/log/event/recover.ex +++ b/lib/log/event/recover.ex @@ -44,7 +44,7 @@ defmodule Helix.Log.Event.Recover do } # Later on, after we pop out the revision from the stack, we'll send a - # SIGRETARGET signal to the process, so it can keep working on another log + # SIG_RETARGET signal to the process, so it can keep focus on another log. |> put_process(process) end diff --git a/lib/log/process/recover.ex b/lib/log/process/recover.ex index bf5d6b79..c8bb8d76 100644 --- a/lib/log/process/recover.ex +++ b/lib/log/process/recover.ex @@ -13,7 +13,7 @@ process Helix.Log.Process.Recover do a log and then the LogRecoverProcess will work on that log. In both methods, the process will run in a recursive fashion: once a revision - is found, it will send a `SIGRETARGET` and the process will find a new target. + is found, it will send a `SIG_RETARGET` and the process will find a new target. `global` processes might choose a different log to recover, while `custom` processes will keep working on the same log. @@ -129,11 +129,11 @@ process Helix.Log.Process.Recover do on_completion(process, data) do event = LogRecoverProcessedEvent.new(process, data) - # We can't send a SIGRETARGET now because if we do so, it might fetch the + # We can't send a SIG_RETARGET now because if we do so, it might fetch the # existing Log before the freshly recovered revision isn't removed from it # yet. So, to fix this race condition, we first process the log recovery # (by handling `LogRecoverProcessedEvent`), and only then we send the - # SIGRETARGET to this process. 
+ # SIG_RETARGET to this process. {:noop, [event]} end @@ -177,7 +177,7 @@ process Helix.Log.Process.Recover do automatically `:retarget` the process. """ on_target_log_recovered(_process, _data, _log) do - {:SIGRETARGET, []} + {:SIG_RETARGET, []} end @doc """ @@ -186,7 +186,7 @@ process Helix.Log.Process.Recover do notified. """ on_target_log_destroyed(%{type: :log_recover_global}, _data, _log) do - {:SIGRETARGET, []} + {:SIG_RETARGET, []} end on_target_log_destroyed(%{type: :log_recover_custom}, _data, _log) do diff --git a/lib/process/action/process.ex b/lib/process/action/process.ex index 48231d6f..f7184b8d 100644 --- a/lib/process/action/process.ex +++ b/lib/process/action/process.ex @@ -114,16 +114,16 @@ defmodule Helix.Process.Action.Process do # defp signal_handler(:SIGCONT, process, _), # do: Processable.resume(process.data, process, reason) - # defp signal_handler(:SIGPRIO, process, %{priority: priority}), + # defp signal_handler(:SIG_RENICE, process, %{priority: priority}), # do: Processable.priority(process.data, process, priority) - defp signal_handler(:SIGRETARGET, process, _), + defp signal_handler(:SIG_RETARGET, process, _), do: Processable.retarget(process.data, process) - defp signal_handler(:SIGSRCCONND, process, %{connection: connection}), + defp signal_handler(:SIG_SRC_CONN_DELETED, process, %{connection: connection}), do: Processable.source_connection_closed(process.data, process, connection) - defp signal_handler(:SIGTGTCONND, process, %{connection: connection}), + defp signal_handler(:SIG_TGT_CONN_DELETED, process, %{connection: connection}), do: Processable.target_connection_closed(process.data, process, connection) defp signal_handler(:SIG_TGT_LOG_REVISED, process, %{log: log}), @@ -135,7 +135,7 @@ defmodule Helix.Process.Action.Process do defp signal_handler(:SIG_TGT_LOG_DESTROYED, process, %{log: log}), do: Processable.target_log_destroyed(process.data, process, log) - # defp signal_handler(:SIGSRCFILED, process, %{file: file}), + # defp 
signal_handler(:SIG_SRC_FILE_DELETED, process, %{file: file}), # do: Processable.file_deleted(process.data, process, file) @spec get_process_ips(Process.creation_params) :: diff --git a/lib/process/event/handler/process.ex b/lib/process/event/handler/process.ex index 6c0e80b3..28818608 100644 --- a/lib/process/event/handler/process.ex +++ b/lib/process/event/handler/process.ex @@ -45,8 +45,8 @@ defmodule Helix.Process.Event.Handler.Process do events end - defp action_handler(:SIGRETARGET, process, _) do - {:ok, events} = ProcessAction.signal(process, :SIGRETARGET) + defp action_handler(:SIG_RETARGET, process, _) do + {:ok, events} = ProcessAction.signal(process, :SIG_RETARGET) events end diff --git a/lib/process/event/handler/top.ex b/lib/process/event/handler/top.ex index a462e969..89ec24ad 100644 --- a/lib/process/event/handler/top.ex +++ b/lib/process/event/handler/top.ex @@ -115,17 +115,17 @@ defmodule Helix.Process.Event.Handler.TOP do def object_handler(event = %ConnectionClosedEvent{}) do signal_param = %{connection: event.connection} - # Send SIGSRCCONND for processes that originated on such connection + # Send SIG_SRC_CONN_DELETED for processes that originated on such connection event.connection.connection_id |> ProcessQuery.get_processes_originated_on_connection() |> filter_self_message(event) - |> Enum.each(&ProcessFlow.signal(&1, :SIGSRCCONND, signal_param)) + |> Enum.each(&ProcessFlow.signal(&1, :SIG_SRC_CONN_DELETED, signal_param)) - # Send SIGTGTCONND for processes that are targeting such connection + # Send SIG_TGT_CONN_DELETED for processes that are targeting such connection event.connection.connection_id |> ProcessQuery.get_processes_targeting_connection() |> filter_self_message(event) - |> Enum.each(&ProcessFlow.signal(&1, :SIGTGTCONND, signal_param)) + |> Enum.each(&ProcessFlow.signal(&1, :SIG_TGT_CONN_DELETED, signal_param)) end def object_handler(event = %LogRevisedEvent{}) do @@ -173,7 +173,7 @@ defmodule Helix.Process.Event.Handler.TOP do 
Imagine, however, that `ProcessSignaledEvent` takes really long to arrive. It could happen that `FileDeletedEvent` arrives first. Now, it's the TOPHandler's - role to listen to `FileDeletedEvent`s and emit a SIGTGTFILED on all processes + role to listen to `FileDeletedEvent`s and emit a SIG_TGT_FILE_DELETED on all processes that target that file, including our recently completed process. This would not create an infinite loop, but it would affect the expected diff --git a/lib/process/model/process.ex b/lib/process/model/process.ex index 3f80e828..aeaa0c8c 100644 --- a/lib/process/model/process.ex +++ b/lib/process/model/process.ex @@ -125,18 +125,18 @@ defmodule Helix.Process.Model.Process do Default action is to resume the process. - ## SIGRETARGET + ## SIG_RETARGET Signal sent when the process finished prior execution and is now looking for a new target to work on. - Keep in mind that, when using `SIGRETARGET` on recursive processes, you might + Keep in mind that, when using `SIG_RETARGET` on recursive processes, you might want the signal to be sent only after the side-effect of the process has been properly processed. As an example, see `LogRecoverProcess`. Default action is to ignore the signal. - ## SIGPRIO + ## SIG_RENICE Signal sent when the user changed the priority of the process. @@ -150,44 +150,32 @@ defmodule Helix.Process.Model.Process do Note that these signals are NOT sent to the process that originated them. See `TOPHandler.filter_self_message/2` for context. - ## SIGSRCCONND + ## SIG_SRC_CONN_DELETED Signal sent when the connection that originated the Process was closed. Default action is to send itself a SIGKILL with `:src_connection_closed` reason. - ## SIGTGTCONND + ## SIG_TGT_CONN_DELETED Signal sent when the connection that the process is targeting was closed. Default action is to send itself a SIGKILL with `:tgt_connection_closed` reason. 
- ## SIGSRCFILED + ## SIG_SRC_FILE_DELETED Signal sent when the file that originated the process was deleted. Default action is to send itself a SIGKILL with `:src_file_deleted` reason. - ## SIGTGTFILED + ## SIG_TGT_FILE_DELETED Signal sent when the File that the process is targeting was deleted. Default action is to send itself a SIGKILL with `:tgt_file_deleted` reason. - ## SIGSRCBANKACCD - - Signal sent when the bank account the process uses as source was closed. - - Default action is to send itself a SIGKILL with `:src_bank_acc_closed` reason. - - ## SIGTGTBANKACCD - - Signal sent when the bank account the process is targeting was closed. - - Default action is to send itself a SIGKILL with `:tgt_bank_acc_closed` reason. - ## SIG_TGT_LOG_REVISED Signal sent when the log the process is targeting was revised (i.e. a new @@ -215,14 +203,12 @@ defmodule Helix.Process.Model.Process do | :SIGKILL | :SIGSTOP | :SIGCONT - | :SIGRETARGET - | :SIGPRIO - | :SIGSRCCONND - | :SIGTGTCONND - | :SIGSRCFILED - | :SIGTGTFILED - | :SIGSRCBANKACCD - | :SIGTGTBANKACCD + | :SIG_RETARGET + | :SIG_RENICE + | :SIG_SRC_CONN_DELETED + | :SIG_TGT_CONN_DELETED + | :SIG_SRC_FILE_DELETED + | :SIG_TGT_FILE_DELETED | :SIG_TGT_LOG_REVISED | :SIG_TGT_LOG_RECOVERED | :SIG_TGT_LOG_DESTROYED diff --git a/lib/process/model/processable.ex b/lib/process/model/processable.ex index 6f6084f0..85770acd 100644 --- a/lib/process/model/processable.ex +++ b/lib/process/model/processable.ex @@ -69,9 +69,9 @@ defprotocol Helix.Process.Model.Processable do Later on, the process *might* be killed. Depends on how it implements the `on_kill` callback. - ## :SIGRETARGET + ## :SIG_RETARGET - Sends a SIGRETARGET to itself + Sends a SIG_RETARGET to itself Later on, the process *might* change. Depends on how it implements the `on_retarget` callback. 
@@ -90,7 +90,7 @@ defprotocol Helix.Process.Model.Processable do | :restart | {:retarget, Process.retarget_changes} | {:SIGKILL, Process.kill_reason} - | :SIGRETARGET + | :SIG_RETARGET | :noop @spec complete(t, Process.t) :: @@ -110,7 +110,7 @@ defprotocol Helix.Process.Model.Processable do @spec retarget(t, Process.t) :: {action, [Event.t]} @doc """ - Called when the process receives a SIGRETARGET, meaning the process finished + Called when the process receives a SIG_RETARGET, meaning the process finished its previous objective and is now looking for something else to do. Commonly used on recursive processes. """ @@ -119,7 +119,7 @@ defprotocol Helix.Process.Model.Processable do @spec source_connection_closed(t, Process.t, Connection.t) :: {action, [Event.t]} @doc """ - Called when the process receives a SIGSRCCONND, meaning the connection that + Called when the process receives a SIG_SRC_CONN_DELETED, meaning the connection that originated that process has been closed. Also receives the connection that was recently closed. """ @@ -128,7 +128,7 @@ defprotocol Helix.Process.Model.Processable do @spec target_connection_closed(t, Process.t, Connection.t) :: {action, [Event.t]} @doc """ - Called when the process receives a SIGTGTCONND, meaning the connection that + Called when the process receives a SIG_TGT_CONN_DELETED, meaning the connection that process is targeting has been closed. Also receives the connection that was recently closed. """ diff --git a/lib/process/processable.ex b/lib/process/processable.ex index fe1b90b9..7ebf2562 100644 --- a/lib/process/processable.ex +++ b/lib/process/processable.ex @@ -117,7 +117,7 @@ defmodule Helix.Process.Processable do end @doc """ - Called when the process receives a SIGRETARGET. + Called when the process receives a SIG_RETARGET. Defines what should happen when the process is asked to look for a new target. 
@@ -135,7 +135,7 @@ defmodule Helix.Process.Processable do end @doc """ - Called when the process receives a SIGSRCCONND. + Called when the process receives a SIG_SRC_CONN_DELETED. Defines what should happen when the process' underlying connection is closed. @@ -155,7 +155,7 @@ defmodule Helix.Process.Processable do end @doc """ - Called when the process receives a SIGTGTCONND. + Called when the process receives a SIG_TGT_CONN_DELETED. Defines what should happen when the process' target connection is closed. diff --git a/test/process/event/handler/process_test.exs b/test/process/event/handler/process_test.exs index 25becbf3..ad68e000 100644 --- a/test/process/event/handler/process_test.exs +++ b/test/process/event/handler/process_test.exs @@ -31,7 +31,7 @@ defmodule Helix.Process.Event.Handler.ProcessTest do # Force retarget process1 - |> EventSetup.Process.signaled(:SIGRETARGET, {:retarget, changes}, %{}) + |> EventSetup.Process.signaled(:SIG_RETARGET, {:retarget, changes}, %{}) |> EventHelper.emit() process2 = ProcessQuery.fetch(process1.process_id) From 9876dbc83767870c549ede74ccaa73c483b721cf Mon Sep 17 00:00:00 2001 From: Renato Massaro Date: Sun, 21 Oct 2018 04:28:44 -0200 Subject: [PATCH 14/14] Add LogPaginateRequest --- lib/hell/hell/ip.ex | 6 ++ lib/log/model/log.ex | 2 +- lib/log/query/log.ex | 27 +------- lib/log/websocket/requests/paginate.ex | 53 ++++++++++++++++ lib/server/websocket/channel/server.ex | 13 ++++ lib/websocket/request/requestable.ex | 14 ++--- test/features/log/paginate_test.exs | 61 +++++++++++++++++++ test/id/id_test.exs | 13 ++++ test/log/websocket/requests/paginate_test.exs | 40 ++++++++++++ test/support/channel/setup.ex | 18 ++++++ test/support/log/setup.ex | 21 +++++++ 11 files changed, 235 insertions(+), 33 deletions(-) create mode 100644 lib/log/websocket/requests/paginate.ex create mode 100644 test/features/log/paginate_test.exs create mode 100644 test/log/websocket/requests/paginate_test.exs diff --git a/lib/hell/hell/ip.ex 
b/lib/hell/hell/ip.ex index f7600c88..ec4fb302 100644 --- a/lib/hell/hell/ip.ex +++ b/lib/hell/hell/ip.ex @@ -43,6 +43,12 @@ defmodule HELL.IPv6 do def binary_to_address_tuple(_), do: {:error, :einval} + def binary_to_address_tuple!(string) do + string + |> binary_to_address_tuple() + |> elem(1) + end + @spec generate_octet_groups(pos_integer) :: [0..0xffff] defp generate_octet_groups(groups) do diff --git a/lib/log/model/log.ex b/lib/log/model/log.ex index 432ce7c6..afb2102b 100644 --- a/lib/log/model/log.ex +++ b/lib/log/model/log.ex @@ -195,7 +195,7 @@ defmodule Helix.Log.Model.Log do Returns only logs that are older than the given `log_id`. """ def paginate_after_log(query, log_id), - do: where(query, [l], l.log_id < ^log_id) + do: where(query, [l], l.log_id > ^log_id) @spec only(Queryable.t, pos_integer) :: Queryable.t diff --git a/lib/log/query/log.ex b/lib/log/query/log.ex index 07dbfeb9..e299c4d1 100644 --- a/lib/log/query/log.ex +++ b/lib/log/query/log.ex @@ -4,7 +4,6 @@ defmodule Helix.Log.Query.Log do """ alias Helix.Server.Model.Server - alias Helix.Entity.Model.Entity alias Helix.Log.Internal.Log, as: LogInternal alias Helix.Log.Model.Log @@ -25,30 +24,8 @@ defmodule Helix.Log.Query.Log do defdelegate get_logs_on_server(server), to: LogInternal - @spec get_logs_from_entity_on_server(Server.idt, Entity.idt) :: + @spec paginate_logs_on_server(Server.id, Log.id, pos_integer) :: [Log.t] - @doc """ - Fetches logs on `server` that `entity` has created or revised - """ - defdelegate get_logs_from_entity_on_server(server, entity), - to: LogInternal - - @spec count_revisions_of_entity(Log.t, Entity.idt) :: - non_neg_integer - @doc """ - Returns the number of revisions on `log` that were created by `entity` - - ### Examples - - iex> count_revisions_of_entity(%Log{}, %Entity{}) - 0 - - iex> count_revisions_of_entity(%Log{}, %Entity.ID{}) - 2 - - Note that creating the log (either by forging it or by doing an action whose - side-effect is to create a log) will 
create a revision for the log - """ - defdelegate count_revisions_of_entity(log, entity), + defdelegate paginate_logs_on_server(server_id, last_log_id, count), to: LogInternal end diff --git a/lib/log/websocket/requests/paginate.ex b/lib/log/websocket/requests/paginate.ex new file mode 100644 index 00000000..864f24b4 --- /dev/null +++ b/lib/log/websocket/requests/paginate.ex @@ -0,0 +1,53 @@ +import Helix.Websocket.Request + +request Helix.Log.Websocket.Requests.Paginate do + + alias Helix.Log.Model.Log + alias Helix.Log.Query.Log, as: LogQuery + alias Helix.Log.Public.Index, as: LogIndex + + @default_total 20 + @max_total 100 + + def check_params(request, _socket) do + with {:ok, log_id} <- Log.ID.cast(request.unsafe["log_id"]) do + params = %{ + log_id: log_id, + total: get_total(request.unsafe["total"]) + } + + update_params(request, params, reply: true) + else + _ -> + bad_request(request) + end + end + + defp get_total(total) when not is_integer(total), + do: @default_total + defp get_total(total) when total <= 0, + do: @default_total + defp get_total(total) when total >= @max_total, + do: @max_total + defp get_total(total), + do: total + + def check_permissions(request, _socket), + do: reply_ok(request) + + def handle_request(request, socket) do + server_id = socket.assigns.destination.server_id + log_id = request.params.log_id + total = request.params.total + + logs = LogQuery.paginate_logs_on_server(server_id, log_id, total) + + update_meta(request, %{logs: logs}, reply: true) + end + + render(request, _socket) do + logs = Enum.map(request.meta.logs, &LogIndex.render_log/1) + + {:ok, logs} + end +end diff --git a/lib/server/websocket/channel/server.ex b/lib/server/websocket/channel/server.ex index 472cea99..6e560401 100644 --- a/lib/server/websocket/channel/server.ex +++ b/lib/server/websocket/channel/server.ex @@ -14,6 +14,7 @@ channel Helix.Server.Websocket.Channel.Server do alias Helix.Server.State.Websocket.Channel, as: ServerWebsocketChannelState alias 
Helix.Log.Websocket.Requests.Forge, as: LogForgeRequest + alias Helix.Log.Websocket.Requests.Paginate, as: LogPaginateRequest alias Helix.Log.Websocket.Requests.Recover, as: LogRecoverRequest alias Helix.Network.Websocket.Requests.Browse, as: BrowseRequest @@ -172,6 +173,18 @@ channel Helix.Server.Websocket.Channel.Server do """ topic "log.forge", LogForgeRequest + @doc """ + Fetches the logs in the server with pagination support. + + Params: + - *log_id: ID of the last seen log on the client. + - total: Total of logs to be returned. Defaults to 20. Max allowed is 100. + + Errors: + - base errors + """ + topic "log.paginate", LogPaginateRequest + @doc """ Starts a LogRecoverProcess. When recovering, the player may either start the process using the `global` method or the `custom` method. diff --git a/lib/websocket/request/requestable.ex b/lib/websocket/request/requestable.ex index 27a0bbec..8bbd6c92 100644 --- a/lib/websocket/request/requestable.ex +++ b/lib/websocket/request/requestable.ex @@ -95,8 +95,8 @@ defprotocol Helix.Websocket.Requestable do def handle_request(request, socket) @spec reply(Request.t, Websocket.t) :: - {:ok, reply :: map} - | {:error, reply :: map} + {:ok, reply :: map | list} + | {:error, reply :: map | list} | {:stop, reason :: term} | :noreply @doc """ @@ -122,12 +122,12 @@ defprotocol Helix.Websocket.Requestable do whole Requestable protocol is controlled at Websocket.Socket, which will reply to the request according to the return of `reply/2`. - If `reply/2` returns a `{:ok, data :: map}` tuple, it is indicating that the - response should reach the user. On the other hand, if it returns a `:noreply` - atom, well, the user will get no reply. + If `reply/2` returns a `{:ok, data}` tuple, it is indicating that the response + should reach the user. On the other hand, if it returns a `:noreply` atom, the + user will get no reply. - A return of `{:error, data :: map}` will send the rendered data to the client - with an error code. 
+ A return of `{:error, data}` will send the rendered data to the client with an + error code. This can further be extended to include all possible Channel responses, namely missing the `{:stop, reason}` response. diff --git a/test/features/log/paginate_test.exs b/test/features/log/paginate_test.exs new file mode 100644 index 00000000..ee539d79 --- /dev/null +++ b/test/features/log/paginate_test.exs @@ -0,0 +1,61 @@ +defmodule Helix.Test.Features.Log.Paginate do + + use Helix.Test.Case.Integration + + import Phoenix.ChannelTest + import Helix.Test.Macros + + alias Helix.Test.Channel.Setup, as: ChannelSetup + alias Helix.Test.Log.Setup, as: LogSetup + + @moduletag :feature + + describe "log paginate" do + test "Fetches logs newer than `log_id`" do + {socket, %{server: gateway}} = ChannelSetup.create_socket() + + # Connect to gateway channel + {socket, _} = + ChannelSetup.join_server( + gateway_id: gateway.server_id, + own_server: true, + socket: socket, + skip_logs: true + ) + + _log1 = + LogSetup.log!( + log_id: "e3ac:6eef:c924:a009:3f17:1abd:d5d8:5809", + server_id: gateway.server_id + ) + + log2 = + LogSetup.log!( + log_id: "e3ac:6eef:c924:a009:3f17:279f:9983:8709", + server_id: gateway.server_id + ) + + log3 = + LogSetup.log!( + log_id: "e3ac:6eef:c924:a009:3f17:3243:a825:4009", + server_id: gateway.server_id + ) + + # We'll use `log2` as starting point, so `log1` and `log2` should not be + # fetched. + + params = %{"log_id" => to_string(log2.log_id)} + + ref = push socket, "log.paginate", params + assert_reply ref, :ok, response, timeout(:slow) + + logs = response.data + + # Only one log was returned... 
+ assert length(logs) == 1 + + # And it is `log3` + assert List.first(logs).log_id == to_string(log3.log_id) + end + end +end diff --git a/test/id/id_test.exs b/test/id/id_test.exs index 26efdc29..06f22fd8 100644 --- a/test/id/id_test.exs +++ b/test/id/id_test.exs @@ -153,6 +153,19 @@ defmodule Helix.IDTest do refute slice(proc3_s1_id_bin, 24..53) == slice(proc3_s2_id_bin, 24..53) end + test "ids timestamp are sequential" do + parent = ID.generate(%{}, {:entity, :account}) + + id1 = ID.generate(%{parent: parent}, {:server, :desktop}) + :timer.sleep(1000) + id2 = ID.generate(%{parent: parent}, {:server, :desktop}) + + parse1 = ID.Utils.parse(id1) + parse2 = ID.Utils.parse(id2) + + assert parse2.timestamp.dec == parse1.timestamp.dec + 1 + end + skip_on_travis_slowpoke() test "benchmark" do # Generate 1000 IDs without parent and gp diff --git a/test/log/websocket/requests/paginate_test.exs b/test/log/websocket/requests/paginate_test.exs new file mode 100644 index 00000000..c1307fd1 --- /dev/null +++ b/test/log/websocket/requests/paginate_test.exs @@ -0,0 +1,40 @@ +defmodule Helix.Log.Websocket.Requests.PaginateTest do + + use Helix.Test.Case.Integration + + alias Helix.Websocket.Requestable + alias Helix.Log.Websocket.Requests.Paginate, as: LogPaginateRequest + + alias Helix.Test.Channel.Setup, as: ChannelSetup + alias Helix.Test.Log.Helper, as: LogHelper + + @mock_socket ChannelSetup.mock_server_socket() + + describe "LogPaginateRequest.check_params/2" do + test "accepts when everything is OK" do + log_id = LogHelper.id() + + p0 = %{ + "log_id" => to_string(log_id), + "total" => 50 + } + req0 = LogPaginateRequest.new(p0) + + assert {:ok, req0} = Requestable.check_params(req0, @mock_socket) + + assert req0.params.log_id == log_id + assert req0.params.total == 50 + + p1 = %{ + "log_id" => to_string(log_id), + "total" => 500_000 + } + req1 = LogPaginateRequest.new(p1) + + assert {:ok, req1} = Requestable.check_params(req1, @mock_socket) + + # If `total` is greater than 
`@max_total` allowed, `@max_total` is used. + assert req1.params.total == 100 + end + end +end diff --git a/test/support/channel/setup.ex b/test/support/channel/setup.ex index 8350ba9f..ae5b8692 100644 --- a/test/support/channel/setup.ex +++ b/test/support/channel/setup.ex @@ -1,3 +1,4 @@ +# credo:disable-for-this-file Credo.Check.Refactor.CyclomaticComplexity defmodule Helix.Test.Channel.Setup do import Phoenix.ChannelTest @@ -9,6 +10,7 @@ defmodule Helix.Test.Channel.Setup do alias Helix.Account.Websocket.Channel.Account, as: AccountChannel alias Helix.Entity.Model.Entity alias Helix.Entity.Query.Entity, as: EntityQuery + alias Helix.Log.Query.Log, as: LogQuery alias Helix.Server.Model.Server alias Helix.Server.Query.Server, as: ServerQuery alias Helix.Server.Websocket.Channel.Server, as: ServerChannel @@ -18,6 +20,7 @@ alias Helix.Test.Account.Setup, as: AccountSetup alias Helix.Test.Cache.Helper, as: CacheHelper alias Helix.Test.Entity.Helper, as: EntityHelper + alias Helix.Test.Log.Helper, as: LogHelper alias Helix.Test.Network.Helper, as: NetworkHelper alias Helix.Test.Network.Setup, as: NetworkSetup alias Helix.Test.Server.Helper, as: ServerHelper @@ -135,6 +138,7 @@ - destination_files: Whether to generate random files on destination. Defaults to false. - socket_opts: Relays opts to the `create_socket/1` method (if applicable) + - skip_logs: Whether to disable the server join log creation. Related: Account.t, \ @@ -211,6 +215,20 @@ } end + if opts[:skip_logs] do + server_id = + if local? 
do + gateway.server_id + else + destination.server_id + end + + server_id + |> LogQuery.get_logs_on_server() + |> List.last() + |> LogHelper.delete() + end + related = Map.merge(gateway_related, destination_related) CacheHelper.sync_test() diff --git a/test/support/log/setup.ex b/test/support/log/setup.ex index 7befe896..74a94ec5 100644 --- a/test/support/log/setup.ex +++ b/test/support/log/setup.ex @@ -1,3 +1,4 @@ +# credo:disable-for-this-file Credo.Check.Refactor.CyclomaticComplexity defmodule Helix.Test.Log.Setup do alias Ecto.Changeset @@ -48,6 +49,7 @@ defmodule Helix.Test.Log.Setup do end @doc """ + - log_id: Hardcode the log id. Useful for pagination tests. Optional. - server_id: Server which that log belongs to. - entity_id: Entity which that log belongs to. - type: Underlying log type. Defaults to random type. @@ -82,6 +84,25 @@ defmodule Helix.Test.Log.Setup do changeset = Log.create_changeset(params, revision_params) + # Override generated `log_id` with custom `log_id` if specified + changeset = + if opts[:log_id] do + log_id = %Log.ID{id: HELL.IPv6.binary_to_address_tuple!(opts[:log_id])} + + revision = Changeset.get_change(changeset, :revision) + new_revisions = + changeset + |> Changeset.get_change(:revisions) + |> Enum.map(&(Changeset.force_change(&1, :log_id, log_id))) + + changeset + |> Changeset.force_change(:log_id, log_id) + |> Changeset.force_change(:revision, Map.put(revision, :log_id, log_id)) + |> Changeset.put_assoc(:revisions, new_revisions) + else + changeset + end + log = Changeset.apply_changes(changeset) related = %{