diff --git a/apps/revault/src/revault_data_wrapper.erl b/apps/revault/src/revault_data_wrapper.erl index 5533853..9d0d5cc 100644 --- a/apps/revault/src/revault_data_wrapper.erl +++ b/apps/revault/src/revault_data_wrapper.erl @@ -11,7 +11,8 @@ -module(revault_data_wrapper). -export([peer/1, peer/2, new/0, ask/0, ok/0, error/1, fork/2]). -export([manifest/0, manifest/1, - send_file/4, send_multipart_file/6, send_deleted/2, + send_file/4, send_multipart_file/6, + send_deleted/2, send_conflict_deleted/3, send_conflict_file/5, send_conflict_multipart_file/7, fetch_file/1, sync_complete/0]). @@ -73,6 +74,9 @@ send_multipart_file(Path, Vsn, Hash, M, N, Bin) when M >= 1, M =< N -> send_deleted(Path, Vsn) -> {deleted_file, ?VSN, Path, {Vsn, deleted}}. +send_conflict_deleted(WorkPath, ConflictsLeft, Meta) -> + {conflict_file, ?VSN, WorkPath, deleted, ConflictsLeft, Meta}. + send_conflict_file(WorkPath, Path, ConflictsLeft, Meta, Bin) -> {conflict_file, ?VSN, WorkPath, Path, ConflictsLeft, Meta, Bin}. diff --git a/apps/revault/src/revault_data_wrapper.hrl b/apps/revault/src/revault_data_wrapper.hrl index 040a294..5df7eb3 100644 --- a/apps/revault/src/revault_data_wrapper.hrl +++ b/apps/revault/src/revault_data_wrapper.hrl @@ -1,4 +1,4 @@ % VSN 1: initial protocol % VSN 2: adds multipart file transfers % TODO: add test about protocol compatibility --define(VSN, 2). +-define(VSN, 3). diff --git a/apps/revault/src/revault_dirmon_tracker.erl b/apps/revault/src/revault_dirmon_tracker.erl index 9b4f3ca..6b36a95 100644 --- a/apps/revault/src/revault_dirmon_tracker.erl +++ b/apps/revault/src/revault_dirmon_tracker.erl @@ -151,6 +151,14 @@ handle_call({conflict, Work, {NewStamp, deleted}}, _From, %% but note the deletion stamp as part of the conflict. CStamp = conflict_stamp(Id, Stamp, NewStamp), {CStamp, {conflict, ConflictHashes, WorkingHash}}; + #{Work := {Stamp, deleted}} -> + %% This is a special case similar to having both files diverging + %% in stamps but having the same "hash" or value by virtue of being + %% deleted. Create an empty conflict file, assume further files might + %% come in as part of the sync or that this will properly carry + %% the state moving forward. + CStamp = conflict_stamp(Id, Stamp, NewStamp), + {CStamp, {conflict, [], deleted}}; #{Work := {Stamp, WorkingHash}} -> %% No conflict, create it ConflictingWork = revault_conflict_file:conflicting(Work, WorkingHash), @@ -374,8 +382,10 @@ conflict_marker(Dir, WorkingFile) -> write_conflict_marker(Dir, WorkingFile, {_, {conflict, Hashes, _}}) -> %% We don't care about the rename trick here, it's informational %% but all the critical data is tracked in the snapshot + F = conflict_marker(Dir, WorkingFile), + revault_file:ensure_dir(F), revault_file:write_file( - conflict_marker(Dir, WorkingFile), + F, lists:join($\n, [revault_conflict_file:hex(Hash) || Hash <- Hashes]) ). 
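As a quick illustration (not part of the patch), the new VSN-3 message introduced above round-trips through the wrapper and the matching transport unpack clauses below roughly like this; WorkPath and Stamp stand for whatever path and ITC stamp the tracker holds for the deleted file:

    %% Sketch: shape of the new deletion-conflict message on the wire.
    demo_conflict_deleted(WorkPath, Stamp) ->
        Msg = revault_data_wrapper:send_conflict_deleted(WorkPath, 0, {Stamp, deleted}),
        %% Msg =:= {conflict_file, 3, WorkPath, deleted, 0, {Stamp, deleted}}
        {conflict_file, WorkPath, deleted, 0, {Stamp, deleted}} = revault_disterl:unpack(Msg),
        ok.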
diff --git a/apps/revault/src/revault_disterl.erl b/apps/revault/src/revault_disterl.erl index ba3656f..e4625db 100644 --- a/apps/revault/src/revault_disterl.erl +++ b/apps/revault/src/revault_disterl.erl @@ -83,6 +83,8 @@ unpack({file, ?VSN, Path, Meta, PartNum, PartTotal, Bin}) -> {file, Path, Meta, unpack({fetch, ?VSN, Path}) -> {fetch, Path}; unpack({sync_complete, ?VSN}) -> sync_complete; unpack({deleted_file, ?VSN, Path, Meta}) -> {deleted_file, Path, Meta}; +unpack({conflict_file, ?VSN, WorkPath, deleted, Count, Meta}) -> + {conflict_file, WorkPath, deleted, Count, Meta}; unpack({conflict_file, ?VSN, WorkPath, Path, Count, Meta, Bin}) -> {conflict_file, WorkPath, Path, Count, Meta, Bin}; unpack({conflict_multipart_file, ?VSN, WorkPath, Path, Count, Meta, PartNum, PartTotal, Bin}) -> diff --git a/apps/revault/src/revault_fsm.erl b/apps/revault/src/revault_fsm.erl index 7efe9d6..3dc2a93 100644 --- a/apps/revault/src/revault_fsm.erl +++ b/apps/revault/src/revault_fsm.erl @@ -637,6 +637,25 @@ client_sync_files(info, {revault, _Marker, {deleted_file, F, Meta}}, Data) -> NewQ = send_next_scheduled(Q), NewAcc = Acc -- [F], {keep_state, Data#data{scan=true, sub=S#client_sync{queue=NewQ, acc=NewAcc}}}; +client_sync_files(info, {revault, _Marker, {conflict_file, WorkF, deleted, CountLeft, Meta}}, Data) -> + #data{name=Name, sub=S=#client_sync{queue=Q, acc=Acc}} = Data, + ?with_span( + <<"conflict">>, + #{attributes => [{<<"path">>, WorkF}, {<<"meta">>, ?str(Meta)}, + {<<"count">>, CountLeft} | ?attrs(Data)]}, + fun(_SpanCtx) -> + %% TODO: handle the file being corrupted vs its own hash + revault_dirmon_tracker:conflict(Name, WorkF, Meta) + end + ), + case CountLeft =:= 0 andalso Acc -- [WorkF] of + false -> + %% more of the same conflict file to come + {keep_state, Data#data{scan=true}}; + NewAcc -> + NewQ = send_next_scheduled(Q), + {keep_state, Data#data{scan=true, sub=S#client_sync{queue=NewQ, acc=NewAcc}}} + end; client_sync_files(info, {revault, _Marker, {conflict_file, WorkF, F, CountLeft, Meta, Bin}}, Data) -> #data{name=Name, sub=S=#client_sync{queue=Q, acc=Acc}} = Data, ?with_span( @@ -854,6 +873,16 @@ server_sync_files(info, {revault, _Marker, {deleted_file, F, Meta}}, | ?attrs(Data)]}, fun(_SpanCtx) -> handle_delete_sync(Name, Id, F, Meta) end), {keep_state, Data#data{scan=true}}; +server_sync_files(info, {revault, _M, {conflict_file, WorkF, deleted, CountLeft, Meta}}, Data) -> + ?with_span( + <<"conflict">>, + #{attributes => [{<<"path">>, WorkF}, {<<"meta">>, ?str(Meta)}, + {<<"count">>, CountLeft} | ?attrs(Data)]}, + fun(_SpanCtx) -> + revault_dirmon_tracker:conflict(Data#data.name, WorkF, Meta) + end + ), + {keep_state, Data#data{scan=true}}; server_sync_files(info, {revault, _M, {conflict_file, WorkF, F, CountLeft, Meta, Bin}}, Data) -> %% TODO: handle the file being corrupted vs its own hash ?with_span( @@ -1202,6 +1231,11 @@ file_transfer_schedule(Name, Path, File) -> case revault_dirmon_tracker:file(Name, File) of {Vsn, deleted} -> [{deleted, File, Vsn}]; + {Vsn, {conflict, [], deleted}} -> + %% Special deletion case where clashing deleted files + %% exist; there's no hash to send here, and no FHash; + %% explicitly call it as deleted. 
+ [{conflict_file, File, deleted, 0, {Vsn, deleted}}]; {Vsn, {conflict, Hashes, _}} -> {List, _} = lists:foldl( fun(Hash, {Acc, Ct}) -> @@ -1231,6 +1265,11 @@ wrap(_Path, {deleted, File, Vsn}) -> ?set_attribute(<<"path">>, File), ?set_attribute(<<"transfer_type">>, <<"deleted">>), revault_data_wrapper:send_deleted(File, Vsn); +wrap(_Path, {conflict_file, File, deleted, Ct, Meta}) -> + ?set_attribute(<<"path">>, deleted), + ?set_attribute(<<"transfer_type">>, <<"conflict_file">>), + ?set_attribute(<<"conflict.ct">>, Ct), + revault_data_wrapper:send_conflict_deleted(File, Ct, Meta); wrap(Path, {conflict_file, File, FHash, Ct, Meta}) -> ?set_attribute(<<"path">>, FHash), ?set_attribute(<<"transfer_type">>, <<"conflict_file">>), @@ -1303,4 +1342,3 @@ pid_attrs() -> proplists:get_value(minor_gcs, proplists:get_value(garbage_collection, PidInfo))} ]. - diff --git a/apps/revault/src/revault_tcp.erl b/apps/revault/src/revault_tcp.erl index ad47da4..a86c49c 100644 --- a/apps/revault/src/revault_tcp.erl +++ b/apps/revault/src/revault_tcp.erl @@ -134,6 +134,8 @@ unpack({file, ?VSN, Path, Meta, PartNum, PartTotal, Bin}) -> {file, Path, Meta, unpack({fetch, ?VSN, Path}) -> {fetch, Path}; unpack({sync_complete, ?VSN}) -> sync_complete; unpack({deleted_file, ?VSN, Path, Meta}) -> {deleted_file, Path, Meta}; +unpack({conflict_file, ?VSN, WorkPath, deleted, Count, Meta}) -> + {conflict_file, WorkPath, deleted, Count, Meta}; unpack({conflict_file, ?VSN, WorkPath, Path, Count, Meta, Bin}) -> {conflict_file, WorkPath, Path, Count, Meta, Bin}; unpack({conflict_multipart_file, ?VSN, WorkPath, Path, Count, Meta, PartNum, PartTotal, Bin}) -> diff --git a/apps/revault/src/revault_tls.erl b/apps/revault/src/revault_tls.erl index 6db9f52..b2f9402 100644 --- a/apps/revault/src/revault_tls.erl +++ b/apps/revault/src/revault_tls.erl @@ -138,6 +138,8 @@ unpack({file, ?VSN, Path, Meta, PartNum, PartTotal, Bin}) -> {file, Path, Meta, unpack({fetch, ?VSN, Path}) -> {fetch, Path}; unpack({sync_complete, ?VSN}) -> sync_complete; unpack({deleted_file, ?VSN, Path, Meta}) -> {deleted_file, Path, Meta}; +unpack({conflict_file, ?VSN, WorkPath, deleted, Count, Meta}) -> + {conflict_file, WorkPath, deleted, Count, Meta}; unpack({conflict_file, ?VSN, WorkPath, Path, Count, Meta, Bin}) -> {conflict_file, WorkPath, Path, Count, Meta, Bin}; unpack({conflict_multipart_file, ?VSN, WorkPath, Path, Count, Meta, PartNum, PartTotal, Bin}) -> diff --git a/apps/revault/test/revault_fsm_SUITE.erl b/apps/revault/test/revault_fsm_SUITE.erl index babbcbc..ece573a 100644 --- a/apps/revault/test/revault_fsm_SUITE.erl +++ b/apps/revault/test/revault_fsm_SUITE.erl @@ -22,6 +22,7 @@ groups() -> fork_server_save, seed_fork, basic_sync, delete_sync, too_many_clients, overwrite_sync_clash, conflict_sync, + delete_sync_conflict, prevent_server_clash, multipart, double_conflict]}]. @@ -714,6 +715,70 @@ conflict_sync(Config) -> ?assertEqual({ok, <<"sh2">>}, file:read_file(filename:join([ClientPath2, "shared.1C56416E"]))), ok. +delete_sync_conflict() -> + [{doc, "A deletion conflict can be sync'd to a third party"}, + {timetrap, timer:seconds(5)}]. 
+delete_sync_conflict(Config) -> + Client = ?config(name, Config), + Server=?config(server, Config), + Remote = (?config(peer, Config))(Server), + ClientPath = ?config(path, Config), + ServerPath = ?config(server_path, Config), + {ok, _ServId1} = revault_fsm:id(Server), + {ok, _} = revault_fsm_sup:start_fsm( + ?config(db_dir, Config), + Client, + ClientPath, + ?config(ignore, Config), + ?config(interval, Config), + (?config(callback, Config))(Client) + ), + ok = revault_fsm:client(Client), + {ok, _ClientId} = revault_fsm:id(Client, Remote), + %% Set up a second client; because of how config works in the test, it needs + Client2 = Client ++ "_2", + Priv = ?config(priv_dir, Config), + DbDir2 = filename:join([Priv, "db_2"]), + ClientPath2 = filename:join([Priv, "data", "client_2"]), + filelib:ensure_dir(filename:join([DbDir2, "fakefile"])), + filelib:ensure_dir(filename:join([ClientPath2, "fakefile"])), + {ok, _} = revault_fsm_sup:start_fsm(DbDir2, Client2, ClientPath2, + ?config(ignore, Config), ?config(interval, Config), + (?config(callback, Config))(Client2)), + ok = revault_fsm:client(Client2), + ?assertMatch({ok, _}, revault_fsm:id(Client2, Remote)), + %% now in initialized mode + %% Write files + ok = file:write_file(filename:join([ServerPath, "shared"]), "sh1"), + ok = file:write_file(filename:join([ClientPath, "shared"]), "sh2"), + %% Track em + ok = revault_dirmon_event:force_scan(Client, 5000), + ok = revault_dirmon_event:force_scan(Server, 5000), + %% Delete em + ok = file:delete(filename:join([ServerPath, "shared"])), + ok = file:delete(filename:join([ClientPath, "shared"])), + %% Track the deletion + ok = revault_dirmon_event:force_scan(Client, 5000), + ok = revault_dirmon_event:force_scan(Server, 5000), + %% Sync em + ct:pal("SYNC", []), + ok = revault_fsm:sync(Client, Remote), + %% See the result + %% conflicting files are marked, with empty conflict files since nothing exists aside + %% from clashing deletions. + ?assertEqual({error, enoent}, file:read_file(filename:join([ServerPath, "shared"]))), + ?assertEqual({error, enoent}, file:read_file(filename:join([ClientPath, "shared"]))), + ?assertEqual({ok, <<"">>}, file:read_file(filename:join([ServerPath, "shared.conflict"])) ), + ?assertEqual({ok, <<"">>}, file:read_file(filename:join([ClientPath, "shared.conflict"])) ), + + %% Now when client 2 syncs, it gets the files and conflict files as well + ct:pal("SECOND SYNC", []), + ok = revault_fsm:sync(Client2, Remote), + %% conflicting files are marked, but working files aren't sync'd since they didn't exist here + ?assertEqual({error, enoent}, file:read_file(filename:join([ClientPath2, "shared"]))), + ?assertEqual({ok, <<"">>}, file:read_file(filename:join([ClientPath2, "shared.conflict"])) ), + ok. + prevent_server_clash() -> [{doc, "A client from a different server cannot connect to the wrong one " "as it is protected by a UUID."}, diff --git a/cli/revault_cli/rebar.config b/cli/revault_cli/rebar.config index fd26ac9..2ea5211 100644 --- a/cli/revault_cli/rebar.config +++ b/cli/revault_cli/rebar.config @@ -1 +1,2 @@ -{deps, [argparse]}. +{deps, [argparse, + {cecho, {git, "https://github.com/ferd/cecho.git", {branch, "master"}}}]}. 
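Why the delete_sync_conflict test above ends up with empty shared.conflict markers: the new deleted-vs-deleted clause in revault_dirmon_tracker records {conflict, [], deleted}, so write_conflict_marker joins an empty list of hashes and writes an empty file. A minimal trace of that path:

    %% Sketch: marker content for a conflict whose only members are deletions.
    Hashes = [],
    Marker = lists:join($\n, [revault_conflict_file:hex(Hash) || Hash <- Hashes]),
    %% Marker =:= [], so file:read_file/1 on the marker returns {ok, <<"">>}.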
diff --git a/cli/revault_cli/src/revault_cli.app.src b/cli/revault_cli/src/revault_cli.app.src index da2169e..36e371a 100644 --- a/cli/revault_cli/src/revault_cli.app.src +++ b/cli/revault_cli/src/revault_cli.app.src @@ -2,10 +2,12 @@ [{description, "An escript to interact with ReVault nodes"}, {vsn, "0.1.0"}, {registered, []}, + {mod, {revault_cli_app, []}}, {applications, [kernel, stdlib, - argparse + argparse, + cecho ]}, {env,[]}, {modules, []}, diff --git a/cli/revault_cli/src/revault_cli.hrl b/cli/revault_cli/src/revault_cli.hrl new file mode 100644 index 0000000..9363b50 --- /dev/null +++ b/cli/revault_cli/src/revault_cli.hrl @@ -0,0 +1,11 @@ +-define(KEY_BACKSPACE, 127). +-define(KEY_CTRLA, 1). +-define(KEY_CTRLE, 5). +-define(KEY_CTRLD, 4). +-define(KEY_ENTER, 10). +-define(KEY_TEXT_RANGE(X), % ignore control codes + (not(X < 32) andalso + not(X >= 127 andalso X < 160))). + +-define(EXEC_LINES, 15). +-define(MAX_VALIDATION_DELAY, 150). % longest time to validate input, in ms diff --git a/cli/revault_cli/src/revault_cli_app.erl b/cli/revault_cli/src/revault_cli_app.erl new file mode 100644 index 0000000..9c5bb00 --- /dev/null +++ b/cli/revault_cli/src/revault_cli_app.erl @@ -0,0 +1,9 @@ +-module(revault_cli_app). +-behaviour(application). +-export([start/2, stop/1]). + +start(_Type, _Args) -> + revault_curses:start_link(revault_cli_mod). + +stop(_State) -> + ok. diff --git a/cli/revault_cli/src/revault_cli_mod.erl b/cli/revault_cli/src/revault_cli_mod.erl new file mode 100644 index 0000000..0279a0d --- /dev/null +++ b/cli/revault_cli/src/revault_cli_mod.erl @@ -0,0 +1,779 @@ +-module(revault_cli_mod). +-behaviour(revault_curses). +-include("revault_cli.hrl"). +-include_lib("cecho/include/cecho.hrl"). + +-define(DEFAULT_NODE, list_to_atom("revault@" ++ hd(tl(string:tokens(atom_to_list(node()), "@"))))). + +-export([menu_order/0, menu_help/1, args/0, + init/0, render_exec/5, handle_exec/4]). + +%%%%%%%%%%%%%%%%% +%%% CALLBACKS %%% +%%%%%%%%%%%%%%%%% +menu_order() -> + [list, scan, sync, status, 'generate-keys', seed, 'remote-seed']. + +menu_help(list) -> "Show configuration and current settings"; +menu_help(scan) -> "Scan directories for changes"; +menu_help(sync) -> "Synchronize files with remote peer"; +menu_help(status) -> "Display current ReVault instance's configuration status"; +menu_help('generate-keys') -> "Generate TLS certificates for secure connections"; +menu_help(seed) -> "Create initial seed data to a directory, to use in a client"; +menu_help('remote-seed') -> "Create seed data as a client, from remote peer". 
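For context, a sketch of how this callback module is expected to be brought up, going through the OTP application callback added above (the shell has to be distributed for ?DEFAULT_NODE to resolve to a usable node name):

    %% Sketch: start the TUI; cecho is started as a dependency of revault_cli.
    {ok, _Started} = application:ensure_all_started(revault_cli),
    %% revault_cli_app:start/2 then runs revault_curses:start_link(revault_cli_mod).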
+ +args() -> + #{list => [ + #{name => node, label => "Local Node", + type => {node, "[\\w.-]+@[\\w.-]+", fun check_connect/2}, default => ?DEFAULT_NODE, + help => "ReVault instance to connect to"} + ], + scan => [ + #{name => node, label => "Local Node", + type => {node, "[\\w.-]+@[\\w.-]+", fun check_connect/2}, default => ?DEFAULT_NODE, + help => "Local ReVault instance to connect to"}, + #{name => dirs, label => "Dirs", + type => {list, fun revault_curses:parse_list/2, fun check_dirs/2}, default => fun default_dirs/1, + help => "List of directories to scan"} + ], + sync => [ + #{name => node, label => "Local Node", + type => {node, "[\\w.-]+@[\\w.-]+", fun check_connect/2}, default => ?DEFAULT_NODE, + help => "Local ReVault instance to connect to"}, + #{name => dirs, label => "Dirs", + type => {list, fun revault_curses:parse_list/2, fun check_dirs/2}, default => fun default_dirs/1, + help => "List of directories to scan"}, + #{name => peer, label => "Peer Node", + type => {string, "^(?:\\s*)?(.+)(?:\\s*)?$", fun check_peer/2}, default => fun default_peers/1, + help => "Peer to sync against"} + ], + status => [ + #{name => node, label => "Local Node", + type => {node, "[\\w.-]+@[\\w.-]+", fun check_connect/2}, default => ?DEFAULT_NODE, + help => "ReVault instance to connect to"} + ], + 'generate-keys' => [ + #{name => certname, label => "Certificate Name", + % the string regex 'trims' leading and trailing whitespace + type => {string, "[^\\s]+.*[^\\s]+", fun check_ignore/2}, default => "revault", + help => "Name of the key files generated"}, + #{name => path, label => "Certificate Directory", + type => {string, "[^\\s]+.*[^\\s]+", fun check_ignore/2}, default => "./", + help => "Directory where the key files will be placed"} + ], + seed => [ + #{name => node, label => "Local Node", + type => {node, "[\\w.-]+@[\\w.-]+", fun check_connect/2}, default => ?DEFAULT_NODE, + help => "ReVault instance to connect to"}, + #{name => path, label => "Fork Seed Directory", + type => {string, "[^\\s]+.*[^\\s]+", fun check_ignore/2}, default => "./forked/", + help => "path of the base directory where the forked data will be located."}, + #{name => dirs, label => "Dirs", + type => {list, fun revault_curses:parse_list/2, fun check_dirs/2}, default => fun default_dirs/1, + help => "List of directories to fork"} + ], + 'remote-seed' => [ + #{name => node, label => "Local Node", + type => {node, "[\\w.-]+@[\\w.-]+", fun check_connect/2}, default => ?DEFAULT_NODE, + help => "ReVault instance to connect to"}, + #{name => peer, label => "Peer Node", + type => {string, "^(?:\\s*)?(.+)(?:\\s*)?$", fun check_peer/2}, default => fun default_peers/1, + help => "Peer from which to fork a seed"}, + #{name => dirs, label => "Dirs", + %% TODO: replace list by 'peer_dirs' + type => {list, fun revault_curses:parse_list/2, fun check_dirs/2}, default => fun default_dirs/1, + help => "List of directories to fork"} + ] + }. + +init() -> + #{}. + +render_exec(Action, Args, MaxLines, MaxCols, State) -> + render_exec(Action, MaxLines, MaxCols, State#{exec_args => Args}). 
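Each args/0 entry drives both parsing and validation: the type field is a {Kind, ConvertorOrRegex, Validator} triple where the second element is either a regex (the first match is kept) or a fun(String, State) parser, and the third is a fun(State, Value) check run before the action executes. A hypothetical extra argument, purely to show the shape (limit is not a real option of this module):

    #{name => limit, label => "Max transfers",
      %% regex convertor: keep the matched digits, no further validation
      type => {string, "^[0-9]+$", fun(_State, _Val) -> ok end},
      default => "10",
      help => "Upper bound on concurrent transfers"}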
+ +render_exec(list, _MaxLines, _MaxCols, State) -> + NewState = ensure_exec_state(list, State), + #{exec_state := #{path := Path, config := Config, offset := {OffY,OffX}}} = NewState, + Brk = io_lib:format("~n", []), + Str = io_lib:format("Config parsed from ~ts:~n~p~n", [Path, Config]), + %% Fit lines and the whole thing in a "box" + Lines = string:lexemes(Str, Brk), + {{clip, {OffY,OffX}}, NewState, Lines}; +render_exec(scan, _MaxLines, _MaxCols, State) -> + NewState = ensure_exec_state(scan, State), + #{exec_state := #{dirs := Statuses}} = NewState, + LStatuses = lists:sort(maps:to_list(Statuses)), + %% TODO: support scrolling if you have more Dirs than MaxLines or + %% dirs that are too long by tracking clipping offsets. + LongestDir = lists:max([string:length(D) || {D, _} <- LStatuses]), + Strs = [[string:pad([Dir, ":"], LongestDir+1, trailing, " "), " ", + case Status of + pending -> "??"; + ok -> "ok"; + _ -> "!!" + end] || {Dir, Status} <- LStatuses], + {clip, NewState, Strs}; +render_exec(sync, _MaxLines, _MaxCols, State) -> + NewState = ensure_exec_state(sync, State), + #{exec_state := #{dirs := Statuses}} = NewState, + LStatuses = lists:sort(maps:to_list(Statuses)), + %% TODO: support scrolling if you have more Dirs than MaxLines or + %% dirs that are too long by tracking clipping offsets + LongestDir = lists:max([string:length(D) || {D, _} <- LStatuses]), + Header = [string:pad("DIR", LongestDir+1, trailing, " "), " SCAN SYNC"], + Strs = [[string:pad([Dir, ":"], LongestDir+1, trailing, " "), " ", + case Status of + pending -> " ??"; + scanned -> " ok ??"; + synced -> " ok ok"; + _ -> " !! !!" + end] || {Dir, Status} <- LStatuses], + {clip, NewState, [Header | Strs]}; +render_exec(status, _MaxLines, _MaxCols, State) -> + NewState = ensure_exec_state(status, State), + #{exec_state := #{status := Status}} = NewState, + Strs = [io_lib:format("~p",[Status])], + {wrap, NewState, Strs}; +render_exec('generate-keys', _MaxLines, _MaxCols, State) -> + NewState = ensure_exec_state('generate-keys', State), + #{exec_state := #{status := Exists}} = NewState, + {wrap, NewState, Exists}; +render_exec(seed, _MaxLines, _MaxCols, State) -> + NewState = ensure_exec_state(seed, State), + #{exec_state := #{dirs := Statuses}} = NewState, + LStatuses = lists:sort(maps:to_list(Statuses)), + LongestDir = lists:max([string:length(D) || {D, _} <- LStatuses]), + Strs = [[string:pad([Dir, ":"], LongestDir+1, trailing, " "), " ", + case Status of + pending -> "??"; + ok -> "ok"; + _ -> "!!" + end] || {Dir, Status} <- LStatuses], + {clip, NewState, Strs}; +render_exec('remote-seed', _MaxLines, _MaxCols, State) -> + NewState = ensure_exec_state('remote-seed', State), + #{exec_state := #{dirs := Statuses}} = NewState, + LStatuses = lists:sort(maps:to_list(Statuses)), + LongestDir = lists:max([string:length(D) || {D, _} <- LStatuses]), + Strs = [[string:pad([Dir, ":"], LongestDir+1, trailing, " "), " ", + case Status of + pending -> "??"; + {ok, _ITC} -> "ok"; + _ -> "!!" + end] || {Dir, Status} <- LStatuses], + {clip, NewState, Strs}; +render_exec(Action, _MaxLines, _MaxCols, State) -> + {clip, State, [[io_lib:format("Action ~p not implemented yet.", [Action])]]}. + + +handle_exec(input, ?ceKEY_ESC, _Action, State) -> + case State of + #{exec_state := #{worker := P}} -> P ! 
stop; + _ -> ok + end, + {done, maps:without([exec_state], State)}; +%% List exec +handle_exec(input, ?ceKEY_DOWN, list, State = #{exec_state:=ES}) -> + {Y,X} = maps:get(offset, ES, {0, 0}), + {ok, State#{exec_state => ES#{offset => {Y+1, X}}}}; +handle_exec(input, ?ceKEY_UP, list, State = #{exec_state:=ES}) -> + {Y,X} = maps:get(offset, ES, {0, 0}), + {ok, State#{exec_state => ES#{offset => {max(0,Y-1), X}}}}; +handle_exec(input, ?ceKEY_RIGHT, list, State = #{exec_state:=ES}) -> + {Y,X} = maps:get(offset, ES, {0, 0}), + {ok, State#{exec_state => ES#{offset => {Y, X+1}}}}; +handle_exec(input, ?ceKEY_LEFT, list, State = #{exec_state:=ES}) -> + {Y,X} = maps:get(offset, ES, {0, 0}), + {ok, State#{exec_state => ES#{offset => {Y, max(0,X-1)}}}}; +handle_exec(input, ?ceKEY_PGDOWN, list, State = #{exec_state:=ES}) -> + {Y,X} = maps:get(offset, ES, {0, 0}), + Shift = ?EXEC_LINES-1, + {ok, State#{exec_state => ES#{offset => {Y+Shift, X}}}}; +handle_exec(input, ?ceKEY_PGUP, list, State = #{exec_state:=ES}) -> + {Y,X} = maps:get(offset, ES, {0, 0}), + Shift = ?EXEC_LINES-1, + {ok, State#{exec_state => ES#{offset => {max(0,Y-Shift), X}}}}; +%% TODO: ctrlA, ctrlE +%% Scan exec +handle_exec(event, {revault, scan, done}, scan, State=#{exec_state:=ES}) -> + %% unset the workers + case maps:get(worker, ES, undefined) of + undefined -> + ok; + Pid -> + %% make sure the worker is torn down fully, even + %% if this is blocking + Pid ! done, + Ref = erlang:monitor(process, Pid), + receive + {'DOWN', Ref, process, _, _} -> + ok + after 5000 -> + %% we ideally wouldn't wait more than ?MAX_VALIDATION_DELAY + %% so consider this a hard failure. + error(bad_worker_shutdown) + end + end, + {ok, State}; +handle_exec(event, {revault, scan, {Dir, Status}}, scan, State=#{exec_state:=ES}) -> + #{dirs := Statuses} = ES, + {ok, State#{exec_state => ES#{dirs => Statuses#{Dir => Status}}}}; +handle_exec(input, ?KEY_ENTER, scan, State) -> + %% Do a refresh by exiting the menu and re-entering again. Quite hacky. + revault_curses:send_event(self(), {revault, scan, done}), + revault_curses:send_input(self(), ?ceKEY_ESC), + revault_curses:send_input(self(), ?KEY_ENTER), + {ok, State}; +%% Sync exec +handle_exec(event, {revault, sync, done}, sync, State=#{exec_state:=ES}) -> + %% unset the workers + case maps:get(worker, ES, undefined) of + undefined -> + ok; + Pid -> + %% make sure the worker is torn down fully, even + %% if this is blocking + Pid ! done, + Ref = erlang:monitor(process, Pid), + receive + {'DOWN', Ref, process, _, _} -> + ok + after 5000 -> + %% we ideally wouldn't wait more than ?MAX_VALIDATION_DELAY + %% so consider this a hard failure. + error(bad_worker_shutdown) + end + end, + {ok, State}; +handle_exec(event, {revault, sync, {Dir, Status}}, sync, State=#{exec_state:=ES}) -> + #{dirs := Statuses} = ES, + {ok, State#{exec_state => ES#{dirs => Statuses#{Dir => Status}}}}; +handle_exec(input, ?KEY_ENTER, sync, State) -> + %% Do a refresh by exiting the menu and re-entering again. Quite hacky. + revault_curses:send_event(self(), {revault, sync, done}), + revault_curses:send_input(self(), ?ceKEY_ESC), + revault_curses:send_input(self(), ?KEY_ENTER), + {ok, State}; +%% Status +handle_exec(event, {revault, status, done}, status, State) -> + {ok, State}; +handle_exec(event, {revault, status, {ok, Val}}, status, State=#{exec_state:=ES}) -> + {ok, State#{exec_state => ES#{status => Val}}}; +handle_exec(input, ?KEY_ENTER, status, State) -> + %% Do a refresh by exiting the menu and re-entering again. Quite hacky. 
+ revault_curses:send_event(self(), {revault, status, done}), + revault_curses:send_input(self(), ?ceKEY_ESC), + revault_curses:send_input(self(), ?KEY_ENTER), + {ok, State}; +%% Generate-Keys +handle_exec(event, {revault, 'generate-keys', {ok, Val}}, 'generate-keys', State=#{exec_state:=ES}) -> + {ok, State#{exec_state => ES#{status => Val}}}; +handle_exec(input, ?KEY_ENTER, 'generate-keys', State) -> + %% Do a refresh by exiting the menu and re-entering again. Quite hacky. + revault_curses:send_input(self(), ?ceKEY_ESC), + revault_curses:send_input(self(), ?KEY_ENTER), + {ok, State}; +%% Seed exec +handle_exec(event, {revault, seed, done}, seed, State=#{exec_state:=ES}) -> + %% unset the workers + case maps:get(worker, ES, undefined) of + undefined -> + ok; + Pid -> + %% make sure the worker is torn down fully, even + %% if this is blocking + Pid ! done, + Ref = erlang:monitor(process, Pid), + receive + {'DOWN', Ref, process, _, _} -> + ok + after 5000 -> + %% we ideally wouldn't wait more than ?MAX_VALIDATION_DELAY + %% so consider this a hard failure. + error(bad_worker_shutdown) + end + end, + {ok, State}; +handle_exec(event,{revault, seed, {Dir, Status}}, seed, State=#{exec_state:=ES}) -> + #{dirs := Statuses} = ES, + {ok, State#{exec_state => ES#{dirs => Statuses#{Dir => Status}}}}; +%% remote-seed exec +handle_exec(event,{revault, 'remote-seed', done}, 'remote-seed', State=#{exec_state:=ES}) -> + %% unset the workers + case maps:get(worker, ES, undefined) of + undefined -> + ok; + Pid -> + %% make sure the worker is torn down fully, even + %% if this is blocking + Pid ! done, + Ref = erlang:monitor(process, Pid), + receive + {'DOWN', Ref, process, _, _} -> + ok + after 5000 -> + %% we ideally wouldn't wait more than ?MAX_VALIDATION_DELAY + %% so consider this a hard failure. + error(bad_worker_shutdown) + end + end, + {ok, State}; +handle_exec(event,{revault, 'remote-seed', {Dir, Status}}, 'remote-seed', State=#{exec_state:=ES}) -> + #{dirs := Statuses} = ES, + file:write_file("/tmp/dbg", io_lib:format("~p~n", [{Dir, Status}])), + {ok, State#{exec_state => ES#{dirs => Statuses#{Dir => Status}}}}. + + +%%%%%%%%%%%%%%%%%%%% +%%% ARGS HELPERS %%% +%%%%%%%%%%%%%%%%%%%% +default_dirs(#{local_node := Node}) -> + try config(Node) of + {config, _Path, Config} -> + #{<<"dirs">> := DirMap} = Config, + maps:keys(DirMap) + catch + _E:_R -> [] + end. + +default_peers(State = #{local_node := Node}) -> + DirList = maps:get(dir_list, State, []), + try config(Node) of + {config, _Path, Config} -> + #{<<"peers">> := PeerMap} = Config, + Needed = ordsets:from_list(DirList), + Peers = [Peer + || Peer <- maps:keys(PeerMap), + Dirs <- [maps:get(<<"sync">>, maps:get(Peer, PeerMap))], + ordsets:is_subset(Needed, ordsets:from_list(Dirs))], + %% Flatten into a string, since peer data espects a string. + unicode:characters_to_binary(lists:join(", ", Peers)) + catch + _E:_R -> [] + end. + +check_connect(State, Node) -> + case revault_curses:check_connect(State, Node) of + ok -> + case revault_node(Node) of + ok -> ok; + _ -> {error, partial_success} + end; + Error -> + Error + end. + +check_dirs(#{local_node := Node}, Dirs) -> + try config(Node) of + {config, _Path, Config} -> + #{<<"dirs">> := DirMap} = Config, + ValidDirs = maps:keys(DirMap), + case Dirs -- ValidDirs of + [] -> ok; + Others -> {error, {unknown_dirs, Others}} + end + catch + E:R -> {error, {E,R}} + end. 
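These helpers all read the same remote configuration, fetched over erpc from maestro_loader:current/0. Roughly, the map they expect looks like this (names and values made up for illustration; DirOpts stands for per-directory settings not inspected here):

    %% Assumed shape of the config map used above:
    #{<<"dirs">>  => #{<<"music">> => DirOpts, <<"docs">> => DirOpts},
      <<"peers">> => #{<<"vps">>   => #{<<"sync">> => [<<"music">>, <<"docs">>]}}}
    %% default_dirs/1 returns maps:keys of <<"dirs">>, check_dirs/2 rejects
    %% anything outside that set, and default_peers/1 keeps only peers whose
    %% <<"sync">> list covers every selected directory.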
+ +check_peer(State = #{local_node := Node}, Peer) -> + DirList = maps:get(dir_list, State, []), + try config(Node) of + {config, _Path, Config} -> + #{<<"peers">> := PeerMap} = Config, + Peers = [ValidPeer + || ValidPeer <- maps:keys(PeerMap)], + case lists:member(Peer, Peers) of + true -> + Needed = ordsets:from_list(DirList), + PeerDirs = maps:get(<<"sync">>, maps:get(Peer, PeerMap, #{}), []), + case ordsets:is_subset(Needed, ordsets:from_list(PeerDirs)) of + true -> ok; + false -> {error, {mismatching_dirs, Peer, Needed, PeerDirs}} + end; + false -> + {error, {unknown_peer, Peer, Peers}} + end + catch + E:R -> {error, {E,R}} + end. + +check_ignore(_, _) -> + ok. + +-spec revault_node(atom()) -> ok | {error, term()}. +revault_node(Node) -> + try erpc:call(Node, maestro_loader, status, []) of + current -> ok; + outdated -> ok; + last_valid -> ok; + _ -> {error, unknown_status} + catch + E:R -> {error, {rpc, {E,R}}} + end. + +config(Node) -> + {ok, Path, Config} = erpc:call(Node, maestro_loader, current, []), + {config, Path, Config}. + +%%%%%%%%%%%%%%%%%%%% +%%% EXEC HELPERS %%% +%%%%%%%%%%%%%%%%%%%% +%% Helper function to ensure exec state is properly initialized +ensure_exec_state(list, State) -> + case State of + #{exec_state := #{path := _, config := _, offset := _}} -> + State; + #{exec_args := #{node := Node}} -> + {ok, P, C} = erpc:call(Node, maestro_loader, current, []), + State#{exec_state => #{path => P, config => C, offset => {0,0}}} + end; +ensure_exec_state(scan, State) -> + case State of + #{exec_state := #{worker := _, dirs := _}} -> + State; + #{exec_args := Args} -> + #{node := Node, + dirs := Dirs} = Args, + %% TODO: replace with an alias + P = start_worker(self(), {scan, Node, Dirs}), + DirStatuses = maps:from_list([{Dir, pending} || Dir <- Dirs]), + State#{exec_state => #{worker => P, dirs => DirStatuses}} + end; +ensure_exec_state(sync, State) -> + case State of + #{exec_state := #{worker := _, peer := _, dirs := _}} -> + State; + #{exec_args := Args} -> + #{node := Node, + peer := P, + dirs := Dirs} = Args, + %% TODO: replace with an alias + W = start_worker(self(), {sync, Node, P, Dirs}), + DirStatuses = maps:from_list([{Dir, pending} || Dir <- Dirs]), + State#{exec_state => #{worker => W, peer => P, dirs => DirStatuses}} + end; +ensure_exec_state(status, State) -> + case State of + #{exec_state := #{worker := _, status := _}} -> + State; + #{exec_args := #{node := Node}} -> + %% TODO: replace with an alias + P = start_worker(self(), {status, Node}), + State#{exec_state => #{worker => P, status => undefined}} + end; +ensure_exec_state('generate-keys', State) -> + case State of + #{exec_state := #{worker := _, status := _}} -> + %% Do wrapping of the status line + State; + #{exec_args := Args} -> + #{path := Path, + certname := File} = Args, + %% TODO: replace with an alias + P = start_worker(self(), {generate_keys, Path, File}), + State#{exec_state => #{worker => P, status => "generating keys..."}} + end; +ensure_exec_state(seed, State) -> + case State of + #{exec_state := #{worker := _, dirs := _}} -> + %% Do wrapping of the status line + State; + #{exec_args := Args} -> + #{node := Node, + path := Path, + dirs := Dirs} = Args, + %% TODO: replace with an alias + P = start_worker(self(), {seed, Node, Path, Dirs}), + DirStatuses = maps:from_list([{Dir, pending} || Dir <- Dirs]), + State#{exec_state => #{worker => P, dirs => DirStatuses}} + end; +ensure_exec_state('remote-seed', State) -> + case State of + #{exec_state := #{worker := _, peer := _, dirs := _}} -> + %% 
Do wrapping of the status line + State; + #{exec_args := Args} -> + #{node := Node, + peer := P, + dirs := Dirs} = Args, + %% TODO: replace with an alias + W = start_worker(self(), {'remote-seed', Node, P, Dirs}), + DirStatuses = maps:from_list([{Dir, pending} || Dir <- Dirs]), + State#{exec_state => #{worker => W, peer => P, dirs => DirStatuses}} + end. + +%%%%%%%%%%%%%%%%%%%%% +%%% ASYNC WORKERS %%% +%%%%%%%%%%%%%%%%%%%%% +start_worker(ReplyTo, Call) -> + Parent = self(), + spawn_link(fun() -> worker(Parent, ReplyTo, Call) end). + +worker(Parent, ReplyTo, {scan, Node, Dirs}) -> + worker_scan(Parent, ReplyTo, Node, Dirs); +worker(Parent, ReplyTo, {sync, Node, Peer, Dirs}) -> + worker_sync(Parent, ReplyTo, Node, Peer, Dirs); +worker(Parent, ReplyTo, {status, Node}) -> + worker_status(Parent, ReplyTo, Node); +worker(Parent, ReplyTo, {generate_keys, Path, File}) -> + worker_generate_keys(Parent, ReplyTo, Path, File); +worker(Parent, ReplyTo, {seed, Node, Path, Dirs}) -> + worker_seed(Parent, ReplyTo, Node, Path, Dirs); +worker(Parent, ReplyTo, {'remote-seed', Node, Peer, Dirs}) -> + worker_remote_seed(Parent, ReplyTo, Node, Peer, Dirs). + +worker_scan(Parent, ReplyTo, Node, Dirs) -> + %% assume we are connected from arg validation time. + %% We have multiple directories, so scan them in parallel. + %% This requires setting up sub-workers, which incidentally lets us + %% also listen for interrupts from the parent. + process_flag(trap_exit, true), + ReqIds = lists:foldl(fun(Dir, Ids) -> + erpc:send_request(Node, + revault_dirmon_event, force_scan, [Dir, infinity], + Dir, Ids) + end, erpc:reqids_new(), Dirs), + worker_scan_loop(Parent, ReplyTo, Node, Dirs, ReqIds). + +worker_scan_loop(Parent, ReplyTo, Node, Dirs, ReqIds) -> + receive + {'EXIT', Parent, Reason} -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. + exit(Reason); + stop -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. + unlink(Parent), + exit(shutdown) + after 0 -> + case erpc:wait_response(ReqIds, ?MAX_VALIDATION_DELAY, true) of + no_request -> + revault_curses:send_event(ReplyTo, {revault, scan, done}), + exit(normal); + no_response -> + worker_scan_loop(Parent, ReplyTo, Node, Dirs, ReqIds); + {{response, Res}, Dir, NewIds} -> + revault_curses:send_event(ReplyTo, {revault, scan, {Dir, Res}}), + worker_scan_loop(Parent, ReplyTo, Node, Dirs, NewIds) + end + end. + +worker_sync(Parent, ReplyTo, Node, Peer, Dirs) -> + %% assume we are connected from arg validation time. + %% We have multiple directories, so sync them in parallel. + %% This requires setting up sub-workers, which incidentally lets us + %% also listen for interrupts from the parent. + process_flag(trap_exit, true), + ReqIds = lists:foldl(fun(Dir, Ids) -> + erpc:send_request(Node, + revault_dirmon_event, force_scan, [Dir, infinity], + {scan, Dir}, Ids) + end, erpc:reqids_new(), Dirs), + worker_sync_loop(Parent, ReplyTo, Node, Peer, Dirs, ReqIds). + +worker_sync_loop(Parent, ReplyTo, Node, Peer, Dirs, ReqIds) -> + receive + {'EXIT', Parent, Reason} -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. + exit(Reason); + stop -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. 
+ unlink(Parent), + exit(shutdown) + after 0 -> + case erpc:wait_response(ReqIds, ?MAX_VALIDATION_DELAY, true) of + no_request -> + revault_curses:send_event(ReplyTo, {revault, sync, done}), + exit(normal); + no_response -> + worker_sync_loop(Parent, ReplyTo, Node, Peer, Dirs, ReqIds); + {{response, Res}, {scan, Dir}, TmpIds} -> + Status = case Res of + ok -> scanned; + Other -> Other + end, + revault_curses:send_event(ReplyTo, {revault, sync, {Dir, Status}}), + NewIds = erpc:send_request( + Node, + revault_fsm, sync, [Dir, Peer], + {sync, Dir}, + TmpIds + ), + worker_sync_loop(Parent, ReplyTo, Node, Peer, Dirs, NewIds); + {{response, Res}, {sync, Dir}, NewIds} -> + Status = case Res of + ok -> synced; + Other -> Other + end, + revault_curses:send_event(ReplyTo, {revault, sync, {Dir, Status}}), + worker_sync_loop(Parent, ReplyTo, Node, Peer, Dirs, NewIds) + end + end. + +worker_status(Parent, ReplyTo, Node) -> + process_flag(trap_exit, true), + ReqIds = erpc:send_request(Node, + maestro_loader, status, [], + status, erpc:reqids_new()), + worker_status_loop(Parent, ReplyTo,ReqIds). + +worker_status_loop(Parent, ReplyTo, ReqIds) -> + receive + {'EXIT', Parent, Reason} -> + exit(Reason); + stop -> + unlink(Parent), + exit(shutdown) + after 0 -> + case erpc:wait_response(ReqIds, ?MAX_VALIDATION_DELAY, true) of + no_request -> + revault_curses:send_event(ReplyTo, {revault, status, done}), + exit(normal); + no_response -> + worker_status_loop(Parent, ReplyTo, ReqIds); + {{response, Res}, status, NewIds} -> + revault_curses:send_event(ReplyTo, {revault, status, {ok, Res}}), + worker_status_loop(Parent, ReplyTo, NewIds) + end + end. + + +worker_generate_keys(Parent, ReplyTo, Path, File) -> + Res = make_selfsigned_cert(unicode:characters_to_list(Path), + unicode:characters_to_list(File)), + %% we actually don't have a loop, everything is local + %% and has already be run, so we just wait for a shutdown signal. + revault_curses:send_event(ReplyTo, {revault, 'generate-keys', {ok, Res}}), + receive + {'EXIT', Parent, Reason} -> + exit(Reason); + stop -> + unlink(Parent), + exit(shutdown) + end. + +worker_seed(Parent, ReplyTo, Node, Path, Dirs) -> + %% assume we are connected from arg validation time. + %% We have multiple directories, so scan them in parallel. + %% This requires setting up sub-workers, which incidentally lets us + %% also listen for interrupts from the parent. + process_flag(trap_exit, true), + ReqIds = lists:foldl(fun(Dir, Ids) -> + erpc:send_request(Node, + revault_fsm, seed_fork, [Dir, Path], + Dir, Ids) + end, erpc:reqids_new(), Dirs), + worker_seed_loop(Parent, ReplyTo, Node, Dirs, ReqIds). + +worker_seed_loop(Parent, ReplyTo, Node, Dirs, ReqIds) -> + receive + {'EXIT', Parent, Reason} -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. + exit(Reason); + stop -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. + unlink(Parent), + exit(shutdown) + after 0 -> + case erpc:wait_response(ReqIds, ?MAX_VALIDATION_DELAY, true) of + no_request -> + revault_curses:send_event(ReplyTo, {revault, seed, done}), + exit(normal); + no_response -> + worker_seed_loop(Parent, ReplyTo, Node, Dirs, ReqIds); + {{response, Res}, Dir, NewIds} -> + revault_curses:send_event(ReplyTo, {revault, seed, {Dir, Res}}), + worker_seed_loop(Parent, ReplyTo, Node, Dirs, NewIds) + end + end. + +worker_remote_seed(Parent, ReplyTo, Node, Peer, Dirs) -> + %% assume we are connected from arg validation time. 
+ %% We have multiple directories, so scan them in parallel. + %% This requires setting up sub-workers, which incidentally lets us + %% also listen for interrupts from the parent. + process_flag(trap_exit, true), + ReqIds = lists:foldl(fun(Dir, Ids) -> + erpc:send_request(Node, + revault_fsm, id, [Dir, Peer], + Dir, Ids) + end, erpc:reqids_new(), Dirs), + worker_remote_seed_loop(Parent, ReplyTo, Node, Peer, Dirs, ReqIds). + +worker_remote_seed_loop(Parent, ReplyTo, Node, Peer, Dirs, ReqIds) -> + receive + {'EXIT', Parent, Reason} -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. + exit(Reason); + stop -> + %% clean up all the workers by being linked to them and dying + %% an unclean death. + unlink(Parent), + exit(shutdown) + after 0 -> + case erpc:wait_response(ReqIds, ?MAX_VALIDATION_DELAY, true) of + no_request -> + revault_curses:send_event(ReplyTo, {revault, 'remote-seed', done}), + exit(normal); + no_response -> + worker_remote_seed_loop(Parent, ReplyTo, Node, Peer, Dirs, ReqIds); + {{response, Res}, Dir, NewIds} -> + revault_curses:send_event(ReplyTo, {revault, 'remote-seed', {Dir, Res}}), + worker_remote_seed_loop(Parent, ReplyTo, Node, Peer, Dirs, NewIds) + end + end. + +%%%%%%%%%%%%%%%%%%% +%%% EXTRA UTILS %%% +%%%%%%%%%%%%%%%%%%% + +%% Copied from revault_tls +make_selfsigned_cert(Dir, CertName) -> + check_openssl_vsn(), + + Key = filename:join(Dir, CertName ++ ".key"), + Cert = filename:join(Dir, CertName ++ ".crt"), + ok = filelib:ensure_dir(Cert), + Cmd = io_lib:format( + "openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes " + "-keyout '~ts' -out '~ts' -subj '/CN=example.org' " + "-addext 'subjectAltName=DNS:example.org,DNS:www.example.org,IP:127.0.0.1'", + [Key, Cert] % TODO: escape quotes + ), + os:cmd(Cmd). + +check_openssl_vsn() -> + Vsn = os:cmd("openssl version"), + VsnMatch = "(Open|Libre)SSL ([0-9]+)\\.([0-9]+)\\.([0-9]+)", + case re:run(Vsn, VsnMatch, [{capture, all_but_first, list}]) of + {match, [Type, Major, Minor, Patch]} -> + try + check_openssl_vsn(Type, list_to_integer(Major), + list_to_integer(Minor), + list_to_integer(Patch)) + catch + error:bad_vsn -> + error({openssl_vsn, Vsn}) + end; + _ -> + error({openssl_vsn, Vsn}) + end. + +%% Using OpenSSL >= 1.1.1 or LibreSSL >= 3.1.0 +check_openssl_vsn("Libre", A, B, _) when A > 3; + A == 3, B >= 1 -> + ok; +check_openssl_vsn("Open", A, B, C) when A > 1; + A == 1, B > 1; + A == 1, B == 1, C >= 1 -> + ok; +check_openssl_vsn(_, _, _, _) -> + error(bad_vsn). diff --git a/cli/revault_cli/src/revault_curses.erl b/cli/revault_cli/src/revault_curses.erl new file mode 100644 index 0000000..82f6629 --- /dev/null +++ b/cli/revault_cli/src/revault_curses.erl @@ -0,0 +1,793 @@ +-module(revault_curses). +-include("revault_cli.hrl"). +-include_lib("cecho/include/cecho.hrl"). +-export([start_link/1, send_input/2, send_event/2]). +-export([init/1]). +-export([parse_list/2, check_connect/2]). +-export([clear_status/1, set_status/2]). + +-type menu_key() :: atom(). +-type val_type() :: {type_name(), convertor(), validator()}. +-type type_name() :: node | string | list. +-type convertor() :: regex() | convertor_fun(). +-type regex() :: string(). +-type convertor_fun() :: fun((string(), state()) -> {ok, term(), state()} | {error, term()}). +-type validator() :: fun((state(), term()) -> ok | {error, term()}). +-type render_mode() :: raw | clip | {clip, pos()} | wrap. +-type pos() :: {y(), x()}. +-type y() :: non_neg_integer(). +-type x() :: non_neg_integer(). 
+-type max_lines() :: pos_integer(). +-type max_cols() :: pos_integer(). +-type line() :: string(). +-type lines() :: [line()]. +-type arg() :: #{name := menu_key(), + label := string(), + help := string(), + default := term(), + type := val_type()}. +-type exec_arg() :: #{type_name() := term()}. +%% TODO: split internal state from callback state +-type nstate() :: #{mode := menu | action | exec, + menu := menu_key() | undefined, + hover_menu := menu_key(), + menu_map => #{menu_key() => pos()}, + menu_coord_map => #{pos() => menu_key()}, + action_args := [[arg(), ...], ...], + state := state(), + action_coords => {pos(), pos()}, + exec_coords => {pos(), pos()}, + status_coords => {pos(), pos()}, + status_init_pos => pos(), + status_message => iodata()}. +-type state() :: term(). + +-callback menu_order() -> [menu_key(), ...]. +-callback menu_help(menu_key()) -> string(). +-callback args() -> #{menu_key() := [arg(), ...]}. +-callback init() -> state(). +-callback render_exec(menu_key(), exec_arg(), max_lines(), max_cols(), state()) -> + {render_mode(), state(), lines()}. +-callback handle_exec(input, integer(), menu_key(), state()) -> {ok | done, state()}; + (event, term(), menu_key(), state()) -> {ok | done, state()}. + +%%%%%%%%%%%%%%%%% +%%% LIFECYCLE %%% +%%%%%%%%%%%%%%%%% +start_link(Module) -> + supervisor_bridge:start_link(?MODULE, Module). + +send_input(Pid, Char) -> + Pid ! {input, Char}. + +send_event(Pid, Event) -> + Pid ! {event, Event}. + +init(Module) -> + Pid = spawn_link(fun() -> main(Module) end), + {ok, Pid, Module}. + +-spec main(module()) -> no_return(). +main(Module) -> + setup(), + State = state(Module, #{}), + cecho:refresh(), + Pid = self(), + spawn_link(fun F() -> + Pid ! {input, cecho:getch()}, + F() + end), + loop(Module, select_menu(State, maps:get(hover_menu, State))). + +setup() -> + logger:remove_handler(default), + %% go in character-by-charcter mode + cecho:cbreak(), + %% don't show output + cecho:noecho(), + %% give keypad access + cecho:keypad(?ceSTDSCR, true), + %% don't wait on ESC keys + cecho:set_escdelay(25), + %% initial cursor position + cecho:move(1,1), + ok. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%%% STATE AND DISPLAY MANAGEMENT %%% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +state(Mod, Old) -> + Default = #{ + mode => menu, + hover_menu => hd(Mod:menu_order()), + menu => undefined, + state => Mod:init() + }, + Tmp0 = maps:merge(Default, Old), + %% Refresh the layout to show proper coordinates + Tmp1 = show_menu(Mod, Tmp0), + Tmp2 = show_action(Mod, Tmp1), + Tmp3 = show_exec(Mod, Tmp2), + Tmp4 = end_table(Mod, Tmp3), + cecho:refresh(), + Tmp4. 
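To make the behaviour contract above concrete, here is a hypothetical minimal callback module; the module name, menu entry, and argument are invented for illustration (revault_cli_mod is the real implementation):

    -module(demo_tui).
    -behaviour(revault_curses).
    -export([menu_order/0, menu_help/1, args/0, init/0, render_exec/5, handle_exec/4]).

    menu_order() -> [greet].
    menu_help(greet) -> "Print a greeting".
    args() ->
        #{greet => [#{name => who, label => "Name",
                      type => {string, "[^\\s]+", fun(_St, _Val) -> ok end},
                      default => "world",
                      help => "Who to greet"}]}.
    init() -> #{}.
    render_exec(greet, #{who := Who}, _MaxLines, _MaxCols, State) ->
        {wrap, State, [["Hello, ", Who, "!"]]}.
    %% any keypress returns to the action form; async events are ignored
    handle_exec(input, _Key, greet, State) -> {done, State};
    handle_exec(event, _Event, greet, State) -> {ok, State}.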
+ +show_menu(Mod, State) -> + #{menu := Chosen, hover_menu := Hover, mode := Mode} = State, + MenuOrder = Mod:menu_order(), + StringMenu = [atom_to_list(X) || X <- MenuOrder], + TopRow = ["╔═", + lists:join("═╤═", [lists:duplicate(string:length(X), "═") || X <- StringMenu]), + "═╗"], + MenuRow = ["║ ", lists:join(" │ ", [format_menu(X, Chosen) || X <- StringMenu]), " ║"], + BottomRow = ["╟─", + lists:join("─┴─", [lists:duplicate(string:length(X), "─") || X <- StringMenu]), + "─╢"], + {_, MenuMap, CoordMap} = lists:foldl( + fun(X, {N,M,C}) -> + XStr = atom_to_list(X), + {N+length(XStr)+3, + M#{X => {1,N}}, + C#{{1,N} => X}} + end, + {2, #{}, #{}}, + MenuOrder + ), + Width = string:length(MenuRow)-1, + str(0, 0, TopRow), + str(1, 0, MenuRow), + str(2, 0, BottomRow), + NewState = State#{menu_coords => {{0,0}, {2,Width}}, + menu_map => MenuMap, + menu_coord_map => CoordMap, + menu_init_pos => {1,2}}, + %% set cursor if in menu mode and show help for hovered item + case {Mode, Hover} of + {menu, Hover} -> + MoveTo = menu_pos(NewState, Hover), + mv(MoveTo), + set_status(NewState, Mod:menu_help(Hover)); + _ -> + NewState + end. + +show_action(_Mod, State = #{mode := Mode, + menu_coords := {_, End}}) when Mode == menu -> + State#{action_coords => {End, End}}; +show_action(Mod, State = #{mode := Mode, menu := Action, + menu_coords := {_, {MenuY, MaxX}}}) when Mode == action; + Mode == exec -> + MinY = MenuY, + %% TODO: clip lines that are too long + {ArgState, Args} = arg_output(State, Action, + maps:get(action_args, State, + maps:get(Action, Mod:args(), []))), + MaxY = lists:foldl(fun(Arg, Y) -> + #{line := {Label, Val}} = Arg, + Line = [Label, ": ", Val], + str(Y+1, 0, ["║ ", string:pad(Line, MaxX-3), " ║"]), + Y+1 + end, MinY, Args), + %% Ranges + {_, RevRanges} = lists:foldl(fun(Arg, {Y, Acc}) -> + #{line := {Label, _}} = Arg, + {Y+1, + [Arg#{range => {{Y+1, string:length(Label)+2+2}, {Y+1, MaxX-2}}} | Acc]} + end, {MinY, []}, Args), + Ranges = lists:reverse(RevRanges), + %% Set position + CurrentY = case cecho:getyx() of + {CurY, CurX} when CurY >= MinY, CurY =< MaxY, + CurX >= 2, CurX =< MaxX -> + CurY; + _ -> + %% Outside the box, move it to a known location + case Ranges of + [] -> + mv({MinY, 2}), + MinY; + [#{range := {First, _Last}}|_] -> + mv(First), + element(1, First) + end + end, + {ExtraLines, TmpState} = case Mode of + action -> + NthArg = CurrentY - MinY, + #{help := Help} = lists:nth(NthArg, Args), + {0, maybe_set_status(ArgState, Help)}; % this is the last section + _ -> + %% terminate table section + BottomRow = ["╟", lists:duplicate(MaxX-1, "─"), "╢"], + str(MaxY+1, 0, BottomRow), + {1, ArgState} + end, + TmpState#{action_coords => {{MinY,0}, {MaxY+ExtraLines,MaxX}}, + action_args => Ranges, + action_init_pos => {MinY, 2}}. 
+ +show_exec(_Mod, State=#{mode := Mode, + action_coords := {_, {Y,X}}}) when Mode =/= exec -> + State#{exec_coords => {{Y,0},{Y,X}}}; +show_exec(Mod, State=#{mode := exec, + menu := Action, + action_coords := {_, {ActionY,MaxX}}, + action_args := Args, + state := ModState}) -> + MinY = ActionY, + %% expect line-based output in a list + MaxLines = ?EXEC_LINES, + MaxCols = MaxX-4, + MaxY = MinY + MaxLines, + ModArgs = maps:from_list([{N, V} || #{name := N, val := V} <- Args]), + {RenderMode, NewModState, Lines} = Mod:render_exec(Action, ModArgs, MaxLines, MaxCols, ModState), + case RenderMode of + raw -> + render_raw(Lines, {MinY,0}, {MaxY, MaxX}); + wrap -> + render_raw(wrap(Lines, MaxLines, MaxCols), + {MinY,0}, {MaxY, MaxX}); + clip -> + render_raw(clip(Lines, {0,0}, MaxLines, MaxCols), + {MinY,0}, {MaxY, MaxX}); + {clip, Offsets} -> + render_raw(clip(Lines, Offsets, MaxLines, MaxCols), + {MinY,0}, {MaxY, MaxX}) + end, + State#{exec_coords => {{MinY,0},{MaxY,MaxX}}, + state => NewModState}. + +end_table(_Mod, State=#{exec_coords := {_, {Y,X}}}) -> + str(Y+1, 0, ["╚", lists:duplicate(X-1, "═") ,"╝"]), + str(Y+2, 0, " ╰─ "), + %% Clear status area and render status if present + {StatusY, StatusX} = {Y+2, 5}, + %% Clear the entire status line + {MaxY,MaxX} = cecho:getmaxyx(), + [str(LY, StatusX, lists:duplicate(MaxX-StatusX, $\s)) || LY <- lists:seq(StatusY,MaxY)], + %% Render status message if present + case State of + #{status_message := StatusMsg} -> + str(StatusY, StatusX, StatusMsg); + _ -> + ok + end, + State#{status_coords => {{Y+1,0}, {StatusY,StatusX}}, + status_init_pos => {StatusY,StatusX}}. + +render_raw(Lines, {MinY, MinX}, {MaxY, MaxX}) -> + LinesY = lists:foldl(fun(Line, Y) -> + str(Y+1, MinX, ["║ ", string:pad(Line, MaxX-MinX-3), " ║"]), + Y+1 + end, MinY, Lines), + [str(LineY, MinX, ["║", lists:duplicate(MaxX-MinX-1, " "), "║"]) + || LineY <- lists:seq(LinesY+1, MaxY)]. + +select_menu(State, Menu) -> + mv(menu_pos(State, Menu)), + State#{hover_menu => Menu}. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%%% STATE AND DISPLAY HELPERS %%% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +format_menu(X, Chosen) -> + case atom_to_list(Chosen) of + X -> string:uppercase(X); + _ -> X + end. + +menu_at(#{menu_coord_map := CoordMap}, Coord) -> + #{Coord := Menu} = CoordMap, + Menu. + +menu_pos(#{menu_map := M}, Menu) -> + #{Menu := Coord} = M, + Coord. + +enter_menu(State, Menu) -> + State#{mode => action, + menu => Menu}. + +set_status(State, Str) -> + State#{status_message => Str}. + +maybe_set_status(State=#{status_message := _}, _) -> State; +maybe_set_status(State, Str) -> set_status(State, Str). + +clear_status(State) -> + maps:without([status_message], State). + +wrap(Str, Lines, Width) -> + wrap(Str, 0, Width, 0, Lines, [[]]). + +wrap(_Str, Width, Width, Lines, Lines, Acc) -> + lists:reverse(Acc); +wrap(Str, Width, Width, Ln, Lines, [L|Acc]) -> + wrap(Str, 0, Width, Ln+1, Lines, [[],lists:reverse(L)|Acc]); +wrap(Str, W, Width, Ln, Lines, [L|Acc]) -> + case string:next_grapheme(Str) of + [Brk|Rest] when Brk == $\n; Brk == "\r\n" -> + wrap(Rest, 0, Width, Ln+1, Lines, [[], lists:reverse(L)|Acc]); + [C|Rest] -> + wrap(Rest, W+1, Width, Ln, Lines, [[C|L]|Acc]); + [] -> + lists:reverse([lists:reverse(L)|Acc]) + end. + +clip(Lines, {OffY, OffX}, MaxLines, MaxCols) -> + [string:slice(S, OffX, MaxCols) + || S <- lists:sublist(Lines, OffY+1, MaxLines)]. 
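A small worked example of the clipping helper above, with arbitrary values: a {1,2} offset into a 2-line, 3-column window keeps the last two lines and columns 3 to 5 of each.

    clip(["abcdef", "ghijkl", "mnopqr"], {1, 2}, 2, 3).
    %% => ["ijk", "opq"]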
+ +%%%%%%%%%%%%%%%%%%%% +%%% ARG HANDLING %%% +%%%%%%%%%%%%%%%%%%%% + +parse_list(String, State) -> + try + %% drop surrounding whitespace and split on commas + S = string:trim(String, both), + L = re:split(S, "[\\s]*,[\\s]*", [{return, binary}, unicode]), + %% ignore empty results (<<>>) in returned value + {ok, [B || B <- L], State} + catch + _:_ -> {error, invalid, State} + end. + +check_connect(_State, Node) -> + case connect_nonblocking(Node) of + ok -> ok; + timeout -> {error, timeout}; + _ -> {error, connection_failure} + end. + +connect_nonblocking(Node) -> + timeout_call(?MAX_VALIDATION_DELAY, fun() -> connect(Node) end). + +connect(Node) -> + case net_kernel:connect_node(Node) of + ignored -> {error, no_dist}; + false -> {error, connection_failed}; + true -> ok + end. + +arg_output(State, Action, Args) -> + arg_output(State, Action, Args, []). + +arg_output(State, _, [], Acc) -> + {State, lists:reverse(Acc)}; +arg_output(State, Action, [Arg|Args], Acc) when not is_map_key(val, Arg) -> + {NewState, NewArg} = arg_init(State, Action, Arg), + arg_output(NewState, Action, [NewArg|Args], Acc); +arg_output(State, Action, [Arg=#{unparsed := Unparsed}|Args], Acc) -> + %% refresh data of pre-parsed elements. + %% with the new value in place, apply the transformation to its internal + %% format for further commands + Ret = parse_arg(State, Action, Arg, Unparsed), + %% Store it all! + case Ret of + {ok, Val, NewState} -> + arg_output(NewState, Action, + [maps:without([line, unparsed], Arg#{val => Val}) | Args], Acc); + {error, _Reason, NewState} -> + %% TODO: update status? + arg_output(NewState, Action, [maps:without([unparsed], Arg)|Args], Acc) + end; +arg_output(State, Action, [Arg=#{line := _} | Args], Acc) -> + arg_output(State, Action, Args, [Arg|Acc]); +arg_output(State, Action, [#{type := {node, _, Validator}, label := Label, val := NodeVal}=Arg|Args], Acc) -> + Status = case Validator(State, NodeVal) of + ok -> "ok"; + {error, partial_success} -> "?!"; + {error, timeout} -> "??"; + _ -> "!!" + end, + Line = {[Label, " (", Status, ")"], atom_to_list(NodeVal)}, + arg_output(State#{local_node => NodeVal}, Action, + [Arg#{line => Line}|Args], Acc); +arg_output(State, Action, [#{name := dirs, type := {list, _, _}, label := Label, val := DirList}=Arg|Args], Acc) -> + Line = {Label, lists:join(", ", DirList)}, + arg_output(State#{dir_list => DirList}, Action, + [Arg#{line => Line}|Args], Acc); +arg_output(State, Action, [#{type := {list, _, _}, label := Label, val := List}=Arg|Args], Acc) -> + Line = {Label, lists:join(", ", List)}, + arg_output(State, Action, [Arg#{line => Line}|Args], Acc); +arg_output(State, Action, [#{type := {string, _, _}, label := Label, val := Val}=Arg|Args], Acc) -> + Line = {Label, Val}, + arg_output(State, Action, [Arg#{line => Line}|Args], Acc); +arg_output(State, Action, [#{type := Unsupported}=Arg|Args], Acc) -> + #{label := Label} = Arg, + Line = {io_lib:format("[Unsupported] ~ts", [Label]), + io_lib:format("~p", [Unsupported])}, + arg_output(State, Action, [Arg#{line => Line}|Args], Acc). 
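For reference, the two convertor paths behave like this on typical input (parse_regex/3 is internal and shown only to illustrate; State passes through untouched):

    parse_list("music, docs", State).
    %% => {ok, [<<"music">>, <<"docs">>], State}
    parse_regex("[\\w.-]+@[\\w.-]+", "revault@myhost", State).
    %% => {ok, <<"revault@myhost">>, State}
    %% node-typed values are additionally run through binary_to_atom/1
    %% in parse_with_fun/4.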
+ + +arg_init(State, _Action, Arg = #{type := {node, _, _}, default := Default}) -> + Val = maps:get(local_node, State, Default), + {State#{local_node => Val}, Arg#{val => Val}}; +arg_init(State, _Action, Arg = #{name := dirs, type := {list, _, _}, default := F}) -> + Default = F(State), + {State#{dir_list => Default}, Arg#{val => Default}}; +arg_init(State, _Action, Arg = #{type := {list, _, _}, default := F}) -> + {State, Arg#{val => F(State)}}; +arg_init(State, _Action, Arg = #{type := {string, _, _}, default := X}) -> + Default = if is_function(X, 1) -> X(State); + is_function(X) -> error(bad_arity); + true -> X + end, + {State, Arg#{val => Default}}; +arg_init(State, _Action, Arg = #{type := Unsupported}) -> + {State, Arg#{val => {error, Unsupported}}}. + +parse_arg(State, _Action, #{type := TypeInfo}, Unparsed) -> + case TypeInfo of + {T, F, _Validation} when is_function(F) -> + parse_with_fun(T, F, Unparsed, State); + {T, Regex, _Validation} when is_list(Regex); is_binary(Regex) -> + F = fun(String, St) -> parse_regex(Regex, String, St) end, + parse_with_fun(T, F, Unparsed, State) + end. + +parse_regex(Re, String, State) -> + case re:run(String, Re, [{capture, first, binary}, unicode]) of + {match, [Str]} -> {ok, Str, State}; + nomatch -> {error, invalid, State} + end. + +parse_with_fun(node, F, Str, State) -> + maybe + {ok, NewStr, NewState} ?= F(Str, State), + Node = binary_to_atom(NewStr), + {ok, Node, NewState} + end; +parse_with_fun(_Type, F, Str, State) -> + F(Str, State). + +%% Internal use functions using the above material +%% but used to check all arguments fit and can be converted +%% properly. +validate_args(State, Action, Args) -> + %% Validate all the arguments + {Errors, Status} = lists:foldl( + fun(Arg = #{line := {_Label, Str}}, {Acc, S}) -> + case parse_arg(State, Action, Arg, Str) of + {ok, _, _} -> {Acc, S}; + {error, Reason, _} -> {[{Arg, Reason}|Acc], error} + end + end, + {[], ok}, + Args + ), + case Status of + error -> + {error, Errors}; + ok -> + {_Valid, Invalid} = convert_args(State, Args), + case Invalid of + [] -> + ok; + Invalid -> + {error, Invalid} + end + end. + +convert_args(State, Args) -> + lists:foldl( + fun(Arg = #{val := Val, type := {_,_,F}}, {V,I}) -> + case F(State, Val) of + ok -> {[Arg|V], I}; + {error, Reason} -> {V, [{Arg, Reason}]} + end + end, + {[],[]}, + Args + ). + +%%%%%%%%%%%%%%%%%%%%% +%%% MAIN TUI LOOP %%% +%%%%%%%%%%%%%%%%%%%%% +-spec loop(module(), nstate()) -> no_return(). +loop(Mod, OldState) -> + State = #{mode := Mode} = state(Mod, OldState), + case Mode of + menu -> + receive + {input, Input} -> + {ok, NewState} = handle_menu(Mod, {input, Input}, State), + loop(Mod, NewState) + end; + action -> + #{menu := Action} = State, + receive + {input, Input} -> + {ok, NewState} = handle_action({input, Input}, Action, clear_status(State)), + loop(Mod, NewState) + end; + exec -> + #{menu := Action} = State, + Msg = receive + {input, Input} -> {input, Input}; + {event, Other} -> {event, Other} + end, + case handle_exec(Mod, Msg, Action, State) of + {ok, NewState} -> + loop(Mod, NewState); + {done, TmpState} -> + %% clear up the arg list and status messages + NewState = revault_curses:clear_status(TmpState#{mode => action}), + cecho:erase(), + loop(Mod, NewState) + end + end. 
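The exec screens are driven entirely by messages into this loop: keystrokes arrive through the reader process spawned in main/1, and background workers report progress with send_event/2. For example, assuming Tui is the loop's pid:

    revault_curses:send_event(Tui, {revault, scan, {<<"music">>, ok}}),
    %% loop/2, in exec mode, hands this to the callback module as
    %% handle_exec(event, {revault, scan, {<<"music">>, ok}}, scan, ...),
    %% and the exec pane is re-rendered on the next pass through state/2.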
+
+
+handle_menu(Mod, {input, Key}, TmpState) ->
+    Pos = cecho:getyx(),
+    case Key of
+        ?ceKEY_RIGHT ->
+            NewMenu = next(menu_at(TmpState, Pos), Mod:menu_order()),
+            State = select_menu(TmpState, NewMenu),
+            {ok, State};
+        ?ceKEY_LEFT ->
+            NewMenu = prev(menu_at(TmpState, Pos), Mod:menu_order()),
+            State = select_menu(TmpState, NewMenu),
+            mv_by({0,0}),
+            {ok, State};
+        $\n ->
+            Menu = menu_at(TmpState, Pos),
+            State = clear_status(enter_menu(TmpState, Menu)),
+            {ok, State};
+        UnknownChar ->
+            State = set_status(
+                TmpState,
+                io_lib:format("Unknown menu character: ~w", [UnknownChar])
+            ),
+            {ok, State}
+    end.
+
+%% A little TUI editor for parameters.
+handle_action({input, ?ceKEY_ESC}, _Action, TmpState = #{menu := _Menu}) ->
+    %% exit the menu
+    TmpState2 = TmpState#{mode => menu, menu => undefined},
+    %% clear up the arg list
+    %% TODO: cache by action?
+    State = maps:without([action_args], TmpState2),
+    cecho:erase(),
+    {ok, State};
+handle_action({input, ?ceKEY_DOWN}, _Action, State = #{action_args := Args}) ->
+    {Y,_} = cecho:getyx(),
+    After = lists:dropwhile(fun(#{range := {_, {MaxY,_}}}) -> Y >= MaxY end, Args),
+    case After of
+        [#{range := {Pos, _}}|_] -> mv(Pos);
+        _ -> ok
+    end,
+    {ok, State};
+handle_action({input, ?ceKEY_UP}, _Action, State = #{action_args := Args}) ->
+    {Y,_} = cecho:getyx(),
+    Before = lists:takewhile(fun(#{range := {{MinY,_}, _}}) -> Y > MinY end, Args),
+    case lists:reverse(Before) of
+        [#{range := {Pos, _}}|_] -> mv(Pos);
+        _ -> ok
+    end,
+    {ok, State};
+handle_action({input, ?ceKEY_LEFT}, _Action, State = #{action_args := Args}) ->
+    {Y,X} = cecho:getyx(),
+    {value, #{range := {{_,MinX},_}}} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    X > MinX andalso mv_by({0,-1}),
+    {ok, State};
+handle_action({input, ?ceKEY_RIGHT}, _Action, State = #{action_args := Args}) ->
+    {Y,X} = cecho:getyx(),
+    {value, #{range := {{_,MinX}, {_,MaxX}},
+              line := {_, Str}}} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    X < MaxX andalso X < MinX+string:length(Str) andalso mv_by({0,1}),
+    {ok, State};
+handle_action({input, ?KEY_CTRLA}, _Action, State = #{action_args := Args}) ->
+    {Y,_} = cecho:getyx(),
+    {value, #{range := {{_,MinX},_}}} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    mv({Y, MinX}),
+    {ok, State};
+handle_action({input, ?KEY_CTRLE}, _Action, State = #{action_args := Args}) ->
+    {Y,_} = cecho:getyx(),
+    {value, #{range := {{_,MinX},_},
+              line := {_, Str}}} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    mv({Y,MinX+string:length(Str)}),
+    {ok, State};
+handle_action({input, ?KEY_BACKSPACE}, _Action, State = #{action_args := Args}) ->
+    {Y,X} = cecho:getyx(),
+    {value, Arg} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    #{range := {{_,MinX},{_,MaxX}},
+      line := {Label,Str}} = Arg,
+    NewStr = case X > MinX of
+        true -> % can go back
+            Pre = string:slice(Str, 0, (X-MinX)-1),
+            Post = string:slice(Str, X-MinX),
+            Edited = [Pre,Post],
+            str(Y, MinX, string:pad("", MaxX-MinX)),
+            str(Y, MinX, Edited),
+            mv_by({0,-1}),
+            Edited;
+        false ->
+            Str
+    end,
+    NewArgs = replace(Args, Arg, Arg#{line => {Label,NewStr},
+                                      unparsed => NewStr}),
+    {ok, State#{action_args=>NewArgs}};
+handle_action({input, ?ceKEY_DEL}, _Action, State = #{action_args := Args}) ->
+    {Y,X} = cecho:getyx(),
+    {value, Arg} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    #{range := {{_,MinX},{_,MaxX}},
+      line := {Label,Str}} = Arg,
+    NewStr = case X >= MinX of
+        true -> % can delete the character under the cursor
+            Pre = string:slice(Str, 0, X-MinX),
+            Post = string:slice(Str, (X-MinX)+1),
+            Edited = [Pre,Post],
+            str(Y, MinX, string:pad("", MaxX-MinX)),
+            str(Y, MinX, Edited),
+            Edited;
+        false ->
+            Str
+    end,
+    NewArgs = replace(Args, Arg, Arg#{line => {Label,NewStr},
+                                      unparsed => NewStr}),
+    {ok, State#{action_args=>NewArgs}};
+handle_action({input, ?KEY_CTRLD}, _Action, State = #{action_args := Args}) ->
+    {Y,X} = cecho:getyx(),
+    {value, Arg} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    #{range := {{_,MinX},{_,MaxX}},
+      line := {Label,Str}} = Arg,
+    NewStr = case X >= MinX of
+        true -> % can kill from the cursor to the end of the field
+            Edited = string:slice(Str, 0, X-MinX),
+            str(Y, MinX, string:pad("", MaxX-MinX)),
+            str(Y, MinX, Edited),
+            Edited;
+        false ->
+            Str
+    end,
+    NewArgs = replace(Args, Arg, Arg#{line => {Label,NewStr},
+                                      unparsed => NewStr}),
+    {ok, State#{action_args=>NewArgs}};
+handle_action({input, Char}, _Action, State = #{action_args := Args}) when ?KEY_TEXT_RANGE(Char) ->
+    %% text input!
+    {Y,X} = cecho:getyx(),
+    {value, Arg} = lists:search(
+        fun(#{range := {{MinY,_}, {MaxY,_}}}) -> Y >= MinY andalso Y =< MaxY end,
+        Args
+    ),
+    #{range := {{_,MinX},{_,MaxX}},
+      line := {Label,Str}} = Arg,
+    NewStr = case X < MaxX andalso X >= MinX
+                  andalso X =< MinX+string:length(Str) of
+        true ->
+            Pre = string:slice(Str, 0, X-MinX),
+            Post = string:slice(Str, (X-MinX)),
+            Edited = string:slice([Pre, Char, Post], 0, MaxX-MinX),
+            str(Y, MinX, string:pad("", MaxX-MinX)),
+            str(Y, MinX, Edited),
+            mv_by({0,1}),
+            Edited;
+        false ->
+            Str
+    end,
+    NewArgs = replace(Args, Arg, Arg#{line => {Label,NewStr},
+                                      unparsed => NewStr}),
+    {ok, State#{action_args=>NewArgs}};
+handle_action({input, ?KEY_ENTER}, Action, TmpState = #{action_args := Args}) ->
+    %% revalidate all values in all ranges; if any error
+    %% is found, show it in the status line.
+    %% if none are found, extract as clean options, and
+    %% switch to execution mode.
+    case validate_args(TmpState, Action, Args) of
+        ok ->
+            State = set_status(TmpState, "ok."),
+            {ok, State#{mode => exec}};
+        {error, [{#{line := {Label, _}}, Reason}|_]} ->
+            State = set_status(
+                TmpState,
+                io_lib:format("Validation issue in ~ts: ~p", [Label, Reason])
+            ),
+            {ok, State}
+    end;
+handle_action({input, UnknownChar}, Action, TmpState) ->
+    State = set_status(
+        TmpState,
+        io_lib:format("Unknown character in ~p: ~w", [Action, UnknownChar])
+    ),
+    {ok, State}.
+
+handle_exec(Mod, {Type, Msg}, Action, State=#{state := ModState}) ->
+    try
+        {Key, NewModState} = Mod:handle_exec(Type, Msg, Action, ModState),
+        {Key, State#{state => NewModState}}
+    catch
+        error:function_clause:Stack ->
+            case Stack of
+                [{Mod, handle_exec, _, _}|_] ->
+                    Status = case {Type, Msg} of
+                        {input, Char} ->
+                            io_lib:format("Unknown character in ~p: ~w", [Action, Char]);
+                        {event, _Event} ->
+                            io_lib:format("Got unexpected event in ~p: ~p", [Action, Msg])
+                    end,
+                    {ok, set_status(State, Status)};
+                _ ->
+                    erlang:raise(error, function_clause, Stack)
+            end
+    end.
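+
+%% Editorial note: the splice performed by the text-input clause above,
+%% traced on concrete values (the numbers are made up). With a field
+%% spanning MinX = 10 to MaxX = 20, the cursor at X = 13, Str = "abcdef",
+%% and Char = $Z:
+%%
+%%   Pre    = string:slice("abcdef", 0, 3),            %% "abc"
+%%   Post   = string:slice("abcdef", 3),               %% "def"
+%%   Edited = string:slice(["abc", $Z, "def"], 0, 10), %% chardata for "abcZdef"
+%%
+%% so the typed character lands at the cursor offset within the field and
+%% the result is clamped to the field width (MaxX - MinX) before being
+%% redrawn with str/3 and stored back as the arg's unparsed text.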
+
+%%%%%%%%%%%%%%%%%%%%%%%
+%%% NCURSES HELPERS %%%
+%%%%%%%%%%%%%%%%%%%%%%%
+str(Y, X, Str) ->
+    {OrigY, OrigX} = cecho:getyx(),
+    %% cecho expects a list of bytes, so we have to convert the chardata first
+    cecho:mvaddstr(Y, X, binary_to_list(unicode:characters_to_binary(Str))),
+    cecho:move(OrigY, OrigX).
+
+mv_by({OffsetY, OffsetX}) ->
+    {CY, CX} = cecho:getyx(),
+    cecho:move(CY+OffsetY, CX+OffsetX).
+
+mv({Y,X}) ->
+    cecho:move(Y, X).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% FUNCTIONAL HELPERS %%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%
+prev(K, L) -> next(K, lists:reverse(L)).
+
+next(_, [N]) -> N;
+next(K, [K,N|_]) -> N;
+next(K, [_|T]) -> next(K, T).
+
+replace([H|T], H, R) -> [R|T];
+replace([H|T], S, R) -> [H|replace(T, S, R)].
+
+%% Small helper that defers a potentially long blocking call to another
+%% process, so the validation step has a ceiling on how long it takes
+%% before returning a value.
+%% If the call exceeds the timeout, the worker process is killed brutally.
+timeout_call(Timeout, Fun) ->
+    P = self(),
+    R = make_ref(),
+    {Pid, Ref} = spawn_monitor(fun() ->
+        Res = Fun(),
+        P ! {R, Res}
+    end),
+    receive
+        {R, Res} ->
+            erlang:demonitor(Ref, [flush]),
+            Res;
+        {'DOWN', Ref, _, _, _} ->
+            {error, connection_attempt_failed}
+    after Timeout ->
+        erlang:exit(Pid, kill),
+        receive
+            {'DOWN', Ref, _, _, _} ->
+                timeout;
+            {R, Res} ->
+                erlang:demonitor(Ref, [flush]),
+                Res
+        end
+    end.
diff --git a/config/cli.args.src b/config/cli.args.src
new file mode 100644
index 0000000..05028b0
--- /dev/null
+++ b/config/cli.args.src
@@ -0,0 +1,3 @@
+-noinput
+-name ncurses_cli
+-setcookie revault_cookie
diff --git a/config/cli.sys.config b/config/cli.sys.config
new file mode 100644
index 0000000..57afcca
--- /dev/null
+++ b/config/cli.sys.config
@@ -0,0 +1 @@
+[].
diff --git a/rebar.config b/rebar.config
index 2f662c4..d6f8693 100644
--- a/rebar.config
+++ b/rebar.config
@@ -74,6 +74,14 @@
      {system_libs, false}
     ]}
   ]},
+  {ncurses, [
+    {project_app_dirs, ["apps/*", "lib/*", "cli/*", "."]},
+    {deps, [{cecho, {git, "https://github.com/ferd/cecho.git", {branch, "master"}}}]},
+    {relx, [{release, {cli, "0.1.0"},
+             [revault_cli, cecho]},
+            {sys_config, "./config/cli.sys.config"},
+            {vm_args_src, "./config/cli.args.src"}]}
+  ]},
   {debug, [
     %% generate debug traces in gen_* processes
     {erl_opts, [{d, 'TEST'}]}