From 5d7e18db75d42c43661e33ee12ba3099b47d5acc Mon Sep 17 00:00:00 2001 From: Luca Succi Date: Thu, 19 Jan 2023 18:20:07 +0100 Subject: [PATCH 1/7] First pubsub commit --- include/opcua.hrl | 32 ++- src/opcua_pubsub.erl | 113 ++++++++++ src/opcua_pubsub.hrl | 48 +++++ src/opcua_pubsub_connection.erl | 119 +++++++++++ src/opcua_pubsub_data_set_reader.erl | 108 ++++++++++ src/opcua_pubsub_example.erl | 50 +++++ src/opcua_pubsub_middleware_sup.erl | 23 +++ src/opcua_pubsub_reader_group.erl | 46 +++++ src/opcua_pubsub_security.erl | 13 ++ src/opcua_pubsub_sup.erl | 35 ++++ src/opcua_pubsub_uadp.erl | 298 +++++++++++++++++++++++++++ src/opcua_pubsub_udp.erl | 104 ++++++++++ src/opcua_sup.erl | 3 +- 13 files changed, 990 insertions(+), 2 deletions(-) create mode 100644 src/opcua_pubsub.erl create mode 100644 src/opcua_pubsub.hrl create mode 100644 src/opcua_pubsub_connection.erl create mode 100644 src/opcua_pubsub_data_set_reader.erl create mode 100644 src/opcua_pubsub_example.erl create mode 100644 src/opcua_pubsub_middleware_sup.erl create mode 100644 src/opcua_pubsub_reader_group.erl create mode 100644 src/opcua_pubsub_security.erl create mode 100644 src/opcua_pubsub_sup.erl create mode 100644 src/opcua_pubsub_uadp.erl create mode 100644 src/opcua_pubsub_udp.erl diff --git a/include/opcua.hrl b/include/opcua.hrl index 7198a57..e4385bc 100644 --- a/include/opcua.hrl +++ b/include/opcua.hrl @@ -40,7 +40,37 @@ -define(OBJ_SERVER_TYPE, 2004). -define(OBJ_SERVER_STATUS_TYPE, 2138). - +% Attribute Id +% ------------ +% Every node in an OPC UA information model contains attributes depending on +% the node type. Possible attributes are as follows: +-define(UA_ATTRIBUTEID_NODEID, 1). +-define(UA_ATTRIBUTEID_NODECLASS, 2). +-define(UA_ATTRIBUTEID_BROWSENAME, 3). +-define(UA_ATTRIBUTEID_DISPLAYNAME, 4). +-define(UA_ATTRIBUTEID_DESCRIPTION, 5). +-define(UA_ATTRIBUTEID_WRITEMASK, 6). +-define(UA_ATTRIBUTEID_USERWRITEMASK, 7). +-define(UA_ATTRIBUTEID_ISABSTRACT, 8). +-define(UA_ATTRIBUTEID_SYMMETRIC, 9). +-define(UA_ATTRIBUTEID_INVERSENAME, 10). +-define(UA_ATTRIBUTEID_CONTAINSNOLOOPS, 11). +-define(UA_ATTRIBUTEID_EVENTNOTIFIER, 12). +-define(UA_ATTRIBUTEID_VALUE, 13). +-define(UA_ATTRIBUTEID_DATATYPE, 14). +-define(UA_ATTRIBUTEID_VALUERANK, 15). +-define(UA_ATTRIBUTEID_ARRAYDIMENSIONS, 16). +-define(UA_ATTRIBUTEID_ACCESSLEVEL, 17). +-define(UA_ATTRIBUTEID_USERACCESSLEVEL, 18). +-define(UA_ATTRIBUTEID_MINIMUMSAMPLINGINTERVAL,19). +-define(UA_ATTRIBUTEID_HISTORIZING, 20). +-define(UA_ATTRIBUTEID_EXECUTABLE, 21). +-define(UA_ATTRIBUTEID_USEREXECUTABLE, 22). +-define(UA_ATTRIBUTEID_DATATYPEDEFINITION, 23). +-define(UA_ATTRIBUTEID_ROLEPERMISSIONS, 24). +-define(UA_ATTRIBUTEID_USERROLEPERMISSIONS, 25). +-define(UA_ATTRIBUTEID_ACCESSRESTRICTIONS, 26). +-define(UA_ATTRIBUTEID_ACCESSLEVELEX, 27). %%% TYPES %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %-- OPCUA Types Records -------------------------------------------------------- diff --git a/src/opcua_pubsub.erl b/src/opcua_pubsub.erl new file mode 100644 index 0000000..5edbf89 --- /dev/null +++ b/src/opcua_pubsub.erl @@ -0,0 +1,113 @@ +-module(opcua_pubsub). + +-export([start_link/0]). + + +-behaviour(gen_server). +-export([init/1, handle_call/3, handle_cast/2, handle_info/2]). + + +-export([add_published_data_set/1]). +-export([add_data_set_field/2]). + +-export([add_connection/2]). +-export([new_network_message/2]). +-export([remove_connection/1]). + +-export([add_reader_group/2]). +-export([add_writer_group/2]). 
+ +-export([add_data_set_reader/3]). +-export([create_target_variables/4]). + +-export([add_data_set_writer/3]). + +-record(state, { + connections = #{}, + published_data_sets = #{} +}). + +%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +start_link() -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). + +% Publised Data Set configuration: PDS are independent +add_published_data_set(Config) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, Config}). + +% Adds definitions per-field for a PublishedDataSet +add_data_set_field(PublishedDataSetID, FieldConfig) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, PublishedDataSetID, FieldConfig}). + +add_connection(Url, Config) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, Url, Config}). + +new_network_message(ConnectionId, Binary) -> + gen_server:cast(?MODULE, {?FUNCTION_NAME, ConnectionId, Binary}). + +remove_connection(ID) -> + gen_server:cast(?MODULE, {?FUNCTION_NAME, ID}). + +% Just a place to group DataSetReaders +add_reader_group(ConnectionID, Config) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, ConnectionID, Config}). + +add_writer_group(ConnectionID, Config) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, ConnectionID, Config}). + +% define a DataSetReader, this includes its DataSetFieldMetaData +add_data_set_reader(Conn_id, RG_id, DSR_cfg) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, Conn_id, RG_id, DSR_cfg}). + +% Add target variables to tell a DataSetReader where to write the decoded Fields +create_target_variables(Conn_id, RG_id, DSR_id, Config) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, Conn_id, RG_id, DSR_id, Config}). + +add_data_set_writer(Conn_id, WG_id, DWR_cfg) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, Conn_id, WG_id, DWR_cfg}). + + +% GEN_SERVER callbacks %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +init([]) -> + {ok, #state{}}. + + +handle_call({add_connection, Url, Opts}, _, #state{connections = Conns} = S) -> + {ok, ID, Conn} = opcua_pubsub_connection:create(Url, Opts), + Conns2 = maps:put(ID, Conn, Conns), + {reply, {ok, ID}, S#state{connections = Conns2}}; +handle_call({add_reader_group, Conn_id, Opts}, _, #state{connections = Conns} = S) -> + Conn = maps:get(Conn_id, Conns), + {ok, ID, NewConn} = opcua_pubsub_connection:add_reader_group(Opts, Conn), + Conns2 = maps:put(Conn_id, NewConn, Conns), + {reply, {ok, ID}, S#state{connections = Conns2}}; +handle_call({add_data_set_reader, Conn_id, RG_id, Cfg}, _, + #state{connections = Conns} = S) -> + Conn = maps:get(Conn_id, Conns), + {ok, ID, NewConn} = + opcua_pubsub_connection:add_data_set_reader(RG_id, Cfg, Conn), + Conns2 = maps:put(Conn_id, NewConn, Conns), + {reply, {ok, ID}, S#state{connections = Conns2}}; +handle_call({create_target_variables, Conn_id, RG_id, DSR_id, Cfg}, _, + #state{connections = Conns} = S) -> + Conn = maps:get(Conn_id, Conns), + {ok, ID, NewConn} = + opcua_pubsub_connection:create_target_variables(RG_id, DSR_id, Cfg, Conn), + Conns2 = maps:put(Conn_id, NewConn, Conns), + {reply, {ok, ID}, S#state{connections = Conns2}}. 
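% The ?FUNCTION_NAME-based API above pairs one call tuple with one handle_call/3
% clause, so a reader-side setup through this module is expected to look roughly
% like the sketch below (DsrConfig and TargetVariables are placeholder terms
% built from the records in opcua_pubsub.hrl):
%
%   {ok, ConnId} = opcua_pubsub:add_connection(<<"opc.udp://224.0.0.22:4840">>, #{}),
%   {ok, RgId}   = opcua_pubsub:add_reader_group(ConnId, #{name => <<"RG">>}),
%   {ok, DsrId}  = opcua_pubsub:add_data_set_reader(ConnId, RgId, DsrConfig),
%   {ok, DsrId}  = opcua_pubsub:create_target_variables(ConnId, RgId, DsrId, TargetVariables).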
+ +handle_cast({new_network_message, ConnId, Binary}, + #state{connections = Connections} = S) -> + Connection = maps:get(ConnId, Connections), + {ok, NewConn} = + opcua_pubsub_connection:handle_network_message(Binary, Connection), + {noreply, S#state{connections = maps:put(ConnId, NewConn, Connections)}}; +handle_cast({remove_connection, ID}, #state{connections = Conns} = S) -> + ok = opcua_pubsub_connection:destroy(maps:get(ID, Conns)), + {noreply, S#state{connections = maps:remove(ID, Conns)}}. +handle_info(_, S) -> + {noreply, S}. + + +%%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/src/opcua_pubsub.hrl b/src/opcua_pubsub.hrl new file mode 100644 index 0000000..8fe96d0 --- /dev/null +++ b/src/opcua_pubsub.hrl @@ -0,0 +1,48 @@ +-define(UA_PUBLISHERIDTYPE_BYTE,0). +-define(UA_PUBLISHERIDTYPE_UINT16,1). +-define(UA_PUBLISHERIDTYPE_UINT32,2). +-define(UA_PUBLISHERIDTYPE_UINT64,3). +-define(UA_PUBLISHERIDTYPE_STRING,4). + +-record(dataset_mirror,{}). + +-record(target_variable,{ + data_set_field_id = 0 :: non_neg_integer(), + receiver_index_range, + target_node_id, % node_id to write to + attribute_id, % attribute to write + write_index_range, + override_value_handling, + override_value +}). + +-record(data_set_field_metadata,{ + name :: string(), + description :: string(), + field_flags, + builtin_type :: opcua:builtin_type(), + data_type :: opcua:node_id(), + valueRank :: integer(), + array_dimensions, + maxStringLength, + data_set_field_id = 0 :: non_neg_integer(), + properties +}). + +-record(data_set_metadata,{ + name, + description, + fields, + data_set_class_id, + configuration_version :: undefined | {non_neg_integer(),non_neg_integer()} +}). + + +-record(data_set_reader_config,{ + name :: binary(), + publisher_id, + publisher_id_type, + writer_group_id, + data_set_writer_id, + data_set_metadata :: #data_set_metadata{} +}). diff --git a/src/opcua_pubsub_connection.erl b/src/opcua_pubsub_connection.erl new file mode 100644 index 0000000..b02d15f --- /dev/null +++ b/src/opcua_pubsub_connection.erl @@ -0,0 +1,119 @@ +-module(opcua_pubsub_connection). + +-export([create/2]). +-export([destroy/1]). +-export([handle_network_message/2]). +-export([add_reader_group/2]). +-export([add_data_set_reader/3]). +-export([create_target_variables/4]). + +-record(state, { + connection_id, + middleware :: {supervisor:child_id(), module()}, + reader_groups = #{}, + writer_groups = #{} +}). + +-include("opcua_pubsub.hrl"). + +%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +% Publised Data Set configuration: PDS are independent +create(Url, Config) -> + PubSubConnectionID = uuid:get_v4(), + TransportProcessID = uuid:get_v4(), + Uri = uri_string:parse(Url), + Config2 = maps:merge(default_config(), Config), + Config3 = maps:put(uri, Uri, Config2), + Config4 = maps:put(connection_id, PubSubConnectionID, Config3), + case start_transport(TransportProcessID, Config4) of + {ok, Module} -> {ok, PubSubConnectionID, #state{ + connection_id = PubSubConnectionID, + middleware = {TransportProcessID, Module} + }}; + {error, E} -> error(E) + end. + +destroy(#state{middleware = M}) -> + supervisor:terminate_child(opcua_pubsub_middleware_sup, M), + supervisor:delete_child(opcua_pubsub_middleware_sup, M). 
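% create/2 above parses the URL with uri_string:parse/1 and start_transport/2
% (below) dispatches on the resulting scheme. For the multicast endpoint used in
% this patch series the parsed term is roughly the following map (a sketch for
% orientation only):
%
%   uri_string:parse(<<"opc.udp://224.0.0.22:4840">>)
%     %=> #{scheme => <<"opc.udp">>, host => <<"224.0.0.22">>, port => 4840, path => <<>>}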
+ +handle_network_message(Binary, #state{reader_groups = RGs} = S) -> + {Headers, Payload} = opcua_pubsub_uadp:decode_network_message_headers(Binary), + InterestedReaders = + [begin + DSR_ids = opcua_pubsub_reader_group:filter_readers(Headers,RG), + {RG_id, RG, DSR_ids} + end + || { RG_id, RG} <- maps:to_list(RGs)], + ReadersCount = lists:sum([length(DSR_ids) + || {_, _, DSR_ids} <- InterestedReaders]), + case ReadersCount > 0 of + false -> io:format("Skipped NetMsg\n"),{ok, S}; + true -> + % we can procede with the security step if needed: + % opcua_pubsub_security: ... not_implemented yet + % Then we decode all messages + DataSetMessages = opcua_pubsub_uadp:decode_payload(Headers, Payload), + #{payload_header := #{data_set_writer_ids := DSW_ids}} = Headers, + BundledMessages = lists:zip(DSW_ids, DataSetMessages), + % After processing, the DSRs could change state. + % All groups must be updated + RG_list = dispatchMessages(BundledMessages, InterestedReaders), + NewRGs = lists:foldl(fun + ({RG_id, NewRG}, Map) -> maps:put(RG_id, NewRG, Map) + end, RGs, RG_list), + {ok, S#state{reader_groups = NewRGs}} + end. + + +add_reader_group(ReaderGroupCfg, #state{reader_groups = RG} = S) -> + RG_id = uuid:get_v4(), + {ok, ReaderGroup} = opcua_pubsub_reader_group:new(ReaderGroupCfg), + RG2 = maps:put(RG_id, ReaderGroup, RG), + {ok, RG_id, S#state{reader_groups = RG2}}. + +add_data_set_reader(RG_id, DSR_cfg, + #state{reader_groups = RGs} = S) -> + RG = maps:get(RG_id, RGs), + {ok, DSR_id, NewRG} = opcua_pubsub_reader_group:add_data_set_reader(DSR_cfg, RG), + NewGroups = maps:put(RG_id, NewRG, RGs), + {ok, DSR_id, S#state{reader_groups = NewGroups}}. + +create_target_variables(RG_id, DSR_id, Config, #state{reader_groups = RGs} = S) -> + RG = maps:get(RG_id, RGs), + {ok, NewRG} = opcua_pubsub_reader_group:create_target_variables(DSR_id, Config, RG), + NewGroups = maps:put(RG_id, NewRG, RGs), + {ok, DSR_id, S#state{reader_groups = NewGroups}}. + + +%%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +start_transport(ID, #{uri := #{scheme := <<"opc.udp">>}} = Config) -> + start_supervised_transport(ID, opcua_pubsub_udp, [Config]); +start_transport(_ID, _Config) -> + {error, unsupported_transport}. + +start_supervised_transport(ID, Module, Args) -> + Child = #{ + id => ID, + start => {Module, start_link, Args}, + restart => transient + }, + case supervisor:start_child(opcua_pubsub_middleware_sup, Child) of + {ok, _Pid} -> {ok, Module}; + E -> E + end. + +default_config() -> #{ + publisher_id_type => ?UA_PUBLISHERIDTYPE_UINT16, + publisher_id => 1111, + name => "Unnamed" + }. + +dispatchMessages(BundledMessages, InterestedReaders) -> + [begin + NewRG = opcua_pubsub_reader_group:dispatch_messages(BundledMessages, + DSR_ids, RG), + {RG_id, NewRG} + end || {RG_id, RG, DSR_ids} <- InterestedReaders]. \ No newline at end of file diff --git a/src/opcua_pubsub_data_set_reader.erl b/src/opcua_pubsub_data_set_reader.erl new file mode 100644 index 0000000..5caa00b --- /dev/null +++ b/src/opcua_pubsub_data_set_reader.erl @@ -0,0 +1,108 @@ +-module(opcua_pubsub_data_set_reader). + +-export([new/1]). +-export([is_interested/2]). +-export([process_messages/2]). +-export([create_target_variables/2]). + +-include("opcua_pubsub.hrl"). 
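% A DataSetReader selects DataSetMessages out of an incoming NetworkMessage by
% matching its configured PublisherId, WriterGroupId and DataSetWriterId, then
% maps the decoded fields onto its target variables. process_messages/2 below
% consumes the bundles built by opcua_pubsub_connection:handle_network_message/2,
% i.e. a list of {DataSetWriterId, DataSetMessage} pairs.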
+ +-record(state, { + % status = operational, + name :: binary(), + publisher_id, + publisher_id_type, + writer_group_id, + data_set_writer_id, + data_set_metadata :: #data_set_metadata{}, + + subscribed_dataset :: undefined | [#target_variable{}] | #dataset_mirror{} +}). + + +new(#data_set_reader_config{ + name = Name, + publisher_id = PubId, + publisher_id_type = PubIdType, + writer_group_id = WGId, + data_set_writer_id = DataSetWriterId, + data_set_metadata = DataSetMetadata}) -> + {ok, #state{name = Name, publisher_id = PubId, + publisher_id_type = PubIdType, + writer_group_id = WGId, + data_set_writer_id = DataSetWriterId, + data_set_metadata = DataSetMetadata}}. + +create_target_variables(Variables, State) -> + io:format("Target Vars, ~p~n", [Variables]), + {ok, State#state{subscribed_dataset = Variables}}. + +% Checklist: +% writergroup match +% payload contains at least one message from the desired writer +is_interested(#{ + publisher_id := Pub_id, + group_header := #{ + writer_group_id := WG_id + }, + payload_header := #{ + data_set_writer_ids := DSW_ids + } + } = _Headers, + #state{ publisher_id = Pub_id, + writer_group_id = WG_id, + data_set_writer_id = DataSetWriterId}) -> + lists:member(DataSetWriterId, DSW_ids); +is_interested(_, _) -> + false. + +process_messages([], State) -> State; +process_messages([{DataSetWriterId, {Header, Data}} | Messages], + #state{data_set_writer_id = DataSetWriterId} = State) -> + io:format("~p handling ~p~n",[?MODULE,Data]), + % TODO: add msg version check, state machine management ecc.. + {DataSet, NewState} = decode_data_set_message(Header, Data, State), + NewState2 = update_target_variables(DataSet, NewState), + process_messages(Messages, NewState2); +process_messages([ _| Messages], State) -> + process_messages(Messages, State). + +decode_data_set_message( % case of invalid message + #{data_set_flags1 => + #{data_set_msg_valid => 0}}, + _, S) -> + {[], S}; +decode_data_set_message( + #{ + data_set_flags1 => #{ + data_set_msg_valid => 1, + field_encoding => Encoding, + data_set_msg_seq_num => DataSetMsgSeqNum, + status => Status, + config_ver_minor_ver => ConfigVerMajorVer, + config_ver_major_ver => ConfigVerMinorVer, + data_set_flags2 => DataSetFlags2 + }, + data_set_flags2 => #{ + msg_type => MessageType, + timestamp => Timestamp, + picoseconds => PicoSeconds + }, + data_set_seq_num => DataSetSeqNum, + timestamp => Timestamp, + picoseconds => Picoseconds, + status => Status, + config_ver_major_ver => ConfigVerMajorVer, + config_ver_minor_ver => ConfigVerMinorVer + }, + Data, + #state{ + data_set_metadata = #data_set_metadata{ + configuration_version = {MajorV, MinorV} + }} = S) -> + + + + +update_target_variables([], S) -> S; +update_target_variables(DataSet, S) -> S. diff --git a/src/opcua_pubsub_example.erl b/src/opcua_pubsub_example.erl new file mode 100644 index 0000000..2b03c79 --- /dev/null +++ b/src/opcua_pubsub_example.erl @@ -0,0 +1,50 @@ +-module(opcua_pubsub_example). + +-export([run/0]). + +-include("opcua.hrl"). +-include("opcua_pubsub.hrl"). 
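% The identifiers used below (PublisherId 2234, WriterGroupId 100,
% DataSetWriterId 62541) match the open62541 PubSub publish tutorial that the
% DataSet description refers to, so this reader can be pointed at that example
% publisher on the same multicast group.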
+ +run() -> + Url = <<"opc.udp://224.0.0.22:4840">>, + ConnectionConfig = #{}, + {ok, ConnectionID} = opcua_pubsub:add_connection(Url, ConnectionConfig), + + ReaderGroupconfig = #{ name => <<"Simple Reader Group">>}, + {ok, RG_id} = opcua_pubsub:add_reader_group(ConnectionID, ReaderGroupconfig), + + DSR_config = #data_set_reader_config{ + name = <<"Example Reader">>, + publisher_id = 2234, + publisher_id_type = uint16, + writer_group_id = 100, + data_set_writer_id = 62541, + data_set_metadata = #data_set_metadata{ + name = "DataSet 1", + description = "An example from 62541", + fields = [ + #data_set_field_metadata{ + data_set_field_id = 0, + name = "DateTime 1", + builtin_type = date_time, + data_type = opcua_node:id(date_time), + valueRank = -1 % a scalar, + }] + } + }, + {ok, DSR_id} = + opcua_pubsub:add_data_set_reader(ConnectionID, RG_id, DSR_config), + + % A dedicated object on the server (or any address space available) + % containing all variables that will be updated by the DSR + DataSetObject = opcua_server:add_object(<<"Subscribed Data">>, numeric), + VarNodeId = opcua_server:add_variable(DataSetObject, <<"Publisher Time">>, + undefined, date_time, 0), + + TGT = #target_variable{ + data_set_field_id = 0, + target_node_id = VarNodeId, + attribute_id = ?UA_ATTRIBUTEID_VALUE + }, + opcua_pubsub:create_target_variables(ConnectionID,RG_id,DSR_id,[TGT]), + ok. diff --git a/src/opcua_pubsub_middleware_sup.erl b/src/opcua_pubsub_middleware_sup.erl new file mode 100644 index 0000000..57d0b95 --- /dev/null +++ b/src/opcua_pubsub_middleware_sup.erl @@ -0,0 +1,23 @@ +-module(opcua_pubsub_middleware_sup). + +-behaviour(supervisor). + +%%% EXPORTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +%% API Functions +-export([start_link/0]). + +%% Behaviour supervisor callback functions +-export([init/1]). + + +%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + + +%%% BEHAVIOUR supervisor CALLBACK FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +init([]) -> + {ok, {#{strategy => one_for_one}, []}}. diff --git a/src/opcua_pubsub_reader_group.erl b/src/opcua_pubsub_reader_group.erl new file mode 100644 index 0000000..9bdfbc7 --- /dev/null +++ b/src/opcua_pubsub_reader_group.erl @@ -0,0 +1,46 @@ +-module(opcua_pubsub_reader_group). + +-export([new/1]). +-export([add_data_set_reader/2]). +-export([filter_readers/2]). +-export([dispatch_messages/3]). +-export([create_target_variables/3]). + +-record(state, { + name, + data_set_readers = #{} +}). + +new(#{name := RG_name}) -> + {ok, #state{}}. + +add_data_set_reader(DSR_cfg, #state{data_set_readers = DSRs} = S) -> + DSR_id = uuid:get_v4(), + {ok, DSR} = opcua_pubsub_data_set_reader:new(DSR_cfg), + NewDSRs = maps:put(DSR_id, DSR, DSRs), + {ok, DSR_id, S#state{data_set_readers = NewDSRs}}. + +create_target_variables(DSR_id, Config,#state{data_set_readers = DSRs} = S) -> + DSR = maps:get(DSR_id, DSRs), + {ok, NewDSR} = opcua_pubsub_data_set_reader:create_target_variables(Config, DSR), + NewDSRs = maps:put(DSR_id, NewDSR, DSRs), + {ok, S#state{data_set_readers = NewDSRs}}. + +filter_readers(Headers, #state{data_set_readers = DSRs}) -> + [DSR_id || {DSR_id, DSR} <- maps:to_list(DSRs), + opcua_pubsub_data_set_reader:is_interested(Headers, DSR)]. 
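% dispatch_messages/3 below hands every bundled DataSetMessage to each of the
% selected readers and folds the updated reader states back into the group:
% BundledMessages is the [{DataSetWriterId, DataSetMessage}] list built by the
% connection, and DSR_ids is the result of filter_readers/2 above.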
+ +dispatch_messages(BundledMessages, DSR_ids, #state{data_set_readers = DSRs} = S) -> + Updated = [ + begin + DSR = maps:get(ID, DSRs), + NewDSR = opcua_pubsub_data_set_reader:process_messages(BundledMessages, DSR), + {ID, NewDSR} + end || ID <- DSR_ids], + NewDSRs = lists:foldl(fun + ({ID,Value}, Map) -> + maps:put(ID, Value, Map) + end, DSRs, Updated), + S#state{data_set_readers = NewDSRs}. + + diff --git a/src/opcua_pubsub_security.erl b/src/opcua_pubsub_security.erl new file mode 100644 index 0000000..eb846fc --- /dev/null +++ b/src/opcua_pubsub_security.erl @@ -0,0 +1,13 @@ +-module(opcua_pubsub_security). + +-export([lock/1]). +-export([unlock/1]). + +%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +lock(Binary) -> + Binary. + +unlock(Binary) -> + Binary. + +%%% INTERNALS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% diff --git a/src/opcua_pubsub_sup.erl b/src/opcua_pubsub_sup.erl new file mode 100644 index 0000000..ff8add8 --- /dev/null +++ b/src/opcua_pubsub_sup.erl @@ -0,0 +1,35 @@ +-module(opcua_pubsub_sup). + +-behaviour(supervisor). + +%%% EXPORTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +%% API Functions +-export([start_link/0]). + +%% Behaviour supervisor callback functions +-export([init/1]). + + +%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +start_link() -> + supervisor:start_link({local, ?MODULE}, ?MODULE, []). + + +%%% BEHAVIOUR supervisor CALLBACK FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +init([]) -> + Childs = [ + supervisor(opcua_pubsub_middleware_sup, []), + worker(opcua_pubsub, [])], + {ok, {#{strategy => one_for_all}, Childs}}. + + +%%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +worker(Module, Args) -> + #{id => Module, start => {Module, start_link, Args}}. + +supervisor(Module, Args) -> + #{id => Module, type => supervisor, start => {Module, start_link, Args}}. diff --git a/src/opcua_pubsub_uadp.erl b/src/opcua_pubsub_uadp.erl new file mode 100644 index 0000000..f2cad26 --- /dev/null +++ b/src/opcua_pubsub_uadp.erl @@ -0,0 +1,298 @@ +-module(opcua_pubsub_uadp). + + +-export([decode_network_message_headers/1]). +-export([decode_payload/2]). + +-include("opcua_pubsub.hrl"). 
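% A UADP NetworkMessage is decoded in two steps: first the network message
% header (version/flags byte, optional extended flags, publisher id), the
% optional group header and the optional payload header (message count plus the
% list of DataSetWriterIds); then the payload itself, which carries one or more
% DataSetMessages, size-prefixed when there is more than one.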
+ +%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +decode_network_message_headers(<>) -> + <> = VersionFlags, + {ExtendedFlags1Map, Rest2} = decode_extended_flags1(ExtendedFlags1, Rest), + + % Skipping many optional fields, enforcing a minimal setup for testing + % TODO: add them once needed and remove this hard match + #{dataset_class_id := 0, extended_flags2 := 0, + picoseconds := 0, publisher_id_type := uint16, + security := 0, timestamp := 0} = ExtendedFlags1Map, + ExtendedFlags2 = maps:get(extended_flags2, ExtendedFlags1Map, 0), + {ExtendedFlags2Map, Rest3} = decode_extended_flags2(ExtendedFlags2, Rest2), + {PublisherIDValue, Rest4} = decode_publisherID(PublisherId, ExtendedFlags1Map, Rest3), + % {DataSetClassId, Rest5} = decode_dataset_class_id(PublisherID, ExtendedFlags1Record, Rest4), + {GroupHeaderMap, Rest5} = decode_group_header(GroupHeader, Rest4), + {PayloadHeaderMap, Rest6} = decode_payload_header(PayloadHeader, ExtendedFlags2Map, Rest5), + % Network Message Extended Header + % {TimeStamp, Rest7} = , + % {Picoseconds, Rest8} = , + % {PromotedFields, Rest7} = decode_promoted_fields(ExtendedFlags2Record, Rest6), + % Security + % {SecurityHeader, Rest8} = decode_security_header(ExtendedFlags1Record, Rest7), + Headers = #{ + publisher_id => PublisherIDValue, + extended_flags1 => ExtendedFlags1Map, + extended_flags2 => ExtendedFlags2Map, + group_header => GroupHeaderMap, + payload_header => PayloadHeaderMap + }, + {Headers, Rest6}; +decode_network_message_headers(_) -> + {error, unknown_message}. + +decode_payload(#{payload_header := undefined}, Payload) -> + [decode_data_set_message(Payload)]; +decode_payload(#{payload_header := #{count := 1}}, Payload) -> + [decode_data_set_message(Payload)]; +decode_payload(#{payload_header := #{count := Count}}, Payload) -> + <> = Payload, + Sizes = [Size || <> <= SizesBinary], + decode_multi_data_set_message(Rest, Sizes). + +%%% INTERNALS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + +decode_extended_flags1(0, Bin) -> + {#{ + extended_flags2 => 0, + picoseconds => 0, + timestamp => 0, + security => 0, + dataset_class_id => 0, + publisher_id_type => publisher_id_type(0) + }, Bin}; +decode_extended_flags1(1, << + ExtendedFlags2:1, + PicoSeconds:1, + Timestamp:1, + Security:1, + DataSetClassId:1, + PublisherIdType:3/little-unsigned, Rest/binary>>) -> + {#{ + extended_flags2 => ExtendedFlags2, + picoseconds => PicoSeconds, + timestamp => Timestamp, + security => Security, + dataset_class_id => DataSetClassId, + publisher_id_type => publisher_id_type(PublisherIdType) + }, Rest}. + +publisher_id_type(?UA_PUBLISHERIDTYPE_BYTE) -> byte; +publisher_id_type(?UA_PUBLISHERIDTYPE_UINT16) -> uint16; +publisher_id_type(?UA_PUBLISHERIDTYPE_UINT32) -> uint32; +publisher_id_type(?UA_PUBLISHERIDTYPE_UINT64) -> uint64; +publisher_id_type(?UA_PUBLISHERIDTYPE_STRING) -> string; +publisher_id_type(_) -> reserved. + +decode_extended_flags2(0, Bin) -> + {#{ + chunk => 0, + promoted_fields => 0, + network_message_type => decode_network_msg_type(<<0:1,0:1,0:1>>) + }, Bin}; +decode_extended_flags2(1, << + _Reserved:3, + NetworkMsgType:3/bitstring, + PromotedFields:1, + Chunk:1, + Bin/binary>>) -> + {#{ + chunk => Chunk, + promoted_fields => PromotedFields, + network_message_type => decode_network_msg_type(NetworkMsgType) + }, Bin}. 
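% Worked example (a sketch) of the bit syntax used by decode_extended_flags1/2
% above: the flags byte is matched most-significant bit first, so 2#00000001
% leaves every flag at 0 and sets the 3-bit PublisherIdType field to 1, i.e. the
% uint16 publisher id this implementation currently expects:
%
%   <<ExtFlags2:1, Pico:1, Ts:1, Sec:1, ClassId:1, PubIdType:3>> = <<2#00000001>>,
%   {ExtFlags2, Pico, Ts, Sec, ClassId, PubIdType}  %=> {0,0,0,0,0,1}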
+ + +decode_network_msg_type(<< 0:1, 0:1, 0:1>>) -> data_set_message; +decode_network_msg_type(<< 0:1, 0:1, 1:1>>) -> discovery_request; +decode_network_msg_type(<< 0:1, 1:1, 0:1>>) -> discovery_responce; +decode_network_msg_type(<< _:1, _:1, _:1>>) -> reserved. + +decode_publisherID(0, _, Binary) -> {undefined, Binary}; +decode_publisherID(1, #{publisher_id_type := uint16}, Binary) -> + <> = Binary, + {PublisherID, Rest}. +% TODO: handle other PublisherID types + +decode_group_header(0, Bin) -> { undefined, Bin}; +decode_group_header(1, <>) -> + <<_ReservedBits:4, + SeqNum_flag:1, + NetworkMessageNumber_flag:1, + GroupVersion_flag:1, + WrtiterGroupId_flag:1>> = GroupFlags, + {WriterGroupId, Rest} = decode_writer_group_id(WrtiterGroupId_flag, Bin), + {GroupVersion, Rest2} = decode_group_version_id(GroupVersion_flag, Rest), + {NetworkMessageNumber, Rest3} = decode_network_message_number(NetworkMessageNumber_flag, Rest2), + {SeqNum, Rest4} = decode_network_sequence_number(SeqNum_flag, Rest3), + {#{ + writer_group_id => WriterGroupId, + group_version => GroupVersion, + network_message_number => NetworkMessageNumber, + sequence_number => SeqNum + }, Rest4}. + +decode_writer_group_id(0, Bin) -> {undefined, Bin}; +decode_writer_group_id(1, Bin) -> opcua_codec_binary_builtin:decode(uint16, Bin). + +decode_group_version_id(0, Bin) -> {undefined, Bin}; +decode_group_version_id(1, Bin) -> + %UInt32 that represents the time in seconds since the year 2000 + opcua_codec_binary_builtin:decode(uint32, Bin). + +decode_network_message_number(0, Bin) -> {undefined, Bin}; +decode_network_message_number(1, Bin) -> opcua_codec_binary_builtin:decode(uint16, Bin). + +decode_network_sequence_number(0, Bin) -> {undefined, Bin}; +decode_network_sequence_number(1, Bin) -> opcua_codec_binary_builtin:decode(uint16, Bin). + + +decode_payload_header(0, _, Bin) -> {undefined, Bin}; +decode_payload_header(1, #{chunk := 1}, Bin) -> + {DataSetWriterID, Rest} = opcua_codec_binary_builtin:decode(uint16, Bin), + throw({not_implemented, chunked_network_message}); +decode_payload_header(1, #{network_message_type := data_set_message}, Bin) -> + <> = Bin, + <> = Rest, + {#{ + count => MsgCount, + data_set_writer_ids => + [ DataWriterID || <> <= DataWriterIDs] + }, Rest2}; +decode_payload_header(1, #{network_message_type := discovery_request}, Bin) -> + throw({not_implemented, discovery_request}); +decode_payload_header(1, #{network_message_type := discovery_responce}, Bin) -> + throw({not_implemented, discovery_responce}). + + + +decode_multi_data_set_message(Bin, Sizes) -> + decode_multi_data_set_message(Bin, Sizes, []). + +decode_multi_data_set_message(<<>>, [], Result) -> lists:reverse(Result); +decode_multi_data_set_message(Bin, [S|TL], Result) -> + <> = Bin, + Decoded = decode_data_set_message(DSM), + decode_multi_data_set_message(Rest, [Decoded | Result], TL). + + +decode_data_set_message(Binary) -> + {DSM_header, Binary1} = decode_data_set_message_header(Binary), + DataSet = decode_data_set_message_data(DSM_header, Binary1), + {DSM_header, DataSet}. 
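% A minimal sketch (the helper name is illustrative, not part of this module) of
% the size-prefixed walk decode_multi_data_set_message/3 above is intended to
% perform: the Sizes list taken from the payload header is consumed in step with
% the binary, and the accumulated results are reversed at the end.
%
%   walk_sizes(<<>>, [], Acc) -> lists:reverse(Acc);
%   walk_sizes(Bin, [Size | Sizes], Acc) ->
%       <<Chunk:Size/binary, Rest/binary>> = Bin,
%       walk_sizes(Rest, Sizes, [decode_data_set_message(Chunk) | Acc]).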
+ +decode_data_set_message_header(DataSetMessageBinary) -> + {DataSetFlags1, Rest} = decode_data_set_flags1(DataSetMessageBinary), + {DataSetFlags2, Rest1} = decode_data_set_flags2(DataSetFlags1, Rest), + {DataSetSeqNum, Rest2} = decode_data_set_seq_num(DataSetFlags1, Rest1), + {Timestamp, Rest3} = decode_data_set_timestamp(DataSetFlags2, Rest2), + {Picoseconds, Rest4} = decode_data_set_picoseconds(DataSetFlags2, Rest3), + {Status, Rest5} = decode_data_set_status(DataSetFlags1, Rest4), + {ConfigVerMajorVer, Rest6} = decode_data_set_cfg_major_ver(DataSetFlags1, Rest5), + {ConfigVerMinorVer, Rest7} = decode_data_set_cfg_minor_ver(DataSetFlags1, Rest6), + {#{ + data_set_flags1 => DataSetFlags1, + data_set_flags2 => DataSetFlags2, + data_set_seq_num => DataSetSeqNum, + timestamp => Timestamp, + picoseconds => Picoseconds, + status => Status, + config_ver_major_ver => ConfigVerMajorVer, + config_ver_minor_ver => ConfigVerMinorVer + }, + Rest7}. + +decode_data_set_flags1(<< + DataSetFlags2:1, + ConfigVerMinorVer:1, + ConfigVerMajorVer:1, + Status:1, + DataSetMsgSeqNum:1, + FieldEncoding:2/bitstring, + DataSetMsgValid:1, + Rest/binary>>) -> + {#{ + data_set_msg_valid => DataSetMsgValid, + field_encoding => decode_field_encoding(FieldEncoding), + data_set_msg_seq_num => DataSetMsgSeqNum, + status => Status, + config_ver_minor_ver => ConfigVerMajorVer, + config_ver_major_ver => ConfigVerMinorVer, + data_set_flags2 => DataSetFlags2 + }, Rest}. + +decode_field_encoding(<<0:1, 0:1>>) -> variant; +decode_field_encoding(<<0:1, 1:1>>) -> raw; +decode_field_encoding(<<1:1, 0:1>>) -> data_value; +decode_field_encoding(<<1:1, 1:1>>) -> reserved. + +decode_data_set_flags2(#{data_set_flags2 := 0}, Bin) -> + {#{ + msg_type => decode_data_set_message_type(<<0:4>>), + timestamp => 0, + picoseconds => 0}, Bin}; +decode_data_set_flags2(#{data_set_flags2 := 1}, + <<_Reserved:2, + PicoSeconds:1, + Timestamp:1, + DataMsgType:4/bitstring, + Rest/binary>>) -> + {#{ + msg_type => decode_data_set_message_type(DataMsgType), + timestamp => Timestamp, + picoseconds => PicoSeconds + }, Rest}. + +decode_data_set_message_type(<<0:4>>) -> data_key_frame; +decode_data_set_message_type(<<0:1, 0:1, 0:1, 1:1>>) -> data_delta_frame; +decode_data_set_message_type(<<0:1, 0:1, 1:1, 0:1>>) -> event; +decode_data_set_message_type(<<0:1, 0:1, 1:1, 1:1>>) -> keep_alive; +decode_data_set_message_type(<<_:4>>) -> reserved. + +decode_data_set_seq_num(#{data_set_msg_seq_num := 0}, Bin) -> {undefined, Bin}; +decode_data_set_seq_num(#{data_set_msg_seq_num := 1}, Bin) -> + opcua_codec_binary_builtin:decode(uint16, Bin). + +decode_data_set_timestamp(#{timestamp := 0}, Bin) -> {undefined, Bin}; +decode_data_set_timestamp(#{timestamp := 1}, Bin) -> + opcua_codec_binary_builtin:decode(date_time, Bin). + +decode_data_set_picoseconds(#{picoseconds := 0}, Bin) -> {undefined, Bin}; +decode_data_set_picoseconds(#{picoseconds := 1}, Bin) -> + opcua_codec_binary_builtin:decode(uint16, Bin). + +decode_data_set_status(#{status := 0}, Bin) -> {undefined, Bin}; +decode_data_set_status(#{status := 1}, Bin) -> + opcua_codec_binary_builtin:decode(uint16, Bin). + +decode_data_set_cfg_major_ver(#{config_ver_major_ver := 0}, Bin) -> + {undefined, Bin}; +decode_data_set_cfg_major_ver(#{config_ver_major_ver := 1}, Bin) -> + opcua_codec_binary_builtin:decode(uint32, Bin). 
+ +decode_data_set_cfg_minor_ver(#{config_ver_minor_ver := 0}, Bin) -> + {undefined, Bin}; +decode_data_set_cfg_minor_ver(#{config_ver_minor_ver := 1}, Bin) -> + opcua_codec_binary_builtin:decode(uint32, Bin). + + +decode_data_set_message_data( + #{data_set_flags1 := #{ + data_set_msg_valid := 1, + field_encoding := variant}, + data_set_flags2 := #{ + msg_type := data_key_frame}}, Bin) -> + {FieldCount, Bin1} = opcua_codec_binary_builtin:decode(uint16, Bin), + decode_variant_fields(Bin1, FieldCount). + +decode_variant_fields(Bin, Count) -> + decode_variant_fields(Bin, Count, []). + +decode_variant_fields(<<>>, 0, Fields) -> lists:reverse(Fields); +decode_variant_fields(Bin, Count, Fields) -> + {Object, RemData} = opcua_codec_binary:decode(variant, Bin), + decode_variant_fields(RemData, Count-1, [Object | Fields]). \ No newline at end of file diff --git a/src/opcua_pubsub_udp.erl b/src/opcua_pubsub_udp.erl new file mode 100644 index 0000000..239010e --- /dev/null +++ b/src/opcua_pubsub_udp.erl @@ -0,0 +1,104 @@ +-module(opcua_pubsub_udp). + +-export([start_link/1]). + +-export([send/1]). + +-behaviour(gen_server). +-export([init/1, handle_call/3, handle_cast/2, handle_info/2]). + +-include_lib("kernel/include/logger.hrl"). + +-record(state, { + connection_id, + socket +}). + +start_link(Opts) -> + gen_server:start_link({local, ?MODULE}, ?MODULE, [Opts], []). + +send(Data) -> + gen_server:cast(?MODULE, {?FUNCTION_NAME, Data}). + +init([#{ + connection_id := ConnectionId, + uri := #{ + host := BinaryIP, + port := Port + } + }]) -> + MulticastGroup = parse_ip(BinaryIP), + InterfaceIP = get_ip_of_valid_interface(), + ?LOG_DEBUG("PubSub UDP using interface ~p",[InterfaceIP]), + Opts = [ + binary, + {active, true}, + {reuseaddr, true}, + {ip, MulticastGroup}, + {multicast_ttl, 10} + ], + case gen_udp:open(Port, Opts) of + {ok, Socket} -> + inet:setopts(Socket, [{add_membership,{MulticastGroup,InterfaceIP}}]), + {ok, #state{ + connection_id = ConnectionId, + socket = Socket + }}; + {error, Reason} -> {error, Reason} + end. + +handle_call(_, _, State) -> + {reply, ok, State}. + +% handle_cast(disconnect, State) -> +% gen_udp:close(State#state.socket), +% {noreply, State#state{ socket = undefined}}; +handle_cast({send, Data}, #state{socket = Socket} = State) -> + gen_udp:send(Socket, Data), + {noreply, State}. + +handle_info({udp, Socket, _IP, _Port, Packet}, + #state{socket = Socket, connection_id = ConnectionId} = S) -> + % io:format("~n~nFrom: ~p~nPort: ~p~nData: ~p~n",[_IP,_Port,Packet]), + opcua_pubsub:new_network_message(ConnectionId, Packet), + {noreply, S}. + + +% helpers %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% +parse_ip(BinaryIP) -> + [A,B,C,D] = [ binary_to_integer(N) || N <- string:split(BinaryIP, ".", all)], + {A,B,C,D}. + +get_ip_of_valid_interface() -> + case get_valid_interfaces() of + [ {_Name, Opts} | _] -> get_ipv4_from_opts(Opts); + _ -> undefined + end. + +get_valid_interfaces() -> + {ok, Interfaces} = inet:getifaddrs(), + Selected = [ I + || {_Name, [{flags, Flags} | Opts]} = I <- Interfaces, + flags_are_ok(Flags), + has_ipv4(Opts) + ], + HasLoopback = fun({_Name, [{flags, Flags} | _]}) -> + lists:member(loopback, Flags) + end, + {LoopBack, Others} = lists:partition(HasLoopback, Selected), + Others ++ LoopBack. + +has_ipv4(Opts) -> + get_ipv4_from_opts(Opts) =/= undefined. + +flags_are_ok(Flags) -> + lists:member(up, Flags) and + lists:member(running, Flags). 
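% Interface selection in this module: inet:getifaddrs/0 entries are kept when
% they are up, running and carry an IPv4 address, and loopback interfaces are
% pushed to the end of the list so they are only used as a last resort. For
% reference, the entries have roughly this shape (abridged sketch):
%
%   {ok, [{"lo0",  [{flags, [up, loopback, running]}, {addr, {127,0,0,1}}, ...]},
%         {"eth0", [{flags, [up, broadcast, running, multicast]},
%                   {addr, {192,168,1,10}}, ...]}]}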
+ +get_ipv4_from_opts([]) -> + undefined; +get_ipv4_from_opts([{addr, {_1, _2, _3, _4}} | _]) -> + {_1, _2, _3, _4}; +get_ipv4_from_opts([_ | TL]) -> + get_ipv4_from_opts(TL). diff --git a/src/opcua_sup.erl b/src/opcua_sup.erl index 2a798ea..7af9840 100644 --- a/src/opcua_sup.erl +++ b/src/opcua_sup.erl @@ -26,7 +26,8 @@ init([]) -> Childs = [ worker(opcua_keychain_default, [KeychainOpts]), worker(opcua_nodeset, [NodeSetDir]), - supervisor(opcua_client_sup, []) + supervisor(opcua_client_sup, []), + supervisor(opcua_pubsub_sup, []) ], Childs2 = case application:get_env(start_server) of {ok, false} -> Childs; From ea28b7f9e339508804454a12b49ab107151367d5 Mon Sep 17 00:00:00 2001 From: Luca Succi Date: Tue, 24 Jan 2023 10:57:28 +0100 Subject: [PATCH 2/7] DataReader writing into address space --- src/opcua_pubsub.hrl | 2 +- src/opcua_pubsub_data_set_reader.erl | 112 ++++++++++++++++++++------- src/opcua_pubsub_example.erl | 1 - src/opcua_pubsub_uadp.erl | 50 ++++++------ 4 files changed, 107 insertions(+), 58 deletions(-) diff --git a/src/opcua_pubsub.hrl b/src/opcua_pubsub.hrl index 8fe96d0..a5fd442 100644 --- a/src/opcua_pubsub.hrl +++ b/src/opcua_pubsub.hrl @@ -19,7 +19,7 @@ -record(data_set_field_metadata,{ name :: string(), description :: string(), - field_flags, + field_flags, % This flag indicates if the field is promoted to the NetworkMessage header builtin_type :: opcua:builtin_type(), data_type :: opcua:node_id(), valueRank :: integer(), diff --git a/src/opcua_pubsub_data_set_reader.erl b/src/opcua_pubsub_data_set_reader.erl index 5caa00b..56afe3e 100644 --- a/src/opcua_pubsub_data_set_reader.erl +++ b/src/opcua_pubsub_data_set_reader.erl @@ -5,10 +5,12 @@ -export([process_messages/2]). -export([create_target_variables/2]). +-include("opcua.hrl"). -include("opcua_pubsub.hrl"). +-include_lib("kernel/include/logger.hrl"). -record(state, { - % status = operational, + state = operational :: operational | error | enabled | paused, name :: binary(), publisher_id, publisher_id_type, @@ -31,11 +33,10 @@ new(#data_set_reader_config{ publisher_id_type = PubIdType, writer_group_id = WGId, data_set_writer_id = DataSetWriterId, - data_set_metadata = DataSetMetadata}}. + data_set_metadata = set_metadata_fields_ids(DataSetMetadata)}}. create_target_variables(Variables, State) -> - io:format("Target Vars, ~p~n", [Variables]), - {ok, State#state{subscribed_dataset = Variables}}. + {ok, State#state{subscribed_dataset = set_tgt_var_ids(Variables)}}. % Checklist: % writergroup match @@ -59,50 +60,101 @@ is_interested(_, _) -> process_messages([], State) -> State; process_messages([{DataSetWriterId, {Header, Data}} | Messages], #state{data_set_writer_id = DataSetWriterId} = State) -> - io:format("~p handling ~p~n",[?MODULE,Data]), + % io:format("~p handling ~p~n",[?MODULE,Header]), % TODO: add msg version check, state machine management ecc.. {DataSet, NewState} = decode_data_set_message(Header, Data, State), - NewState2 = update_target_variables(DataSet, NewState), + NewState2 = update_subscribed_dataset(DataSet, NewState), process_messages(Messages, NewState2); process_messages([ _| Messages], State) -> process_messages(Messages, State). 
decode_data_set_message( % case of invalid message - #{data_set_flags1 => - #{data_set_msg_valid => 0}}, + #{data_set_flags1 := + #{data_set_msg_valid := 0}}, _, S) -> {[], S}; decode_data_set_message( #{ - data_set_flags1 => #{ - data_set_msg_valid => 1, - field_encoding => Encoding, - data_set_msg_seq_num => DataSetMsgSeqNum, - status => Status, - config_ver_minor_ver => ConfigVerMajorVer, - config_ver_major_ver => ConfigVerMinorVer, - data_set_flags2 => DataSetFlags2 + data_set_flags1 := #{ + data_set_msg_valid := 1, + field_encoding := Encoding, + data_set_msg_seq_num := _, + status := _, + config_ver_minor_ver := _, + config_ver_major_ver := _, + data_set_flags2 := _ }, - data_set_flags2 => #{ - msg_type => MessageType, - timestamp => Timestamp, - picoseconds => PicoSeconds + data_set_flags2 := #{ + msg_type := MessageType, % keyframe / deltaframe / event ecc... + timestamp := _, + picoseconds := _ }, - data_set_seq_num => DataSetSeqNum, - timestamp => Timestamp, - picoseconds => Picoseconds, - status => Status, - config_ver_major_ver => ConfigVerMajorVer, - config_ver_minor_ver => ConfigVerMinorVer + data_set_seq_num := _, + timestamp := _, + picoseconds := _, + status := _, + config_ver_major_ver := _, + config_ver_minor_ver := _ }, Data, #state{ data_set_metadata = #data_set_metadata{ - configuration_version = {MajorV, MinorV} - }} = S) -> + fields = FieldsMetaData, + configuration_version = _Ver} + } = S) -> + case decode_fields(Encoding, MessageType, FieldsMetaData, Data) of + {error, E} -> + ?LOG_ERROR("Failure decoding DataSetMessageFields: ~p",[E]), + {[], S#state{state = error}}; + DataSet -> {DataSet, S} + end. +decode_fields(Encoding, data_key_frame, FieldsMetaData, Data) -> + {FieldCount, FieldsBin} = opcua_codec_binary_builtin:decode(uint16, Data), + decode_keyframe(Encoding, FieldsMetaData, FieldCount, FieldsBin, []); +decode_fields(_Encoding, _MessageType, _Fields, _Data) -> + error(bad_not_implemented). +decode_keyframe( _, _, _, <<>>, DataSet) -> lists:reverse(DataSet); +decode_keyframe(Encoding, [FieldMD|NextMDMD], FieldCount, Binary, DataSet) -> + {Decoded, Rest} = opcua_pubsub_uadp:decode_data_set_message_field(Encoding, + FieldMD, + Binary), + Data = {FieldMD, Decoded}, + case Decoded of + {error, E} -> {error, E}; + _ -> decode_keyframe(Encoding, NextMDMD, FieldCount-1, Rest, [Data|DataSet]) + end. +update_subscribed_dataset([], #state{state = error} = S) -> S; % skip +update_subscribed_dataset(_DataSet, #state{ subscribed_dataset = Sub }) + when #dataset_mirror{} == Sub -> + error(dataset_mirror_not_implemented); +update_subscribed_dataset(DataSet, #state{subscribed_dataset = TGT_vars} = S) + when is_list(TGT_vars)-> + ok = update_target_variables(DataSet, TGT_vars), + S. -update_target_variables([], S) -> S; -update_target_variables(DataSet, S) -> S. +update_target_variables([], TGT_vars) -> ok; +update_target_variables([{FieldMD, Variable}|DataSet], TGT_vars) -> + FieldId = FieldMD#data_set_field_metadata.data_set_field_id, + [TGT|_] = [ Var || #target_variable{data_set_field_id = DataSetFieldId} = Var + <- TGT_vars, DataSetFieldId == FieldId], + TargetNodeId = TGT#target_variable.target_node_id, + AttrId = TGT#target_variable.attribute_id, + update_tgt_var_attribute(TargetNodeId, AttrId, Variable), + ok. + +update_tgt_var_attribute(TargetNodeId, ?UA_ATTRIBUTEID_VALUE, + #opcua_variant{value = Value}) -> + opcua_server:set_value(TargetNodeId, Value). 
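% After a key frame has been decoded, DataSet is a list of
% {#data_set_field_metadata{}, #opcua_variant{}} pairs; update_target_variables/2
% above picks the target variable whose data_set_field_id matches the field's id
% and writes the variant's value to that node's Value attribute via
% opcua_server:set_value/2.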
+ +set_metadata_fields_ids(#data_set_metadata{fields = Fields} = DSMD) -> + Ids = lists:seq(0, length(Fields) - 1), + DSMD#data_set_metadata{fields = + [F#data_set_field_metadata{data_set_field_id = I} + || {I,F} <- lists:zip(Ids, Fields)]}. + +set_tgt_var_ids(Varables) -> + Ids = lists:seq(0, length(Varables) - 1), + [V#target_variable{data_set_field_id = I} || {I,V} <- lists:zip(Ids, Varables)]. diff --git a/src/opcua_pubsub_example.erl b/src/opcua_pubsub_example.erl index 2b03c79..8405b2e 100644 --- a/src/opcua_pubsub_example.erl +++ b/src/opcua_pubsub_example.erl @@ -24,7 +24,6 @@ run() -> description = "An example from 62541", fields = [ #data_set_field_metadata{ - data_set_field_id = 0, name = "DateTime 1", builtin_type = date_time, data_type = opcua_node:id(date_time), diff --git a/src/opcua_pubsub_uadp.erl b/src/opcua_pubsub_uadp.erl index f2cad26..5c1a41e 100644 --- a/src/opcua_pubsub_uadp.erl +++ b/src/opcua_pubsub_uadp.erl @@ -3,11 +3,14 @@ -export([decode_network_message_headers/1]). -export([decode_payload/2]). +-export([decode_data_set_message_field/3]). +-include("opcua.hrl"). -include("opcua_pubsub.hrl"). %%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% Extracts the clear data from the message Headers and the payload binary decode_network_message_headers(<>) -> <>) -> decode_network_message_headers(_) -> {error, unknown_message}. +%extracts Dataset Messages from the payload blob decoding the headers decode_payload(#{payload_header := undefined}, Payload) -> - [decode_data_set_message(Payload)]; + {DSM_header, Binary} = decode_data_set_message_header(Payload), + [{DSM_header, Binary}]; decode_payload(#{payload_header := #{count := 1}}, Payload) -> - [decode_data_set_message(Payload)]; + {DSM_header, Binary} = decode_data_set_message_header(Payload), + [{DSM_header, Binary}]; decode_payload(#{payload_header := #{count := Count}}, Payload) -> <> = Payload, Sizes = [Size || <> <= SizesBinary], decode_multi_data_set_message(Rest, Sizes). +decode_data_set_message_field(variant, FieldMetadata, Binary) -> + #data_set_field_metadata{ + builtin_type = BuiltinType, + data_type = _NodeId, + valueRank = _ + } = FieldMetadata, + {Result, Rest} = opcua_codec_binary:decode(variant, Binary), + case Result of + #opcua_variant{type = BuiltinType} -> {Result, Rest}; + _ -> {error, unmatched_metadata} + end; +decode_data_set_message_field(_, _, _) -> + error(bad_encoding_not_implemented). + %%% INTERNALS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -176,14 +196,10 @@ decode_multi_data_set_message(Bin, Sizes) -> decode_multi_data_set_message(<<>>, [], Result) -> lists:reverse(Result); decode_multi_data_set_message(Bin, [S|TL], Result) -> <> = Bin, - Decoded = decode_data_set_message(DSM), - decode_multi_data_set_message(Rest, [Decoded | Result], TL). + {DSM_header, Binary1} = decode_data_set_message_header(DSM), + decode_multi_data_set_message(Rest, [ {DSM_header, Binary1} | Result], TL). -decode_data_set_message(Binary) -> - {DSM_header, Binary1} = decode_data_set_message_header(Binary), - DataSet = decode_data_set_message_data(DSM_header, Binary1), - {DSM_header, DataSet}. decode_data_set_message_header(DataSetMessageBinary) -> {DataSetFlags1, Rest} = decode_data_set_flags1(DataSetMessageBinary), @@ -278,21 +294,3 @@ decode_data_set_cfg_minor_ver(#{config_ver_minor_ver := 0}, Bin) -> {undefined, Bin}; decode_data_set_cfg_minor_ver(#{config_ver_minor_ver := 1}, Bin) -> opcua_codec_binary_builtin:decode(uint32, Bin). 
- - -decode_data_set_message_data( - #{data_set_flags1 := #{ - data_set_msg_valid := 1, - field_encoding := variant}, - data_set_flags2 := #{ - msg_type := data_key_frame}}, Bin) -> - {FieldCount, Bin1} = opcua_codec_binary_builtin:decode(uint16, Bin), - decode_variant_fields(Bin1, FieldCount). - -decode_variant_fields(Bin, Count) -> - decode_variant_fields(Bin, Count, []). - -decode_variant_fields(<<>>, 0, Fields) -> lists:reverse(Fields); -decode_variant_fields(Bin, Count, Fields) -> - {Object, RemData} = opcua_codec_binary:decode(variant, Bin), - decode_variant_fields(RemData, Count-1, [Object | Fields]). \ No newline at end of file From 026125a30ce46e49a61c775adf647324794193f5 Mon Sep 17 00:00:00 2001 From: Luca Succi Date: Tue, 24 Jan 2023 15:41:47 +0100 Subject: [PATCH 3/7] Pubsub Conection Refactoring --- src/opcua_pubsub.erl | 97 ++++++------ src/opcua_pubsub_connection.erl | 139 ++++++++++-------- ...up.erl => opcua_pubsub_connection_sup.erl} | 7 +- src/opcua_pubsub_example.erl | 12 +- src/opcua_pubsub_sup.erl | 2 +- src/opcua_pubsub_udp.erl | 36 +---- 6 files changed, 138 insertions(+), 155 deletions(-) rename src/{opcua_pubsub_middleware_sup.erl => opcua_pubsub_connection_sup.erl} (65%) diff --git a/src/opcua_pubsub.erl b/src/opcua_pubsub.erl index 5edbf89..3f650d9 100644 --- a/src/opcua_pubsub.erl +++ b/src/opcua_pubsub.erl @@ -10,9 +10,7 @@ -export([add_published_data_set/1]). -export([add_data_set_field/2]). --export([add_connection/2]). --export([new_network_message/2]). --export([remove_connection/1]). +-export([new_connection/2]). -export([add_reader_group/2]). -export([add_writer_group/2]). @@ -22,8 +20,13 @@ -export([add_data_set_writer/3]). +-export([start_connection/1]). +-export([stop_connection/1]). + +-export([register_connection/1]). + -record(state, { - connections = #{}, + connections = #{},% Maps Ids to Pids published_data_sets = #{} }). @@ -32,6 +35,13 @@ start_link() -> gen_server:start_link({local, ?MODULE}, ?MODULE, [], []). +start_connection(Connection) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, Connection}). + + +stop_connection(ConnectionID) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, ConnectionID}). + % Publised Data Set configuration: PDS are independent add_published_data_set(Config) -> gen_server:call(?MODULE, {?FUNCTION_NAME, Config}). @@ -40,72 +50,49 @@ add_published_data_set(Config) -> add_data_set_field(PublishedDataSetID, FieldConfig) -> gen_server:call(?MODULE, {?FUNCTION_NAME, PublishedDataSetID, FieldConfig}). -add_connection(Url, Config) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, Url, Config}). - -new_network_message(ConnectionId, Binary) -> - gen_server:cast(?MODULE, {?FUNCTION_NAME, ConnectionId, Binary}). - -remove_connection(ID) -> - gen_server:cast(?MODULE, {?FUNCTION_NAME, ID}). +new_connection(Url, Opts) -> + opcua_pubsub_connection:create(Url, Opts). % Just a place to group DataSetReaders -add_reader_group(ConnectionID, Config) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, ConnectionID, Config}). +add_reader_group(Connection, Config) -> + opcua_pubsub_connection:add_reader_group(Config, Connection). -add_writer_group(ConnectionID, Config) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, ConnectionID, Config}). +add_writer_group(_Connection, _Config) -> + error(not_implemented). % define a DataSetReader, this includes its DataSetFieldMetaData -add_data_set_reader(Conn_id, RG_id, DSR_cfg) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, Conn_id, RG_id, DSR_cfg}). 
+add_data_set_reader(Connection, RG_id, DSR_cfg) -> + opcua_pubsub_connection:add_data_set_reader(RG_id, DSR_cfg, Connection). % Add target variables to tell a DataSetReader where to write the decoded Fields -create_target_variables(Conn_id, RG_id, DSR_id, Config) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, Conn_id, RG_id, DSR_id, Config}). +create_target_variables(Connection, RG_id, DSR_id, Cfg) -> + opcua_pubsub_connection:create_target_variables(RG_id, DSR_id, Cfg, Connection). -add_data_set_writer(Conn_id, WG_id, DWR_cfg) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, Conn_id, WG_id, DWR_cfg}). +add_data_set_writer(Connection, WG_id, DWR_cfg) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, Connection, WG_id, DWR_cfg}). +% INTERNAL API %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +register_connection(ID) -> + gen_server:cast(?MODULE, {?FUNCTION_NAME, ID, self()}). % GEN_SERVER callbacks %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% init([]) -> {ok, #state{}}. +handle_call({start_connection, ConnectionConfig}, _, S) -> + ID = uuid:get_v4(), + {ok, _} = supervisor:start_child(opcua_pubsub_connection_sup, [ID, ConnectionConfig]), + {reply, {ok, ID}, S}; +handle_call({stop_connection, ConnectionID}, _, #state{connections = Conns} = S) -> + Pid = maps:get(ConnectionID, Conns), + ok = supervisor:terminate_child(opcua_pubsub_connection_sup, Pid), + NewMap = maps:remove(ConnectionID, Conns), + {reply, ok, S#state{connections = NewMap}}. + +handle_cast({register_connection, ID, Pid}, #state{connections = Conns} = State) -> + {noreply, State#state{connections = maps:put(ID, Pid, Conns)}}. -handle_call({add_connection, Url, Opts}, _, #state{connections = Conns} = S) -> - {ok, ID, Conn} = opcua_pubsub_connection:create(Url, Opts), - Conns2 = maps:put(ID, Conn, Conns), - {reply, {ok, ID}, S#state{connections = Conns2}}; -handle_call({add_reader_group, Conn_id, Opts}, _, #state{connections = Conns} = S) -> - Conn = maps:get(Conn_id, Conns), - {ok, ID, NewConn} = opcua_pubsub_connection:add_reader_group(Opts, Conn), - Conns2 = maps:put(Conn_id, NewConn, Conns), - {reply, {ok, ID}, S#state{connections = Conns2}}; -handle_call({add_data_set_reader, Conn_id, RG_id, Cfg}, _, - #state{connections = Conns} = S) -> - Conn = maps:get(Conn_id, Conns), - {ok, ID, NewConn} = - opcua_pubsub_connection:add_data_set_reader(RG_id, Cfg, Conn), - Conns2 = maps:put(Conn_id, NewConn, Conns), - {reply, {ok, ID}, S#state{connections = Conns2}}; -handle_call({create_target_variables, Conn_id, RG_id, DSR_id, Cfg}, _, - #state{connections = Conns} = S) -> - Conn = maps:get(Conn_id, Conns), - {ok, ID, NewConn} = - opcua_pubsub_connection:create_target_variables(RG_id, DSR_id, Cfg, Conn), - Conns2 = maps:put(Conn_id, NewConn, Conns), - {reply, {ok, ID}, S#state{connections = Conns2}}. - -handle_cast({new_network_message, ConnId, Binary}, - #state{connections = Connections} = S) -> - Connection = maps:get(ConnId, Connections), - {ok, NewConn} = - opcua_pubsub_connection:handle_network_message(Binary, Connection), - {noreply, S#state{connections = maps:put(ConnId, NewConn, Connections)}}; -handle_cast({remove_connection, ID}, #state{connections = Conns} = S) -> - ok = opcua_pubsub_connection:destroy(maps:get(ID, Conns)), - {noreply, S#state{connections = maps:remove(ID, Conns)}}. handle_info(_, S) -> {noreply, S}. 
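% With this refactoring the configuration API above is pure (it only transforms
% connection state terms) and a process is created only by start_connection/1;
% the expected flow is roughly the following sketch (Url and the *Cfg terms are
% placeholders):
%
%   {ok, Conn0}        = opcua_pubsub:new_connection(Url, #{}),
%   {ok, RgId, Conn1}  = opcua_pubsub:add_reader_group(Conn0, RgCfg),
%   {ok, DsrId, Conn2} = opcua_pubsub:add_data_set_reader(Conn1, RgId, DsrCfg),
%   {ok, Conn3}        = opcua_pubsub:create_target_variables(Conn2, RgId, DsrId, TargetVars),
%   {ok, ConnId}       = opcua_pubsub:start_connection(Conn3),
%   %% the spawned connection calls register_connection/1 from its init/1,
%   %% mapping ConnId to its pid in this server's state.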
diff --git a/src/opcua_pubsub_connection.erl b/src/opcua_pubsub_connection.erl index b02d15f..e32458e 100644 --- a/src/opcua_pubsub_connection.erl +++ b/src/opcua_pubsub_connection.erl @@ -1,42 +1,99 @@ -module(opcua_pubsub_connection). +-export([start_link/2]). +-export([send/2]). -export([create/2]). --export([destroy/1]). --export([handle_network_message/2]). -export([add_reader_group/2]). -export([add_data_set_reader/3]). -export([create_target_variables/4]). +-behaviour(gen_server). +-export([init/1, handle_call/3, handle_cast/2, handle_info/2]). + -record(state, { - connection_id, - middleware :: {supervisor:child_id(), module()}, + id, + config, + middleware :: {module(), term()}, reader_groups = #{}, writer_groups = #{} }). -include("opcua_pubsub.hrl"). -%%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% CONFIGURATION API %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% These help to build the Connection process state +% which holds the settings of all pubsub sub-entities -% Publised Data Set configuration: PDS are independent create(Url, Config) -> - PubSubConnectionID = uuid:get_v4(), - TransportProcessID = uuid:get_v4(), Uri = uri_string:parse(Url), Config2 = maps:merge(default_config(), Config), Config3 = maps:put(uri, Uri, Config2), - Config4 = maps:put(connection_id, PubSubConnectionID, Config3), - case start_transport(TransportProcessID, Config4) of - {ok, Module} -> {ok, PubSubConnectionID, #state{ - connection_id = PubSubConnectionID, - middleware = {TransportProcessID, Module} - }}; + {ok, #state{config = Config3}}. + +add_reader_group(ReaderGroupCfg, #state{reader_groups = RG} = S) -> + RG_id = uuid:get_v4(), + {ok, ReaderGroup} = opcua_pubsub_reader_group:new(ReaderGroupCfg), + RG2 = maps:put(RG_id, ReaderGroup, RG), + {ok, RG_id, S#state{reader_groups = RG2}}. + +add_data_set_reader(RG_id, DSR_cfg, #state{reader_groups = RGs} = S) -> + RG = maps:get(RG_id, RGs), + {ok, DSR_id, NewRG} = opcua_pubsub_reader_group:add_data_set_reader(DSR_cfg, RG), + NewGroups = maps:put(RG_id, NewRG, RGs), + {ok, DSR_id, S#state{reader_groups = NewGroups}}. + +create_target_variables(RG_id, DSR_id, Config, #state{reader_groups = RGs} = S) -> + RG = maps:get(RG_id, RGs), + {ok, NewRG} = opcua_pubsub_reader_group:create_target_variables(DSR_id, Config, RG), + NewGroups = maps:put(RG_id, NewRG, RGs), + {ok, S#state{reader_groups = NewGroups}}. + + +%%% GEN_SERVER API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +start_link(ID, ConfiguredState) -> + gen_server:start_link(?MODULE, [ID, ConfiguredState], []). + +send(Pid, Data) -> + gen_server:cast(Pid, {?FUNCTION_NAME, Data}). + +%%% GEN_SERVER CALLBACKS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +init([ID, #state{config = Config} = ConfiguredState]) -> + case start_transport(Config) of + {ok, Module, State} -> + opcua_pubsub:register_connection(ID), + {ok, ConfiguredState#state{ + id = ID, + middleware = {Module, State} + }}; {error, E} -> error(E) end. -destroy(#state{middleware = M}) -> - supervisor:terminate_child(opcua_pubsub_middleware_sup, M), - supervisor:delete_child(opcua_pubsub_middleware_sup, M). +handle_call(_, _, State) -> + {reply, ok, State}. + +handle_cast({send, Data}, #state{middleware = {M,S}} = State) -> + M:send(Data, S), + {noreply, State}. + +handle_info(Info, #state{middleware = {M, S}} = State) -> + {ok, NewS} = handle_network_message(M:handle_info(Info, S), State), + {noreply, NewS}. 
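% The middleware entry in #state{} is a {Module, ModuleState} pair: the owning
% connection process forwards send/2 calls and raw socket messages to it, and
% the transport's handle_info/2 must return the packet binary that
% handle_network_message/2 below decodes. A behaviour declaration could make
% that contract explicit, e.g.:
%
%   -callback init(Config :: map()) -> {ok, State :: term()} | {error, term()}.
%   -callback send(Data :: iodata(), State :: term()) -> ok.
%   -callback handle_info(Msg :: term(), State :: term()) -> binary().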
+ +%%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +start_transport(#{uri := #{scheme := <<"opc.udp">>}} = Config) -> + {ok, Transport} = opcua_pubsub_udp:init(Config), + {ok, opcua_pubsub_udp, Transport}; +start_transport(_Config) -> + {error, unsupported_transport}. + +default_config() -> #{ + publisher_id_type => ?UA_PUBLISHERIDTYPE_UINT16, + publisher_id => 1111, + name => "Unnamed" + }. handle_network_message(Binary, #state{reader_groups = RGs} = S) -> {Headers, Payload} = opcua_pubsub_uadp:decode_network_message_headers(Binary), @@ -66,54 +123,10 @@ handle_network_message(Binary, #state{reader_groups = RGs} = S) -> {ok, S#state{reader_groups = NewRGs}} end. - -add_reader_group(ReaderGroupCfg, #state{reader_groups = RG} = S) -> - RG_id = uuid:get_v4(), - {ok, ReaderGroup} = opcua_pubsub_reader_group:new(ReaderGroupCfg), - RG2 = maps:put(RG_id, ReaderGroup, RG), - {ok, RG_id, S#state{reader_groups = RG2}}. - -add_data_set_reader(RG_id, DSR_cfg, - #state{reader_groups = RGs} = S) -> - RG = maps:get(RG_id, RGs), - {ok, DSR_id, NewRG} = opcua_pubsub_reader_group:add_data_set_reader(DSR_cfg, RG), - NewGroups = maps:put(RG_id, NewRG, RGs), - {ok, DSR_id, S#state{reader_groups = NewGroups}}. - -create_target_variables(RG_id, DSR_id, Config, #state{reader_groups = RGs} = S) -> - RG = maps:get(RG_id, RGs), - {ok, NewRG} = opcua_pubsub_reader_group:create_target_variables(DSR_id, Config, RG), - NewGroups = maps:put(RG_id, NewRG, RGs), - {ok, DSR_id, S#state{reader_groups = NewGroups}}. - - -%%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -start_transport(ID, #{uri := #{scheme := <<"opc.udp">>}} = Config) -> - start_supervised_transport(ID, opcua_pubsub_udp, [Config]); -start_transport(_ID, _Config) -> - {error, unsupported_transport}. - -start_supervised_transport(ID, Module, Args) -> - Child = #{ - id => ID, - start => {Module, start_link, Args}, - restart => transient - }, - case supervisor:start_child(opcua_pubsub_middleware_sup, Child) of - {ok, _Pid} -> {ok, Module}; - E -> E - end. - -default_config() -> #{ - publisher_id_type => ?UA_PUBLISHERIDTYPE_UINT16, - publisher_id => 1111, - name => "Unnamed" - }. - dispatchMessages(BundledMessages, InterestedReaders) -> [begin NewRG = opcua_pubsub_reader_group:dispatch_messages(BundledMessages, DSR_ids, RG), {RG_id, NewRG} - end || {RG_id, RG, DSR_ids} <- InterestedReaders]. \ No newline at end of file + end || {RG_id, RG, DSR_ids} <- InterestedReaders]. + diff --git a/src/opcua_pubsub_middleware_sup.erl b/src/opcua_pubsub_connection_sup.erl similarity index 65% rename from src/opcua_pubsub_middleware_sup.erl rename to src/opcua_pubsub_connection_sup.erl index 57d0b95..22f1ef1 100644 --- a/src/opcua_pubsub_middleware_sup.erl +++ b/src/opcua_pubsub_connection_sup.erl @@ -1,4 +1,4 @@ --module(opcua_pubsub_middleware_sup). +-module(opcua_pubsub_connection_sup). -behaviour(supervisor). @@ -20,4 +20,7 @@ start_link() -> %%% BEHAVIOUR supervisor CALLBACK FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% init([]) -> - {ok, {#{strategy => one_for_one}, []}}. + ChildSpecs = [#{id => none, + start => {opcua_pubsub_connection, start_link, []}, + shutdown => brutal_kill}], + {ok, {#{strategy => simple_one_for_one}, ChildSpecs}}. 
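% With a simple_one_for_one strategy the child spec above acts as a template:
% supervisor:start_child(opcua_pubsub_connection_sup, [ID, Config]) appends its
% argument list to the {opcua_pubsub_connection, start_link, []} MFA, so every
% connection ends up started as opcua_pubsub_connection:start_link(ID, Config),
% which is exactly how opcua_pubsub:start_connection/1 launches it.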
diff --git a/src/opcua_pubsub_example.erl b/src/opcua_pubsub_example.erl index 8405b2e..a19ba4f 100644 --- a/src/opcua_pubsub_example.erl +++ b/src/opcua_pubsub_example.erl @@ -8,10 +8,10 @@ run() -> Url = <<"opc.udp://224.0.0.22:4840">>, ConnectionConfig = #{}, - {ok, ConnectionID} = opcua_pubsub:add_connection(Url, ConnectionConfig), + {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig), ReaderGroupconfig = #{ name => <<"Simple Reader Group">>}, - {ok, RG_id} = opcua_pubsub:add_reader_group(ConnectionID, ReaderGroupconfig), + {ok, RG_id, Conn2} = opcua_pubsub:add_reader_group(Conn, ReaderGroupconfig), DSR_config = #data_set_reader_config{ name = <<"Example Reader">>, @@ -31,8 +31,8 @@ run() -> }] } }, - {ok, DSR_id} = - opcua_pubsub:add_data_set_reader(ConnectionID, RG_id, DSR_config), + {ok, DSR_id, Conn3} = + opcua_pubsub:add_data_set_reader(Conn2, RG_id, DSR_config), % A dedicated object on the server (or any address space available) % containing all variables that will be updated by the DSR @@ -45,5 +45,7 @@ run() -> target_node_id = VarNodeId, attribute_id = ?UA_ATTRIBUTEID_VALUE }, - opcua_pubsub:create_target_variables(ConnectionID,RG_id,DSR_id,[TGT]), + {ok, Conn4} = opcua_pubsub:create_target_variables(Conn3,RG_id,DSR_id,[TGT]), + + {ok, ID} = opcua_pubsub:start_connection(Conn4), ok. diff --git a/src/opcua_pubsub_sup.erl b/src/opcua_pubsub_sup.erl index ff8add8..fbe94cd 100644 --- a/src/opcua_pubsub_sup.erl +++ b/src/opcua_pubsub_sup.erl @@ -21,7 +21,7 @@ start_link() -> init([]) -> Childs = [ - supervisor(opcua_pubsub_middleware_sup, []), + supervisor(opcua_pubsub_connection_sup, []), worker(opcua_pubsub, [])], {ok, {#{strategy => one_for_all}, Childs}}. diff --git a/src/opcua_pubsub_udp.erl b/src/opcua_pubsub_udp.erl index 239010e..e29fdd3 100644 --- a/src/opcua_pubsub_udp.erl +++ b/src/opcua_pubsub_udp.erl @@ -1,32 +1,21 @@ -module(opcua_pubsub_udp). --export([start_link/1]). --export([send/1]). - --behaviour(gen_server). --export([init/1, handle_call/3, handle_cast/2, handle_info/2]). +-export([init/1, send/2, handle_info/2]). -include_lib("kernel/include/logger.hrl"). -record(state, { - connection_id, socket }). -start_link(Opts) -> - gen_server:start_link({local, ?MODULE}, ?MODULE, [Opts], []). - -send(Data) -> - gen_server:cast(?MODULE, {?FUNCTION_NAME, Data}). -init([#{ - connection_id := ConnectionId, +init(#{ uri := #{ host := BinaryIP, port := Port } - }]) -> + }) -> MulticastGroup = parse_ip(BinaryIP), InterfaceIP = get_ip_of_valid_interface(), ?LOG_DEBUG("PubSub UDP using interface ~p",[InterfaceIP]), @@ -41,27 +30,16 @@ init([#{ {ok, Socket} -> inet:setopts(Socket, [{add_membership,{MulticastGroup,InterfaceIP}}]), {ok, #state{ - connection_id = ConnectionId, socket = Socket }}; {error, Reason} -> {error, Reason} end. -handle_call(_, _, State) -> - {reply, ok, State}. - -% handle_cast(disconnect, State) -> -% gen_udp:close(State#state.socket), -% {noreply, State#state{ socket = undefined}}; -handle_cast({send, Data}, #state{socket = Socket} = State) -> - gen_udp:send(Socket, Data), - {noreply, State}. +send(Data, #state{socket = Socket}) -> + ok = gen_udp:send(Socket, Data). -handle_info({udp, Socket, _IP, _Port, Packet}, - #state{socket = Socket, connection_id = ConnectionId} = S) -> - % io:format("~n~nFrom: ~p~nPort: ~p~nData: ~p~n",[_IP,_Port,Packet]), - opcua_pubsub:new_network_message(ConnectionId, Packet), - {noreply, S}. +handle_info({udp, Socket, _IP, _Port, Packet}, #state{socket = Socket} = S) -> + Packet. 
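The UDP transport above joins a multicast group on a locally picked interface; part of the socket option list is hidden by the diff context, so here is a self-contained sketch of the same idea using standard gen_udp/inet options. The port and group mirror the example URL, the interface address is illustrative.

%% Minimal multicast receiver sketch.
open_multicast(Port, Group, IfAddr) ->
    {ok, Socket} = gen_udp:open(Port, [binary,
                                       {reuseaddr, true},
                                       {active, true},
                                       {multicast_if, IfAddr}]),
    ok = inet:setopts(Socket, [{add_membership, {Group, IfAddr}}]),
    {ok, Socket}.

%% Example: {ok, S} = open_multicast(4840, {224,0,0,22}, {0,0,0,0}),
%% after which every datagram arrives as {udp, S, FromIP, FromPort, Packet}.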
% helpers %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% From d5a7e91543518791fa4632bebbb224dae178694c Mon Sep 17 00:00:00 2001 From: Luca Succi Date: Wed, 25 Jan 2023 14:54:03 +0100 Subject: [PATCH 4/7] Add glue code for writer side initialization --- src/opcua_pubsub.erl | 67 ++++++++--- src/opcua_pubsub.hrl | 75 +++++++++++-- src/opcua_pubsub_connection.erl | 30 +++-- ...er.erl => opcua_pubsub_dataset_reader.erl} | 61 +++++----- src/opcua_pubsub_dataset_writer.erl | 45 ++++++++ src/opcua_pubsub_example.erl | 66 +++++++++-- src/opcua_pubsub_reader_group.erl | 28 ++--- src/opcua_pubsub_uadp.erl | 106 +++++++++--------- src/opcua_pubsub_writer_group.erl | 50 +++++++++ 9 files changed, 387 insertions(+), 141 deletions(-) rename src/{opcua_pubsub_data_set_reader.erl => opcua_pubsub_dataset_reader.erl} (75%) create mode 100644 src/opcua_pubsub_dataset_writer.erl create mode 100644 src/opcua_pubsub_writer_group.erl diff --git a/src/opcua_pubsub.erl b/src/opcua_pubsub.erl index 3f650d9..5241261 100644 --- a/src/opcua_pubsub.erl +++ b/src/opcua_pubsub.erl @@ -7,27 +7,30 @@ -export([init/1, handle_call/3, handle_cast/2, handle_info/2]). --export([add_published_data_set/1]). --export([add_data_set_field/2]). +-export([add_published_dataset/1]). +-export([add_published_dataset_field/3]). -export([new_connection/2]). -export([add_reader_group/2]). -export([add_writer_group/2]). --export([add_data_set_reader/3]). +-export([add_dataset_reader/3]). -export([create_target_variables/4]). --export([add_data_set_writer/3]). +-export([add_dataset_writer/4]). -export([start_connection/1]). -export([stop_connection/1]). -export([register_connection/1]). +-include("opcua_pubsub.hrl"). + -record(state, { + state, connections = #{},% Maps Ids to Pids - published_data_sets = #{} + published_datasets = #{} }). %%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -43,12 +46,12 @@ stop_connection(ConnectionID) -> gen_server:call(?MODULE, {?FUNCTION_NAME, ConnectionID}). % Publised Data Set configuration: PDS are independent -add_published_data_set(Config) -> +add_published_dataset(Config) -> gen_server:call(?MODULE, {?FUNCTION_NAME, Config}). % Adds definitions per-field for a PublishedDataSet -add_data_set_field(PublishedDataSetID, FieldConfig) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, PublishedDataSetID, FieldConfig}). +add_published_dataset_field(PDS_ID, FieldsMetaData, FieldsSource) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, PDS_ID, FieldsMetaData, FieldsSource}). new_connection(Url, Opts) -> opcua_pubsub_connection:create(Url, Opts). @@ -57,19 +60,19 @@ new_connection(Url, Opts) -> add_reader_group(Connection, Config) -> opcua_pubsub_connection:add_reader_group(Config, Connection). -add_writer_group(_Connection, _Config) -> - error(not_implemented). +add_writer_group(Connection, Config) -> + opcua_pubsub_connection:add_writer_group(Config, Connection). % define a DataSetReader, this includes its DataSetFieldMetaData -add_data_set_reader(Connection, RG_id, DSR_cfg) -> - opcua_pubsub_connection:add_data_set_reader(RG_id, DSR_cfg, Connection). +add_dataset_reader(Connection, RG_id, DSR_cfg) -> + opcua_pubsub_connection:add_dataset_reader(RG_id, DSR_cfg, Connection). % Add target variables to tell a DataSetReader where to write the decoded Fields create_target_variables(Connection, RG_id, DSR_id, Cfg) -> opcua_pubsub_connection:create_target_variables(RG_id, DSR_id, Cfg, Connection). 
-add_data_set_writer(Connection, WG_id, DWR_cfg) -> - gen_server:call(?MODULE, {?FUNCTION_NAME, Connection, WG_id, DWR_cfg}). +add_dataset_writer(Connection, WG_id, PDS_id, DWR_cfg) -> + opcua_pubsub_connection:add_dataset_writer(WG_id, PDS_id, DWR_cfg, Connection). % INTERNAL API %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -78,7 +81,7 @@ register_connection(ID) -> % GEN_SERVER callbacks %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% init([]) -> - {ok, #state{}}. + {ok, #state{state = operational}}. handle_call({start_connection, ConnectionConfig}, _, S) -> ID = uuid:get_v4(), @@ -88,7 +91,18 @@ handle_call({stop_connection, ConnectionID}, _, #state{connections = Conns} = S) Pid = maps:get(ConnectionID, Conns), ok = supervisor:terminate_child(opcua_pubsub_connection_sup, Pid), NewMap = maps:remove(ConnectionID, Conns), - {reply, ok, S#state{connections = NewMap}}. + {reply, ok, S#state{connections = NewMap}}; +handle_call({add_published_dataset, Config}, _, #state{published_datasets = PDSs} = S) -> + PDS = h_add_published_dataset(Config), + ID = uuid:get_v4(), + NewMap = maps:put(ID, PDS, PDSs), + {reply, {ok, ID}, S#state{published_datasets = NewMap}}; +handle_call({add_published_dataset_field, PDS_id, FieldsMetadata, FieldsSources}, + _, #state{published_datasets = PDSs} = S) -> + PDS = maps:get(PDS_id, PDSs), + NewPDS = h_add_published_dataset_field(PDS, FieldsMetadata, FieldsSources), + NewMap = maps:put(PDS_id, NewPDS, PDSs), + {reply, ok, S#state{published_datasets = NewMap}}. handle_cast({register_connection, ID, Pid}, #state{connections = Conns} = State) -> {noreply, State#state{connections = maps:put(ID, Pid, Conns)}}. @@ -98,3 +112,24 @@ handle_info(_, S) -> %%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +h_add_published_dataset(Config) -> + % TODO add the PDS as object to the address space + Config. + +h_add_published_dataset_field( + #published_dataset{ + dataset_metadata = #dataset_metadata{fields = MDFields} = DM, + dataset_source = DSSource + } = PDS, + FieldsMetaData, FieldsSource) -> + + % TODO do more then just copy the provided configuration + % check for correctness in the config + % and show this stuff in the address space + PDS#published_dataset{ + dataset_metadata = DM#dataset_metadata{ + fields = MDFields ++ FieldsMetaData + }, + dataset_source = DSSource ++ FieldsSource + }. \ No newline at end of file diff --git a/src/opcua_pubsub.hrl b/src/opcua_pubsub.hrl index a5fd442..6dee56f 100644 --- a/src/opcua_pubsub.hrl +++ b/src/opcua_pubsub.hrl @@ -4,10 +4,12 @@ -define(UA_PUBLISHERIDTYPE_UINT64,3). -define(UA_PUBLISHERIDTYPE_STRING,4). +-type pubsub_state_machine() :: operational | error | enabled | paused. + -record(dataset_mirror,{}). -record(target_variable,{ - data_set_field_id = 0 :: non_neg_integer(), + dataset_field_id = 0 :: non_neg_integer(), receiver_index_range, target_node_id, % node_id to write to attribute_id, % attribute to write @@ -16,7 +18,7 @@ override_value }). --record(data_set_field_metadata,{ +-record(dataset_field_metadata,{ name :: string(), description :: string(), field_flags, % This flag indicates if the field is promoted to the NetworkMessage header @@ -25,24 +27,73 @@ valueRank :: integer(), array_dimensions, maxStringLength, - data_set_field_id = 0 :: non_neg_integer(), + dataset_field_id = 0 :: non_neg_integer(), properties }). 
--record(data_set_metadata,{ +-record(dataset_metadata,{ name, description, - fields, - data_set_class_id, - configuration_version :: undefined | {non_neg_integer(),non_neg_integer()} + fields = [] :: list(#dataset_field_metadata{}), + dataset_class_id, + configuration_version :: undefined | {non_neg_integer(),non_neg_integer()} }). - --record(data_set_reader_config,{ - name :: binary(), +-record(dataset_reader_config,{ + name :: binary(), publisher_id, publisher_id_type, writer_group_id, - data_set_writer_id, - data_set_metadata :: #data_set_metadata{} + dataset_writer_id, + dataset_metadata :: #dataset_metadata{} +}). + +-record(published_variable,{ + published_variable, + attribute_id, + sampling_interval_hint, + deadband_type = 0 :: 0 | 1 | 2, + deadband_value = 0.0 :: float(), + index_rande, + substitute_value, + metadata_properties = [] }). + +-record(published_events, { + event_notifier :: opcua:node_id(), + selected_fields :: list(), + filter +}). + +-type published_dataset_source() :: list(#published_variable{}) | + #published_events{}. + +-record(published_dataset,{ + name, + dataset_folder :: list(),% path to the destination folder + dataset_metadata :: #dataset_metadata{}, + extension_fields, + dataset_source = [] :: published_dataset_source() +}). + +-record(writer_group_config,{ + enabled, + name, + writer_group_id, + publishing_interval, + keep_alive_time, + priority, + locale_ids, + transport_settings, + message_settings +}). + +-record(dataset_writer_config,{ + name :: binary(), + dataset_writer_id :: non_neg_integer(), + dataset_field_content_mask, + keyframe_count = 1 :: non_neg_integer(), + dataset_name :: string(), + transport_settings, + message_settings +}). \ No newline at end of file diff --git a/src/opcua_pubsub_connection.erl b/src/opcua_pubsub_connection.erl index e32458e..d02eba9 100644 --- a/src/opcua_pubsub_connection.erl +++ b/src/opcua_pubsub_connection.erl @@ -1,15 +1,21 @@ -module(opcua_pubsub_connection). --export([start_link/2]). --export([send/2]). -export([create/2]). -export([add_reader_group/2]). --export([add_data_set_reader/3]). +-export([add_dataset_reader/3]). -export([create_target_variables/4]). +-export([add_writer_group/2]). +-export([add_dataset_writer/4]). + +-export([start_link/2]). +-export([send/2]). + -behaviour(gen_server). -export([init/1, handle_call/3, handle_cast/2, handle_info/2]). +-include("opcua_pubsub.hrl"). + -record(state, { id, config, @@ -18,7 +24,6 @@ writer_groups = #{} }). --include("opcua_pubsub.hrl"). % CONFIGURATION API %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % These help to build the Connection process state @@ -36,9 +41,9 @@ add_reader_group(ReaderGroupCfg, #state{reader_groups = RG} = S) -> RG2 = maps:put(RG_id, ReaderGroup, RG), {ok, RG_id, S#state{reader_groups = RG2}}. -add_data_set_reader(RG_id, DSR_cfg, #state{reader_groups = RGs} = S) -> +add_dataset_reader(RG_id, DSR_cfg, #state{reader_groups = RGs} = S) -> RG = maps:get(RG_id, RGs), - {ok, DSR_id, NewRG} = opcua_pubsub_reader_group:add_data_set_reader(DSR_cfg, RG), + {ok, DSR_id, NewRG} = opcua_pubsub_reader_group:add_dataset_reader(DSR_cfg, RG), NewGroups = maps:put(RG_id, NewRG, RGs), {ok, DSR_id, S#state{reader_groups = NewGroups}}. @@ -48,6 +53,17 @@ create_target_variables(RG_id, DSR_id, Config, #state{reader_groups = RGs} = S) NewGroups = maps:put(RG_id, NewRG, RGs), {ok, S#state{reader_groups = NewGroups}}. 
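One point worth keeping in mind with the new publisher-side records: add_published_dataset_field/3 appends the field metadata and the field sources as two parallel lists, and the writer path completed later in the series pairs them by position. A short hedged sketch, with the node id and names picked for illustration in the style of the example module:

%% Position N of the metadata list describes the value read from position N
%% of the source list, so the two must stay aligned.
add_server_time_field(PDS_id) ->
    FieldsMetaData = [#dataset_field_metadata{name = "Server time",
                                              builtin_type = date_time,
                                              data_type = opcua_node:id(date_time),
                                              valueRank = -1}],
    FieldsSource = [#published_variable{published_variable = ?NNID(2258),
                                        attribute_id = ?UA_ATTRIBUTEID_VALUE}],
    ok = opcua_pubsub:add_published_dataset_field(PDS_id,
                                                  FieldsMetaData, FieldsSource).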
+add_writer_group(ReaderGroupCfg, #state{writer_groups = WGs} = S) -> + WG_id = uuid:get_v4(), + {ok, WriterGroup} = opcua_pubsub_writer_group:new(ReaderGroupCfg), + WGs2 = maps:put(WG_id, WriterGroup, WGs), + {ok, WG_id, S#state{writer_groups = WGs2}}. + +add_dataset_writer(WG_id, PDS_id, WriterCfg, #state{writer_groups = WGs} = S) -> + WG = maps:get(WG_id, WGs), + {ok, DSW_is, NewWriterGroup} = opcua_pubsub_writer_group:add_dataset_writer(PDS_id, WriterCfg, WG), + WGs2 = maps:put(WG_id, NewWriterGroup, WGs), + {ok, DSW_is, S#state{writer_groups = WGs2}}. %%% GEN_SERVER API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -112,7 +128,7 @@ handle_network_message(Binary, #state{reader_groups = RGs} = S) -> % opcua_pubsub_security: ... not_implemented yet % Then we decode all messages DataSetMessages = opcua_pubsub_uadp:decode_payload(Headers, Payload), - #{payload_header := #{data_set_writer_ids := DSW_ids}} = Headers, + #{payload_header := #{dataset_writer_ids := DSW_ids}} = Headers, BundledMessages = lists:zip(DSW_ids, DataSetMessages), % After processing, the DSRs could change state. % All groups must be updated diff --git a/src/opcua_pubsub_data_set_reader.erl b/src/opcua_pubsub_dataset_reader.erl similarity index 75% rename from src/opcua_pubsub_data_set_reader.erl rename to src/opcua_pubsub_dataset_reader.erl index 56afe3e..1ae9d58 100644 --- a/src/opcua_pubsub_data_set_reader.erl +++ b/src/opcua_pubsub_dataset_reader.erl @@ -1,4 +1,4 @@ --module(opcua_pubsub_data_set_reader). +-module(opcua_pubsub_dataset_reader). -export([new/1]). -export([is_interested/2]). @@ -15,25 +15,24 @@ publisher_id, publisher_id_type, writer_group_id, - data_set_writer_id, - data_set_metadata :: #data_set_metadata{}, + dataset_writer_id, + dataset_metadata :: #dataset_metadata{}, subscribed_dataset :: undefined | [#target_variable{}] | #dataset_mirror{} }). - -new(#data_set_reader_config{ +new(#dataset_reader_config{ name = Name, publisher_id = PubId, publisher_id_type = PubIdType, writer_group_id = WGId, - data_set_writer_id = DataSetWriterId, - data_set_metadata = DataSetMetadata}) -> + dataset_writer_id = DataSetWriterId, + dataset_metadata = DataSetMetadata}) -> {ok, #state{name = Name, publisher_id = PubId, publisher_id_type = PubIdType, writer_group_id = WGId, - data_set_writer_id = DataSetWriterId, - data_set_metadata = set_metadata_fields_ids(DataSetMetadata)}}. + dataset_writer_id = DataSetWriterId, + dataset_metadata = set_metadata_fields_ids(DataSetMetadata)}}. create_target_variables(Variables, State) -> {ok, State#state{subscribed_dataset = set_tgt_var_ids(Variables)}}. @@ -47,49 +46,49 @@ is_interested(#{ writer_group_id := WG_id }, payload_header := #{ - data_set_writer_ids := DSW_ids + dataset_writer_ids := DSW_ids } } = _Headers, #state{ publisher_id = Pub_id, writer_group_id = WG_id, - data_set_writer_id = DataSetWriterId}) -> + dataset_writer_id = DataSetWriterId}) -> lists:member(DataSetWriterId, DSW_ids); is_interested(_, _) -> false. process_messages([], State) -> State; process_messages([{DataSetWriterId, {Header, Data}} | Messages], - #state{data_set_writer_id = DataSetWriterId} = State) -> + #state{dataset_writer_id = DataSetWriterId} = State) -> % io:format("~p handling ~p~n",[?MODULE,Header]), % TODO: add msg version check, state machine management ecc.. 
- {DataSet, NewState} = decode_data_set_message(Header, Data, State), + {DataSet, NewState} = decode_dataset_message(Header, Data, State), NewState2 = update_subscribed_dataset(DataSet, NewState), process_messages(Messages, NewState2); process_messages([ _| Messages], State) -> process_messages(Messages, State). -decode_data_set_message( % case of invalid message - #{data_set_flags1 := - #{data_set_msg_valid := 0}}, +decode_dataset_message( % case of invalid message + #{dataset_flags1 := + #{dataset_msg_valid := 0}}, _, S) -> {[], S}; -decode_data_set_message( +decode_dataset_message( #{ - data_set_flags1 := #{ - data_set_msg_valid := 1, + dataset_flags1 := #{ + dataset_msg_valid := 1, field_encoding := Encoding, - data_set_msg_seq_num := _, + dataset_msg_seq_num := _, status := _, config_ver_minor_ver := _, config_ver_major_ver := _, - data_set_flags2 := _ + dataset_flags2 := _ }, - data_set_flags2 := #{ + dataset_flags2 := #{ msg_type := MessageType, % keyframe / deltaframe / event ecc... timestamp := _, picoseconds := _ }, - data_set_seq_num := _, + dataset_seq_num := _, timestamp := _, picoseconds := _, status := _, @@ -98,7 +97,7 @@ decode_data_set_message( }, Data, #state{ - data_set_metadata = #data_set_metadata{ + dataset_metadata = #dataset_metadata{ fields = FieldsMetaData, configuration_version = _Ver} } = S) -> @@ -117,7 +116,7 @@ decode_fields(_Encoding, _MessageType, _Fields, _Data) -> decode_keyframe( _, _, _, <<>>, DataSet) -> lists:reverse(DataSet); decode_keyframe(Encoding, [FieldMD|NextMDMD], FieldCount, Binary, DataSet) -> - {Decoded, Rest} = opcua_pubsub_uadp:decode_data_set_message_field(Encoding, + {Decoded, Rest} = opcua_pubsub_uadp:decode_dataset_message_field(Encoding, FieldMD, Binary), Data = {FieldMD, Decoded}, @@ -137,8 +136,8 @@ update_subscribed_dataset(DataSet, #state{subscribed_dataset = TGT_vars} = S) update_target_variables([], TGT_vars) -> ok; update_target_variables([{FieldMD, Variable}|DataSet], TGT_vars) -> - FieldId = FieldMD#data_set_field_metadata.data_set_field_id, - [TGT|_] = [ Var || #target_variable{data_set_field_id = DataSetFieldId} = Var + FieldId = FieldMD#dataset_field_metadata.dataset_field_id, + [TGT|_] = [ Var || #target_variable{dataset_field_id = DataSetFieldId} = Var <- TGT_vars, DataSetFieldId == FieldId], TargetNodeId = TGT#target_variable.target_node_id, AttrId = TGT#target_variable.attribute_id, @@ -149,12 +148,12 @@ update_tgt_var_attribute(TargetNodeId, ?UA_ATTRIBUTEID_VALUE, #opcua_variant{value = Value}) -> opcua_server:set_value(TargetNodeId, Value). -set_metadata_fields_ids(#data_set_metadata{fields = Fields} = DSMD) -> +set_metadata_fields_ids(#dataset_metadata{fields = Fields} = DSMD) -> Ids = lists:seq(0, length(Fields) - 1), - DSMD#data_set_metadata{fields = - [F#data_set_field_metadata{data_set_field_id = I} + DSMD#dataset_metadata{fields = + [F#dataset_field_metadata{dataset_field_id = I} || {I,F} <- lists:zip(Ids, Fields)]}. set_tgt_var_ids(Varables) -> Ids = lists:seq(0, length(Varables) - 1), - [V#target_variable{data_set_field_id = I} || {I,V} <- lists:zip(Ids, Varables)]. + [V#target_variable{dataset_field_id = I} || {I,V} <- lists:zip(Ids, Varables)]. diff --git a/src/opcua_pubsub_dataset_writer.erl b/src/opcua_pubsub_dataset_writer.erl new file mode 100644 index 0000000..577c915 --- /dev/null +++ b/src/opcua_pubsub_dataset_writer.erl @@ -0,0 +1,45 @@ +-module(opcua_pubsub_dataset_writer). + +-export([new/2]). +-export([write/1]). + +-include("opcua.hrl"). +-include("opcua_pubsub.hrl"). 
+-include_lib("kernel/include/logger.hrl"). + +-record(state, { + state = operational :: pubsub_state_machine(), + name, + dataset_writer_id, + dataset_field_content_mask, + keyframe_count, + dataset_name, + transport_settings, + message_settings, + connected_published_dataset +}). + + +new(PDS_ID, #dataset_writer_config{ + name = N, + dataset_writer_id = DS_WID, + dataset_field_content_mask = CM, + keyframe_count = KF_C, + dataset_name = DN, + transport_settings = TS, + message_settings = MS + }) -> + {ok, #state{ + state = operational, + name = N, + dataset_writer_id = DS_WID, + dataset_field_content_mask = CM, + keyframe_count = KF_C, + dataset_name = DN, + transport_settings = TS, + message_settings = MS, + connected_published_dataset = PDS_ID + }}. + +write(#state{} = S) -> + S. diff --git a/src/opcua_pubsub_example.erl b/src/opcua_pubsub_example.erl index a19ba4f..73673db 100644 --- a/src/opcua_pubsub_example.erl +++ b/src/opcua_pubsub_example.erl @@ -1,11 +1,12 @@ -module(opcua_pubsub_example). --export([run/0]). +-export([subscription/0]). +-export([publication/0]). -include("opcua.hrl"). -include("opcua_pubsub.hrl"). -run() -> +subscription() -> Url = <<"opc.udp://224.0.0.22:4840">>, ConnectionConfig = #{}, {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig), @@ -13,17 +14,17 @@ run() -> ReaderGroupconfig = #{ name => <<"Simple Reader Group">>}, {ok, RG_id, Conn2} = opcua_pubsub:add_reader_group(Conn, ReaderGroupconfig), - DSR_config = #data_set_reader_config{ + DSR_config = #dataset_reader_config{ name = <<"Example Reader">>, publisher_id = 2234, publisher_id_type = uint16, writer_group_id = 100, - data_set_writer_id = 62541, - data_set_metadata = #data_set_metadata{ + dataset_writer_id = 62541, + dataset_metadata = #dataset_metadata{ name = "DataSet 1", description = "An example from 62541", fields = [ - #data_set_field_metadata{ + #dataset_field_metadata{ name = "DateTime 1", builtin_type = date_time, data_type = opcua_node:id(date_time), @@ -32,7 +33,7 @@ run() -> } }, {ok, DSR_id, Conn3} = - opcua_pubsub:add_data_set_reader(Conn2, RG_id, DSR_config), + opcua_pubsub:add_dataset_reader(Conn2, RG_id, DSR_config), % A dedicated object on the server (or any address space available) % containing all variables that will be updated by the DSR @@ -41,7 +42,7 @@ run() -> undefined, date_time, 0), TGT = #target_variable{ - data_set_field_id = 0, + dataset_field_id = 0, target_node_id = VarNodeId, attribute_id = ?UA_ATTRIBUTEID_VALUE }, @@ -49,3 +50,52 @@ run() -> {ok, ID} = opcua_pubsub:start_connection(Conn4), ok. 
+ +publication() -> + + PDS_cfg = #published_dataset{ + name = "PublishedDataSet Example", + dataset_metadata = #dataset_metadata{ + name = "My Metadata" + } + }, + {ok, PDS_id} = opcua_pubsub:add_published_dataset(PDS_cfg), + + % we specify the fields metadata and their sources + % In this case we list available variables as sources + FieldsMetaData = [#dataset_field_metadata{ + name = "DateTime 1", + builtin_type = date_time, + data_type = opcua_node:id(date_time), + valueRank = -1 % a scalar, + }], + FieldsSource = [ + #published_variable{ + published_variable = ?NNID(2256), + attribute_id = ?UA_ATTRIBUTEID_VALUE + }], + ok = opcua_pubsub:add_published_dataset_field(PDS_id, FieldsMetaData, FieldsSource), + + Url = <<"opc.udp://224.0.0.22:4840">>, + ConnectionConfig = #{ + publisher_id_type => uint16, + publisher_id => 2234 + }, + {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig), + + WriterGroupconfig = #writer_group_config{ + name = <<"Simple Writer Group">>, + writer_group_id = 100, + publishing_interval = 100 + }, + {ok, WG_id, Conn2} = opcua_pubsub:add_writer_group(Conn, WriterGroupconfig), + + DataSetWriterConfig = #dataset_writer_config{ + name = <<"Simple DataSet Writer">>, + dataset_writer_id = 62541, + keyframe_count = 10 + }, + {ok, DSW_id, Conn3} = opcua_pubsub:add_dataset_writer(Conn2, WG_id, + PDS_id, DataSetWriterConfig), + ok. + diff --git a/src/opcua_pubsub_reader_group.erl b/src/opcua_pubsub_reader_group.erl index 9bdfbc7..bbd381d 100644 --- a/src/opcua_pubsub_reader_group.erl +++ b/src/opcua_pubsub_reader_group.erl @@ -1,46 +1,46 @@ -module(opcua_pubsub_reader_group). -export([new/1]). --export([add_data_set_reader/2]). +-export([add_dataset_reader/2]). -export([filter_readers/2]). -export([dispatch_messages/3]). -export([create_target_variables/3]). -record(state, { name, - data_set_readers = #{} + dataset_readers = #{} }). new(#{name := RG_name}) -> - {ok, #state{}}. + {ok, #state{name = RG_name}}. -add_data_set_reader(DSR_cfg, #state{data_set_readers = DSRs} = S) -> +add_dataset_reader(DSR_cfg, #state{dataset_readers = DSRs} = S) -> DSR_id = uuid:get_v4(), - {ok, DSR} = opcua_pubsub_data_set_reader:new(DSR_cfg), + {ok, DSR} = opcua_pubsub_dataset_reader:new(DSR_cfg), NewDSRs = maps:put(DSR_id, DSR, DSRs), - {ok, DSR_id, S#state{data_set_readers = NewDSRs}}. + {ok, DSR_id, S#state{dataset_readers = NewDSRs}}. -create_target_variables(DSR_id, Config,#state{data_set_readers = DSRs} = S) -> +create_target_variables(DSR_id, Config,#state{dataset_readers = DSRs} = S) -> DSR = maps:get(DSR_id, DSRs), - {ok, NewDSR} = opcua_pubsub_data_set_reader:create_target_variables(Config, DSR), + {ok, NewDSR} = opcua_pubsub_dataset_reader:create_target_variables(Config, DSR), NewDSRs = maps:put(DSR_id, NewDSR, DSRs), - {ok, S#state{data_set_readers = NewDSRs}}. + {ok, S#state{dataset_readers = NewDSRs}}. -filter_readers(Headers, #state{data_set_readers = DSRs}) -> +filter_readers(Headers, #state{dataset_readers = DSRs}) -> [DSR_id || {DSR_id, DSR} <- maps:to_list(DSRs), - opcua_pubsub_data_set_reader:is_interested(Headers, DSR)]. + opcua_pubsub_dataset_reader:is_interested(Headers, DSR)]. 
-dispatch_messages(BundledMessages, DSR_ids, #state{data_set_readers = DSRs} = S) -> +dispatch_messages(BundledMessages, DSR_ids, #state{dataset_readers = DSRs} = S) -> Updated = [ begin DSR = maps:get(ID, DSRs), - NewDSR = opcua_pubsub_data_set_reader:process_messages(BundledMessages, DSR), + NewDSR = opcua_pubsub_dataset_reader:process_messages(BundledMessages, DSR), {ID, NewDSR} end || ID <- DSR_ids], NewDSRs = lists:foldl(fun ({ID,Value}, Map) -> maps:put(ID, Value, Map) end, DSRs, Updated), - S#state{data_set_readers = NewDSRs}. + S#state{dataset_readers = NewDSRs}. diff --git a/src/opcua_pubsub_uadp.erl b/src/opcua_pubsub_uadp.erl index 5c1a41e..746c801 100644 --- a/src/opcua_pubsub_uadp.erl +++ b/src/opcua_pubsub_uadp.erl @@ -3,7 +3,7 @@ -export([decode_network_message_headers/1]). -export([decode_payload/2]). --export([decode_data_set_message_field/3]). +-export([decode_dataset_message_field/3]). -include("opcua.hrl"). -include("opcua_pubsub.hrl"). @@ -49,18 +49,18 @@ decode_network_message_headers(_) -> %extracts Dataset Messages from the payload blob decoding the headers decode_payload(#{payload_header := undefined}, Payload) -> - {DSM_header, Binary} = decode_data_set_message_header(Payload), + {DSM_header, Binary} = decode_dataset_message_header(Payload), [{DSM_header, Binary}]; decode_payload(#{payload_header := #{count := 1}}, Payload) -> - {DSM_header, Binary} = decode_data_set_message_header(Payload), + {DSM_header, Binary} = decode_dataset_message_header(Payload), [{DSM_header, Binary}]; decode_payload(#{payload_header := #{count := Count}}, Payload) -> <> = Payload, Sizes = [Size || <> <= SizesBinary], - decode_multi_data_set_message(Rest, Sizes). + decode_multi_dataset_message(Rest, Sizes). -decode_data_set_message_field(variant, FieldMetadata, Binary) -> - #data_set_field_metadata{ +decode_dataset_message_field(variant, FieldMetadata, Binary) -> + #dataset_field_metadata{ builtin_type = BuiltinType, data_type = _NodeId, valueRank = _ @@ -70,7 +70,7 @@ decode_data_set_message_field(variant, FieldMetadata, Binary) -> #opcua_variant{type = BuiltinType} -> {Result, Rest}; _ -> {error, unmatched_metadata} end; -decode_data_set_message_field(_, _, _) -> +decode_dataset_message_field(_, _, _) -> error(bad_encoding_not_implemented). %%% INTERNALS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -127,7 +127,7 @@ decode_extended_flags2(1, << }, Bin}. -decode_network_msg_type(<< 0:1, 0:1, 0:1>>) -> data_set_message; +decode_network_msg_type(<< 0:1, 0:1, 0:1>>) -> dataset_message; decode_network_msg_type(<< 0:1, 0:1, 1:1>>) -> discovery_request; decode_network_msg_type(<< 0:1, 1:1, 0:1>>) -> discovery_responce; decode_network_msg_type(<< _:1, _:1, _:1>>) -> reserved. 
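The flag-decoding helpers above pick the UADP header apart bit by bit; since the diff context elides the binary patterns of the top-level decoder, a hedged sketch of the first UADP byte may help. The layout follows the encoding masks introduced later in this series: the protocol version in the low four bits, then the PublisherId, GroupHeader, PayloadHeader and ExtendedFlags1 enable bits.

%% Illustration only: the first byte of a UADP NetworkMessage, matched most
%% significant bit first as Erlang bit syntax does.
decode_uadp_flags(<<ExtFlags1:1, PayloadHdr:1, GroupHdr:1, PubId:1,
                    Version:4, Rest/binary>>) ->
    {#{version => Version,                    % protocol version, 1 here
       publisher_id_enabled => PubId,         % bit 4
       group_header_enabled => GroupHdr,      % bit 5
       payload_header_enabled => PayloadHdr,  % bit 6
       extended_flags1_enabled => ExtFlags1}, % bit 7
     Rest}.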
@@ -175,12 +175,12 @@ decode_payload_header(0, _, Bin) -> {undefined, Bin}; decode_payload_header(1, #{chunk := 1}, Bin) -> {DataSetWriterID, Rest} = opcua_codec_binary_builtin:decode(uint16, Bin), throw({not_implemented, chunked_network_message}); -decode_payload_header(1, #{network_message_type := data_set_message}, Bin) -> +decode_payload_header(1, #{network_message_type := dataset_message}, Bin) -> <> = Bin, <> = Rest, {#{ count => MsgCount, - data_set_writer_ids => + dataset_writer_ids => [ DataWriterID || <> <= DataWriterIDs] }, Rest2}; decode_payload_header(1, #{network_message_type := discovery_request}, Bin) -> @@ -190,30 +190,30 @@ decode_payload_header(1, #{network_message_type := discovery_responce}, Bin) -> -decode_multi_data_set_message(Bin, Sizes) -> - decode_multi_data_set_message(Bin, Sizes, []). +decode_multi_dataset_message(Bin, Sizes) -> + decode_multi_dataset_message(Bin, Sizes, []). -decode_multi_data_set_message(<<>>, [], Result) -> lists:reverse(Result); -decode_multi_data_set_message(Bin, [S|TL], Result) -> +decode_multi_dataset_message(<<>>, [], Result) -> lists:reverse(Result); +decode_multi_dataset_message(Bin, [S|TL], Result) -> <> = Bin, - {DSM_header, Binary1} = decode_data_set_message_header(DSM), - decode_multi_data_set_message(Rest, [ {DSM_header, Binary1} | Result], TL). + {DSM_header, Binary1} = decode_dataset_message_header(DSM), + decode_multi_dataset_message(Rest, [ {DSM_header, Binary1} | Result], TL). -decode_data_set_message_header(DataSetMessageBinary) -> - {DataSetFlags1, Rest} = decode_data_set_flags1(DataSetMessageBinary), - {DataSetFlags2, Rest1} = decode_data_set_flags2(DataSetFlags1, Rest), - {DataSetSeqNum, Rest2} = decode_data_set_seq_num(DataSetFlags1, Rest1), - {Timestamp, Rest3} = decode_data_set_timestamp(DataSetFlags2, Rest2), - {Picoseconds, Rest4} = decode_data_set_picoseconds(DataSetFlags2, Rest3), - {Status, Rest5} = decode_data_set_status(DataSetFlags1, Rest4), - {ConfigVerMajorVer, Rest6} = decode_data_set_cfg_major_ver(DataSetFlags1, Rest5), - {ConfigVerMinorVer, Rest7} = decode_data_set_cfg_minor_ver(DataSetFlags1, Rest6), +decode_dataset_message_header(DataSetMessageBinary) -> + {DataSetFlags1, Rest} = decode_dataset_flags1(DataSetMessageBinary), + {DataSetFlags2, Rest1} = decode_dataset_flags2(DataSetFlags1, Rest), + {DataSetSeqNum, Rest2} = decode_dataset_seq_num(DataSetFlags1, Rest1), + {Timestamp, Rest3} = decode_dataset_timestamp(DataSetFlags2, Rest2), + {Picoseconds, Rest4} = decode_dataset_picoseconds(DataSetFlags2, Rest3), + {Status, Rest5} = decode_dataset_status(DataSetFlags1, Rest4), + {ConfigVerMajorVer, Rest6} = decode_dataset_cfg_major_ver(DataSetFlags1, Rest5), + {ConfigVerMinorVer, Rest7} = decode_dataset_cfg_minor_ver(DataSetFlags1, Rest6), {#{ - data_set_flags1 => DataSetFlags1, - data_set_flags2 => DataSetFlags2, - data_set_seq_num => DataSetSeqNum, + dataset_flags1 => DataSetFlags1, + dataset_flags2 => DataSetFlags2, + dataset_seq_num => DataSetSeqNum, timestamp => Timestamp, picoseconds => Picoseconds, status => Status, @@ -222,7 +222,7 @@ decode_data_set_message_header(DataSetMessageBinary) -> }, Rest7}. 
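When the payload header advertises more than one DataSetMessage, the payload starts with one uint16 size per message (little-endian, matching the encoder added later in this series), followed by the messages back to back. The exact binary patterns are again lost to the diff context, so here is a self-contained sketch of that layout in both directions.

%% Encode: sizes first, then the concatenated messages.
pack_payload(Messages) ->
    Sizes = << <<(byte_size(M)):16/little-unsigned>> || M <- Messages >>,
    iolist_to_binary([Sizes | Messages]).

%% Decode: read Count sizes, then split the rest accordingly.
unpack_payload(Count, Payload) ->
    SizesLen = 2 * Count,
    <<SizesBin:SizesLen/binary, Rest/binary>> = Payload,
    Sizes = [S || <<S:16/little-unsigned>> <= SizesBin],
    split_messages(Sizes, Rest).

split_messages([], <<>>) -> [];
split_messages([Size | Sizes], Bin) ->
    <<Msg:Size/binary, Rest/binary>> = Bin,
    [Msg | split_messages(Sizes, Rest)].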
-decode_data_set_flags1(<< +decode_dataset_flags1(<< DataSetFlags2:1, ConfigVerMinorVer:1, ConfigVerMajorVer:1, @@ -232,13 +232,13 @@ decode_data_set_flags1(<< DataSetMsgValid:1, Rest/binary>>) -> {#{ - data_set_msg_valid => DataSetMsgValid, + dataset_msg_valid => DataSetMsgValid, field_encoding => decode_field_encoding(FieldEncoding), - data_set_msg_seq_num => DataSetMsgSeqNum, + dataset_msg_seq_num => DataSetMsgSeqNum, status => Status, config_ver_minor_ver => ConfigVerMajorVer, config_ver_major_ver => ConfigVerMinorVer, - data_set_flags2 => DataSetFlags2 + dataset_flags2 => DataSetFlags2 }, Rest}. decode_field_encoding(<<0:1, 0:1>>) -> variant; @@ -246,51 +246,51 @@ decode_field_encoding(<<0:1, 1:1>>) -> raw; decode_field_encoding(<<1:1, 0:1>>) -> data_value; decode_field_encoding(<<1:1, 1:1>>) -> reserved. -decode_data_set_flags2(#{data_set_flags2 := 0}, Bin) -> +decode_dataset_flags2(#{dataset_flags2 := 0}, Bin) -> {#{ - msg_type => decode_data_set_message_type(<<0:4>>), + msg_type => decode_dataset_message_type(<<0:4>>), timestamp => 0, picoseconds => 0}, Bin}; -decode_data_set_flags2(#{data_set_flags2 := 1}, +decode_dataset_flags2(#{dataset_flags2 := 1}, <<_Reserved:2, PicoSeconds:1, Timestamp:1, DataMsgType:4/bitstring, Rest/binary>>) -> {#{ - msg_type => decode_data_set_message_type(DataMsgType), + msg_type => decode_dataset_message_type(DataMsgType), timestamp => Timestamp, picoseconds => PicoSeconds }, Rest}. -decode_data_set_message_type(<<0:4>>) -> data_key_frame; -decode_data_set_message_type(<<0:1, 0:1, 0:1, 1:1>>) -> data_delta_frame; -decode_data_set_message_type(<<0:1, 0:1, 1:1, 0:1>>) -> event; -decode_data_set_message_type(<<0:1, 0:1, 1:1, 1:1>>) -> keep_alive; -decode_data_set_message_type(<<_:4>>) -> reserved. +decode_dataset_message_type(<<0:4>>) -> data_key_frame; +decode_dataset_message_type(<<0:1, 0:1, 0:1, 1:1>>) -> data_delta_frame; +decode_dataset_message_type(<<0:1, 0:1, 1:1, 0:1>>) -> event; +decode_dataset_message_type(<<0:1, 0:1, 1:1, 1:1>>) -> keep_alive; +decode_dataset_message_type(<<_:4>>) -> reserved. -decode_data_set_seq_num(#{data_set_msg_seq_num := 0}, Bin) -> {undefined, Bin}; -decode_data_set_seq_num(#{data_set_msg_seq_num := 1}, Bin) -> +decode_dataset_seq_num(#{dataset_msg_seq_num := 0}, Bin) -> {undefined, Bin}; +decode_dataset_seq_num(#{dataset_msg_seq_num := 1}, Bin) -> opcua_codec_binary_builtin:decode(uint16, Bin). -decode_data_set_timestamp(#{timestamp := 0}, Bin) -> {undefined, Bin}; -decode_data_set_timestamp(#{timestamp := 1}, Bin) -> +decode_dataset_timestamp(#{timestamp := 0}, Bin) -> {undefined, Bin}; +decode_dataset_timestamp(#{timestamp := 1}, Bin) -> opcua_codec_binary_builtin:decode(date_time, Bin). -decode_data_set_picoseconds(#{picoseconds := 0}, Bin) -> {undefined, Bin}; -decode_data_set_picoseconds(#{picoseconds := 1}, Bin) -> +decode_dataset_picoseconds(#{picoseconds := 0}, Bin) -> {undefined, Bin}; +decode_dataset_picoseconds(#{picoseconds := 1}, Bin) -> opcua_codec_binary_builtin:decode(uint16, Bin). -decode_data_set_status(#{status := 0}, Bin) -> {undefined, Bin}; -decode_data_set_status(#{status := 1}, Bin) -> +decode_dataset_status(#{status := 0}, Bin) -> {undefined, Bin}; +decode_dataset_status(#{status := 1}, Bin) -> opcua_codec_binary_builtin:decode(uint16, Bin). 
-decode_data_set_cfg_major_ver(#{config_ver_major_ver := 0}, Bin) -> +decode_dataset_cfg_major_ver(#{config_ver_major_ver := 0}, Bin) -> {undefined, Bin}; -decode_data_set_cfg_major_ver(#{config_ver_major_ver := 1}, Bin) -> +decode_dataset_cfg_major_ver(#{config_ver_major_ver := 1}, Bin) -> opcua_codec_binary_builtin:decode(uint32, Bin). -decode_data_set_cfg_minor_ver(#{config_ver_minor_ver := 0}, Bin) -> +decode_dataset_cfg_minor_ver(#{config_ver_minor_ver := 0}, Bin) -> {undefined, Bin}; -decode_data_set_cfg_minor_ver(#{config_ver_minor_ver := 1}, Bin) -> +decode_dataset_cfg_minor_ver(#{config_ver_minor_ver := 1}, Bin) -> opcua_codec_binary_builtin:decode(uint32, Bin). diff --git a/src/opcua_pubsub_writer_group.erl b/src/opcua_pubsub_writer_group.erl new file mode 100644 index 0000000..2b1b1f5 --- /dev/null +++ b/src/opcua_pubsub_writer_group.erl @@ -0,0 +1,50 @@ +-module(opcua_pubsub_writer_group). + +-export([new/1]). +-export([add_dataset_writer/3]). + +-include("opcua_pubsub.hrl"). + +-record(state, { + enabled, + name, + writer_group_id, + publishing_interval, + keep_alive_time, + priority, + locale_ids, + transport_settings, + message_settings, + dataset_writers = #{} +}). + +new(#writer_group_config{ + enabled = E, + name = N, + writer_group_id = WG_ID, + publishing_interval = P_INTERVAL, + keep_alive_time = KA_TIME, + priority = P, + locale_ids = Locales, + transport_settings = TS, + message_settings = MS}) -> + {ok, #state{ + enabled = E, + name = N, + writer_group_id = WG_ID, + publishing_interval = P_INTERVAL, + keep_alive_time = KA_TIME, + priority = P, + locale_ids = Locales, + transport_settings = TS, + message_settings = MS, + dataset_writers = #{} + }}. + +add_dataset_writer(PDS_id, DSW_cfg, #state{dataset_writers = DSWs} = S) -> + DSW_id = uuid:get_v4(), + {ok, DSW} = opcua_pubsub_dataset_writer:new(PDS_id, DSW_cfg), + NewDSWs = maps:put(DSW_id, DSW, DSWs), + {ok, DSW_id, S#state{dataset_writers = NewDSWs}}. + + From e0ab4b601aabbf271b97dab671e469b047c0e395 Mon Sep 17 00:00:00 2001 From: Luca Succi Date: Wed, 25 Jan 2023 16:27:11 +0100 Subject: [PATCH 5/7] Even more glue code --- src/opcua_pubsub.erl | 11 +++++++--- src/opcua_pubsub.hrl | 4 ++-- src/opcua_pubsub_connection.erl | 34 ++++++++++++++++++++--------- src/opcua_pubsub_dataset_writer.erl | 9 +++++--- src/opcua_pubsub_example.erl | 4 ++++ src/opcua_pubsub_writer_group.erl | 22 ++++++++++++++++--- 6 files changed, 63 insertions(+), 21 deletions(-) diff --git a/src/opcua_pubsub.erl b/src/opcua_pubsub.erl index 5241261..1607d34 100644 --- a/src/opcua_pubsub.erl +++ b/src/opcua_pubsub.erl @@ -22,8 +22,8 @@ -export([start_connection/1]). -export([stop_connection/1]). - -export([register_connection/1]). +-export([get_published_dataset/1]). -include("opcua_pubsub.hrl"). @@ -41,7 +41,6 @@ start_link() -> start_connection(Connection) -> gen_server:call(?MODULE, {?FUNCTION_NAME, Connection}). - stop_connection(ConnectionID) -> gen_server:call(?MODULE, {?FUNCTION_NAME, ConnectionID}). @@ -53,6 +52,9 @@ add_published_dataset(Config) -> add_published_dataset_field(PDS_ID, FieldsMetaData, FieldsSource) -> gen_server:call(?MODULE, {?FUNCTION_NAME, PDS_ID, FieldsMetaData, FieldsSource}). +get_published_dataset(PDS_ID) -> + gen_server:call(?MODULE, {?FUNCTION_NAME, PDS_ID}). + new_connection(Url, Opts) -> opcua_pubsub_connection:create(Url, Opts). 
@@ -102,7 +104,10 @@ handle_call({add_published_dataset_field, PDS_id, FieldsMetadata, FieldsSources} PDS = maps:get(PDS_id, PDSs), NewPDS = h_add_published_dataset_field(PDS, FieldsMetadata, FieldsSources), NewMap = maps:put(PDS_id, NewPDS, PDSs), - {reply, ok, S#state{published_datasets = NewMap}}. + {reply, ok, S#state{published_datasets = NewMap}}; +handle_call({get_published_dataset, PDS_ID}, + _, #state{published_datasets = PublishedDatasets} = S) -> + {reply, maps:get(PDS_ID, PublishedDatasets), S}. handle_cast({register_connection, ID, Pid}, #state{connections = Conns} = State) -> {noreply, State#state{connections = maps:put(ID, Pid, Conns)}}. diff --git a/src/opcua_pubsub.hrl b/src/opcua_pubsub.hrl index 6dee56f..fb70a35 100644 --- a/src/opcua_pubsub.hrl +++ b/src/opcua_pubsub.hrl @@ -51,7 +51,7 @@ -record(published_variable,{ published_variable, attribute_id, - sampling_interval_hint, + sampling_interval_hint = -1, deadband_type = 0 :: 0 | 1 | 2, deadband_value = 0.0 :: float(), index_rande, @@ -77,7 +77,7 @@ }). -record(writer_group_config,{ - enabled, + enabled = true :: boolean(), name, writer_group_id, publishing_interval, diff --git a/src/opcua_pubsub_connection.erl b/src/opcua_pubsub_connection.erl index d02eba9..935f745 100644 --- a/src/opcua_pubsub_connection.erl +++ b/src/opcua_pubsub_connection.erl @@ -9,7 +9,6 @@ -export([add_dataset_writer/4]). -export([start_link/2]). --export([send/2]). -behaviour(gen_server). -export([init/1, handle_call/3, handle_cast/2, handle_info/2]). @@ -26,7 +25,7 @@ % CONFIGURATION API %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -% These help to build the Connection process state +% These help to build the initial Connection process state % which holds the settings of all pubsub sub-entities create(Url, Config) -> @@ -53,9 +52,9 @@ create_target_variables(RG_id, DSR_id, Config, #state{reader_groups = RGs} = S) NewGroups = maps:put(RG_id, NewRG, RGs), {ok, S#state{reader_groups = NewGroups}}. -add_writer_group(ReaderGroupCfg, #state{writer_groups = WGs} = S) -> +add_writer_group(WriterGroupCfg, #state{writer_groups = WGs} = S) -> WG_id = uuid:get_v4(), - {ok, WriterGroup} = opcua_pubsub_writer_group:new(ReaderGroupCfg), + {ok, WriterGroup} = opcua_pubsub_writer_group:new(WriterGroupCfg), WGs2 = maps:put(WG_id, WriterGroup, WGs), {ok, WG_id, S#state{writer_groups = WGs2}}. @@ -70,17 +69,18 @@ add_dataset_writer(WG_id, PDS_id, WriterCfg, #state{writer_groups = WGs} = S) -> start_link(ID, ConfiguredState) -> gen_server:start_link(?MODULE, [ID, ConfiguredState], []). -send(Pid, Data) -> - gen_server:cast(Pid, {?FUNCTION_NAME, Data}). - %%% GEN_SERVER CALLBACKS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -init([ID, #state{config = Config} = ConfiguredState]) -> +init([ID, #state{ + config = Config, + writer_groups = WriterGroups} = ConfiguredState]) -> case start_transport(Config) of {ok, Module, State} -> opcua_pubsub:register_connection(ID), + WG2 = init_writer_groups(WriterGroups), {ok, ConfiguredState#state{ id = ID, + writer_groups = WG2, middleware = {Module, State} }}; {error, E} -> error(E) @@ -89,10 +89,19 @@ init([ID, #state{config = Config} = ConfiguredState]) -> handle_call(_, _, State) -> {reply, ok, State}. -handle_cast({send, Data}, #state{middleware = {M,S}} = State) -> - M:send(Data, S), +handle_cast(_, State) -> {noreply, State}. 
+handle_info({publish, WG_ID}, #state{ + middleware = {Module, MiddlewareState}, + writer_groups = WriterGroups} = State) -> + WG = maps:get(WG_ID, WriterGroups), + {NetMsg, NewWG} = opcua_pubsub_writer_group:write_network_message(WG), + io:format("Sending NetworkMsg: ~p~n",[NetMsg]), + %MiddlewareState2 = Module:send(NetMsg, MiddlewareState), + {noreply, State#state{ + %middleware = {Module, MiddlewareState2}, + writer_groups = maps:put(WG_ID, NewWG, WriterGroups)}}; handle_info(Info, #state{middleware = {M, S}} = State) -> {ok, NewS} = handle_network_message(M:handle_info(Info, S), State), {noreply, NewS}. @@ -146,3 +155,8 @@ dispatchMessages(BundledMessages, InterestedReaders) -> {RG_id, NewRG} end || {RG_id, RG, DSR_ids} <- InterestedReaders]. +init_writer_groups(WriterGroups) -> + maps:from_list([begin + NewWG = opcua_pubsub_writer_group:init(ID, G), + {ID, NewWG} + end || {ID, G} <- maps:to_list(WriterGroups)]). diff --git a/src/opcua_pubsub_dataset_writer.erl b/src/opcua_pubsub_dataset_writer.erl index 577c915..9c94598 100644 --- a/src/opcua_pubsub_dataset_writer.erl +++ b/src/opcua_pubsub_dataset_writer.erl @@ -1,7 +1,7 @@ -module(opcua_pubsub_dataset_writer). -export([new/2]). --export([write/1]). +-export([write_dataset_message/1]). -include("opcua.hrl"). -include("opcua_pubsub.hrl"). @@ -41,5 +41,8 @@ new(PDS_ID, #dataset_writer_config{ connected_published_dataset = PDS_ID }}. -write(#state{} = S) -> - S. + +write_dataset_message(#state{connected_published_dataset = PDS_id} = S) -> + PDS = opcua_pubsub:get_published_dataset(PDS_id), + {<<>>, S}. + diff --git a/src/opcua_pubsub_example.erl b/src/opcua_pubsub_example.erl index 73673db..852b63d 100644 --- a/src/opcua_pubsub_example.erl +++ b/src/opcua_pubsub_example.erl @@ -97,5 +97,9 @@ publication() -> }, {ok, DSW_id, Conn3} = opcua_pubsub:add_dataset_writer(Conn2, WG_id, PDS_id, DataSetWriterConfig), + + + {ok, ID} = opcua_pubsub:start_connection(Conn3), + ok. diff --git a/src/opcua_pubsub_writer_group.erl b/src/opcua_pubsub_writer_group.erl index 2b1b1f5..ec0251e 100644 --- a/src/opcua_pubsub_writer_group.erl +++ b/src/opcua_pubsub_writer_group.erl @@ -2,11 +2,13 @@ -export([new/1]). -export([add_dataset_writer/3]). +-export([init/2]). +-export([write_network_message/1]). -include("opcua_pubsub.hrl"). -record(state, { - enabled, + state = operational :: pubsub_state_machine(), name, writer_group_id, publishing_interval, @@ -15,7 +17,8 @@ locale_ids, transport_settings, message_settings, - dataset_writers = #{} + dataset_writers = #{}, + timer }). new(#writer_group_config{ @@ -29,7 +32,6 @@ new(#writer_group_config{ transport_settings = TS, message_settings = MS}) -> {ok, #state{ - enabled = E, name = N, writer_group_id = WG_ID, publishing_interval = P_INTERVAL, @@ -47,4 +49,18 @@ add_dataset_writer(PDS_id, DSW_cfg, #state{dataset_writers = DSWs} = S) -> NewDSWs = maps:put(DSW_id, DSW, DSWs), {ok, DSW_id, S#state{dataset_writers = NewDSWs}}. +init(ID, #state{publishing_interval = PublishingInterval} = S) -> + {ok, Tref} = timer:send_interval(PublishingInterval, {publish, ID}), + S#state{timer = Tref}. 
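The publishing side is driven by a plain Erlang timer: opcua_pubsub_writer_group:init/2 arms timer:send_interval/2, which periodically sends {publish, WG_ID} to the connection process that called it; the connection then asks the group for a NetworkMessage and hands it to the transport. A stripped-down sketch of that loop, outside any gen_server, just to make the message flow visible; it is not part of the patch.

%% Illustration of the publish tick: send_interval/2 targets the calling
%% process, so the connection receives {publish, WG_ID} every IntervalMs.
demo_publish_loop(IntervalMs) ->
    WG_ID = make_ref(),
    {ok, TRef} = timer:send_interval(IntervalMs, {publish, WG_ID}),
    demo_wait(WG_ID, 3),
    timer:cancel(TRef).

demo_wait(_WG_ID, 0) -> ok;
demo_wait(WG_ID, N) ->
    receive
        {publish, WG_ID} ->
            %% the real code calls write_network_message/1 here and passes
            %% the result to the transport's send callback
            io:format("publish tick ~p~n", [N]),
            demo_wait(WG_ID, N - 1)
    end.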
+ +write_network_message(#state{dataset_writers = DatasetWriters} = S) -> + Results = [ + begin + {DSM, NewState} = opcua_pubsub_dataset_writer:write_dataset_message(DSW), + {DSM, {ID, NewState}} + end || {ID, DSW} <- maps:to_list(DatasetWriters)], + {DataSetMessages, KV_pairs_DSWs} = lists:unzip(Results), + io:format("DSMs: ~p~n", [DataSetMessages]), + NetMsg = <<>>, + {NetMsg, S#state{dataset_writers = maps:from_list(KV_pairs_DSWs)}}. \ No newline at end of file From e53bcb9c34bc6f1f17d76fc945e2f25ceb9d06dc Mon Sep 17 00:00:00 2001 From: Luca Succi Date: Fri, 27 Jan 2023 17:40:11 +0100 Subject: [PATCH 6/7] First working minimal data_writer and encoding code --- src/opcua_codec.erl | 13 ++ src/opcua_pubsub.erl | 6 +- src/opcua_pubsub.hrl | 56 +++++- src/opcua_pubsub_connection.erl | 58 ++++-- src/opcua_pubsub_dataset_writer.erl | 46 ++++- src/opcua_pubsub_example.erl | 14 +- src/opcua_pubsub_uadp.erl | 278 ++++++++++++++++++++++++++-- src/opcua_pubsub_udp.erl | 18 +- src/opcua_pubsub_writer_group.erl | 64 +++---- 9 files changed, 458 insertions(+), 95 deletions(-) diff --git a/src/opcua_codec.erl b/src/opcua_codec.erl index 320237d..08c9f1f 100644 --- a/src/opcua_codec.erl +++ b/src/opcua_codec.erl @@ -24,6 +24,7 @@ -export([unpack_type/3]). -export([unpack_enum/2]). -export([unpack_option_set/2]). +-export([unpack_variant/1]). -export([builtin_type_name/1]). -export([builtin_type_id/1]). @@ -119,6 +120,18 @@ unpack_option_set(#opcua_option_set{fields = Fields}, Value) -> end, [], Fields), lists:reverse(FieldNames). +-spec unpack_variant(opcua:variant()) -> term(). +unpack_variant(#opcua_variant{type = extension_object, + value = #opcua_extension_object{ + type_id = DataTypeID, + body = Data + }}) -> + % Not sure what to do here ... + % For now I just extract the type and value + {DataTypeID, Data}; +unpack_variant(#opcua_variant{type = _, value = _}) -> + error(bad_not_implemented). + builtin_type_name( 1) -> boolean; builtin_type_name( 2) -> sbyte; builtin_type_name( 3) -> byte; diff --git a/src/opcua_pubsub.erl b/src/opcua_pubsub.erl index 1607d34..39822a7 100644 --- a/src/opcua_pubsub.erl +++ b/src/opcua_pubsub.erl @@ -10,7 +10,7 @@ -export([add_published_dataset/1]). -export([add_published_dataset_field/3]). --export([new_connection/2]). +-export([new_connection/3]). -export([add_reader_group/2]). -export([add_writer_group/2]). @@ -55,8 +55,8 @@ add_published_dataset_field(PDS_ID, FieldsMetaData, FieldsSource) -> get_published_dataset(PDS_ID) -> gen_server:call(?MODULE, {?FUNCTION_NAME, PDS_ID}). -new_connection(Url, Opts) -> - opcua_pubsub_connection:create(Url, Opts). +new_connection(Url, Config, Opts) -> + opcua_pubsub_connection:create(Url, Config, Opts). % Just a place to group DataSetReaders add_reader_group(Connection, Config) -> diff --git a/src/opcua_pubsub.hrl b/src/opcua_pubsub.hrl index fb70a35..34a9865 100644 --- a/src/opcua_pubsub.hrl +++ b/src/opcua_pubsub.hrl @@ -6,6 +6,11 @@ -type pubsub_state_machine() :: operational | error | enabled | paused. +-record(connection_config, { + publisher_id, + publisher_id_type +}). + -record(dataset_mirror,{}). -record(target_variable,{ @@ -76,6 +81,32 @@ dataset_source = [] :: published_dataset_source() }). +-define(UADP_NET_MSG_CONTENT_MASK_PUBLISHER_ID, (1 bsl 0)). +-define(UADP_NET_MSG_CONTENT_MASK_GROUP_HEADER, (1 bsl 1)). +-define(UADP_NET_MSG_CONTENT_MASK_WRITER_GROUP_ID, (1 bsl 2)). +-define(UADP_NET_MSG_CONTENT_MASK_GROUP_VERSION, (1 bsl 3)). +-define(UADP_NET_MSG_CONTENT_MASK_NET_MSG_NUM, (1 bsl 4)). 
+-define(UADP_NET_MSG_CONTENT_MASK_SEQ_NUM, (1 bsl 5)). +-define(UADP_NET_MSG_CONTENT_MASK_PAYLOAD_HEADER, (1 bsl 6)). +-define(UADP_NET_MSG_CONTENT_MASK_TIMESTAMP, (1 bsl 7)). +-define(UADP_NET_MSG_CONTENT_MASK_PICOSECONDS, (1 bsl 8)). +-define(UADP_NET_MSG_CONTENT_MASK_DATASET_CLASSID, (1 bsl 9)). +-define(UADP_NET_MSG_CONTENT_MASK_PROMOTED_FIELDS, (1 bsl 10)). + +-define(DEFAULT_NET_MSG_CONTENT, + ?UADP_NET_MSG_CONTENT_MASK_PUBLISHER_ID + bor ?UADP_NET_MSG_CONTENT_MASK_GROUP_HEADER + bor ?UADP_NET_MSG_CONTENT_MASK_WRITER_GROUP_ID + bor ?UADP_NET_MSG_CONTENT_MASK_PAYLOAD_HEADER). + +-record(uadp_writer_group_message_data,{ + groupVersion, + dataSetOrdering, + networkMessageContentMask = ?DEFAULT_NET_MSG_CONTENT, + samplingOffset, + publishingOffset +}). + -record(writer_group_config,{ enabled = true :: boolean(), name, @@ -85,15 +116,26 @@ priority, locale_ids, transport_settings, - message_settings + message_settings = #uadp_writer_group_message_data{} }). +-define(UADP_DATA_SET_FIELD_MASK_TIMESTAMP, 1). +-define(UADP_DATA_SET_FIELD_MASK_PICOSECONDS, (1 bsl 1)). +-define(UADP_DATA_SET_FIELD_MASK_STATUS, (1 bsl 2)). +-define(UADP_DATA_SET_FIELD_MASK_MAJORVERSION, (1 bsl 3)). +-define(UADP_DATA_SET_FIELD_MASK_MINORVERSION, (1 bsl 4)). +-define(UADP_DATA_SET_FIELD_MASK_SEQUENCENUMBER, (1 bsl 5)). + +-define(DEFAULT_DATA_SET_FIELD_CONTENT, + ?UADP_DATA_SET_FIELD_MASK_TIMESTAMP). + -record(dataset_writer_config,{ - name :: binary(), - dataset_writer_id :: non_neg_integer(), - dataset_field_content_mask, - keyframe_count = 1 :: non_neg_integer(), - dataset_name :: string(), + name :: binary(), + dataset_writer_id :: non_neg_integer(), + dataset_field_content_mask = ?DEFAULT_DATA_SET_FIELD_CONTENT, + keyframe_count = 1 :: non_neg_integer(), + dataset_name :: string(), transport_settings, message_settings -}). \ No newline at end of file +}). + diff --git a/src/opcua_pubsub_connection.erl b/src/opcua_pubsub_connection.erl index 935f745..b52e788 100644 --- a/src/opcua_pubsub_connection.erl +++ b/src/opcua_pubsub_connection.erl @@ -1,6 +1,6 @@ -module(opcua_pubsub_connection). --export([create/2]). +-export([create/3]). -export([add_reader_group/2]). -export([add_dataset_reader/3]). -export([create_target_variables/4]). @@ -17,7 +17,10 @@ -record(state, { id, - config, + uri, + transport_config, + publisher_id, + publisher_id_type, middleware :: {module(), term()}, reader_groups = #{}, writer_groups = #{} @@ -28,11 +31,17 @@ % These help to build the initial Connection process state % which holds the settings of all pubsub sub-entities -create(Url, Config) -> +create(Url, + #connection_config{publisher_id = PublisherId, + publisher_id_type = PublisherIdType}, + TransportOpts) -> Uri = uri_string:parse(Url), - Config2 = maps:merge(default_config(), Config), - Config3 = maps:put(uri, Uri, Config2), - {ok, #state{config = Config3}}. + Config2 = maps:merge(default_config(), TransportOpts), + Config3 = maps:merge(Config2, #{uri => Uri}), + {ok, #state{uri = Uri, + transport_config = Config3, + publisher_id = PublisherId, + publisher_id_type = PublisherIdType}}. add_reader_group(ReaderGroupCfg, #state{reader_groups = RG} = S) -> RG_id = uuid:get_v4(), @@ -52,9 +61,14 @@ create_target_variables(RG_id, DSR_id, Config, #state{reader_groups = RGs} = S) NewGroups = maps:put(RG_id, NewRG, RGs), {ok, S#state{reader_groups = NewGroups}}. 
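The new content-mask macros are ordinary integers meant to be combined with bor and tested with band; the default network-message mask works out to 1 bor 2 bor 4 bor 64 = 71, i.e. publisher id, group header, writer group id and payload header enabled, but no timestamp. A tiny hedged helper showing the intended test, mirroring how the encoder consults the mask:

%% True when Flag is set in Mask; e.g.
%%   has_flag(?DEFAULT_NET_MSG_CONTENT, ?UADP_NET_MSG_CONTENT_MASK_TIMESTAMP)
%% is false, so no timestamp is emitted unless the mask is extended.
has_flag(Mask, Flag) ->
    (Mask band Flag) =/= 0.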
-add_writer_group(WriterGroupCfg, #state{writer_groups = WGs} = S) -> +add_writer_group(WriterGroupCfg, #state{ + publisher_id = PublisherId, + publisher_id_type = PublisherIdType, + writer_groups = WGs} = S) -> WG_id = uuid:get_v4(), - {ok, WriterGroup} = opcua_pubsub_writer_group:new(WriterGroupCfg), + {ok, WriterGroup} = opcua_pubsub_writer_group:new(PublisherId, + PublisherIdType, + WriterGroupCfg), WGs2 = maps:put(WG_id, WriterGroup, WGs), {ok, WG_id, S#state{writer_groups = WGs2}}. @@ -71,9 +85,8 @@ start_link(ID, ConfiguredState) -> %%% GEN_SERVER CALLBACKS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -init([ID, #state{ - config = Config, - writer_groups = WriterGroups} = ConfiguredState]) -> +init([ID, #state{transport_config = Config, + writer_groups = WriterGroups} = ConfiguredState]) -> case start_transport(Config) of {ok, Module, State} -> opcua_pubsub:register_connection(ID), @@ -98,13 +111,17 @@ handle_info({publish, WG_ID}, #state{ WG = maps:get(WG_ID, WriterGroups), {NetMsg, NewWG} = opcua_pubsub_writer_group:write_network_message(WG), io:format("Sending NetworkMsg: ~p~n",[NetMsg]), - %MiddlewareState2 = Module:send(NetMsg, MiddlewareState), + MiddlewareState2 = Module:send(NetMsg, MiddlewareState), {noreply, State#state{ - %middleware = {Module, MiddlewareState2}, + middleware = {Module, MiddlewareState2}, writer_groups = maps:put(WG_ID, NewWG, WriterGroups)}}; handle_info(Info, #state{middleware = {M, S}} = State) -> - {ok, NewS} = handle_network_message(M:handle_info(Info, S), State), - {noreply, NewS}. + case M:handle_info(Info, S) of + %ignored -> {noreply, State}; + NetMsg -> + {ok, NewS} = handle_network_message(NetMsg, State), + {noreply, NewS} + end. %%% INTERNAL FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -115,13 +132,17 @@ start_transport(_Config) -> {error, unsupported_transport}. default_config() -> #{ - publisher_id_type => ?UA_PUBLISHERIDTYPE_UINT16, + publisher_id_type => uint16, publisher_id => 1111, name => "Unnamed" }. handle_network_message(Binary, #state{reader_groups = RGs} = S) -> {Headers, Payload} = opcua_pubsub_uadp:decode_network_message_headers(Binary), + + DataSetMessages = opcua_pubsub_uadp:decode_payload(Headers, Payload), + io:format("Msgs = ~p\n",[DataSetMessages]), + InterestedReaders = [begin DSR_ids = opcua_pubsub_reader_group:filter_readers(Headers,RG), @@ -131,8 +152,11 @@ handle_network_message(Binary, #state{reader_groups = RGs} = S) -> ReadersCount = lists:sum([length(DSR_ids) || {_, _, DSR_ids} <- InterestedReaders]), case ReadersCount > 0 of - false -> io:format("Skipped NetMsg\n"),{ok, S}; + false -> + io:format("Skipped NetMsg = ~p\n",[Binary]), + {ok, S}; true -> + io:format("Accepting NetMsg = ~p\n",[Headers]), % we can procede with the security step if needed: % opcua_pubsub_security: ... not_implemented yet % Then we decode all messages diff --git a/src/opcua_pubsub_dataset_writer.erl b/src/opcua_pubsub_dataset_writer.erl index 9c94598..6de068d 100644 --- a/src/opcua_pubsub_dataset_writer.erl +++ b/src/opcua_pubsub_dataset_writer.erl @@ -19,7 +19,6 @@ connected_published_dataset }). - new(PDS_ID, #dataset_writer_config{ name = N, dataset_writer_id = DS_WID, @@ -41,8 +40,47 @@ new(PDS_ID, #dataset_writer_config{ connected_published_dataset = PDS_ID }}. 
- -write_dataset_message(#state{connected_published_dataset = PDS_id} = S) -> +write_dataset_message(#state{dataset_writer_id = DSW_ID, + connected_published_dataset = PDS_id, + dataset_field_content_mask = ContentMask} = S) -> PDS = opcua_pubsub:get_published_dataset(PDS_id), - {<<>>, S}. + % We are going to produce a keyframe, always. + % We do not support delta-frames so we ignore keyframe_count + #published_dataset{ + dataset_metadata = #dataset_metadata{fields = FieldsMetadata} = MD, + dataset_source = DatasetSource + } = PDS, + Values = read_sources(DatasetSource, []), + Fields = encode_data_set_fields(FieldsMetadata, Values), + FieldCount = <<(length(Fields)):16/unsigned-little>>, + Header = encode_message_header(variant, data_key_frame, ContentMask, MD), + DataSetMessage = iolist_to_binary([Header, FieldCount, Fields]), + {DataSetMessage, DSW_ID, S}. + +read_sources([], Values) -> lists:reverse(Values); +read_sources([#published_variable{ + published_variable = NodeID, + attribute_id = ?UA_ATTRIBUTEID_VALUE + } | Rest], Values) -> + [DataValue] = opcua_server_registry:perform(NodeID,[#opcua_read_command{attr = value}]), + #opcua_data_value{ + value = Value, + status = good + } = DataValue, + % io:format("Read value: ~p~n",[Value]), + read_sources(Rest, [Value | Values]). + +encode_message_header(FieldEncoding, MsgType, ContentMask, Metadata) -> + + opcua_pubsub_uadp:encode_dataset_message_header(FieldEncoding, MsgType, + ContentMask, Metadata). + + +encode_data_set_fields(FieldsMetadata, Values) -> + encode_data_set_fields(FieldsMetadata, Values, []). + +encode_data_set_fields([], [], Results) -> lists:reverse(Results); +encode_data_set_fields([ FieldMeta | FMD], [Value | Values], Results) -> + Binary = opcua_pubsub_uadp:encode_dataset_message_field(FieldMeta, Value), + encode_data_set_fields(FMD, Values, [Binary | Results]). diff --git a/src/opcua_pubsub_example.erl b/src/opcua_pubsub_example.erl index 852b63d..851c7b6 100644 --- a/src/opcua_pubsub_example.erl +++ b/src/opcua_pubsub_example.erl @@ -8,8 +8,8 @@ subscription() -> Url = <<"opc.udp://224.0.0.22:4840">>, - ConnectionConfig = #{}, - {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig), + ConnectionConfig = #connection_config{}, + {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig, #{}), ReaderGroupconfig = #{ name => <<"Simple Reader Group">>}, {ok, RG_id, Conn2} = opcua_pubsub:add_reader_group(Conn, ReaderGroupconfig), @@ -71,17 +71,17 @@ publication() -> }], FieldsSource = [ #published_variable{ - published_variable = ?NNID(2256), + published_variable = ?NNID(2258), attribute_id = ?UA_ATTRIBUTEID_VALUE }], ok = opcua_pubsub:add_published_dataset_field(PDS_id, FieldsMetaData, FieldsSource), Url = <<"opc.udp://224.0.0.22:4840">>, - ConnectionConfig = #{ - publisher_id_type => uint16, - publisher_id => 2234 + ConnectionConfig = #connection_config{ + publisher_id = 2234, + publisher_id_type = uint16 }, - {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig), + {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig, #{}), WriterGroupconfig = #writer_group_config{ name = <<"Simple Writer Group">>, diff --git a/src/opcua_pubsub_uadp.erl b/src/opcua_pubsub_uadp.erl index 746c801..94ce2fa 100644 --- a/src/opcua_pubsub_uadp.erl +++ b/src/opcua_pubsub_uadp.erl @@ -5,9 +5,56 @@ -export([decode_payload/2]). -export([decode_dataset_message_field/3]). +-export([encode_dataset_message_field/2]). +-export([encode_dataset_message_header/4]). +-export([encode_payload/1]). 
+-export([encode_network_message_headers/4]).
+
 -include("opcua.hrl").
 -include("opcua_pubsub.hrl").
 
+%%%%%% Encoding binary flags %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% Used In UADPFlags
+-define(UADP_VERSION, 1).
+-define(PUBLISHER_ID_ENABLED, (1 bsl 4)).
+-define(GROUP_HEADER_ENABLED, (1 bsl 5)).
+-define(PAYLOAD_HEADER_ENABLED, (1 bsl 6)).
+-define(EXT_FLAGS_1_ENABLED, (1 bsl 7)).
+% Used In ExtendedFlags2
+-define(EXT_FLAGS2_CHUNK_MESSAGE, 1).
+-define(EXT_FLAGS2_PROMOTED_FIELD_ENABLED, (1 bsl 1)).
+-define(EXT_FLAGS2_DATASET_MSG_TYPE, 0).
+-define(EXT_FLAGS2_DISCOVERY_REQUEST_MSG_TYPE, (1 bsl 2)).
+-define(EXT_FLAGS2_DISCOVERY_RESPONSE_MSG_TYPE, (1 bsl 3)).
+% Used In ExtendedFlags1
+-define(EXT_FLAGS1_DATA_SET_CLASS_ID_ENABLED, (1 bsl 3)).
+% -define(SECURITY_ENABLED, (1 bsl 4)). % not implemented
+-define(EXT_FLAGS1_TIMESTAMP_ENABLED, (1 bsl 5)).
+-define(EXT_FLAGS1_PICOSECONDS_ENABLED, (1 bsl 6)).
+-define(EXT_FLAGS1_EXT_FLAGS_2_ENABLED, (1 bsl 7)).
+% Used In GroupFlags
+-define(WRITER_GROUP_ENABLED, 1).
+-define(GROUP_VERSION_ENABLED, (1 bsl 1)).
+-define(NETWORK_MESSAGE_ENABLED, (1 bsl 2)).
+-define(SEQUENCE_NUMBER_ENABLED, (1 bsl 3)).
+% Used in DataSetMessageHeader DataSetFlags1
+-define(DATASET_FLAGS1_VALID, 1).
+-define(DATASET_FLAGS1_VARIANT, 0).
+-define(DATASET_FLAGS1_RAWDATA, (1 bsl 1)).
+-define(DATASET_FLAGS1_DATAVALUE, (1 bsl 2)).
+-define(DATASET_FLAGS1_SEQ_NUM_ENABLED, (1 bsl 3)).
+-define(DATASET_FLAGS1_STATUS_ENABLED, (1 bsl 4)).
+-define(DATASET_FLAGS1_MAJOR_V_ENABLED, (1 bsl 5)).
+-define(DATASET_FLAGS1_MINOR_V_ENABLED, (1 bsl 6)).
+-define(DATASET_FLAGS1_FLAGS2_ENABLED, (1 bsl 7)).
+% Used in DataSetMessageHeader DataSetFlags2
+-define(DATASET_FLAGS2_KEY_FRAME, 0).
+-define(DATASET_FLAGS2_DELTA_FRAME, 1).
+-define(DATASET_FLAGS2_EVENT, 2).
+-define(DATASET_FLAGS2_KEEP_ALIVE, 3).
+-define(DATASET_FLAGS2_TIMESTAMP_ENABLED, (1 bsl 4)).
+-define(DATASET_FLAGS2_PICOSECONDS_ENABLED, (1 bsl 5)).
+
 %%% API FUNCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 
 % Extracts the clear data from the message Headers and the payload binary
@@ -22,7 +69,7 @@ decode_network_message_headers(<>) ->
     % Skipping many optional fields, enforcing a minimal setup for testing
     % TODO: add them once needed and remove this hard match
    #{dataset_class_id := 0, extended_flags2 := 0,
-        picoseconds := 0, publisher_id_type := uint16,
+        picoseconds := 0, decode_publisher_id_type := uint16,
        security := 0, timestamp := 0} = ExtendedFlags1Map,
    ExtendedFlags2 = maps:get(extended_flags2, ExtendedFlags1Map, 0),
    {ExtendedFlags2Map, Rest3} = decode_extended_flags2(ExtendedFlags2, Rest2),
@@ -73,9 +120,136 @@ decode_dataset_message_field(variant, FieldMetadata, Binary) ->
 decode_dataset_message_field(_, _, _) ->
     error(bad_encoding_not_implemented).
 
-%%% INTERNALS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+encode_dataset_message_field(#dataset_field_metadata{
+        data_type = DataType}, #opcua_variant{} = V) ->
+    %io:format("Original Variant: ~p~n",[V]),
+    {_TypeID, Val} = opcua_codec:unpack_variant(V),
+    V2 = opcua_codec:pack_variant(opcua_server_space, DataType, Val),
+    %io:format("Variant to publish: ~p~n",[V2]),
+    {Binary, _} = opcua_codec_binary:encode(variant, V2),
+    %io:format("Encoded ~p~n", [Binary]),
+    {Result, _Rest} = opcua_codec_binary:decode(variant, Binary),
+    %io:format("Decoded: ~p~n",[Result]),
+    Binary.
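The (1 bsl N) macros defined at the top of this file are combined into single flag octets with bor and tested again with band. A tiny self-contained illustration of that idiom; the flag names below are made up for the example and are not the ones defined in the module.

-module(flags_sketch).
-export([build/1, has/2]).

-define(FLAG_A, 1).            % bit 0
-define(FLAG_B, (1 bsl 4)).    % bit 4
-define(FLAG_C, (1 bsl 7)).    % bit 7

% Fold a list of flag macros into one octet.
build(Flags) -> lists:foldl(fun(F, Acc) -> Acc bor F end, 0, Flags).

% Test whether a given flag bit is set in an octet.
has(Flag, Byte) -> Byte band Flag =/= 0.

% build([?FLAG_A, ?FLAG_C]) =:= 129 (2#10000001); has(?FLAG_B, 129) =:= false.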
+ +encode_dataset_message_header(FieldEncoding, MsgType, ContentMask, + #dataset_metadata{configuration_version = _MajorMinor}) -> + Flags1 = ?DATASET_FLAGS1_VALID + bor encode_field_encoding(FieldEncoding) + bor ?DATASET_FLAGS1_FLAGS2_ENABLED, + io:format("Flags1 ~p~n",[Flags1]), + Flags2 = 0, + % DataSetFlags1 + % F1 = #{ + % dataset_msg_valid => 1, + % field_encoding => FieldEncoding, + % dataset_flags2 => 1}, + % {Status, F1_1} = case ContentMask band ?UADP_DATA_SET_FIELD_MASK_STATUS of + % 1 -> error(bad_not_implemented); + % 0 -> {<<>>, maps:put(status, 0, F1)} + % end, + % {MajorVersion, F1_2} = case ContentMask band ?UADP_DATA_SET_FIELD_MASK_MAJORVERSION of + % 1 -> error(bad_not_implemented); + % 0 -> {<<>>, maps:put(config_ver_major_ver, 0, F1_1)} + % end, + % {MinorVersion, F1_3} = case ContentMask band ?UADP_DATA_SET_FIELD_MASK_MINORVERSION of + % 1 -> error(bad_not_implemented); + % 0 -> {<<>>, maps:put(config_ver_minor_ver, 0, F1_2)} + % end, + % {SeqNumber, F1_4} = case ContentMask band ?UADP_DATA_SET_FIELD_MASK_SEQUENCENUMBER of + % 1 -> error(bad_not_implemented); + % 0 -> {<<>>, maps:put(dataset_msg_seq_num, 0, F1_3)} + % end, + {Flags2_1, Timestamp} = case ContentMask band ?UADP_DATA_SET_FIELD_MASK_TIMESTAMP of + 0 -> {Flags2, <<>>}; + _ -> T = opcua_codec_binary_builtin:encode(date_time, opcua_util:date_time()), + {Flags2 bor ?DATASET_FLAGS2_TIMESTAMP_ENABLED, T} + end, + % {PicoSeconds, F2_2} = case ContentMask band ?UADP_DATA_SET_FIELD_MASK_PICOSECONDS of + % 1 -> error(bad_not_implemented); + % 0 -> {<<>>, maps:put(picoseconds, 0, F2_1)} + iolist_to_binary([Flags1, Flags2_1, Timestamp]). + +encode_payload([DataSetMessage]) -> DataSetMessage; +encode_payload(DataSetMessages) -> + Sizes = [ <<(byte_size(DSM)):16/unsigned-little>> || DSM <- DataSetMessages], + iolist_to_binary([Sizes, DataSetMessages]). + + + +encode_network_message_headers(PublisherID, PublisherIdType, DSW_IDS, + #writer_group_config{ + message_settings = #uadp_writer_group_message_data{ + networkMessageContentMask = Mask} + } = WriterGroupCfg) -> + UADPFlags = ?UADP_VERSION, % from the specification + ExtFlags1 = 0, + % ExtFlags2 = 0, % unused + % UADP main flags + {UADPFlags1, PubID} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_PUBLISHER_ID of + 0 -> {UADPFlags, <<>>}; + _ -> ID = opcua_codec_binary_builtin:encode(PublisherIdType, PublisherID), + {UADPFlags bor ?PUBLISHER_ID_ENABLED, ID} + end, + {UADPFlags2, GH} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_GROUP_HEADER of + 0 -> {UADPFlags1, <<>>}; + _ -> H = encode_group_header(WriterGroupCfg, Mask), + {UADPFlags1 bor ?GROUP_HEADER_ENABLED, H} + end, + {UADPFlags3, PH} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_GROUP_HEADER of + 0 -> {UADPFlags2, <<>>}; + _ -> H_ = encode_payload_header(WriterGroupCfg, DSW_IDS), + {UADPFlags2 bor ?PAYLOAD_HEADER_ENABLED, H_} + end, + % ExtendedFlags1 always enabled + UADPFlags4 = UADPFlags3 bor ?EXT_FLAGS_1_ENABLED, + ExtFlags1_1 = case UADPFlags4 band ?PUBLISHER_ID_ENABLED of + 0 -> ExtFlags1; + _ -> ExtFlags1 bor encode_publisher_id_type(PublisherIdType) + end, + {ExtFlags1_2, ClassID} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_DATASET_CLASSID of + 0 -> {ExtFlags1_1, <<>>}; + _ -> {ExtFlags1_1 bor ?EXT_FLAGS1_DATA_SET_CLASS_ID_ENABLED, error(not_implemented)} + end, + % Security disabled, + % TODO: add check here when is implemented + {ExtFlags1_3, SH} = {ExtFlags1_2, <<>>}, + % ... 
+ {ExtFlags1_4, Timestamp} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_TIMESTAMP of + 0 -> {ExtFlags1_3, <<>>}; + _ -> T = opcua_codec_binary_builtin:encode(date_time, opcua_util:date_time()), + {ExtFlags1_3 bor ?EXT_FLAGS1_TIMESTAMP_ENABLED, T} + end, + {ExtFlags1_5, PicoSeconds} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_PICOSECONDS of + 0 -> {ExtFlags1_4, <<>>}; + _ -> {ExtFlags1_4 bor ?EXT_FLAGS1_PICOSECONDS_ENABLED, error(not_implemented)} + end, + + % ExtendedFlags2 disabled for simplicity + % this disables promoted_fields, picosecods timestamp + % and defaults to dataset_message + ExtFlags1_6 = ExtFlags1_5, % bor ?EXT_FLAGS_2_ENABLED, + + %ExtFlags2_1 = ExtFlags2 bor 0, % TODO: add check and support for ?CHUNK_MESSAGE + % {ExtFlags2_2, PromotedFields} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_PROMOTED_FIELDS of + % 0 -> {ExtFlags2_1, <<>>}; + % _ -> {ExtFlags2_1 bor ?PROMOTED_FIELD_ENABLED, error(not_implemented)} + % end, + % TODO: support more message types + % HardcodedMsgType = dataset_message, + % ExtFlags2_3 = ExtFlags2_2 bor encode_network_msg_type(HardcodedMsgType), + + iolist_to_binary([ + UADPFlags4, ExtFlags1_6, %% Flags ExtFlags2 is unused for now + PubID, ClassID, GH, PH, % Main elements + % extended header elements (unused) + %Timestamp, PicoSeconds, PromotedFields, + SH % optional security info (unused) + ]). +%%% INTERNALS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + decode_extended_flags1(0, Bin) -> {#{ extended_flags2 => 0, @@ -83,7 +257,7 @@ decode_extended_flags1(0, Bin) -> timestamp => 0, security => 0, dataset_class_id => 0, - publisher_id_type => publisher_id_type(0) + decode_publisher_id_type => decode_publisher_id_type(0) }, Bin}; decode_extended_flags1(1, << ExtendedFlags2:1, @@ -98,15 +272,15 @@ decode_extended_flags1(1, << timestamp => Timestamp, security => Security, dataset_class_id => DataSetClassId, - publisher_id_type => publisher_id_type(PublisherIdType) + decode_publisher_id_type => decode_publisher_id_type(PublisherIdType) }, Rest}. -publisher_id_type(?UA_PUBLISHERIDTYPE_BYTE) -> byte; -publisher_id_type(?UA_PUBLISHERIDTYPE_UINT16) -> uint16; -publisher_id_type(?UA_PUBLISHERIDTYPE_UINT32) -> uint32; -publisher_id_type(?UA_PUBLISHERIDTYPE_UINT64) -> uint64; -publisher_id_type(?UA_PUBLISHERIDTYPE_STRING) -> string; -publisher_id_type(_) -> reserved. +decode_publisher_id_type(?UA_PUBLISHERIDTYPE_BYTE) -> byte; +decode_publisher_id_type(?UA_PUBLISHERIDTYPE_UINT16) -> uint16; +decode_publisher_id_type(?UA_PUBLISHERIDTYPE_UINT32) -> uint32; +decode_publisher_id_type(?UA_PUBLISHERIDTYPE_UINT64) -> uint64; +decode_publisher_id_type(?UA_PUBLISHERIDTYPE_STRING) -> string; +decode_publisher_id_type(_) -> reserved. decode_extended_flags2(0, Bin) -> {#{ @@ -133,7 +307,7 @@ decode_network_msg_type(<< 0:1, 1:1, 0:1>>) -> discovery_responce; decode_network_msg_type(<< _:1, _:1, _:1>>) -> reserved. decode_publisherID(0, _, Binary) -> {undefined, Binary}; -decode_publisherID(1, #{publisher_id_type := uint16}, Binary) -> +decode_publisherID(1, #{decode_publisher_id_type := uint16}, Binary) -> <> = Binary, {PublisherID, Rest}. % TODO: handle other PublisherID types @@ -170,7 +344,6 @@ decode_network_message_number(1, Bin) -> opcua_codec_binary_builtin:decode(uint1 decode_network_sequence_number(0, Bin) -> {undefined, Bin}; decode_network_sequence_number(1, Bin) -> opcua_codec_binary_builtin:decode(uint16, Bin). 
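decode_publisherID/3 above only implements the uint16 case and leaves the other PublisherId types as a TODO. Assuming the remaining fixed-width types use the same little-endian layout as the rest of this module (the string type would still have to go through the regular string codec), the missing clauses could look roughly like this sketch:

-module(publisher_id_sketch).
-export([decode/2]).

% Decode a PublisherId of the given type from the front of a binary,
% returning the id and the remaining bytes.
decode(byte,   <<Id:8,                  Rest/binary>>) -> {Id, Rest};
decode(uint16, <<Id:16/unsigned-little, Rest/binary>>) -> {Id, Rest};
decode(uint32, <<Id:32/unsigned-little, Rest/binary>>) -> {Id, Rest};
decode(uint64, <<Id:64/unsigned-little, Rest/binary>>) -> {Id, Rest}.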
- decode_payload_header(0, _, Bin) -> {undefined, Bin}; decode_payload_header(1, #{chunk := 1}, Bin) -> {DataSetWriterID, Rest} = opcua_codec_binary_builtin:decode(uint16, Bin), @@ -188,8 +361,6 @@ decode_payload_header(1, #{network_message_type := discovery_request}, Bin) -> decode_payload_header(1, #{network_message_type := discovery_responce}, Bin) -> throw({not_implemented, discovery_responce}). - - decode_multi_dataset_message(Bin, Sizes) -> decode_multi_dataset_message(Bin, Sizes, []). @@ -294,3 +465,82 @@ decode_dataset_cfg_minor_ver(#{config_ver_minor_ver := 0}, Bin) -> {undefined, Bin}; decode_dataset_cfg_minor_ver(#{config_ver_minor_ver := 1}, Bin) -> opcua_codec_binary_builtin:decode(uint32, Bin). + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +encode_dataset_message_flags1(#{ + dataset_msg_valid := DataSetMsgValid, + field_encoding := FieldEncoding, + dataset_msg_seq_num := DataSetMsgSeqNum, + status := Status, + config_ver_minor_ver := ConfigVerMajorVer, + config_ver_major_ver := ConfigVerMinorVer, + dataset_flags2 := DataSetFlags2 + }) -> + <>. + +encode_field_encoding(variant) -> ?DATASET_FLAGS1_VARIANT; +encode_field_encoding(raw) -> ?DATASET_FLAGS1_RAWDATA; +encode_field_encoding(data_value) -> ?DATASET_FLAGS1_DATAVALUE. + +encode_dataset_message_flags2(#{ + msg_type := DataMsgType, + timestamp := Timestamp, + picoseconds := PicoSeconds + }) -> + <<0:2, + PicoSeconds:1, + Timestamp:1, + (encode_dataset_message_type(DataMsgType)):4/bitstring>>. + + +encode_dataset_message_type(data_key_frame) -> <<0:4>>; +encode_dataset_message_type(data_delta_fram) -> <<0:1, 0:1, 0:1, 1:1>>; +encode_dataset_message_type(event) -> <<0:1, 0:1, 1:1, 0:1>>; +encode_dataset_message_type(keep_alive) -> <<0:1, 0:1, 1:1, 1:1>>. + +encode_publisher_id_type(byte) -> ?UA_PUBLISHERIDTYPE_BYTE; +encode_publisher_id_type(uint16) -> ?UA_PUBLISHERIDTYPE_UINT16; +encode_publisher_id_type(uint32) -> ?UA_PUBLISHERIDTYPE_UINT32; +encode_publisher_id_type(uint64) -> ?UA_PUBLISHERIDTYPE_UINT64; +encode_publisher_id_type(string) -> ?UA_PUBLISHERIDTYPE_STRING. + +encode_network_msg_type(dataset_message) -> ?EXT_FLAGS2_DATASET_MSG_TYPE; +encode_network_msg_type(discovery_request) -> ?EXT_FLAGS2_DISCOVERY_REQUEST_MSG_TYPE; +encode_network_msg_type(discovery_responce) -> ?EXT_FLAGS2_DISCOVERY_RESPONSE_MSG_TYPE. + + + +encode_payload_header(WriterGroupCfg, DSW_IDS) -> + IDS = [<> || ID <- DSW_IDS ], + Count = <<(length(DSW_IDS)):8/unsigned-little>>, + iolist_to_binary([Count | IDS]). + +encode_group_header(#writer_group_config{ + writer_group_id = WriterGroupId}, Mask) -> + Flags = 0, + Elements = [], + {Flags1, Elements1} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_SEQ_NUM of + 0 -> {Flags, Elements}; + _ -> {Flags bor ?SEQUENCE_NUMBER_ENABLED, error(not_implemented)} + end, + {Flags2, Elements2} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_NET_MSG_NUM of + 0 -> {Flags1, Elements1}; + _ -> {Flags1 bor ?NETWORK_MESSAGE_ENABLED, error(not_implemented)} + end, + {Flags3, Elements3} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_GROUP_VERSION of + 0 -> {Flags2, Elements2}; + _ -> {Flags2 bor ?GROUP_VERSION_ENABLED, error(not_implemented)} + end, + {Flags4, Elements4} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_WRITER_GROUP_ID of + 0 -> {Flags3, Elements3}; + _ -> {Flags3 bor ?WRITER_GROUP_ENABLED, + [opcua_codec_binary_builtin:encode(uint16, WriterGroupId) | Elements3]} + end, + iolist_to_binary([Flags4, Elements4]). 
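encode_payload/1 earlier in this file emits the DataSetMessages back to back, preceded by their byte sizes as little-endian uint16 values when there is more than one of them. A self-contained sketch of that framing together with the matching split a subscriber would perform; this is illustrative only and not code from the patch.

-module(payload_framing_sketch).
-export([frame/1, unframe/2]).

frame([Single]) -> Single;                      % single message: no size table
frame(Messages) ->
    Sizes = [<<(byte_size(M)):16/unsigned-little>> || M <- Messages],
    iolist_to_binary([Sizes, Messages]).

% Count comes from the payload header on the reader side.
unframe(Binary, 1) -> [Binary];
unframe(Binary, Count) when Count > 1 ->
    TableBytes = 2 * Count,
    <<Table:TableBytes/binary, Rest/binary>> = Binary,
    Sizes = [S || <<S:16/unsigned-little>> <= Table],
    split(Sizes, Rest).

split([], <<>>) -> [];
split([S | Sizes], Bin) ->
    <<Msg:S/binary, Rest/binary>> = Bin,
    [Msg | split(Sizes, Rest)].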
diff --git a/src/opcua_pubsub_udp.erl b/src/opcua_pubsub_udp.erl index e29fdd3..c1ff49d 100644 --- a/src/opcua_pubsub_udp.erl +++ b/src/opcua_pubsub_udp.erl @@ -6,7 +6,8 @@ -include_lib("kernel/include/logger.hrl"). -record(state, { - socket + socket, + out_socket }). @@ -24,24 +25,27 @@ init(#{ {active, true}, {reuseaddr, true}, {ip, MulticastGroup}, - {multicast_ttl, 10} + {multicast_ttl, 10}, + {multicast_loop, false} ], case gen_udp:open(Port, Opts) of {ok, Socket} -> - inet:setopts(Socket, [{add_membership,{MulticastGroup,InterfaceIP}}]), + inet:setopts(Socket, [{add_membership,{MulticastGroup, InterfaceIP}}]), + {ok, S} = gen_udp:open(0), {ok, #state{ - socket = Socket + socket = Socket, + out_socket = S }}; {error, Reason} -> {error, Reason} end. -send(Data, #state{socket = Socket}) -> - ok = gen_udp:send(Socket, Data). +send(Data, #state{out_socket = Socket} = S) -> + ok = gen_udp:send(Socket, {224,0,0,22}, 4840, Data), + S. handle_info({udp, Socket, _IP, _Port, Packet}, #state{socket = Socket} = S) -> Packet. - % helpers %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % parse_ip(BinaryIP) -> diff --git a/src/opcua_pubsub_writer_group.erl b/src/opcua_pubsub_writer_group.erl index ec0251e..24a4e2b 100644 --- a/src/opcua_pubsub_writer_group.erl +++ b/src/opcua_pubsub_writer_group.erl @@ -1,6 +1,6 @@ -module(opcua_pubsub_writer_group). --export([new/1]). +-export([new/3]). -export([add_dataset_writer/3]). -export([init/2]). -export([write_network_message/1]). @@ -9,37 +9,18 @@ -record(state, { state = operational :: pubsub_state_machine(), - name, - writer_group_id, - publishing_interval, - keep_alive_time, - priority, - locale_ids, - transport_settings, - message_settings, + publisher_id, + publisher_id_type, + config :: #writer_group_config{}, dataset_writers = #{}, timer }). -new(#writer_group_config{ - enabled = E, - name = N, - writer_group_id = WG_ID, - publishing_interval = P_INTERVAL, - keep_alive_time = KA_TIME, - priority = P, - locale_ids = Locales, - transport_settings = TS, - message_settings = MS}) -> +new(PublisherId, PublisherIdType, #writer_group_config{} = Config) -> {ok, #state{ - name = N, - writer_group_id = WG_ID, - publishing_interval = P_INTERVAL, - keep_alive_time = KA_TIME, - priority = P, - locale_ids = Locales, - transport_settings = TS, - message_settings = MS, + publisher_id = PublisherId, + publisher_id_type = PublisherIdType, + config = Config, dataset_writers = #{} }}. @@ -49,18 +30,29 @@ add_dataset_writer(PDS_id, DSW_cfg, #state{dataset_writers = DSWs} = S) -> NewDSWs = maps:put(DSW_id, DSW, DSWs), {ok, DSW_id, S#state{dataset_writers = NewDSWs}}. -init(ID, #state{publishing_interval = PublishingInterval} = S) -> +init(ID, #state{config = #writer_group_config{ + publishing_interval = PublishingInterval}} = S) -> {ok, Tref} = timer:send_interval(PublishingInterval, {publish, ID}), - S#state{timer = Tref}. + S#state{state = operational, timer = Tref}. 
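init/2 just above arms the publish loop with timer:send_interval/2, which repeatedly posts {publish, ID} to the calling process; the owning connection then picks those messages up in handle_info/2. A tiny self-contained illustration of that trigger, not part of the patch:

-module(publish_tick_sketch).
-export([run/2]).

% Receive three ticks for the given writer-group id, then stop the timer.
run(Id, IntervalMs) ->
    {ok, TRef} = timer:send_interval(IntervalMs, {publish, Id}),
    loop(Id, TRef, 3).

loop(_Id, TRef, 0) ->
    {ok, cancel} = timer:cancel(TRef),
    ok;
loop(Id, TRef, N) ->
    receive
        {publish, Id} ->
            io:format("publish tick for writer group ~p~n", [Id]),
            loop(Id, TRef, N - 1)
    end.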
-write_network_message(#state{dataset_writers = DatasetWriters} = S) -> +write_network_message(#state{publisher_id = PublisherId, + publisher_id_type = PublisherIdType, + config = Config, + dataset_writers = DatasetWriters} = S) -> Results = [ begin - {DSM, NewState} = opcua_pubsub_dataset_writer:write_dataset_message(DSW), - {DSM, {ID, NewState}} + {DSM, DSW_ID, NewState} = opcua_pubsub_dataset_writer:write_dataset_message(DSW), + {DSM, DSW_ID, {ID, NewState}} end || {ID, DSW} <- maps:to_list(DatasetWriters)], - {DataSetMessages, KV_pairs_DSWs} = lists:unzip(Results), - io:format("DSMs: ~p~n", [DataSetMessages]), - NetMsg = <<>>, - {NetMsg, S#state{dataset_writers = maps:from_list(KV_pairs_DSWs)}}. \ No newline at end of file + {DataSetMessages, DSW_IDS, KV_pairs_DSWs} = lists:unzip3(Results), + NewState = S#state{dataset_writers = maps:from_list(KV_pairs_DSWs)}, + % io:format("DSMs: ~p~n", [DataSetMessages]), + Payload = opcua_pubsub_uadp:encode_payload(DataSetMessages), + % Headers presence in the Network message should be regulated by the content mask + Headers = opcua_pubsub_uadp:encode_network_message_headers(PublisherId, + PublisherIdType, + DSW_IDS, + Config), + NetworkMessage = iolist_to_binary([Headers, Payload]), + {NetworkMessage, NewState}. From e548b5686e67f1d2cc3254bcb285b8ad1f894030 Mon Sep 17 00:00:00 2001 From: Luca Succi Date: Mon, 30 Jan 2023 11:26:10 +0100 Subject: [PATCH 7/7] minor cleanups --- src/opcua_pubsub.hrl | 8 ++-- src/opcua_pubsub_connection.erl | 17 +++----- src/opcua_pubsub_dataset_reader.erl | 13 +++--- src/opcua_pubsub_dataset_writer.erl | 22 ++++------ src/opcua_pubsub_example.erl | 18 ++++---- src/opcua_pubsub_uadp.erl | 64 ++++++++--------------------- src/opcua_pubsub_udp.erl | 2 +- 7 files changed, 53 insertions(+), 91 deletions(-) diff --git a/src/opcua_pubsub.hrl b/src/opcua_pubsub.hrl index 34a9865..07a8316 100644 --- a/src/opcua_pubsub.hrl +++ b/src/opcua_pubsub.hrl @@ -24,8 +24,8 @@ }). -record(dataset_field_metadata,{ - name :: string(), - description :: string(), + name :: binary(), + description :: undefined | binary(), field_flags, % This flag indicates if the field is promoted to the NetworkMessage header builtin_type :: opcua:builtin_type(), data_type :: opcua:node_id(), @@ -75,7 +75,7 @@ -record(published_dataset,{ name, - dataset_folder :: list(),% path to the destination folder + dataset_folder :: undefined | list(binary()),% path to the destination folder dataset_metadata :: #dataset_metadata{}, extension_fields, dataset_source = [] :: published_dataset_source() @@ -134,7 +134,7 @@ dataset_writer_id :: non_neg_integer(), dataset_field_content_mask = ?DEFAULT_DATA_SET_FIELD_CONTENT, keyframe_count = 1 :: non_neg_integer(), - dataset_name :: string(), + dataset_name :: undefined | binary(), transport_settings, message_settings }). diff --git a/src/opcua_pubsub_connection.erl b/src/opcua_pubsub_connection.erl index b52e788..80e94db 100644 --- a/src/opcua_pubsub_connection.erl +++ b/src/opcua_pubsub_connection.erl @@ -21,7 +21,7 @@ transport_config, publisher_id, publisher_id_type, - middleware :: {module(), term()}, + middleware, reader_groups = #{}, writer_groups = #{} }). 
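The middleware field in the connection state above holds a {Module, State} pair: opcua_pubsub_udp implements init/1, send/2 and handle_info/2, and the connection drives the pair through send/2 on publish and handle_info/2 on incoming socket traffic. A hypothetical behaviour declaration along these lines (not part of the patch) would make that contract explicit:

-module(opcua_pubsub_transport_sketch).

% Hypothetical behaviour capturing how the connection drives its transport
% module; see opcua_pubsub_udp for the UDP implementation.
-callback init(Config :: map()) ->
    {ok, State :: term()} | {error, Reason :: term()}.
-callback send(NetworkMessage :: iodata(), State :: term()) ->
    NewState :: term().
-callback handle_info(Info :: term(), State :: term()) ->
    Packet :: binary().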
@@ -74,7 +74,8 @@ add_writer_group(WriterGroupCfg, #state{ add_dataset_writer(WG_id, PDS_id, WriterCfg, #state{writer_groups = WGs} = S) -> WG = maps:get(WG_id, WGs), - {ok, DSW_is, NewWriterGroup} = opcua_pubsub_writer_group:add_dataset_writer(PDS_id, WriterCfg, WG), + {ok, DSW_is, NewWriterGroup} = + opcua_pubsub_writer_group:add_dataset_writer(PDS_id, WriterCfg, WG), WGs2 = maps:put(WG_id, NewWriterGroup, WGs), {ok, DSW_is, S#state{writer_groups = WGs2}}. @@ -110,7 +111,7 @@ handle_info({publish, WG_ID}, #state{ writer_groups = WriterGroups} = State) -> WG = maps:get(WG_ID, WriterGroups), {NetMsg, NewWG} = opcua_pubsub_writer_group:write_network_message(WG), - io:format("Sending NetworkMsg: ~p~n",[NetMsg]), + % io:format("Sending NetworkMsg: ~p~n",[NetMsg]), MiddlewareState2 = Module:send(NetMsg, MiddlewareState), {noreply, State#state{ middleware = {Module, MiddlewareState2}, @@ -139,10 +140,6 @@ default_config() -> #{ handle_network_message(Binary, #state{reader_groups = RGs} = S) -> {Headers, Payload} = opcua_pubsub_uadp:decode_network_message_headers(Binary), - - DataSetMessages = opcua_pubsub_uadp:decode_payload(Headers, Payload), - io:format("Msgs = ~p\n",[DataSetMessages]), - InterestedReaders = [begin DSR_ids = opcua_pubsub_reader_group:filter_readers(Headers,RG), @@ -152,11 +149,9 @@ handle_network_message(Binary, #state{reader_groups = RGs} = S) -> ReadersCount = lists:sum([length(DSR_ids) || {_, _, DSR_ids} <- InterestedReaders]), case ReadersCount > 0 of - false -> - io:format("Skipped NetMsg = ~p\n",[Binary]), + false -> % io:format("Skipped NetMsg = ~p\n",[Binary]), {ok, S}; - true -> - io:format("Accepting NetMsg = ~p\n",[Headers]), + true -> % io:format("Accepting NetMsg = ~p\n",[Headers]), % we can procede with the security step if needed: % opcua_pubsub_security: ... not_implemented yet % Then we decode all messages diff --git a/src/opcua_pubsub_dataset_reader.erl b/src/opcua_pubsub_dataset_reader.erl index 1ae9d58..0d3a53b 100644 --- a/src/opcua_pubsub_dataset_reader.erl +++ b/src/opcua_pubsub_dataset_reader.erl @@ -134,15 +134,18 @@ update_subscribed_dataset(DataSet, #state{subscribed_dataset = TGT_vars} = S) ok = update_target_variables(DataSet, TGT_vars), S. -update_target_variables([], TGT_vars) -> ok; -update_target_variables([{FieldMD, Variable}|DataSet], TGT_vars) -> +update_target_variables([], _TGT_vars) -> ok; +update_target_variables([{FieldMD, Variable} | RemainingDataSet], TGT_vars) -> FieldId = FieldMD#dataset_field_metadata.dataset_field_id, - [TGT|_] = [ Var || #target_variable{dataset_field_id = DataSetFieldId} = Var - <- TGT_vars, DataSetFieldId == FieldId], + {[TGT], OtherTGTs} = lists:partition( + fun(#target_variable{dataset_field_id = DataSetFieldId}) -> + DataSetFieldId == FieldId + end, + TGT_vars), TargetNodeId = TGT#target_variable.target_node_id, AttrId = TGT#target_variable.attribute_id, update_tgt_var_attribute(TargetNodeId, AttrId, Variable), - ok. + update_target_variables(RemainingDataSet, OtherTGTs). update_tgt_var_attribute(TargetNodeId, ?UA_ATTRIBUTEID_VALUE, #opcua_variant{value = Value}) -> diff --git a/src/opcua_pubsub_dataset_writer.erl b/src/opcua_pubsub_dataset_writer.erl index 6de068d..1cae551 100644 --- a/src/opcua_pubsub_dataset_writer.erl +++ b/src/opcua_pubsub_dataset_writer.erl @@ -19,7 +19,7 @@ connected_published_dataset }). 
-new(PDS_ID, #dataset_writer_config{ +new(PDS_id, #dataset_writer_config{ name = N, dataset_writer_id = DS_WID, dataset_field_content_mask = CM, @@ -37,14 +37,13 @@ new(PDS_ID, #dataset_writer_config{ dataset_name = DN, transport_settings = TS, message_settings = MS, - connected_published_dataset = PDS_ID + connected_published_dataset = opcua_pubsub:get_published_dataset(PDS_id) }}. write_dataset_message(#state{dataset_writer_id = DSW_ID, - connected_published_dataset = PDS_id, + connected_published_dataset = PDS, dataset_field_content_mask = ContentMask} = S) -> - PDS = opcua_pubsub:get_published_dataset(PDS_id), - % We are going to produce a keyframe, always. + % We are going to produce a keyframe with variant encoding, always. % We do not support delta-frames so we ignore keyframe_count #published_dataset{ dataset_metadata = #dataset_metadata{fields = FieldsMetadata} = MD, @@ -52,9 +51,10 @@ write_dataset_message(#state{dataset_writer_id = DSW_ID, } = PDS, Values = read_sources(DatasetSource, []), Fields = encode_data_set_fields(FieldsMetadata, Values), - FieldCount = <<(length(Fields)):16/unsigned-little>>, - Header = encode_message_header(variant, data_key_frame, ContentMask, MD), - DataSetMessage = iolist_to_binary([Header, FieldCount, Fields]), + Header = opcua_pubsub_uadp:encode_dataset_message_header(variant, + data_key_frame, + ContentMask, MD), + DataSetMessage = opcua_pubsub_uadp:encode_dataset_message(Header, Fields), {DataSetMessage, DSW_ID, S}. read_sources([], Values) -> lists:reverse(Values); @@ -70,12 +70,6 @@ read_sources([#published_variable{ % io:format("Read value: ~p~n",[Value]), read_sources(Rest, [Value | Values]). -encode_message_header(FieldEncoding, MsgType, ContentMask, Metadata) -> - - opcua_pubsub_uadp:encode_dataset_message_header(FieldEncoding, MsgType, - ContentMask, Metadata). - - encode_data_set_fields(FieldsMetadata, Values) -> encode_data_set_fields(FieldsMetadata, Values, []). diff --git a/src/opcua_pubsub_example.erl b/src/opcua_pubsub_example.erl index 851c7b6..718d61d 100644 --- a/src/opcua_pubsub_example.erl +++ b/src/opcua_pubsub_example.erl @@ -6,8 +6,10 @@ -include("opcua.hrl"). -include("opcua_pubsub.hrl"). +-define(URL, <<"opc.udp://224.0.0.22:4840">>). 
+ subscription() -> - Url = <<"opc.udp://224.0.0.22:4840">>, + Url = ?URL, ConnectionConfig = #connection_config{}, {ok, Conn} = opcua_pubsub:new_connection(Url, ConnectionConfig, #{}), @@ -21,11 +23,11 @@ subscription() -> writer_group_id = 100, dataset_writer_id = 62541, dataset_metadata = #dataset_metadata{ - name = "DataSet 1", - description = "An example from 62541", + name = <<"DataSet 1">>, + description = <<"An example from 62541">>, fields = [ #dataset_field_metadata{ - name = "DateTime 1", + name = <<"DateTime 1">>, builtin_type = date_time, data_type = opcua_node:id(date_time), valueRank = -1 % a scalar, @@ -54,9 +56,9 @@ subscription() -> publication() -> PDS_cfg = #published_dataset{ - name = "PublishedDataSet Example", + name = <<"PublishedDataSet Example">>, dataset_metadata = #dataset_metadata{ - name = "My Metadata" + name = <<"My Metadata">> } }, {ok, PDS_id} = opcua_pubsub:add_published_dataset(PDS_cfg), @@ -64,7 +66,7 @@ publication() -> % we specify the fields metadata and their sources % In this case we list available variables as sources FieldsMetaData = [#dataset_field_metadata{ - name = "DateTime 1", + name = <<"DateTime 1">>, builtin_type = date_time, data_type = opcua_node:id(date_time), valueRank = -1 % a scalar, @@ -76,7 +78,7 @@ publication() -> }], ok = opcua_pubsub:add_published_dataset_field(PDS_id, FieldsMetaData, FieldsSource), - Url = <<"opc.udp://224.0.0.22:4840">>, + Url = ?URL, ConnectionConfig = #connection_config{ publisher_id = 2234, publisher_id_type = uint16 diff --git a/src/opcua_pubsub_uadp.erl b/src/opcua_pubsub_uadp.erl index 94ce2fa..85b348f 100644 --- a/src/opcua_pubsub_uadp.erl +++ b/src/opcua_pubsub_uadp.erl @@ -7,6 +7,7 @@ -export([encode_dataset_message_field/2]). -export([encode_dataset_message_header/4]). +-export([encode_dataset_message/2]). -export([encode_payload/1]). -export([encode_network_message_headers/4]). @@ -122,13 +123,14 @@ decode_dataset_message_field(_, _, _) -> encode_dataset_message_field(#dataset_field_metadata{ data_type = DataType}, #opcua_variant{} = V) -> + % TODO: make sure to correctly extract a value before encoding. %io:format("Original Variant: ~p~n",[V]), {_TypeID, Val} = opcua_codec:unpack_variant(V), - V2 = opcua_codec:pack_variant(opcua_server_space, DataType, Val), + V2 = opcua_codec:pack_variant(opcua_server_space, DataType, -1, Val), %io:format("Variant to publish: ~p~n",[V2]), {Binary, _} = opcua_codec_binary:encode(variant, V2), %io:format("Encoded ~p~n", [Binary]), - {Result, _Rest} = opcua_codec_binary:decode(variant, Binary), + {_Result, _Rest} = opcua_codec_binary:decode(variant, Binary), %io:format("Decoded: ~p~n",[Result]), Binary. @@ -137,7 +139,6 @@ encode_dataset_message_header(FieldEncoding, MsgType, ContentMask, Flags1 = ?DATASET_FLAGS1_VALID bor encode_field_encoding(FieldEncoding) bor ?DATASET_FLAGS1_FLAGS2_ENABLED, - io:format("Flags1 ~p~n",[Flags1]), Flags2 = 0, % DataSetFlags1 % F1 = #{ @@ -170,13 +171,15 @@ encode_dataset_message_header(FieldEncoding, MsgType, ContentMask, % 0 -> {<<>>, maps:put(picoseconds, 0, F2_1)} iolist_to_binary([Flags1, Flags2_1, Timestamp]). +encode_dataset_message(Header, Fields) -> + FieldCount = <<(length(Fields)):16/unsigned-little>>, + iolist_to_binary([Header, FieldCount, Fields]). + encode_payload([DataSetMessage]) -> DataSetMessage; encode_payload(DataSetMessages) -> Sizes = [ <<(byte_size(DSM)):16/unsigned-little>> || DSM <- DataSetMessages], iolist_to_binary([Sizes, DataSetMessages]). 
- - encode_network_message_headers(PublisherID, PublisherIdType, DSW_IDS, #writer_group_config{ message_settings = #uadp_writer_group_message_data{ @@ -198,7 +201,7 @@ encode_network_message_headers(PublisherID, PublisherIdType, DSW_IDS, end, {UADPFlags3, PH} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_GROUP_HEADER of 0 -> {UADPFlags2, <<>>}; - _ -> H_ = encode_payload_header(WriterGroupCfg, DSW_IDS), + _ -> H_ = encode_payload_header(DSW_IDS), {UADPFlags2 bor ?PAYLOAD_HEADER_ENABLED, H_} end, % ExtendedFlags1 always enabled @@ -230,12 +233,12 @@ encode_network_message_headers(PublisherID, PublisherIdType, DSW_IDS, % and defaults to dataset_message ExtFlags1_6 = ExtFlags1_5, % bor ?EXT_FLAGS_2_ENABLED, - %ExtFlags2_1 = ExtFlags2 bor 0, % TODO: add check and support for ?CHUNK_MESSAGE + % ExtFlags2_1 = ExtFlags2 bor 0, % TODO: add check and support for ?CHUNK_MESSAGE % {ExtFlags2_2, PromotedFields} = case Mask band ?UADP_NET_MSG_CONTENT_MASK_PROMOTED_FIELDS of % 0 -> {ExtFlags2_1, <<>>}; - % _ -> {ExtFlags2_1 bor ?PROMOTED_FIELD_ENABLED, error(not_implemented)} + % _ -> {ExtFlags2_1 bor ?EXT_FLAGS2_PROMOTED_FIELD_ENABLED, error(not_implemented)} % end, - % TODO: support more message types + % % TODO: support more message types % HardcodedMsgType = dataset_message, % ExtFlags2_3 = ExtFlags2_2 bor encode_network_msg_type(HardcodedMsgType), @@ -468,56 +471,21 @@ decode_dataset_cfg_minor_ver(#{config_ver_minor_ver := 1}, Bin) -> %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% -encode_dataset_message_flags1(#{ - dataset_msg_valid := DataSetMsgValid, - field_encoding := FieldEncoding, - dataset_msg_seq_num := DataSetMsgSeqNum, - status := Status, - config_ver_minor_ver := ConfigVerMajorVer, - config_ver_major_ver := ConfigVerMinorVer, - dataset_flags2 := DataSetFlags2 - }) -> - <>. - encode_field_encoding(variant) -> ?DATASET_FLAGS1_VARIANT; encode_field_encoding(raw) -> ?DATASET_FLAGS1_RAWDATA; encode_field_encoding(data_value) -> ?DATASET_FLAGS1_DATAVALUE. -encode_dataset_message_flags2(#{ - msg_type := DataMsgType, - timestamp := Timestamp, - picoseconds := PicoSeconds - }) -> - <<0:2, - PicoSeconds:1, - Timestamp:1, - (encode_dataset_message_type(DataMsgType)):4/bitstring>>. - - -encode_dataset_message_type(data_key_frame) -> <<0:4>>; -encode_dataset_message_type(data_delta_fram) -> <<0:1, 0:1, 0:1, 1:1>>; -encode_dataset_message_type(event) -> <<0:1, 0:1, 1:1, 0:1>>; -encode_dataset_message_type(keep_alive) -> <<0:1, 0:1, 1:1, 1:1>>. - encode_publisher_id_type(byte) -> ?UA_PUBLISHERIDTYPE_BYTE; encode_publisher_id_type(uint16) -> ?UA_PUBLISHERIDTYPE_UINT16; encode_publisher_id_type(uint32) -> ?UA_PUBLISHERIDTYPE_UINT32; encode_publisher_id_type(uint64) -> ?UA_PUBLISHERIDTYPE_UINT64; encode_publisher_id_type(string) -> ?UA_PUBLISHERIDTYPE_STRING. -encode_network_msg_type(dataset_message) -> ?EXT_FLAGS2_DATASET_MSG_TYPE; -encode_network_msg_type(discovery_request) -> ?EXT_FLAGS2_DISCOVERY_REQUEST_MSG_TYPE; -encode_network_msg_type(discovery_responce) -> ?EXT_FLAGS2_DISCOVERY_RESPONSE_MSG_TYPE. - - +% encode_network_msg_type(dataset_message) -> ?EXT_FLAGS2_DATASET_MSG_TYPE; +% encode_network_msg_type(discovery_request) -> ?EXT_FLAGS2_DISCOVERY_REQUEST_MSG_TYPE; +% encode_network_msg_type(discovery_responce) -> ?EXT_FLAGS2_DISCOVERY_RESPONSE_MSG_TYPE. -encode_payload_header(WriterGroupCfg, DSW_IDS) -> +encode_payload_header(DSW_IDS) -> IDS = [<> || ID <- DSW_IDS ], Count = <<(length(DSW_IDS)):8/unsigned-little>>, iolist_to_binary([Count | IDS]). 
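For reference, the payload header written by encode_payload_header/1 above is a count byte followed by the DataSetWriterIds, presumably as little-endian uint16 values, mirroring decode_payload_header/3 on the reader side. A quick shell check of that layout with two arbitrary writer ids, building the same iolist inline since the function is not exported:

1> Ids = [62541, 7].
[62541,7]
2> iolist_to_binary([<<(length(Ids)):8/unsigned-little>>, [<<Id:16/unsigned-little>> || Id <- Ids]]).
<<2,77,244,7,0>>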
diff --git a/src/opcua_pubsub_udp.erl b/src/opcua_pubsub_udp.erl index c1ff49d..52410a5 100644 --- a/src/opcua_pubsub_udp.erl +++ b/src/opcua_pubsub_udp.erl @@ -43,7 +43,7 @@ send(Data, #state{out_socket = Socket} = S) -> ok = gen_udp:send(Socket, {224,0,0,22}, 4840, Data), S. -handle_info({udp, Socket, _IP, _Port, Packet}, #state{socket = Socket} = S) -> +handle_info({udp, Socket, _IP, _Port, Packet}, #state{socket = Socket} = _S) -> Packet. % helpers %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
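Rough end-to-end smoke test for the series as a whole, assuming the opcua application and the pubsub supervisors from the first patch are running on both machines and that subscription/0 and publication/0 are exported from opcua_pubsub_example. Run the subscriber and the publisher on two different hosts in the same multicast domain rather than relying on same-host loopback, since the UDP transport now sets multicast_loop to false:

%% on the subscribing node
1> opcua_pubsub_example:subscription().
%% on the publishing node
1> opcua_pubsub_example:publication().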