syntax = "proto3";
package com.emagiz.components.kafka.serialization;

option java_package = "com.emagiz.components.kafka.serialization";

import "google/protobuf/any.proto";

// Copied from https://github.com/kpn-dsh/tenant-example/blob/master/src/main/proto/envelope.proto
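// Per that DSH tenant example, KeyEnvelope is (as far as can be inferred here)
// serialized as the Kafka record key on stream topics, with DataEnvelope (below)
// as the record value.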

message KeyEnvelope {
  KeyHeader header = 1;           // header is mandatory on stream topics
  string key = 2;                 // MQTT topic (minus prefixes) on stream topics
  reserved 3;                     // deprecated field
}

message KeyHeader {
  // identifies the message origin
  Identity identifier = 1;

  // marks the message as 'retained'
  // this makes the Latest Value Store keep a copy of the message in memory for later retrieval
  bool retained = 2;

  // the QoS with which the message is handled within the system
  QoS qos = 3;
}

// identifies the data origin
message Identity {
  string  tenant    = 1;
  oneof publisher {
    string free_form   = 2;     // for 'backwards' compatibility or producers not matching the publisher types below
    string user        = 3;     // reserved for MQTT side publishes ('humans')
    string client      = 4;     // reserved for MQTT side publishes ('devices')
    string application = 5;     // to be used when producing data from containers
  }
}

// System QoS identifiers
enum QoS {
  BEST_EFFORT  = 0;   // may be dropped when resources run low (~ highest throughput)
  RELIABLE     = 1;   // will *never* be dropped; delivery is retried until success (~ lowest throughput)
}

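// DataEnvelope wraps the actual record payload plus tracing metadata.
// Leaving the `kind` oneof unset acts as a delete marker for the Latest
// Value Store (comparable to a null-value tombstone on a compacted Kafka topic).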
message DataEnvelope {
  // Main payload for data messages; leave the oneof unset for
  // DELETE semantics in the Latest Value Store.
  oneof kind {
    bytes payload = 1;
  }

  map<string, string> tracing = 2; // tracing data: used for passing span contexts between applications
  // tenant-specific fields are ONLY allowed in the 500-1000 range
}
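
// Illustrative only (not part of the original schema): a container application
// producing to a stream topic would typically fill the envelopes roughly as
//   key   = KeyEnvelope{ header: { identifier: { tenant: "<tenant>",
//                                                application: "<app>" },
//                                  retained: false, qos: BEST_EFFORT },
//                        key: "<mqtt-topic-suffix>" }
//   value = DataEnvelope{ payload: <raw bytes> }
// Tenant names, topic suffixes and producer wiring are deployment-specific placeholders.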