BaseNEncoder(const std::string& algorithm,
             const char* digit_set,
             const std::vector<uint8_t>& bits_table,
             size_t bits_per_digit,
             size_t digits_per_group,
             const char pad_char,
             size_t max_pad,
             bool case_sensitive);

std::string encode(const std::vector<uint8_t>& input);

void decode(const std::string& encoded_str, std::vector<uint8_t>& output);

void decodeBase32Hex(const std::string& encoded_str, std::vector<uint8_t>& output);

std::string encodeBase64(const std::vector<uint8_t>& binary);

void decodeBase64(const std::string& encoded_str, std::vector<uint8_t>& output);

std::string encodeHex(const std::vector<uint8_t>& binary);

void decodeHex(const std::string& encoded_str, std::vector<uint8_t>& output);
inline std::string toHex(std::string value) {
    // Copy the string's bytes into a vector and return their hex encoding.
    std::vector<uint8_t> bin(value.begin(), value.end());
    return (encodeHex(bin));
}
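For illustration, a minimal round trip through the free functions declared above. The header path and the isc::util::encode namespace are assumptions; neither is named in the declarations.

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

#include <util/encode/encode.h>   // assumed header path

int main() {
    using namespace isc::util::encode;   // assumed namespace

    std::vector<uint8_t> data = {0xde, 0xad, 0xbe, 0xef};

    // Encode to a Base64 string, then decode back into a fresh buffer.
    std::string encoded = encodeBase64(data);
    std::vector<uint8_t> decoded;
    decodeBase64(encoded, decoded);

    std::cout << encoded << " round-trips: "
              << std::boolalpha << (decoded == data) << std::endl;
    return 0;
}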
Class for encoding and decoding binary data using Base16 (aka Hex) as described in RFC 4648.
Base16Encoder()
Constructor.
~Base16Encoder()=default
Destructor.
static const char * DIGIT_SET
Set of digits used for encoding in Base16.
static const std::vector< uint8_t > BITS_TABLE
Table that maps Base16 digits to their binary data value.
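A sketch of using the class directly rather than the encodeHex()/decodeHex() wrappers; the header path and namespace are assumptions, as in the earlier example.

#include <cassert>
#include <cstdint>
#include <string>
#include <vector>

#include <util/encode/encode.h>   // assumed header path

void base16_round_trip() {
    isc::util::encode::Base16Encoder encoder;   // assumed namespace

    const std::vector<uint8_t> data = {0x01, 0xab, 0xff};
    std::string hex = encoder.encode(data);     // RFC 4648 Base16 digits: "01ABFF"

    std::vector<uint8_t> decoded;
    encoder.decode(hex, decoded);               // restores the original bytes
    assert(decoded == data);
}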
Class for encoding and decoding binary data using Base32Hex as described in RFC 4648.
Base32HexEncoder()
Constructor.
static const char * DIGIT_SET
Set of digits used for encoding in Base32Hex.
static const std::vector< uint8_t > BITS_TABLE
Table that maps Base32Hex digits to their binary data value.
~Base32HexEncoder()=default
Destructor.
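To show how the 5-bit digits and '=' padding come out in practice, a sketch built around the RFC 4648 test vector BASE32-HEX("f") = "CO======". Header path and namespace are again assumptions.

#include <cassert>
#include <cstdint>
#include <string>
#include <vector>

#include <util/encode/encode.h>   // assumed header path

void base32hex_example() {
    // One input byte yields two 5-bit digits; '=' pads the group out to 8 digits.
    std::vector<uint8_t> data = {'f'};
    std::string encoded = isc::util::encode::encodeBase32Hex(data);  // assumed namespace
    assert(encoded == "CO======");   // RFC 4648, section 10 test vector

    std::vector<uint8_t> decoded;
    isc::util::encode::decodeBase32Hex(encoded, decoded);
    assert(decoded == data);
}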
Class for encoding and decoding binary data using Base64 as described in RFC 4648.
~Base64Encoder()=default
Destructor.
Base64Encoder()
Constructor.
static const std::vector< uint8_t > BITS_TABLE
Table that maps Base64 digits to their binary data value.
static const char * DIGIT_SET
Set of digits used for encoding in Base64.
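A sketch using the class interface with the RFC 4648 test vector BASE64("foobar") = "Zm9vYmFy"; header path and namespace are assumptions.

#include <cassert>
#include <cstdint>
#include <string>
#include <vector>

#include <util/encode/encode.h>   // assumed header path

void base64_example() {
    isc::util::encode::Base64Encoder encoder;   // assumed namespace

    const std::string text = "foobar";
    std::vector<uint8_t> data(text.begin(), text.end());
    assert(encoder.encode(data) == "Zm9vYmFy");   // RFC 4648, section 10 test vector

    std::vector<uint8_t> decoded;
    encoder.decode("Zm9vYmFy", decoded);
    assert(std::string(decoded.begin(), decoded.end()) == text);
}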
Class for encoding and decoding binary data using an algorithm described in RFC 4648.
std::string getAlgorithm() const
Get the algorithm name.
size_t max_bits_to_digit_
Maximum index value of the digit set.
const char * digit_set_
Set of digits (i.e. alphabet) used for encoding.
const char pad_char_
Character used for padding out to group size (0 means no padding).
std::string encode(const std::vector< uint8_t > &input)
Encodes binary data using the encoder's algorithm.
size_t getBitsPerDigit()
Get the number of data bits represented by a digit.
size_t digits_per_group_
Number of digits contained in a group.
void decode(const std::string &encoded_str, std::vector< uint8_t > &output)
Decodes an encoded string using the encoder's algorithm.
uint8_t getPadChar() const
Get the character used for padding out to group size (0 means no padding).
size_t max_digit_to_bits_
Maximum index value of the algorithm bit table.
std::string algorithm_
Name of the algorithm, used for logging.
BaseNEncoder(const std::string &algorithm, const char *digit_set, const std::vector< uint8_t > &bits_table, size_t bits_per_digit, size_t digits_per_group, const char pad_char, size_t max_pad, bool case_sensitive)
Constructor.
bool isCaseSensitive()
Indicates whether or not the algorithm's digit set is case-sensitive.
virtual ~BaseNEncoder()=default
Destructor.
size_t getDigitsPerGroup() const
Get the number of digits contained in a group.
bool case_sensitive_
Indicates whether or not the algorithm's digit set is case-sensitive.
const char * getDigitSet() const
Get the digit set.
uint8_t digitToBits(uint8_t digit)
Translate a digit into the appropriate algorithm bit value.
size_t max_pad_
Maximum number of pad characters in a group.
std::vector< uint8_t > bits_table_
Table to translate digits to data used during decoding.
size_t getMaxPad()
Get the maximum number of pad characters in a group.
char bitsToDigit(uint8_t bits)
Translate a byte of binary data into the appropriate algorithm digit.
size_t getMaxDigitToBits()
Get the maximum index value of the algorithm bit table.
size_t bits_per_digit_
Number of data bits represented by a digit.
const std::vector< uint8_t > & getBitsTable() const
Get the digit lookup table.
size_t getMaxBitsToDigit()
Get the maximum index value of the digit set.
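As a sketch of the accessors listed above, the following prints one algorithm's parameters. The expected Base64 values (6 bits per digit, 4-digit groups, '=' padding) follow RFC 4648; header path and namespace remain assumptions.

#include <iostream>

#include <util/encode/encode.h>   // assumed header path

void show_parameters() {
    isc::util::encode::Base64Encoder encoder;   // assumed namespace

    std::cout << encoder.getAlgorithm() << ": "
              << encoder.getBitsPerDigit() << " bits per digit, "
              << encoder.getDigitsPerGroup() << " digits per group, "
              << "pad character '" << encoder.getPadChar() << "', "
              << "case-sensitive: " << std::boolalpha
              << encoder.isCaseSensitive() << std::endl;
}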
std::string encodeBase64(const std::vector< uint8_t > &binary)
Encode binary data in the base64 format.
void decodeBase32Hex(const std::string &encoded_str, std::vector< uint8_t > &output)
Decode a base32-hex encoded string into binary data.
std::string encodeBase32Hex(const std::vector< uint8_t > &binary)
Encode binary data in the base32-hex format.
void decodeHex(const std::string &encoded_str, std::vector< uint8_t > &output)
Decode a base16 encoded string into binary data.
std::string encodeHex(const std::vector< uint8_t > &binary)
Encode binary data in the base16 format.
void decodeBase64(const std::string &encoded_str, std::vector< uint8_t > &output)
Decode a base64 encoded string into binary data.
std::string toHex(std::string value)
Inline convenience function to encode a string in hexadecimal.
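A sketch of the inline wrapper above: it copies the string's bytes into a vector and returns their Base16 encoding. Header path and namespace are assumptions.

#include <iostream>
#include <string>

#include <util/encode/encode.h>   // assumed header path

int main() {
    // "abc" is the byte sequence 0x61 0x62 0x63, so the hex encoding is "616263".
    std::cout << isc::util::encode::toHex("abc") << std::endl;   // assumed namespace
    return 0;
}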