How to convert hexadecimal string to an array of UInt8 bytes in Swift?
Swift 5
import CryptoSwift
let hexString = "e0696349774606f1b5602ffa6c2d953f"
let hexArray = Array<UInt8>.init(hex: hexString) // [224, 105, 99, 73, 119, 70, 6, 241, 181, 96, 47, 250, 108, 45, 149, 63]
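If you also need a Foundation Data, the resulting byte array bridges directly; CryptoSwift also ships a toHexString() helper for the reverse direction (treat that helper as an assumption about your CryptoSwift version). A minimal sketch:
import Foundation
let hexData = Data(hexArray)           // 16 bytes
let roundTrip = hexArray.toHexString() // "e0696349774606f1b5602ffa6c2d953f" (CryptoSwift helper, assumed)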
You can convert your hex string back to an array of [UInt8] by iterating over every two hex characters and initializing a UInt8 with its string radix initializer. The following implementation assumes the hex string is well formed:
Edit/update: Xcode 11 • Swift 5.1
extension StringProtocol {
    var hexaData: Data { .init(hexa) }
    var hexaBytes: [UInt8] { .init(hexa) }
    private var hexa: UnfoldSequence<UInt8, Index> {
        sequence(state: startIndex) { startIndex in
            // stop once the whole string has been consumed
            guard startIndex < self.endIndex else { return nil }
            // take the next two characters (or whatever is left at the end)
            let endIndex = self.index(startIndex, offsetBy: 2, limitedBy: self.endIndex) ?? self.endIndex
            defer { startIndex = endIndex }
            // parse the pair as a base-16 byte
            return UInt8(self[startIndex..<endIndex], radix: 16)
        }
    }
}
let string = "e0696349774606f1b5602ffa6c2d953f"
let data = string.hexaData // 16 bytes
let bytes = string.hexaBytes // [224, 105, 99, 73, 119, 70, 6, 241, 181, 96, 47, 250, 108, 45, 149, 63]
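Note that this non-throwing version stops decoding at the first pair that is not valid hex, because sequence(state:) ends as soon as the closure returns nil. A quick illustration (the malformed string below is made up for the example):
let malformed = "e069zz49"
malformed.hexaBytes // [224, 105] – decoding silently stops at "zz"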
If you would like to handle malformed hex strings as well, you can make it a throwing method:
extension String {
    enum DecodingError: Error {
        case invalidHexaCharacter(Character), oddNumberOfCharacters
    }
}
extension Collection {
    // Splits the collection into subsequences of at most maxLength elements each
    func unfoldSubSequences(limitedTo maxLength: Int) -> UnfoldSequence<SubSequence, Index> {
        sequence(state: startIndex) { lowerBound in
            guard lowerBound < endIndex else { return nil }
            let upperBound = index(lowerBound,
                                   offsetBy: maxLength,
                                   limitedBy: endIndex) ?? endIndex
            defer { lowerBound = upperBound }
            return self[lowerBound..<upperBound]
        }
    }
}
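As a quick sanity check of the chunking helper on its own (the sample string is just for illustration):
let pairs = "e0696349".unfoldSubSequences(limitedTo: 2).map { String($0) }
// ["e0", "69", "63", "49"]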
extension StringProtocol {
    func hexa<D>() throws -> D where D: DataProtocol & RangeReplaceableCollection {
        try .init(self)
    }
}
extension DataProtocol where Self: RangeReplaceableCollection {
    init<S: StringProtocol>(_ hexa: S) throws {
        guard hexa.count.isMultiple(of: 2) else {
            throw String.DecodingError.oddNumberOfCharacters
        }
        self = .init()
        reserveCapacity(hexa.utf8.count / 2)
        for pair in hexa.unfoldSubSequences(limitedTo: 2) {
            guard let byte = UInt8(pair, radix: 16) else {
                // report the offending character instead of silently dropping the pair
                for character in pair where !character.isHexDigit {
                    throw String.DecodingError.invalidHexaCharacter(character)
                }
                continue
            }
            append(byte)
        }
    }
}
Usage:
let hexaString = "e0696349774606f1b5602ffa6c2d953f"
do {
    let bytes: [UInt8] = try hexaString.hexa()
    print(bytes)
    let data: Data = try hexaString.hexa()
    print(data)
} catch {
    print(error)
}
This will print
[224, 105, 99, 73, 119, 70, 6, 241, 181, 96, 47, 250, 108, 45, 149, 63]
16 bytes
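For malformed input the same call throws instead of silently truncating; a short illustration (the invalid string below is made up for the example):
do {
    let bytes: [UInt8] = try "e0g9".hexa()
    print(bytes)
} catch {
    print(error) // invalidHexaCharacter("g")
}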
Based on the answer from Leo Dabus
Details
- Swift 5.1, Xcode 11.2.1
Solution
enum HexConvertError: Error {
    case wrongInputStringLength
    case wrongInputStringCharacters
}

extension StringProtocol {
    func asHexArrayFromNonValidatedSource() -> [UInt8] {
        var startIndex = self.startIndex
        return stride(from: 0, to: count, by: 2).compactMap { _ in
            let endIndex = index(startIndex, offsetBy: 2, limitedBy: self.endIndex) ?? self.endIndex
            defer { startIndex = endIndex }
            return UInt8(self[startIndex..<endIndex], radix: 16)
        }
    }

    func asHexArray() throws -> [UInt8] {
        if count % 2 != 0 { throw HexConvertError.wrongInputStringLength }
        let characterSet = "0123456789ABCDEFabcdef"
        let wrongCharacter = first { !characterSet.contains($0) }
        if wrongCharacter != nil { throw HexConvertError.wrongInputStringCharacters }
        return asHexArrayFromNonValidatedSource()
    }
}
Usage
// Way 1
let input = "12AF" // any hex string to validate
do {
    print("with validation: \(try input.asHexArray())")
} catch {
    print("with validation: \(error)")
}

// Way 2
"12g".asHexArrayFromNonValidatedSource() // [18] – the invalid trailing character is dropped
Full sample
Do not forget to also paste the solution code from above:
func test(input: String) {
    print("input: \(input)")
    do {
        print("with validation: \(try input.asHexArray())")
    } catch {
        print("with validation: \(error)")
    }
    print("without validation \(input.asHexArrayFromNonValidatedSource())\n")
}
test(input: "12wr22")
test(input: "124")
test(input: "12AF")
Console output
input: 12wr22
with validation: wrongInputStringCharacters
without validation [18, 34]
input: 124
with validation: wrongInputStringLength
without validation [18, 4]
input: 1240
with validation: [18, 64]
without validation [18, 64]
input: 12AF
with validation: [18, 175]
without validation [18, 175]