Convert between Decimal, Binary and Hexadecimal in Swift

Both String and Int have initializers that take a radix (base). By combining them, you can perform all of these conversions:

// Decimal to binary
let d1 = 21
let b1 = String(d1, radix: 2)
print(b1) // "10101"

// Binary to decimal
let b2 = "10110"
let d2 = Int(b2, radix: 2)!
print(d2) // 22

// Decimal to hexadecimal
let d3 = 61
let h1 = String(d3, radix: 16)
print(h1) // "3d"

// Hexadecimal to decimal
let h2 = "a3"
let d4 = Int(h2, radix: 16)!
print(d4) // 163

// Binary to hexadecimal
let b3 = "10101011"
let h3 = String(Int(b3, radix: 2)!, radix: 16)
print(h3) // "ab"

// Hexadecimal to binary
let h4 = "face"
let b4 = String(Int(h4, radix: 16)!, radix: 2)
print(b4) // "1111101011001110"
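
Note that Int(_:radix:) is failable: the force-unwraps above will crash if the string is not valid in the given base. A minimal sketch of the safer, optional-binding variant:

// Int(_:radix:) returns nil for input that is not valid in the given base
let input = "10121"                      // not valid binary
if let value = Int(input, radix: 2) {
    print(String(value, radix: 16))
} else {
    print("'\(input)' is not a valid base-2 number")  // this branch runs
}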

Swift converter for bases 2, 8, 10, and 16

The converter below combines Int and String to move between number systems:

Int holds the decimal value
String holds the representation in any other base

Converting a string representation to a decimal Int and back to a string is lossless, so every conversion is routed through decimal:

String -> Int -> String

The supported bases, listed with their Swift integer-literal prefixes (see the sketch after this list):

binary (2) - 0b
octal (8) - 0o
decimal (10)
hexadecimal (16) - 0x
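
For reference, a minimal sketch of those literal prefixes in Swift itself; all four constants below hold the same value:

// The same number written with each integer-literal prefix
let bin = 0b11010011000111   // binary
let oct = 0o32307            // octal
let dec = 13511              // decimal
let hex = 0x34C7             // hexadecimal

print(bin == oct && oct == dec && dec == hex) // true
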
extension Int {
    
    //From Decimal
    //10 -> 2
    func decToBinString() -> String {
        return createString(radix: 2)
    }
    
    //10 -> 8
    func decToOctString() -> String {
        return createString(radix: 8)
    }
    
    //10 -> 16
    func decToHexString() -> String {
        return createString(radix: 16)
    }
    
    //10 -> 8, zero-padded to at least minLength characters
    func decToOctStringFormat(minLength: Int = 0) -> String {
        return createString(minLength: minLength, system: "O")
    }

    //10 -> 16, zero-padded to at least minLength characters
    func decToHexStringFormat(minLength: Int = 0) -> String {
        return createString(minLength: minLength, system: "X")
    }
    
    fileprivate func createString(radix: Int) -> String {
        return String(self, radix: radix, uppercase: true)
    }
    
    fileprivate func createString(minLength: Int = 0, system: String) -> String {
        // 0         - pad with leading zeros
        // minLength - minimum number of characters in the result
        // system    - printf-style conversion for the target base ("O" = octal, "X" = hexadecimal)
        return String(format: "%0\(minLength)\(system)", self)
    }
}
extension String {
    
    //To Decimal
    //2 -> 10
    func binToDec() -> Int {
        return createInt(radix: 2)
    }
    
    //8 -> 10
    func octToDec() -> Int {
        return createInt(radix: 8)
    }
    
    //16 -> 10
    func hexToDec() -> Int {
        return createInt(radix: 16)
    }
    
    //Others
    //2 -> 8
    func binToOct() -> String {
        return self.binToDec().decToOctString()
    }
    
    //2 -> 16
    func binToHex() -> String {
        return self.binToDec().decToHexString()
    }
    
    //8 -> 16
    func octToHex() -> String {
        return self.octToDec().decToHexString()
    }
    
    //16 -> 8
    func hexToOct() -> String {
        return self.hexToDec().decToOctString()
    }
    
    //16 -> 2
    func hexToBin() -> String {
        return self.hexToDec().decToBinString()
    }
    
    //8 -> 2
    func octToBin() -> String {
        return self.octToDec().decToBinString()
    }

    //Additional
    //16 -> 2
    func hexToBinStringFormat(minLength: Int = 0) -> String {
        
        return hexToBin().pad(minLength: minLength)
    }
    
    //Left-pads the string with zeros up to minLength characters
    fileprivate func pad(minLength: Int) -> String {
        let padCount = minLength - self.count
        
        guard padCount > 0 else {
            return self
        }

        return String(repeating: "0", count: padCount) + self
    }

    fileprivate func createInt(radix: Int) -> Int {
        // Force unwrap: the string is expected to be a valid number in the given radix
        return Int(self, radix: radix)!
    }
    
}
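
A quick usage sketch of the extensions above (the values match the test case below):

let value = 13511
print(value.decToBinString())                 // "11010011000111"
print(value.decToHexString())                 // "34C7"
print("34C7".hexToOct())                      // "32307"
print("F".hexToBinStringFormat(minLength: 8)) // "00001111"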

Tests:

func testConverter() {
    let decInt = 13511
    let binStr = "11010011000111"
    let octStr = "32307"
    let hexStr = "34C7"
    
    //From Decimal
    //10 -> 2
    XCTAssertEqual(binStr, decInt.decToBinString())
    //10 -> 8
    XCTAssertEqual(octStr, decInt.decToOctString())
    //10 -> 16
    XCTAssertEqual(hexStr, decInt.decToHexString())
    
    //To Decimal
    //2 -> 10
    XCTAssertEqual(decInt, binStr.binToDec())
    //8 -> 10
    XCTAssertEqual(decInt, octStr.octToDec())
    //16 -> 10
    XCTAssertEqual(decInt, hexStr.hexToDec())
    
    //Others
    //2 -> 8
    XCTAssertEqual(octStr, binStr.binToOct())
    //2 -> 16
    XCTAssertEqual(hexStr, binStr.binToHex())
    //8 -> 16
    XCTAssertEqual(hexStr, octStr.octToHex())
    //16 -> 8
    XCTAssertEqual(octStr, hexStr.hexToOct())
    //16 -> 2
    XCTAssertEqual(binStr, hexStr.hexToBin())
    //8 -> 2
    XCTAssertEqual(binStr, octStr.octToBin())

    //String format feature
    //10 -> 8
    XCTAssertEqual("017", (15).decToOctStringFormat(minLength: 3))
    //10 -> 16
    XCTAssertEqual("00F", (15).decToHexStringFormat(minLength: 3))
    //16 -> 2
    XCTAssertEqual("0011010011000111", hexStr.hexToBinStringFormat(minLength: 16))
    
}
