Swift native functions to convert numbers to and from hex strings

Swift 3:

String to UInt:

let str = "fcd7d7"
let number = UInt(str, radix: 16)!
print(number)

result: 16570327

UInt to hex String:

let number = UInt(exactly: 16570327)!
let str = String(number, radix: 16, uppercase: false)
print(str)

result: fcd7d7


As of Swift 2, all integer types have a constructor

init?(_ text: String, radix: Int = 10)

so that both integer to hex string and hex string to integer conversions can be done with built-in methods. Example:

let num = 1000
let str = String(num, radix: 16)
print(str) // "3e8"

if let num2 = Int(str, radix: 16) {
    print(num2) // 1000
}

(Old answer for Swift 1:) The conversion from an integer to a hex string can be done with

let hex = String(num, radix: 16)

(see for example How to convert a decimal number to binary in Swift?). This does not require the import of any Framework and works with any base between 2 and 36.

The conversion from a hex string to an integer can be done with the BSD library function strtoul() (compare How to convert a binary to decimal in Swift?) if you are willing to import Darwin.

Otherwise there is (as far as I know) no built-in Swift method. Here is an extension that converts a string to a number according to a given base:

extension UInt {
    /// Creates an unsigned integer by parsing `string` as a number in the
    /// given base (2...36), using digits 0-9 followed by letters a-z.
    ///
    /// Parsing is case-insensitive. Returns `nil` if the string contains a
    /// character that is not a valid digit for `radix`, or if the parsed
    /// value overflows `UInt`. An empty string yields `0`, matching the
    /// behavior of the original Swift 1 implementation.
    ///
    /// - Parameters:
    ///   - string: The text to parse.
    ///   - radix: The numeric base; digit values must be less than this.
    ///     (Typed `UInt`, so this does not shadow the standard library's
    ///     `init?(_:radix: Int)`.)
    init?(_ string: String, radix: UInt) {
        let digits = "0123456789abcdefghijklmnopqrstuvwxyz"
        var result: UInt = 0
        for character in string.lowercased() {
            // Position in `digits` is the digit's numeric value.
            guard let index = digits.firstIndex(of: character) else {
                return nil
            }
            let value = UInt(digits.distance(from: digits.startIndex, to: index))
            guard value < radix else {
                return nil
            }
            // Fail with nil instead of trapping when the accumulated value
            // no longer fits in UInt.
            let (shifted, mulOverflow) = result.multipliedReportingOverflow(by: radix)
            let (next, addOverflow) = shifted.addingReportingOverflow(value)
            guard !mulOverflow && !addOverflow else {
                return nil
            }
            result = next
        }
        self = result
    }
}

Example:

// Example: parse a hex string, falling back to an error message on bad input.
// (`println` was removed in Swift 2; `print` is the current API.)
let hexString = "A0"
if let num = UInt(hexString, radix: 16) {
    print(num)
} else {
    print("invalid input")
}

update: Xcode 12.5 • Swift 5.4

extension StringProtocol {
    /// Returns the string with `prefix` removed when present; otherwise the
    /// whole string, always as a `SubSequence`.
    func dropping<S: StringProtocol>(prefix: S) -> SubSequence {
        guard hasPrefix(prefix) else { return self[...] }
        return dropFirst(prefix.count)
    }
    /// Parses the string as hexadecimal (optional "0x" prefix); 0 on failure.
    var hexaToDecimal: Int {
        let digits = dropping(prefix: "0x")
        return Int(digits, radix: 16) ?? 0
    }
    /// Hexadecimal string rendered as its binary representation.
    var hexaToBinary: String {
        return String(hexaToDecimal, radix: 2)
    }
    /// Decimal string rendered as lowercase hexadecimal; "0" on failure.
    var decimalToHexa: String {
        return String(Int(self) ?? 0, radix: 16)
    }
    /// Decimal string rendered as binary; "0" on failure.
    var decimalToBinary: String {
        return String(Int(self) ?? 0, radix: 2)
    }
    /// Parses the string as binary (optional "0b" prefix); 0 on failure.
    var binaryToDecimal: Int {
        let bits = dropping(prefix: "0b")
        return Int(bits, radix: 2) ?? 0
    }
    /// Binary string rendered as lowercase hexadecimal.
    var binaryToHexa: String {
        return String(binaryToDecimal, radix: 16)
    }
}

extension BinaryInteger {
    /// The value written in base 2, e.g. `255.binary == "11111111"`.
    var binary: String { String(self, radix: 2) }
    /// The value written in lowercase base 16, e.g. `255.hexa == "ff"`.
    var hexa: String { String(self, radix: 16) }
}

Testing:

print("7fffffffffffffff".hexaToDecimal)      // "9223372036854775807" decimal integer
print("0x7fffffffffffffff".hexaToDecimal)    // "9223372036854775807" decimal integer
print("7fffffffffffffff".hexaToBinary) // "111111111111111111111111111111111111111111111111111111111111111" binary (String)
print("0x7fffffffffffffff".hexaToBinary) // "111111111111111111111111111111111111111111111111111111111111111"

print("255".decimalToHexa)   // "ff"       hexa (String)
print("255".decimalToBinary) // "11111111" binary (String)
// (0b11111111 is the same value written as a Swift binary literal)

print("11111111".binaryToHexa)      // "ff"  hexa (String)
print("0b11111111".binaryToHexa)    // "ff"  hexa (String)
print("11111111".binaryToDecimal)   // 255 decimal (Int)
print("0b11111111".binaryToDecimal) // 255 decimal (Int)

print(255.binary) // "11111111" binary (String)
print(255.hexa)   // "ff"       hexa (String)