Use Hex color in SwiftUI
Try this:
extension Color {
    init(hex: Int, opacity: Double = 1.0) {
        // Extract each 8-bit channel from the 24-bit RGB value
        let red = Double((hex & 0xff0000) >> 16) / 255.0
        let green = Double((hex & 0xff00) >> 8) / 255.0
        let blue = Double((hex & 0xff) >> 0) / 255.0
        self.init(.sRGB, red: red, green: green, blue: blue, opacity: opacity)
    }
}
Usage:
Text("Hello World!")
.background(Color(hex: 0xf5bc53))
Text("Hello World!")
.background(Color(hex: 0xf5bc53, opacity: 0.8))
You're almost there; you were using the wrong initialiser parameter:
extension Color {
    init(hex: String) {
        let hex = hex.trimmingCharacters(in: CharacterSet.alphanumerics.inverted)
        var int: UInt64 = 0
        Scanner(string: hex).scanHexInt64(&int)
        let a, r, g, b: UInt64
        switch hex.count {
        case 3: // RGB (12-bit); multiplying each nibble by 17 maps 0xF to 255
            (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17)
        case 6: // RGB (24-bit)
            (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)
        case 8: // ARGB (32-bit)
            (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)
        default: // invalid input falls back to a near-transparent black
            (a, r, g, b) = (1, 1, 1, 0)
        }
        self.init(
            .sRGB,
            red: Double(r) / 255,
            green: Double(g) / 255,
            blue: Double(b) / 255,
            opacity: Double(a) / 255
        )
    }
}
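For illustration, usage then looks like this (the color values are arbitrary examples):

Text("Hello World!")
    .background(Color(hex: "#f5bc53"))  // 24-bit RGB

Color(hex: "fb5")       // 12-bit shorthand
Color(hex: "80FF0000")  // 32-bit ARGB: red at ~50% opacity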
Here is a Playground with my solution. It adds fallback after fallback and relies only on the hex string for color and alpha.
import SwiftUI

extension Color {
    init(hex string: String) {
        var string: String = string.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
        if string.hasPrefix("#") {
            _ = string.removeFirst()
        }

        // Double the last value if incomplete hex
        if !string.count.isMultiple(of: 2), let last = string.last {
            string.append(last)
        }

        // Fix invalid values
        if string.count > 8 {
            string = String(string.prefix(8))
        }

        // Scanner creation
        let scanner = Scanner(string: string)
        var color: UInt64 = 0
        scanner.scanHexInt64(&color)

        if string.count == 2 {
            // GG: grayscale
            let mask = 0xFF
            let g = Int(color) & mask
            let gray = Double(g) / 255.0
            self.init(.sRGB, red: gray, green: gray, blue: gray, opacity: 1)
        } else if string.count == 4 {
            // GGAA: grayscale with alpha
            let mask = 0x00FF
            let g = Int(color >> 8) & mask
            let a = Int(color) & mask
            let gray = Double(g) / 255.0
            let alpha = Double(a) / 255.0
            self.init(.sRGB, red: gray, green: gray, blue: gray, opacity: alpha)
        } else if string.count == 6 {
            // RRGGBB
            let mask = 0x0000FF
            let r = Int(color >> 16) & mask
            let g = Int(color >> 8) & mask
            let b = Int(color) & mask
            let red = Double(r) / 255.0
            let green = Double(g) / 255.0
            let blue = Double(b) / 255.0
            self.init(.sRGB, red: red, green: green, blue: blue, opacity: 1)
        } else if string.count == 8 {
            // RRGGBBAA
            let mask = 0x000000FF
            let r = Int(color >> 24) & mask
            let g = Int(color >> 16) & mask
            let b = Int(color >> 8) & mask
            let a = Int(color) & mask
            let red = Double(r) / 255.0
            let green = Double(g) / 255.0
            let blue = Double(b) / 255.0
            let alpha = Double(a) / 255.0
            self.init(.sRGB, red: red, green: green, blue: blue, opacity: alpha)
        } else {
            // Fallback: opaque white
            self.init(.sRGB, red: 1, green: 1, blue: 1, opacity: 1)
        }
    }
}
let gray0 = Color(hex: "3f")
let gray1 = Color(hex: "#69")
let gray2 = Color(hex: "#6911")
let gray3 = Color(hex: "fff")
let red = Color(hex: "#FF000044s")
let green = Color(hex: "#00FF00")
let blue0 = Color(hex: "0000FF")
let blue1 = Color(hex: "0000F")
As for getting the hex string back out of a Color: that is not a public API. We still need to rely on UIColor implementations for that.
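For illustration, a minimal sketch of that UIColor bridge (assuming iOS 14+ for UIColor(_ color: Color), and that the color converts to an RGB-compatible color space; the hexString property name is just an example):

import SwiftUI
import UIKit

extension Color {
    // Sketch only: bridging through UIKit means dynamic or catalog
    // colors may not round-trip exactly.
    var hexString: String? {
        var r: CGFloat = 0, g: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
        guard UIColor(self).getRed(&r, green: &g, blue: &b, alpha: &a) else {
            return nil
        }
        // Round each channel to an 8-bit value and format as RRGGBBAA
        return String(format: "#%02X%02X%02X%02X",
                      Int((r * 255).rounded()),
                      Int((g * 255).rounded()),
                      Int((b * 255).rounded()),
                      Int((a * 255).rounded()))
    }
}

Color(hex: "#f5bc53").hexString // e.g. "#F5BC53FF"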
PS: I saw the components solution below, but if the API changes in the future, my version is a bit more stable.
Another alternative below that uses a UInt for the hex value, though of course it can be changed to String if you prefer that.
extension Color {
    init(hex: UInt, alpha: Double = 1) {
        self.init(
            .sRGB,
            red: Double((hex >> 16) & 0xff) / 255,
            green: Double((hex >> 08) & 0xff) / 255,
            blue: Double((hex >> 00) & 0xff) / 255,
            opacity: alpha
        )
    }
}
Usage examples:
Color(hex: 0x000000)
Color(hex: 0x000000, alpha: 0.2)