How to create a hex color string UIColor initializer in Swift? [duplicate]

I am using this code to create a UIColor from a hex value. It's working perfectly.

extension UIColor {
    /// Creates an opaque color from integer channel values in 0...255.
    /// Traps in debug builds when any component is out of range.
    convenience init(red: Int, green: Int, blue: Int) {
        assert(0...255 ~= red, "Invalid red component")
        assert(0...255 ~= green, "Invalid green component")
        assert(0...255 ~= blue, "Invalid blue component")

        // Map each 0...255 integer channel to the 0...1 range UIKit expects.
        let unit: (Int) -> CGFloat = { CGFloat($0) / 255.0 }
        self.init(red: unit(red), green: unit(green), blue: unit(blue), alpha: 1.0)
    }

    /// Creates an opaque color from a packed 0xRRGGBB integer.
    convenience init(netHex: Int) {
        let r = (netHex >> 16) & 0xff
        let g = (netHex >> 8) & 0xff
        let b = netHex & 0xff
        self.init(red: r, green: g, blue: b)
    }
}

Usage:

var textColor = UIColor(netHex: 0xffffff)

This code works perfectly for an Int hex code, but it requires the hex code as an Int type, e.g. 0xffffff. I receive the hex code from a web service, and it arrives as a String like "#ffffff" (not an Int). I can convert that string to "0xffffff", but I can't convert "0xffffff" (String) to 0xffffff (Int).

I need something like this

var textColor = UIColor(netHex: "0xffffff")

or better like this:

var textColor = UIColor(netHex: "#ffffff")

Thanks in advance.


Solution 1:

Xcode 9 • Swift 4 or later

extension UIColor {
    /// Decodes a hex color string into `count` (3 or 4) channel values in 0...1.
    ///
    /// Accepts an optional leading "#", 3-digit shorthand ("F0A" → "FF00AA"),
    /// 6 digits, and — only when `count == 4` — 8 digits. A 3- or 6-digit
    /// string decoded with `count == 4` gets an implicit "FF" opacity byte.
    ///
    /// - Returns: The decoded channels in the string's digit order, or `nil`
    ///   for an unsupported length or any non-hexadecimal character.
    private static func hexChannels(from string: String, count: Int) -> [CGFloat]? {
        var chars = Array(string.hasPrefix("#") ? string.dropFirst() : string[...])
        switch chars.count {
        case 3:
            // Expand shorthand by doubling each digit.
            chars = chars.flatMap { [$0, $0] }
            if count == 4 { chars += ["F", "F"] }
        case 6:
            if count == 4 { chars += ["F", "F"] }
        case 8 where count == 4:
            break
        default:
            return nil
        }
        var channels: [CGFloat] = []
        channels.reserveCapacity(count)
        for i in 0..<count {
            // UInt8(_:radix:) fails on non-hex digits, unlike strtoul,
            // which would silently read invalid input as 0 (opaque black).
            guard let byte = UInt8(String(chars[2 * i ... 2 * i + 1]), radix: 16) else {
                return nil
            }
            channels.append(CGFloat(byte) / 255)
        }
        return channels
    }

    /// Creates a color from an "RGB"/"RRGGBB" hex string (optional "#" prefix).
    /// - Parameter alpha: Opacity in 0...1; defaults to fully opaque.
    /// - Returns: `nil` for an unsupported length or invalid hex digits.
    convenience init?(hexaRGB: String, alpha: CGFloat = 1) {
        guard let c = UIColor.hexChannels(from: hexaRGB, count: 3) else { return nil }
        self.init(red: c[0], green: c[1], blue: c[2], alpha: alpha)
    }

    /// Creates a color from an "RGB"/"RRGGBB"/"RRGGBBAA" hex string
    /// (optional "#" prefix); a missing alpha byte defaults to "FF".
    /// - Returns: `nil` for an unsupported length or invalid hex digits.
    convenience init?(hexaRGBA: String) {
        guard let c = UIColor.hexChannels(from: hexaRGBA, count: 4) else { return nil }
        self.init(red: c[0], green: c[1], blue: c[2], alpha: c[3])
    }

    /// Creates a color from an "RGB"/"RRGGBB"/"AARRGGBB" hex string
    /// (optional "#" prefix); a missing alpha byte defaults to "FF".
    /// Note: the first byte of an 8-digit string is the alpha channel.
    /// - Returns: `nil` for an unsupported length or invalid hex digits.
    convenience init?(hexaARGB: String) {
        guard let c = UIColor.hexChannels(from: hexaARGB, count: 4) else { return nil }
        self.init(red: c[1], green: c[2], blue: c[3], alpha: c[0])
    }
}

if let textColor = UIColor(hexaRGB: "00F") {
    print(textColor) // r 0.0 g 0.0 b 1.0 a 1.0
}

if let textColor = UIColor(hexaRGB: "00F") {
    print(textColor) // r 0.0 g 0.0 b 1.0 a 1.0
}     


UIColor(hexaRGB: "#00F")                  // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaRGB: "#00F", alpha: 0.5)      // r 0.0 g 0.0 b 1.0 a 0.5

UIColor(hexaRGB: "#0000FF")               // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaRGB: "#0000FF", alpha: 0.5)   // r 0.0 g 0.0 b 1.0 a 0.5

UIColor(hexaRGBA: "#0000FFFF")            // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaRGBA: "#0000FF7F")            // r 0.0 g 0.0 b 1.0 a 0.498

UIColor(hexaARGB: "#FF0000FF")            // r 0.0 g 0.0 b 1.0 a 1.0
UIColor(hexaARGB: "#7F0000FF")            // r 0.0 g 0.0 b 1.0 a 0.498