I'm making a drawing app and I would like to refer to my colors through an enum. For example, it would be cleaner and more convenient to use Colors.RedColor.
This answer is probably late, but it may help others finding this question. I was not satisfied with the answers above, because adding colors as a UIColor extension is not always what you want.
This is the solution I came up with:
enum PencilColor {
    case lightRed
    case darkPurple

    var associatedColor: UIColor {
        switch self {
        case .lightRed:   return UIColor(red: 231/255, green: 76/255, blue: 60/255, alpha: 1.0)
        case .darkPurple: return UIColor(red: 88/255, green: 24/255, blue: 69/255, alpha: 1.0)
        }
    }
}
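A short usage sketch (the selectedPencil constant and the view it is applied to are placeholder names, not part of the original code):

// Hypothetical usage: map the enum case to its UIColor when drawing.
let selectedPencil: PencilColor = .darkPurple
view.backgroundColor = selectedPencil.associatedColor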
I use a computed property to solve this problem; this is my code:
enum MyColor {
    case navigationBarBackgroundColor
    case navigationTintColor
}

extension MyColor {
    var value: UIColor {
        switch self {
        case .navigationBarBackgroundColor:
            return UIColor(red: 67/255, green: 173/255, blue: 247/255, alpha: 1.0)
        case .navigationTintColor:
            return UIColor.white
        }
    }
}
Then I can use MyColor like this:
MyColor.navigationBarBackgroundColor.value
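For instance, here is a minimal sketch of wiring these values up to a navigation bar (assuming this runs inside a view controller that has a navigation controller):

// Hypothetical usage inside a view controller's viewDidLoad.
navigationController?.navigationBar.barTintColor = MyColor.navigationBarBackgroundColor.value
navigationController?.navigationBar.tintColor = MyColor.navigationTintColor.value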
I do it like this (basically using a struct as a namespace):
extension UIColor {
    struct MyTheme {
        static var firstColor: UIColor { return UIColor(red: 1, green: 0, blue: 0, alpha: 1) }
        static var secondColor: UIColor { return UIColor(red: 0, green: 1, blue: 0, alpha: 1) }
    }
}
And you use it like:
UIColor.MyTheme.firstColor
So you can have a red color inside your custom theme.
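For example, a quick sketch of applying the namespaced colors (the view here is just an assumed UIView):

// Assumed usage on a UIView named `view`.
view.backgroundColor = UIColor.MyTheme.firstColor
view.layer.borderColor = UIColor.MyTheme.secondColor.cgColor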
Based on @Jano's answer, I made an improvement by using Int as the literal type:
import UIKit
public final class Colors: UIColor {
}

extension Colors: ExpressibleByIntegerLiteral {
    public typealias IntegerLiteralType = Int

    // Interprets the literal as 0xRRGGBBAA.
    public convenience init(integerLiteral value: Int) {
        let red   = CGFloat((value & 0xFF000000) >> 24) / 0xFF
        let green = CGFloat((value & 0x00FF0000) >> 16) / 0xFF
        let blue  = CGFloat((value & 0x0000FF00) >> 8) / 0xFF
        let alpha = CGFloat(value & 0x000000FF) / 0xFF
        self.init(red: red, green: green, blue: blue, alpha: alpha)
    }
}
extension Colors: RawRepresentable {
    public typealias RawValue = Int

    public var rawValue: RawValue {
        return hex
    }

    public convenience init?(rawValue: RawValue) {
        self.init(integerLiteral: rawValue)
    }
}
fileprivate extension UIColor {
    // Packs the components back into 0xRRGGBBAA so it matches init(integerLiteral:).
    var hex: Int {
        var fRed: CGFloat = 0
        var fGreen: CGFloat = 0
        var fBlue: CGFloat = 0
        var fAlpha: CGFloat = 0
        if self.getRed(&fRed, green: &fGreen, blue: &fBlue, alpha: &fAlpha) {
            let red = Int(fRed * 255.0)
            let green = Int(fGreen * 255.0)
            let blue = Int(fBlue * 255.0)
            let alpha = Int(fAlpha * 255.0)
            return (red << 24) + (green << 16) + (blue << 8) + alpha
        } else {
            return 0x000000
        }
    }
}
public enum MainPalette: Colors {
    case red = 0xFF0000ff
    case white = 0xFFFFFFFF
}

public enum FeatureXPalette: Colors {
    case blue = 0x024F9Eff
    // case bluish = 0x024F9Eff // <- Can't do: duplicate raw value
    case red = 0xFF0000ff
}
The advantage is that it doesn't allow duplicate colors within a palette (like a true enum), and it also supports alpha.
As you can see, you can create multiple enums for different palettes/schemes. If you want views to be able to use any palette, you can just add a protocol:
protocol Color {
    var color: UIColor { get }
}

extension MainPalette: Color {
    var color: UIColor {
        return rawValue
    }
}

extension FeatureXPalette: Color {
    var color: UIColor {
        return rawValue
    }
}
That way you can have a function that takes the protocol:
func printColorEquality(color1: Color, color2: Color) {
    print(color1.color == color2.color)
}

let red1: Color = MainPalette.red
let red2: Color = FeatureXPalette.red
printColorEquality(color1: red1, color2: red2)
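A small sketch of how the protocol could be used to theme views; the apply(_:to:) helper and someView are hypothetical, not part of the palette code above:

// Any palette case can style a view through the Color protocol.
func apply(_ color: Color, to view: UIView) {
    view.backgroundColor = color.color
}

apply(MainPalette.red, to: someView)
apply(FeatureXPalette.blue, to: someView)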
What I also like to do is add static vars for convenience:
extension MainPalette {
    public static var brightRed: UIColor {
        return MainPalette.red.color
    }
}
That gives you a cleaner API:
view.backgroundColor = MainPalette.brightRed
Naming can be improved: you have to choose whether you want a nice convenience API or nice naming for your enums.
"How can I make a Swift enum with UIColor value?"
This is how you would literally make an enum with a UIColor value:
import UIKit
final class Color: UIColor, RawRepresentable, ExpressibleByStringLiteral {

    // MARK: - ExpressibleByStringLiteral

    typealias StringLiteralType = String

    convenience init(stringLiteral: String) {
        guard let (a, r, g, b) = Color.argb(hexColor: stringLiteral) else {
            assertionFailure("Invalid string")
            self.init(red: 0, green: 0, blue: 0, alpha: 0)
            return
        }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    // MARK: - RawRepresentable

    public typealias RawValue = String

    convenience init?(rawValue: RawValue) {
        guard let (a, r, g, b) = Color.argb(hexColor: rawValue) else { return nil }
        self.init(red: r, green: g, blue: b, alpha: a)
    }

    var rawValue: RawValue {
        return hexString()
    }

    // MARK: - Private

    /// Return color components in range [0,1] for hexadecimal color strings.
    /// - hexColor: case-insensitive string with format RGB, RRGGBB, or AARRGGBB.
    private static func argb(hexColor: String) -> (CGFloat, CGFloat, CGFloat, CGFloat)? {
        let hexAlphabet = "0123456789abcdefABCDEF"
        let hex = hexColor.trimmingCharacters(in: CharacterSet(charactersIn: hexAlphabet).inverted)
        var int = UInt32()
        Scanner(string: hex).scanHexInt32(&int)
        let a, r, g, b: UInt32
        switch hex.count {
        case 3: (a, r, g, b) = (255, (int >> 8) * 17, (int >> 4 & 0xF) * 17, (int & 0xF) * 17) // RGB
        case 6: (a, r, g, b) = (255, int >> 16, int >> 8 & 0xFF, int & 0xFF)                   // RRGGBB
        case 8: (a, r, g, b) = (int >> 24, int >> 16 & 0xFF, int >> 8 & 0xFF, int & 0xFF)      // AARRGGBB
        default: return nil
        }
        return (CGFloat(a)/255, CGFloat(r)/255, CGFloat(g)/255, CGFloat(b)/255)
    }

    /// Produce an AARRGGBB string so that Color(rawValue: color.rawValue) round-trips.
    private func hexString() -> String {
        var red: CGFloat = 0
        var green: CGFloat = 0
        var blue: CGFloat = 0
        var alpha: CGFloat = 0
        if self.getRed(&red, green: &green, blue: &blue, alpha: &alpha) {
            return String(format: "#%02X%02X%02X%02X", UInt8(alpha * 255), UInt8(red * 255), UInt8(green * 255), UInt8(blue * 255))
        }
        assertionFailure("Invalid colour space.")
        return "#F00"
    }
}
enum Colors: Color {
    case red = "#F00"
    // case blue = "#F00" // Raw value for enum case is not unique
}

let color3 = Color(rawValue: "#000")      // RGB
let color6 = Color(rawValue: "#123456")   // RRGGBB
let color8 = Color(rawValue: "#12345678") // AARRGGBB

print(Colors(rawValue: "#F00") as Any)      // red
print(Colors(rawValue: "#FF0000") as Any)   // red
print(Colors(rawValue: "#FFFF0000") as Any) // red
print(Colors(rawValue: "#ABC") as Any)      // nil because it’s not a member of the enumeration
// print(Colors(rawValue: "#XYZ") as Any)   // assertion on debug, black on release
print(Colors.red)          // red
print(Colors.red.rawValue) // UIExtendedSRGBColorSpace 1 0 0 1
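As a quick usage sketch (the view below is an assumed UIView): because the enum's raw value is itself a UIColor subclass, it can be assigned directly:

// Assumed usage: the enum's raw value is a Color (a UIColor), so it can style views directly.
view.backgroundColor = Colors.red.rawValue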
Actually I use this implementation; it is very convenient for me for two reasons: first, I can use hex values, and second, all colors are kept as constants.
import UIKit
struct ColorPalette {
    struct Gray {
        static let Light = UIColor(netHex: 0x595959)
        static let Medium = UIColor(netHex: 0x262626)
    }
}

extension UIColor {
    convenience init(red: Int, green: Int, blue: Int) {
        assert(red >= 0 && red <= 255, "Invalid red component")
        assert(green >= 0 && green <= 255, "Invalid green component")
        assert(blue >= 0 && blue <= 255, "Invalid blue component")
        self.init(red: CGFloat(red) / 255.0, green: CGFloat(green) / 255.0, blue: CGFloat(blue) / 255.0, alpha: 1.0)
    }

    convenience init(netHex: Int) {
        self.init(red: (netHex >> 16) & 0xff, green: (netHex >> 8) & 0xff, blue: netHex & 0xff)
    }
}
Usage:
let backgroundGreyColor = ColorPalette.Gray.Medium.cgColor
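And a minimal sketch of applying the constants to a view (the view is assumed, not part of the snippet above):

// Assumed usage on a UIView named `view`.
view.backgroundColor = ColorPalette.Gray.Light
view.layer.borderColor = ColorPalette.Gray.Medium.cgColor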