I'm trying to convert the integer representation of an ASCII character back into a string.
string += (char) int;
In other languages like Java, this works with a simple cast, as shown above. How can I achieve the same thing in Swift?
Here's a production-ready solution in Swift 3:
extension String {
    /// Creates a single-character string from the given Unicode scalar.
    /// - Parameter unicodeScalar: The scalar to wrap in a `String`.
    init(unicodeScalar: UnicodeScalar) {
        self.init(Character(unicodeScalar))
    }

    /// Creates a single-character string from an integer Unicode codepoint.
    /// - Parameter unicodeCodepoint: A value in `0...0x10FFFF`, excluding the
    ///   surrogate range `0xD800...0xDFFF`.
    /// - Returns: `nil` when the value is not a valid Unicode scalar
    ///   (negative, a surrogate, or above 0x10FFFF).
    init?(unicodeCodepoint: Int) {
        // UInt32(exactly:) rejects negative values; UnicodeScalar(UInt32)
        // rejects surrogates and values above 0x10FFFF.
        guard let value = UInt32(exactly: unicodeCodepoint),
              let scalar = UnicodeScalar(value) else {
            return nil
        }
        self.init(unicodeScalar: scalar)
    }

    /// Returns `lhs` with the character for codepoint `rhs` appended.
    /// Traps with a descriptive message if `rhs` is not a valid Unicode
    /// codepoint (previously this crashed via an unlabeled force-unwrap).
    static func + (lhs: String, rhs: Int) -> String {
        guard let suffix = String(unicodeCodepoint: rhs) else {
            preconditionFailure("\(rhs) is not a valid Unicode codepoint")
        }
        return lhs + suffix
    }

    /// In-place variant of `+(_:_:)`: appends the character for `rhs`.
    static func += (lhs: inout String, rhs: Int) {
        lhs = lhs + rhs
    }
}
Usage:
let a = String(unicodeCodepoint: 42) // "*"
var b = a + 126 // "*~"
b += 33 // "*~!"
Note that this works with all ASCII and Unicode codepoints, so you can do this:
var emoji = String(unicodeCodepoint: 0x1F469)! // "👩"