I am trying to replicate some Objective-C Cocoa in Swift. All is good until I come across the following:
// Set a new type and creator:
unsigned long type = 'TEXT';
unsigned long creator = 'pdos';
In Swift 4 or later, I use this code; if the string does not encode to exactly 4 bytes of MacOSRoman, it returns OSType(0):
extension String {
    public func osType() -> OSType {
        var result: UInt = 0
        // Interpret the string as MacOSRoman bytes; require exactly 4.
        if let data = self.data(using: .macOSRoman), data.count == 4 {
            data.withUnsafeBytes { (ptr: UnsafePointer<UInt8>) in
                // Pack the 4 bytes big-endian, as HFS type codes expect.
                for i in 0..<data.count {
                    result = result << 8 + UInt(ptr[i])
                }
            }
        }
        return OSType(result)
    }
}
let type = "APPL".osType() // 1095782476
// check if this is OK in a playground
let hexStr = String(format: "0x%lx", type) // 0x4150504c -> "APPL" in ASCII
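To check values going the other way, here is a minimal sketch of the reverse conversion, assuming MacOSRoman throughout (fourCharString() is just an illustrative name, not an existing API):
import Foundation

extension OSType {
    // Unpack the four big-endian bytes and decode them as MacOSRoman.
    func fourCharString() -> String {
        let bytes: [UInt8] = [
            UInt8((self >> 24) & 0xFF),
            UInt8((self >> 16) & 0xFF),
            UInt8((self >> 8) & 0xFF),
            UInt8(self & 0xFF)
        ]
        return String(bytes: bytes, encoding: .macOSRoman) ?? ""
    }
}

OSType(1095782476).fourCharString() // "APPL"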
Extend ExpressibleByStringLiteral to use four-character string literals directly:
extension FourCharCode: ExpressibleByStringLiteral {
    public init(stringLiteral value: StringLiteralType) {
        if let data = value.data(using: .macOSRoman), data.count == 4 {
            // Fold the 4 MacOSRoman bytes big-endian into a UInt32.
            self = data.reduce(0, { $0 << 8 + Self($1) })
        } else {
            self = 0
        }
    }
}
Now you can just pass a string literal as the FourCharCode / OSType / UInt32 parameter:
let record = NSAppleEventDescriptor.record()
record.setDescriptor(NSAppleEventDescriptor(boolean: true), forKeyword: "test")
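With that conformance, the original Objective-C snippet also translates almost directly. A sketch, relying on the literal coercion above; myFilePath is a placeholder:
import Foundation

let myFilePath = "/tmp/example.txt" // placeholder

// .hfsTypeCode / .hfsCreatorCode are the Foundation file-attribute keys
// for NSFileHFSTypeCode / NSFileHFSCreatorCode.
let attributes: [FileAttributeKey: Any] = [
    .hfsTypeCode: "TEXT" as FourCharCode,
    .hfsCreatorCode: "pdos" as FourCharCode
]
try FileManager.default.setAttributes(attributes, ofItemAtPath: myFilePath)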
I'm using this in my Cocoa Scripting apps; it handles characters above 0x80 correctly (see the example below):
func OSTypeFrom(string: String) -> UInt {
    var result: UInt = 0
    // MacOSRoman encodes every supported character as a single byte,
    // including those above 0x7F.
    if let data = string.data(using: .macOSRoman) {
        for byte in data {
            result = result << 8 + UInt(byte)
        }
    }
    return result
}
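For example, "•" is the single byte 0xA5 in MacOSRoman (if I have the table right), while its UTF-16 code unit is 0x2022, so the utf16-based variants below would pack it differently:
let code = OSTypeFrom(string: "ab•d")
String(format: "0x%lx", code) // 0x6162a564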
Edit: Alternatively
func fourCharCodeFrom(string: String) -> FourCharCode {
    assert(string.count == 4, "String length must be 4")
    var result: FourCharCode = 0
    // Note: packs UTF-16 code units, so this is only reliable for ASCII strings.
    for char in string.utf16 {
        result = (result << 8) + FourCharCode(char)
    }
    return result
}
or still swiftier:
func fourCharCode(from string: String) -> FourCharCode {
    return string.utf16.reduce(0, { $0 << 8 + FourCharCode($1) })
}
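Usage, in a playground:
let appl = fourCharCode(from: "APPL")
String(format: "0x%x", appl) // 0x4150504c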
Here's a simple function:
func mbcc(_ foo: String) -> Int {
    var result: Int = 0
    // Fold each UTF-8 byte into the low end of the accumulator.
    for aChar in foo.utf8 {
        result = result << 8 + Int(aChar)
    }
    return result
}
let a = mbcc("TEXT")
print(String(format: "0x%lx", a)) // Prints 0x54455854
It will work for strings that fit in an Int. Once they get longer, it silently loses bytes from the top, because Swift's << discards bits shifted past the width of the type. If you use
result = result * 256 + Int(aChar)
you should get a crash when the string gets too big instead, since * traps on overflow.
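A minimal illustration of the difference, assuming a 64-bit Int:
var x = Int.max
x = x << 8     // fine at runtime: bits shifted past the top are discarded
// x = x * 256 // would trap with an arithmetic overflow error instead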
Note: this should work but doesn't actually work; it must be a bug. I am leaving this here to document that it doesn't work. It may work in the future, but for the time being it's best to use the accepted answer.
I found the following typealiases in the Swift API:
typealias FourCharCode = UInt32
typealias OSType = FourCharCode
And the following functions:
func NSFileTypeForHFSTypeCode(hfsFileTypeCode: OSType) -> String!
func NSHFSTypeCodeFromFileType(fileTypeString: String!) -> OSType
This should allow me to create the equivalent code:
let type : UInt32 = UInt32(NSHFSTypeCodeFromFileType("TEXT"))
let creator : UInt32 = UInt32(NSHFSTypeCodeFromFileType("pdos"))
WARNING: But it doesn't work on Xcode 7.0 beta (7A121l)
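If I read the NSFileTypeForHFSTypeCode documentation correctly, the expected string format is the four-character code enclosed in single quotes, the same format NSFileTypeForHFSTypeCode returns, so the missing quotes may be the cause rather than a bug. A sketch using the quoted form:
let type = NSHFSTypeCodeFromFileType("'TEXT'")    // 0x54455854
let creator = NSHFSTypeCodeFromFileType("'pdos'") // 0x70646f73
NSFileTypeForHFSTypeCode(type)                    // "'TEXT'"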
Swift 5 Update:
extension String {
    func osType() -> OSType {
        return OSType(
            data(using: .macOSRoman)?
                .withUnsafeBytes {
                    // Fold the MacOSRoman bytes big-endian into a UInt.
                    $0.reduce(into: UInt(0)) { $0 = $0 << 8 + UInt($1) }
                } ?? 0
        )
    }
}
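Usage is unchanged; note that this variant omits the four-character length check of the Swift 4 version above:
let type = "TEXT".osType()
String(format: "0x%x", type) // 0x54455854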