diff --git a/OSBindings/Mac/Clock Signal/Base.lproj/MainMenu.xib b/OSBindings/Mac/Clock Signal/Base.lproj/MainMenu.xib
index ff476331c..e617f15ca 100644
--- a/OSBindings/Mac/Clock Signal/Base.lproj/MainMenu.xib
+++ b/OSBindings/Mac/Clock Signal/Base.lproj/MainMenu.xib
@@ -111,6 +111,12 @@
+
+
+
+
+
+
diff --git a/OSBindings/Mac/Clock Signal/Clock Signal.entitlements b/OSBindings/Mac/Clock Signal/Clock Signal.entitlements
index 74600409e..d6593673f 100644
--- a/OSBindings/Mac/Clock Signal/Clock Signal.entitlements
+++ b/OSBindings/Mac/Clock Signal/Clock Signal.entitlements
@@ -4,6 +4,8 @@
 	<key>com.apple.security.app-sandbox</key>
 	<true/>
+	<key>com.apple.security.assets.pictures.read-write</key>
+	<true/>
 	<key>com.apple.security.device.bluetooth</key>
 	<true/>
 	<key>com.apple.security.device.usb</key>
 	<true/>
diff --git a/OSBindings/Mac/Clock Signal/Documents/MachineDocument.swift b/OSBindings/Mac/Clock Signal/Documents/MachineDocument.swift
index f5137594d..1c26fbd5b 100644
--- a/OSBindings/Mac/Clock Signal/Documents/MachineDocument.swift
+++ b/OSBindings/Mac/Clock Signal/Documents/MachineDocument.swift
@@ -6,8 +6,8 @@
 //  Copyright 2016 Thomas Harte. All rights reserved.
 //
 
-import Cocoa
 import AudioToolbox
+import Cocoa
 
 class MachineDocument:
 	NSDocument,
@@ -310,6 +310,29 @@ class MachineDocument:
 		return super.validateUserInterfaceItem(item)
 	}
 
+	// Screenshot capture.
+	@IBAction func saveScreenshot(_ sender: AnyObject!) {
+		// Grab a date formatter and form a file name.
+		let dateFormatter = DateFormatter()
+		dateFormatter.dateStyle = .short
+		dateFormatter.timeStyle = .long
+
+		let filename = ("Clock Signal Screen Shot " + dateFormatter.string(from: Date()) + ".png").replacingOccurrences(of: "/", with: "-")
+			.replacingOccurrences(of: ":", with: ".")
+		let picturesURL = FileManager.default.urls(for: .picturesDirectory, in: .userDomainMask)[0]
+		let url = picturesURL.appendingPathComponent(filename)
+
+		// Obtain the machine's current display.
+		var imageRepresentation: NSBitmapImageRep? = nil
+		self.openGLView.perform {
+			imageRepresentation = self.machine.imageRepresentation
+		}
+
+		// Encode as a PNG and save.
+		let pngData = imageRepresentation!.representation(using: .png, properties: [:])
+		try! pngData?.write(to: url)
+	}
+
 	// MARK: Activity display.
 	class LED {
 		let levelIndicator: NSLevelIndicator
diff --git a/OSBindings/Mac/Clock Signal/Machine/CSMachine.h b/OSBindings/Mac/Clock Signal/Machine/CSMachine.h
index 458a0b5cc..8934d88f3 100644
--- a/OSBindings/Mac/Clock Signal/Machine/CSMachine.h
+++ b/OSBindings/Mac/Clock Signal/Machine/CSMachine.h
@@ -64,6 +64,7 @@ typedef NS_ENUM(NSInteger, CSMachineKeyboardInputMode) {
 @property (nonatomic, readonly, nonnull) NSString *userDefaultsPrefix;
 
 - (void)paste:(nonnull NSString *)string;
+@property (nonatomic, readonly, nonnull) NSBitmapImageRep *imageRepresentation;
 
 @property (nonatomic, assign) BOOL useFastLoadingHack;
 @property (nonatomic, assign) CSMachineVideoSignal videoSignal;
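The imageRepresentation implementation in CSMachine.mm below reads back only the centre 4:3 portion of the current GL viewport. A small standalone sketch of that arithmetic, using an illustrative 1600×900 viewport (the names and numbers here are examples, not taken from the patch):

    #include <cstdio>

    int main() {
        // x, y, width, height, as glGetIntegerv(GL_VIEWPORT, ...) would report them.
        // Hypothetical example values only.
        const int viewport[4] = {0, 0, 1600, 900};

        // Width of a 4:3 region sharing the viewport's height, and the x offset that centres it.
        const int proportional_width = (viewport[3] * 4) / 3;          // 900 * 4 / 3 = 1200
        const int x_offset = (viewport[2] - proportional_width) >> 1;  // (1600 - 1200) / 2 = 200

        std::printf("read a %dx%d region starting at x=%d\n", proportional_width, viewport[3], x_offset);
        return 0;
    }

So a 1600×900 window yields a 1200×900 readback starting 200 pixels in from the left edge, which is the region the glReadPixels call below requests.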
diff --git a/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm b/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm
index 117b86718..a219a1801 100644
--- a/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm
+++ b/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm
@@ -243,6 +243,43 @@ struct ActivityObserver: public Activity::Observer {
 	keyboardMachine->type_string([paste UTF8String]);
 }
 
+- (NSBitmapImageRep *)imageRepresentation {
+	// Get the current viewport to establish framebuffer size. Then determine how wide the
+	// centre 4/3 of that would be.
+	GLint dimensions[4];
+	glGetIntegerv(GL_VIEWPORT, dimensions);
+	GLint proportionalWidth = (dimensions[3] * 4) / 3;
+
+	// Grab the framebuffer contents.
+	std::vector<uint8_t> temporaryData(static_cast<size_t>(proportionalWidth * dimensions[3] * 3));
+	glReadPixels((dimensions[2] - proportionalWidth) >> 1, 0, proportionalWidth, dimensions[3], GL_RGB, GL_UNSIGNED_BYTE, temporaryData.data());
+
+	// Generate an NSBitmapImageRep and populate it with a vertical flip
+	// of the original data.
+	NSBitmapImageRep *const result =
+		[[NSBitmapImageRep alloc]
+			initWithBitmapDataPlanes:NULL
+			pixelsWide:proportionalWidth
+			pixelsHigh:dimensions[3]
+			bitsPerSample:8
+			samplesPerPixel:3
+			hasAlpha:NO
+			isPlanar:NO
+			colorSpaceName:NSDeviceRGBColorSpace
+			bytesPerRow:3 * proportionalWidth
+			bitsPerPixel:0];
+
+	const size_t line_size = static_cast<size_t>(proportionalWidth * 3);
+	for(GLint y = 0; y < dimensions[3]; ++y) {
+		memcpy(
+			&result.bitmapData[static_cast<size_t>(y) * line_size],
+			&temporaryData[static_cast<size_t>(dimensions[3] - y - 1) * line_size],
+			line_size);
+	}
+
+	return result;
+}
+
 - (void)applyMedia:(const Analyser::Static::Media &)media {
 	@synchronized(self) {
 		MediaTarget::Machine *const mediaTarget = _machine->media_target();
diff --git a/OSBindings/SDL/main.cpp b/OSBindings/SDL/main.cpp
index 299474e53..c08d4507c 100644
--- a/OSBindings/SDL/main.cpp
+++ b/OSBindings/SDL/main.cpp
@@ -632,7 +632,16 @@ int main(int argc, char *argv[]) {
 			}
 
 			// Create a suitable SDL surface and save the thing.
-			SDL_Surface *const surface = SDL_CreateRGBSurfaceFrom(pixels.data(), proportional_width, window_height, 8*4, proportional_width*4, 0, 0, 0, 0);
+			const bool is_big_endian = SDL_BYTEORDER == SDL_BIG_ENDIAN;
+			SDL_Surface *const surface = SDL_CreateRGBSurfaceFrom(
+				pixels.data(),
+				proportional_width, window_height,
+				8*4,
+				proportional_width*4,
+				is_big_endian ? 0xff000000 : 0x000000ff,
+				is_big_endian ? 0x00ff0000 : 0x0000ff00,
+				is_big_endian ? 0x0000ff00 : 0x00ff0000,
+				0);
 			SDL_SaveBMP(surface, target.c_str());
 			SDL_FreeSurface(surface);
 			break;
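In the SDL change above, the previous call handed SDL_CreateRGBSurfaceFrom four zero masks, leaving SDL to assume a default channel layout that need not match the R, G, B, A byte order the readback buffer holds; the replacement derives the masks from SDL_BYTEORDER so the saved BMP keeps correct colours on both little- and big-endian hosts. A minimal sketch of the same mask selection, assuming an RGBA byte-ordered buffer (the 2×2 test image and output file name are illustrative only):

    #include <SDL.h>
    #include <cstdint>
    #include <vector>

    int main(int argc, char *argv[]) {
        (void)argc; (void)argv;

        // Four pixels in R, G, B, A memory order, as glReadPixels(..., GL_RGBA, GL_UNSIGNED_BYTE, ...)
        // writes them regardless of host endianness: red, green, blue, white.
        const int width = 2, height = 2;
        std::vector<uint8_t> pixels = {
            255, 0, 0, 255,     0, 255, 0, 255,
            0, 0, 255, 255,     255, 255, 255, 255,
        };

        // SDL applies each mask to the pixel read as a native 32-bit word, so the mask that selects
        // the first byte in memory is the low byte on little-endian hosts and the high byte on
        // big-endian ones.
        const bool is_big_endian = SDL_BYTEORDER == SDL_BIG_ENDIAN;
        SDL_Surface *const surface = SDL_CreateRGBSurfaceFrom(
            pixels.data(),
            width, height,
            32,                                         // bits per pixel
            width * 4,                                  // pitch, in bytes
            is_big_endian ? 0xff000000 : 0x000000ff,    // red mask
            is_big_endian ? 0x00ff0000 : 0x0000ff00,    // green mask
            is_big_endian ? 0x0000ff00 : 0x00ff0000,    // blue mask
            0);                                         // no alpha mask, as in the patch

        SDL_SaveBMP(surface, "masks-test.bmp");
        SDL_FreeSurface(surface);
        return 0;
    }

Either way the first byte of each pixel is interpreted as red, so the resulting BMP is identical on both architectures.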