diff --git a/Components/6560/6560.hpp b/Components/6560/6560.hpp index 7ab74c41a..4f47f89e0 100644 --- a/Components/6560/6560.hpp +++ b/Components/6560/6560.hpp @@ -54,16 +54,20 @@ template class MOS6560 { audio_generator_(audio_queue_), speaker_(audio_generator_) { - crt_->set_composite_sampling_function( - "float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)" + crt_->set_svideo_sampling_function( + "vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)" "{" "vec2 yc = texture(texID, coordinate).rg / vec2(255.0);" - "float phaseOffset = 6.283185308 * 2.0 * yc.y;" - "float chroma = cos(phase + phaseOffset);" - "return mix(yc.x, step(yc.y, 0.75) * chroma, amplitude);" + "float phaseOffset = 6.283185308 * 2.0 * yc.y;" + "float chroma = step(yc.y, 0.75) * cos(phase + phaseOffset);" + + "return vec2(yc.x, chroma);" "}"); + // default to s-video output + crt_->set_video_signal(Outputs::CRT::VideoSignal::SVideo); + // default to NTSC set_output_mode(OutputMode::NTSC); } diff --git a/Components/9918/9918.cpp b/Components/9918/9918.cpp index e99a4dcaa..f6ba07db1 100644 --- a/Components/9918/9918.cpp +++ b/Components/9918/9918.cpp @@ -96,7 +96,7 @@ TMS9918::TMS9918(Personality p) { "{" "return texture(sampler, coordinate).rgb / vec3(255.0);" "}"); - crt_->set_output_device(Outputs::CRT::OutputDevice::Monitor); + crt_->set_video_signal(Outputs::CRT::VideoSignal::RGB); crt_->set_visible_area(Outputs::CRT::Rect(0.055f, 0.025f, 0.9f, 0.9f)); crt_->set_input_gamma(2.8f); diff --git a/Machines/AmstradCPC/AmstradCPC.cpp b/Machines/AmstradCPC/AmstradCPC.cpp index 8a55b8675..202d06628 100644 --- a/Machines/AmstradCPC/AmstradCPC.cpp +++ b/Machines/AmstradCPC/AmstradCPC.cpp @@ -317,7 +317,7 @@ class CRTCBusHandler { "return vec3(float((sample >> 4) & 3u), float((sample >> 2) & 3u), float(sample & 3u)) / 2.0;" "}"); crt_->set_visible_area(Outputs::CRT::Rect(0.075f, 0.05f, 0.9f, 0.9f)); - crt_->set_output_device(Outputs::CRT::OutputDevice::Monitor); + crt_->set_video_signal(Outputs::CRT::VideoSignal::RGB); } /// Destructs the CRT. 
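The 6560 change above is the template for moving a machine from a composite-only description to the new s-video one: the sampling function now returns luminance and chrominance as separate components rather than a single pre-mixed level, and the machine then selects VideoSignal::SVideo. A minimal sketch of that pattern follows; the function name and the two-channel source format are hypothetical, and only the two CRT calls are the API introduced in this diff.

    // Sketch only: adopting the s-video interface added above. The source data layout
    // (R = luminance, G = colour phase, both 0-255) is invented for illustration.
    void setup_svideo_output(Outputs::CRT::CRT *crt) {
        crt->set_svideo_sampling_function(
            "vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)"
            "{"
                "vec2 yc = texture(texID, coordinate).rg / vec2(255.0);"
                // First component: luminance, already in [0, 1]. Second component:
                // chrominance as a level on the colour subcarrier, offset by the
                // phase encoded in the green channel.
                "return vec2(yc.x, cos(phase + 6.283185308 * yc.y));"
            "}");

        // Composite and RGB remain available; s-video simply becomes this machine's default.
        crt->set_video_signal(Outputs::CRT::VideoSignal::SVideo);
    }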
diff --git a/Machines/Atari2600/TIA.cpp b/Machines/Atari2600/TIA.cpp index 7f6a8146d..c26580a98 100644 --- a/Machines/Atari2600/TIA.cpp +++ b/Machines/Atari2600/TIA.cpp @@ -25,7 +25,7 @@ namespace { TIA::TIA(bool create_crt) { if(create_crt) { crt_.reset(new Outputs::CRT::CRT(cycles_per_line * 2 - 1, 1, Outputs::CRT::DisplayType::NTSC60, 1)); - crt_->set_output_device(Outputs::CRT::OutputDevice::Television); + crt_->set_video_signal(Outputs::CRT::VideoSignal::Composite); set_output_mode(OutputMode::NTSC); } @@ -123,20 +123,20 @@ void TIA::set_output_mode(Atari2600::TIA::OutputMode output_mode) { Outputs::CRT::DisplayType display_type; if(output_mode == OutputMode::NTSC) { - crt_->set_composite_sampling_function( - "float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)" + crt_->set_svideo_sampling_function( + "vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)" "{" "uint c = texture(texID, coordinate).r;" "uint y = c & 14u;" "uint iPhase = (c >> 4);" "float phaseOffset = 6.283185308 * float(iPhase) / 13.0 + 5.074880441076923;" - "return mix(float(y) / 14.0, step(1, iPhase) * cos(phase + phaseOffset), amplitude);" + "return vec2(float(y) / 14.0, step(1, iPhase) * cos(phase + phaseOffset));" "}"); display_type = Outputs::CRT::DisplayType::NTSC60; } else { - crt_->set_composite_sampling_function( - "float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)" + crt_->set_svideo_sampling_function( + "vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)" "{" "uint c = texture(texID, coordinate).r;" "uint y = c & 14u;" @@ -145,10 +145,12 @@ void TIA::set_output_mode(Atari2600::TIA::OutputMode output_mode) { "uint direction = iPhase & 1u;" "float phaseOffset = float(7u - direction) + (float(direction) - 0.5) * 2.0 * float(iPhase >> 1);" "phaseOffset *= 6.283185308 / 12.0;" - "return mix(float(y) / 14.0, step(4, (iPhase + 2u) & 15u) * cos(phase + phaseOffset), amplitude);" + "return vec2(float(y) / 14.0, step(4, (iPhase + 2u) & 15u) * cos(phase + phaseOffset));" "}"); display_type = Outputs::CRT::DisplayType::PAL50; } + crt_->set_video_signal(Outputs::CRT::VideoSignal::Composite); + // line number of cycles in a line of video is one less than twice the number of clock cycles per line; the Atari // outputs 228 colour cycles of material per line when an NTSC line 227.5. Since all clock numbers will be doubled // later, cycles_per_line * 2 - 1 is therefore the real length of an NTSC line, even though we're going to supply diff --git a/Machines/ColecoVision/ColecoVision.cpp b/Machines/ColecoVision/ColecoVision.cpp index 7acd6ca22..67915f5d9 100644 --- a/Machines/ColecoVision/ColecoVision.cpp +++ b/Machines/ColecoVision/ColecoVision.cpp @@ -135,7 +135,7 @@ class ConcreteMachine: void setup_output(float aspect_ratio) override { vdp_.reset(new TI::TMS9918(TI::TMS9918::TMS9918A)); - get_crt()->set_output_device(Outputs::CRT::OutputDevice::Television); + get_crt()->set_video_signal(Outputs::CRT::VideoSignal::Composite); } void close_output() override { diff --git a/Machines/Electron/Electron.cpp b/Machines/Electron/Electron.cpp index edfaaf200..746a65124 100644 --- a/Machines/Electron/Electron.cpp +++ b/Machines/Electron/Electron.cpp @@ -454,7 +454,7 @@ class ConcreteMachine: Configurable::Display display; if(Configurable::get_display(selections_by_option, display)) { - get_crt()->set_output_device((display == Configurable::Display::RGB) ? 
Outputs::CRT::OutputDevice::Monitor : Outputs::CRT::OutputDevice::Television); + get_crt()->set_video_signal((display == Configurable::Display::RGB) ? Outputs::CRT::VideoSignal::RGB : Outputs::CRT::VideoSignal::Composite); } } diff --git a/Machines/MSX/MSX.cpp b/Machines/MSX/MSX.cpp index aaaa32cc4..faecdcffd 100644 --- a/Machines/MSX/MSX.cpp +++ b/Machines/MSX/MSX.cpp @@ -562,7 +562,7 @@ class ConcreteMachine: Configurable::Display display; if(Configurable::get_display(selections_by_option, display)) { - get_crt()->set_output_device((display == Configurable::Display::RGB) ? Outputs::CRT::OutputDevice::Monitor : Outputs::CRT::OutputDevice::Television); + get_crt()->set_video_signal((display == Configurable::Display::RGB) ? Outputs::CRT::VideoSignal::RGB : Outputs::CRT::VideoSignal::Composite); } } diff --git a/Machines/Oric/Oric.cpp b/Machines/Oric/Oric.cpp index 64e8e1246..4dbec895b 100644 --- a/Machines/Oric/Oric.cpp +++ b/Machines/Oric/Oric.cpp @@ -263,7 +263,7 @@ class ConcreteMachine: use_fast_tape_hack_ = activate; } - void set_output_device(Outputs::CRT::OutputDevice output_device) { + void set_output_device(Outputs::CRT::VideoSignal output_device) { video_output_->set_output_device(output_device); } @@ -392,7 +392,7 @@ class ConcreteMachine: video_output_.reset(new VideoOutput(ram_)); if(!colour_rom_.empty()) video_output_->set_colour_rom(colour_rom_); - set_output_device(Outputs::CRT::OutputDevice::Monitor); + set_output_device(Outputs::CRT::VideoSignal::RGB); } void close_output() override final { @@ -465,7 +465,7 @@ class ConcreteMachine: Configurable::Display display; if(Configurable::get_display(selections_by_option, display)) { - set_output_device((display == Configurable::Display::RGB) ? Outputs::CRT::OutputDevice::Monitor : Outputs::CRT::OutputDevice::Television); + set_output_device((display == Configurable::Display::RGB) ? Outputs::CRT::VideoSignal::RGB : Outputs::CRT::VideoSignal::Composite); } } diff --git a/Machines/Oric/Video.cpp b/Machines/Oric/Video.cpp index 9aff00933..fd8b34851 100644 --- a/Machines/Oric/Video.cpp +++ b/Machines/Oric/Video.cpp @@ -41,13 +41,13 @@ VideoOutput::VideoOutput(uint8_t *memory) : ); crt_->set_composite_function_type(Outputs::CRT::CRT::CompositeSourceType::DiscreteFourSamplesPerCycle, 0.0f); - set_output_device(Outputs::CRT::OutputDevice::Television); + set_output_device(Outputs::CRT::VideoSignal::Composite); crt_->set_visible_area(crt_->get_rect_for_area(53, 224, 16 * 6, 40 * 6, 4.0f / 3.0f)); } -void VideoOutput::set_output_device(Outputs::CRT::OutputDevice output_device) { +void VideoOutput::set_output_device(Outputs::CRT::VideoSignal output_device) { output_device_ = output_device; - crt_->set_output_device(output_device); + crt_->set_video_signal(output_device); } void VideoOutput::set_colour_rom(const std::vector &rom) { @@ -129,7 +129,7 @@ void VideoOutput::run_for(const Cycles cycles) { if(control_byte & 0x60) { if(pixel_target_) { uint16_t colours[2]; - if(output_device_ == Outputs::CRT::OutputDevice::Monitor) { + if(output_device_ == Outputs::CRT::VideoSignal::RGB) { colours[0] = static_cast(paper_ ^ inverse_mask); colours[1] = static_cast(ink_ ^ inverse_mask); } else { @@ -183,7 +183,7 @@ void VideoOutput::run_for(const Cycles cycles) { pixel_target_[0] = pixel_target_[1] = pixel_target_[2] = pixel_target_[3] = pixel_target_[4] = pixel_target_[5] = - (output_device_ == Outputs::CRT::OutputDevice::Monitor) ? paper_ ^ inverse_mask : colour_forms_[paper_ ^ inverse_mask]; + (output_device_ == Outputs::CRT::VideoSignal::RGB) ? 
paper_ ^ inverse_mask : colour_forms_[paper_ ^ inverse_mask]; } } if(pixel_target_) pixel_target_ += 6; diff --git a/Machines/Oric/Video.hpp b/Machines/Oric/Video.hpp index fe0ff2f38..c3b58436e 100644 --- a/Machines/Oric/Video.hpp +++ b/Machines/Oric/Video.hpp @@ -20,7 +20,7 @@ class VideoOutput { Outputs::CRT::CRT *get_crt(); void run_for(const Cycles cycles); void set_colour_rom(const std::vector &rom); - void set_output_device(Outputs::CRT::OutputDevice output_device); + void set_output_device(Outputs::CRT::VideoSignal output_device); private: uint8_t *ram_; @@ -33,7 +33,7 @@ class VideoOutput { // Output target and device uint16_t *pixel_target_; uint16_t colour_forms_[8]; - Outputs::CRT::OutputDevice output_device_; + Outputs::CRT::VideoSignal output_device_; // Registers uint8_t ink_, paper_; diff --git a/OSBindings/Mac/Clock Signal.xcodeproj/project.pbxproj b/OSBindings/Mac/Clock Signal.xcodeproj/project.pbxproj index 4687fe9c1..a1b450ebd 100644 --- a/OSBindings/Mac/Clock Signal.xcodeproj/project.pbxproj +++ b/OSBindings/Mac/Clock Signal.xcodeproj/project.pbxproj @@ -3060,7 +3060,7 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0700; - LastUpgradeCheck = 0900; + LastUpgradeCheck = 0930; ORGANIZATIONNAME = "Thomas Harte"; TargetAttributes = { 4B055A691FAE763F0060FFFF = { @@ -3910,12 +3910,14 @@ CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; @@ -3963,12 +3965,14 @@ CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; diff --git a/OSBindings/Mac/Clock Signal.xcodeproj/xcshareddata/xcschemes/Clock Signal.xcscheme b/OSBindings/Mac/Clock Signal.xcodeproj/xcshareddata/xcschemes/Clock Signal.xcscheme index ebe496fa1..f9049689f 100644 --- a/OSBindings/Mac/Clock Signal.xcodeproj/xcshareddata/xcschemes/Clock Signal.xcscheme +++ b/OSBindings/Mac/Clock Signal.xcodeproj/xcshareddata/xcschemes/Clock Signal.xcscheme @@ -1,6 +1,6 @@ + codeCoverageEnabled = "YES" + shouldUseLaunchSchemeArgsEnv = "YES"> @@ -74,7 +73,6 @@ selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" enableASanStackUseAfterReturn = "YES" disableMainThreadChecker = "YES" - language = "" launchStyle = "0" useCustomWorkingDirectory = "NO" ignoresPersistentStateOnLaunch = "NO" diff --git a/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm b/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm index 5009aabd4..d99f138f1 100644 --- a/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm +++ b/OSBindings/Mac/Clock Signal/Machine/CSMachine.mm @@ -94,7 +94,7 @@ struct SpeakerDelegate: public Outputs::Speaker::Speaker::Delegate, public LockP [_view 
performWithGLContext:^{ @synchronized(self) { - _machine->crt_machine()->close_output(); + self->_machine->crt_machine()->close_output(); } }]; } diff --git a/OSBindings/Mac/Clock Signal/Machine/Wrappers/CSAtari2600.mm b/OSBindings/Mac/Clock Signal/Machine/Wrappers/CSAtari2600.mm index 5b596cb7d..9b5fa41dc 100644 --- a/OSBindings/Mac/Clock Signal/Machine/Wrappers/CSAtari2600.mm +++ b/OSBindings/Mac/Clock Signal/Machine/Wrappers/CSAtari2600.mm @@ -66,8 +66,8 @@ } dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{ - @synchronized(_machine) { - _atari2600->set_switch_is_enabled(toggleSwitch, false); + @synchronized(self->_machine) { + self->_atari2600->set_switch_is_enabled(toggleSwitch, false); } }); } diff --git a/OSBindings/Mac/Clock SignalTests/AtariStaticAnalyserTests.mm b/OSBindings/Mac/Clock SignalTests/AtariStaticAnalyserTests.mm index f4c4264ee..101181894 100644 --- a/OSBindings/Mac/Clock SignalTests/AtariStaticAnalyserTests.mm +++ b/OSBindings/Mac/Clock SignalTests/AtariStaticAnalyserTests.mm @@ -10,15 +10,18 @@ #import #include "../../../Analyser/Static/StaticAnalyser.hpp" +#include "../../../Analyser/Static/Atari/Target.hpp" + +using PagingModel = Analyser::Static::Atari::Target::PagingModel; @interface AtariROMRecord : NSObject -@property(nonatomic, readonly) Analyser::Static::Atari2600PagingModel pagingModel; +@property(nonatomic, readonly) PagingModel pagingModel; @property(nonatomic, readonly) BOOL usesSuperchip; -+ (instancetype)recordWithPagingModel:(Analyser::Static::Atari2600PagingModel)pagingModel usesSuperchip:(BOOL)usesSuperchip; ++ (instancetype)recordWithPagingModel:(PagingModel)pagingModel usesSuperchip:(BOOL)usesSuperchip; @end @implementation AtariROMRecord -+ (instancetype)recordWithPagingModel:(Analyser::Static::Atari2600PagingModel)pagingModel usesSuperchip:(BOOL)usesSuperchip ++ (instancetype)recordWithPagingModel:(PagingModel)pagingModel usesSuperchip:(BOOL)usesSuperchip { AtariROMRecord *record = [[AtariROMRecord alloc] init]; record->_pagingModel = pagingModel; @@ -27,7 +30,7 @@ } @end -#define Record(sha, model, uses) sha : [AtariROMRecord recordWithPagingModel:Analyser::Static::Atari2600PagingModel::model usesSuperchip:uses], +#define Record(sha, model, uses) sha : [AtariROMRecord recordWithPagingModel:PagingModel::model usesSuperchip:uses], static NSDictionary *romRecordsBySHA1 = @{ Record(@"58dbcbdffbe80be97746e94a0a75614e64458fdc", None, NO) // 4kraVCS Record(@"9967a76efb68017f793188f691159f04e6bb4447", None, NO) // 'X'Mission @@ -598,8 +601,10 @@ static NSDictionary *romRecordsBySHA1 = @{ if(!romRecord) continue; // assert equality - XCTAssert(targets.front()->atari.paging_model == romRecord.pagingModel, @"%@; should be %d, is %d", testFile, romRecord.pagingModel, targets.front()->atari.paging_model); - XCTAssert(targets.front()->atari.uses_superchip == romRecord.usesSuperchip, @"%@; should be %@", testFile, romRecord.usesSuperchip ? @"true" : @"false"); + Analyser::Static::Atari::Target *atari_target = dynamic_cast(targets.front().get()); + XCTAssert(atari_target != nullptr); + XCTAssert(atari_target->paging_model == romRecord.pagingModel, @"%@; should be %d, is %d", testFile, romRecord.pagingModel, atari_target->paging_model); + XCTAssert(atari_target->uses_superchip == romRecord.usesSuperchip, @"%@; should be %@", testFile, romRecord.usesSuperchip ? 
@"true" : @"false"); } } diff --git a/Outputs/CRT/CRT.hpp b/Outputs/CRT/CRT.hpp index 90367a174..be156e071 100644 --- a/Outputs/CRT/CRT.hpp +++ b/Outputs/CRT/CRT.hpp @@ -306,10 +306,27 @@ class CRT { */ void set_composite_function_type(CompositeSourceType type, float offset_of_first_sample = 0.0f); + /*! Sets a function that will map from whatever data the machine provided to an s-video signal. + + If the output mode is composite then a default mapping from RGB to the display's + output mode will be applied. + + @param shader A GLSL fragment including a function with the signature + `vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)` + that evaluates to the s-video signal level, luminance as the first component and chrominance + as the second, as a function of a source buffer, sampling location and colour + carrier phase. + */ + inline void set_svideo_sampling_function(const std::string &shader) { + enqueue_openGL_function([shader, this] { + openGL_output_builder_.set_svideo_sampling_function(shader); + }); + } + /*! Sets a function that will map from whatever data the machine provided to an RGB signal. - If the output mode is composite then a default mapping from RGB to the display's composite - format will be applied. + If the output mode is composite or svideo then a default mapping from RGB to the display's + output mode will be applied. @param shader A GLSL fragent including a function with the signature `vec3 rgb_sample(usampler2D sampler, vec2 coordinate, vec2 icoordinate)` that evaluates to an RGB colour @@ -329,9 +346,9 @@ class CRT { openGL_output_builder_.texture_builder.set_bookender(std::move(bookender)); } - inline void set_output_device(OutputDevice output_device) { - enqueue_openGL_function([output_device, this] { - openGL_output_builder_.set_output_device(output_device); + inline void set_video_signal(VideoSignal video_signal) { + enqueue_openGL_function([video_signal, this] { + openGL_output_builder_.set_video_signal(video_signal); }); } diff --git a/Outputs/CRT/CRTTypes.hpp b/Outputs/CRT/CRTTypes.hpp index d86cd72a2..66edccfc4 100644 --- a/Outputs/CRT/CRTTypes.hpp +++ b/Outputs/CRT/CRTTypes.hpp @@ -36,9 +36,10 @@ enum class ColourSpace { YUV }; -enum class OutputDevice { - Monitor, - Television +enum class VideoSignal { + RGB, + SVideo, + Composite }; } diff --git a/Outputs/CRT/Internals/CRTOpenGL.cpp b/Outputs/CRT/Internals/CRTOpenGL.cpp index 9bd1b7184..67feeda9e 100644 --- a/Outputs/CRT/Internals/CRTOpenGL.cpp +++ b/Outputs/CRT/Internals/CRTOpenGL.cpp @@ -71,10 +71,6 @@ OpenGLOutputBuilder::~OpenGLOutputBuilder() { glDeleteVertexArrays(1, &output_vertex_array_); } -bool OpenGLOutputBuilder::get_is_television_output() { - return output_device_ == OutputDevice::Television || !rgb_input_shader_program_; -} - void OpenGLOutputBuilder::set_target_framebuffer(GLint target_framebuffer) { target_framebuffer_ = target_framebuffer; } @@ -86,6 +82,7 @@ void OpenGLOutputBuilder::draw_frame(unsigned int output_width, unsigned int out // establish essentials if(!output_shader_program_) { prepare_composite_input_shaders(); + prepare_svideo_input_shaders(); prepare_rgb_input_shaders(); prepare_source_vertex_array(); @@ -153,21 +150,34 @@ void OpenGLOutputBuilder::draw_frame(unsigned int output_width, unsigned int out }; // for composite video, go through four steps to get to something that can be painted to the output - RenderStage composite_render_stages[] = { + const RenderStage composite_render_stages[] = { {composite_input_shader_program_.get(), 
composite_texture_.get(), {0.0, 0.0, 0.0}}, {composite_separation_filter_program_.get(), separated_texture_.get(), {0.0, 0.5, 0.5}}, {composite_chrominance_filter_shader_program_.get(), filtered_texture_.get(), {0.0, 0.0, 0.0}}, {nullptr, nullptr} }; - // for RGB video, there's only two steps - RenderStage rgb_render_stages[] = { + // for s-video, there are two steps — it's like composite but skips separation + const RenderStage svideo_render_stages[] = { + {svideo_input_shader_program_.get(), separated_texture_.get(), {0.0, 0.5, 0.5}}, + {composite_chrominance_filter_shader_program_.get(), filtered_texture_.get(), {0.0, 0.0, 0.0}}, + {nullptr, nullptr} + }; + + // for RGB video, there's also only two steps; a lowpass filter is still applied per physical reality + const RenderStage rgb_render_stages[] = { {rgb_input_shader_program_.get(), composite_texture_.get(), {0.0, 0.0, 0.0}}, {rgb_filter_shader_program_.get(), filtered_texture_.get(), {0.0, 0.0, 0.0}}, {nullptr, nullptr} }; - RenderStage *active_pipeline = get_is_television_output() ? composite_render_stages : rgb_render_stages; + const RenderStage *active_pipeline; + switch(video_signal_) { + default: + case VideoSignal::Composite: active_pipeline = composite_render_stages; break; + case VideoSignal::SVideo: active_pipeline = svideo_render_stages; break; + case VideoSignal::RGB: active_pipeline = rgb_render_stages; break; + } if(array_submission.input_size || array_submission.output_size) { // all drawing will be from the source vertex array and without blending @@ -245,6 +255,7 @@ void OpenGLOutputBuilder::reset_all_OpenGL_state() { composite_input_shader_program_ = nullptr; composite_separation_filter_program_ = nullptr; composite_chrominance_filter_shader_program_ = nullptr; + svideo_input_shader_program_ = nullptr; rgb_input_shader_program_ = nullptr; rgb_filter_shader_program_ = nullptr; output_shader_program_ = nullptr; @@ -264,6 +275,12 @@ void OpenGLOutputBuilder::set_composite_sampling_function(const std::string &sha reset_all_OpenGL_state(); } +void OpenGLOutputBuilder::set_svideo_sampling_function(const std::string &shader) { + std::lock_guard lock_guard(output_mutex_); + svideo_shader_ = shader; + reset_all_OpenGL_state(); +} + void OpenGLOutputBuilder::set_rgb_sampling_function(const std::string &shader) { std::lock_guard lock_guard(output_mutex_); rgb_shader_ = shader; @@ -273,7 +290,7 @@ void OpenGLOutputBuilder::set_rgb_sampling_function(const std::string &shader) { // MARK: - Program compilation void OpenGLOutputBuilder::prepare_composite_input_shaders() { - composite_input_shader_program_ = OpenGL::IntermediateShader::make_source_conversion_shader(composite_shader_, rgb_shader_); + composite_input_shader_program_ = OpenGL::IntermediateShader::make_composite_source_shader(composite_shader_, svideo_shader_, rgb_shader_); composite_input_shader_program_->set_source_texture_unit(source_data_texture_unit); composite_input_shader_program_->set_output_size(IntermediateBufferWidth, IntermediateBufferHeight); @@ -285,6 +302,7 @@ void OpenGLOutputBuilder::prepare_composite_input_shaders() { composite_chrominance_filter_shader_program_->set_source_texture_unit(work_texture_ ? work_texture_unit : separated_texture_unit); composite_chrominance_filter_shader_program_->set_output_size(IntermediateBufferWidth, IntermediateBufferHeight); + // TODO: the below is related to texture fencing, which is not yet implemented correctly, so not yet enabled. 
if(work_texture_) { composite_input_shader_program_->set_is_double_height(true, 0.0f, 0.0f); composite_separation_filter_program_->set_is_double_height(true, 0.0f, 0.5f); @@ -296,6 +314,19 @@ void OpenGLOutputBuilder::prepare_composite_input_shaders() { } } +void OpenGLOutputBuilder::prepare_svideo_input_shaders() { + svideo_input_shader_program_ = OpenGL::IntermediateShader::make_svideo_source_shader(svideo_shader_, rgb_shader_); + svideo_input_shader_program_->set_source_texture_unit(source_data_texture_unit); + svideo_input_shader_program_->set_output_size(IntermediateBufferWidth, IntermediateBufferHeight); + + // TODO: the below is related to texture fencing, which is not yet implemented correctly, so not yet enabled. + if(work_texture_) { + svideo_input_shader_program_->set_is_double_height(true, 0.0f, 0.0f); + } else { + svideo_input_shader_program_->set_is_double_height(false); + } +} + void OpenGLOutputBuilder::prepare_rgb_input_shaders() { if(rgb_shader_.size()) { rgb_input_shader_program_ = OpenGL::IntermediateShader::make_rgb_source_shader(rgb_shader_); @@ -333,6 +364,11 @@ void OpenGLOutputBuilder::prepare_source_vertex_array() { Shader::get_input_name(Shader::Input::PhaseTimeAndAmplitude), 3, GL_UNSIGNED_BYTE, GL_FALSE, SourceVertexSize, (void *)SourceVertexOffsetOfPhaseTimeAndAmplitude, 1); + + svideo_input_shader_program_->enable_vertex_attribute_with_pointer( + Shader::get_input_name(Shader::Input::InputStart), + 2, GL_UNSIGNED_SHORT, GL_FALSE, SourceVertexSize, + (void *)SourceVertexOffsetOfInputStart, 1); } } @@ -363,9 +399,9 @@ void OpenGLOutputBuilder::prepare_output_vertex_array() { // MARK: - Public Configuration -void OpenGLOutputBuilder::set_output_device(OutputDevice output_device) { - if(output_device_ != output_device) { - output_device_ = output_device; +void OpenGLOutputBuilder::set_video_signal(VideoSignal video_signal) { + if(video_signal_ != video_signal) { + video_signal_ = video_signal; composite_src_output_y_ = 0; last_output_width_ = 0; last_output_height_ = 0; @@ -413,6 +449,7 @@ void OpenGLOutputBuilder::set_colour_space_uniforms() { if(composite_input_shader_program_) composite_input_shader_program_->set_colour_conversion_matrices(fromRGB, toRGB); if(composite_separation_filter_program_) composite_separation_filter_program_->set_colour_conversion_matrices(fromRGB, toRGB); if(composite_chrominance_filter_shader_program_) composite_chrominance_filter_shader_program_->set_colour_conversion_matrices(fromRGB, toRGB); + if(svideo_input_shader_program_) svideo_input_shader_program_->set_colour_conversion_matrices(fromRGB, toRGB); } void OpenGLOutputBuilder::set_gamma() { @@ -433,7 +470,8 @@ float OpenGLOutputBuilder::get_composite_output_width() const { void OpenGLOutputBuilder::set_output_shader_width() { if(output_shader_program_) { - const float width = get_is_television_output() ? get_composite_output_width() : 1.0f; + // For anything that isn't RGB, scale so that sampling is in-phase with the colour subcarrier. + const float width = (video_signal_ == VideoSignal::RGB) ? 
1.0f : get_composite_output_width(); output_shader_program_->set_input_width_scaler(width); } } @@ -464,6 +502,10 @@ void OpenGLOutputBuilder::set_timing_uniforms() { composite_input_shader_program_->set_width_scalers(1.0f, output_width); composite_input_shader_program_->set_extension(0.0f); } + if(svideo_input_shader_program_) { + svideo_input_shader_program_->set_width_scalers(1.0f, output_width); + svideo_input_shader_program_->set_extension(0.0f); + } if(rgb_input_shader_program_) { rgb_input_shader_program_->set_width_scalers(1.0f, 1.0f); } diff --git a/Outputs/CRT/Internals/CRTOpenGL.hpp b/Outputs/CRT/Internals/CRTOpenGL.hpp index 36deb7eb4..7802b85c2 100644 --- a/Outputs/CRT/Internals/CRTOpenGL.hpp +++ b/Outputs/CRT/Internals/CRTOpenGL.hpp @@ -33,7 +33,7 @@ class OpenGLOutputBuilder { ColourSpace colour_space_; unsigned int colour_cycle_numerator_; unsigned int colour_cycle_denominator_; - OutputDevice output_device_; + VideoSignal video_signal_; float gamma_; // timing information to allow reasoning about input information @@ -49,12 +49,14 @@ class OpenGLOutputBuilder { // Other things the caller may have provided. std::string composite_shader_; + std::string svideo_shader_; std::string rgb_shader_; GLint target_framebuffer_ = 0; // Methods used by the OpenGL code void prepare_output_shader(); void prepare_rgb_input_shaders(); + void prepare_svideo_input_shaders(); void prepare_composite_input_shaders(); void prepare_output_vertex_array(); @@ -73,6 +75,8 @@ class OpenGLOutputBuilder { std::unique_ptr composite_separation_filter_program_; std::unique_ptr composite_chrominance_filter_shader_program_; + std::unique_ptr svideo_input_shader_program_; + std::unique_ptr rgb_input_shader_program_; std::unique_ptr rgb_filter_shader_program_; @@ -99,7 +103,6 @@ class OpenGLOutputBuilder { GLsync fence_; float get_composite_output_width() const; void set_output_shader_width(); - bool get_is_television_output(); public: // These two are protected by output_mutex_. 
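Earlier in this diff, Electron, MSX and Oric each repeat the same ternary when applying a Configurable::Display selection. Below is a sketch of that mapping gathered into one place; the helper name is invented, and none of those machines currently exposes an s-video option, so only RGB and Composite are reachable by this route. SVideo is chosen directly by components such as the 6560 above.

    // Sketch only: the display-option mapping shared by Electron, MSX and Oric,
    // factored into a hypothetical helper.
    Outputs::CRT::VideoSignal video_signal_for(Configurable::Display display) {
        return (display == Configurable::Display::RGB) ?
            Outputs::CRT::VideoSignal::RGB :
            Outputs::CRT::VideoSignal::Composite;
    }

    // Typical use from a machine's set_selections:
    //     get_crt()->set_video_signal(video_signal_for(display));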
@@ -130,8 +133,8 @@ class OpenGLOutputBuilder { return std::unique_lock(output_mutex_); } - inline OutputDevice get_output_device() { - return output_device_; + inline VideoSignal get_output_device() { + return video_signal_; } inline uint16_t get_composite_output_y() { @@ -147,12 +150,13 @@ class OpenGLOutputBuilder { composite_src_output_y_++; } - void set_target_framebuffer(GLint target_framebuffer); + void set_target_framebuffer(GLint); void draw_frame(unsigned int output_width, unsigned int output_height, bool only_if_dirty); void set_openGL_context_will_change(bool should_delete_resources); - void set_composite_sampling_function(const std::string &shader); - void set_rgb_sampling_function(const std::string &shader); - void set_output_device(OutputDevice output_device); + void set_composite_sampling_function(const std::string &); + void set_svideo_sampling_function(const std::string &); + void set_rgb_sampling_function(const std::string &); + void set_video_signal(VideoSignal); void set_timing(unsigned int input_frequency, unsigned int cycles_per_line, unsigned int height_of_display, unsigned int horizontal_scan_period, unsigned int vertical_scan_period, unsigned int vertical_period_divider); }; diff --git a/Outputs/CRT/Internals/Shaders/IntermediateShader.cpp b/Outputs/CRT/Internals/Shaders/IntermediateShader.cpp index fec05e14b..9741ee2f0 100644 --- a/Outputs/CRT/Internals/Shaders/IntermediateShader.cpp +++ b/Outputs/CRT/Internals/Shaders/IntermediateShader.cpp @@ -113,7 +113,7 @@ std::unique_ptr IntermediateShader::make_shader(const std::s })); } -std::unique_ptr IntermediateShader::make_source_conversion_shader(const std::string &composite_shader, const std::string &rgb_shader) { +std::unique_ptr IntermediateShader::make_composite_source_shader(const std::string &composite_shader, const std::string &svideo_shader, const std::string &rgb_shader) { std::ostringstream fragment_shader; fragment_shader << "#version 150\n" @@ -124,23 +124,30 @@ std::unique_ptr IntermediateShader::make_source_conversion_s "out vec4 fragColour;" - "uniform usampler2D texID;"; + "uniform usampler2D texID;" + << composite_shader; - if(!composite_shader.size()) { - std::ostringstream derived_composite_sample; - derived_composite_sample << - rgb_shader << - "uniform mat3 rgbToLumaChroma;" - "float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)" - "{" - "vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));" - "vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;" - "vec2 quadrature = vec2(cos(phase), -sin(phase)) * amplitude;" - "return dot(lumaChromaColour, vec3(1.0 - amplitude, quadrature));" - "}"; - fragment_shader << derived_composite_sample.str(); - } else { - fragment_shader << composite_shader; + if(composite_shader.empty()) { + if(!svideo_shader.empty()) { + fragment_shader << + svideo_shader << + "float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)" + "{" + "vec2 svideoColour = svideo_sample(texID, coordinate, iCoordinate, phase);" + "return mix(svideoColour.x, svideoColour.y, amplitude);" + "}"; + } else { + fragment_shader << + rgb_shader << + "uniform mat3 rgbToLumaChroma;" + "float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)" + "{" + "vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));" + "vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;" + "vec2 quadrature = 
vec2(cos(phase), -sin(phase)) * amplitude;" + "return dot(lumaChromaColour, vec3(1.0 - amplitude, quadrature));" + "}"; + } } fragment_shader << @@ -152,6 +159,44 @@ std::unique_ptr IntermediateShader::make_source_conversion_s return make_shader(fragment_shader.str(), true, true); } +std::unique_ptr IntermediateShader::make_svideo_source_shader(const std::string &svideo_shader, const std::string &rgb_shader) { + std::ostringstream fragment_shader; + fragment_shader << + "#version 150\n" + + "in vec2 inputPositionsVarying[11];" + "in vec2 iInputPositionVarying;" + "in vec3 phaseAndAmplitudeVarying;" + + "out vec3 fragColour;" + + "uniform usampler2D texID;" + << svideo_shader; + + if(svideo_shader.empty()) { + fragment_shader + << rgb_shader << + "uniform mat3 rgbToLumaChroma;" + "vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)" + "{" + "vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));" + "vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;" + "vec2 quadrature = vec2(cos(phase), -sin(phase));" + "return vec2(lumaChromaColour.x, 0.5 + dot(quadrature, lumaChromaColour.yz) * 0.5);" + "}"; + } + + fragment_shader << + "void main(void)" + "{" + "vec2 sample = svideo_sample(texID, inputPositionsVarying[5], iInputPositionVarying, phaseAndAmplitudeVarying.x);" + "vec2 quadrature = vec2(cos(phaseAndAmplitudeVarying.x), -sin(phaseAndAmplitudeVarying.x)) * 0.5 * phaseAndAmplitudeVarying.z;" + "fragColour = vec3(sample.x, vec2(0.5) + (sample.y * quadrature));" + "}"; + + return make_shader(fragment_shader.str(), true, true); +} + std::unique_ptr IntermediateShader::make_rgb_source_shader(const std::string &rgb_shader) { std::ostringstream fragment_shader; fragment_shader << diff --git a/Outputs/CRT/Internals/Shaders/IntermediateShader.hpp b/Outputs/CRT/Internals/Shaders/IntermediateShader.hpp index 96c9c86dc..fb7775436 100644 --- a/Outputs/CRT/Internals/Shaders/IntermediateShader.hpp +++ b/Outputs/CRT/Internals/Shaders/IntermediateShader.hpp @@ -43,30 +43,50 @@ public: /*! Constructs and returns an intermediate shader that will take runs from the inputPositions, - converting them to single-channel composite values using @c composite_shader if supplied - or @c rgb_shader and a reference composite conversion if @c composite_shader is @c nullptr. + converting them to single-channel composite values using @c composite_shader if non-empty + or a reference composite conversion of @c svideo_shader (first preference) or + @c rgb_shader (second preference) otherwise. + + [input format] => one-channel composite. */ - static std::unique_ptr make_source_conversion_shader(const std::string &composite_shader, const std::string &rgb_shader); + static std::unique_ptr make_composite_source_shader(const std::string &composite_shader, const std::string &svideo_shader, const std::string &rgb_shader); + + /*! + Constructs and returns an intermediate shader that will take runs from the inputPositions, + converting them to two-channel svideo values using @c svideo_shader if non-empty + or a reference svideo conversion of @c rgb_shader otherwise. + + [input format] => three-channel Y, noisy (m, n). + */ + static std::unique_ptr make_svideo_source_shader(const std::string &svideo_shader, const std::string &rgb_shader); /*! Constructs and returns an intermediate shader that will take runs from the inputPositions, converting them to RGB values using @c rgb_shader. + + [input format] => three-channel RGB. 
*/ static std::unique_ptr<IntermediateShader> make_rgb_source_shader(const std::string &rgb_shader); /*! Constructs and returns an intermediate shader that will read composite samples from the R channel, filter them to obtain luminance, stored to R, and to separate out unfiltered chrominance, store to G and B. + + one-channel composite => three-channel Y, noisy (m, n). */ static std::unique_ptr<IntermediateShader> make_chroma_luma_separation_shader(); /*! Constructs and returns an intermediate shader that will pass R through unchanged while filtering G and B. + + three-channel Y, noisy (m, n) => three-channel RGB. */ static std::unique_ptr<IntermediateShader> make_chroma_filter_shader(); /*! Constructs and returns an intermediate shader that will filter R, G and B. + + three-channel RGB => frequency-limited three-channel RGB. */ static std::unique_ptr<IntermediateShader> make_rgb_filter_shader();
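The "three-channel Y, noisy (m, n)" notation above describes the buffer layout shared by the composite-separation and s-video input stages: luminance in the first channel, and chrominance stored as a quadrature pair biased around 0.5 in the other two, exactly as make_svideo_source_shader writes it. Below is a CPU-side illustration of that round trip for a single isolated sample; it is a simplification, since the real chrominance filter also lowpass-filters (m, n) across neighbouring samples before demodulating.

    // Illustration only: encode and decode of one sample through the Y/(m, n)
    // representation, using plain floats in place of GLSL.
    #include <cmath>
    #include <cstdio>

    int main() {
        const float luminance   = 0.6f;   // Y, as produced by svideo_sample
        const float chrominance = 0.25f;  // instantaneous chroma level from svideo_sample
        const float phase       = 1.2f;   // colour subcarrier phase at this sample
        const float amplitude   = 0.7f;   // colour burst amplitude for this scan

        // Encode, as in make_svideo_source_shader: project chroma onto a quadrature
        // pair scaled by half the burst amplitude, biased around 0.5 so it fits an
        // unsigned texture channel.
        const float m = 0.5f + chrominance * std::cos(phase) * 0.5f * amplitude;
        const float n = 0.5f + chrominance * -std::sin(phase) * 0.5f * amplitude;

        // Decode: projecting back onto the same quadrature recovers the chroma level,
        // because cos^2 + sin^2 = 1.
        const float recovered =
            ((m - 0.5f) * std::cos(phase) + (n - 0.5f) * -std::sin(phase)) * 2.0f / amplitude;

        std::printf("Y=%.2f  (m, n)=(%.3f, %.3f)  chroma in=%.3f out=%.3f\n",
            luminance, m, n, chrominance, recovered);
        return 0;
    }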