1
0
Mirror of https://github.com/TomHarte/CLK.git, synced 2024-07-02 20:30:00 +00:00

Attempts to provide an implementation of SVideo support.

This commit is contained in:
Thomas Harte 2018-03-30 12:41:20 -04:00
parent 33281b9d89
commit 207d462dbf
4 changed files with 114 additions and 25 deletions

View File

@ -71,10 +71,6 @@ OpenGLOutputBuilder::~OpenGLOutputBuilder() {
glDeleteVertexArrays(1, &output_vertex_array_);
}
bool OpenGLOutputBuilder::get_is_television_output() {
	// Output follows the television (composite-processed) path when the
	// selected signal is composite; otherwise it still falls back to the
	// television path if no RGB input shader has been built.
	if(video_signal_ == VideoSignal::Composite) return true;
	return !rgb_input_shader_program_;
}
// Records the framebuffer object that final output should be drawn into.
void OpenGLOutputBuilder::set_target_framebuffer(GLint target_framebuffer) {
target_framebuffer_ = target_framebuffer;
}
@ -86,6 +82,7 @@ void OpenGLOutputBuilder::draw_frame(unsigned int output_width, unsigned int out
// establish essentials
if(!output_shader_program_) {
prepare_composite_input_shaders();
prepare_svideo_input_shaders();
prepare_rgb_input_shaders();
prepare_source_vertex_array();
@ -153,21 +150,34 @@ void OpenGLOutputBuilder::draw_frame(unsigned int output_width, unsigned int out
};
// for composite video, go through four steps to get to something that can be painted to the output
RenderStage composite_render_stages[] = {
const RenderStage composite_render_stages[] = {
{composite_input_shader_program_.get(), composite_texture_.get(), {0.0, 0.0, 0.0}},
{composite_separation_filter_program_.get(), separated_texture_.get(), {0.0, 0.5, 0.5}},
{composite_chrominance_filter_shader_program_.get(), filtered_texture_.get(), {0.0, 0.0, 0.0}},
{nullptr, nullptr}
};
// for RGB video, there's only two steps
RenderStage rgb_render_stages[] = {
// for s-video, there are two steps — it's like composite but skips separation
const RenderStage svideo_render_stages[] = {
{composite_input_shader_program_.get(), separated_texture_.get(), {0.0, 0.5, 0.5}},
{composite_chrominance_filter_shader_program_.get(), filtered_texture_.get(), {0.0, 0.0, 0.0}},
{nullptr, nullptr}
};
// for RGB video, there are also only two steps; a lowpass filter is still applied to reflect physical reality
const RenderStage rgb_render_stages[] = {
{rgb_input_shader_program_.get(), composite_texture_.get(), {0.0, 0.0, 0.0}},
{rgb_filter_shader_program_.get(), filtered_texture_.get(), {0.0, 0.0, 0.0}},
{nullptr, nullptr}
};
RenderStage *active_pipeline = get_is_television_output() ? composite_render_stages : rgb_render_stages;
const RenderStage *active_pipeline;
switch(video_signal_) {
default:
case VideoSignal::Composite: active_pipeline = composite_render_stages; break;
case VideoSignal::SVideo: active_pipeline = svideo_render_stages; break;
case VideoSignal::RGB: active_pipeline = rgb_render_stages; break;
}
if(array_submission.input_size || array_submission.output_size) {
// all drawing will be from the source vertex array and without blending
@ -245,6 +255,7 @@ void OpenGLOutputBuilder::reset_all_OpenGL_state() {
composite_input_shader_program_ = nullptr;
composite_separation_filter_program_ = nullptr;
composite_chrominance_filter_shader_program_ = nullptr;
svideo_input_shader_program_ = nullptr;
rgb_input_shader_program_ = nullptr;
rgb_filter_shader_program_ = nullptr;
output_shader_program_ = nullptr;
@ -264,6 +275,12 @@ void OpenGLOutputBuilder::set_composite_sampling_function(const std::string &sha
reset_all_OpenGL_state();
}
// Installs the caller-supplied s-video sampling shader fragment and forces
// all GL programs to be rebuilt so the new function takes effect.
void OpenGLOutputBuilder::set_svideo_sampling_function(const std::string &shader) {
	// Serialise against the drawing thread before mutating shader state.
	std::lock_guard<std::mutex> lock(output_mutex_);
	svideo_shader_ = shader;
	reset_all_OpenGL_state();
}
void OpenGLOutputBuilder::set_rgb_sampling_function(const std::string &shader) {
std::lock_guard<std::mutex> lock_guard(output_mutex_);
rgb_shader_ = shader;
@ -273,7 +290,7 @@ void OpenGLOutputBuilder::set_rgb_sampling_function(const std::string &shader) {
// MARK: - Program compilation
void OpenGLOutputBuilder::prepare_composite_input_shaders() {
composite_input_shader_program_ = OpenGL::IntermediateShader::make_source_conversion_shader(composite_shader_, rgb_shader_);
composite_input_shader_program_ = OpenGL::IntermediateShader::make_composite_source_shader(composite_shader_, svideo_shader_, rgb_shader_);
composite_input_shader_program_->set_source_texture_unit(source_data_texture_unit);
composite_input_shader_program_->set_output_size(IntermediateBufferWidth, IntermediateBufferHeight);
@ -285,6 +302,7 @@ void OpenGLOutputBuilder::prepare_composite_input_shaders() {
composite_chrominance_filter_shader_program_->set_source_texture_unit(work_texture_ ? work_texture_unit : separated_texture_unit);
composite_chrominance_filter_shader_program_->set_output_size(IntermediateBufferWidth, IntermediateBufferHeight);
// TODO: the below is related to texture fencing, which is not yet implemented correctly, so not yet enabled.
if(work_texture_) {
composite_input_shader_program_->set_is_double_height(true, 0.0f, 0.0f);
composite_separation_filter_program_->set_is_double_height(true, 0.0f, 0.5f);
@ -296,6 +314,19 @@ void OpenGLOutputBuilder::prepare_composite_input_shaders() {
}
}
// Builds the s-video input-conversion shader and configures its fixed
// parameters. Structurally parallel to prepare_composite_input_shaders(),
// but the s-video path produces a two-channel (luma/chroma) intermediate
// rather than a single composite channel.
void OpenGLOutputBuilder::prepare_svideo_input_shaders() {
svideo_input_shader_program_ = OpenGL::IntermediateShader::make_svideo_source_shader(svideo_shader_, rgb_shader_);
svideo_input_shader_program_->set_source_texture_unit(source_data_texture_unit);
svideo_input_shader_program_->set_output_size(IntermediateBufferWidth, IntermediateBufferHeight);
// TODO: the below is related to texture fencing, which is not yet implemented correctly, so not yet enabled.
if(work_texture_) {
// NOTE(review): presumably double-height mode packs two passes into the
// shared work texture — confirm against set_is_double_height's definition.
svideo_input_shader_program_->set_is_double_height(true, 0.0f, 0.0f);
} else {
svideo_input_shader_program_->set_is_double_height(false);
}
}
void OpenGLOutputBuilder::prepare_rgb_input_shaders() {
if(rgb_shader_.size()) {
rgb_input_shader_program_ = OpenGL::IntermediateShader::make_rgb_source_shader(rgb_shader_);
@ -433,7 +464,8 @@ float OpenGLOutputBuilder::get_composite_output_width() const {
void OpenGLOutputBuilder::set_output_shader_width() {
if(output_shader_program_) {
const float width = get_is_television_output() ? get_composite_output_width() : 1.0f;
// For anything that isn't RGB, scale so that sampling is in-phase with the colour subcarrier.
const float width = (video_signal_ == VideoSignal::RGB) ? 1.0f : get_composite_output_width();
output_shader_program_->set_input_width_scaler(width);
}
}

View File

@ -49,12 +49,14 @@ class OpenGLOutputBuilder {
// Other things the caller may have provided.
std::string composite_shader_;
std::string svideo_shader_;
std::string rgb_shader_;
GLint target_framebuffer_ = 0;
// Methods used by the OpenGL code
void prepare_output_shader();
void prepare_rgb_input_shaders();
void prepare_svideo_input_shaders();
void prepare_composite_input_shaders();
void prepare_output_vertex_array();
@ -101,7 +103,6 @@ class OpenGLOutputBuilder {
GLsync fence_;
float get_composite_output_width() const;
void set_output_shader_width();
bool get_is_television_output();
public:
// These two are protected by output_mutex_.

View File

@ -113,7 +113,7 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_shader(const std::s
}));
}
std::unique_ptr<IntermediateShader> IntermediateShader::make_source_conversion_shader(const std::string &composite_shader, const std::string &rgb_shader) {
std::unique_ptr<IntermediateShader> IntermediateShader::make_composite_source_shader(const std::string &composite_shader, const std::string &svideo_shader, const std::string &rgb_shader) {
std::ostringstream fragment_shader;
fragment_shader <<
"#version 150\n"
@ -128,16 +128,26 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_source_conversion_s
if(!composite_shader.size()) {
std::ostringstream derived_composite_sample;
derived_composite_sample <<
rgb_shader <<
"uniform mat3 rgbToLumaChroma;"
"float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));"
"vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;"
"vec2 quadrature = vec2(cos(phase), -sin(phase)) * amplitude;"
"return dot(lumaChromaColour, vec3(1.0 - amplitude, quadrature));"
"}";
if(!svideo_shader.empty()) {
derived_composite_sample <<
svideo_shader <<
"float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"vec2 svideoColour = svideo_sample(texID, coordinate, iCoordinate, phase);"
"return mix(svideoColour.x, svideoColour.y, amplitude);"
"}";
} else {
derived_composite_sample <<
rgb_shader <<
"uniform mat3 rgbToLumaChroma;"
"float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));"
"vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;"
"vec2 quadrature = vec2(cos(phase), -sin(phase)) * amplitude;"
"return dot(lumaChromaColour, vec3(1.0 - amplitude, quadrature));"
"}";
}
fragment_shader << derived_composite_sample.str();
} else {
fragment_shader << composite_shader;
@ -152,6 +162,44 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_source_conversion_s
return make_shader(fragment_shader.str(), true, true);
}
// Builds the fragment shader that converts raw source data into two-channel
// s-video values: x = luminance, y = chrominance modulated at the current
// colour phase. Uses @c svideo_shader if supplied; otherwise derives an
// equivalent from @c rgb_shader via the rgbToLumaChroma matrix.
std::unique_ptr<IntermediateShader> IntermediateShader::make_svideo_source_shader(const std::string &svideo_shader, const std::string &rgb_shader) {
	std::ostringstream fragment_shader;
	fragment_shader <<
		"#version 150\n"
		"in vec2 inputPositionsVarying[11];"
		"in vec2 iInputPositionVarying;"
		"in vec3 phaseAndAmplitudeVarying;"
		"out vec3 fragColour;"
		"uniform usampler2D texID;"
		<< svideo_shader;	// appending an empty string is a harmless no-op
	if(svideo_shader.empty()) {
		// No s-video sampler supplied: synthesise one from the RGB sampler.
		fragment_shader
			<< rgb_shader <<
			"uniform mat3 rgbToLumaChroma;"
			// BUG FIX: was declared 'float' but must return two channels.
			"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)"
			"{"
				"vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));"
				"vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;"
				"vec2 quadrature = vec2(cos(phase), -sin(phase));"
				// BUG FIX: vec2(vec3, vec2) is an invalid 5-component GLSL
				// constructor; pack luma plus quadrature-modulated chroma,
				// matching the vec2 contract used by the composite path.
				"return vec2(lumaChromaColour.x, dot(quadrature, lumaChromaColour.yz));"
			"}";
	}
	fragment_shader <<
		"void main(void)"
		"{"
			"vec2 sample = svideo_sample(texID, inputPositionsVarying[5], iInputPositionVarying, phaseAndAmplitudeVarying.x);"
			"vec2 quadrature = vec2(cos(phaseAndAmplitudeVarying.x), -sin(phaseAndAmplitudeVarying.x));"
			// BUG FIX: 'luminance' and 'chrominance' were undeclared; use the
			// two channels returned by svideo_sample. Chroma is recentred on
			// 0.5 for storage in an unsigned texture.
			"fragColour = vec3(sample.x, vec2(0.5) + sample.y * quadrature);"
		"}";
	return make_shader(fragment_shader.str(), true, true);
}
std::unique_ptr<IntermediateShader> IntermediateShader::make_rgb_source_shader(const std::string &rgb_shader) {
std::ostringstream fragment_shader;
fragment_shader <<

View File

@ -43,10 +43,18 @@ public:
/*!
Constructs and returns an intermediate shader that will take runs from the inputPositions,
converting them to single-channel composite values using @c composite_shader if supplied
or @c rgb_shader and a reference composite conversion if @c composite_shader is @c nullptr.
converting them to single-channel composite values using @c composite_shader if non-empty
or a reference composite conversion of @c svideo_shader (first preference) or
@c rgb_shader (second preference) otherwise.
*/
static std::unique_ptr<IntermediateShader> make_source_conversion_shader(const std::string &composite_shader, const std::string &rgb_shader);
static std::unique_ptr<IntermediateShader> make_composite_source_shader(const std::string &composite_shader, const std::string &svideo_shader, const std::string &rgb_shader);
/*!
Constructs and returns an intermediate shader that will take runs from the inputPositions,
converting them to two-channel svideo values using @c svideo_shader if non-empty
or a reference svideo conversion of @c rgb_shader otherwise.
*/
static std::unique_ptr<IntermediateShader> make_svideo_source_shader(const std::string &svideo_shader, const std::string &rgb_shader);
/*!
Constructs and returns an intermediate shader that will take runs from the inputPositions,