Mirror of https://github.com/TomHarte/CLK.git

Having decided that these things probably need to be separate, starts drilling down on S-Video.

Author: Thomas Harte
Date:   2020-08-25 22:05:19 -04:00
Parent: 807cb99f6d
Commit: e502d336db

2 changed files with 39 additions and 24 deletions

Changed file 1 of 2:

@@ -26,6 +26,7 @@ struct Uniforms {
 	float zoom;
 	simd::float2 offset;
 	simd::float3 firCoefficients[8];
+	float radiansPerPixel;
 };

 constexpr size_t NumBufferedScans = 2048;
@@ -151,9 +152,9 @@ using BufferingScanTarget = Outputs::Display::BufferingScanTarget;
 		depthStencilDescriptor.frontFaceStencil.stencilFailureOperation = MTLStencilOperationReplace;
 		_clearStencilState = [view.device newDepthStencilStateWithDescriptor:depthStencilDescriptor];

-		// Create a composition texture up front.
+		// Create a composition texture up front. (TODO: is it worth switching to an 8bpp texture in composite mode?)
 		MTLTextureDescriptor *const textureDescriptor = [MTLTextureDescriptor
-			texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
+			texture2DDescriptorWithPixelFormat:MTLPixelFormatRG8Unorm
 			width:2048	// This 'should do'.
 			height:NumBufferedLines
 			mipmapped:NO];
@@ -359,9 +360,8 @@ using BufferingScanTarget = Outputs::Display::BufferingScanTarget;
 #endif

 	// Build the composition pipeline if one is in use.
-	if(_isUsingCompositionPipeline) {
-		const bool isSVideoOutput = modals.display_type == Outputs::Display::DisplayType::SVideo;
-
+	const bool isSVideoOutput = modals.display_type == Outputs::Display::DisplayType::SVideo;
+	if(_isUsingCompositionPipeline) {
 		pipelineDescriptor.colorAttachments[0].pixelFormat = _compositionTexture.pixelFormat;
 		pipelineDescriptor.vertexFunction = [library newFunctionWithName:@"scanToComposition"];
 		pipelineDescriptor.fragmentFunction =
@@ -395,11 +395,13 @@ using BufferingScanTarget = Outputs::Display::BufferingScanTarget;
 		}

 		// Whether S-Video or composite, apply the same relatively strong filter to colour channels.
-		SignalProcessing::FIRFilter chrominancefilter(15, cyclesPerLine, 0.0f, colourCyclesPerLine * 0.5f);
+		SignalProcessing::FIRFilter chrominancefilter(15, cyclesPerLine, 0.0f, colourCyclesPerLine * 0.25f);
 		const auto calculatedCoefficients = chrominancefilter.get_coefficients();
 		for(size_t c = 0; c < 8; ++c) {
-			firCoefficients[c].y = firCoefficients[c].z = calculatedCoefficients[c];
+			firCoefficients[c].y = firCoefficients[c].z = calculatedCoefficients[c] * (isSVideoOutput ? 3.0f : 1.0f);
 		}
+
+		uniforms()->radiansPerPixel = (colourCyclesPerLine * 3.141592654f * 2.0f) / cyclesPerLine;
 	}
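For reference, radiansPerPixel as set above is the angle the colour subcarrier advances per composition-buffer pixel: colourCyclesPerLine whole cycles of 2π radians spread evenly over cyclesPerLine samples. A minimal sketch of the arithmetic, using hypothetical NTSC-style figures rather than values taken from this commit:

// Illustrative sketch only; the line-rate figures below are hypothetical
// NTSC-style values chosen so the result is easy to check by hand.
#include <cstdio>

int main() {
	const float colourCyclesPerLine = 227.5f;	// hypothetical: colour cycles per line.
	const float cyclesPerLine = 910.0f;		// hypothetical: composition-buffer samples per line.

	// Same expression as uniforms()->radiansPerPixel above.
	const float radiansPerPixel = (colourCyclesPerLine * 3.141592654f * 2.0f) / cyclesPerLine;

	// With these numbers: 227.5 * 2π / 910 = π/2, i.e. four pixels per colour cycle.
	std::printf("%.4f radians per pixel, %.2f pixels per colour cycle\n",
		radiansPerPixel, (2.0f * 3.141592654f) / radiansPerPixel);
	return 0;
}

The shader changes below use this value to phase-shift each FIR tap relative to the centre sample.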
@@ -407,7 +409,7 @@ using BufferingScanTarget = Outputs::Display::BufferingScanTarget;
 	// Build the output pipeline.
 	pipelineDescriptor.vertexFunction = [library newFunctionWithName:_isUsingCompositionPipeline ? @"lineToDisplay" : @"scanToDisplay"];
 	if(_isUsingCompositionPipeline) {
-		pipelineDescriptor.fragmentFunction = [library newFunctionWithName:@"filterFragment"];
+		pipelineDescriptor.fragmentFunction = [library newFunctionWithName:isSVideoOutput ? @"filterSVideoFragment" : @"filterCompositeFragment"];
 	} else {
 		const bool isRGBOutput = modals.display_type == Outputs::Display::DisplayType::RGB;
 		pipelineDescriptor.fragmentFunction =

Changed file 2 of 2:

@@ -34,6 +34,9 @@ struct Uniforms {
 	// Describes the FIR filter in use; it'll be 15 coefficients but they're
 	// symmetrical around the centre.
 	float3 firCoefficients[8];
+
+	// Maps from pixel offsets into the composition buffer to angular difference.
+	float radiansPerPixel;
 };

 namespace {
@@ -217,15 +220,20 @@ fragment float4 samplePhaseLinkedLuminance8(SourceInterpolator vert [[stage_in]]
 // The luminance/phase format can produce either composite or S-Video.
-fragment float4 sampleLuminance8Phase8(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]]) {
-	return float4(texture.sample(standardSampler, vert.textureCoordinates).rg, 0.0, 1.0);
-}
-
-fragment float4 compositeSampleLuminance8Phase8(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]]) {
+float2 convertLuminance8Phase8(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]]) {
 	const auto luminancePhase = texture.sample(standardSampler, vert.textureCoordinates).rg;
 	const float phaseOffset = 3.141592654 * 4.0 * luminancePhase.g;
 	const float rawChroma = step(luminancePhase.g, 0.75) * cos(vert.colourPhase + phaseOffset);
-	const float level = mix(luminancePhase.r, rawChroma, vert.colourAmplitude);
+	return float2(luminancePhase.r, rawChroma);
+}
+
+fragment float2 sampleLuminance8Phase8(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]]) {
+	return convertLuminance8Phase8(vert, texture);
+}
+
+fragment float4 compositeSampleLuminance8Phase8(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]]) {
+	const float2 luminanceChroma = convertLuminance8Phase8(vert, texture);
+	const float level = mix(luminanceChroma.r, luminanceChroma.g, vert.colourAmplitude);
 	return float4(
 		level,
 		0.5 + 0.5*level*cos(vert.colourPhase),
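The refactoring above keeps one shared decode step: convertLuminance8Phase8 returns luminance and raw chroma separately, which the S-Video path passes through as two channels while the composite path collapses them into a single level weighted by the colour amplitude. A small sketch of that final mix, equivalent to the shader's mix() call; the struct and function names are illustrative, not from the source:

// Illustrative only: how compositeSampleLuminance8Phase8 collapses the two
// channels into one composite level; mix(a, b, t) = a*(1-t) + b*t.
struct LumaChroma {
	float luminance;	// the composition sample's .r channel
	float chroma;		// the raw, still-modulated subcarrier value
};

float composite_level(LumaChroma sample, float colourAmplitude) {
	return sample.luminance * (1.0f - colourAmplitude) + sample.chroma * colourAmplitude;
}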
@@ -331,26 +339,31 @@ fragment float4 clearFragment() {
 // MARK: - Conversion fragment shaders

-fragment float4 filterFragment(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]], constant Uniforms &uniforms [[buffer(0)]]) {
-#define Sample(x) texture.sample(standardSampler, vert.textureCoordinates + float2(x, 0.0f)).rgb
-	const float3 rawSamples[] = {
+fragment float4 filterSVideoFragment(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]], constant Uniforms &uniforms [[buffer(0)]]) {
+#define Sample(x) texture.sample(standardSampler, vert.textureCoordinates + float2(x, 0.0f)).rg - float2(0.0f, 0.5f)
+	const float2 rawSamples[] = {
 		Sample(-7), Sample(-6), Sample(-5), Sample(-4), Sample(-3), Sample(-2), Sample(-1),
 		Sample(0),
 		Sample(1), Sample(2), Sample(3), Sample(4), Sample(5), Sample(6), Sample(7),
 	};
 #undef Sample

-#define Sample(c, o) \
-	uniforms.firCoefficients[c] * rawSamples[o]
+#define Sample(c, o, a) \
+	uniforms.firCoefficients[c] * float3(rawSamples[o].r, rawSamples[o].g*cos(vert.colourPhase + (a)*uniforms.radiansPerPixel), rawSamples[o].g*sin(vert.colourPhase + (a)*uniforms.radiansPerPixel))
 	const float3 colour =
-		Sample(0, 0) + Sample(1, 1) + Sample(2, 2) + Sample(3, 3) +
-		Sample(4, 4) + Sample(5, 5) + Sample(6, 6) +
-		Sample(7, 7) +
-		Sample(6, 8) + Sample(5, 9) + Sample(4, 10) +
-		Sample(3, 11) + Sample(2, 12) + Sample(1, 13) + Sample(0, 14);
+		Sample(0, 0, -7) + Sample(1, 1, -6) + Sample(2, 2, -5) + Sample(3, 3, -4) +
+		Sample(4, 4, -3) + Sample(5, 5, -2) + Sample(6, 6, -1) +
+		Sample(7, 7, 0) +
+		Sample(6, 8, 1) + Sample(5, 9, 2) + Sample(4, 10, 3) +
+		Sample(3, 11, 4) + Sample(2, 12, 5) + Sample(1, 13, 6) + Sample(0, 14, 7);
 #undef Sample

-	return float4(uniforms.toRGB * ((colour - float3(0.0f, 0.5f, 0.5f)) * float3(1.0f, 2.0f / vert.colourAmplitude, 2.0f / vert.colourAmplitude)), 1.0f);
+	return float4(uniforms.toRGB * colour, 1.0f);
+}
+
+fragment float4 filterCompositeFragment(SourceInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]], constant Uniforms &uniforms [[buffer(0)]]) {
+	// TODO.
+	return float4(1.0);
 }
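In outline, filterSVideoFragment performs quadrature demodulation: each of the 15 taps multiplies its re-centred chroma sample by the cosine and sine of the subcarrier phase at that tap, offset from vert.colourPhase by the tap distance times radiansPerPixel, and the symmetric FIR coefficients then low-pass the products into a luminance channel plus two colour-difference channels for uniforms.toRGB. A CPU-side sketch of the same tap pattern, assuming the 15-tap filter is stored as 8 symmetric coefficients with index 7 as the centre; all names here are illustrative, not from the source:

#include <array>
#include <cmath>

// Illustrative sketch only: mirrors the tap pattern of filterSVideoFragment,
// where each firCoefficients entry is a float3 whose .x weights luminance and
// whose .y/.z weight the demodulated chroma. Not part of the commit.
struct FilteredColour { float luma, chroma1, chroma2; };

FilteredColour filter_svideo(
	const std::array<float, 15> &luminance,		// composition .r at offsets -7..+7
	const std::array<float, 15> &chroma,		// composition .g - 0.5 at offsets -7..+7
	const std::array<float, 8> &lumaCoefficients,	// firCoefficients[c].x; [7] is the centre tap
	const std::array<float, 8> &chromaCoefficients,	// firCoefficients[c].y == .z
	float colourPhase,				// subcarrier phase at the centre tap
	float radiansPerPixel) {
	FilteredColour result{};
	for(int tap = 0; tap < 15; ++tap) {
		const int c = tap < 8 ? tap : 14 - tap;	// symmetric coefficient index
		const float phase = colourPhase + float(tap - 7) * radiansPerPixel;
		result.luma += lumaCoefficients[c] * luminance[tap];
		result.chroma1 += chromaCoefficients[c] * chroma[tap] * std::cos(phase);
		result.chroma2 += chromaCoefficients[c] * chroma[tap] * std::sin(phase);
	}
	return result;	// the shader maps this through uniforms.toRGB to reach RGB
}

filterCompositeFragment remains the stubbed TODO above, so only the S-Video path is wired to this scheme in this commit.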