Mirror of https://github.com/TomHarte/CLK.git

Corrects use of composition buffer.

Something is still very obviously amiss in colour processing somewhere down the line, but the correct forms are once again visibly in evidence.
Thomas Harte, 2020-08-20 20:21:28 -04:00
commit ad6fb85fda (parent 5dc39a5d24)
2 changed files with 9 additions and 5 deletions


@@ -426,7 +426,7 @@ using BufferingScanTarget = Outputs::Display::BufferingScanTarget;
RangePerform(start, end, (_isUsingCompositionPipeline ? NumBufferedLines : NumBufferedScans), OutputStrips);
#undef OutputStrips
// Complete encoding and return.
// Complete encoding.
[encoder endEncoding];
}
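
For context, RangePerform isn't shown in this hunk but, from its use here and in the scan output below, it evidently walks a range of ring-buffered lines or scans (whichever the composition pipeline calls for) and invokes the supplied output macro once per contiguous span, splitting the range in two when it wraps past the end of the buffer. A minimal C++ sketch of that splitting logic, under those assumptions and with hypothetical names:

#include <cstddef>

// Assumed behaviour of RangePerform: apply action(start, count) across the
// ring-buffer range [start, end), issuing two calls if the range wraps.
template <typename Action>
void range_perform(size_t start, size_t end, size_t bufferSize, Action &&action) {
	if(start == end) return;	// Empty range; nothing to output.
	if(start < end) {
		action(start, end - start);	// A single contiguous span.
	} else {
		action(start, bufferSize - start);	// The span up to the end of the buffer…
		if(end) action(0, end);	// …then the wrapped portion from index 0.
	}
}

Keeping the wrap handling in one helper lets the same logic serve both the per-line strip output above and the per-scan output in the next hunk.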
@@ -489,10 +489,12 @@ using BufferingScanTarget = Outputs::Display::BufferingScanTarget;
id<MTLRenderCommandEncoder> encoder = [commandBuffer renderCommandEncoderWithDescriptor:_compositionRenderPass];
[encoder setRenderPipelineState:_composePipeline];
[encoder setFragmentTexture:_writeAreaTexture atIndex:0];
[encoder setVertexBuffer:_scansBuffer offset:0 atIndex:0];
[encoder setVertexBuffer:_uniformsBuffer offset:0 atIndex:1];
[encoder setVertexTexture:_compositionTexture atIndex:0];
[encoder setFragmentBuffer:_uniformsBuffer offset:0 atIndex:0];
[encoder setFragmentTexture:_writeAreaTexture atIndex:0];
#define OutputScans(start, size) [encoder drawPrimitives:MTLPrimitiveTypeLine vertexStart:0 vertexCount:2 instanceCount:size baseInstance:start]
RangePerform(outputArea.start.scan, outputArea.end.scan, NumBufferedScans, OutputScans);
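
Each buffered scan is drawn as a single two-vertex line primitive: instanceCount selects how many scans to emit and baseInstance selects where in the ring buffer to start, so the scanToComposition vertex function in the shader hunk below receives the scan via its instance ID and the end point via its vertex ID. A CPU-side C++ sketch of that expansion, with the structs trimmed to the fields visible in the shader and everything else assumed:

#include <cstdint>
#include <vector>

// Hypothetical, trimmed mirror of the GPU-side scan record.
struct EndPoint { float cyclesSinceRetrace, dataOffset, compositeAngle; };
struct Scan { EndPoint endPoints[2]; float line, dataY, compositeAmplitude; };

// What drawPrimitives:MTLPrimitiveTypeLine with vertexCount:2 amounts to: for each
// instance in [baseInstance, baseInstance + instanceCount), the vertex function
// runs twice, once per line end point.
template <typename VertexFn>
void emulate_instanced_line_draw(const std::vector<Scan> &scans,
		size_t baseInstance, size_t instanceCount, VertexFn &&vertex) {
	for(size_t instanceID = baseInstance; instanceID < baseInstance + instanceCount; ++instanceID) {
		for(uint32_t vertexID = 0; vertexID < 2; ++vertexID) {
			vertex(scans[instanceID], vertexID);	// One end of one scan's line.
		}
	}
}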


@@ -171,13 +171,15 @@ vertex SourceInterpolator scanToComposition( constant Uniforms &uniforms [[buffe
// Populate result as if direct texture access were available.
result.position.x = mix(scans[instanceID].endPoints[0].cyclesSinceRetrace, scans[instanceID].endPoints[1].cyclesSinceRetrace, float(vertexID));
result.position.y = scans[instanceID].line;
result.position.wz = float2(0.0, 1.0);
result.position.zw = float2(0.0, 1.0);
result.textureCoordinates.x = mix(scans[instanceID].endPoints[0].dataOffset, scans[instanceID].endPoints[1].dataOffset, float(vertexID));
result.textureCoordinates.y = scans[instanceID].dataY;
result.colourPhase = mix(scans[instanceID].endPoints[0].compositeAngle, scans[instanceID].endPoints[1].compositeAngle, float(vertexID)) / 32.0;
result.colourAmplitude = scans[instanceID].compositeAmplitude;
// Map position into eye space, allowing for target texture dimensions.
// TODO: is this really necessary? Is there nothing like coord::pixel that applies here?
result.position.xy = ((result.position.xy + float2(0.5)) / float2(texture.get_width(), texture.get_height())) * float2(2.0) - float2(1.0);
result.position.xy = ((result.position.xy + float2(0.5)) / float2(texture.get_width(), texture.get_height())) * float2(2.0, -2.0) + float2(-1.0, 1.0);
return result;
}
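
The last line is the substantive fix here: positions start out in texel coordinates, so they're nudged to texel centres, normalised by the composition texture's dimensions and then mapped to Metal clip space, with the Y scale negated because Metal's normalised device coordinates put +1 at the top while texel rows grow downwards. A quick, self-contained C++ check of that arithmetic (the 2048×1024 size is picked purely for illustration):

#include <cstdio>

struct Float2 { float x, y; };

// Mirrors the shader's mapping: centre the texel, normalise, then scale and
// offset into [-1, 1] with the Y axis flipped.
Float2 texel_to_ndc(Float2 position, float width, float height) {
	return Float2{
		((position.x + 0.5f) / width) * 2.0f - 1.0f,
		((position.y + 0.5f) / height) * -2.0f + 1.0f
	};
}

int main() {
	const Float2 first = texel_to_ndc({0.0f, 0.0f}, 2048.0f, 1024.0f);
	const Float2 last = texel_to_ndc({2047.0f, 1023.0f}, 2048.0f, 1024.0f);
	printf("first texel -> (%.4f, %.4f)\n", first.x, first.y);	// ≈(-1.0, +1.0): top left.
	printf("last texel  -> (%.4f, %.4f)\n", last.x, last.y);	// ≈(+1.0, -1.0): bottom right.
}

The earlier .wz/.zw swap is the other half of the same correction: the old order left w at 0, which would break the perspective divide, whereas the new order sets z = 0 and w = 1 so the position passes through unchanged.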
@@ -302,6 +304,6 @@ fragment float4 copyFragment(CopyInterpolator vert [[stage_in]], texture2d<float
return texture.sample(standardSampler, vert.textureCoordinates);
}
fragment float4 clearFragment(CopyInterpolator vert [[stage_in]], texture2d<float> texture [[texture(0)]]) {
fragment float4 clearFragment() {
return float4(0.0, 0.0, 0.0, 0.64);
}
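
clearFragment now takes no inputs because all it does is return a constant. The 0.64 alpha only means anything in combination with whatever blend state the clear pass is drawn under; assuming a conventional source-over blend (an assumption, not something the diff confirms), drawing constant black at alpha 0.64 fades the existing contents to 36% per pass rather than wiping them, which is the sort of behaviour you'd want for phosphor-style persistence. A small C++ sketch of that effect:

#include <cstdio>

// Source-over blending of black at the given alpha:
// out = src.rgb * src.a + dst.rgb * (1 - src.a) = dst.rgb * (1 - src.a).
float blend_black_over(float destination, float clearAlpha) {
	return destination * (1.0f - clearAlpha);
}

int main() {
	float value = 1.0f;	// A fully lit destination texel.
	for(int pass = 1; pass <= 3; ++pass) {
		value = blend_black_over(value, 0.64f);
		printf("after pass %d: %.4f\n", pass, value);	// 0.3600, 0.1296, 0.0467
	}
}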