1
0
mirror of https://github.com/TomHarte/CLK.git synced 2024-07-30 07:29:06 +00:00

Merge pull request #458 from TomHarte/ApplePhase

Corrects NTSC Q phase
This commit is contained in:
Thomas Harte 2018-06-03 08:11:43 -04:00 committed by GitHub
commit 94359e9c75
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 141 additions and 157 deletions

View File

@ -69,7 +69,7 @@ template <class BusHandler> class MOS6560 {
speaker_(audio_generator_)
{
crt_->set_svideo_sampling_function(
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)"
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"vec2 yc = texture(texID, coordinate).rg / vec2(255.0);"
@ -125,10 +125,10 @@ template <class BusHandler> class MOS6560 {
19, 86, 123, 59,
};
const uint8_t ntsc_chrominances[16] = {
255, 255, 7, 71,
25, 86, 48, 112,
0, 119, 7, 71,
25, 86, 48, 112,
255, 255, 121, 57,
103, 42, 80, 16,
0, 9, 121, 57,
103, 42, 80, 16,
};
const uint8_t *chrominances;
Outputs::CRT::DisplayType display_type;

View File

@ -92,7 +92,7 @@ class AY38910: public ::Outputs::Speaker::SampleSource {
Concurrency::DeferringAsyncTaskQueue &task_queue_;
int selected_register_ = 0;
uint8_t registers_[16];
uint8_t registers_[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
uint8_t output_registers_[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
uint8_t port_inputs_[2];

View File

@ -124,9 +124,9 @@ void DiskII::decide_clocking_preference() {
// If in read mode, clocking is either:
//
// just-in-time, if drives are running or the shift register has any 1s in it or a flux event hasn't yet passed; or
// none, given that drives are not running, the shift register has already emptied and there's no flux about to fire.
// none, given that drives are not running, the shift register has already emptied and there's no flux about to be received.
if(!(inputs_ & ~input_flux)) {
clocking_preference_ = (!motor_is_enabled_ && !shift_register_ && !(inputs_&input_flux)) ? ClockingHint::Preference::None : ClockingHint::Preference::JustInTime;
clocking_preference_ = (!motor_is_enabled_ && !shift_register_ && (inputs_&input_flux)) ? ClockingHint::Preference::None : ClockingHint::Preference::JustInTime;
}
// If in writing mode, clocking is real time.

View File

@ -10,83 +10,27 @@
using namespace AppleII::Video;
namespace {
// File-scope helper whose only purpose is to run VideoBase::setup_tables()
// exactly once during static initialisation, via construction of the
// `throwaway` instance declared immediately below.
struct ScaledByteFiller {
ScaledByteFiller() {
VideoBase::setup_tables();
}
} throwaway;
}
VideoBase::VideoBase() :
crt_(new Outputs::CRT::CRT(455, 1, Outputs::CRT::DisplayType::NTSC60, 1)) {
crt_(new Outputs::CRT::CRT(910, 1, Outputs::CRT::DisplayType::NTSC60, 1)) {
// Set a composite sampling function that assumes 1bpp input, and uses just 7 bits per byte.
// Set a composite sampling function that assumes one byte per pixel input, and
// accepts any non-zero value as being fully on, zero being fully off.
crt_->set_composite_sampling_function(
"float composite_sample(usampler2D sampler, vec2 coordinate, vec2 icoordinate, float phase, float amplitude)"
"{"
"uint texValue = texture(sampler, coordinate).r;"
"texValue >>= int(icoordinate.x) % 7;"
"return float(texValue & 1u);"
"return texture(sampler, coordinate).r;"
"}");
crt_->set_integer_coordinate_multiplier(7.0f);
// Show only the centre 75% of the TV frame.
crt_->set_video_signal(Outputs::CRT::VideoSignal::Composite);
crt_->set_visible_area(Outputs::CRT::Rect(0.115f, 0.117f, 0.77f, 0.77f));
crt_->set_immediate_default_phase(0.0f);
}
// @returns a non-owning pointer to the CRT this video feed drives;
// ownership remains with this VideoBase (held via the crt_ unique_ptr).
Outputs::CRT::CRT *VideoBase::get_crt() {
return crt_.get();
}
uint16_t VideoBase::scaled_byte[256];
uint16_t VideoBase::low_resolution_patterns[2][16];
// Populates the static lookup tables used at output time:
//  * scaled_byte[256] — maps each source byte to a packed 14-bit output
//    pattern in which each of the low seven source bits covers roughly two
//    output samples; entries 128–255 repeat entries 0–127 delayed by one bit.
//  * low_resolution_patterns[2][16] — for each 4-bit low-resolution colour,
//    the repeating two-column pattern, split into left/right column halves.
void VideoBase::setup_tables() {
	for(int c = 0; c < 128; ++c) {
		// NOTE(review): the mask progression (0x0003, 0x000c, 0x0030, 0x0140, …)
		// is deliberately non-uniform — presumably modelling the Apple II's
		// half-sample colour shift in the upper bits; confirm against hardware docs.
		const uint16_t value =
			((c & 0x01) ? 0x0003 : 0x0000) |
			((c & 0x02) ? 0x000c : 0x0000) |
			((c & 0x04) ? 0x0030 : 0x0000) |
			((c & 0x08) ? 0x0140 : 0x0000) |
			((c & 0x10) ? 0x0600 : 0x0000) |
			((c & 0x20) ? 0x1800 : 0x0000) |
			((c & 0x40) ? 0x6000 : 0x0000);

		// Store bytewise in explicit little-endian order; the table is also
		// read back bytewise below, keeping behaviour host-endianness-independent.
		uint8_t *const table_entry = reinterpret_cast<uint8_t *>(&scaled_byte[c]);
		table_entry[0] = static_cast<uint8_t>(value & 0xff);
		table_entry[1] = static_cast<uint8_t>(value >> 8);
	}

	// Entries with the top bit set are the corresponding low-half entry
	// shifted left by one bit, i.e. the same pattern delayed by one sample.
	for(int c = 128; c < 256; ++c) {
		uint8_t *const source_table_entry = reinterpret_cast<uint8_t *>(&scaled_byte[c & 0x7f]);
		uint8_t *const destination_table_entry = reinterpret_cast<uint8_t *>(&scaled_byte[c]);

		destination_table_entry[0] = static_cast<uint8_t>(source_table_entry[0] << 1);
		destination_table_entry[1] = static_cast<uint8_t>((source_table_entry[1] << 1) | (source_table_entry[0] >> 6));
	}

	for(int c = 0; c < 16; ++c) {
		// Produce the whole 28-bit pattern that would cover two columns:
		// the nibble is bit-reversed, then repeated seven times.
		const int reversed_c = ((c&0x1) ? 0x8 : 0x0) | ((c&0x2) ? 0x4 : 0x0) | ((c&0x4) ? 0x2 : 0x0) | ((c&0x8) ? 0x1 : 0x0);
		int pattern = 0;
		for(int l = 0; l < 7; ++l) {
			pattern <<= 4;
			pattern |= reversed_c;
		}

		// Pack that 28-bit pattern into the appropriate look-up tables.
		uint8_t *const left_entry = reinterpret_cast<uint8_t *>(&low_resolution_patterns[0][c]);
		uint8_t *const right_entry = reinterpret_cast<uint8_t *>(&low_resolution_patterns[1][c]);
		left_entry[0] = static_cast<uint8_t>(pattern);
		left_entry[1] = static_cast<uint8_t>(pattern >> 7);
		right_entry[0] = static_cast<uint8_t>(pattern >> 14);
		right_entry[1] = static_cast<uint8_t>(pattern >> 21);
	}
}
// Marks the display as being in graphics (rather than text) mode; the flag is
// consulted per line when deciding how to render (see use_graphics_mode_ usage).
void VideoBase::set_graphics_mode() {
use_graphics_mode_ = true;
}
@ -113,19 +57,4 @@ void VideoBase::set_high_resolution() {
// Installs the character ROM, pre-processing it for output.
//
// The ROM stores each glyph row in reverse bit order; bits 0–6 of every byte
// are flipped here, once, up front (bit 7 is preserved untouched) so that the
// same scaling table used for high-resolution graphics can be applied directly.
void VideoBase::set_character_rom(const std::vector<uint8_t> &character_rom) {
	character_rom_ = character_rom;

	for(auto &byte : character_rom_) {
		uint8_t reversed = byte & 0x80;	// the top bit passes through unchanged
		for(int bit = 0; bit < 7; ++bit) {
			if(byte & (1 << bit)) {
				reversed |= static_cast<uint8_t>(0x40 >> bit);
			}
		}
		byte = reversed;
	}
}

View File

@ -27,7 +27,6 @@ class BusHandler {
class VideoBase {
public:
VideoBase();
static void setup_tables();
/// @returns The CRT this video feed is feeding.
Outputs::CRT::CRT *get_crt();
@ -46,9 +45,12 @@ class VideoBase {
protected:
std::unique_ptr<Outputs::CRT::CRT> crt_;
uint8_t *pixel_pointer_ = nullptr;
int pixel_pointer_column_ = 0;
bool pixels_are_high_density_ = false;
int video_page_ = 0;
int row_ = 0, column_ = 0, flash_ = 0;
uint16_t *pixel_pointer_ = nullptr;
std::vector<uint8_t> character_rom_;
enum class GraphicsMode {
@ -58,10 +60,7 @@ class VideoBase {
} graphics_mode_ = GraphicsMode::LowRes;
bool use_graphics_mode_ = false;
bool mixed_mode_ = false;
uint16_t graphics_carry_ = 0;
static uint16_t scaled_byte[256];
static uint16_t low_resolution_patterns[2][16];
uint8_t graphics_carry_ = 0;
};
template <class BusHandler> class Video: public VideoBase {
@ -91,7 +90,7 @@ template <class BusHandler> class Video: public VideoBase {
const int cycles_this_line = std::min(65 - column_, int_cycles);
if(row_ >= first_sync_line && row_ < first_sync_line + 3) {
crt_->output_sync(static_cast<unsigned int>(cycles_this_line) * 7);
crt_->output_sync(static_cast<unsigned int>(cycles_this_line) * 14);
} else {
const int ending_column = column_ + cycles_this_line;
const GraphicsMode line_mode = use_graphics_mode_ ? graphics_mode_ : GraphicsMode::Text;
@ -101,8 +100,13 @@ template <class BusHandler> class Video: public VideoBase {
// of line 192.
if(column_ < 40) {
if(row_ < 192) {
if(!column_) {
pixel_pointer_ = reinterpret_cast<uint16_t *>(crt_->allocate_write_area(80, 2));
GraphicsMode pixel_mode = (!mixed_mode_ || row_ < 160) ? line_mode : GraphicsMode::Text;
bool requires_high_density = pixel_mode != GraphicsMode::Text;
if(!column_ || requires_high_density != pixels_are_high_density_) {
if(column_) output_data_to_column(column_);
pixel_pointer_ = crt_->allocate_write_area(561);
pixel_pointer_column_ = column_;
pixels_are_high_density_ = requires_high_density;
graphics_carry_ = 0;
}
@ -111,10 +115,7 @@ template <class BusHandler> class Video: public VideoBase {
const int pixel_row = row_ & 7;
const uint16_t row_address = static_cast<uint16_t>((character_row >> 3) * 40 + ((character_row&7) << 7));
const uint16_t text_address = static_cast<uint16_t>(((video_page_+1) * 0x400) + row_address);
const uint16_t graphics_address = static_cast<uint16_t>(((video_page_+1) * 0x2000) + row_address + ((pixel_row&7) << 10));
const int row_shift = (row_&4);
GraphicsMode pixel_mode = (!mixed_mode_ || row_ < 160) ? line_mode : GraphicsMode::Text;
switch(pixel_mode) {
case GraphicsMode::Text: {
const uint8_t inverses[] = {
@ -128,35 +129,82 @@ template <class BusHandler> class Video: public VideoBase {
const std::size_t character_address = static_cast<std::size_t>(((character & 0x3f) << 3) + pixel_row);
const uint8_t character_pattern = character_rom_[character_address] ^ inverses[character >> 6];
pixel_pointer_[c] = scaled_byte[character_pattern & 0x7f];
// The character ROM is output MSB to LSB rather than LSB to MSB.
pixel_pointer_[0] = character_pattern & 0x40;
pixel_pointer_[1] = character_pattern & 0x20;
pixel_pointer_[2] = character_pattern & 0x10;
pixel_pointer_[3] = character_pattern & 0x08;
pixel_pointer_[4] = character_pattern & 0x04;
pixel_pointer_[5] = character_pattern & 0x02;
pixel_pointer_[6] = character_pattern & 0x01;
graphics_carry_ = character_pattern & 0x40;
pixel_pointer_ += 7;
}
} break;
case GraphicsMode::LowRes:
case GraphicsMode::LowRes: {
const int row_shift = (row_&4);
// TODO: decompose into two loops, possibly.
for(int c = column_; c < pixel_end; ++c) {
const uint8_t character = bus_handler_.perform_read(static_cast<uint16_t>(text_address + c));
pixel_pointer_[c] = low_resolution_patterns[c&1][(character >> row_shift)&0xf];
}
break;
const uint8_t nibble = (bus_handler_.perform_read(static_cast<uint16_t>(text_address + c)) >> row_shift) & 0x0f;
case GraphicsMode::HighRes:
// Low-resolution graphics mode shifts the colour code on a loop, but has to account for whether this
// 14-sample output window is starting at the beginning of a colour cycle or halfway through.
if(c&1) {
pixel_pointer_[0] = pixel_pointer_[4] = pixel_pointer_[8] = pixel_pointer_[12] = nibble & 4;
pixel_pointer_[1] = pixel_pointer_[5] = pixel_pointer_[9] = pixel_pointer_[13] = nibble & 8;
pixel_pointer_[2] = pixel_pointer_[6] = pixel_pointer_[10] = nibble & 1;
pixel_pointer_[3] = pixel_pointer_[7] = pixel_pointer_[11] = nibble & 2;
graphics_carry_ = nibble & 8;
} else {
pixel_pointer_[0] = pixel_pointer_[4] = pixel_pointer_[8] = pixel_pointer_[12] = nibble & 1;
pixel_pointer_[1] = pixel_pointer_[5] = pixel_pointer_[9] = pixel_pointer_[13] = nibble & 2;
pixel_pointer_[2] = pixel_pointer_[6] = pixel_pointer_[10] = nibble & 4;
pixel_pointer_[3] = pixel_pointer_[7] = pixel_pointer_[11] = nibble & 8;
graphics_carry_ = nibble & 2;
}
pixel_pointer_ += 14;
}
} break;
case GraphicsMode::HighRes: {
const uint16_t graphics_address = static_cast<uint16_t>(((video_page_+1) * 0x2000) + row_address + ((pixel_row&7) << 10));
for(int c = column_; c < pixel_end; ++c) {
const uint8_t graphic = bus_handler_.perform_read(static_cast<uint16_t>(graphics_address + c));
pixel_pointer_[c] = scaled_byte[graphic];
// High resolution graphics shift out LSB to MSB, optionally with a delay of half a pixel.
// If there is a delay, the previous output level is held to bridge the gap.
if(graphic & 0x80) {
reinterpret_cast<uint8_t *>(&pixel_pointer_[c])[0] |= graphics_carry_;
pixel_pointer_[0] = graphics_carry_;
pixel_pointer_[1] = pixel_pointer_[2] = graphic & 0x01;
pixel_pointer_[3] = pixel_pointer_[4] = graphic & 0x02;
pixel_pointer_[5] = pixel_pointer_[6] = graphic & 0x04;
pixel_pointer_[7] = pixel_pointer_[8] = graphic & 0x08;
pixel_pointer_[9] = pixel_pointer_[10] = graphic & 0x10;
pixel_pointer_[11] = pixel_pointer_[12] = graphic & 0x20;
pixel_pointer_[13] = graphic & 0x40;
} else {
pixel_pointer_[0] = pixel_pointer_[1] = graphic & 0x01;
pixel_pointer_[2] = pixel_pointer_[3] = graphic & 0x02;
pixel_pointer_[4] = pixel_pointer_[5] = graphic & 0x04;
pixel_pointer_[6] = pixel_pointer_[7] = graphic & 0x08;
pixel_pointer_[8] = pixel_pointer_[9] = graphic & 0x10;
pixel_pointer_[10] = pixel_pointer_[11] = graphic & 0x20;
pixel_pointer_[12] = pixel_pointer_[13] = graphic & 0x40;
}
graphics_carry_ = (graphic >> 6) & 1;
graphics_carry_ = graphic & 0x40;
pixel_pointer_ += 14;
}
break;
} break;
}
if(ending_column >= 40) {
crt_->output_data(280, 80);
output_data_to_column(40);
}
} else {
if(ending_column >= 40) {
crt_->output_blank(280);
crt_->output_blank(560);
}
}
}
@ -169,13 +217,13 @@ template <class BusHandler> class Video: public VideoBase {
const int first_blank_start = std::max(40, column_);
const int first_blank_end = std::min(first_sync_column, ending_column);
if(first_blank_end > first_blank_start) {
crt_->output_blank(static_cast<unsigned int>(first_blank_end - first_blank_start) * 7);
crt_->output_blank(static_cast<unsigned int>(first_blank_end - first_blank_start) * 14);
}
const int sync_start = std::max(first_sync_column, column_);
const int sync_end = std::min(first_sync_column + 4, ending_column);
if(sync_end > sync_start) {
crt_->output_sync(static_cast<unsigned int>(sync_end - sync_start) * 7);
crt_->output_sync(static_cast<unsigned int>(sync_end - sync_start) * 14);
}
int second_blank_start;
@ -183,7 +231,7 @@ template <class BusHandler> class Video: public VideoBase {
const int colour_burst_start = std::max(first_sync_column + 4, column_);
const int colour_burst_end = std::min(first_sync_column + 7, ending_column);
if(colour_burst_end > colour_burst_start) {
crt_->output_default_colour_burst(static_cast<unsigned int>(colour_burst_end - colour_burst_start) * 7);
crt_->output_default_colour_burst(static_cast<unsigned int>(colour_burst_end - colour_burst_start) * 14);
}
second_blank_start = std::max(first_sync_column + 7, column_);
@ -192,7 +240,7 @@ template <class BusHandler> class Video: public VideoBase {
}
if(ending_column > second_blank_start) {
crt_->output_blank(static_cast<unsigned int>(ending_column - second_blank_start) * 7);
crt_->output_blank(static_cast<unsigned int>(ending_column - second_blank_start) * 14);
}
}
@ -204,7 +252,7 @@ template <class BusHandler> class Video: public VideoBase {
// Add an extra half a colour cycle of blank; this isn't counted in the run_for
// count explicitly but is promised.
crt_->output_blank(1);
crt_->output_blank(2);
}
}
}
@ -261,6 +309,11 @@ template <class BusHandler> class Video: public VideoBase {
const int flash_length = 8406;
BusHandler &bus_handler_;
// Flushes the pixel data accumulated since pixel_pointer_column_ up to (but
// not including) @c column, then surrenders the write-area pointer.
// Each column spans 14 output samples; the source buffer holds either 14 or
// 7 units per column depending on density — presumably output_data's two
// arguments are (output length, source data length); matches other call sites.
void output_data_to_column(int column) {
int length = column - pixel_pointer_column_;
crt_->output_data(static_cast<unsigned int>(length*14), static_cast<unsigned int>(length * (pixels_are_high_density_ ? 14 : 7)));
pixel_pointer_ = nullptr;
}
};
}

View File

@ -124,19 +124,19 @@ void TIA::set_output_mode(Atari2600::TIA::OutputMode output_mode) {
if(output_mode == OutputMode::NTSC) {
crt_->set_svideo_sampling_function(
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)"
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"uint c = texture(texID, coordinate).r;"
"uint y = c & 14u;"
"uint iPhase = (c >> 4);"
"float phaseOffset = 6.283185308 * float(iPhase) / 13.0 + 5.074880441076923;"
"return vec2(float(y) / 14.0, step(1, iPhase) * cos(phase + phaseOffset));"
"return vec2(float(y) / 14.0, step(1, iPhase) * cos(phase - phaseOffset));"
"}");
display_type = Outputs::CRT::DisplayType::NTSC60;
} else {
crt_->set_svideo_sampling_function(
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)"
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"uint c = texture(texID, coordinate).r;"
"uint y = c & 14u;"

View File

@ -16,25 +16,20 @@ namespace {
The number of bytes of PCM data to allocate at once; if/when more are required,
the class will simply allocate another batch.
*/
const std::size_t StandardAllocationSize = 40;
/// The amount of time a byte takes to output.
const std::size_t HalfCyclesPerByte = 8;
const std::size_t StandardAllocationSize = 320;
}
Video::Video() :
crt_(new Outputs::CRT::CRT(207 * 2, 1, Outputs::CRT::DisplayType::PAL50, 1)) {
// Set a composite sampling function that assumes 1bpp input.
// Set a composite sampling function that assumes two-level input; either a byte is 0, which is black,
// or it is non-zero, which is white.
crt_->set_composite_sampling_function(
"float composite_sample(usampler2D sampler, vec2 coordinate, vec2 icoordinate, float phase, float amplitude)"
"{"
"uint texValue = texture(sampler, coordinate).r;"
"texValue <<= int(icoordinate.x) & 7;"
"return float(texValue & 128u);"
"return texture(sampler, coordinate).r;"
"}");
crt_->set_integer_coordinate_multiplier(8.0f);
// Show only the centre 80% of the TV frame.
crt_->set_video_signal(Outputs::CRT::VideoSignal::Composite);
@ -43,7 +38,7 @@ Video::Video() :
void Video::run_for(const HalfCycles half_cycles) {
// Just keep a running total of the amount of time that remains owed to the CRT.
cycles_since_update_ += static_cast<unsigned int>(half_cycles.as_int());
time_since_update_ += half_cycles;
}
void Video::flush() {
@ -53,29 +48,29 @@ void Video::flush() {
void Video::flush(bool next_sync) {
if(sync_) {
// If in sync, that takes priority. Output the proper amount of sync.
crt_->output_sync(cycles_since_update_);
crt_->output_sync(static_cast<unsigned int>(time_since_update_.as_int()));
} else {
// If not presently in sync, then...
if(line_data_) {
// If there is output data queued, output it either if it's being interrupted by
// sync, or if we're past its end anyway. Otherwise let it be.
unsigned int data_length = static_cast<unsigned int>(line_data_pointer_ - line_data_) * HalfCyclesPerByte;
if(data_length < cycles_since_update_ || next_sync) {
unsigned int output_length = std::min(data_length, cycles_since_update_);
crt_->output_data(output_length, output_length / HalfCyclesPerByte);
int data_length = static_cast<int>(line_data_pointer_ - line_data_);
if(data_length < time_since_update_.as_int() || next_sync) {
auto output_length = std::min(data_length, time_since_update_.as_int());
crt_->output_data(static_cast<unsigned int>(output_length), static_cast<unsigned int>(output_length));
line_data_pointer_ = line_data_ = nullptr;
cycles_since_update_ -= output_length;
time_since_update_ -= HalfCycles(output_length);
} else return;
}
// Any pending pixels being dealt with, pad with the white level.
uint8_t *colour_pointer = static_cast<uint8_t *>(crt_->allocate_write_area(1));
if(colour_pointer) *colour_pointer = 0xff;
crt_->output_level(cycles_since_update_);
crt_->output_level(static_cast<unsigned int>(time_since_update_.as_int()));
}
cycles_since_update_ = 0;
time_since_update_ = 0;
}
void Video::set_sync(bool sync) {
@ -101,14 +96,19 @@ void Video::output_byte(uint8_t byte) {
if(line_data_) {
// If the buffer is full, output it now and obtain a new one
if(line_data_pointer_ - line_data_ == StandardAllocationSize) {
crt_->output_data(StandardAllocationSize * HalfCyclesPerByte, StandardAllocationSize);
cycles_since_update_ -= StandardAllocationSize * HalfCyclesPerByte;
crt_->output_data(StandardAllocationSize, StandardAllocationSize);
time_since_update_ -= StandardAllocationSize;
line_data_pointer_ = line_data_ = crt_->allocate_write_area(StandardAllocationSize);
if(!line_data_) return;
}
line_data_pointer_[0] = byte;
line_data_pointer_ ++;
// Convert to one-byte-per-pixel where any non-zero value will act as white.
uint8_t mask = 0x80;
for(int c = 0; c < 8; c++) {
line_data_pointer_[c] = byte & mask;
mask >>= 1;
}
line_data_pointer_ += 8;
}
}

View File

@ -45,7 +45,7 @@ class Video {
bool sync_ = false;
uint8_t *line_data_ = nullptr;
uint8_t *line_data_pointer_ = nullptr;
unsigned int cycles_since_update_ = 0;
HalfCycles time_since_update_ = 0;
std::unique_ptr<Outputs::CRT::CRT> crt_;
void flush(bool next_sync);

View File

@ -157,7 +157,7 @@ template<bool is_zx81> class ConcreteMachine:
// The below emulates the ZonX AY expansion device.
if(is_zx81) {
if((address&0xef) == 0x0f) {
if((address&0xef) == 0xcf) {
value &= ay_read_data();
}
}

View File

@ -170,7 +170,9 @@ void CRT::advance_cycles(unsigned int number_of_cycles, bool hsync_requested, bo
// outside of the locked region
source_output_position_x1() = static_cast<uint16_t>(horizontal_flywheel_->get_current_output_position());
source_phase() = colour_burst_phase_;
source_amplitude() = colour_burst_amplitude_;
// TODO: determine what the PAL phase-shift machines actually do re: the swinging burst.
source_amplitude() = phase_alternates_ ? 128 - colour_burst_amplitude_ : 128 + colour_burst_amplitude_;
}
// decrement the number of cycles left to run for and increment the
@ -368,7 +370,7 @@ void CRT::output_colour_burst(unsigned int number_of_cycles, uint8_t phase, uint
scan.type = Scan::Type::ColourBurst;
scan.number_of_cycles = number_of_cycles;
scan.phase = phase;
scan.amplitude = amplitude;
scan.amplitude = amplitude >> 1;
output_scan(&scan);
}

View File

@ -332,10 +332,10 @@ class CRT {
output mode will be applied.
@param shader A GLSL fragment including a function with the signature
`vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)`
`vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)`
that evaluates to the s-video signal level, luminance as the first component and chrominance
as the second, as a function of a source buffer, sampling location and colour
carrier phase.
carrier phase; amplitude is supplied for its sign.
*/
inline void set_svideo_sampling_function(const std::string &shader) {
enqueue_openGL_function([shader, this] {

View File

@ -96,10 +96,10 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_shader(const std::s
// setup phaseAndAmplitudeVarying.x as colour burst subcarrier phase, in radians;
// setup phaseAndAmplitudeVarying.y as colour burst amplitude;
// setup phaseAndAmplitudeVarying.z as 1 / (colour burst amplitude), or 0.0 if amplitude is 0.0;
// setup phaseAndAmplitudeVarying.z as 1 / abs(colour burst amplitude), or 0.0 if amplitude is 0.0;
"phaseAndAmplitudeVarying.x = (extendedOutputPosition.x + (phaseTimeAndAmplitude.x / 64.0)) * 0.5 * 3.141592654;"
"phaseAndAmplitudeVarying.y = phaseTimeAndAmplitude.y / 255.0;"
"phaseAndAmplitudeVarying.z = (phaseAndAmplitudeVarying.y > 0.0) ? 1.0 / phaseAndAmplitudeVarying.y : 0.0;"
"phaseAndAmplitudeVarying.y = (phaseTimeAndAmplitude.y - 128) / 127.0;"
"phaseAndAmplitudeVarying.z = (abs(phaseAndAmplitudeVarying.y) > 0.05) ? 1.0 / abs(phaseAndAmplitudeVarying.y) : 0.0;"
// determine output position by scaling the output position according to the texture size
"vec2 eyePosition = 2.0*(extendedOutputPosition / outputTextureSize) - vec2(1.0);"
@ -134,8 +134,8 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_composite_source_sh
svideo_shader <<
"float composite_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"vec2 svideoColour = svideo_sample(texID, coordinate, iCoordinate, phase);"
"return mix(svideoColour.x, svideoColour.y, amplitude);"
"vec2 svideoColour = svideo_sample(texID, coordinate, iCoordinate, phase, amplitude);"
"return mix(svideoColour.x, svideoColour.y, abs(amplitude));"
"}";
} else {
fragment_shader <<
@ -145,7 +145,7 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_composite_source_sh
"{"
"vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));"
"vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;"
"vec2 quadrature = vec2(cos(phase), -sin(phase)) * amplitude;"
"vec2 quadrature = vec2(cos(phase), sin(phase)) * vec2(abs(amplitude), amplitude);"
"return dot(lumaChromaColour, vec3(1.0 - amplitude, quadrature));"
"}";
}
@ -178,11 +178,11 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_svideo_source_shade
fragment_shader
<< rgb_shader <<
"uniform mat3 rgbToLumaChroma;"
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase)"
"vec2 svideo_sample(usampler2D texID, vec2 coordinate, vec2 iCoordinate, float phase, float amplitude)"
"{"
"vec3 rgbColour = clamp(rgb_sample(texID, coordinate, iCoordinate), vec3(0.0), vec3(1.0));"
"vec3 lumaChromaColour = rgbToLumaChroma * rgbColour;"
"vec2 quadrature = vec2(cos(phase), -sin(phase));"
"vec2 quadrature = vec2(cos(phase), sin(phase)) * vec2(1.0, sign(amplitude));"
"return vec2(lumaChromaColour.x, 0.5 + dot(quadrature, lumaChromaColour.yz) * 0.5);"
"}";
}
@ -190,8 +190,8 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_svideo_source_shade
fragment_shader <<
"void main(void)"
"{"
"vec2 sample = svideo_sample(texID, inputPositionsVarying[5], iInputPositionVarying, phaseAndAmplitudeVarying.x);"
"vec2 quadrature = vec2(cos(phaseAndAmplitudeVarying.x), -sin(phaseAndAmplitudeVarying.x)) * 0.5 * phaseAndAmplitudeVarying.z;"
"vec2 sample = svideo_sample(texID, inputPositionsVarying[5], iInputPositionVarying, phaseAndAmplitudeVarying.x, phaseAndAmplitudeVarying.y);"
"vec2 quadrature = vec2(cos(phaseAndAmplitudeVarying.x), sin(phaseAndAmplitudeVarying.x)) * vec2(1.0, sign(phaseAndAmplitudeVarying.y)) * 0.5 * phaseAndAmplitudeVarying.z;"
"fragColour = vec3(sample.x, vec2(0.5) + (sample.y * quadrature));"
"}";
@ -244,11 +244,11 @@ std::unique_ptr<IntermediateShader> IntermediateShader::make_chroma_luma_separat
// define chroma to be whatever was here, minus luma
"float chrominance = 0.5 * (samples.z - luminance) * phaseAndAmplitudeVarying.z;"
"luminance /= (1.0 - phaseAndAmplitudeVarying.y);"
"luminance /= (1.0 - abs(phaseAndAmplitudeVarying.y));"
// split choma colours here, as the most direct place, writing out
// RGB = (luma, chroma.x, chroma.y)
"vec2 quadrature = vec2(cos(phaseAndAmplitudeVarying.x), -sin(phaseAndAmplitudeVarying.x));"
"vec2 quadrature = vec2(cos(phaseAndAmplitudeVarying.x), sin(phaseAndAmplitudeVarying.x)) * vec2(1.0, sign(phaseAndAmplitudeVarying.y));"
"fragColour = vec3(luminance, vec2(0.5) + (chrominance * quadrature));"
"}",false, false);
}