Merge pull request #2020 from oioitff/mpeg-support

Rewrite some core functions for sceMpeg with ffmpeg.
This commit is contained in:
Henrik Rydgård 2013-06-01 14:24:48 -07:00
commit 0e65c23346
10 changed files with 955 additions and 264 deletions

View file

@ -233,6 +233,7 @@
<ClCompile Include="HW\atrac3plus.cpp" />
<ClCompile Include="HW\MediaEngine.cpp" />
<ClCompile Include="HW\MemoryStick.cpp" />
<ClCompile Include="HW\MpegDemux.cpp" />
<ClCompile Include="HW\OMAConvert.cpp" />
<ClCompile Include="HW\SasAudio.cpp" />
<ClCompile Include="Loaders.cpp" />
@ -411,6 +412,7 @@
<ClInclude Include="Host.h" />
<ClInclude Include="HW\atrac3plus.h" />
<ClInclude Include="HW\MediaEngine.h" />
<ClInclude Include="HW\MpegDemux.h" />
<ClInclude Include="HW\OMAConvert.h" />
<ClInclude Include="HW\SasAudio.h" />
<ClInclude Include="HW\MemoryStick.h" />

View file

@ -430,6 +430,9 @@
<ClCompile Include="HW\atrac3plus.cpp">
<Filter>HW</Filter>
</ClCompile>
<ClCompile Include="HW\MpegDemux.cpp">
<Filter>HW</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="ELF\ElfReader.h">
@ -795,6 +798,9 @@
<ClInclude Include="HW\atrac3plus.h">
<Filter>HW</Filter>
</ClInclude>
<ClInclude Include="HW\MpegDemux.h">
<Filter>HW</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<None Include="CMakeLists.txt" />
@ -802,4 +808,4 @@
<None Include="..\android\jni\Android.mk" />
<None Include="GameLogNotes.txt" />
</ItemGroup>
</Project>
</Project>

View file

@ -67,7 +67,7 @@ static const int avcDecodeDelayMs = 5400; // Varies between 4700 and 600
static const int avcEmptyDelayMs = 320;
static const int mpegDecodeErrorDelayMs = 100;
static const int mpegTimestampPerSecond = 90000; // How many MPEG Timestamp units in a second.
//static const int videoTimestampStep = 3003; // Value based on pmfplayer (mpegTimestampPerSecond / 29.970 (fps)).
static const int videoTimestampStep = 3003; // Value based on pmfplayer (mpegTimestampPerSecond / 29.970 (fps)).
static const int audioTimestampStep = 4180; // For audio play at 44100 Hz (2048 samples / 44100 * mpegTimestampPerSecond == 4180)
//static const int audioFirstTimestamp = 89249; // The first MPEG audio AU has always this timestamp
static const int audioFirstTimestamp = 90000; // The first MPEG audio AU has always this timestamp
@ -183,7 +183,6 @@ struct MpegContext {
static u32 streamIdGen;
static bool isCurrentMpegAnalyzed;
static bool fakeMode;
static int actionPostPut;
static std::map<u32, MpegContext *> mpegMap;
static u32 lastMpegHandle = 0;
@ -211,7 +210,7 @@ static void InitRingbuffer(SceMpegRingBuffer *buf, int packets, int data, int si
buf->packets = packets;
buf->packetsRead = 0;
buf->packetsWritten = 0;
buf->packetsFree = 0; // set later
buf->packetsFree = 0;
buf->packetSize = 2048;
buf->data = data;
buf->callback_addr = callback_addr;
@ -269,13 +268,10 @@ void AnalyzeMpeg(u32 buffer_addr, MpegContext *ctx) {
ctx->endOfAudioReached = false;
ctx->endOfVideoReached = false;
if ((ctx->mpegStreamSize > 0) && !ctx->isAnalyzed) {
ctx->mediaengine->setFakeMode(fakeMode);
ctx->mediaengine->init(buffer_addr, ctx->mpegStreamSize, ctx->mpegOffset);
ctx->mediaengine->setVideoDim(ctx->avc.avcDetailFrameWidth, ctx->avc.avcDetailFrameHeight);
// mysterious?
//meChannel = new PacketChannel();
//meChannel.write(buffer_addr, mpegOffset);
if (ctx->mediaengine && (ctx->mpegStreamSize > 0) && !ctx->isAnalyzed) {
// init mediaEngine
ctx->mediaengine->loadStream(Memory::GetPointer(buffer_addr), ctx->mpegOffset, ctx->mpegOffset + ctx->mpegStreamSize);
ctx->mediaengine->setVideoDim();
}
// When used with scePsmf, some applications attempt to use sceMpegQueryStreamOffset
// and sceMpegQueryStreamSize, which forces a packet overwrite in the Media Engine and in
@ -299,9 +295,9 @@ private:
};
void __MpegInit(bool useMediaEngine_) {
lastMpegHandle = 0;
streamIdGen = 1;
fakeMode = !useMediaEngine_;
isCurrentMpegAnalyzed = false;
actionPostPut = __KernelRegisterActionType(PostPutAction::Create);
}
@ -309,7 +305,6 @@ void __MpegInit(bool useMediaEngine_) {
void __MpegDoState(PointerWrap &p) {
p.Do(lastMpegHandle);
p.Do(streamIdGen);
p.Do(fakeMode);
p.Do(isCurrentMpegAnalyzed);
p.Do(actionPostPut);
__KernelRestoreActionType(actionPostPut, PostPutAction::Create);
@ -369,14 +364,14 @@ u32 sceMpegCreate(u32 mpegAddr, u32 dataPtr, u32 size, u32 ringbufferAddr, u32 f
SceMpegRingBuffer ringbuffer;
if(ringbufferAddr != 0){
Memory::ReadStruct(ringbufferAddr, &ringbuffer);
if (ringbuffer.packetSize == 0) {
ringbuffer.packetsFree = 0;
} else {
ringbuffer.packetsFree = (ringbuffer.dataUpperBound - ringbuffer.data) / ringbuffer.packetSize;
}
ringbuffer.mpeg = mpegAddr;
Memory::WriteStruct(ringbufferAddr, &ringbuffer);
Memory::ReadStruct(ringbufferAddr, &ringbuffer);
if (ringbuffer.packetSize == 0) {
ringbuffer.packetsFree = 0;
} else {
ringbuffer.packetsFree = (ringbuffer.dataUpperBound - ringbuffer.data) / ringbuffer.packetSize;
}
ringbuffer.mpeg = mpegAddr;
Memory::WriteStruct(ringbufferAddr, &ringbuffer);
}
// Generate, and write mpeg handle into mpeg data, for some reason
@ -496,7 +491,7 @@ u32 sceMpegQueryStreamSize(u32 bufferAddr, u32 sizeAddr)
DEBUG_LOG(HLE, "sceMpegQueryStreamSize(%08x, %08x)", bufferAddr, sizeAddr);
MpegContext ctx;
ctx.mediaengine = new MediaEngine();
ctx.mediaengine = 0;
AnalyzeMpeg(bufferAddr, &ctx);
@ -635,62 +630,16 @@ u32 sceMpegAvcDecode(u32 mpeg, u32 auAddr, u32 frameWidth, u32 bufferAddr, u32 i
u32 init = Memory::Read_U32(initAddr);
DEBUG_LOG(HLE, "*buffer = %08x, *init = %08x", buffer, init);
const int width = std::min((int)frameWidth, 480);
const int height = ctx->avc.avcDetailFrameHeight;
int packetsInRingBuffer = ringbuffer.packets - ringbuffer.packetsFree;
int processedPackets = ringbuffer.packetsRead - packetsInRingBuffer;
int processedSize = processedPackets * ringbuffer.packetSize;
int packetsConsumed = 3;
if (ctx->mpegStreamSize > 0 && ctx->mpegLastTimestamp > 0) {
// Try a better approximation of the packets consumed based on the timestamp
int processedSizeBasedOnTimestamp = (int) ((((float) avcAu.pts) / ctx->mpegLastTimestamp) * ctx->mpegStreamSize);
if (processedSizeBasedOnTimestamp < processedSize) {
packetsConsumed = 0;
} else {
packetsConsumed = (processedSizeBasedOnTimestamp - processedSize) / ringbuffer.packetSize;
if (packetsConsumed > 10) {
packetsConsumed = 10;
}
}
DEBUG_LOG(HLE, "sceMpegAvcDecode consumed %d %d/%d %d", processedSizeBasedOnTimestamp, processedSize, ctx->mpegStreamSize, packetsConsumed);
}
if (ctx->mediaengine->stepVideo()) {
ctx->mediaengine->writeVideoImage(buffer, frameWidth, ctx->videoPixelMode);
// TODO: The idea here is to consume packets based on actual decoded bytes.
// We don't actually decode anything (readLength is always 0), so hardcoded for now.
//packetsConsumed = ctx->mediaengine->readLength() / ringbuffer.packetSize;
if (packetsConsumed == 0)
packetsConsumed = std::min(packetsInRingBuffer, 4);
// The MediaEngine is already consuming all the remaining
// packets when approaching the end of the video. The PSP
// is only consuming the last packet when reaching the end,
// not before.
// Consuming all the remaining packets?
if (ringbuffer.packetsFree + packetsConsumed >= ringbuffer.packets) {
// Having not yet reached the last timestamp?
if (ctx->mpegLastTimestamp > 0 && avcAu.pts < ctx->mpegLastTimestamp) {
// Do not yet consume all the remaining packets, leave 2 packets
packetsConsumed = ringbuffer.packets - ringbuffer.packetsFree - 2;
}
}
ctx->mediaengine->setReadLength(ctx->mediaengine->readLength() - packetsConsumed * ringbuffer.packetSize);
} else {
// Consume all remaining packets
packetsConsumed = ringbuffer.packets - ringbuffer.packetsFree;
ctx->mediaengine->writeVideoImage(Memory::GetPointer(buffer), frameWidth, ctx->videoPixelMode);
}
ringbuffer.packetsFree = std::min(16, ctx->mediaengine->getRemainSize() / 2048);
avcAu.pts = ctx->mediaengine->getVideoTimeStamp();
ctx->avc.avcFrameStatus = 1;
ctx->videoFrameCount++;
// Update the ringbuffer with the consumed packets
if (ringbuffer.packetsFree < ringbuffer.packets && packetsConsumed > 0) {
ringbuffer.packetsFree = std::min(ringbuffer.packets, ringbuffer.packetsFree + packetsConsumed);
DEBUG_LOG(HLE, "sceMpegAvcDecode consumed %d packets, remaining %d packets", packetsConsumed, ringbuffer.packets - ringbuffer.packetsFree);
}
ctx->avc.avcDecodeResult = MPEG_AVC_DECODE_SUCCESS;
// Flush structs back to memory
@ -705,6 +654,8 @@ u32 sceMpegAvcDecode(u32 mpeg, u32 auAddr, u32 frameWidth, u32 bufferAddr, u32 i
return hleDelayResult(0, "mpeg decode", avcFirstDelayMs);
else
return hleDelayResult(0, "mpeg decode", avcDecodeDelayMs);
//hleEatMicro(3300);
//return hleDelayResult(0, "mpeg decode", 200);
}
u32 sceMpegAvcDecodeStop(u32 mpeg, u32 frameWidth, u32 bufferAddr, u32 statusAddr)
@ -823,56 +774,17 @@ int sceMpegAvcDecodeYCbCr(u32 mpeg, u32 auAddr, u32 bufferAddr, u32 initAddr)
u32 init = Memory::Read_U32(initAddr);
DEBUG_LOG(HLE, "*buffer = %08x, *init = %08x", buffer, init);
int packetsInRingBuffer = ringbuffer.packets - ringbuffer.packetsFree;
int processedPackets = ringbuffer.packetsRead - packetsInRingBuffer;
int processedSize = processedPackets * ringbuffer.packetSize;
int packetsConsumed = 3;
if (ctx->mpegStreamSize > 0 && ctx->mpegLastTimestamp > 0) {
// Try a better approximation of the packets consumed based on the timestamp
int processedSizeBasedOnTimestamp = (int) ((((float) avcAu.pts) / ctx->mpegLastTimestamp) * ctx->mpegStreamSize);
if (processedSizeBasedOnTimestamp < processedSize) {
packetsConsumed = 0;
} else {
packetsConsumed = (processedSizeBasedOnTimestamp - processedSize) / ringbuffer.packetSize;
if (packetsConsumed > 10) {
packetsConsumed = 10;
}
}
DEBUG_LOG(HLE, "sceMpegAvcDecodeYCbCr consumed %d %d/%d %d", processedSizeBasedOnTimestamp, processedSize, ctx->mpegStreamSize, packetsConsumed);
}
if (ctx->mediaengine->stepVideo()) {
// TODO: Write it somewhere or buffer it or something?
// TODO: The idea here is to consume packets based on actual decoded bytes.
// We don't actually decode anything (readLength is always 0), so hardcoded for now.
//packetsConsumed = ctx->mediaengine->readLength() / ringbuffer.packetSize;
if (packetsConsumed == 0)
packetsConsumed = std::min(packetsInRingBuffer, 4);
// Consuming all the remaining packets?
if (ringbuffer.packetsFree + packetsConsumed >= ringbuffer.packets) {
// Having not yet reached the last timestamp?
if (ctx->mpegLastTimestamp > 0 && avcAu.pts < ctx->mpegLastTimestamp) {
// Do not yet consume all the remaining packets, leave 2 packets
packetsConsumed = ringbuffer.packets - ringbuffer.packetsFree - 2;
}
}
ctx->mediaengine->setReadLength(ctx->mediaengine->readLength() - packetsConsumed * ringbuffer.packetSize);
} else {
// Consume all remaining packets
packetsConsumed = ringbuffer.packets - ringbuffer.packetsFree;
// do nothing
;
}
ringbuffer.packetsFree = std::min(16, ctx->mediaengine->getRemainSize() / 2048);
avcAu.pts = ctx->mediaengine->getVideoTimeStamp();
ctx->avc.avcFrameStatus = 1;
ctx->videoFrameCount++;
// Update the ringbuffer with the consumed packets
if (ringbuffer.packetsFree < ringbuffer.packets && packetsConsumed > 0) {
ringbuffer.packetsFree = std::min(ringbuffer.packets, ringbuffer.packetsFree + packetsConsumed);
DEBUG_LOG(HLE, "sceMpegAvcDecodeYCbCr consumed %d packets, remaining %d packets", packetsConsumed, ringbuffer.packets - ringbuffer.packetsFree);
}
ctx->avc.avcDecodeResult = MPEG_AVC_DECODE_SUCCESS;
// Flush structs back to memory
@ -887,6 +799,8 @@ int sceMpegAvcDecodeYCbCr(u32 mpeg, u32 auAddr, u32 bufferAddr, u32 initAddr)
return hleDelayResult(0, "mpeg decode", avcFirstDelayMs);
else
return hleDelayResult(0, "mpeg decode", avcDecodeDelayMs);
//hleEatMicro(3300);
//return hleDelayResult(0, "mpeg decode", 200);
}
u32 sceMpegAvcDecodeFlush(u32 mpeg)
@ -955,6 +869,8 @@ int sceMpegRingbufferAvailableSize(u32 ringbufferAddr)
SceMpegRingBuffer ringbuffer;
Memory::ReadStruct(ringbufferAddr, &ringbuffer);
DEBUG_LOG(HLE, "%i=sceMpegRingbufferAvailableSize(%08x)", ringbuffer.packetsFree, ringbufferAddr);
MpegContext *ctx = getMpegCtx(ringbuffer.mpeg);
int result = std::min(ringbuffer.packetsFree, ctx->mediaengine->getRemainSize() / 2048);
return ringbuffer.packetsFree;
}
@ -966,16 +882,17 @@ void PostPutAction::run(MipsCall &call) {
int packetsAdded = currentMIPS->r[2];
if (packetsAdded > 0) {
if (ctx)
ctx->mediaengine->feedPacketData(ringbuffer.data, packetsAdded * ringbuffer.packetSize);
if (packetsAdded > ringbuffer.packetsFree) {
WARN_LOG(HLE, "sceMpegRingbufferPut clamping packetsAdded old=%i new=%i", packetsAdded, ringbuffer.packetsFree);
packetsAdded = ringbuffer.packetsFree;
}
ctx->mediaengine->addStreamData(Memory::GetPointer(ringbuffer.data), packetsAdded * 2048);
ringbuffer.packetsRead += packetsAdded;
ringbuffer.packetsWritten += packetsAdded;
ringbuffer.packetsFree -= packetsAdded;
//ringbuffer.packetsFree = std::min(16, ctx->mediaengine->getRemainSize() / 2048);
ringbuffer.packetsFree = 0;
}
DEBUG_LOG(HLE, "packetAdded: %i packetsRead: %i packetsTotol: %i", packetsAdded, ringbuffer.packetsRead, ringbuffer.packets);
Memory::WriteStruct(ringAddr_, &ringbuffer);
call.setReturnValue(packetsAdded);
@ -987,10 +904,8 @@ u32 sceMpegRingbufferPut(u32 ringbufferAddr, u32 numPackets, u32 available)
{
DEBUG_LOG(HLE, "sceMpegRingbufferPut(%08x, %i, %i)", ringbufferAddr, numPackets, available);
numPackets = std::min(numPackets, available);
if (numPackets <= 0) {
ERROR_LOG(HLE, "sub-zero number of packets put");
if (numPackets <= 0)
return 0;
}
SceMpegRingBuffer ringbuffer;
Memory::ReadStruct(ringbufferAddr, &ringbuffer);
@ -1001,14 +916,6 @@ u32 sceMpegRingbufferPut(u32 ringbufferAddr, u32 numPackets, u32 available)
return 0;
}
// Clamp to length of mpeg stream - this seems like a hack as we don't have access to the context here really
int mpegStreamPackets = (ctx->mpegStreamSize + ringbuffer.packetSize - 1) / ringbuffer.packetSize;
int remainingPackets = mpegStreamPackets - ringbuffer.packetsRead;
if (remainingPackets < 0) {
remainingPackets = 0;
}
numPackets = std::min(numPackets, (u32)remainingPackets);
// Execute callback function as a direct MipsCall, no blocking here so no messing around with wait states etc
if (ringbuffer.callback_addr) {
PostPutAction *action = (PostPutAction *) __KernelCreateAction(actionPostPut);
@ -1054,33 +961,23 @@ int sceMpegGetAvcAu(u32 mpeg, u32 streamId, u32 auAddr, u32 attrAddr)
streamInfo->second.needsReset = false;
}
// Wait for audio if too much ahead
if (ctx->atracRegistered && (sceAu.pts > sceAu.pts + getMaxAheadTimestamp(mpegRingbuffer)))
/*// Wait for audio if too much ahead
if (ctx->atracRegistered && (ctx->mediaengine->getVideoTimeStamp() > ctx->mediaengine->getAudioTimeStamp() + getMaxAheadTimestamp(mpegRingbuffer)))
{
ERROR_LOG(HLE, "sceMpegGetAvcAu - video too much ahead");
// TODO: Does this really reschedule?
return hleDelayResult(PSP_ERROR_MPEG_NO_DATA, "mpeg get avc", mpegDecodeErrorDelayMs);
}
}*/
int result = 0;
// read the au struct from ram
// TODO: For now, always checking, since readVideoAu() is stubbed.
if (!ctx->mediaengine->readVideoAu(&sceAu) || true) {
// Only return this after the video already ended.
if (ctx->endOfVideoReached) {
if (mpegRingbuffer.packetsFree < mpegRingbuffer.packets) {
mpegRingbuffer.packetsFree = mpegRingbuffer.packets;
Memory::WriteStruct(ctx->mpegRingbufferAddr, &mpegRingbuffer);
}
result = PSP_ERROR_MPEG_NO_DATA;
}
if (ctx->mpegLastTimestamp <= 0 || sceAu.pts >= ctx->mpegLastTimestamp) {
NOTICE_LOG(HLE, "End of video reached");
ctx->endOfVideoReached = true;
} else {
ctx->endOfAudioReached = false;
}
sceAu.pts = ctx->mediaengine->getVideoTimeStamp();
if (sceAu.pts >= ctx->mpegLastTimestamp) {
INFO_LOG(HLE, "video end reach. pts: %i dts: %i", (int)sceAu.pts, (int)ctx->mpegLastTimestamp);
mpegRingbuffer.packetsFree = mpegRingbuffer.packets;
Memory::WriteStruct(ctx->mpegRingbufferAddr, &mpegRingbuffer);
result = PSP_ERROR_MPEG_NO_DATA;
}
// The avcau struct may have been modified by mediaengine, write it back.
@ -1130,27 +1027,24 @@ int sceMpegGetAtracAu(u32 mpeg, u32 streamId, u32 auAddr, u32 attrAddr)
streamInfo->second.needsReset = false;
}
int result = 0;
if (mpegRingbuffer.packetsFree == mpegRingbuffer.packets) {
DEBUG_LOG(HLE, "PSP_ERROR_MPEG_NO_DATA=sceMpegGetAtracAu(%08x, %08x, %08x, %08x)", mpeg, streamId, auAddr, attrAddr);
// TODO: Does this really delay?
return hleDelayResult(PSP_ERROR_MPEG_NO_DATA, "mpeg get atrac", mpegDecodeErrorDelayMs);
}
//...
// TODO: Just faking it.
sceAu.pts += videoTimestampStep;
sceAu.write(auAddr);
int result = 0;
sceAu.pts = ctx->mediaengine->getAudioTimeStamp();
if (sceAu.pts >= ctx->mpegLastTimestamp) {
INFO_LOG(HLE, "video end reach. pts: %i dts: %i", (int)sceAu.pts, (int)ctx->mpegLastTimestamp);
mpegRingbuffer.packetsFree = mpegRingbuffer.packets;
Memory::WriteStruct(ctx->mpegRingbufferAddr, &mpegRingbuffer);
// TODO: And also audio end?
if (ctx->endOfVideoReached) {
if (mpegRingbuffer.packetsFree < mpegRingbuffer.packets) {
mpegRingbuffer.packetsFree = mpegRingbuffer.packets;
Memory::WriteStruct(ctx->mpegRingbufferAddr, &mpegRingbuffer);
}
result = PSP_ERROR_MPEG_NO_DATA;
}
sceAu.write(auAddr);
if (Memory::IsValidAddress(attrAddr)) {
Memory::Write_U32(0, attrAddr);
@ -1278,15 +1172,52 @@ u32 sceMpegAvcCopyYCbCr(u32 mpeg, u32 sourceAddr, u32 YCbCrAddr)
u32 sceMpegAtracDecode(u32 mpeg, u32 auAddr, u32 bufferAddr, int init)
{
DEBUG_LOG(HLE, "UNIMPL sceMpegAtracDecode(%08x, %08x, %08x, %i)", mpeg, auAddr, bufferAddr, init);
if (Memory::IsValidAddress(bufferAddr))
Memory::Memset(bufferAddr, 0, MPEG_ATRAC_ES_OUTPUT_SIZE);
if (!g_Config.bUseMediaEngine){
WARN_LOG(HLE, "Media Engine disabled");
return -1;
}
MpegContext *ctx = getMpegCtx(mpeg);
if (!ctx) {
return 0;
}
if (!Memory::IsValidAddress(auAddr) || !Memory::IsValidAddress(bufferAddr)) {
ERROR_LOG(HLE, "sceMpegAtracDecode: bad addresses");
return 0;
}
SceMpegAu avcAu;
avcAu.read(auAddr);
Memory::Memset(bufferAddr, 0, MPEG_ATRAC_ES_OUTPUT_SIZE);
ctx->mediaengine->getAudioSamples(Memory::GetPointer(bufferAddr));
avcAu.pts = ctx->mediaengine->getAudioTimeStamp();
avcAu.write(auAddr);
return hleDelayResult(0, "mpeg atrac decode", atracDecodeDelayMs);
//hleEatMicro(4000);
//return hleDelayResult(0, "mpeg atrac decode", 200);
}
// YCbCr -> RGB color space conversion
u32 sceMpegAvcCsc(u32 mpeg, u32 sourceAddr, u32 rangeAddr, int frameWidth, u32 destAddr)
{
ERROR_LOG(HLE, "UNIMPL sceMpegAvcCsc(%08x, %08x, %08x, %i, %08x)", mpeg, sourceAddr, rangeAddr, frameWidth, destAddr);
DEBUG_LOG(HLE, "sceMpegAvcCsc(%08x, %08x, %08x, %i, %08x)", mpeg, sourceAddr, rangeAddr, frameWidth, destAddr);
MpegContext *ctx = getMpegCtx(mpeg);
if (!ctx)
return -1;
if ((!Memory::IsValidAddress(rangeAddr)) || (!Memory::IsValidAddress(destAddr)))
return -1;
int x = Memory::Read_U32(rangeAddr);
int y = Memory::Read_U32(rangeAddr + 4);
int width = Memory::Read_U32(rangeAddr + 8);
int height = Memory::Read_U32(rangeAddr + 12);
ctx->mediaengine->writeVideoImageWithRange(Memory::GetPointer(destAddr), frameWidth, ctx->videoPixelMode,
x, y, width, height);
return 0;
}

View file

@ -62,19 +62,17 @@ struct SceMpegAu {
void read(u32 addr) {
Memory::ReadStruct(addr, this);
pts = (pts & 0xFFFFFFFFULL) << 32 | (pts >> 32);
dts = (dts & 0xFFFFFFFFULL) << 32 | (dts >> 32);
pts = (pts & 0xFFFFFFFFULL) << 32 | (((u64)pts) >> 32);
dts = (dts & 0xFFFFFFFFULL) << 32 | (((u64)dts) >> 32);
}
void write(u32 addr) {
pts = (pts & 0xFFFFFFFFULL) << 32 | (pts >> 32);
dts = (dts & 0xFFFFFFFFULL) << 32 | (dts >> 32);
pts = (pts & 0xFFFFFFFFULL) << 32 | (((u64)pts) >> 32);
dts = (dts & 0xFFFFFFFFULL) << 32 | (((u64)dts) >> 32);
Memory::WriteStruct(addr, this);
}
};
const int videoTimestampStep = 3003;
// As native in PSP ram
struct SceMpegRingBuffer {
// PSP info

View file

@ -21,6 +21,7 @@
#include "Core/HLE/scePsmf.h"
#include "Core/HLE/sceMpeg.h"
#include "Core/HW/MediaEngine.h"
#include <map>
@ -146,8 +147,9 @@ public:
class PsmfPlayer {
public:
// For savestates only.
PsmfPlayer() {}
PsmfPlayer() { mediaengine = new MediaEngine;}
PsmfPlayer(u32 data);
~PsmfPlayer() { if (mediaengine) delete mediaengine;}
void DoState(PointerWrap &p);
int videoCodec;
@ -166,6 +168,8 @@ public:
SceMpegAu psmfPlayerAtracAu;
SceMpegAu psmfPlayerAvcAu;
PsmfPlayerStatus status;
MediaEngine* mediaengine;
};
class PsmfStream {
@ -263,6 +267,7 @@ PsmfPlayer::PsmfPlayer(u32 data) {
playSpeed = Memory::Read_U32(data + 20);
psmfPlayerLastTimestamp = bswap32(Memory::Read_U32(data + PSMF_LAST_TIMESTAMP_OFFSET)) ;
status = PSMF_PLAYER_STATUS_INIT;
mediaengine = new MediaEngine;
}
void Psmf::DoState(PointerWrap &p) {
@ -307,6 +312,7 @@ void PsmfPlayer::DoState(PointerWrap &p) {
p.Do(playbackThreadPriority);
p.Do(psmfMaxAheadTimestamp);
p.Do(psmfPlayerLastTimestamp);
p.DoClass(mediaengine);
p.DoMarker("PsmfPlayer");
}
@ -670,6 +676,8 @@ int scePsmfPlayerSetPsmf(u32 psmfPlayer, const char *filename)
PsmfPlayer *psmfplayer = getPsmfPlayer(psmfPlayer);
if (psmfplayer)
psmfplayer->status = PSMF_PLAYER_STATUS_STANDBY;
psmfplayer->mediaengine->loadFile(filename);
psmfplayer->psmfPlayerLastTimestamp = psmfplayer->mediaengine->getLastTimeStamp();
return 0;
}
@ -679,6 +687,8 @@ int scePsmfPlayerSetPsmfCB(u32 psmfPlayer, const char *filename)
PsmfPlayer *psmfplayer = getPsmfPlayer(psmfPlayer);
if (psmfplayer)
psmfplayer->status = PSMF_PLAYER_STATUS_STANDBY;
psmfplayer->mediaengine->loadFile(filename);
psmfplayer->psmfPlayerLastTimestamp = psmfplayer->mediaengine->getLastTimeStamp();
return 0;
}
@ -698,15 +708,24 @@ int scePsmfPlayerStart(u32 psmfPlayer, u32 psmfPlayerData, int initPts)
psmfPlayerMap[psmfPlayer] = psmfplayer;
}
PsmfPlayerData data = {0};
data.videoCodec = psmfplayer->videoCodec;
data.videoStreamNum = psmfplayer->videoStreamNum;
data.audioCodec = psmfplayer->audioCodec;
data.audioStreamNum = psmfplayer->audioStreamNum;
data.playMode = psmfplayer->playMode;
data.playSpeed = psmfplayer->playSpeed;
data.psmfPlayerLastTimestamp = psmfplayer->psmfPlayerLastTimestamp;
Memory::WriteStruct(psmfPlayerData, &data);
if (Memory::IsValidAddress(psmfPlayerData)) {
PsmfPlayerData data = {0};
Memory::ReadStruct(psmfPlayerData, &data);
psmfplayer->videoCodec = data.videoCodec;
psmfplayer->videoStreamNum = data.videoStreamNum;
psmfplayer->audioCodec = data.audioCodec;
psmfplayer->audioStreamNum = data.audioStreamNum;
psmfplayer->playMode = data.playMode;
psmfplayer->playSpeed = data.playSpeed;
/*data.videoCodec = psmfplayer->videoCodec;
data.videoStreamNum = psmfplayer->videoStreamNum;
data.audioCodec = psmfplayer->audioCodec;
data.audioStreamNum = psmfplayer->audioStreamNum;
data.playMode = psmfplayer->playMode;
data.playSpeed = psmfplayer->playSpeed;
data.psmfPlayerLastTimestamp = psmfplayer->psmfPlayerLastTimestamp;
Memory::WriteStruct(psmfPlayerData, &data);*/
}
psmfplayer->psmfPlayerAtracAu.dts = initPts;
psmfplayer->psmfPlayerAtracAu.pts = initPts;
@ -731,7 +750,7 @@ int scePsmfPlayerDelete(u32 psmfPlayer)
int scePsmfPlayerUpdate(u32 psmfPlayer)
{
ERROR_LOG(HLE, "scePsmfPlayerUpdate(%08x)", psmfPlayer);
DEBUG_LOG(HLE, "scePsmfPlayerUpdate(%08x)", psmfPlayer);
PsmfPlayer *psmfplayer = getPsmfPlayer(psmfPlayer);
if (!psmfplayer) {
ERROR_LOG(HLE, "scePsmfPlayerUpdate - invalid psmf");
@ -739,13 +758,15 @@ int scePsmfPlayerUpdate(u32 psmfPlayer)
}
if (psmfplayer->psmfPlayerAvcAu.pts > 0) {
if (psmfplayer->psmfPlayerAvcAu.pts > psmfplayer->psmfPlayerLastTimestamp) {
if (psmfplayer->psmfPlayerAvcAu.pts >= psmfplayer->psmfPlayerLastTimestamp) {
psmfplayer->status = PSMF_PLAYER_STATUS_PLAYING_FINISHED;
}
}
// TODO: Once we start increasing pts somewhere, and actually know the last timestamp, do this better.
psmfplayer->status = PSMF_PLAYER_STATUS_PLAYING_FINISHED;
return 0;
psmfplayer->mediaengine->stepVideo();
psmfplayer->psmfPlayerAvcAu.pts = psmfplayer->mediaengine->getVideoTimeStamp();
// This seems to be crazy!
return hleDelayResult(0, "psmfPlayer update", 30000);
}
int scePsmfPlayerReleasePsmf(u32 psmfPlayer)
@ -759,30 +780,39 @@ int scePsmfPlayerReleasePsmf(u32 psmfPlayer)
int scePsmfPlayerGetVideoData(u32 psmfPlayer, u32 videoDataAddr)
{
ERROR_LOG(HLE, "UNIMPL scePsmfPlayerGetVideoData(%08x, %08x)", psmfPlayer, videoDataAddr);
DEBUG_LOG(HLE, "scePsmfPlayerGetVideoData(%08x, %08x)", psmfPlayer, videoDataAddr);
PsmfPlayer *psmfplayer = getPsmfPlayer(psmfPlayer);
if (!psmfplayer) {
ERROR_LOG(HLE, "scePsmfPlayerGetVideoData - invalid psmf");
return ERROR_PSMF_NOT_FOUND;
}
// TODO: Once we start increasing pts somewhere, and actually know the last timestamp, do this better.
psmfplayer->status = PSMF_PLAYER_STATUS_PLAYING_FINISHED;
return 0;
if (Memory::IsValidAddress(videoDataAddr)) {
int frameWidth = Memory::Read_U32(videoDataAddr);
u32 displaybuf = Memory::Read_U32(videoDataAddr + 4);
int displaypts = Memory::Read_U32(videoDataAddr + 8);
psmfplayer->mediaengine->writeVideoImage(Memory::GetPointer(displaybuf), frameWidth, videoPixelMode);
Memory::Write_U32(psmfplayer->psmfPlayerAvcAu.pts, videoDataAddr + 8);
}
return hleDelayResult(0, "psmfPlayer video decode", 3000);
}
int scePsmfPlayerGetAudioData(u32 psmfPlayer, u32 audioDataAddr)
{
ERROR_LOG(HLE, "UNIMPL scePsmfPlayerGetAudioData(%08x, %08x)", psmfPlayer, audioDataAddr);
DEBUG_LOG(HLE, "scePsmfPlayerGetAudioData(%08x, %08x)", psmfPlayer, audioDataAddr);
PsmfPlayer *psmfplayer = getPsmfPlayer(psmfPlayer);
if (!psmfplayer) {
ERROR_LOG(HLE, "scePsmfPlayerGetAudioData - invalid psmf");
return ERROR_PSMF_NOT_FOUND;
}
if (Memory::IsValidAddress(audioDataAddr))
if (Memory::IsValidAddress(audioDataAddr)) {
Memory::Memset(audioDataAddr, 0, audioSamplesBytes);
return 0;
psmfplayer->mediaengine->getAudioSamples(Memory::GetPointer(audioDataAddr));
}
return hleDelayResult(0, "psmfPlayer audio decode", 3000);
}
int scePsmfPlayerGetCurrentStatus(u32 psmfPlayer)
@ -792,13 +822,13 @@ int scePsmfPlayerGetCurrentStatus(u32 psmfPlayer)
ERROR_LOG(HLE, "scePsmfPlayerGetCurrentStatus(%08x) - invalid psmf", psmfPlayer);
return ERROR_PSMF_NOT_FOUND;
}
ERROR_LOG(HLE, "%d=scePsmfPlayerGetCurrentStatus(%08x)", psmfplayer->status, psmfPlayer);
DEBUG_LOG(HLE, "%d=scePsmfPlayerGetCurrentStatus(%08x)", psmfplayer->status, psmfPlayer);
return psmfplayer->status;
}
u32 scePsmfPlayerGetCurrentPts(u32 psmfPlayer, u32 currentPtsAddr)
{
ERROR_LOG(HLE, "scePsmfPlayerGetCurrentPts(%08x, %08x)", psmfPlayer , currentPtsAddr);
DEBUG_LOG(HLE, "scePsmfPlayerGetCurrentPts(%08x, %08x)", psmfPlayer , currentPtsAddr);
PsmfPlayer *psmfplayer = getPsmfPlayer(psmfPlayer);
if (!psmfplayer) {
ERROR_LOG(HLE, "scePsmfPlayerGetCurrentPts - invalid psmf");
@ -810,7 +840,7 @@ u32 scePsmfPlayerGetCurrentPts(u32 psmfPlayer, u32 currentPtsAddr)
if (Memory::IsValidAddress(currentPtsAddr)) {
//Comment out until psmfPlayerAvcAu.pts start increasing correctly, Ultimate Ghosts N Goblins relies on it .
//Memory::Write_U64(psmfplayer->psmfPlayerAvcAu.pts, currentPtsAddr);
Memory::Write_U64(psmfplayer->psmfPlayerAvcAu.pts, currentPtsAddr);
}
return 0;
}

View file

@ -17,27 +17,500 @@
#include "MediaEngine.h"
#include "../MemMap.h"
#include "GPU/GPUInterface.h"
#include "Core/HW/atrac3plus.h"
static const int modeBpp[4] = { 2, 2, 2, 4 };
#ifdef USE_FFMPEG
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
void MediaEngine::writeVideoImage(u32 bufferPtr, int frameWidth, int videoPixelMode)
}
#endif // USE_FFMPEG
static const int TPSM_PIXEL_STORAGE_MODE_16BIT_BGR5650 = 0x00;
static const int TPSM_PIXEL_STORAGE_MODE_16BIT_ABGR5551 = 0x01;
static const int TPSM_PIXEL_STORAGE_MODE_16BIT_ABGR4444 = 0x02;
static const int TPSM_PIXEL_STORAGE_MODE_32BIT_ABGR8888 = 0x03;
inline void YUV444toRGB888(u8 ypos, u8 upos, u8 vpos, u8 &r, u8 &g, u8 &b)
{
if (videoPixelMode >= (int)(sizeof(modeBpp) / sizeof(modeBpp[0])) || videoPixelMode < 0)
u8 u = upos - 128;
u8 v = vpos -128;
int rdif = v + ((v * 103) >> 8);
int invgdif = ((u * 88) >> 8) + ((v * 183) >> 8);
int bdif = u + ((u * 198) >> 8);
r = (u8)(ypos + rdif);
g = (u8)(ypos - invgdif);
b = (u8)(ypos + bdif);
}
void getPixelColor(u8 r, u8 g, u8 b, u8 a, int pixelMode, u16* color)
{
switch (pixelMode)
{
ERROR_LOG(ME, "Unexpected videoPixelMode %d, using 0 instead.", videoPixelMode);
videoPixelMode = 0;
case TPSM_PIXEL_STORAGE_MODE_16BIT_BGR5650:
{
*color = ((b >> 3) << 11) | ((g >> 2) << 5) | (r >> 3);
}
break;
case TPSM_PIXEL_STORAGE_MODE_16BIT_ABGR5551:
{
*color = ((a >> 7) << 15) | ((b >> 3) << 10) | ((g >> 3) << 5) | (r >> 3);
}
break;
case TPSM_PIXEL_STORAGE_MODE_16BIT_ABGR4444:
{
*color = ((a >> 4) << 12) | ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4);
}
break;
default:
// do nothing yet
break;
}
}
MediaEngine::MediaEngine(): m_pdata(0), m_streamSize(0), m_readSize(0){
m_pFormatCtx = 0;
m_pCodecCtx = 0;
m_pFrame = 0;
m_pFrameRGB = 0;
m_pIOContext = 0;
m_videoStream = -1;
m_buffer = 0;
m_demux = 0;
m_audioContext = 0;
}
MediaEngine::~MediaEngine() {
closeMedia();
}
void MediaEngine::closeMedia() {
#ifdef USE_FFMPEG
if (m_buffer)
av_free(m_buffer);
if (m_pFrameRGB)
av_free(m_pFrameRGB);
if (m_pFrame)
av_free(m_pFrame);
if (m_pIOContext && ((AVIOContext*)m_pIOContext)->buffer)
av_free(((AVIOContext*)m_pIOContext)->buffer);
if (m_pIOContext)
av_free(m_pIOContext);
if (m_pCodecCtx)
avcodec_close((AVCodecContext*)m_pCodecCtx);
if (m_pFormatCtx)
avformat_close_input((AVFormatContext**)&m_pFormatCtx);
#endif // USE_FFMPEG
if (m_pdata)
delete [] m_pdata;
if (m_demux)
delete m_demux;
m_buffer = 0;
m_pFrame = 0;
m_pFrameRGB = 0;
m_pIOContext = 0;
m_pCodecCtx = 0;
m_pFormatCtx = 0;
m_videoStream = -1;
m_pdata = 0;
m_demux = 0;
Atrac3plus_Decoder::closeContext(&m_audioContext);
}
int _MpegReadbuffer(void *opaque, uint8_t *buf, int buf_size)
{
MediaEngine *mpeg = (MediaEngine*)opaque;
int size = std::min(mpeg->m_bufSize, buf_size);
size = std::max(std::min((mpeg->m_readSize - mpeg->m_decodePos), size), 0);
if (size > 0)
memcpy(buf, mpeg->m_pdata + mpeg->m_decodePos, size);
mpeg->m_decodePos += size;
return size;
}
int64_t _MpegSeekbuffer(void *opaque, int64_t offset, int whence)
{
MediaEngine *mpeg = (MediaEngine*)opaque;
switch (whence) {
case SEEK_SET:
mpeg->m_decodePos = offset;
break;
case SEEK_CUR:
mpeg->m_decodePos += offset;
break;
case SEEK_END:
mpeg->m_decodePos = mpeg->m_streamSize - (u32)offset;
break;
}
return offset;
}
bool MediaEngine::openContext() {
#ifdef USE_FFMPEG
u8* tempbuf = (u8*)av_malloc(m_bufSize);
AVFormatContext *pFormatCtx = avformat_alloc_context();
m_pFormatCtx = (void*)pFormatCtx;
m_pIOContext = (void*)avio_alloc_context(tempbuf, m_bufSize, 0, (void*)this, _MpegReadbuffer, NULL, _MpegSeekbuffer);
pFormatCtx->pb = (AVIOContext*)m_pIOContext;
// Open video file
if(avformat_open_input((AVFormatContext**)&m_pFormatCtx, NULL, NULL, NULL) != 0)
return false;
if(avformat_find_stream_info(pFormatCtx, NULL) < 0)
return false;
// Dump information about file onto standard error
av_dump_format(pFormatCtx, 0, NULL, 0);
// Find the first video stream
for(int i = 0; i < pFormatCtx->nb_streams; i++) {
if(pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
m_videoStream = i;
break;
}
}
if(m_videoStream == -1)
return false;
// Get a pointer to the codec context for the video stream
m_pCodecCtx = (void*)pFormatCtx->streams[m_videoStream]->codec;
AVCodecContext *pCodecCtx = (AVCodecContext*)m_pCodecCtx;
// Find the decoder for the video stream
AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec == NULL)
return false;
// Open codec
AVDictionary *optionsDict = 0;
if(avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)
return false; // Could not open codec
setVideoDim();
int mpegoffset = bswap32(*(int*)(m_pdata + 8));
m_demux = new MpegDemux(m_pdata, m_streamSize, mpegoffset);
m_demux->setReadSize(m_readSize);
m_demux->demux();
m_audioPos = 0;
m_audioContext = Atrac3plus_Decoder::openContext();
#endif USE_FFMPEG
return true;
}
// Initializes the engine with the first chunk of a streamed MPEG.
// `readSize` bytes of `buffer` are copied; the remainder (up to `StreamSize`)
// arrives later via addStreamData().
bool MediaEngine::loadStream(u8* buffer, int readSize, int StreamSize)
{
	closeMedia();
	// force to clear the useless FBO
	gpu->Resized();

	m_videopts = 0;
	m_audiopts = 0;
	m_bufSize = 0x2000;
	m_decodePos = 0;
	// Never accept more initial data than the declared stream size, or the
	// memcpy below would overrun m_pdata.
	m_readSize = std::min(readSize, StreamSize);
	m_streamSize = StreamSize;
	m_pdata = new u8[StreamSize];
	memcpy(m_pdata, buffer, m_readSize);

	// Only let ffmpeg probe the stream once we have more than one buffer's
	// worth of data.
	if (m_readSize > 0x2000)
		openContext();
	return true;
}
bool MediaEngine::loadFile(const char* filename)
{
PSPFileInfo info = pspFileSystem.GetFileInfo(filename);
s64 infosize = info.size;
u8* buf = new u8[infosize];
u32 h = pspFileSystem.OpenFile(filename, (FileAccess) FILEACCESS_READ);
pspFileSystem.ReadFile(h, buf, infosize);
pspFileSystem.CloseFile(h);
closeMedia();
// force to clear the useless FBO
gpu->Resized();
m_videopts = 0;
m_audiopts = 0;
m_bufSize = 0x2000;
m_decodePos = 0;
m_readSize = infosize;
m_streamSize = infosize;
m_pdata = buf;
if (m_readSize > 0x2000)
openContext();
return true;
}
// Appends newly streamed data to the internal buffer (clamped to the declared
// stream size), lazily opens the ffmpeg context once enough data has arrived,
// and feeds the new bytes to the demuxer.
void MediaEngine::addStreamData(u8* buffer, int addSize) {
	int bytes = std::min(addSize, m_streamSize - m_readSize);
	if (bytes <= 0)
		return;
	memcpy(m_pdata + m_readSize, buffer, bytes);
	m_readSize += bytes;
	if (!m_pFormatCtx && m_readSize > 0x2000)
		openContext();
	if (m_demux) {
		m_demux->setReadSize(m_readSize);
		m_demux->demux();
	}
}
// Allocates the decode frame, the RGB24 conversion frame and the swscale
// context for the requested output size (0x0 = use the video's native size).
// (The old body contained leftover "fake image" code referencing names that
// do not exist in this function — bufferPtr, frameWidth, videoPixelMode —
// which could not compile; it has been removed.)
bool MediaEngine::setVideoDim(int width, int height)
{
	if (!m_pCodecCtx)
		return false;
#ifdef USE_FFMPEG
	AVCodecContext *pCodecCtx = (AVCodecContext*)m_pCodecCtx;
	if (width == 0 && height == 0)
	{
		// use the original video size
		m_desWidth = pCodecCtx->width;
		m_desHeight = pCodecCtx->height;
	}
	else
	{
		m_desWidth = width;
		m_desHeight = height;
	}

	// Allocate video frame
	m_pFrame = avcodec_alloc_frame();

	// YUV -> RGB24 converter, scaling to the chosen output dimensions.
	m_sws_ctx = (void*)
		sws_getContext
		(
			pCodecCtx->width,
			pCodecCtx->height,
			pCodecCtx->pix_fmt,
			m_desWidth,
			m_desHeight,
			PIX_FMT_RGB24,
			SWS_BILINEAR,
			NULL,
			NULL,
			NULL
		);

	// Allocate video frame for RGB24
	m_pFrameRGB = avcodec_alloc_frame();
	int numBytes = avpicture_get_size(PIX_FMT_RGB24, m_desWidth, m_desHeight);
	m_buffer = (u8*)av_malloc(numBytes * sizeof(uint8_t));

	// Assign appropriate parts of buffer to image planes in pFrameRGB
	avpicture_fill((AVPicture *)m_pFrameRGB, m_buffer, PIX_FMT_RGB24, m_desWidth, m_desHeight);
#endif // USE_FFMPEG
	return true;
}
void MediaEngine::feedPacketData(u32 addr, int size)
bool MediaEngine::stepVideo() {
// if video engine is broken, force to add timestamp
m_videopts += 3003;
#ifdef USE_FFMPEG
AVFormatContext *pFormatCtx = (AVFormatContext*)m_pFormatCtx;
AVCodecContext *pCodecCtx = (AVCodecContext*)m_pCodecCtx;
AVFrame *pFrame = (AVFrame*)m_pFrame;
AVFrame *pFrameRGB = (AVFrame*)m_pFrameRGB;
if ((!m_pFrame)||(!m_pFrameRGB))
return false;
AVPacket packet;
int frameFinished;
bool bGetFrame = false;
while(av_read_frame(pFormatCtx, &packet)>=0) {
if(packet.stream_index == m_videoStream) {
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
sws_scale((SwsContext*)m_sws_ctx, pFrame->data, pFrame->linesize, 0,
pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
if(frameFinished) {
if (m_videopts == 3003) {
m_audiopts = packet.pts;
}
m_videopts = packet.pts + packet.duration;
bGetFrame = true;
}
}
av_free_packet(&packet);
if (bGetFrame) break;
}
if (m_audiopts > 0) {
if (m_audiopts - m_videopts > 5000)
return stepVideo();
}
return bGetFrame;
#else
return true;
#endif // USE_FFMPEG
}
// Copies the whole decoded RGB24 frame into `buffer`, converting to the
// requested PSP pixel mode. `frameWidth` is the destination stride in pixels.
bool MediaEngine::writeVideoImage(u8* buffer, int frameWidth, int videoPixelMode) {
	if ((!m_pFrame)||(!m_pFrameRGB))
		return false;
#ifdef USE_FFMPEG
	AVFrame *rgbFrame = (AVFrame*)m_pFrameRGB;
	// lock the image size
	const int w = m_desWidth;
	const int h = m_desHeight;
	u8 *src = rgbFrame->data[0];
	u8 *dst = buffer;
	if (videoPixelMode == TPSM_PIXEL_STORAGE_MODE_32BIT_ABGR8888)
	{
		// ABGR8888: 4 bytes per destination pixel, alpha forced opaque.
		for (int y = 0; y < h; y++) {
			for (int x = 0; x < w; x++)
			{
				dst[0] = src[0];
				dst[1] = src[1];
				dst[2] = src[2];
				dst[3] = 0xFF;
				src += 3;
				dst += 4;
			}
			dst += (frameWidth - w) * 4;  // skip destination stride padding
		}
	}
	else
	{
		// 16-bit destination formats: convert each pixel via getPixelColor.
		for (int y = 0; y < h; y++) {
			for (int x = 0; x < w; x++)
			{
				getPixelColor(src[0], src[1], src[2], 0xFF, videoPixelMode, (u16*)dst);
				src += 3;
				dst += 2;
			}
			dst += (frameWidth - w) * 2;
		}
	}
#endif // USE_FFMPEG
	return true;
}
// Copies a sub-rectangle (xpos, ypos, width x height) of the decoded RGB24
// frame into `buffer`, converting to the requested PSP pixel mode.
// `frameWidth` is the destination stride in pixels.
bool MediaEngine::writeVideoImageWithRange(u8* buffer, int frameWidth, int videoPixelMode,
	int xpos, int ypos, int width, int height) {
	if ((!m_pFrame)||(!m_pFrameRGB))
		return false;
#ifdef USE_FFMPEG
	AVFrame *pFrameRGB = (AVFrame*)m_pFrameRGB;
	// lock the image size
	u8* imgbuf = buffer;
	u8* data = pFrameRGB->data[0];
	if (videoPixelMode == TPSM_PIXEL_STORAGE_MODE_32BIT_ABGR8888)
	{
		// ABGR8888
		// Seek to the rectangle's top-left corner (source is 3 bytes/pixel).
		data += (ypos * m_desWidth + xpos) * 3;
		for (int y = 0; y < height; y++) {
			for (int x = 0; x < width; x++)
			{
				u8 r = *(data++);
				u8 g = *(data++);
				u8 b = *(data++);
				*(imgbuf++) = r;
				*(imgbuf++) = g;
				*(imgbuf++) = b;
				*(imgbuf++) = 0xFF;  // force opaque alpha
			}
			// Advance past the row remainder: destination is 4 bytes/pixel
			// with stride frameWidth, source is 3 bytes/pixel with stride
			// m_desWidth.
			imgbuf += (frameWidth - width) * 4;
			data += (m_desWidth - width) * 3;
		}
	}
	else
	{
		// 16-bit destination formats: convert each pixel via getPixelColor.
		data += (ypos * m_desWidth + xpos) * 3;
		for (int y = 0; y < height; y++) {
			for (int x = 0; x < width; x++)
			{
				u8 r = *(data++);
				u8 g = *(data++);
				u8 b = *(data++);
				getPixelColor(r, g, b, 0xFF, videoPixelMode, (u16*)imgbuf);
				imgbuf += 2;
			}
			imgbuf += (frameWidth - width) * 2;
			data += (m_desWidth - width) * 3;
		}
	}
#endif // USE_FFMPEG
	return true;
}
// Returns true if an ATRAC3+ audio frame header (sync bytes 0x0F 0xD0)
// starts at `offset` in the demuxed audio stream. Caller must ensure
// offset+1 is in bounds.
static bool isHeader(u8* audioStream, int offset)
{
	const u8 header1 = (u8)0x0F;
	const u8 header2 = (u8)0xD0;
	return (audioStream[offset] == header1) && (audioStream[offset+1] == header2);
}
// Finds the offset of the next ATRAC3+ frame header at or after `curpos`.
// Tries the expected position first (headers normally repeat every
// frameSize bytes), then falls back to a linear scan. Returns -1 when no
// header lies before `limit`.
static int getNextHeaderPosition(u8* audioStream, int curpos, int limit, int frameSize)
{
	const int endScan = limit - 1;
	// Fast path: one frame ahead of the current (already skipped) header.
	const int expected = curpos + frameSize - 8;
	if (expected < endScan && isHeader(audioStream, expected))
		return expected;
	// Slow path: byte-by-byte scan.
	for (int pos = curpos; pos < endScan; pos++) {
		if (isHeader(audioStream, pos))
			return pos;
	}
	return -1;
}
// Decodes one ATRAC3+ frame from the demuxed audio stream into `buffer`.
// Returns the number of output bytes, or 0 when no complete frame is
// available yet.
int MediaEngine::getAudioSamples(u8* buffer) {
	if (!m_demux) {
		return 0;
	}
	u8* audioStream = 0;
	int audioSize = m_demux->getaudioStream(&audioStream);
	if (m_audioPos >= audioSize || !isHeader(audioStream, m_audioPos))
	{
		return 0;
	}
	u8 headerCode1 = audioStream[2];
	u8 headerCode2 = audioStream[3];
	// Frame size from the two bytes after the 0x0F 0xD0 sync code:
	// size = ((code & 0x3FF) * 8) + 0x10. The previous expression lacked
	// parentheses, so `|` was applied after `* 8 + 0x10` and the two fields'
	// bits overlapped, producing a garbage size.
	int frameSize = (((headerCode1 & 0x03) << 8) | (headerCode2 & 0xFF)) * 8 + 0x10;
	if (m_audioPos + frameSize > audioSize)
		return 0;
	m_audioPos += 8;  // skip the 8-byte frame header
	int nextHeader = getNextHeaderPosition(audioStream, m_audioPos, audioSize, frameSize);
	u8* frame = audioStream + m_audioPos;
	int outbytes = 0;
	Atrac3plus_Decoder::atrac3plus_decode(m_audioContext, frame, frameSize - 8, &outbytes, buffer);
	if (nextHeader >= 0) {
		m_audioPos = nextHeader;
	} else
		m_audioPos = audioSize;
	// 4180 = 2048 samples at 44100 Hz expressed in 90 kHz timestamp units.
	m_audiopts += 4180;
	return outbytes;
}
// Current video presentation timestamp, in 90 kHz clock units.
s64 MediaEngine::getVideoTimeStamp() {
	return m_videopts;
}
// Current audio presentation timestamp in 90 kHz units; falls back to the
// video clock until the audio side has produced a timestamp.
s64 MediaEngine::getAudioTimeStamp() {
	return (m_audiopts > 0) ? m_audiopts : m_videopts;
}
// Last presentation timestamp of the stream, read from the PSMF header
// (big-endian u32 at offset 92). Returns 0 when the header hasn't been
// loaded yet — previously this read uninitialized heap memory if fewer than
// 96 bytes had been streamed in.
s64 MediaEngine::getLastTimeStamp() {
	if (!m_pdata || m_readSize < 96)
		return 0;
	int lastTimeStamp = bswap32(*(int*)(m_pdata + 92));
	return lastTimeStamp;
}

View file

@ -28,70 +28,62 @@
#include "../../Globals.h"
#include "../HLE/sceMpeg.h"
#include "ChunkFile.h"
#include "Core/HW/MpegDemux.h"
// NOTE(review): this declaration appears to interleave an older "fake mode"
// API (fakeMode_, readVideoAu, fakeFrameCounter_, ...) with the newer
// ffmpeg-backed members: there are two constructors, two stepVideo
// declarations and two writeVideoImage declarations. This looks like
// mis-applied diff/merge residue — confirm against the intended header
// before building.
class MediaEngine
{
public:
	MediaEngine() : fakeMode_(false), readLength_(0), fakeFrameCounter_(0) {}
	MediaEngine();
	~MediaEngine();
	void setFakeMode(bool fake) {
		fakeMode_ = fake;
	}
	void closeMedia();
	bool loadStream(u8* buffer, int readSize, int StreamSize);
	bool loadFile(const char* filename);
	void addStreamData(u8* buffer, int addSize);
	// Bytes of the declared stream that have not been fed in yet.
	int getRemainSize() { return m_streamSize - m_readSize;}
	void init(u32 bufferAddr, u32 mpegStreamSize, u32 mpegOffset) {
		bufferAddr_ = bufferAddr;
		mpegStreamSize_ = mpegStreamSize;
		mpegOffset_ = mpegOffset;
	}
	void finish();
	bool stepVideo();
	bool writeVideoImage(u8* buffer, int frameWidth = 512, int videoPixelMode = 3);
	bool writeVideoImageWithRange(u8* buffer, int frameWidth, int videoPixelMode,
		int xpos, int ypos, int width, int height);
	int getAudioSamples(u8* buffer);
	void setVideoDim(int w, int h) { videoWidth_ = w; videoHeight_ = h; }
	void feedPacketData(u32 addr, int size);
	bool readVideoAu(SceMpegAu *au) {
		if (fakeMode_) {
			au->pts += videoTimestampStep;
		}
		return true;
	}
	bool readAudioAu(SceMpegAu *au) {
		if (fakeMode_) {
		}
		return true;
	}
	bool stepVideo() {
		if (fakeMode_)
			return true;
		return true;
	}
	void writeVideoImage(u32 bufferPtr, int frameWidth, int videoPixelMode);
	// WTF is this?
	int readLength() { return readLength_; }
	void setReadLength(int len) { readLength_ = len; }
	bool setVideoDim(int width = 0, int height = 0);
	s64 getVideoTimeStamp();
	s64 getAudioTimeStamp();
	s64 getLastTimeStamp();
	// Savestate serialization. Note that only sizes are saved, not the
	// stream contents or the ffmpeg decode state.
	void DoState(PointerWrap &p) {
		p.Do(fakeMode_);
		p.Do(bufferAddr_);
		p.Do(mpegStreamSize_);
		p.Do(mpegOffset_);
		p.Do(readLength_);
		p.Do(videoWidth_);
		p.Do(videoHeight_);
		p.Do(fakeFrameCounter_);
		p.Do(m_streamSize);
		p.Do(m_readSize);
		p.DoMarker("MediaEngine");
	}
private:
	bool fakeMode_;
	u32 bufferAddr_;
	u32 mpegStreamSize_;
	u32 mpegOffset_;
	int readLength_;
	int videoWidth_;
	int videoHeight_;
	int fakeFrameCounter_;
	bool openContext();
public:
	// Opaque ffmpeg handles (stored as void* to keep ffmpeg headers out of
	// this header).
	void *m_pFormatCtx;
	void *m_pCodecCtx;
	void *m_pFrame;
	void *m_pFrameRGB;
	void *m_pIOContext;
	int m_videoStream;
	void *m_sws_ctx;
	u8* m_buffer;
	int m_desWidth;
	int m_desHeight;
	int m_streamSize;
	int m_readSize;
	int m_decodePos;
	int m_bufSize;
	s64 m_videopts;
	u8* m_pdata;
	MpegDemux *m_demux;
	int m_audioPos;
	void* m_audioContext;
	s64 m_audiopts;
};

192
Core/HW/MpegDemux.cpp Normal file
View file

@ -0,0 +1,192 @@
#include "MpegDemux.h"
// MPEG program-stream start codes (ISO/IEC 11172-1 / 13818-1).
// Every start code is the 24-bit prefix 0x000001 followed by a one-byte id.
const int PACKET_START_CODE_MASK = 0xffffff00;
const int PACKET_START_CODE_PREFIX = 0x00000100;

// Video-layer start codes.
const int SEQUENCE_START_CODE = 0x000001b3;
const int EXT_START_CODE = 0x000001b5;
const int SEQUENCE_END_CODE = 0x000001b7;
const int GOP_START_CODE = 0x000001b8;
const int ISO_11172_END_CODE = 0x000001b9;

// System-layer start codes.
const int PACK_START_CODE = 0x000001ba;
const int SYSTEM_HEADER_START_CODE = 0x000001bb;
const int PROGRAM_STREAM_MAP = 0x000001bc;
const int PRIVATE_STREAM_1 = 0x000001bd;  // carries the PSP's ATRAC3+ audio
const int PADDING_STREAM = 0x000001be;
const int PRIVATE_STREAM_2 = 0x000001bf;
// Demuxer over an externally-owned buffer of `size` bytes; parsing starts at
// `offset`. The buffer is not copied and must outlive this object.
MpegDemux::MpegDemux(u8* buffer, int size, int offset)
	: m_index(offset),
	  m_len(size),
	  m_buf(buffer),
	  m_audioStream(0),
	  m_audiopos(0),
	  m_audioChannel(-1),
	  m_readSize(0)
{
}
// Frees the collected audio stream. delete[] on a null pointer is a no-op,
// so no guard is needed.
MpegDemux::~MpegDemux(void)
{
	delete [] m_audioStream;
}
// Marks how many bytes of the source buffer are currently valid (streamed
// in); demux() stays a sector behind this boundary until more data arrives.
void MpegDemux::setReadSize(int readSize)
{
	m_readSize = readSize;
}
// Parses a PES (Packetized Elementary Stream) header at the current read
// position. `length` is the remaining PES packet length; the return value is
// that length minus the header bytes consumed, i.e. the payload size that
// follows. Fills pesHeader.pts/dts/channel where present. Structure follows
// the MPEG systems layer (and ffmpeg's mpegps demuxer / JPCSP).
int MpegDemux::readPesHeader(PesHeader &pesHeader, int length, int startCode) {
	int c = 0;
	// Skip stuffing bytes (0xFF) before the header proper.
	while (length > 0) {
		c = read8();
		length--;
		if (c != 0xFF) {
			break;
		}
	}
	// MPEG-1 style: optional STD buffer scale/size field ('01xxxxxx').
	if ((c & 0xC0) == 0x40) {
		read8();
		c = read8();
		length -= 2;
	}
	pesHeader.pts = 0;
	pesHeader.dts = 0;
	if ((c & 0xE0) == 0x20) {
		// MPEG-1: '0010' = PTS only; the 0x10 bit means a DTS follows.
		pesHeader.dts = pesHeader.pts = readPts(c);
		length -= 4;
		if ((c & 0x10) != 0) {
			pesHeader.dts = readPts();
			length -= 5;
		}
	} else if ((c & 0xC0) == 0x80) {
		// MPEG-2 PES: flags byte + header-data length, then optional fields.
		int flags = read8();
		int headerLength = read8();
		length -= 2;
		length -= headerLength;
		if ((flags & 0x80) != 0) {
			// PTS present; DTS too when the 0x40 flag is also set.
			pesHeader.dts = pesHeader.pts = readPts();
			headerLength -= 5;
			if ((flags & 0x40) != 0) {
				pesHeader.dts = readPts();
				headerLength -= 5;
			}
		}
		if ((flags & 0x3F) != 0 && headerLength == 0) {
			// Broken stream: flags promise more fields than the header holds.
			flags &= 0xC0;
		}
		if ((flags & 0x01) != 0) {
			// PES extension field.
			int pesExt = read8();
			headerLength--;
			// Bytes occupied by the sub-fields announced by pesExt (same
			// computation as ffmpeg's mpegps demuxer).
			int skip = (pesExt >> 4) & 0x0B;
			skip += skip & 0x09;
			if ((pesExt & 0x40) != 0 || skip > headerLength) {
				pesExt = skip = 0;
			}
			this->skip(skip);
			headerLength -= skip;
			if ((pesExt & 0x01) != 0) {
				// PES extension 2: may carry an extended stream id.
				int ext2Length = read8();
				headerLength--;
				if ((ext2Length & 0x7F) != 0) {
					int idExt = read8();
					headerLength--;
					if ((idExt & 0x80) == 0) {
						startCode = ((startCode & 0xFF) << 8) | idExt;
					}
				}
			}
		}
		// Skip whatever remains of the announced header data.
		skip(headerLength);
	}
	if (startCode == PRIVATE_STREAM_1) {
		// Private stream 1: first payload byte is the substream channel id,
		// followed by an audio sub-header.
		int channel = read8();
		pesHeader.channel = channel;
		length--;
		if (channel >= 0x80 && channel <= 0xCF) {
			// Skip audio header
			skip(3);
			length -= 3;
			if (channel >= 0xB0 && channel <= 0xBF) {
				skip(1);
				length--;
			}
		} else {
			// PSP audio has additional 3 bytes in header
			skip(3);
			length -= 3;
		}
	}
	return length;
}
// Handles one PES packet at the current read position. `startCode` is the
// stream's start code and `channel` the substream channel to accept
// (-1 = accept any and lock onto it). When `bdemux` is true the packet
// payload is appended to m_audioStream; otherwise the packet is skipped.
// Returns the (possibly newly locked-in) channel id.
int MpegDemux::demuxStream(bool bdemux, int startCode, int channel)
{
	int length = read16();  // PES packet length
	if (bdemux) {
		PesHeader pesHeader(channel);
		length = readPesHeader(pesHeader, length, startCode);
		if (pesHeader.channel == channel || channel < 0) {
			channel = pesHeader.channel;
			// Append the payload to the collected audio stream.
			memcpy(m_audioStream + m_audiopos, m_buf + m_index, length);
			m_audiopos += length;
		}
		skip(length);
	} else {
		skip(length);
	}
	return channel;
}
// Walks the MPEG program stream from the current position, collecting the
// audio (private stream 1) payload into m_audioStream and skipping video and
// system packets. Returns early when the next pack might extend past the
// data streamed in so far; call again after setReadSize() delivers more.
void MpegDemux::demux()
{
	if (!m_audioStream)
		m_audioStream = new u8[m_len - m_index];
	while (m_index < m_len)
	{
		// Stay a full 2048-byte sector behind the streaming boundary so we
		// never parse into data that hasn't arrived yet.
		if (m_readSize != m_len && m_index + 2048 > m_readSize)
			return;
		// Search for start code
		int startCode = 0xFF;
		while ((startCode & PACKET_START_CODE_MASK) != PACKET_START_CODE_PREFIX && !isEOF()) {
			startCode = (startCode << 8) | read8();
		}
		switch (startCode) {
		case PACK_START_CODE: {
			// Pack header body (assumes the 10-byte MPEG-2 pack layout).
			skip(10);
			break;
		}
		case SYSTEM_HEADER_START_CODE: {
			// NOTE(review): skips a fixed 14 bytes rather than the header's
			// declared length — fine for PSP PSMF streams, but verify.
			skip(14);
			break;
		}
		case PADDING_STREAM:
		case PRIVATE_STREAM_2: {
			// Length-prefixed packets we don't care about.
			int length = read16();
			skip(length);
			break;
		}
		case PRIVATE_STREAM_1: {
			// Audio stream
			m_audioChannel = demuxStream(true, startCode, m_audioChannel);
			break;
		}
		case 0x1E0: case 0x1E1: case 0x1E2: case 0x1E3:
		case 0x1E4: case 0x1E5: case 0x1E6: case 0x1E7:
		case 0x1E8: case 0x1E9: case 0x1EA: case 0x1EB:
		case 0x1EC: case 0x1ED: case 0x1EE: case 0x1EF: {
			// Video Stream
			demuxStream(false, startCode, -1);
			break;
		}
		}
	}
}
// Exposes the collected audio stream via *audioStream; the return value is
// the number of valid bytes in it.
int MpegDemux::getaudioStream(u8** audioStream)
{
	*audioStream = m_audioStream;
	return m_audiopos;
}

63
Core/HW/MpegDemux.h Normal file
View file

@ -0,0 +1,63 @@
// This is a simple version MpegDemux that can get media's audio stream.
// Thanks to JPCSP project.
#pragma once
#include "../../Globals.h"
class MpegDemux
{
public:
MpegDemux(u8* buffer, int size, int offset);
~MpegDemux(void);
void setReadSize(int readSize);
void demux();
// return it's size
int getaudioStream(u8 **audioStream);
private:
struct PesHeader {
long pts;
long dts;
int channel;
PesHeader(int chan) {
pts = 0;
dts = 0;
channel = chan;
}
};
int read8() {
return m_buf[m_index++] & 0xFF;
}
int read16() {
return (read8() << 8) | read8();
}
long readPts() {
return readPts(read8());
}
long readPts(int c) {
return (((long) (c & 0x0E)) << 29) | ((read16() >> 1) << 15) | (read16() >> 1);
}
bool isEOF() {
return m_index >= m_len;
}
void skip(int n) {
if (n > 0) {
m_index += n;
}
}
int readPesHeader(PesHeader &pesHeader, int length, int startCode);
int demuxStream(bool bdemux, int startCode, int channel);
private:
int m_index;
int m_len;
u8* m_buf;
u8* m_audioStream;
int m_audiopos;
int m_audioChannel;
int m_readSize;
};

View file

@ -189,9 +189,9 @@ void FramebufferManager::DrawPixels(const u8 *framebuf, int pixelFormat, int lin
for (int x = 0; x < 480; x++)
{
dst[x * 4] = src[x * 4];
dst[x * 4 + 1] = src[x * 4 + 3];
dst[x * 4 + 1] = src[x * 4 + 1];
dst[x * 4 + 2] = src[x * 4 + 2];
dst[x * 4 + 3] = src[x * 4 + 1];
dst[x * 4 + 3] = src[x * 4 + 3];
}
}
break;
@ -214,6 +214,10 @@ void FramebufferManager::DrawPixels(const u8 *framebuf, int pixelFormat, int lin
}
glBindTexture(GL_TEXTURE_2D,backbufTex);
if (g_Config.bLinearFiltering)
{
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
glTexSubImage2D(GL_TEXTURE_2D,0,0,0,480,272, GL_RGBA, GL_UNSIGNED_BYTE, convBuf);
float x, y, w, h;