* add a THP player, based on dimok & thakis' work (requires libjpeg)

git-svn-id: http://wiiqt.googlecode.com/svn/trunk@48 389f4c8b-5dfe-645f-db0e-df882bc27289
giantpune@gmail.com 2011-01-06 11:56:02 +00:00
parent a041dba1b3
commit 644aa141bb
20 changed files with 2219 additions and 0 deletions

BIN
thp_player/ffw.png Normal file

Binary file not shown; size 5.3 KiB.

890
thp_player/gcvid.cpp Executable file

@@ -0,0 +1,890 @@
/***************************************************************************
* Copyright (C) 2010
* by thakis
*
* Modification and adjustment for the Wii by Dimok
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any
* damages arising from the use of this software.
*
* Permission is granted to anyone to use this software for any
* purpose, including commercial applications, and to alter it and
* redistribute it freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you
* must not claim that you wrote the original software. If you use
* this software in a product, an acknowledgment in the product
* documentation would be appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and
* must not be misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source
* distribution.
*
* gcvid.cpp
***************************************************************************/
#include "gcvid.h"
#include <cstdlib> //NULL
#include <cstring> //memcmp
#include <string>
#include <cassert>
#include <QtEndian>
#include <QtGui>
using namespace std;
void readThpHeader(FILE* f, ThpHeader& h)
{
fread(&h, sizeof(h), 1, f);
h.version = qFromBigEndian(h.version);
h.maxBufferSize = qFromBigEndian(h.maxBufferSize);
h.maxAudioSamples = qFromBigEndian(h.maxAudioSamples);
u32 * ptr = (u32 *) &h.fps;
*ptr = qFromBigEndian(*ptr);
h.numFrames = qFromBigEndian(h.numFrames);
h.firstFrameSize = qFromBigEndian(h.firstFrameSize);
h.dataSize = qFromBigEndian(h.dataSize);
h.componentDataOffset = qFromBigEndian(h.componentDataOffset);
h.offsetsDataOffset = qFromBigEndian(h.offsetsDataOffset);
h.firstFrameOffset = qFromBigEndian(h.firstFrameOffset);
h.lastFrameOffset = qFromBigEndian(h.lastFrameOffset);
}
void readThpComponents(FILE* f, ThpComponents& c)
{
fread(&c, sizeof(c), 1, f);
c.numComponents = qFromBigEndian(c.numComponents);
}
void readThpVideoInfo(FILE* f, ThpVideoInfo& i, bool isVersion11)
{
fread(&i, sizeof(i), 1, f);
i.width = qFromBigEndian(i.width);
i.height = qFromBigEndian(i.height);
if(isVersion11)
i.unknown = qFromBigEndian(i.unknown);
else
{
i.unknown = 0;
fseek(f, -4, SEEK_CUR);
}
}
void readThpAudioInfo(FILE* f, ThpAudioInfo& i, bool isVersion11)
{
fread(&i, sizeof(i), 1, f);
i.numChannels = qFromBigEndian(i.numChannels);
i.frequency = qFromBigEndian(i.frequency);
i.numSamples = qFromBigEndian(i.numSamples);
if(isVersion11)
i.numData = qFromBigEndian(i.numData);
else
{
i.numData = 1;
fseek(f, -4, SEEK_CUR);
}
}
void readMthHeader(FILE* f, MthHeader& h)
{
fread(&h, sizeof(h), 1, f);
h.unknown = qFromBigEndian(h.unknown);
h.unknown2 = qFromBigEndian(h.unknown2);
h.maxFrameSize = qFromBigEndian(h.maxFrameSize);
h.width = qFromBigEndian(h.width);
h.height = qFromBigEndian(h.height);
h.fps = qFromBigEndian(h.fps);
h.numFrames = qFromBigEndian(h.numFrames);
h.offset = qFromBigEndian(h.offset);
h.unknown5 = qFromBigEndian(h.unknown5);
h.firstFrameSize = qFromBigEndian(h.firstFrameSize);
}
struct DecStruct
{
const u8* currSrcByte;
u32 blockCount;
u8 index;
u8 shift;
};
void thpAudioInitialize(DecStruct& s, const u8* srcStart)
{
s.currSrcByte = srcStart;
s.blockCount = 2;
s.index = (*s.currSrcByte >> 4) & 0x7;
s.shift = *s.currSrcByte & 0xf;
++s.currSrcByte;
}
s32 thpAudioGetNewSample(DecStruct& s)
{
//the following if is executed once every 14 calls
//to thpAudioGetNewSample() (once for each
//14-sample microblock) because blockCount & 0xf
//cycles through 16 values and starts at 2
if((s.blockCount & 0xf) == 0)
{
s.index = (*s.currSrcByte >> 4) & 0x7;
s.shift = *s.currSrcByte & 0xf;
++s.currSrcByte;
s.blockCount += 2;
}
s32 ret;
if((s.blockCount & 1) != 0)
{
s32 t = (*s.currSrcByte << 28) & 0xf0000000;
ret = t >> 28; //this has to be an arithmetic shift
++s.currSrcByte;
}
else
{
s32 t = (*s.currSrcByte << 24) & 0xf0000000;
ret = t >> 28; //this has to be an arithmetic shift
}
++s.blockCount;
return ret;
}
int thpAudioDecode(s16 * destBuffer, const u8* srcBuffer, bool separateChannelsInOutput, bool isInputStereo)
{
if(destBuffer == NULL || srcBuffer == NULL)
return 0;
ThpAudioBlockHeader* head = (ThpAudioBlockHeader*)srcBuffer;
u32 channelInSize = qFromBigEndian(head->channelSize);
u32 numSamples = qFromBigEndian(head->numSamples);
const u8* srcChannel1 = srcBuffer + sizeof(ThpAudioBlockHeader);
const u8* srcChannel2 = srcChannel1 + channelInSize;
s16* table1 = head->table1;
s16* table2 = head->table2;
s16* destChannel1, * destChannel2;
u32 delta;
if(separateChannelsInOutput)
{
//separated channels in output
destChannel1 = destBuffer;
destChannel2 = destBuffer + numSamples;
delta = 1;
}
else
{
//interleaved channels in output
destChannel1 = destBuffer;
destChannel2 = destBuffer + 1;
delta = 2;
}
DecStruct s;
if(!isInputStereo)
{
//mono channel in input
thpAudioInitialize(s, srcChannel1);
s16 prev1 = qFromBigEndian(*(s16*)(srcBuffer + 72));
s16 prev2 = qFromBigEndian(*(s16*)(srcBuffer + 74));
for(u32 i = 0; i < numSamples; ++i)
{
s64 res = (s64)thpAudioGetNewSample(s);
res = ((res << s.shift) << 11); //convert to 53.11 fixed point
//these values are 53.11 fixed point numbers
s64 val1 = qFromBigEndian(table1[2*s.index]);
s64 val2 = qFromBigEndian(table1[2*s.index + 1]);
//convert to 48.16 fixed point
res = (val1*prev1 + val2*prev2 + res) << 5;
//rounding:
u16 decimalPlaces = res & 0xffff;
if(decimalPlaces > 0x8000) //i.e. > 0.5
//round up
++res;
else if(decimalPlaces == 0x8000) //i.e. == 0.5
if((res & 0x10000) != 0)
//round up every other number
++res;
//get nonfractional parts of number, clamp to [-32768, 32767]
s32 final = (res >> 16);
if(final > 32767) final = 32767;
else if(final < -32768) final = -32768;
prev2 = prev1;
prev1 = final;
*destChannel1 = (s16)final;
*destChannel2 = (s16)final;
destChannel1 += delta;
destChannel2 += delta;
}
}
else
{
//two channels in input - nearly the same as for one channel,
//so no comments here (different lines are marked with XXX)
thpAudioInitialize(s, srcChannel1);
s16 prev1 = qFromBigEndian(*(s16*)(srcBuffer + 72));
s16 prev2 = qFromBigEndian(*(s16*)(srcBuffer + 74));
for(u32 i = 0; i < numSamples; ++i)
{
s64 res = (s64)thpAudioGetNewSample(s);
res = ((res << s.shift) << 11);
s64 val1 = qFromBigEndian(table1[2*s.index]);
s64 val2 = qFromBigEndian(table1[2*s.index + 1]);
res = (val1*prev1 + val2*prev2 + res) << 5;
u16 decimalPlaces = res & 0xffff;
if(decimalPlaces > 0x8000)
++res;
else if(decimalPlaces == 0x8000)
if((res & 0x10000) != 0)
++res;
s32 final = (res >> 16);
if(final > 32767) final = 32767;
else if(final < -32768) final = -32768;
prev2 = prev1;
prev1 = final;
*destChannel1 = (s16)final;
destChannel1 += delta;
}
thpAudioInitialize(s, srcChannel2);//XXX
prev1 = qFromBigEndian(*(s16*)(srcBuffer + 76));//XXX
prev2 = qFromBigEndian(*(s16*)(srcBuffer + 78));//XXX
for(u32 j = 0; j < numSamples; ++j)
{
s64 res = (s64)thpAudioGetNewSample(s);
res = ((res << s.shift) << 11);
s64 val1 = qFromBigEndian(table2[2*s.index]);//XXX
s64 val2 = qFromBigEndian(table2[2*s.index + 1]);//XXX
res = (val1*prev1 + val2*prev2 + res) << 5;
u16 decimalPlaces = res & 0xffff;
if(decimalPlaces > 0x8000)
++res;
else if(decimalPlaces == 0x8000)
if((res & 0x10000) != 0)
++res;
s32 final = (res >> 16);
if(final > 32767) final = 32767;
else if(final < -32768) final = -32768;
prev2 = prev1;
prev1 = final;
*destChannel2 = (s16)final;
destChannel2 += delta;
}
}
return numSamples;
}
VideoFrame::VideoFrame()
: _data(NULL), _w(0), _h(0), _p(0)
{}
VideoFrame::~VideoFrame()
{ dealloc(); }
void VideoFrame::resize(int width, int height)
{
if(width == _w && height == _h)
return;
dealloc();
_w = width;
_h = height;
//24 bpp, 4 byte padding
_p = 3*width;
_p += (4 - _p%4)%4;
_data = new u8[_p*_h];
}
int VideoFrame::getWidth() const
{ return _w; }
int VideoFrame::getHeight() const
{ return _h; }
int VideoFrame::getPitch() const
{ return _p; }
u8* VideoFrame::getData()
{ return _data; }
const u8* VideoFrame::getData() const
{ return _data; }
void VideoFrame::dealloc()
{
if(_data != NULL)
delete [] _data;
_data = NULL;
_w = _h = _p = 0;
}
//swaps red and blue channel of a video frame
void swapRB(VideoFrame& f)
{
u8* currLine = f.getData();
int hyt = f.getHeight();
int pitch = f.getPitch();
for(int y = 0; y < hyt; ++y)
{
for(int x = 0, x2 = 2; x < pitch; x += 3, x2 += 3)
{
u8 t = currLine[x];
currLine[x] = currLine[x2];
currLine[x2] = t;
}
currLine += pitch;
}
}
enum FILETYPE
{
THP, MTH, JPG,
UNKNOWN = -1
};
FILETYPE getFiletype(FILE* f)
{
long t = ftell(f);
fseek(f, 0, SEEK_SET);
u8 buff[4];
fread(buff, 1, 4, f);
FILETYPE ret = UNKNOWN;
if(memcmp("THP\0", buff, 4) == 0)
ret = THP;
else if(memcmp("MTHP", buff, 4) == 0)
ret = MTH;
else if(buff[0] == 0xff && buff[1] == 0xd8)
ret = JPG;
fseek(f, t, SEEK_SET);
return ret;
}
long getFilesize(FILE* f)
{
long t = ftell(f);
fseek(f, 0, SEEK_END);
long ret = ftell(f);
fseek(f, t, SEEK_SET);
return ret;
}
void decodeJpeg(const u8* data, int size, VideoFrame& dest);
VideoFile::VideoFile(FILE* f)
: _f(f)
{}
VideoFile::~VideoFile()
{
if(_f != NULL)
fclose(_f);
_f = NULL;
}
int VideoFile::getWidth() const
{ return 0; }
int VideoFile::getHeight() const
{ return 0; }
float VideoFile::getFps() const
{ return 0.f; }
int VideoFile::getFrameCount() const
{ return 0; }
int VideoFile::getCurrentFrameNr() const
{ return 0; }
void VideoFile::loadNextFrame()
{}
void VideoFile::SetFrameNo( u32 frameNo ){ Q_UNUSED( frameNo ); }
void VideoFile::getCurrentFrame(VideoFrame& f) const
{
Q_UNUSED( f ); }
bool VideoFile::hasSound() const
{ return false; }
int VideoFile::getNumChannels() const
{ return 0; }
int VideoFile::getFrequency() const
{ return 0; }
int VideoFile::getMaxAudioSamples() const
{ return 0; }
int VideoFile::getCurrentBuffer(s16* data) const
{
Q_UNUSED( data );
return 0; }
void VideoFile::loadFrame(VideoFrame& frame, const u8* data, int size) const
{
decodeJpeg(data, size, frame);
}
ThpVideoFile::ThpVideoFile(FILE* f)
: VideoFile(f)
{
readThpHeader(f, _head);
//this is just to find files that have this field != 0;
//I don't have any such file
assert(_head.offsetsDataOffset == 0);
readThpComponents(f, _components);
for(u32 i = 0; i < _components.numComponents; ++i)
{
if(_components.componentTypes[i] == 0) //video
readThpVideoInfo(_f, _videoInfo, _head.version == 0x00011000);
else if(_components.componentTypes[i] == 1) //audio
{
readThpAudioInfo(_f, _audioInfo, _head.version == 0x00011000);
assert(_head.maxAudioSamples != 0);
}
}
_numInts = 3;
if(_head.maxAudioSamples != 0)
_numInts = 4;
_currFrameNr = -1;
_nextFrameOffset = _head.firstFrameOffset;
_nextFrameSize = _head.firstFrameSize;
_currFrameData.resize(_head.maxBufferSize); //include some padding
loadNextFrame();
}
int ThpVideoFile::getWidth() const
{ return _videoInfo.width; }
int ThpVideoFile::getHeight() const
{ return _videoInfo.height; }
float ThpVideoFile::getFps() const
{ return _head.fps; }
int ThpVideoFile::getFrameCount() const
{ return _head.numFrames; }
int ThpVideoFile::getCurrentFrameNr() const
{ return _currFrameNr; }
//TODO, really seek to correct frame
void ThpVideoFile::SetFrameNo( u32 frameNo )
{
Q_UNUSED( frameNo );
_currFrameNr = 0;
_nextFrameOffset = _head.firstFrameOffset;
_nextFrameSize = _head.firstFrameSize;
}
void ThpVideoFile::loadNextFrame()
{
++_currFrameNr;
if(_currFrameNr >= (int) _head.numFrames)
{
_currFrameNr = 0;
_nextFrameOffset = _head.firstFrameOffset;
_nextFrameSize = _head.firstFrameSize;
}
fseek(_f, _nextFrameOffset, SEEK_SET);
fread(&_currFrameData[0], 1, _nextFrameSize, _f);
_nextFrameOffset += _nextFrameSize;
_nextFrameSize = qFromBigEndian(*(u32*)&_currFrameData[0]);
}
void ThpVideoFile::getCurrentFrame(VideoFrame& f) const
{
int size = qFromBigEndian(*(u32*)(&_currFrameData[0] + 8));
loadFrame(f, &_currFrameData[0] + 4*_numInts, size);
}
bool ThpVideoFile::hasSound() const
{ return _head.maxAudioSamples != 0; }
int ThpVideoFile::getNumChannels() const
{
if(hasSound())
return _audioInfo.numChannels;
else
return 0;
}
int ThpVideoFile::getFrequency() const
{
if(hasSound())
return _audioInfo.frequency;
else
return 0;
}
int ThpVideoFile::getMaxAudioSamples() const
{ return _head.maxAudioSamples; }
int ThpVideoFile::getCurrentBuffer(s16* data) const
{
if(!hasSound())
return 0;
int jpegSize = qFromBigEndian(*(u32*)(&_currFrameData[0] + 8));
const u8* src = &_currFrameData[0] + _numInts*4 + jpegSize;
return thpAudioDecode(data, src, false, _audioInfo.numChannels == 2);
}
MthVideoFile::MthVideoFile(FILE* f)
: VideoFile(f)
{
readMthHeader(f, _head);
_currFrameNr = -1;
_nextFrameOffset = _head.offset;
_nextFrameSize = _head.firstFrameSize;
_thisFrameSize = 0;
_currFrameData.resize(_head.maxFrameSize);
loadNextFrame();
}
int MthVideoFile::getWidth() const
{ return _head.width; }
int MthVideoFile::getHeight() const
{ return _head.height; }
float MthVideoFile::getFps() const
{
return (float) 1.0f*_head.fps; //TODO: This has to be in there somewhere
}
int MthVideoFile::getFrameCount() const
{
return _head.numFrames;
}
int MthVideoFile::getCurrentFrameNr() const
{ return _currFrameNr; }
//TODO, really seek to correct frame
void MthVideoFile::SetFrameNo( u32 frameNo )
{
Q_UNUSED( frameNo );
_currFrameNr = 0;
_nextFrameOffset = _head.offset;
_nextFrameSize = _head.firstFrameSize;
}
void MthVideoFile::loadNextFrame()
{
++_currFrameNr;
if(_currFrameNr >= (int) _head.numFrames)
{
_currFrameNr = 0;
_nextFrameOffset = _head.offset;
_nextFrameSize = _head.firstFrameSize;
}
fseek(_f, _nextFrameOffset, SEEK_SET);
_currFrameData.resize(_nextFrameSize);
fread(&_currFrameData[0], 1, _nextFrameSize, _f);
_thisFrameSize = _nextFrameSize;
u32 nextSize;
nextSize = qFromBigEndian(*(u32*)(&_currFrameData[0]));
_nextFrameOffset += _nextFrameSize;
_nextFrameSize = nextSize;
}
void MthVideoFile::getCurrentFrame(VideoFrame& f) const
{
int size = _thisFrameSize;
loadFrame(f, &_currFrameData[0] + 4, size - 4);
}
JpgVideoFile::JpgVideoFile(FILE* f)
: VideoFile(f)
{
vector<u8> data(getFilesize(f));
fread(&data[0], 1, getFilesize(f), f);
loadFrame(_currFrame, &data[0], getFilesize(f));
}
int JpgVideoFile::getWidth() const
{ return _currFrame.getWidth(); }
int JpgVideoFile::getHeight() const
{ return _currFrame.getHeight(); }
int JpgVideoFile::getFrameCount() const
{ return 1; }
void JpgVideoFile::getCurrentFrame(VideoFrame& f) const
{
f.resize(_currFrame.getWidth(), _currFrame.getHeight());
memcpy(f.getData(), _currFrame.getData(),f.getPitch()*f.getHeight());
}
VideoFile* openVideo(const string& fileName)
{
FILE* f = fopen(fileName.c_str(), "rb");
if(f == NULL)
return NULL;
FILETYPE type = getFiletype(f);
switch(type)
{
case THP:
return new ThpVideoFile(f);
case MTH:
return new MthVideoFile(f);
case JPG:
return new JpgVideoFile(f);
default:
fclose(f);
return NULL;
}
}
void closeVideo(VideoFile*& vf)
{
if(vf != NULL)
delete vf;
vf = NULL;
}
//as mentioned above, we have to convert 0xff to 0xff 0x00
//after the image data has begun (i.e., after the 0xff 0xda marker)
//but we must not convert the end-of-image marker (0xff 0xd9)
//this way. There may be 0xff 0xd9 bytes embedded in the image
//data though, so I add 4 bytes to the input buffer
//and fill them with zeroes and check for 0xff 0xd9 0 0
//as end-of-image marker. this is not correct, but works
//and is easier to code... ;-)
//a better solution would be to patch jpeglib so that this conversion
//is not necessary
u8 endBytesThp[] = { 0xff, 0xd9, 0, 0 }; //used in thp files
u8 endBytesMth[] = { 0xff, 0xd9, 0xff, 0 }; //used in mth files
int countRequiredSize(const u8* data, int size, int& start, int& end)
{
start = 2*size;
int count = 0;
int j;
for(j = size - 1; data[j] == 0; --j)
; //search end of data
if(data[j] == 0xd9) //thp file
end = j - 1;
else if(data[j] == 0xff) //mth file
end = j - 2;
for(int i = 0; i < end; ++i)
{
if(data[i] == 0xff)
{
//if i == srcSize - 1, then this would normally overrun src - that's why 4 padding
//bytes are included at the end of src
if(data[i + 1] == 0xda && start == 2*size)
start = i;
if(i > start)
++count;
}
}
return size + count;
}
void convertToRealJpeg(u8* dest, const u8* src, int srcSize, int start, int end)
{
int di = 0;
for(int i = 0; i < srcSize; ++i, ++di)
{
dest[di] = src[i];
//if i == srcSize - 1, then this would normally overrun src - that's why 4 padding
//bytes are included at the end of src
if(src[i] == 0xff && i > start && i < end)
{
++di;
dest[di] = 0;
}
}
}
void decodeRealJpeg(const u8* data, int size, VideoFrame& dest);
void decodeJpeg(const u8* data, int size, VideoFrame& dest)
{
//convert format so jpeglib understands it...
int start = 0, end = 0;
int newSize = countRequiredSize(data, size, start, end);
u8* buff = new u8[newSize];
convertToRealJpeg(buff, data, size, start, end);
//...and feed it to jpeglib
decodeRealJpeg(buff, newSize, dest);
delete [] buff;
}
extern "C"
{
#include <jpeglib.h>
}
//the following functions are needed to let
//libjpeg read from memory instead of from a file...
//it's a little clumsy to do :-|
const u8* g_jpegBuffer;
int g_jpegSize;
bool g_isLoading = false;
void jpegInitSource(j_decompress_ptr cinfo)
{ Q_UNUSED( cinfo ); }
boolean jpegFillInputBuffer(j_decompress_ptr cinfo)
{
cinfo->src->next_input_byte = g_jpegBuffer;
cinfo->src->bytes_in_buffer = g_jpegSize;
return TRUE;
}
void jpegSkipInputData(j_decompress_ptr cinfo, long num_bytes)
{
cinfo->src->next_input_byte += num_bytes;
cinfo->src->bytes_in_buffer -= num_bytes;
}
boolean jpegResyncToRestart(j_decompress_ptr cinfo, int desired)
{
jpeg_resync_to_restart(cinfo, desired);
return TRUE;
}
void jpegTermSource(j_decompress_ptr cinfo)
{ Q_UNUSED( cinfo ); }
void jpegErrorHandler(j_common_ptr cinfo)
{
char buff[1024];
(*cinfo->err->format_message)(cinfo, buff);
//MessageBox(g_hWnd, buff, "JpegLib error:", MB_OK);
}
void decodeRealJpeg(const u8* data, int size, VideoFrame& dest)
{
if(g_isLoading)
return;
g_isLoading = true;
/*
//debug
FILE* fout = fopen("curr.jpg", "wb");
fwrite(data, size, 1, fout);
fclose(fout);
//*/
//decompressor state
jpeg_decompress_struct cinfo;
jpeg_error_mgr errorMgr;
//read from memory manager
jpeg_source_mgr sourceMgr;
cinfo.err = jpeg_std_error(&errorMgr);
errorMgr.error_exit = jpegErrorHandler;
jpeg_create_decompress(&cinfo);
//setup read-from-memory
g_jpegBuffer = data;
g_jpegSize = size;
sourceMgr.bytes_in_buffer = size;
sourceMgr.next_input_byte = data;
sourceMgr.init_source = jpegInitSource;
sourceMgr.fill_input_buffer = jpegFillInputBuffer;
sourceMgr.skip_input_data = jpegSkipInputData;
sourceMgr.resync_to_restart = jpegResyncToRestart;
sourceMgr.term_source = jpegTermSource;
cinfo.src = &sourceMgr;
jpeg_read_header(&cinfo, TRUE);
#if 0
//set quality/speed parameters to speed:
cinfo.do_fancy_upsampling = FALSE;
cinfo.do_block_smoothing = FALSE;
//this actually slows decoding down:
//cinfo.dct_method = JDCT_FASTEST;
#endif
jpeg_start_decompress(&cinfo);
dest.resize(cinfo.output_width, cinfo.output_height);
if(cinfo.num_components == 3)
{
int y = 0;
while(cinfo.output_scanline < cinfo.output_height)
{
//invert image because windows wants it upside down
u8* destBuffer = &dest.getData()[y*dest.getPitch()];
//jpeg_read_scanlines() takes an array of row pointers, hence the pointer to a pointer
jpeg_read_scanlines(&cinfo, &destBuffer, 1);
++y;
}
//jpeglib gives an error in jpeg_finish_decompress() if not all
//scanlines are read by the application... :-|
//(but because we read all scanlines, it's not really needed)
cinfo.output_scanline = cinfo.output_height;
}
else
{
//MessageBox(g_hWnd, "Only RGB videos are currently supported.", "oops?", MB_OK);
}
jpeg_finish_decompress(&cinfo);
jpeg_destroy_decompress(&cinfo);
g_isLoading = false;
}
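A minimal usage sketch of the gcvid API defined above (openVideo / getCurrentFrame / getCurrentBuffer / loadNextFrame); this is an editorial illustration rather than part of the commit, and the "video.thp" path is a placeholder:

#include "gcvid.h"
#include <cstdio>
#include <vector>

int main()
{
    //open a THP/MTH/JPG file; openVideo() returns NULL on failure
    VideoFile* video = openVideo( "video.thp" );
    if( !video )
        return 1;

    VideoFrame frame;
    //decoded audio is 2-channel interleaved, at most maxAudioSamples per channel
    std::vector<s16> audio( video->getMaxAudioSamples() * 2 );

    for( int i = 0; i < video->getFrameCount(); ++i )
    {
        video->getCurrentFrame( frame ); //24 bpp RGB, scanlines padded to 4 bytes
        int samples = 0;
        if( video->hasSound() )
            samples = video->getCurrentBuffer( &audio[0] ); //samples per channel
        printf( "frame %d: %dx%d, %d audio samples\n", video->getCurrentFrameNr(),
                frame.getWidth(), frame.getHeight(), samples );
        video->loadNextFrame();
    }
    closeVideo( video );
    return 0;
}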

341
thp_player/gcvid.h Executable file

@@ -0,0 +1,341 @@
/***************************************************************************
* Copyright (C) 2010
* by thakis
*
* Modification and adjustment for the Wii by Dimok
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any
* damages arising from the use of this software.
*
* Permission is granted to anyone to use this software for any
* purpose, including commercial applications, and to alter it and
* redistribute it freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you
* must not claim that you wrote the original software. If you use
* this software in a product, an acknowledgment in the product
* documentation would be appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and
* must not be misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source
* distribution.
*
* gcvid.h
***************************************************************************/
#ifndef THAKIS_GCVID_H
#define THAKIS_GCVID_H THAKIS_GCVID_H
#include <stdio.h> //FILE*
#include <string>
#include <vector>
//typedef unsigned char boolean;
typedef unsigned char u8;
typedef short s16;
typedef unsigned short u16;
typedef int s32;
typedef unsigned int u32;
typedef long long s64;
typedef float f32;
#pragma pack(push, 1)
/////////////////////////////////////////////////////////////////////
//THP
struct ThpHeader
{
char tag[4]; //'THP\0'
//from monk's thp player:
u32 version; //0x00011000 = 1.1, 0x00010000 = 1.0
u32 maxBufferSize;
u32 maxAudioSamples; //!= 0 if sound is stored in file
float fps; //usually 29.something (=0x41efc28f) for ntsc
u32 numFrames;
u32 firstFrameSize; //size of first frame
u32 dataSize; //size of file - ThpHeader.offset
//from monk's thp player:
u32 componentDataOffset; //ThpComponents stored here
u32 offsetsDataOffset; //?? if != 0, offset to table with offsets of all frames?
u32 firstFrameOffset;
u32 lastFrameOffset;
};
//monk:
struct ThpComponents
{
u32 numComponents; //usually 1 or 2 (video or video + audio)
//component type 0 is video, type 1 is audio,
//type 0xff is "no component" (numComponent many entries
//are != 0xff)
u8 componentTypes[16];
};
struct ThpVideoInfo
{
u32 width;
u32 height;
u32 unknown; //only for version 1.1 thp files
};
struct ThpAudioInfo
{
u32 numChannels;
u32 frequency;
u32 numSamples;
u32 numData; //only for version 1.1 - that many
//audio blocks are after each video block
//(for surround sound?)
};
//endmonk
//a frame image is basically a normal jpeg image (without
//the jfif application marker), the only important difference
//is that after the image start marker (0xff 0xda) values
//of 0xff are simply written as 0xff whereas the jpeg
//standard requires them to be written as 0xff 0x00 because
//0xff is the start of a 2-byte control code in jpeg
//frame (offsets relative to frame start):
//u32 total (image, sound, etc) size of NEXT frame
//u32 size1 at 0x04 (total size of PREV frame according to monk)
//u32 image data size at 0x08
//size of one audio block ONLY IF THE FILE HAS SOUND. ThpAudioInfo.numData
//many audio blocks after jpeg data
//jpeg data
//audio block(s)
struct ThpAudioBlockHeader
{
//size 80 byte
u32 channelSize; //size of one channel in bytes
u32 numSamples; //number of samples/channel
s16 table1[16]; //table for first channel
s16 table2[16]; //table for second channel
s16 channel1Prev1;
s16 channel1Prev2;
s16 channel2Prev1;
s16 channel2Prev2;
};
//audio block:
//u32 size of this audioblock
//
//u32 numBytes/channel of audioblock (that many bytes per channel after the adpcm table)
//
//u32 number of samples per channel
//
//2*16 shorts adpcm table (one per channel - always stored both,
//even for mono files), 5.11 fixed point values
//
//4 s16: 2 shorts prev1 and prev2 for each channel (even for mono files)
//
//sound data
//sound data:
//8 bytes encode 14 samples:
//the first byte stores index (upper nibble) and shift (lower nibble),
//the following 7 bytes contain 14 samples of 4 bits each
/////////////////////////////////////////////////////////////////////
//MTH ("mute thp"?)
//similar to a thp file, but without sound
struct MthHeader
{
//one of the unknown has to be fps in some form
char tag[4]; //'MTHP'
u32 unknown;
u32 unknown2;
u32 maxFrameSize;
u32 width;
u32 height;
u32 fps;
u32 numFrames;
u32 offset;
u32 unknown5;
u32 firstFrameSize;
//5 padding u32's follow
};
//frame:
//u32 size of NEXT frame
//jpeg data
//see thp (above) for jpeg format. there's a small difference, though:
//mth jpegs end with 0xff 0xd9 0xff instead of 0xff 0xd9
#pragma pack(pop)
//little helper class that represents one frame of video
//data is 24 bpp, scanlines aligned to 4 byte boundary
class VideoFrame
{
public:
VideoFrame();
~VideoFrame();
void resize(int width, int height);
int getWidth() const;
int getHeight() const;
int getPitch() const;
u8* getData();
const u8* getData() const;
void dealloc();
private:
u8* _data;
int _w;
int _h;
int _p; //pitch in bytes
//copy constructor and assignment operator are not allowed
//VideoFrame(const VideoFrame& f);
VideoFrame& operator=(const VideoFrame& f);
};
//swaps red and blue channel of a video frame
void swapRB(VideoFrame& f);
class VideoFile
{
public:
VideoFile(FILE* f);
virtual ~VideoFile();
virtual int getWidth() const;
virtual int getHeight() const;
virtual float getFps() const;
virtual int getFrameCount() const;
virtual int getCurrentFrameNr() const;
virtual void loadNextFrame();
virtual void getCurrentFrame(VideoFrame& frame) const;
virtual void SetFrameNo( u32 frameNo );
//sound support:
virtual bool hasSound() const;
virtual int getNumChannels() const;
virtual int getFrequency() const;
virtual int getMaxAudioSamples() const;
virtual int getCurrentBuffer(s16* data) const;
protected:
FILE* _f;
//void loadFrame(long offset, int size);
void loadFrame(VideoFrame& frame, const u8* data, int size) const;
};
VideoFile* openVideo(const std::string& fileName);
void closeVideo(VideoFile*& vf);
class ThpVideoFile : public VideoFile
{
public:
ThpVideoFile(FILE* f);
virtual int getWidth() const;
virtual int getHeight() const;
virtual float getFps() const;
virtual int getFrameCount() const;
virtual int getCurrentFrameNr() const;
virtual void loadNextFrame();
virtual void getCurrentFrame(VideoFrame& frame) const;
virtual void SetFrameNo( u32 frameNo );
virtual bool hasSound() const;
virtual int getNumChannels() const;
virtual int getFrequency() const;
virtual int getMaxAudioSamples() const;
virtual int getCurrentBuffer(s16* data) const;
protected:
ThpHeader _head;
ThpComponents _components;
ThpVideoInfo _videoInfo;
ThpAudioInfo _audioInfo;
int _numInts;
int _currFrameNr;
int _nextFrameOffset;
int _nextFrameSize;
std::vector<u8> _currFrameData;
};
class MthVideoFile : public VideoFile
{
public:
MthVideoFile(FILE* f);
virtual int getWidth() const;
virtual int getHeight() const;
virtual float getFps() const;
virtual int getFrameCount() const;
virtual int getCurrentFrameNr() const;
virtual void SetFrameNo( u32 frameNo );
virtual void loadNextFrame();
virtual void getCurrentFrame(VideoFrame& frame) const;
protected:
MthHeader _head;
int _currFrameNr;
int _nextFrameOffset;
int _nextFrameSize;
int _thisFrameSize;
std::vector<u8> _currFrameData;
};
class JpgVideoFile : public VideoFile
{
public:
JpgVideoFile(FILE* f);
virtual int getWidth() const;
virtual int getHeight() const;
virtual int getFrameCount() const;
virtual void getCurrentFrame(VideoFrame& frame) const;
private:
VideoFrame _currFrame;
};
#endif //THAKIS_GCVID_H
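To make the frame layout used by ThpVideoFile::getCurrentFrame() and getCurrentBuffer() above easier to follow, a hedged sketch of how one buffered frame splits into its pieces; the struct and function names are illustrative only, not part of the commit:

#include "gcvid.h"
#include <QtEndian>
#include <cstddef> //NULL

//a THP frame starts with numInts big-endian u32 header words (3 without
//sound, 4 with sound), followed by the jpeg data, followed by the audio block(s)
struct ThpFramePieces
{
    const u8* jpeg;       //start of the jpeg data
    u32 jpegSize;         //image data size, stored at offset 0x08
    const u8* audioBlock; //first audio block, NULL if the file has no sound
};

ThpFramePieces sliceThpFrame( const u8* frame, bool hasSound )
{
    ThpFramePieces p;
    int numInts = hasSound ? 4 : 3;
    p.jpegSize = qFromBigEndian( *(const u32*)( frame + 8 ) );
    p.jpeg = frame + 4*numInts;
    p.audioBlock = hasSound ? p.jpeg + p.jpegSize : NULL;
    return p;
}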

12
thp_player/main.cpp Normal file

@@ -0,0 +1,12 @@
#include <QtGui/QApplication>
#include "thpwindow.h"
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
Q_INIT_RESOURCE( rc );
ThpWindow w;
w.show();
return a.exec();
}

BIN
thp_player/next.png Normal file

Binary file not shown; size 5.3 KiB.

BIN
thp_player/pause.png Normal file

Binary file not shown; size 4.8 KiB.

BIN
thp_player/play.png Normal file

Binary file not shown; size 4.9 KiB.

BIN
thp_player/prev.png Normal file

Binary file not shown; size 5.2 KiB.

16
thp_player/rc.qrc Normal file

@@ -0,0 +1,16 @@
<RCC>
<qresource prefix="/">
<file>ffw.png</file>
<file>next.png</file>
<file>pause.png</file>
<file>play.png</file>
<file>prev.png</file>
<file>repeat.png</file>
<file>rev.png</file>
<file>star.png</file>
<file>stop.png</file>
<file>vol_high.png</file>
<file>vol_low.png</file>
<file>vol_med.png</file>
</qresource>
</RCC>

BIN
thp_player/repeat.png Normal file

Binary file not shown; size 5.5 KiB.

BIN
thp_player/rev.png Normal file

Binary file not shown; size 5.2 KiB.

BIN
thp_player/star.png Normal file

Binary file not shown; size 5.3 KiB.

BIN
thp_player/stop.png Normal file

Binary file not shown; size 4.9 KiB.

26
thp_player/thp_player.pro Normal file

@@ -0,0 +1,26 @@
#-------------------------------------------------
#
# Project created by QtCreator 2011-01-05T10:05:50
#
#-------------------------------------------------
QT += core gui multimedia
TARGET = thp_player
TEMPLATE = app
SOURCES += main.cpp\
thpwindow.cpp \
gcvid.cpp
HEADERS += thpwindow.h \
gcvid.h
FORMS += thpwindow.ui
CONFIG += static
LIBS += -ljpeg
RESOURCES += \
rc.qrc

376
thp_player/thpwindow.cpp Normal file

@@ -0,0 +1,376 @@
#include "thpwindow.h"
#include "ui_thpwindow.h"
#include "../WiiQt/tools.h"
#define MAX_BUFFERS 50 //in frames
#define BUFFER_DELAY 200 //time to wait between checking buffer size ( msecs )
ThpWindow::ThpWindow(QWidget *parent) : QMainWindow(parent), ui(new Ui::ThpWindow)
{
ui->setupUi( this );
ui->mainToolBar->setVisible( false );
ui->label_fName->clear();
ui->label_fpsT->clear();
ui->label_itemNo->clear();
ui->label_sizeT->clear();
ui->label_timeCur->clear();
ui->label_timeFull->clear();
ui->label_video->clear();
ui->scrollArea->setWidgetResizable( false );
EnableGui( false );
ui->statusBar->addPermanentWidget( ui->label_fName, 0 );
ui->statusBar->addPermanentWidget( ui->label_itemNo, 0 );
AudioOutputDevice = NULL;
AudioOutput = NULL;
videoFile = NULL;
dontBuffer = false;
//currentDir = QDir::currentPath();
currentDir = "/media/Jenna/jenna/c/gui_fork/SSBB_JAP/DATA/files/movie";
//workaround for some bullshit bug in the Qt libs
foreach( const QAudioDeviceInfo &deviceInfo, QAudioDeviceInfo::availableDevices( QAudio::AudioOutput ) )
{
qDebug() << "l:" << deviceInfo.deviceName();
}
connect( &timer, SIGNAL( timeout() ), this, SLOT( ShowNextFrame() ) );
}
ThpWindow::~ThpWindow()
{
dontBuffer = true;
timer.stop();
delete ui;
if(videoFile)
{
closeVideo( videoFile );
videoFile = NULL;
}
if( AudioOutput )
{
AudioOutput->stop();
delete AudioOutput;
AudioOutput = NULL;
}
}
//open file
void ThpWindow::on_actionOpen_triggered()
{
timer.stop();
dontBuffer = true;
playList.clear();
curPlayListPos = 0;
QStringList fNames = QFileDialog::getOpenFileNames( this, tr("Open Files"), currentDir, tr("Wii/GC Videos (*.thp *.mth)") );
if( fNames.isEmpty() )
return;
foreach( QString str, fNames )
playList << str;
currentDir = QFileInfo( playList.at( 0 ) ).absolutePath();
PlayPlayListItem( 0 );
}
//open folder
void ThpWindow::on_actionOpen_Folder_triggered()
{
timer.stop();
dontBuffer = true;
playList.clear();
curPlayListPos = 0;
QString dirName = QFileDialog::getExistingDirectory( this, tr( "Open Folder" ), currentDir );
if( dirName.isEmpty() )
return;
currentDir = dirName;
QDir dir( dirName );
QFileInfoList fil = dir.entryInfoList( QStringList() << "*.thp" << "*.mth" , QDir::Files );
//qDebug() << "found" << fil.size() << "items";
foreach( QFileInfo fi, fil )
playList << fi.absoluteFilePath();
PlayPlayListItem( 0 );
}
void ThpWindow::PlayPlayListItem( quint32 i )
{
//qDebug() << "ThpWindow::PlayPlayListItem" << i;
if( !playList.size() ) //no videos to play
{
ui->label_itemNo->clear();
return;
}
if( i >= (quint32)playList.size() ) //all videos are played
{
curPlayListPos = 0; //skip back to beginning
if( !ui->pushButton_loop->isChecked() ) //dont loop
{
ui->pushButton_playPause->setChecked( false );
timer.stop();
dontBuffer = true;
}
}
ui->label_itemNo->setText( QString( "%1 / %2").arg( curPlayListPos + 1 ).arg( playList.size() ) );
LoadVideo( playList.at( curPlayListPos ) );
}
void ThpWindow::LoadVideo( const QString &path )
{
//qDebug() << "ThpWindow::LoadVideo" << path;
EnableGui( false );
std::string filepath = path.toUtf8().constData();
ui->progressBar_buffer->setValue( 0 );
//stop current video
timer.stop();
if( videoFile )
{
closeVideo( videoFile );
videoFile = NULL;
}
Frames.clear();
SoundBuffers.clear();
videoFile = openVideo( filepath );
if( !videoFile )
{
QMessageBox::information( this, tr("Player"), tr("Cannot load %1.").arg( path ) );
return;
}
//dontBuffer = false;
ui->label_video->setFixedSize( videoFile->getWidth(), videoFile->getHeight() );
frameCnt = videoFile->getFrameCount();
curFrame = 0;
for( quint8 i = 0; i < 3; i++ )
LoadNextFrame();
CreateAudioOutput();
//show some info in the gui
ui->label_fpsT->setText( QString( "%1" ).arg( videoFile->getFps(), 0, 'f', 3 ) );
ui->label_sizeT->setText( QString( "%1 x %2" ).arg( videoFile->getWidth() ).arg( videoFile->getHeight() ) );
ui->label_fName->setText( QFileInfo( path ).fileName() );
ui->horizontalSlider_pos->setMaximum( frameCnt );
//set timer for animation
qreal delay = 1000.0f/videoFile->getFps();
ui->label_timeFull->setText( TimeTxt( delay * videoFile->getFrameCount() ));
timer.setInterval( delay );
//if play button is clicked, just play
if( ui->pushButton_playPause->isChecked() )
timer.start();
//otherwise just load the first frame
else
ShowNextFrame();
//allow the buttons to work
EnableGui( true );
}
QString ThpWindow::TimeTxt( quint64 msecs )
{
quint32 hours = msecs / 3600000;
msecs -= ( hours * 3600000 );
quint32 minutes = msecs / 60000;
msecs -= ( minutes * 60000 );
quint32 seconds = msecs / 1000;
msecs -= ( seconds * 1000 );
return QString( "%1:%2:%3.%4" ).arg( hours, 2, 10, QChar( '0' ) )
.arg( minutes, 2, 10, QChar( '0' ) )
.arg( seconds, 2, 10, QChar( '0' ) )
.arg( msecs, 3, 10, QChar( '0' ) );
}
void ThpWindow::LoadNextFrame()
{
//qDebug() << "ThpWindow::LoadNextFrame()";
VideoFrame VideoF;
videoFile->loadNextFrame();
videoFile->getCurrentFrame(VideoF);
QImage image(VideoF.getData(), VideoF.getWidth(), VideoF.getHeight(), QImage::Format_RGB888);
if (image.isNull())
return;
Frames.push_back(QPixmap::fromImage(image));
int SoundPos = SoundBuffers.size();
SoundBuffers.resize(SoundBuffers.size()+1);
SoundBuffers[SoundPos].Buffer.resize(videoFile->getMaxAudioSamples()*2);
SoundBuffers[SoundPos].Size = videoFile->getCurrentBuffer(&SoundBuffers[SoundPos].Buffer[0])*2*2;
}
void ThpWindow::ShowNextFrame()
{
//qDebug() << "ThpWindow::ShowNextFrame()" << Frames.size() << curFrame << frameCnt;
if( Frames.size() < 3 )
{
BufferIfNeeded();
return;
}
if( ++curFrame >= frameCnt ) //end of video
{
PlayPlayListItem( ++curPlayListPos );
return;
}
ui->horizontalSlider_pos->setValue( curFrame );
qreal delay = 1000.0f/videoFile->getFps();
ui->label_timeCur->setText( TimeTxt( delay * videoFile->getCurrentFrameNr() ) );
ui->label_video->setPixmap(Frames[2]);
//ui->label_video->setPixmap(Frames[0]);
if( AudioOutputDevice && ui->pushButton_vol->isChecked() )
//&& SoundBuffers.size() > 2
//&& SoundBuffers[ 2 ].Buffer.size()
//&& SoundBuffers[ 2 ].Size )
AudioOutputDevice->write((char *) &SoundBuffers[2].Buffer[0], SoundBuffers[2].Size);
Frames.erase(Frames.begin());
SoundBuffers.erase(SoundBuffers.begin());
}
void ThpWindow::BufferIfNeeded()
{
if( dontBuffer )
return; //break the buffer loop
if( Frames.size() < MAX_BUFFERS )//we need to read a frame
{
LoadNextFrame();
}
//show buffer in the gui
int b = ((float)Frames.size() / (float)MAX_BUFFERS) * 100.0f;
ui->progressBar_buffer->setValue( b );
//wait a bit and call this function again
QTimer::singleShot( BUFFER_DELAY, this, SLOT( BufferIfNeeded() ) );
}
void ThpWindow::CreateAudioOutput()
{
//qDebug() << "ThpWindow::CreateAudioOutput()" << timer.isActive();
if( AudioOutput )
{
AudioOutput->stop();
delete AudioOutput;
AudioOutput = NULL;
}
AudioOutputDevice = NULL;
AudioFormat.setFrequency( videoFile->getFrequency() );
AudioFormat.setChannels( videoFile->getNumChannels() );
AudioFormat.setSampleSize( 16 );
AudioFormat.setCodec( "audio/pcm" );
AudioFormat.setByteOrder( QAudioFormat::LittleEndian );
AudioFormat.setSampleType( QAudioFormat::SignedInt );
QAudioDeviceInfo info( QAudioDeviceInfo::defaultOutputDevice() );
if( !info.isFormatSupported( AudioFormat ) )
{
AudioFormat = info.nearestFormat( AudioFormat );//try to find a usable audio playback format
if( !info.isFormatSupported( AudioFormat ) )
{
qWarning() << "unsupported audio format: can't play anything";
ui->statusBar->showMessage( tr( "Can't find suitable audio format" ), 5000 );
return;
}
}
AudioOutput = new QAudioOutput( AudioFormat, this );
if( !AudioOutput )
{
ui->statusBar->showMessage( tr( "Audio output error" ), 5000 );
qWarning() << "!AudioOutput";
return;
}
AudioOutputDevice = AudioOutput->start();
if( AudioOutput->error() )
{
ui->statusBar->showMessage( tr( "Audio output error" ), 5000 );
qWarning() << "AudioOutput->error()" << AudioOutput->error();
AudioOutput->stop();
AudioOutputDevice = NULL;
}
}
//enable/disable buttons
void ThpWindow::EnableGui( bool enable )
{
ui->pushButton_ffw->setEnabled( enable );
ui->pushButton_loop->setEnabled( enable );
ui->pushButton_next->setEnabled( enable );
ui->pushButton_playPause->setEnabled( enable );
ui->pushButton_prev->setEnabled( enable );
ui->pushButton_rewind->setEnabled( enable );
ui->pushButton_stop->setEnabled( enable );
ui->pushButton_vol->setEnabled( enable );
}
//play button
void ThpWindow::on_pushButton_playPause_clicked()
{
if( ui->pushButton_playPause->isChecked() )
{
dontBuffer = false;//start buffering again after stopped
timer.start();
}
else
{
//dontBuffer = true;//ok to buffer while paused
timer.stop();
}
}
//next button
void ThpWindow::on_pushButton_next_clicked()
{
PlayPlayListItem( ++curPlayListPos );
}
//prev button
void ThpWindow::on_pushButton_prev_clicked()
{
if( !curPlayListPos )
curPlayListPos = playList.size();
PlayPlayListItem( --curPlayListPos );
}
//stop button
void ThpWindow::on_pushButton_stop_clicked()
{
//stop playback
timer.stop();
ui->pushButton_playPause->setChecked( false );
//clear buffer
dontBuffer = true;
Frames.clear();
SoundBuffers.clear();
ui->progressBar_buffer->setValue( 0 );
//set video to first frame
videoFile->SetFrameNo( 0 );
//read a few frames into buffer
curFrame = 0;
for( quint8 i = 0; i < 3; i++ )
LoadNextFrame();
//show first frame in gui
ShowNextFrame();
}
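One note on the Size arithmetic in ThpWindow::LoadNextFrame() above: getCurrentBuffer() returns samples per channel and the decoded buffer is 2-channel interleaved 16-bit PCM, which is where the literal *2*2 comes from. A small hedged restatement (the helper name is illustrative, not part of the commit):

#include <QtGlobal>

//bytes handed to QAudioOutput for one video frame
static inline qint64 audioBytesForFrame( int samplesPerChannel )
{
    const int channels       = 2;                //gcvid always interleaves two channels
    const int bytesPerSample = sizeof( qint16 ); //signed 16-bit PCM
    return qint64( samplesPerChannel ) * channels * bytesPerSample;
}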

73
thp_player/thpwindow.h Normal file

@@ -0,0 +1,73 @@
#ifndef THPWINDOW_H
#define THPWINDOW_H
#include <QAudioFormat>
#include <QAudioOutput>
#include "gcvid.h"
#include "../WiiQt/includes.h"
namespace Ui {
class ThpWindow;
}
typedef struct
{
std::vector< qint16 > Buffer;
int Size;
} SoundFrame;
class ThpWindow : public QMainWindow
{
Q_OBJECT
public:
explicit ThpWindow( QWidget *parent = 0 );
~ThpWindow();
void LoadVideo( const QString &path );
private:
Ui::ThpWindow *ui;
void LoadNextFrame();
VideoFile * videoFile;
QAudioOutput* AudioOutput;
QIODevice* AudioOutputDevice;
//QBuffer AudioOutputDevice;
QAudioFormat AudioFormat;
std::vector<QPixmap> Frames;
std::vector<SoundFrame> SoundBuffers;
void CreateAudioOutput();
QTimer timer;
bool dontBuffer;
QString TimeTxt( quint64 msecs );
quint64 frameCnt;
quint64 curFrame;
QStringList playList;
quint32 curPlayListPos;
void PlayPlayListItem( quint32 i );
QString currentDir;
void EnableGui( bool enable = true );
QSize MaxSizeForRatio( qreal w, qreal h );
private slots:
//void on_actionFit_To_Window_triggered(bool checked);
void on_pushButton_stop_clicked();
void on_pushButton_prev_clicked();
void on_pushButton_next_clicked();
void on_actionOpen_Folder_triggered();
void on_pushButton_playPause_clicked();
void ShowNextFrame();
void BufferIfNeeded();
void on_actionOpen_triggered();
//protected:
//void resizeEvent ( QResizeEvent * event );
};
#endif // THPWINDOW_H

485
thp_player/thpwindow.ui Normal file

@@ -0,0 +1,485 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>ThpWindow</class>
<widget class="QMainWindow" name="ThpWindow">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>644</width>
<height>714</height>
</rect>
</property>
<property name="windowTitle">
<string>ThpWindow</string>
</property>
<widget class="QWidget" name="centralWidget">
<layout class="QGridLayout" name="gridLayout_2">
<property name="margin">
<number>2</number>
</property>
<property name="spacing">
<number>2</number>
</property>
<item row="0" column="0">
<widget class="QScrollArea" name="scrollArea">
<property name="minimumSize">
<size>
<width>640</width>
<height>520</height>
</size>
</property>
<property name="widgetResizable">
<bool>true</bool>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
<widget class="QWidget" name="scrollAreaWidgetContents_2">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>636</width>
<height>516</height>
</rect>
</property>
<layout class="QGridLayout" name="gridLayout">
<property name="margin">
<number>0</number>
</property>
<property name="spacing">
<number>0</number>
</property>
<item row="0" column="0">
<widget class="QLabel" name="label_video">
<property name="sizePolicy">
<sizepolicy hsizetype="Preferred" vsizetype="Preferred">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="text">
<string>video goes here</string>
</property>
<property name="scaledContents">
<bool>true</bool>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
</layout>
</widget>
</widget>
</item>
<item row="1" column="0">
<layout class="QHBoxLayout" name="horizontalLayout" stretch="1,0">
<property name="spacing">
<number>2</number>
</property>
<property name="sizeConstraint">
<enum>QLayout::SetDefaultConstraint</enum>
</property>
<item>
<layout class="QVBoxLayout" name="verticalLayout_2">
<property name="spacing">
<number>2</number>
</property>
<item>
<layout class="QVBoxLayout" name="verticalLayout">
<property name="spacing">
<number>0</number>
</property>
<item>
<widget class="QSlider" name="horizontalSlider_pos">
<property name="enabled">
<bool>false</bool>
</property>
<property name="minimumSize">
<size>
<width>486</width>
<height>0</height>
</size>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="invertedAppearance">
<bool>false</bool>
</property>
<property name="invertedControls">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label_timeCur">
<property name="text">
<string>curTime</string>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_2">
<property name="spacing">
<number>2</number>
</property>
<item>
<widget class="QPushButton" name="pushButton_prev">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/prev.png</normaloff>:/prev.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>false</bool>
</property>
<property name="checked">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_rewind">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/rev.png</normaloff>:/rev.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>false</bool>
</property>
<property name="checked">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_playPause">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/play.png</normaloff>
<normalon>:/pause.png</normalon>:/play.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>true</bool>
</property>
<property name="checked">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_stop">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/stop.png</normaloff>:/stop.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>false</bool>
</property>
<property name="checked">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_ffw">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/ffw.png</normaloff>:/ffw.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>false</bool>
</property>
<property name="checked">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_next">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/next.png</normaloff>:/next.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>false</bool>
</property>
<property name="checked">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="pushButton_loop">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/repeat.png</normaloff>:/repeat.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>true</bool>
</property>
<property name="checked">
<bool>false</bool>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer">
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QPushButton" name="pushButton_vol">
<property name="text">
<string/>
</property>
<property name="icon">
<iconset resource="rc.qrc">
<normaloff>:/vol_low.png</normaloff>
<normalon>:/vol_high.png</normalon>:/vol_low.png</iconset>
</property>
<property name="iconSize">
<size>
<width>36</width>
<height>36</height>
</size>
</property>
<property name="checkable">
<bool>true</bool>
</property>
<property name="checked">
<bool>true</bool>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</item>
<item>
<layout class="QVBoxLayout" name="verticalLayout_3">
<property name="spacing">
<number>2</number>
</property>
<property name="sizeConstraint">
<enum>QLayout::SetFixedSize</enum>
</property>
<item>
<widget class="QLabel" name="label_timeFull">
<property name="text">
<string>totalTime</string>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
<item>
<layout class="QFormLayout" name="formLayout">
<property name="sizeConstraint">
<enum>QLayout::SetMinAndMaxSize</enum>
</property>
<property name="fieldGrowthPolicy">
<enum>QFormLayout::AllNonFixedFieldsGrow</enum>
</property>
<property name="horizontalSpacing">
<number>2</number>
</property>
<property name="verticalSpacing">
<number>2</number>
</property>
<item row="1" column="0">
<widget class="QLabel" name="label_fps">
<property name="text">
<string>FPS:</string>
</property>
</widget>
</item>
<item row="1" column="1">
<widget class="QLabel" name="label_fpsT">
<property name="text">
<string>fps</string>
</property>
</widget>
</item>
<item row="2" column="0">
<widget class="QLabel" name="label_size">
<property name="text">
<string>Size:</string>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QLabel" name="label_sizeT">
<property name="text">
<string>x x y</string>
</property>
</widget>
</item>
<item row="0" column="0">
<widget class="QLabel" name="label_buffer">
<property name="text">
<string>Buffer</string>
</property>
</widget>
</item>
<item row="0" column="1">
<widget class="QProgressBar" name="progressBar_buffer">
<property name="value">
<number>0</number>
</property>
<property name="format">
<string/>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</item>
</layout>
</item>
<item row="2" column="0">
<widget class="QLabel" name="label_fName">
<property name="text">
<string>TextLabel</string>
</property>
</widget>
</item>
<item row="3" column="0">
<widget class="QLabel" name="label_itemNo">
<property name="text">
<string>TextLabel</string>
</property>
</widget>
</item>
</layout>
</widget>
<widget class="QMenuBar" name="menuBar">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>644</width>
<height>27</height>
</rect>
</property>
<widget class="QMenu" name="menuFile">
<property name="title">
<string>File</string>
</property>
<addaction name="actionOpen"/>
<addaction name="actionOpen_Folder"/>
</widget>
<addaction name="menuFile"/>
</widget>
<widget class="QToolBar" name="mainToolBar">
<attribute name="toolBarArea">
<enum>TopToolBarArea</enum>
</attribute>
<attribute name="toolBarBreak">
<bool>false</bool>
</attribute>
</widget>
<widget class="QStatusBar" name="statusBar"/>
<action name="actionOpen">
<property name="text">
<string>Open File</string>
</property>
<property name="shortcut">
<string>Ctrl+O</string>
</property>
</action>
<action name="actionOpen_Folder">
<property name="text">
<string>Open Folder</string>
</property>
<property name="shortcut">
<string>Ctrl+P</string>
</property>
</action>
</widget>
<layoutdefault spacing="6" margin="11"/>
<resources>
<include location="rc.qrc"/>
</resources>
<connections/>
</ui>

BIN
thp_player/vol_high.png Normal file

Binary file not shown; size 5.5 KiB.

BIN
thp_player/vol_low.png Normal file

Binary file not shown; size 5.0 KiB.

BIN
thp_player/vol_med.png Normal file

Binary file not shown; size 5.2 KiB.