Note: the diff of some files below was truncated because they were too big.
lightspark: Changes of Revision 86
lightspark.spec (changed)

@@ -24,7 +24,7 @@
 %bcond_with rtmp
 %endif
 Name:           lightspark
-Version:        0.7.2.99+git20151010.1946
+Version:        0.7.2.99+git20151101.1724
 Release:        0
 Summary:        Modern, free, open-source flash player implementation
 License:        LGPL-3.0+
lightspark.tar.xz/src/backends/builtindecoder.cpp (changed)

@@ -18,11 +18,14 @@
 **************************************************************************/

 #include "backends/builtindecoder.h"
+#include "scripting/flash/display/DisplayObject.h"
+#include "scripting/flash/display/flashdisplay.h"
+#include "scripting/flash/net/flashnet.h"

 using namespace lightspark;

-BuiltinStreamDecoder::BuiltinStreamDecoder(std::istream& _s):
-	stream(_s),prevSize(0),decodedAudioBytes(0),decodedVideoFrames(0),decodedTime(0),frameRate(0.0)
+BuiltinStreamDecoder::BuiltinStreamDecoder(std::istream& _s, NetStream* _ns):
+	stream(_s),prevSize(0),decodedAudioBytes(0),decodedVideoFrames(0),decodedTime(0),frameRate(0.0),netstream(_ns)
 {
 	STREAM_TYPE t=classifyStream(stream);
 	if(t==FLV_STREAM)
@@ -42,7 +45,7 @@
 	if(strncmp(buf,"FLV",3)==0)
 		ret=FLV_STREAM;
 	else
-		throw ParseException("File signature not recognized");
+		ret=UNKOWN_STREAM;

 	s.seekg(0);
 	return ret;
@@ -94,10 +97,20 @@
 			}
 			else
 			{
-				assert_and_throw(audioCodec==tag.SoundFormat);
-				decodedAudioBytes+=audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
-				//Adjust timing
-				decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
+				/*
+				if(tag.isHeader())
+				{
+					//The tag is the header, initialize decoding
+					audioDecoder->switchCodec(tag.SoundFormat, tag.packetData, tag.packetLen);
+					tag.releaseBuffer();
+				}
+				else*/
+				{
+					assert_and_throw(audioCodec==tag.SoundFormat);
+					decodedAudioBytes+=audioDecoder->decodeData(tag.packetData,tag.packetLen,decodedTime);
+					//Adjust timing
+					decodedTime=decodedAudioBytes/audioDecoder->getBytesPerMSec();
+				}
 			}
 			break;
 		}
@@ -130,25 +143,32 @@
 				videoDecoder=new NullVideoDecoder();
 #endif
 					videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
+					videoDecoder->framesdecoded++;
 					decodedVideoFrames++;
 				}
 			}
 			else
 			{
-				videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
-				decodedVideoFrames++;
+				if(tag.isHeader())
+				{
+					//The tag is the header, initialize decoding
+					videoDecoder->switchCodec(tag.codec,tag.packetData,tag.packetLen,frameRate);
+					tag.releaseBuffer();
+				}
+				else
+				{
+					videoDecoder->decodeData(tag.packetData,tag.packetLen, frameTime);
+					videoDecoder->framesdecoded++;
+					decodedVideoFrames++;
+				}
 			}
 			break;
 		}
 		case 18:
 		{
-			metadataTag=ScriptDataTag(stream);
-			prevSize=metadataTag.getTotalLen();
-
-			//The frameRate of the container overrides the stream
-
-			if(metadataTag.metadataDouble.find("framerate") != metadataTag.metadataDouble.end())
-				frameRate=metadataTag.metadataDouble["framerate"];
+			ScriptDataTag tag(stream);
+			prevSize=tag.getTotalLen();
+			netstream->sendClientNotification(tag.methodName,tag.dataobject.getPtr());
 			break;
 		}
 		default:
@@ -157,23 +177,3 @@
 	}
 	return true;
 }
-
-bool BuiltinStreamDecoder::getMetadataInteger(const char* name, uint32_t& ret) const
-{
-	auto it=metadataTag.metadataInteger.find(name);
-	if(it == metadataTag.metadataInteger.end())
-		return false;
-
-	ret=it->second;
-	return true;
-}
-
-bool BuiltinStreamDecoder::getMetadataDouble(const char* name, double& ret) const
-{
-	auto it=metadataTag.metadataDouble.find(name);
-	if(it == metadataTag.metadataDouble.end())
-		return false;
-
-	ret=it->second;
-	return true;
-}
lightspark.tar.xz/src/backends/builtindecoder.h (changed)

@@ -25,6 +25,7 @@
 namespace lightspark
 {

+class NetStream;
 class BuiltinStreamDecoder: public StreamDecoder
 {
@@ -38,13 +39,12 @@
 	uint32_t decodedTime;
 	double frameRate;
 	ScriptDataTag metadataTag;
-	enum STREAM_TYPE { FLV_STREAM=0 };
+	enum STREAM_TYPE { FLV_STREAM=0, UNKOWN_STREAM=1 };
 	STREAM_TYPE classifyStream(std::istream& s);
+	NetStream* netstream;
 public:
-	BuiltinStreamDecoder(std::istream& _s);
+	BuiltinStreamDecoder(std::istream& _s, NetStream* _ns);
 	bool decodeNextFrame();
-	bool getMetadataInteger(const char* name, uint32_t& ret) const;
-	bool getMetadataDouble(const char* name, double& ret) const;
 };

 };
lightspark.tar.xz/src/backends/decoder.cpp (changed)

@@ -116,6 +116,15 @@
   ownedContext(true),curBuffer(0),codecContext(NULL),curBufferOffset(0)
 {
 	//The tag is the header, initialize decoding
+	switchCodec(codecId, initdata, datalen, frameRateHint);
+
+	frameIn=av_frame_alloc();
+}
+
+void FFMpegVideoDecoder::switchCodec(LS_VIDEO_CODEC codecId, uint8_t *initdata, uint32_t datalen, double frameRateHint)
+{
+	if (codecContext)
+		avcodec_close(codecContext);
 #ifdef HAVE_AVCODEC_ALLOC_CONTEXT3
 	codecContext=avcodec_alloc_context3(NULL);
 #else
@@ -154,9 +163,6 @@
 		assert(frameRateHint!=0.0);
 		frameRate=frameRateHint;
 	}
-
-
-
 	if (initdata)
 	{
 		codecContext->extradata=initdata;
@@ -173,8 +179,6 @@
 		status=VALID;
 	else
 		status=INIT;
-
-	frameIn=av_frame_alloc();
 }

 FFMpegVideoDecoder::FFMpegVideoDecoder(AVCodecContext* _c, double frameRateHint):
@@ -262,6 +266,8 @@
 		status=FLUSHED;
 		flushed.signal();
 	}
+	framesdropped++;
+
 	return ret;
 }

@@ -279,7 +285,11 @@
 #else
 	int ret=avcodec_decode_video(codecContext, frameIn, &frameOk, data, datalen);
 #endif
-	assert_and_throw(ret==(int)datalen);
+	if (ret < 0 || frameOk == 0)
+	{
+		LOG(LOG_INFO,"not decoded:"<<ret<<" "<< frameOk);
+		return false;
+	}
 	if(frameOk)
 	{
 		assert(codecContext->pix_fmt==PIX_FMT_YUV420P);
@@ -303,8 +313,11 @@
 #else
 	int ret=avcodec_decode_video(codecContext, frameIn, &frameOk, pkt->data, pkt->size);
 #endif
-	if (ret < 0)
+	if (ret < 0 || frameOk == 0)
+	{
+		LOG(LOG_INFO,"not decoded:"<<ret<<" "<< frameOk);
 		return false;
+	}
 	assert_and_throw(ret==(int)pkt->size);

 	if(frameOk)
@@ -461,10 +474,23 @@
 #ifdef ENABLE_LIBAVCODEC
 FFMpegAudioDecoder::FFMpegAudioDecoder(LS_AUDIO_CODEC audioCodec, uint8_t* initdata, uint32_t datalen):ownedContext(true)
 {
+	switchCodec(audioCodec,initdata,datalen);
+#if HAVE_AVCODEC_DECODE_AUDIO4
+	frameIn=av_frame_alloc();
+#endif
+}
+void FFMpegAudioDecoder::switchCodec(LS_AUDIO_CODEC audioCodec, uint8_t* initdata, uint32_t datalen)
+{
+	if (codecContext)
+		avcodec_close(codecContext);
 	AVCodec* codec=avcodec_find_decoder(LSToFFMpegCodec(audioCodec));
 	assert(codec);

-	codecContext=avcodec_alloc_context3(codec);
+#ifdef HAVE_AVCODEC_ALLOC_CONTEXT3
+	codecContext=avcodec_alloc_context3(NULL);
+#else
+	codecContext=avcodec_alloc_context();
+#endif //HAVE_AVCODEC_ALLOC_CONTEXT3

 	if(initdata)
 	{
@@ -483,9 +509,6 @@
 		status=VALID;
 	else
 		status=INIT;
-#if HAVE_AVCODEC_DECODE_AUDIO4
-	frameIn=av_frame_alloc();
-#endif
 }

 FFMpegAudioDecoder::FFMpegAudioDecoder(LS_AUDIO_CODEC lscodec, int sampleRate, int channels, bool):ownedContext(true)
@@ -904,7 +927,7 @@
 bool FFMpegStreamDecoder::decodeNextFrame()
 {
 	AVPacket pkt;
-	int ret=av_read_frame(formatCtx, &pkt);
+	int ret=av_read_frame(formatCtx, &pkt);
 	if(ret<0)
 		return false;
 	auto time_base=formatCtx->streams[pkt.stream_index]->time_base;
@@ -920,29 +943,14 @@
 	{
 		if (customVideoDecoder)
 		{
-			customVideoDecoder->decodePacket(&pkt, mtime);
-			customVideoDecoder->framesdecoded++;
+			if (customVideoDecoder->decodePacket(&pkt, mtime))
+				customVideoDecoder->framesdecoded++;
 		}
 	}
 	av_free_packet(&pkt);
 	return true;
 }

-bool FFMpegStreamDecoder::getMetadataInteger(const char* name, uint32_t& ret) const
-{
-	return false;
-}
-
-bool FFMpegStreamDecoder::getMetadataDouble(const char* name, double& ret) const
-{
-	if( string(name) == "duration" )
-	{
-		ret = double(formatCtx->duration) / double(AV_TIME_BASE);
-		return true;
-	}
-	return false;
-}
-
 int FFMpegStreamDecoder::avioReadPacket(void* t, uint8_t* buf, int buf_size)
 {
 	FFMpegStreamDecoder* th=static_cast<FFMpegStreamDecoder*>(t);
lightspark.tar.xz/src/backends/decoder.h (changed)

@@ -85,8 +85,9 @@
 class VideoDecoder: public Decoder, public ITextureUploadable
 {
 public:
-	VideoDecoder():frameRate(0),framesdecoded(0),frameWidth(0),frameHeight(0),fenceCount(0),resizeGLBuffers(false){}
-	virtual ~VideoDecoder(){};
+	VideoDecoder():frameRate(0),framesdecoded(0),framesdropped(0),frameWidth(0),frameHeight(0),fenceCount(0),resizeGLBuffers(false){}
+	virtual ~VideoDecoder(){}
+	virtual void switchCodec(LS_VIDEO_CODEC codecId, uint8_t* initdata, uint32_t datalen, double frameRateHint)=0;
 	virtual bool decodeData(uint8_t* data, uint32_t datalen, uint32_t time)=0;
 	virtual bool discardFrame()=0;
 	virtual void skipUntil(uint32_t time)=0;
@@ -101,6 +102,7 @@
 	}
 	double frameRate;
 	uint32_t framesdecoded;
+	uint32_t framesdropped;
 	/*
 	   Useful to avoid destruction of the object while a pending upload is waiting
 	*/
@@ -194,6 +196,7 @@
 	   Specialized decoding used by FFMpegStreamDecoder
 	*/
 	bool decodePacket(AVPacket* pkt, uint32_t time);
+	void switchCodec(LS_VIDEO_CODEC codecId, uint8_t* initdata, uint32_t datalen, double frameRateHint);
 	bool decodeData(uint8_t* data, uint32_t datalen, uint32_t time);
 	bool discardFrame();
 	void skipUntil(uint32_t time);
@@ -241,7 +244,8 @@
 	void* operator new(size_t);
 	void operator delete(void*);
 	AudioDecoder():sampleRate(0),channelCount(0),initialTime(-1){}
-	virtual ~AudioDecoder(){};
+	virtual ~AudioDecoder(){}
+	virtual void switchCodec(LS_AUDIO_CODEC codecId, uint8_t* initdata, uint32_t datalen)=0;
 	virtual uint32_t decodeData(uint8_t* data, int32_t datalen, uint32_t time)=0;
 	bool hasDecodedFrames() const
 	{
@@ -316,6 +320,7 @@
 	   Specialized decoding used by FFMpegStreamDecoder
 	*/
 	uint32_t decodePacket(AVPacket* pkt, uint32_t time);
+	void switchCodec(LS_AUDIO_CODEC audioCodec, uint8_t* initdata, uint32_t datalen);
 	uint32_t decodeData(uint8_t* data, int32_t datalen, uint32_t time);
 	uint32_t decodeStreamSomePackets(std::istream& s, uint32_t time);
 };
@@ -327,8 +332,6 @@
 	StreamDecoder():audioDecoder(NULL),videoDecoder(NULL),valid(false){}
 	virtual ~StreamDecoder();
 	virtual bool decodeNextFrame() = 0;
-	virtual bool getMetadataInteger(const char* name, uint32_t& ret) const=0;
-	virtual bool getMetadataDouble(const char* name, double& ret) const=0;
 	bool isValid() const { return valid; }
 	AudioDecoder* audioDecoder;
 	VideoDecoder* videoDecoder;
@@ -362,8 +365,6 @@
 	FFMpegStreamDecoder(std::istream& s);
 	~FFMpegStreamDecoder();
 	bool decodeNextFrame();
-	bool getMetadataInteger(const char* name, uint32_t& ret) const;
-	bool getMetadataDouble(const char* name, double& ret) const;
 };

 #endif
lightspark.tar.xz/src/parsing/amf3_generator.h (changed)

@@ -117,7 +117,6 @@
 		_R<ASObject> parseXML(std::vector<ASObject*>& objMap, bool legacyXML) const;

-		tiny_string parseStringAMF0() const;
 		_R<ASObject> parseECMAArrayAMF0(std::vector<tiny_string>& stringMap,
 				std::vector<ASObject*>& objMap,
 				std::vector<TraitsRef>& traitsMap) const;

@@ -127,7 +126,8 @@
 public:
 	Amf3Deserializer(ByteArray* i):input(i) {}
 	_R<ASObject> readObject() const;
+	tiny_string parseStringAMF0() const;
 };
-};
+}

 #endif /* PARSING_AMF3_GENERATOR_H */
lightspark.tar.xz/src/parsing/flv.cpp (changed)

@@ -20,10 +20,16 @@
 #include "parsing/flv.h"
 #include "swftypes.h"
 #include "compat.h"
+#include "scripting/flash/net/flashnet.h"
+#include "scripting/flash/utils/ByteArray.h"
+#include "scripting/class.h"
+#include "scripting/toplevel/toplevel.h"
+#include "amf3_generator.h"

 using namespace lightspark;
 using namespace std;

+
 FLV_HEADER::FLV_HEADER(std::istream& in):dataOffset(0),_hasAudio(false),_hasVideo(false)
 {
 	UI8 Signature[3];
@@ -87,94 +93,28 @@
 ScriptDataTag::ScriptDataTag(istream& s):VideoTag(s)
 {
 	unsigned int start=s.tellg();
-	tiny_string methodName;
-
-	//Specs talks about an arbitrary number of stuff, actually just a string and an array are expected
-	UI8 Type;
-	s >> Type;
-	if(Type!=2)
-		throw ParseException("Unexpected type in FLV");
-
-	ScriptDataString String(s);
-	methodName=String.getString();
-	s >> Type;
-	if(Type!=8)
-		throw ParseException("Unexpected type in FLV");
+	_R<ByteArray> b = _NR<ByteArray>(Class<ByteArray>::getInstanceS());
+	uint8_t* data =b->getBuffer(dataSize,true);
+	s.read((char*)data,dataSize);
+	b->setObjectEncoding(ObjectEncoding::AMF0);
+	b->setCurrentObjectEncoding(ObjectEncoding::AMF0);
+	b->setPosition(0);
+	uint8_t tagtype;
+	if (!b->readByte(tagtype))
+		throw ParseException("Not enough data to parse tag type");
+	if (tagtype != amf0_string_marker)
+		throw ParseException("wrong tagtype in ScriptDataTag");
+
+	Amf3Deserializer d(b.getPtr());
+	methodName=d.parseStringAMF0();
+	dataobject = d.readObject();

-	ScriptECMAArray ecmaArray(s, this);
 	//Compute totalLen
 	unsigned int end=s.tellg();
 	totalLen=(end-start)+11;
 }

-ScriptDataString::ScriptDataString(std::istream& s)
-{
-	UI16_FLV Length;
-	s >> Length;
-	size=Length;
-	//TODO: use resize on tiny_string
-	char* buf=new char[Length+1];
-	s.read(buf,Length);
-	buf[Length]=0;
-
-	val=tiny_string(buf,true);
-
-	delete[] buf;
-}
-
-ScriptECMAArray::ScriptECMAArray(std::istream& s, ScriptDataTag* tag)
-{
-	//numVar is an 'approximation' of array size
-	UI32_FLV numVar;
-	s >> numVar;
-
-	while(1)
-	{
-		ScriptDataString varName(s);
-		//cout << varName.getString() << endl;
-		UI8 Type;
-		s >> Type;
-		switch(Type)
-		{
-			case 0: //double (big-endian)
-			{
-				union
-				{
-					uint64_t i;
-					double d;
-				} tmp;
-				s.read((char*)&tmp.i,8);
-				tmp.i=GINT64_FROM_BE(tmp.i);
-				tag->metadataDouble[varName.getString()] = tmp.d;
-				//cout << "FLV metadata double: " << varName.getString() << " = " << tmp.d << endl;
-				break;
-			}
-			case 1: //integer
-			{
-				UI8 b;
-				s >> b;
-				tag->metadataInteger[varName.getString()] = int(b);
-				//cout << "FLV metadata int: " << varName.getString() << " = " << (int)b << endl;
-				break;
-			}
-			case 2: //string
-			{
-				ScriptDataString String(s);
-				tag->metadataString[varName.getString()] = String.getString();
-				//cout << "FLV metadata string: " << varName.getString() << " = " << String.getString() << endl;
-				break;
-			}
-			case 9: //End of array
-			{
-				return;
-			}
-			default:
-				LOG(LOG_ERROR,"Unknown type in flv parsing: " << (int)Type);
-				throw ParseException("Unexpected type in FLV");
-		}
-	}
-}

 VideoDataTag::VideoDataTag(istream& s):VideoTag(s),_isHeader(false),packetData(NULL)
 {
@@ -238,7 +178,7 @@
 	SI24_FLV CompositionTime;
 	s >> CompositionTime;
-	assert_and_throw(CompositionTime==0); //TODO: what are composition times
+	//assert_and_throw(CompositionTime==0); //TODO: what are composition times

 	//Compute lenght of raw data
 	packetLen=dataSize-5;
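For context on the new parsing path above: an FLV script-data tag body is AMF0-encoded, starting with a string (the method name, typically "onMetaData") followed by a single value (usually an ECMA array of metadata). A minimal illustrative sketch of such a payload, with byte values taken from the FLV/AMF0 specifications rather than from this source tree:

// Illustrative AMF0 payload for an FLV script-data tag (not part of the source).
// 0x02 is the AMF0 string marker that the parser checks against amf0_string_marker;
// it is followed by a 16-bit big-endian length and the string bytes.
// 0x08 is the AMF0 ECMA-array marker, consumed by Amf3Deserializer::readObject()
// once the ByteArray has been switched to AMF0 with setObjectEncoding().
static const uint8_t sampleScriptData[] = {
	0x02,                                     // string marker -> parseStringAMF0()
	0x00, 0x0a,                               // string length 10
	'o','n','M','e','t','a','D','a','t','a',  // method name
	0x08                                      // ECMA array marker -> readObject()
	// ... array entries such as "duration" and "framerate" would follow ...
};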
lightspark.tar.xz/src/parsing/flv.h (changed)

@@ -25,6 +25,7 @@
 #include <map>
 #include "swftypes.h"
 #include "backends/decoder.h"
+#include "asobject.h"

 namespace lightspark
 {
@@ -52,7 +53,7 @@
 	uint32_t timestamp;
 	uint32_t totalLen;
 public:
-	VideoTag() {};
+	VideoTag() {}
 	VideoTag(std::istream& s);
 	uint32_t getDataSize() const { return dataSize; }
 	uint32_t getTotalLen() const { return totalLen; }
@@ -61,31 +62,12 @@
 class ScriptDataTag: public VideoTag
 {
 public:
-	//Metadatas
-	std::map<tiny_string, double> metadataDouble;
-	std::map<tiny_string, int> metadataInteger;
-	std::map<tiny_string, tiny_string> metadataString;
-	ScriptDataTag() {};
+	tiny_string methodName;
+	_NR<ASObject> dataobject;
+	ScriptDataTag() {}
 	ScriptDataTag(std::istream& s);
 };

-class ScriptDataString
-{
-private:
-	uint32_t size;
-	tiny_string val;
-public:
-	ScriptDataString(std::istream& s);
-	const tiny_string& getString() const { return val; }
-	uint32_t getSize() const { return size; }
-};
-
-class ScriptECMAArray
-{
-public:
-	ScriptECMAArray(std::istream& s, ScriptDataTag* tag);
-};
-
 class VideoDataTag: public VideoTag
 {
 private:
@@ -126,6 +108,6 @@
 	bool isHeader() const { return _isHeader; }
 };

-};
+}

 #endif /* PARSING_FLV_H */
lightspark.tar.xz/src/scripting/abc_fast_interpreter.cpp (changed)

@@ -852,6 +852,17 @@
 				name->resetNameIfObject();
 				break;
 			}
+			case 0x5f:
+			{
+				//finddef
+				uint32_t t=data->uints[0];
+				instructionPointer+=4;
+				multiname* name=context->context->getMultiname(t,context);
+				LOG(LOG_NOT_IMPLEMENTED,"opcode 0x5f (finddef) not implemented:"<< *name);
+				context->runtime_stack_push(getSys()->getNullRef());
+				name->resetNameIfObject();
+				break;
+			}
 			case 0x60:
 			{
 				//getlex
lightspark.tar.xz/src/scripting/abc_interpreter.cpp (changed)

@@ -960,6 +960,17 @@
 				name->resetNameIfObject();
 				break;
 			}
+			case 0x5f:
+			{
+				//finddef
+				u30 t;
+				code >> t;
+				multiname* name=context->context->getMultiname(t,context);
+				LOG(LOG_NOT_IMPLEMENTED,"opcode 0x5f (finddef) not implemented:"<<*name);
+				context->runtime_stack_push(getSys()->getNullRef());
+				name->resetNameIfObject();
+				break;
+			}
 			case 0x60:
 			{
 				//getlex
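For reference, the AVM2 overview describes finddef (0x5f) as locating the script that defines the given multiname and pushing that script's global object; the stub added in both interpreters above only logs the name and pushes null, which keeps execution going but will misbehave if the result is actually dereferenced.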
lightspark.tar.xz/src/scripting/flash/display/flashdisplay.cpp (changed)

@@ -2111,6 +2111,8 @@
 	c->setDeclaredMethodByQName("tabChildren","",Class<IFunction>::getFunction(_setTabChildren),SETTER_METHOD,true);
 	c->setDeclaredMethodByQName("wmodeGPU","",Class<IFunction>::getFunction(_getWmodeGPU),GETTER_METHOD,true);
 	c->setDeclaredMethodByQName("invalidate","",Class<IFunction>::getFunction(_invalidate),NORMAL_METHOD,true);
+	c->setDeclaredMethodByQName("color","",Class<IFunction>::getFunction(_getColor),GETTER_METHOD,true);
+	c->setDeclaredMethodByQName("color","",Class<IFunction>::getFunction(_setColor),SETTER_METHOD,true);
 	REGISTER_GETTER_SETTER(c,align);
 	REGISTER_GETTER_SETTER(c,colorCorrection);
 	REGISTER_GETTER_SETTER(c,displayState);
@@ -2377,6 +2379,27 @@
 	//getVm()->addEvent(_MR(th),event);
 	return NULL;
 }
+ASFUNCTIONBODY(Stage,_getColor)
+{
+	Stage* th=static_cast<Stage*>(obj);
+	RGB rgb;
+	_NR<RootMovieClip> root = th->getRoot();
+	if (!root.isNull())
+		rgb = root->getBackground();
+	return abstract_ui(rgb.toUInt());
+}
+
+ASFUNCTIONBODY(Stage,_setColor)
+{
+	Stage* th=static_cast<Stage*>(obj);
+	uint32_t color;
+	ARG_UNPACK(color);
+	RGB rgb(color);
+	_NR<RootMovieClip> root = th->getRoot();
+	if (!root.isNull())
+		root->setBackground(rgb);
+	return NULL;
+}

 void StageScaleMode::sinit(Class_base* c)
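A short sketch of what the new getter/setter pair exposes: stage.color reads and writes the background colour stored on the root movie clip. The round trip below assumes that RGB(uint32_t) and RGB::toUInt() use the same 0xRRGGBB packing; the constructor and toUInt() are the calls used by _getColor/_setColor above, but the check function itself is illustrative only:

#include <cassert>
#include <cstdint>
#include "swftypes.h"   // lightspark::RGB, as used by Stage::_getColor/_setColor

// Illustrative only: verifies the assumed symmetry of the colour packing.
static void checkStageColorRoundTrip()
{
	uint32_t setValue = 0x336699;      // value a SWF would assign to stage.color
	lightspark::RGB rgb(setValue);     // _setColor stores this via root->setBackground(rgb)
	assert(rgb.toUInt() == setValue);  // _getColor returns abstract_ui(root->getBackground().toUInt())
}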
lightspark.tar.xz/src/scripting/flash/display/flashdisplay.h (changed)

@@ -552,6 +552,8 @@
 	ASFUNCTION(_setFrameRate);
 	ASFUNCTION(_getWmodeGPU);
 	ASFUNCTION(_invalidate);
+	ASFUNCTION(_getColor);
+	ASFUNCTION(_setColor);
 	ASPROPERTY_GETTER_SETTER(tiny_string,align);
 	ASPROPERTY_GETTER_SETTER(tiny_string,colorCorrection);
 	ASPROPERTY_GETTER_SETTER(tiny_string,displayState);
lightspark.tar.xz/src/scripting/flash/net/NetStreamInfo.cpp (changed)

@@ -89,7 +89,7 @@
 ASFUNCTIONBODY_GETTER(NetStreamInfo,dataBufferLength);
 ASFUNCTIONBODY_GETTER_NOT_IMPLEMENTED(NetStreamInfo,dataByteCount);
 ASFUNCTIONBODY_GETTER(NetStreamInfo,dataBytesPerSecond);
-ASFUNCTIONBODY_GETTER_NOT_IMPLEMENTED(NetStreamInfo,droppedFrames);
+ASFUNCTIONBODY_GETTER(NetStreamInfo,droppedFrames);
 ASFUNCTIONBODY_GETTER_NOT_IMPLEMENTED(NetStreamInfo,isLive);
 ASFUNCTIONBODY_GETTER(NetStreamInfo,maxBytesPerSecond);
 ASFUNCTIONBODY_GETTER_NOT_IMPLEMENTED(NetStreamInfo,metaData);
lightspark.tar.xz/src/scripting/flash/net/URLStream.cpp (changed)

@@ -33,9 +33,14 @@
 using namespace lightspark;

 URLStreamThread::URLStreamThread(_R<URLRequest> request, _R<URLStream> ldr, _R<ByteArray> bytes)
-  : DownloaderThreadBase(request, ldr.getPtr()), loader(ldr), data(bytes),streambuffer(NULL),timestamp_last_progress(0)
+  : DownloaderThreadBase(request, ldr.getPtr()), loader(ldr), data(bytes),streambuffer(NULL),timestamp_last_progress(0),bytes_total(0)
 {
 }
+
+void URLStreamThread::setBytesTotal(uint32_t b)
+{
+	bytes_total = b;
+}
 void URLStreamThread::setBytesLoaded(uint32_t b)
 {
 	uint32_t curlen = data->getLength();
@@ -47,7 +52,7 @@
 	{
 		timestamp_last_progress = cur;
 		loader->incRef();
-		getVm()->addEvent(loader,_MR(Class<ProgressEvent>::getInstanceS(b,0)));
+		getVm()->addEvent(loader,_MR(Class<ProgressEvent>::getInstanceS(b,bytes_total)));
 	}
 }
 }
@@ -70,6 +75,8 @@
 	loader->incRef();
 	getVm()->addEvent(loader,_MR(Class<Event>::getInstanceS("open")));
 	streambuffer = cache->createReader();
+	loader->incRef();
+	getVm()->addEvent(loader,_MR(Class<ProgressEvent>::getInstanceS(0,bytes_total)));
 	cache->waitForTermination();
 	if(!downloader->hasFailed() && !threadAborting)
 	{
lightspark.tar.xz/src/scripting/flash/net/URLStream.h (changed)

@@ -39,10 +39,11 @@
 	_R<ByteArray> data;
 	std::streambuf *streambuffer;
 	uint64_t timestamp_last_progress;
+	uint32_t bytes_total;
 	void execute();
 public:
 	URLStreamThread(_R<URLRequest> request, _R<URLStream> ldr, _R<ByteArray> bytes);
-	void setBytesTotal(uint32_t b) { (void)b; }
+	void setBytesTotal(uint32_t b);
 	void setBytesLoaded(uint32_t b);
 };

lightspark.tar.xz/src/scripting/flash/net/flashnet.cpp (changed)

@@ -1062,6 +1062,7 @@
 	c->setVariableByQName("CONNECT_TO_FMS","",Class<ASString>::getInstanceS("connectToFMS"),DECLARED_TRAIT);
 	c->setVariableByQName("DIRECT_CONNECTIONS","",Class<ASString>::getInstanceS("directConnections"),DECLARED_TRAIT);
 	c->setDeclaredMethodByQName("play","",Class<IFunction>::getFunction(play),NORMAL_METHOD,true);
+	c->setDeclaredMethodByQName("play2","",Class<IFunction>::getFunction(play2),NORMAL_METHOD,true);
 	c->setDeclaredMethodByQName("resume","",Class<IFunction>::getFunction(resume),NORMAL_METHOD,true);
 	c->setDeclaredMethodByQName("pause","",Class<IFunction>::getFunction(pause),NORMAL_METHOD,true);
 	c->setDeclaredMethodByQName("togglePause","",Class<IFunction>::getFunction(togglePause),NORMAL_METHOD,true);
@@ -1139,6 +1140,8 @@
 	}
 	else
 		LOG(LOG_NOT_IMPLEMENTED,"NetStreamInfo.currentBytesPerSecond/maxBytesPerSecond/dataBytesPerSecond is only implemented for data generation mode");
+	if (th->videoDecoder)
+		res->droppedFrames = th->videoDecoder->framesdropped;
 	res->playbackBytesPerSecond = th->playbackBytesPerSecond;
 	res->audioBufferLength = th->bufferLength;
 	res->videoBufferLength = th->bufferLength;
@@ -1222,7 +1225,7 @@
 	//Reset the paused states
 	th->paused = false;
 //	th->audioPaused = false;
-	
+
 	// Parameter Null means data is generated by calls to "appendBytes"
 	if (args[0]->is<Null>())
 	{
@@ -1279,7 +1282,7 @@
 	//Until buffering is implemented, set a fake value. The BBC
 	//news player panics if bufferLength is smaller than 2.
-	th->bufferLength = 10;
+	//th->bufferLength = 10;

 	if(!th->url.isValid())
 	{
@@ -1363,7 +1366,12 @@
 	LOG(LOG_CALLS, _("NetStream::close called"));
 	return NULL;
 }
-
+ASFUNCTIONBODY(NetStream,play2)
+{
+	//NetStream* th=Class<NetStream>::cast(obj);
+	LOG(LOG_NOT_IMPLEMENTED,"Netstream.play2 not implemented:"<< args[0]->toDebugString());
+	return NULL;
+}
 ASFUNCTIONBODY(NetStream,seek)
 {
 	//NetStream* th=Class<NetStream>::cast(obj);
@@ -1544,11 +1552,16 @@
 	if (val == "resetBegin")
 	{
+		th->threadAbort();
+		th->closed = false;
+		LOG(LOG_INFO,"resetBegin");
 		if (th->datagenerationfile)
 			delete th->datagenerationfile;
 		th->datagenerationfile = new FileStreamCache;
 		th->datagenerationfile->openForWriting();
 		th->datagenerationbuffer->setLength(0);
+		th->datagenerationthreadstarted = false;
+		th->datagenerationexpecttype = DATAGENERATION_HEADER;
 	}
 	else if (val == "resetSeek")
 	{
@@ -1580,21 +1593,30 @@
 	if(audioStream && getSys()->audioManager->isTimingAvailablePlugin())
 	{
 		assert(audioDecoder);
-		streamTime=audioStream->getPlayedTime()+audioDecoder->initialTime;
+		if (streamTime == 0)
+			streamTime=audioStream->getPlayedTime()+audioDecoder->initialTime;
+		else if (this->bufferLength > 0)
+			streamTime+=1000/frameRate;
 	}
 	else
 	{
-		streamTime+=1000/frameRate;
+		if (this->bufferLength > 0)
+			streamTime+=1000/frameRate;
 		if (audioDecoder)
 			audioDecoder->skipAll();
 	}
 	this->bufferLength = (framesdecoded / frameRate) - (streamTime-prevstreamtime)/1000.0;
+	if (this->bufferLength < 0)
+		this->bufferLength = 0;
 	//LOG(LOG_INFO,"tick:"<< " "<<bufferLength << " "<<streamTime<<" "<<frameRate<<" "<<framesdecoded<<" "<<bufferTime<<" "<<this->playbackBytesPerSecond<<" "<<this->getReceivedLength());
 	countermutex.unlock();
-	videoDecoder->skipUntil(streamTime);
-	//The next line ensures that the downloader will not be destroyed before the upload jobs are fenced
-	videoDecoder->waitForFencing();
-	getSys()->getRenderThread()->addUploadJob(videoDecoder);
+	if (videoDecoder)
+	{
+		videoDecoder->skipUntil(streamTime);
+		//The next line ensures that the downloader will not be destroyed before the upload jobs are fenced
+		videoDecoder->waitForFencing();
+		getSys()->getRenderThread()->addUploadJob(videoDecoder);
+	}
 }

 void NetStream::tickFence()
@@ -1633,9 +1655,10 @@
 	rawAccessAllowed = true;

 	std::streambuf *sbuf = NULL;
+	StreamDecoder* streamDecoder=NULL;
+
 	if (datagenerationfile)
 	{
-		LOG(LOG_INFO,"create reader");
 		sbuf = datagenerationfile->createReader();
 	}
 	else
@@ -1657,11 +1680,10 @@
 	}
 	istream s(sbuf);
 	s.exceptions(istream::goodbit);
-	
+

 	ThreadProfile* profile=getSys()->allocateProfiler(RGB(0,0,200));
 	profile->setTag("NetStream");
 	bool waitForFlush=true;
-	StreamDecoder* streamDecoder=NULL;
 	//We need to catch possible EOF and other error condition in the non reliable stream
 	try
 	{
@@ -1673,13 +1695,18 @@
 		}
 		else
 		{
-			streamDecoder=new FFMpegStreamDecoder(s);
+			streamDecoder=new BuiltinStreamDecoder(s,this);
+			if (!streamDecoder->isValid()) // not FLV stream, so we try ffmpeg detection
+				streamDecoder=new FFMpegStreamDecoder(s);
 			if(!streamDecoder->isValid())
 				threadAbort();
 		}
-		
+
 		countermutex.lock();
+		framesdecoded = 0;
+		frameRate=0;
+		videoDecoder = NULL;
 		this->prevstreamtime = streamTime;
 		this->bufferLength = 0;
 		countermutex.unlock();
@@ -1696,10 +1723,8 @@
 			bool decodingSuccess= bufferfull && streamDecoder->decodeNextFrame();
 			if(!decodingSuccess && bufferfull)
 			{
+				LOG(LOG_INFO,"decoding failed:"<<s.tellg()<<" "<<this->getReceivedLength());
 				bufferfull = false;
-				this->incRef();
-				getVm()->addEvent(_MR(this),
-					_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Empty")));
 			}
 			else
 			{
@@ -1724,18 +1749,25 @@
 						this->bufferLength = (framesdecoded / frameRate) - (streamTime-prevstreamtime)/1000.0;
 					}
 					countermutex.unlock();
+					if (bufferfull && this->bufferLength < 0)
+					{
+						bufferfull = false;
+						this->bufferLength=0;
+						this->incRef();
+						getVm()->addEvent(_MR(this),_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Buffer.Empty")));
+					}
 				}
 			}
 		}
-		
+

 		if(videoDecoder==NULL && streamDecoder->videoDecoder)
 		{
 			videoDecoder=streamDecoder->videoDecoder;
 			this->incRef();
 			getVm()->addEvent(_MR(this),
-				_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start")));
+					_MR(Class<NetStatusEvent>::getInstanceS("status", "NetStream.Play.Start")));
 		}
-		
+

 		if(!tickStarted && isReady() && ((framesdecoded / frameRate) >= this->bufferTime))
 		{
 			if(audioDecoder==NULL && streamDecoder->audioDecoder)
@@ -1744,9 +1776,6 @@
 			if(audioStream==NULL && audioDecoder && audioDecoder->isValid() && getSys()->audioManager->pluginLoaded())
 				audioStream=getSys()->audioManager->createStreamPlugin(audioDecoder);

-			if(!datagenerationfile && bufferfull)
-				sendClientNotification("onMetaData", createMetaDataObject(streamDecoder));
-
 			tickStarted=true;
 			this->incRef();
 			getVm()->addEvent(_MR(this),
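As a rough worked example of the buffer computation above (numbers are illustrative): with frameRate = 25 and framesdecoded = 100 the decoder holds 4 s of material; if 3.2 s of stream time have elapsed since prevstreamtime, bufferLength = 4 - 3.2 = 0.8 s. A negative result is now clamped to 0, and in the decoding loop it also drops bufferfull and fires the NetStream.Buffer.Empty status event.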
lightspark.tar.xz/src/scripting/flash/net/flashnet.h (changed)

@@ -287,10 +287,6 @@
 	enum DATAGENERATION_EXPECT_TYPE { DATAGENERATION_HEADER=0,DATAGENERATION_PREVTAG,DATAGENERATION_FLVTAG };
 	DATAGENERATION_EXPECT_TYPE datagenerationexpecttype;
 	_NR<ByteArray> datagenerationbuffer;
-
-	ASObject *createMetaDataObject(StreamDecoder* streamDecoder);
-	ASObject *createPlayStatusObject(const tiny_string& code);
-	void sendClientNotification(const tiny_string& name, ASObject *args);
 public:
 	NetStream(Class_base* c);
 	~NetStream();
@@ -299,6 +295,7 @@
 	static void buildTraits(ASObject* o);
 	ASFUNCTION(_constructor);
 	ASFUNCTION(play);
+	ASFUNCTION(play2);
 	ASFUNCTION(resume);
 	ASFUNCTION(pause);
 	ASFUNCTION(togglePause);
@@ -323,6 +320,8 @@
 	ASPROPERTY_GETTER_SETTER(number_t, bufferTimeMax);
 	ASPROPERTY_GETTER_SETTER(number_t, maxPauseBufferTime);

+	void sendClientNotification(const tiny_string& name, ASObject *args);
+
 	//Interface for video
 	/**
 	  	Get the frame width
lightspark.tar.xz/src/scripting/flash/utils/ByteArray.h (changed)

@@ -81,6 +81,7 @@
 	void removeFrontBytes(int count);

 	uint8_t getObjectEncoding() const { return objectEncoding; }
+	void setObjectEncoding(uint8_t encoding) { objectEncoding = encoding; }
 	uint8_t getCurrentObjectEncoding() const { return currentObjectEncoding; }
 	void setCurrentObjectEncoding(uint8_t encoding) { currentObjectEncoding = encoding; }
