Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit 9b8569a

Browse files
luoying1234U1X6WK
authored and committed
Fix several android issues. 1. Fix "cannot launch" issue: fix fortify fwrite FILE issue and add null ptr check on configuration params. 2. Add media codec capacity check in extractor mode. 3. Fix hang issue in initial process by increasing the timeout limitation of init segment processing. 4. Fix hang issue in switching viewport by correcting stitching strategy. 5. Fix core dump issue caused by get/add current pose. 6. Fix mismatch frame issue in non-catchup mode.
Signed-off-by: Luo, Ying <[email protected]>
1 parent 70d75f7 commit 9b8569a

File tree

16 files changed

+178
-126
lines changed

16 files changed

+178
-126
lines changed

src/OmafDashAccess/OmafDashSource.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -705,7 +705,7 @@ void OmafDashSource::thread_dynamic() {
705705
SetStatus(STATUS_STOPPED);
706706
return;
707707
}
708-
uint32_t wait_time = 3000;
708+
uint32_t wait_time = 10000;
709709
uint32_t current_wait_time = 0;
710710
bool isInitSegParsed = omaf_reader_mgr_->IsInitSegmentsParsed();
711711
while (!isInitSegParsed) {
@@ -795,7 +795,7 @@ void OmafDashSource::thread_static() {
795795
SetStatus(STATUS_STOPPED);
796796
return;
797797
}
798-
uint32_t wait_time = 3000;
798+
uint32_t wait_time = 10000;
799799
uint32_t current_wait_time = 0;
800800
bool isInitSegParsed = omaf_reader_mgr_->IsInitSegmentsParsed();
801801
while (!isInitSegParsed) {

src/OmafDashAccess/OmafMediaStream.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -883,6 +883,7 @@ int32_t OmafMediaStream::TaskRun(OmafTilesStitch *stitch, std::pair<uint64_t, st
883883
std::lock_guard<std::mutex> lock(m_catchupPacketsMutex);
884884
m_catchupMergedPackets[video_id].push_back(catchupMergedPacket);
885885
OMAF_LOG(LOG_INFO, "[FrameSequences][CatchUp][Stitch]: Push one stitched catchup packet at PTS %lld, video id %d\n", currPTS, video_id);
886+
// ANDROID_LOGD("[FrameSequences][CatchUp][Stitch]: Push one stitched catchup packet at PTS %lld, video id %d\n", currPTS, video_id);
886887
}
887888

888889
// DONE remove successfully processed catchup tile tracks.

src/OmafDashAccess/OmafTilesStitch.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -452,7 +452,7 @@ int32_t OmafTilesStitch::UpdateSelectedTiles(std::map<uint32_t, MediaPacket *> &
452452
return ERROR_NONE;
453453
}
454454

455-
vector<pair<uint32_t, uint32_t>> OmafTilesStitch::GenerateRowAndColArr(uint32_t packetsSize, uint32_t splitNum, uint32_t maxTile_x, uint32_t maxTile_y)
455+
vector<pair<uint32_t, uint32_t>> OmafTilesStitch::GenerateRowAndColArr(uint32_t packetsSize, uint32_t splitNum, uint32_t maxTile_x, uint32_t maxTile_y, QualityRank ranking)
456456
{
457457
vector<pair<uint32_t, uint32_t>> arrangementArr;
458458
if (packetsSize < splitNum || splitNum == 0)
@@ -524,7 +524,7 @@ vector<pair<uint32_t, uint32_t>> OmafTilesStitch::GenerateRowAndColArr(uint32_t
524524
}
525525

526526
OMAF_LOG(LOG_INFO, "one arrangement has the tile division of %u x %u\n", sqrtedSize, dividedSize);
527-
if (dividedSize > sqrtedSize ) {
527+
if (maxTile_x > maxTile_y || ranking != HIGHEST_QUALITY_RANKING) {
528528
oneArrangement = std::make_pair(sqrtedSize, dividedSize); //height , width
529529
} else {
530530
oneArrangement = std::make_pair(dividedSize, sqrtedSize);
@@ -574,7 +574,7 @@ std::map<QualityRank, std::vector<TilesMergeArrangement *>> OmafTilesStitch::Cal
574574
uint32_t splitNum = ceil(float(packetsSize) / (maxTile_x * maxTile_y));
575575

576576
// 3. generate row and col arrays according to split num and maxTile_x & maxTile_y
577-
vector<pair<uint32_t, uint32_t>> rowAndColArr = GenerateRowAndColArr(packetsSize, splitNum, maxTile_x, maxTile_y);
577+
vector<pair<uint32_t, uint32_t>> rowAndColArr = GenerateRowAndColArr(packetsSize, splitNum, maxTile_x, maxTile_y, qualityRanking);
578578
vector<TilesMergeArrangement*> mergeArrList;
579579
for (uint32_t i = 0; i < splitNum; i++)
580580
{

src/OmafDashAccess/OmafTilesStitch.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -261,7 +261,7 @@ class OmafTilesStitch {
261261

262262
int32_t UpdateInitTilesMergeArr();
263263

264-
vector<pair<uint32_t, uint32_t>> GenerateRowAndColArr(uint32_t packetsSize, uint32_t splitNum, uint32_t maxTile_x, uint32_t maxTile_y);
264+
vector<pair<uint32_t, uint32_t>> GenerateRowAndColArr(uint32_t packetsSize, uint32_t splitNum, uint32_t maxTile_x, uint32_t maxTile_y, QualityRank ranking);
265265

266266
private:
267267
bool m_isInitialized; //<! whether the stitch class has been initialized

src/doc/Immersive_Video_Delivery_RefPlayer.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,4 +55,8 @@ The configuration file, config.xml, is used to config parameters for 360 linux p
5555
| maxVideoDecodeWidth | max video decoded width | decoded width that is supported |
5656
| maxVideoDecodeHeight | max video decoded height | decoded height that is supported |
5757
| predict | viewport prediction plugin | 0 is disable and 1 is enable |
58+
| intimeviewportupdate | support catch up path | 0 is disable and 1 is enable |
59+
| responseTimesInOneSeg | max catch up streams num in one segment duration | 1 or 2 |
60+
| maxCatchupWidth | max width of catch up streams | lower than 4k |
61+
| maxCatchupHeight | max height of catch up streams | lower than 4k |
5862
| PathOf360SCVPPlugins | path of 360SCVP plugins | needed for planar format rendering |

src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaLoader.java

Lines changed: 75 additions & 72 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,9 @@ public class MediaLoader {
102102
private SceneRenderer sceneRenderer;
103103
// The displaySurface is configured after both GL initialization and media loading.
104104
private Surface[] decodeSurface = new Surface[MAX_SURFACE_NUM + MAX_CATCHUP_SURFACE_NUM];
105+
private Pair<Integer, Surface> decoder_surface = null;
106+
private Pair<Integer, Surface> decoder_surface_cu = null;
107+
private Pair<Integer, Surface> display_surface = null;
105108
private Surface displaySurface;
106109

107110
// The actual work of loading media happens on a background thread.
@@ -200,80 +203,80 @@ private synchronized void displayWhenReady() {
200203
}
201204
// The important methods here are the setSurface & lockCanvas calls. These will have to happen
202205
// after the GLView is created.
203-
if (mediaPlayer != null && sceneRenderer.decode_surface_ready) {
204-
// 1. create decode surfaces and set them to native player.
205-
for (int i=0;i<MAX_SURFACE_NUM;i++){
206-
Pair<Integer, Surface> decoder_surface = sceneRenderer.createDecodeSurface(
207-
mediaPlayer.mConfig.maxVideoDecodeWidth, mediaPlayer.mConfig.maxVideoDecodeHeight, i);
208-
Log.i(TAG, "Complete to create one decode surface! surface id is " + i);
209-
mediaPlayer.SetDecodeSurface(decoder_surface.second, decoder_surface.first, i);//set surface
210-
Log.i(TAG, "ready to set decode surface!");
211-
decodeSurface[i] = decoder_surface.second;
212-
Log.i(TAG, "decode id in java " + decoder_surface.first);
213-
}
214-
for (int i=MAX_SURFACE_NUM;i<MAX_CATCHUP_SURFACE_NUM + MAX_SURFACE_NUM;i++){
215-
Pair<Integer, Surface> decoder_surface = sceneRenderer.createDecodeSurface(
216-
mediaPlayer.mConfig.maxCatchupWidth, mediaPlayer.mConfig.maxCatchupHeight, i);
217-
Log.i(TAG, "Complete to create one catch-up decode surface! surface id is " + i);
218-
mediaPlayer.SetDecodeSurface(decoder_surface.second, decoder_surface.first, i);//set surface
219-
Log.i(TAG, "ready to set decode surface!");
220-
decodeSurface[i] = decoder_surface.second;
221-
Log.i(TAG, "decode id in java " + decoder_surface.first);
222-
}
223-
// 2. create native player and get display width and height and projection format
224-
int ret = mediaPlayer.Create("./config.xml");
225-
if (ret != 0)
226-
{
227-
Log.e(TAG, "native media player create failed!");
228-
return;
229-
}
230-
// 3. create mesh according to PF
231-
int stereoFormat = Mesh.MEDIA_MONOSCOPIC;
232-
Mesh.MeshParams params = new Mesh.MeshParams();
233-
int projFormat = mediaPlayer.GetProjectionFormat();
234-
Log.i(TAG, "pf is " + projFormat);
235-
if (projFormat == PF_CUBEMAP) {
236-
mesh = CubeMapMesh.Create(params, context);
237-
Log.i(TAG, "Create cubemap mesh!");
238-
}
239-
else {
240-
params.radius = SPHERE_RADIUS_METERS;
241-
params.latitudes = DEFAULT_SPHERE_ROWS;
242-
params.longitudes = DEFAULT_SPHERE_COLUMNS;
243-
params.vFOV = DEFAULT_SPHERE_VERTICAL_DEGREES;
244-
params.hFOV = DEFAULT_SPHERE_HORIZONTAL_DEGREES;
245-
params.mediaFormat = stereoFormat;
246-
mesh = ERPMesh.Create(params);
247-
Log.i(TAG, "Create ERP mesh!");
248-
if (projFormat != PF_ERP) {
249-
Log.e(TAG, "Projection format is invalid! Default is ERP format!");
206+
if (mediaPlayer.mConfig != null && sceneRenderer.decode_surface_ready) {
207+
synchronized (this) {
208+
// 1. create decode surfaces and set them to native player.
209+
for (int i = 0; i < MAX_SURFACE_NUM; i++) {
210+
decoder_surface = sceneRenderer.createDecodeSurface(
211+
mediaPlayer.mConfig.maxVideoDecodeWidth, mediaPlayer.mConfig.maxVideoDecodeHeight, i);
212+
Log.i(TAG, "Complete to create one decode surface! surface id is " + i);
213+
mediaPlayer.SetDecodeSurface(decoder_surface.second, decoder_surface.first, i);//set surface
214+
Log.i(TAG, "ready to set decode surface!");
215+
decodeSurface[i] = decoder_surface.second;
216+
Log.i(TAG, "decode id in java " + decoder_surface.first);
250217
}
218+
for (int i = MAX_SURFACE_NUM; i < MAX_CATCHUP_SURFACE_NUM + MAX_SURFACE_NUM; i++) {
219+
decoder_surface_cu = sceneRenderer.createDecodeSurface(
220+
mediaPlayer.mConfig.maxCatchupWidth, mediaPlayer.mConfig.maxCatchupHeight, i);
221+
Log.i(TAG, "Complete to create one catch-up decode surface! surface id is " + i);
222+
mediaPlayer.SetDecodeSurface(decoder_surface_cu.second, decoder_surface_cu.first, i);//set surface
223+
Log.i(TAG, "ready to set decode surface!");
224+
decodeSurface[i] = decoder_surface_cu.second;
225+
Log.i(TAG, "decode id in java " + decoder_surface_cu.first);
226+
}
227+
// 2. create native player and get display width and height and projection format
228+
int ret = mediaPlayer.Create("./config.xml");
229+
if (ret != 0) {
230+
Log.e(TAG, "native media player create failed!");
231+
return;
232+
}
233+
// 3. create mesh according to PF
234+
int stereoFormat = Mesh.MEDIA_MONOSCOPIC;
235+
Mesh.MeshParams params = new Mesh.MeshParams();
236+
int projFormat = mediaPlayer.GetProjectionFormat();
237+
Log.i(TAG, "pf is " + projFormat);
238+
if (projFormat == PF_CUBEMAP) {
239+
mesh = CubeMapMesh.Create(params, context);
240+
Log.i(TAG, "Create cubemap mesh!");
241+
} else {
242+
params.radius = SPHERE_RADIUS_METERS;
243+
params.latitudes = DEFAULT_SPHERE_ROWS;
244+
params.longitudes = DEFAULT_SPHERE_COLUMNS;
245+
params.vFOV = DEFAULT_SPHERE_VERTICAL_DEGREES;
246+
params.hFOV = DEFAULT_SPHERE_HORIZONTAL_DEGREES;
247+
params.mediaFormat = stereoFormat;
248+
mesh = ERPMesh.Create(params);
249+
Log.i(TAG, "Create ERP mesh!");
250+
if (projFormat != PF_ERP) {
251+
Log.e(TAG, "Projection format is invalid! Default is ERP format!");
252+
}
253+
}
254+
// 4. get width / height and create display surface and set it to native player
255+
int displayWidth = mediaPlayer.GetWidth();
256+
int displayHeight = mediaPlayer.GetHeight();
257+
if (projFormat == PF_ERP) {
258+
sceneRenderer.displayTexId = Utils.glCreateTextureFor2D(mediaPlayer.GetWidth(), mediaPlayer.GetHeight());
259+
Log.i(TAG, "ERP Display texture id is " + sceneRenderer.displayTexId);
260+
} else if (projFormat == PF_CUBEMAP) {
261+
sceneRenderer.displayTexId = Utils.glCreateTextureForCube(mediaPlayer.GetWidth(), mediaPlayer.GetHeight());
262+
Log.i(TAG, "Cubemap Display texture id is " + sceneRenderer.displayTexId);
263+
} else {
264+
sceneRenderer.displayTexId = 0;
265+
Log.e(TAG, "Projection format is invalid! displayer texture id is set to zero!");
266+
}
267+
sceneRenderer.displayTexture = new SurfaceTexture(sceneRenderer.displayTexId);
268+
checkGlError();
269+
Log.i(TAG, "display width is " + displayWidth + " display height is " + displayHeight);
270+
display_surface = sceneRenderer.createDisplaySurface(
271+
displayWidth, displayHeight, mesh);
272+
Log.i(TAG, "ready to create display surface");
273+
mediaPlayer.SetDisplaySurface(display_surface.first);
274+
275+
displaySurface = display_surface.second;
276+
// 4. start native player thread
277+
Log.i(TAG, "start to start!");
278+
mediaPlayer.Start();
251279
}
252-
// 4. get width / height and create display surface and set it to native player
253-
int displayWidth = mediaPlayer.GetWidth();
254-
int displayHeight = mediaPlayer.GetHeight();
255-
if (projFormat == PF_ERP) {
256-
sceneRenderer.displayTexId = Utils.glCreateTextureFor2D(mediaPlayer.GetWidth(), mediaPlayer.GetHeight());
257-
Log.i(TAG, "ERP Display texture id is " + sceneRenderer.displayTexId);
258-
}else if (projFormat == PF_CUBEMAP) {
259-
sceneRenderer.displayTexId = Utils.glCreateTextureForCube(mediaPlayer.GetWidth(), mediaPlayer.GetHeight());
260-
Log.i(TAG, "Cubemap Display texture id is " + sceneRenderer.displayTexId);
261-
}else {
262-
sceneRenderer.displayTexId = 0;
263-
Log.e(TAG, "Projection format is invalid! displayer texture id is set to zero!");
264-
}
265-
sceneRenderer.displayTexture = new SurfaceTexture(sceneRenderer.displayTexId);
266-
checkGlError();
267-
Log.i(TAG, "display width is " + displayWidth + " display height is " + displayHeight);
268-
Pair<Integer, Surface> display_surface = sceneRenderer.createDisplaySurface(
269-
displayWidth, displayHeight, mesh);
270-
Log.i(TAG, "ready to create display surface");
271-
mediaPlayer.SetDisplaySurface(display_surface.first);
272-
273-
displaySurface = display_surface.second;
274-
// 4. start native player thread
275-
Log.i(TAG, "start to start!");
276-
mediaPlayer.Start();
277280
}else
278281
{
279282
Log.e(TAG, "media player is invalid!");

src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MediaPlayer/NativeMediaPlayer.java

Lines changed: 7 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -44,9 +44,15 @@
4444

4545
public class NativeMediaPlayer {
4646
private final String TAG = "NATIVE_MEDIA_PLAYER";
47+
48+
public final int READY = 0;
49+
public final int PLAY = 1;
50+
public final int PAUSED = 2;
51+
public final int STOPPED = 3;
52+
4753
private long mHandler;
4854
public RenderConfig mConfig;
49-
private int status = 0;
55+
private int status = READY;
5056
private Context context;
5157

5258
static {
@@ -115,15 +121,6 @@ public HeadPose() {
115121
}
116122
}
117123

118-
public int GetCurrentStatus()
119-
{
120-
return status;
121-
}
122-
123-
public void SetCurrentStatus(int st)
124-
{
125-
status = st;
126-
}
127124
/**
128125
* Original signature : <code>Handler Init()</code><br>
129126
* <i>native declaration : line 82</i>

src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/MonoscopicView.java

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -307,8 +307,10 @@ public void onSurfaceChanged(GL10 gl, int width, int height) {
307307
screenWidth = width;
308308
screenHeight = height;
309309
GLES20.glViewport(0, 0, width, height);
310-
Matrix.perspectiveM(
311-
projectionMatrix, 0, mediaLoader.mediaPlayer.mConfig.viewportVFOV, (float) width / height, Z_NEAR, Z_FAR);
310+
if (mediaLoader.mediaPlayer != null && mediaLoader.mediaPlayer.mConfig != null) {
311+
Matrix.perspectiveM(
312+
projectionMatrix, 0, mediaLoader.mediaPlayer.mConfig.viewportVFOV, (float) width / height, Z_NEAR, Z_FAR);
313+
}
312314
}
313315

314316
@Override
@@ -322,7 +324,9 @@ public void onDrawFrame(GL10 gl) {
322324
}
323325

324326
Matrix.multiplyMM(viewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
325-
scene.glDrawFrame(viewProjectionMatrix, Type.MONOCULAR, screenWidth, screenHeight);
327+
if (mediaLoader.mediaPlayer != null && mediaLoader.mediaPlayer.GetStatus() == mediaLoader.mediaPlayer.PLAY) {
328+
scene.glDrawFrame(viewProjectionMatrix, Type.MONOCULAR, screenWidth, screenHeight);
329+
}
326330
}
327331

328332
/** Adjusts the GL camera's rotation based on device rotation. Runs on the sensor thread. */

src/player/app/android/app/src/main/java/com/vcd/immersive/omafplayer/Rendering/SceneRenderer.java

Lines changed: 24 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -277,10 +277,15 @@ private synchronized boolean glConfigureScene() {
277277
*/
278278
public void glDrawFrame(float[] viewProjectionMatrix, int eyeType, int width, int height) {
279279
Log.i(TAG, "begin to draw frame !");
280-
if (mediaPlayer != null) {
281-
mediaPlayer.SetCurrentStatus(1);
280+
if (mediaPlayer == null) {
281+
return;
282+
}
283+
if (mediaPlayer.GetStatus() == mediaPlayer.STOPPED) {
284+
Log.e(TAG, "Media player set to stopped!");
285+
return;
282286
}
283-
else{
287+
if (mediaPlayer.GetStatus() != mediaPlayer.PLAY) {
288+
Log.i(TAG, "Media player is not in PLAY mode!");
284289
return;
285290
}
286291
if (!glConfigureScene()) {
@@ -313,7 +318,7 @@ public void glDrawFrame(float[] viewProjectionMatrix, int eyeType, int width, in
313318
}
314319
Log.i(TAG, "update tex image at pts " + cnt++);
315320
}
316-
if (drawTimes++ % 2 == 0)
321+
if (drawTimes++ % 2 == 0 && cnt > renderCount)
317322
{
318323
Log.i(TAG, "begin to update display tex!");
319324
int ret = 0;
@@ -433,22 +438,26 @@ public synchronized void setVideoFrameListener(OnFrameAvailableListener videoFra
433438

434439
public void SetCurrentPosition(int pts)
435440
{
436-
if (mediaPlayer != null && mediaPlayer.GetCurrentStatus() != 0 && pose_history.size() != 0) {
437-
NativeMediaPlayer.HeadPose pose = pose_history.getFirst();
438-
pose.pts = pts;
439-
mediaPlayer.SetCurrentPosition(pose);
440-
Log.i(TAG, "Set current position to native player and pts is " + pose.pts);
441+
synchronized (this) {
442+
if (mediaPlayer != null && mediaPlayer.GetStatus() != 0 && pose_history.size() != 0) {
443+
NativeMediaPlayer.HeadPose pose = pose_history.getFirst();
444+
pose.pts = pts;
445+
mediaPlayer.SetCurrentPosition(pose);
446+
Log.i(TAG, "Set current position to native player and pts is " + pose.pts);
447+
}
441448
}
442449
}
443450

444451
public void AddCurrentPose(NativeMediaPlayer.HeadPose pose)
445452
{
446-
if (pose != null) {
447-
pose_history.addFirst(pose);
448-
}
449-
int max_pose_number = 100;
450-
if (pose_history.size() > max_pose_number) {
451-
pose_history.removeLast();
453+
synchronized (this) {
454+
if (pose != null) {
455+
pose_history.addFirst(pose);
456+
}
457+
int max_pose_number = 100;
458+
if (pose_history.size() > max_pose_number) {
459+
pose_history.removeLast();
460+
}
452461
}
453462
}
454463
}

src/player/player_lib/Api/MediaPlayer_Android.cpp

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -137,5 +137,12 @@ RenderStatus MediaPlayer_Android::Close()
137137
return RENDER_STATUS_OK;
138138
}
139139

140+
int MediaPlayer_Android::UpdateDisplayTex(int render_count)
141+
{
142+
m_renderManager->UpdateFrames(render_count);
143+
int ret = m_renderManager->UpdateDisplayTex();
144+
return ret;
145+
}
146+
140147
VCD_NS_END
141148
#endif

0 commit comments

Comments
 (0)