提交 c53f8b0f authored 作者: kongdywang's avatar kongdywang

1. Live streaming replaces the new kernel.

2. Since the kernel has been replaced, the live-streaming config now retains only the maxAutoAdjustCacheTime, minAutoAdjustCacheTime, connectRetryCount, and connectRetryInterval properties; the remaining parameters are marked as deprecated.

3. New live-streaming interfaces have been added: enableReceiveSeiMessage, showDebugView, setProperty, getSupportedBitrate, and setCacheParams.

4. Playing a live stream no longer requires the playType parameter, which has been deprecated.

5. The live-streaming and on-demand demo pages now wait for the license to load successfully before playing.

6. Other known issues have been fixed.
上级 71640a0b
......@@ -2,6 +2,8 @@
package com.tencent.vod.flutter;
import com.tencent.rtmp.TXLiveConstants;
/**
* 通用事件码
*/
......@@ -208,4 +210,35 @@ public class FTXEvent {
// Keys for subtitle payload fields attached to subtitle events.
public static final String EXTRA_SUBTITLE_START_POSITION_MS = "startPositionMs";
public static final String EXTRA_SUBTITLE_DURATION_MS = "durationMs";
public static final String EXTRA_SUBTITLE_TRACK_INDEX = "trackIndex";

// player event: keys for the generic player-event payload (event code plus video dimensions)
public static final String EVT_KEY_PLAYER_EVENT = "event";
public static final String EVT_KEY_PLAYER_WIDTH = "EVT_WIDTH";
public static final String EVT_KEY_PLAYER_HEIGHT = "EVT_HEIGHT";
/**
 * Keys used when forwarding network/statistics status to the Flutter side.
 * Most entries alias the TXLiveConstants NET_STATUS_* keys from the native SDK;
 * the plain string entries at the bottom are additional keys defined by this plugin.
 */
public interface TUINetConst {
    String NET_STATUS_CPU_USAGE = TXLiveConstants.NET_STATUS_CPU_USAGE;
    String NET_STATUS_VIDEO_WIDTH = TXLiveConstants.NET_STATUS_VIDEO_WIDTH;
    String NET_STATUS_VIDEO_HEIGHT = TXLiveConstants.NET_STATUS_VIDEO_HEIGHT;
    String NET_STATUS_VIDEO_FPS = TXLiveConstants.NET_STATUS_VIDEO_FPS;
    String NET_STATUS_VIDEO_GOP = TXLiveConstants.NET_STATUS_VIDEO_GOP;
    String NET_STATUS_VIDEO_BITRATE = TXLiveConstants.NET_STATUS_VIDEO_BITRATE;
    String NET_STATUS_AUDIO_BITRATE = TXLiveConstants.NET_STATUS_AUDIO_BITRATE;
    String NET_STATUS_NET_SPEED = TXLiveConstants.NET_STATUS_NET_SPEED;
    String NET_STATUS_AUDIO_CACHE = TXLiveConstants.NET_STATUS_AUDIO_CACHE;
    String NET_STATUS_VIDEO_CACHE = TXLiveConstants.NET_STATUS_VIDEO_CACHE;
    String NET_STATUS_AUDIO_INFO = TXLiveConstants.NET_STATUS_AUDIO_INFO;
    String NET_STATUS_NET_JITTER = TXLiveConstants.NET_STATUS_NET_JITTER;
    String NET_STATUS_SERVER_IP = TXLiveConstants.NET_STATUS_SERVER_IP;
    String NET_STATUS_VIDEO_DPS = TXLiveConstants.NET_STATUS_VIDEO_DPS;
    String NET_STATUS_QUALITY_LEVEL = TXLiveConstants.NET_STATUS_QUALITY_LEVEL;
    // Plugin-defined keys (no TXLiveConstants equivalent).
    String NET_STATUS_SYSTEM_CPU = "SYSTEM_CPU";
    String NET_STATUS_VIDEO_LOSS = "VIDEO_PACKET_LOSS";
    String NET_STATUS_AUDIO_LOSS = "AUDIO_PACKET_LOSS";
    String NET_STATUS_AUDIO_TOTAL_BLOCK_TIME = "AUDIO_TOTAL_BLOCK_TIME";
    String NET_STATUS_VIDEO_TOTAL_BLOCK_TIME = "VIDEO_TOTAL_BLOCK_TIME";
    String NET_STATUS_VIDEO_BLOCK_RATE = "VIDEO_BLOCK_RATE";
    String NET_STATUS_AUDIO_BLOCK_RATE = "AUDIO_BLOCK_RATE";
    String NET_STATUS_RTT = "RTT";
}
}
package com.tencent.vod.flutter.live.egl;
import android.view.Surface;
import com.tencent.liteav.base.util.LiteavLog;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
/**
 * EGL 1.0 helper that owns a display, context and surface (window or pbuffer).
 * Create via {@link #createEGLSurface}; call {@link #destroy()} when done.
 * All methods must be called on the thread that will do the GL work.
 */
public class EGL10Helper implements EGLHelper<EGLContext> {

    private static final String TAG = "EGL10Helper";

    // EGL_ANDROID_recordable: surface may feed a MediaCodec input surface.
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
    private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
    private static final int EGL_OPENGL_ES2_BIT = 4;

    // Config for an offscreen (pbuffer) surface. On-screen surfaces use EGL10.EGL_WINDOW_BIT.
    private static final int[] ATTRIBUTES_FOR_OFFSCREEN_SURFACE = {
            EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
            EGL10.EGL_RED_SIZE, 8,
            EGL10.EGL_GREEN_SIZE, 8,
            EGL10.EGL_BLUE_SIZE, 8,
            EGL10.EGL_ALPHA_SIZE, 8,
            EGL10.EGL_DEPTH_SIZE, 0,
            EGL10.EGL_STENCIL_SIZE, 0,
            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
            EGL10.EGL_NONE
    };

    // Config for an on-screen (window) surface.
    private static final int[] ATTRIBUTES_FOR_SURFACE = {
            EGL10.EGL_SURFACE_TYPE, EGL10.EGL_WINDOW_BIT,
            EGL10.EGL_RED_SIZE, 8,
            EGL10.EGL_GREEN_SIZE, 8,
            EGL10.EGL_BLUE_SIZE, 8,
            EGL10.EGL_ALPHA_SIZE, 8,
            EGL10.EGL_DEPTH_SIZE, 0,
            EGL10.EGL_STENCIL_SIZE, 0,
            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
            EGL_RECORDABLE_ANDROID, 1,
            EGL10.EGL_NONE
    };

    /**
     * Creates and initializes a helper.
     *
     * @param config  an existing EGLConfig to reuse, or null to choose one
     * @param context an existing context to share with, or null for a fresh one
     * @param surface the window surface, or null for an offscreen pbuffer of width x height
     * @return the ready-to-use helper, or null when any EGL setup step fails
     */
    public static EGL10Helper createEGLSurface(EGLConfig config, EGLContext context, Surface surface, int width,
                                               int height) {
        EGL10Helper egl = new EGL10Helper(width, height);
        if (egl.initialize(config, context, surface)) {
            return egl;
        } else {
            return null;
        }
    }

    private final int mWidth;
    private final int mHeight;

    private EGLDisplay mEGLDisplay = EGL10.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL10.EGL_NO_CONTEXT;
    private EGLSurface mEGLSurface = EGL10.EGL_NO_SURFACE;
    private EGL10 mEGL;
    private EGLConfig mEGLConfig;

    private EGL10Helper(int width, int height) {
        mWidth = width;
        mHeight = height;
    }

    @Override
    public boolean swapBuffers() {
        boolean ret = mEGL.eglSwapBuffers(mEGLDisplay, mEGLSurface);
        checkEglError();
        return ret;
    }

    @Override
    public void makeCurrent() {
        mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
        checkEglError();
    }

    /** Releases surface, context and display. Safe to call more than once. */
    public void destroy() {
        if (mEGLDisplay != EGL10.EGL_NO_DISPLAY) {
            mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
            if (mEGLSurface != EGL10.EGL_NO_SURFACE) {
                mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
                mEGLSurface = EGL10.EGL_NO_SURFACE;
            }
            if (mEGLContext != EGL10.EGL_NO_CONTEXT) {
                mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
                mEGLContext = EGL10.EGL_NO_CONTEXT;
            }
            mEGL.eglTerminate(mEGLDisplay);
            checkEglError();
        }
        mEGLDisplay = EGL10.EGL_NO_DISPLAY;
    }

    public void unmakeCurrent() {
        if (mEGLDisplay != EGL10.EGL_NO_DISPLAY) {
            mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
        }
    }

    private boolean initialize(EGLConfig config, EGLContext context, Surface surface) {
        mEGL = (EGL10) EGLContext.getEGL();
        mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
        // Fix: the return value of eglInitialize was previously ignored; a failed
        // display initialization now aborts instead of cascading EGL errors.
        if (!mEGL.eglInitialize(mEGLDisplay, new int[2])) {
            checkEglError();
            return false;
        }
        if (config == null) {
            int[] numConfig = new int[1];
            EGLConfig[] configs = new EGLConfig[1];
            int[] configAttributes = surface == null ? ATTRIBUTES_FOR_OFFSCREEN_SURFACE : ATTRIBUTES_FOR_SURFACE;
            // Fix: eglChooseConfig could fail or match no config, leaving configs[0]
            // null and crashing eglCreateContext below.
            if (!mEGL.eglChooseConfig(mEGLDisplay, configAttributes, configs, 1, numConfig)
                    || numConfig[0] <= 0 || configs[0] == null) {
                checkEglError();
                return false;
            }
            mEGLConfig = configs[0];
        } else {
            mEGLConfig = config;
        }
        int version = 2;
        int[] attribList = {
                EGL_CONTEXT_CLIENT_VERSION, version,
                EGL10.EGL_NONE
        };
        if (context == null) {
            context = EGL10.EGL_NO_CONTEXT;
        }
        mEGLContext = mEGL.eglCreateContext(mEGLDisplay, mEGLConfig, context, attribList);
        if (mEGLContext == EGL10.EGL_NO_CONTEXT) {
            checkEglError();
            return false;
        }
        int[] attribListPbuffer = {
                EGL10.EGL_WIDTH, mWidth,
                EGL10.EGL_HEIGHT, mHeight,
                EGL10.EGL_NONE
        };
        if (surface == null) {
            mEGLSurface = mEGL.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, attribListPbuffer);
        } else {
            mEGLSurface = mEGL.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, null);
        }
        if (mEGLSurface == EGL10.EGL_NO_SURFACE) {
            checkEglError();
            return false;
        }
        if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            checkEglError();
            return false;
        }
        return true;
    }

    @Override
    public EGLContext getContext() {
        return mEGLContext;
    }

    /** Logs the last EGL error, if any; does not throw. */
    public void checkEglError() {
        int ec = mEGL.eglGetError();
        if (ec != EGL10.EGL_SUCCESS) {
            LiteavLog.e(TAG, "EGL error: 0x" + Integer.toHexString(ec));
        }
    }
}
package com.tencent.vod.flutter.live.egl;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;
import com.tencent.liteav.base.util.LiteavLog;
/**
 * EGL 1.4 helper that owns a display, context and surface (window or pbuffer).
 * Create via {@link #createEGLSurface}; call {@link #destroy()} when done.
 * All methods must be called on the thread that will do the GL work.
 */
public class EGL14Helper implements EGLHelper<EGLContext> {

    private static final String TAG = "EGL14Helper";

    // EGL_ANDROID_recordable: surface may feed a MediaCodec input surface.
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
    private static final int GLES_VERSION = 2;

    private static final int[] ATTRIBUTE_LIST_FOR_SURFACE = {
            EGL14.EGL_RED_SIZE, 8,
            EGL14.EGL_GREEN_SIZE, 8,
            EGL14.EGL_BLUE_SIZE, 8,
            EGL14.EGL_ALPHA_SIZE, 8,
            EGL14.EGL_DEPTH_SIZE, 0,
            EGL14.EGL_STENCIL_SIZE, 0,
            EGL14.EGL_RENDERABLE_TYPE,
            GLES_VERSION == 2 ? EGL14.EGL_OPENGL_ES2_BIT : EGL14.EGL_OPENGL_ES2_BIT | EGLExt.EGL_OPENGL_ES3_BIT_KHR,
            EGL_RECORDABLE_ANDROID, 1,
            EGL14.EGL_NONE
    };

    // Offscreen pbuffer config; on-screen surfaces would use EGL_WINDOW_BIT.
    private static final int[] ATTRIBUTE_LIST_FOR_OFFSCREEN_SURFACE = {
            EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
            EGL14.EGL_RED_SIZE, 8,
            EGL14.EGL_GREEN_SIZE, 8,
            EGL14.EGL_BLUE_SIZE, 8,
            EGL14.EGL_ALPHA_SIZE, 8,
            EGL14.EGL_DEPTH_SIZE, 0,
            EGL14.EGL_STENCIL_SIZE, 0,
            EGL14.EGL_RENDERABLE_TYPE,
            GLES_VERSION == 2 ? EGL14.EGL_OPENGL_ES2_BIT : EGL14.EGL_OPENGL_ES2_BIT | EGLExt.EGL_OPENGL_ES3_BIT_KHR,
            EGL_RECORDABLE_ANDROID, 1,
            EGL14.EGL_NONE
    };

    private final int mWidth;
    private final int mHeight;

    private EGLConfig mEGLConfig = null;
    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    // Fix: initialize to EGL_NO_SURFACE so destroy() is safe after an early
    // initialize() failure (was null, which passed the != EGL_NO_SURFACE check).
    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;

    private EGL14Helper(int width, int height) {
        mWidth = width;
        mHeight = height;
    }

    /**
     * Creates and initializes a helper.
     *
     * @param config  an existing EGLConfig to reuse, or null to choose one
     * @param context an existing context to share with, or null for a fresh one
     * @param surface the window surface, or null for an offscreen pbuffer of width x height
     * @return the ready-to-use helper, or null when any EGL setup step fails
     */
    public static EGL14Helper createEGLSurface(EGLConfig config, EGLContext context, Surface surface, int width,
                                               int height) {
        EGL14Helper egl = new EGL14Helper(width, height);
        if (egl.initialize(config, context, surface)) {
            return egl;
        } else {
            return null;
        }
    }

    @Override
    public void makeCurrent() {
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            // called makeCurrent() before create?
            LiteavLog.d(TAG, "NOTE: makeCurrent w/o display");
        }
        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }

    /** Releases surface, context and display. Safe to call more than once. */
    @Override
    public void destroy() {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            // Android is unusual in that it uses a reference-counted EGLDisplay. So for
            // every eglInitialize() we need an eglTerminate().
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
            if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
                EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
                mEGLSurface = EGL14.EGL_NO_SURFACE;
            }
            // Fix: context destruction and eglTerminate were previously executed even
            // when the display was already EGL_NO_DISPLAY, raising EGL_BAD_DISPLAY.
            if (mEGLContext != EGL14.EGL_NO_CONTEXT) {
                EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
                mEGLContext = EGL14.EGL_NO_CONTEXT;
            }
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(mEGLDisplay);
        }
        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    }

    @Override
    public boolean swapBuffers() {
        return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    }

    private boolean initialize(EGLConfig config, EGLContext context, Surface surface) {
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
            // Fix: was `mEGLDisplay = null`, which made destroy() pass null to
            // eglMakeCurrent; reset to the EGL_NO_DISPLAY sentinel instead.
            mEGLDisplay = EGL14.EGL_NO_DISPLAY;
            throw new RuntimeException("unable to initialize EGL14");
        }
        if (config != null) {
            mEGLConfig = config;
        } else {
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            int[] attribList = surface == null ? ATTRIBUTE_LIST_FOR_OFFSCREEN_SURFACE : ATTRIBUTE_LIST_FOR_SURFACE;
            // Fix: also reject a "successful" call that matched zero configs; configs[0]
            // would be null and eglCreateContext would crash.
            if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)
                    || numConfigs[0] <= 0 || configs[0] == null) {
                return false;
            }
            mEGLConfig = configs[0];
        }
        if (context == null) {
            context = EGL14.EGL_NO_CONTEXT;
        }
        int[] attribList = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, GLES_VERSION,
                EGL14.EGL_NONE
        };
        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig, context, attribList, 0);
        if (mEGLContext == EGL14.EGL_NO_CONTEXT) {
            checkEGLError();
            return false;
        }
        if (surface == null) {
            int[] attribListPbuffer = {
                    EGL14.EGL_WIDTH, mWidth,
                    EGL14.EGL_HEIGHT, mHeight,
                    EGL14.EGL_NONE
            };
            mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, attribListPbuffer, 0);
        } else {
            int[] surfaceAttribs = {EGL14.EGL_NONE};
            mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, surfaceAttribs, 0);
        }
        checkEGLError();
        if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
            checkEGLError();
            return false;
        }
        return true;
    }

    @Override
    public void unmakeCurrent() {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        }
    }

    /** Tags the current frame with a presentation timestamp (for MediaCodec recording). */
    public void setPresentationTime(long nsecs) {
        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
    }

    @Override
    public EGLContext getContext() {
        return mEGLContext;
    }

    public EGLConfig getConfig() {
        return mEGLConfig;
    }

    /** Logs and throws on any pending EGL error. */
    private void checkEGLError() {
        int ec = EGL14.eglGetError();
        if (ec != EGL14.EGL_SUCCESS) {
            LiteavLog.e(TAG, "EGL error:" + ec);
            throw new RuntimeException(": EGL error: 0x" + Integer.toHexString(ec));
        }
    }
}
package com.tencent.vod.flutter.live.egl;
public interface EGLHelper<T> {

    /**
     * Returns the EGLContext, e.g. for creating a shared EGLContext.
     */
    T getContext();

    /**
     * Binds the EGLContext, together with the draw surface and read surface held by
     * this helper, to the current thread.
     */
    void makeCurrent();

    /**
     * Unbinds the EGLContext, draw surface and read surface from the current thread.
     */
    void unmakeCurrent();

    /**
     * Flushes the rendered content onto the bound draw target.
     */
    boolean swapBuffers();

    /**
     * Destroys the created EGLContext and its related resources.
     */
    void destroy();
}
package com.tencent.vod.flutter.live.egl;
/**
 * Shared OpenGL constants: full-screen quad vertices, per-rotation texture
 * coordinates, and the supported scale modes.
 */
public class GLConstants {

    // NOTE(review): these three are package-private *instance* fields, not static
    // constants — confirm whether they were meant to be `static final`.
    boolean debug = false;
    int noTexture = -1;
    int invalidProgramId = -1;

    // Full-screen quad vertices in triangle-strip order.
    static float[] CUBE_VERTICES_ARRAYS = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f, 1.0f,
            1.0f, 1.0f
    };

    // Texture coordinates for each rotation of the input texture (strip order).
    static float[] TEXTURE_COORDS_NO_ROTATION = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f
    };

    static float[] TEXTURE_COORDS_ROTATE_LEFT = {
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            0.0f, 1.0f
    };

    static float[] TEXTURE_COORDS_ROTATE_RIGHT = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            1.0f, 0.0f
    };

    static float[] TEXTURE_COORDS_ROTATED_180 = {
            1.0f, 1.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 0.0f
    };

    enum GLScaleType {
        /**
         * Centered without cropping; leaves black bars on the width or height.
         */
        FIT_CENTER,
        /**
         * Centered and cropped to fill the target.
         */
        CENTER_CROP,
    }
}
package com.tencent.vod.flutter.live.egl;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Pair;
import android.widget.ImageView;
import com.tencent.liteav.base.util.LiteavLog;
import com.tencent.vod.flutter.live.render.FTXRotation;
import com.tencent.vod.flutter.live.render.FTXTextureRotationUtils;
import java.nio.Buffer;
import javax.microedition.khronos.opengles.GL10;
/**
 * Static helpers for texture/framebuffer lifecycle and for computing the vertex
 * and texture-coordinate buffers used when drawing a frame. All GL calls must be
 * made on a thread with a current GL context.
 */
public class OpenGlUtils {

    private static final String TAG = "OpenGlUtils";

    // Sentinel id meaning "no GL object allocated"; reused for framebuffer ids too.
    public static final int NO_TEXTURE = -1;

    // Full-screen quad vertices (triangle-strip order) and default texture coordinates.
    public static final float[] CUBE = {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f};
    public static final float[] TEXTURE = {0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f};

    /** Allocates one framebuffer object and returns its id. */
    public static int generateFrameBufferId() {
        int[] frameBufferIds = new int[1];
        GLES20.glGenFramebuffers(1, frameBufferIds, 0);
        return frameBufferIds[0];
    }

    /**
     * Uploads pixel data into a 2D texture.
     *
     * @param format    GL pixel format of {@code data} (used for both internal and source format)
     * @param data      pixel buffer to upload
     * @param width     texture width in pixels
     * @param height    texture height in pixels
     * @param usedTexId existing texture id to update in place, or {@link #NO_TEXTURE}
     *                  to create a new texture (linear filtering, clamp-to-edge)
     * @return the texture id now holding the data
     */
    public static int loadTexture(int format, Buffer data, int width, int height, int usedTexId) {
        int[] textures = new int[1];
        if (usedTexId == NO_TEXTURE) {
            GLES20.glGenTextures(1, textures, 0);
            LiteavLog.d(TAG, "glGenTextures textureId: " + textures[0]);
            OpenGlUtils.bindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0, format, GLES20.GL_UNSIGNED_BYTE,
                    data);
        } else {
            // Existing texture: overwrite its contents without reallocating storage.
            OpenGlUtils.bindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
            GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, width, height, format, GLES20.GL_UNSIGNED_BYTE, data);
            textures[0] = usedTexId;
        }
        return textures[0];
    }

    /** Creates an OES (external) texture, e.g. for SurfaceTexture/decoder output. */
    public static int generateTextureOES() {
        int[] texture = new int[1];
        GLES20.glGenTextures(1, texture, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        return texture[0];
    }

    /** Deletes a texture; no-op when the id is {@link #NO_TEXTURE}. */
    public static void deleteTexture(int textureId) {
        if (NO_TEXTURE == textureId) {
            return;
        }
        GLES20.glDeleteTextures(1, new int[]{textureId}, 0);
        LiteavLog.d(TAG, "delete textureId " + textureId);
    }

    /** Deletes a framebuffer; no-op when the id is -1. */
    public static void deleteFrameBuffer(int frameBufferId) {
        if (NO_TEXTURE == frameBufferId) {
            return;
        }
        GLES20.glDeleteFramebuffers(1, new int[]{frameBufferId}, 0);
        LiteavLog.d(TAG, "delete frame buffer id: " + frameBufferId);
    }

    /** Binds {@code texture} to {@code target} and logs any pending GL error. */
    public static void bindTexture(int target, int texture) {
        GLES20.glBindTexture(target, texture);
        checkGlError("bindTexture");
    }

    /** Drains the GL error queue, logging each error together with the operation name. */
    public static void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            LiteavLog.e(TAG, String.format("%s: glError %s", op, GLUtils.getEGLErrorString(error)));
        }
    }

    /**
     * Calculates the vertex array and texture array from the input and output
     * width/height.
     *
     * @param scaleType          CENTER_CROP crops the input to fill the target;
     *                           any other value letterboxes (fit-center)
     * @param inputFTXRotation   input texture rotation angle
     * @param needFlipHorizontal whether to perform mirror mapping
     * @param inputWith          the width of the input texture (unrotated)
     * @param inputHeight        the height of the input texture (unrotated)
     * @param outputWidth        the width of the drawing target
     * @param outputHeight       the height of the drawing target
     * @return pair of (vertex array, texture-coordinate array)
     */
    public static Pair<float[], float[]> calcCubeAndTextureBuffer(ImageView.ScaleType scaleType,
                                                                  FTXRotation inputFTXRotation,
                                                                  boolean needFlipHorizontal,
                                                                  int inputWith,
                                                                  int inputHeight,
                                                                  int outputWidth,
                                                                  int outputHeight) {
        // 90/270-degree rotations swap the effective width and height.
        boolean needRotate = (inputFTXRotation == FTXRotation.ROTATION_90
                || inputFTXRotation == FTXRotation.ROTATION_270);
        int rotatedWidth = needRotate ? inputHeight : inputWith;
        int rotatedHeight = needRotate ? inputWith : inputHeight;
        // Scale so the input covers the output; ratios > 1 mean overflow on that axis.
        float maxRratio = Math.max(1.0f * outputWidth / rotatedWidth, 1.0f * outputHeight / rotatedHeight);
        float ratioWidth = 1.0f * Math.round(rotatedWidth * maxRratio) / outputWidth;
        float ratioHeight = 1.0f * Math.round(rotatedHeight * maxRratio) / outputHeight;
        float[] cube = OpenGlUtils.CUBE;
        float[] textureCords = FTXTextureRotationUtils.getRotation(inputFTXRotation, needFlipHorizontal, true);
        if (scaleType == ImageView.ScaleType.CENTER_CROP) {
            // Crop: move the texture window inward on the overflowing axis.
            float distHorizontal = needRotate ? ((1 - 1 / ratioHeight) / 2) : ((1 - 1 / ratioWidth) / 2);
            float distVertical = needRotate ? ((1 - 1 / ratioWidth) / 2) : ((1 - 1 / ratioHeight) / 2);
            textureCords = new float[]{
                    addDistance(textureCords[0], distHorizontal), addDistance(textureCords[1], distVertical),
                    addDistance(textureCords[2], distHorizontal), addDistance(textureCords[3], distVertical),
                    addDistance(textureCords[4], distHorizontal), addDistance(textureCords[5], distVertical),
                    addDistance(textureCords[6], distHorizontal), addDistance(textureCords[7], distVertical),};
        } else {
            // Fit: shrink the vertex quad so the whole input stays visible (letterbox).
            cube = new float[]{
                    cube[0] / ratioHeight, cube[1] / ratioWidth, cube[2] / ratioHeight, cube[3] / ratioWidth,
                    cube[4] / ratioHeight, cube[5] / ratioWidth, cube[6] / ratioHeight, cube[7] / ratioWidth,};
        }
        return new Pair<>(cube, textureCords);
    }

    // Moves a 0/1 texture coordinate inward: 0 -> distance, 1 -> 1 - distance.
    private static float addDistance(float coordinate, float distance) {
        return coordinate == 0.0f ? distance : 1 - distance;
    }
}
package com.tencent.vod.flutter.live.render;
import android.opengl.GLES20;
import com.tencent.vod.flutter.live.egl.OpenGlUtils;
import java.nio.FloatBuffer;
import java.util.LinkedList;
/**
 * Minimal pass-through GPU filter: draws an input texture onto the current draw
 * target via a no-op shader pair, optionally mirrored horizontally.
 * init(), onDraw() and destroy() must run on the GL thread.
 */
public class FTXGPUImageFilter {

    // Pass-through vertex shader: forwards position and texture coordinate unchanged.
    public static final String NO_FILTER_VERTEX_SHADER = ""
            + "attribute vec4 position;\n"
            + "attribute vec4 inputTextureCoordinate;\n"
            + " \n"
            + "varying vec2 textureCoordinate;\n"
            + " \n"
            + "void main()\n"
            + "{\n"
            + " gl_Position = position;\n"
            + " textureCoordinate = inputTextureCoordinate.xy;\n"
            + "}";

    // Pass-through fragment shader: samples the input texture unchanged.
    public static final String NO_FILTER_FRAGMENT_SHADER = ""
            + "varying highp vec2 textureCoordinate;\n"
            + " \n"
            + "uniform sampler2D inputImageTexture;\n"
            + " \n"
            + "void main()\n"
            + "{\n"
            + " gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n"
            + "}";

    // Fragment shader that mirrors the image horizontally (x -> 1 - x).
    public static final String NO_FILTER_FRAGMENT_SHADER_FLIP = ""
            + "varying highp vec2 textureCoordinate;\n"
            + " \n"
            + "uniform sampler2D inputImageTexture;\n" + " \n"
            + "void main()\n"
            + "{\n"
            + " gl_FragColor = texture2D(inputImageTexture, vec2(1.0 - textureCoordinate.x, "
            + "textureCoordinate.y));\n"
            + "}";

    protected FTXProgram mProgram;
    // Tasks queued from other threads, drained on the GL thread in onDraw()/destroy().
    private final LinkedList<Runnable> mRunOnDraw;
    protected float[] mTextureMatrix;
    private int mGLAttribPosition;
    private int mGLUniformTexture;
    private int mGLAttribTextureCoordinate;
    private boolean mIsInitialized;

    public FTXGPUImageFilter() {
        this(false);
    }

    /**
     * @param flip when true, uses the horizontally-mirroring fragment shader
     */
    public FTXGPUImageFilter(boolean flip) {
        mRunOnDraw = new LinkedList<>();
        mProgram = new FTXProgram(NO_FILTER_VERTEX_SHADER,
                flip ? NO_FILTER_FRAGMENT_SHADER_FLIP : NO_FILTER_FRAGMENT_SHADER);
    }

    /** Builds the GL program and caches attribute/uniform locations. GL thread only. */
    public final void init() {
        onInit();
        mIsInitialized = true;
    }

    protected void onInit() {
        mProgram.build();
        mGLAttribPosition = GLES20.glGetAttribLocation(mProgram.getProgramId(), "position");
        mGLUniformTexture = GLES20.glGetUniformLocation(mProgram.getProgramId(), "inputImageTexture");
        mGLAttribTextureCoordinate = GLES20.glGetAttribLocation(mProgram.getProgramId(), "inputTextureCoordinate");
        mIsInitialized = true;
    }

    /** Subclass hook invoked when the output size changes; default is a no-op. */
    public void onOutputSizeChanged(final int width, final int height) {
    }

    /** Subclass hook to release extra resources before the program is destroyed. */
    protected void onUninit() {
    }

    /** Drains pending tasks, then releases the GL program. GL thread only. */
    public final void destroy() {
        runPendingOnDrawTasks();
        onUninit();
        mIsInitialized = false;
        mProgram.destroy();
    }

    /** Texture target to bind; subclasses may override (e.g. GL_TEXTURE_EXTERNAL_OES). */
    public int getTarget() {
        return GLES20.GL_TEXTURE_2D;
    }

    // NOTE(review): method name contains a typo ("Texutre"); kept as-is for API compatibility.
    public void setTexutreTransform(float[] matrix) {
        mTextureMatrix = matrix;
    }

    public boolean isInitialized() {
        return mIsInitialized;
    }

    /**
     * Use OpenGL to render the screen: draws {@code textureId} as a quad using the
     * supplied vertex and texture-coordinate buffers. GL thread only; no-op while
     * uninitialized (after draining queued tasks).
     */
    public void onDraw(final int textureId, final FloatBuffer cubeBuffer, final FloatBuffer textureBuffer) {
        GLES20.glUseProgram(mProgram.getProgramId());
        runPendingOnDrawTasks();
        if (!mIsInitialized) {
            return;
        }
        cubeBuffer.position(0);
        GLES20.glVertexAttribPointer(mGLAttribPosition, 2, GLES20.GL_FLOAT, false, 0, cubeBuffer);
        GLES20.glEnableVertexAttribArray(mGLAttribPosition);
        textureBuffer.position(0);
        GLES20.glVertexAttribPointer(mGLAttribTextureCoordinate, 2, GLES20.GL_FLOAT, false, 0,
                textureBuffer);
        GLES20.glEnableVertexAttribArray(mGLAttribTextureCoordinate);
        if (textureId != OpenGlUtils.NO_TEXTURE) {
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            OpenGlUtils.bindTexture(getTarget(), textureId);
            GLES20.glUniform1i(mGLUniformTexture, 0);
        }
        beforeDrawArrays(textureId);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(mGLAttribPosition);
        GLES20.glDisableVertexAttribArray(mGLAttribTextureCoordinate);
        OpenGlUtils.bindTexture(getTarget(), 0);
    }

    /** Subclass hook invoked right before glDrawArrays; set extra uniforms here. */
    protected void beforeDrawArrays(int textureId) {
    }

    protected void runPendingOnDrawTasks() {
        // Copy the current one to be run to a new array,
        // and then start execution to prevent the execution from being added again.
        LinkedList<Runnable> runList;
        synchronized (mRunOnDraw) {
            runList = new LinkedList<>(mRunOnDraw);
            mRunOnDraw.clear();
        }
        while (!runList.isEmpty()) {
            runList.removeFirst().run();
        }
    }
}
package com.tencent.vod.flutter.live.render;
import android.opengl.GLES20;
import android.util.Log;
import com.tencent.liteav.videobase.utils.OpenGlUtils;
/**
 * Wraps compilation and linking of a GLES 2.0 shader program.
 * {@link #build()} and {@link #destroy()} must run on the GL thread.
 */
public class FTXProgram {

    private static final String TAG = "Program";

    private static final int INVALID_PROGRAM_ID = -1;

    private final String mVertexShader;
    private final String mFragmentShader;
    private int mProgramId;

    /** Stores the shader sources; compilation is deferred to {@link #build()}. */
    public FTXProgram(String vertexShader, String fragmentShader) {
        mVertexShader = vertexShader;
        mFragmentShader = fragmentShader;
        mProgramId = INVALID_PROGRAM_ID;
    }

    /**
     * Compiles both shaders and links the program. On any failure
     * {@link #getProgramId()} keeps returning INVALID_PROGRAM_ID and every GL
     * object created along the way is released (the previous version leaked the
     * shader/program objects on the failure paths).
     */
    public void build() {
        int vertexShaderId = loadShader(mVertexShader, GLES20.GL_VERTEX_SHADER);
        if (vertexShaderId == 0) {
            Log.e(TAG, "load vertex shader failed.");
            return;
        }
        int fragmentShaderId = loadShader(mFragmentShader, GLES20.GL_FRAGMENT_SHADER);
        if (fragmentShaderId == 0) {
            // Fix: release the already-compiled vertex shader.
            GLES20.glDeleteShader(vertexShaderId);
            Log.e(TAG, "load fragment shader failed.");
            return;
        }
        int programId = GLES20.glCreateProgram();
        GLES20.glAttachShader(programId, vertexShaderId);
        GLES20.glAttachShader(programId, fragmentShaderId);
        GLES20.glLinkProgram(programId);
        // Once attached, shaders can be flagged for deletion regardless of the link
        // result; they are freed together with the program object.
        GLES20.glDeleteShader(vertexShaderId);
        GLES20.glDeleteShader(fragmentShaderId);
        int[] link = new int[1];
        GLES20.glGetProgramiv(programId, GLES20.GL_LINK_STATUS, link, 0);
        if (link[0] <= 0) {
            Log.e(TAG, "link program failed. status: " + link[0]);
            // Fix: release the failed program object instead of leaking it.
            GLES20.glDeleteProgram(programId);
            return;
        }
        mProgramId = programId;
    }

    /** Returns the linked program id, or INVALID_PROGRAM_ID when build() failed. */
    public int getProgramId() {
        return mProgramId;
    }

    /** Releases the program. Safe to call when build() failed or was never called. */
    public void destroy() {
        // Fix: avoid glDeleteProgram(-1), which raises GL_INVALID_VALUE.
        if (mProgramId != INVALID_PROGRAM_ID) {
            GLES20.glDeleteProgram(mProgramId);
            mProgramId = INVALID_PROGRAM_ID;
        }
    }

    // Compiles one shader; returns its id, or 0 on failure (with the GL error logged).
    private int loadShader(final String strSource, final int iType) {
        int[] compiled = new int[1];
        int iShader = GLES20.glCreateShader(iType);
        GLES20.glShaderSource(iShader, strSource);
        GLES20.glCompileShader(iShader);
        GLES20.glGetShaderiv(iShader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            OpenGlUtils.checkGlError("glCompileShader");
            // Fix: release the shader object when compilation fails.
            GLES20.glDeleteShader(iShader);
            return 0;
        }
        return iShader;
    }
}
package com.tencent.vod.flutter.live.render;
/**
 * Rotation of a video frame in 90-degree steps, convertible to and from degrees.
 */
public enum FTXRotation {
    NORMAL, ROTATION_90, ROTATION_180, ROTATION_270;

    /**
     * Retrieves the int representation of the Rotation.
     *
     * @return 0, 90, 180 or 270
     */
    public int asInt() {
        if (this == ROTATION_90) {
            return 90;
        }
        if (this == ROTATION_180) {
            return 180;
        }
        if (this == ROTATION_270) {
            return 270;
        }
        // NORMAL (and any future constant) maps to 0 degrees.
        return 0;
    }

    /**
     * Create a Rotation from an integer. Needs to be either 0, 90, 180 or 270;
     * 360 and any unrecognized value map to NORMAL.
     *
     * @param rotation 0, 90, 180 or 270
     * @return Rotation object
     */
    public static FTXRotation fromInt(int rotation) {
        if (rotation == 90) {
            return ROTATION_90;
        }
        if (rotation == 180) {
            return ROTATION_180;
        }
        if (rotation == 270) {
            return ROTATION_270;
        }
        return NORMAL;
    }
}
package com.tencent.vod.flutter.live.render;
/**
 * Mutable width/height pair used for frame dimensions.
 */
public class FTXSize {

    public int width;
    public int height;

    /** Creates a 0x0 size. */
    public FTXSize() {
    }

    public FTXSize(int width, int height) {
        this.width = width;
        this.height = height;
    }

    /** Exchanges width and height in place (e.g. after a 90-degree rotation). */
    @SuppressWarnings("SuspiciousNameCombination")
    public void swap() {
        final int previousWidth = width;
        width = height;
        height = previousWidth;
    }
}
package com.tencent.vod.flutter.live.render;
/**
 * Texture-coordinate tables for the four 90-degree rotations, plus a helper that
 * applies optional horizontal/vertical mirroring.
 */
public class FTXTextureRotationUtils {

    public static final float[] TEXTURE_NO_ROTATION = {0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f};
    public static final float[] TEXTURE_ROTATED_90 = {1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f};
    public static final float[] TEXTURE_ROTATED_180 = {1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f};
    public static final float[] TEXTURE_ROTATED_270 = {0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f};

    private FTXTextureRotationUtils() {
    }

    /**
     * Gets the texture coordinates for the given rotation, optionally mirrored.
     * Always returns a freshly allocated array; previously the shared static table
     * itself was returned when no flip was requested, so a caller mutating the
     * result corrupted every later lookup.
     *
     * @param fTXRotation    rotation to apply
     * @param flipHorizontal mirror the x coordinates
     * @param flipVertical   mirror the y coordinates
     * @return a new 8-element coordinate array
     */
    public static float[] getRotation(final FTXRotation fTXRotation,
                                      final boolean flipHorizontal,
                                      final boolean flipVertical) {
        float[] rotatedTex;
        switch (fTXRotation) {
            case ROTATION_90:
                rotatedTex = TEXTURE_ROTATED_90;
                break;
            case ROTATION_180:
                rotatedTex = TEXTURE_ROTATED_180;
                break;
            case ROTATION_270:
                rotatedTex = TEXTURE_ROTATED_270;
                break;
            case NORMAL:
            default:
                rotatedTex = TEXTURE_NO_ROTATION;
                break;
        }
        // Defensive copy: never hand out the shared static arrays.
        rotatedTex = rotatedTex.clone();
        if (flipHorizontal) {
            // x coordinates live at the even indices.
            for (int i = 0; i < rotatedTex.length; i += 2) {
                rotatedTex[i] = flip(rotatedTex[i]);
            }
        }
        if (flipVertical) {
            // y coordinates live at the odd indices.
            for (int i = 1; i < rotatedTex.length; i += 2) {
                rotatedTex[i] = flip(rotatedTex[i]);
            }
        }
        return rotatedTex;
    }

    // Mirrors a normalized coordinate: 0 -> 1, anything else -> 0.
    private static float flip(final float i) {
        if (i == 0.0f) {
            return 1.0f;
        }
        return 0.0f;
    }
}
......@@ -12,7 +12,6 @@ import com.tencent.vod.flutter.messages.FtxMessages.IntMsg;
import com.tencent.vod.flutter.messages.FtxMessages.IntPlayerMsg;
import com.tencent.vod.flutter.messages.FtxMessages.PipParamsPlayerMsg;
import com.tencent.vod.flutter.messages.FtxMessages.PlayerMsg;
import com.tencent.vod.flutter.messages.FtxMessages.StringIntPlayerMsg;
import com.tencent.vod.flutter.messages.FtxMessages.StringPlayerMsg;
import com.tencent.vod.flutter.messages.FtxMessages.TXFlutterLivePlayerApi;
......@@ -47,7 +46,7 @@ public class FTXLivePlayerDispatcher implements FtxMessages.TXFlutterLivePlayerA
@NonNull
@Override
public BoolMsg startLivePlay(@NonNull StringIntPlayerMsg playerMsg) {
public BoolMsg startLivePlay(@NonNull StringPlayerMsg playerMsg) {
TXFlutterLivePlayerApi api = getPlayer(playerMsg.getPlayerId());
if (null != api) {
return api.startLivePlay(playerMsg);
......@@ -168,4 +167,63 @@ public class FTXLivePlayerDispatcher implements FtxMessages.TXFlutterLivePlayerA
api.exitPictureInPictureMode(playerMsg);
}
}
@NonNull
@Override
public Long enableReceiveSeiMessage(@NonNull PlayerMsg playerMsg,
        @NonNull Boolean isEnabled, @NonNull Long payloadType) {
    final TXFlutterLivePlayerApi api = getPlayer(playerMsg.getPlayerId());
    // NOTE(review): returns null when the player is missing despite @NonNull — confirm callers tolerate this.
    return api == null ? null : api.enableReceiveSeiMessage(playerMsg, isEnabled, payloadType);
}
@Override
public void showDebugView(@NonNull PlayerMsg playerMsg, @NonNull Boolean isShow) {
    // Dispatch to the target player; silently ignored when the player is gone.
    final TXFlutterLivePlayerApi api = getPlayer(playerMsg.getPlayerId());
    if (api == null) {
        return;
    }
    api.showDebugView(playerMsg, isShow);
}
@NonNull
@Override
public Long setProperty(@NonNull PlayerMsg playerMsg, @NonNull String key, @NonNull Object value) {
    final TXFlutterLivePlayerApi api = getPlayer(playerMsg.getPlayerId());
    // NOTE(review): returns null when the player is missing despite @NonNull — confirm callers tolerate this.
    return api == null ? null : api.setProperty(playerMsg, key, value);
}
@NonNull
@Override
public FtxMessages.ListMsg getSupportedBitrate(@NonNull PlayerMsg playerMsg) {
    final TXFlutterLivePlayerApi api = getPlayer(playerMsg.getPlayerId());
    // NOTE(review): returns null when the player is missing despite @NonNull — confirm callers tolerate this.
    return api == null ? null : api.getSupportedBitrate(playerMsg);
}
@NonNull
@Override
public Long setCacheParams(@NonNull PlayerMsg playerMsg, @NonNull Double minTime, @NonNull Double maxTime) {
    final TXFlutterLivePlayerApi api = getPlayer(playerMsg.getPlayerId());
    // NOTE(review): returns null when the player is missing despite @NonNull — confirm callers tolerate this.
    return api == null ? null : api.setCacheParams(playerMsg, minTime, maxTime);
}
@NonNull
@Override
public Long enablePictureInPicture(@NonNull BoolPlayerMsg msg) {
    final TXFlutterLivePlayerApi api = getPlayer(msg.getPlayerId());
    // NOTE(review): returns null when the player is missing despite @NonNull — confirm callers tolerate this.
    return api == null ? null : api.enablePictureInPicture(msg);
}
}
package com.tencent.vod.flutter.model;
import com.tencent.rtmp.TXLivePlayer;
import com.tencent.live2.V2TXLivePlayer;
import com.tencent.rtmp.TXVodPlayer;
import com.tencent.vod.flutter.FTXEvent;
public class TXPlayerHolder {
private TXVodPlayer mVodPlayer;
private TXLivePlayer mLivePlayer;
private int mPlayerType;
private boolean mInitPlayingStatus;
private V2TXLivePlayer mLivePlayer;
private final int mPlayerType;
private boolean mPlayingStatus;
private boolean mIsPlayingWhenCreated = false;
public TXPlayerHolder(TXVodPlayer vodPlayer) {
mVodPlayer = vodPlayer;
mInitPlayingStatus = vodPlayer.isPlaying();
mPlayingStatus = vodPlayer.isPlaying();
mIsPlayingWhenCreated = mPlayingStatus;
mPlayerType = FTXEvent.PLAYER_VOD;
}
public TXPlayerHolder(TXLivePlayer livePlayer) {
public TXPlayerHolder(V2TXLivePlayer livePlayer, boolean initPauseStatus) {
mLivePlayer = livePlayer;
mInitPlayingStatus = livePlayer.isPlaying();
mPlayingStatus = !initPauseStatus;
mIsPlayingWhenCreated = mPlayingStatus;
mPlayerType = FTXEvent.PLAYER_LIVE;
}
......@@ -27,19 +30,37 @@ public class TXPlayerHolder {
return mVodPlayer;
}
public TXLivePlayer getLivePlayer() {
public V2TXLivePlayer getLivePlayer() {
return mLivePlayer;
}
public boolean isPlayingWhenCreate() {
return mInitPlayingStatus;
return mIsPlayingWhenCreated;
}
public void tmpPause() {
public boolean isPlaying() {
return mPlayingStatus;
}
public void pause() {
if (null != mVodPlayer) {
mVodPlayer.pause();
mPlayingStatus = false;
} else if (null != mLivePlayer) {
mLivePlayer.pauseAudio();
mLivePlayer.pauseVideo();
mPlayingStatus = false;
}
}
public void resume() {
if (null != mVodPlayer) {
mVodPlayer.resume();
mPlayingStatus = true;
} else if (null != mLivePlayer) {
mLivePlayer.pause();
mLivePlayer.resumeAudio();
mLivePlayer.resumeVideo();
mPlayingStatus = true;
}
}
......
package com.tencent.vod.flutter.tools;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.os.Bundle;
import com.tencent.live2.V2TXLiveDef;
import com.tencent.vod.flutter.FTXEvent;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
/**
 * Static helpers for the V2TXLive player integration: rotation mapping,
 * pixel-buffer conversion, and packing live-player network statistics
 * into a {@link Bundle} keyed by the FTXEvent.TUINetConst constants.
 */
public class FTXV2LiveTools {

    /**
     * Maps a rotation angle in degrees to the corresponding
     * {@link V2TXLiveDef.V2TXLiveRotation} constant.
     * Buckets are upper-inclusive: {@code <= 0} -> 0, {@code (0, 90]} -> 90,
     * {@code (90, 180]} -> 180, anything larger -> 270 (so e.g. 45 maps to 90).
     */
    public static V2TXLiveDef.V2TXLiveRotation transRotationFromDegree(int rotation) {
        V2TXLiveDef.V2TXLiveRotation rotationCode;
        if (rotation <= 0) {
            rotationCode = V2TXLiveDef.V2TXLiveRotation.V2TXLiveRotation0;
        } else if (rotation <= 90) {
            rotationCode = V2TXLiveDef.V2TXLiveRotation.V2TXLiveRotation90;
        } else if (rotation <= 180) {
            rotationCode = V2TXLiveDef.V2TXLiveRotation.V2TXLiveRotation180;
        } else {
            rotationCode = V2TXLiveDef.V2TXLiveRotation.V2TXLiveRotation270;
        }
        return rotationCode;
    }

    /**
     * Converts a YUV420 pixel buffer into a compressed image buffer.
     *
     * NOTE(review): {@link YuvImage} only supports {@code ImageFormat.NV21} and
     * {@code ImageFormat.YUY2}; constructing it with {@code ImageFormat.YUV_420_888}
     * as done below throws IllegalArgumentException on current Android releases —
     * confirm this path is actually exercised and which input layout is intended.
     * NOTE(review): the returned bytes are the JPEG output of compressToJpeg, not
     * raw ARGB8888 pixels, despite the method name — verify what consumers expect.
     */
    public static ByteBuffer yuv420ToARGB8888(ByteBuffer yuv420Buffer, int width, int height) {
        // Copy the YUV420 ByteBuffer contents into a byte array.
        byte[] yuv420Bytes = new byte[yuv420Buffer.remaining()];
        yuv420Buffer.get(yuv420Bytes);
        // Wrap the YUV bytes and compress the full frame to JPEG at max quality.
        YuvImage yuvImage = new YuvImage(yuv420Bytes, ImageFormat.YUV_420_888, width, height, null);
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        Rect rect = new Rect(0, 0, width, height);
        yuvImage.compressToJpeg(rect, 100, outputStream);
        byte[] argbBytes = outputStream.toByteArray();
        // Hand the compressed bytes back wrapped in a ByteBuffer.
        return ByteBuffer.wrap(argbBytes);
    }

    /**
     * Packs the V2 live player statistics into a {@link Bundle} using the
     * FTXEvent.TUINetConst key names so they can be dispatched as a net-status
     * event. Returns an empty Bundle when {@code statistics} is null.
     */
    public static Bundle buildNetBundle(V2TXLiveDef.V2TXLivePlayerStatistics statistics) {
        Bundle bundle = new Bundle();
        if (null != statistics) {
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_CPU_USAGE, statistics.appCpu);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_VIDEO_WIDTH, statistics.width);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_VIDEO_HEIGHT, statistics.height);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_NET_SPEED, statistics.netSpeed);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_VIDEO_FPS, statistics.fps);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_VIDEO_BITRATE, statistics.videoBitrate);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_AUDIO_BITRATE, statistics.audioBitrate);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_NET_JITTER, statistics.jitterBufferDelay);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_SYSTEM_CPU, statistics.systemCpu);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_VIDEO_LOSS, statistics.videoPacketLoss);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_AUDIO_LOSS, statistics.audioPacketLoss);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_AUDIO_TOTAL_BLOCK_TIME, statistics.audioTotalBlockTime);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_VIDEO_TOTAL_BLOCK_TIME, statistics.videoTotalBlockTime);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_VIDEO_BLOCK_RATE, statistics.videoBlockRate);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_AUDIO_BLOCK_RATE, statistics.audioBlockRate);
            bundle.putInt(FTXEvent.TUINetConst.NET_STATUS_RTT, statistics.rtt);
        }
        return bundle;
    }
}
......@@ -92,7 +92,7 @@ public class TXCommonUtil {
public static Map<String, Object> getParams(int event, Bundle bundle) {
Map<String, Object> param = new HashMap<>();
if (event != 0) {
param.put("event", event);
param.put(FTXEvent.EVT_KEY_PLAYER_EVENT, event);
}
if (bundle != null && !bundle.isEmpty()) {
......
......@@ -31,10 +31,8 @@ import androidx.annotation.NonNull;
import androidx.core.content.ContextCompat;
import com.tencent.liteav.base.util.LiteavLog;
import com.tencent.rtmp.ITXLivePlayListener;
import com.tencent.rtmp.ITXVodPlayListener;
import com.tencent.rtmp.TXLiveConstants;
import com.tencent.rtmp.TXLivePlayer;
import com.tencent.rtmp.TXVodPlayer;
import com.tencent.vod.flutter.FTXEvent;
import com.tencent.vod.flutter.FTXPIPManager.PipParams;
......@@ -46,7 +44,7 @@ import com.tencent.vod.flutter.tools.TXSimpleEventBus;
public class FlutterPipImplActivity extends Activity implements TextureView.SurfaceTextureListener, ITXVodPlayListener,
ITXLivePlayListener, ServiceConnection {
ServiceConnection {
private static final String TAG = "FlutterPipImplActivity";
private static TXPlayerHolder pipPlayerHolder;
......@@ -129,7 +127,7 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
}
isInPip = true;
// pause first, resume video after entered pip
playerHolder.tmpPause();
playerHolder.pause();
pipPlayerHolder = playerHolder;
Intent intent = new Intent(activity, FlutterPipImplActivity.class);
Bundle bundle = new Bundle();
......@@ -191,7 +189,7 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
}
private void setLivePlayerListener() {
mPlayerHolder.getLivePlayer().setPlayListener(this);
// pip not need live observer
}
@Override
......@@ -296,6 +294,7 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
private void handlePipExitEvent() {
Bundle data = new Bundle();
TXPipResult pipResult = new TXPipResult();
pipResult.setPlaying(mPlayerHolder.isPlaying());
if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_VOD) {
if (mIsPlayEnd) {
pipResult.setPlayTime(0F);
......@@ -303,11 +302,9 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
Float currentPlayTime = mPlayerHolder.getVodPlayer().getCurrentPlaybackTime();
pipResult.setPlayTime(currentPlayTime);
}
pipResult.setPlaying(mPlayerHolder.getVodPlayer().isPlaying());
pipResult.setPlayerId(mCurrentParams.getCurrentPlayerId());
data.putParcelable(FTXEvent.EXTRA_NAME_RESULT, pipResult);
} else if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_LIVE) {
pipResult.setPlaying(mPlayerHolder.getLivePlayer().isPlaying());
pipResult.setPlayerId(mCurrentParams.getCurrentPlayerId());
data.putParcelable(FTXEvent.EXTRA_NAME_RESULT, pipResult);
}
......@@ -315,13 +312,9 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
mPlayerHolder.getVodPlayer().setSurface(null);
}
if (null != mPlayerHolder.getLivePlayer()) {
mPlayerHolder.getLivePlayer().setSurface(null);
}
if (null != mPlayerHolder.getVodPlayer()) {
mPlayerHolder.getVodPlayer().pause();
} else if (null != mPlayerHolder.getLivePlayer()) {
mPlayerHolder.getLivePlayer().pause();
mPlayerHolder.getLivePlayer().setRenderView((TextureView) null);
}
mPlayerHolder.pause();
int codeEvent = mIsNeedToStop ? FTXEvent.EVENT_PIP_MODE_ALREADY_EXIT : FTXEvent.EVENT_PIP_MODE_RESTORE_UI;
sendPipEvent(codeEvent, data);
exitPip(codeEvent == FTXEvent.EVENT_PIP_MODE_ALREADY_EXIT);
......@@ -387,7 +380,7 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
startActivity(intent);
}
},2000);
}, 2000);
}
/**
......@@ -447,11 +440,7 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
if (null != mPlayerHolder) {
boolean isInitPlaying = mPlayerHolder.isPlayingWhenCreate();
if (isInitPlaying) {
if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_VOD) {
mPlayerHolder.getVodPlayer().resume();
} else if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_LIVE) {
mPlayerHolder.getLivePlayer().resume();
}
mPlayerHolder.resume();
}
} else {
LiteavLog.e(TAG, "miss player when startPlay");
......@@ -519,7 +508,7 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_VOD) {
mPlayerHolder.getVodPlayer().setSurface(surface);
} else if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_LIVE) {
mPlayerHolder.getLivePlayer().setSurface(surface);
mPlayerHolder.getLivePlayer().setRenderView(mVideoSurface);
} else {
LiteavLog.e(TAG, "unknown player type:" + mPlayerHolder.getPlayerType());
}
......@@ -542,23 +531,11 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
}
private void handleResumeOrPause() {
boolean dstPlaying = false;
if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_VOD) {
TXVodPlayer vodPlayer = mPlayerHolder.getVodPlayer();
dstPlaying = !vodPlayer.isPlaying();
if (dstPlaying) {
vodPlayer.resume();
} else {
vodPlayer.pause();
}
} else if (mPlayerHolder.getPlayerType() == FTXEvent.PLAYER_LIVE) {
TXLivePlayer livePlayer = mPlayerHolder.getLivePlayer();
dstPlaying = !livePlayer.isPlaying();
boolean dstPlaying = !mPlayerHolder.isPlaying();
if (dstPlaying) {
livePlayer.resume();
mPlayerHolder.resume();
} else {
livePlayer.pause();
}
mPlayerHolder.pause();
}
handleResumeOrPause(dstPlaying);
}
......@@ -653,10 +630,6 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
sendPlayerEvent(event, bundle);
}
@Override
public void onPlayEvent(int event, Bundle bundle) {
}
private void sendPlayerEvent(int eventCode, Bundle data) {
Bundle params = new Bundle();
params.putInt(FTXEvent.EXTRA_NAME_PLAYER_ID, mCurrentParams.getCurrentPlayerId());
......@@ -669,10 +642,6 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
public void onNetStatus(TXVodPlayer txVodPlayer, Bundle bundle) {
}
@Override
public void onNetStatus(Bundle bundle) {
}
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
}
......@@ -680,5 +649,4 @@ public class FlutterPipImplActivity extends Activity implements TextureView.Surf
@Override
public void onServiceDisconnected(ComponentName name) {
}
}
\ No newline at end of file
......@@ -31,7 +31,7 @@ android {
defaultConfig {
// TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
applicationId "com.tencent.liteav.demo"
minSdkVersion 19
minSdkVersion flutter.minSdkVersion
targetSdk 34
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
......
......@@ -2,6 +2,10 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
</array>
<key>CADisableMinimumFrameDurationOnPhone</key>
<true/>
<key>CFBundleDevelopmentRegion</key>
......@@ -31,10 +35,6 @@
</dict>
<key>UIApplicationSupportsIndirectInputEvents</key>
<true/>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
</array>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
......
// Copyright (c) 2022 Tencent. All rights reserved.
import 'dart:async';

// The obtained license URL
const LICENSE_URL = "";
// The obtained license key
const LICENSE_KEY = "";
Completer<bool> isLicenseSuc = Completer();
\ No newline at end of file
......@@ -338,7 +338,6 @@ class _DemoSuperPlayerState extends State<DemoSuperPlayer> with TXPipPlayerResto
model.appId = 1252463788;
model.videoId = new SuperPlayerVideoId();
model.videoId!.fileId = "5285890781763144364";
model.coverUrl = "http://1252463788.vod2.myqcloud.com/95576ef5vodtransgzp1252463788/e1ab85305285890781763144364/1536584350_1812858038.100_0.jpg";
model.title = AppLocals.current.playerTencentCloud;
model.playAction = playAction;
model.isEnableDownload = false;
......
......@@ -10,6 +10,7 @@ import 'package:superplayer_widget/demo_superplayer_lib.dart';
import 'ui/demo_inputdialog.dart';
import 'ui/demo_volume_slider.dart';
import 'ui/demo_video_slider_view.dart';
import 'common/demo_config.dart';
class DemoTXLivePlayer extends StatefulWidget {
@override
......@@ -19,13 +20,12 @@ class DemoTXLivePlayer extends StatefulWidget {
class _DemoTXLivelayerState extends State<DemoTXLivePlayer> with WidgetsBindingObserver {
late TXLivePlayerController _controller;
double _aspectRatio = 16.0 / 9.0;
double _progress = 0.0;
int _volume = 100;
bool _isMute = false;
String _url = "http://liteavapp.qcloud.com/live/liteavdemoplayerstreamid_demo1080p.flv";
int _currentBitRateIndex = 0;
bool _isStop = true;
bool _isPlaying = false;
double _maxLiveProgressTime = 0;
StreamSubscription? playEventSubscription;
StreamSubscription? playNetEventSubscription;
StreamSubscription? playerStateEventSubscription;
......@@ -39,38 +39,30 @@ class _DemoTXLivelayerState extends State<DemoTXLivePlayer> with WidgetsBindingO
playEventSubscription = _controller.onPlayerEventBroadcast.listen((event) {
// Subscribe to event distribution
if (event["event"] == TXVodPlayEvent.PLAY_EVT_PLAY_PROGRESS) {
_progress = event["EVT_PLAY_PROGRESS"].toDouble();
_maxLiveProgressTime = _progress >= _maxLiveProgressTime ? _progress : _maxLiveProgressTime;
progressSliderKey.currentState?.updateProgress(1, _maxLiveProgressTime);
} else if (event["event"] == TXVodPlayEvent.PLAY_EVT_RCV_FIRST_I_FRAME) {
int evtCode = event["event"];
if (evtCode == TXVodPlayEvent.PLAY_EVT_RCV_FIRST_I_FRAME) {
// First frame appearance
_isStop = false;
_isPlaying = true;
EasyLoading.dismiss();
_resizeVideo(event);
} else if (event["event"] == TXVodPlayEvent.PLAY_EVT_STREAM_SWITCH_SUCC) {
} else if (evtCode == TXVodPlayEvent.PLAY_EVT_PLAY_BEGIN) {
_isPlaying = true;
} else if (evtCode== TXVodPlayEvent.PLAY_EVT_STREAM_SWITCH_SUCC) {
// Stream switching successful.
EasyLoading.dismiss();
if (_url == "http://liteavapp.qcloud.com/live/liteavdemoplayerstreamid_demo1080p.flv") {
EasyLoading.showSuccess(AppLocals.current.playerSwitchTo1080);
} else {
EasyLoading.showSuccess(AppLocals.current.playerSwitchTo480);
}
} else if (event["event"] == TXVodPlayEvent.PLAY_ERR_STREAM_SWITCH_FAIL) {
EasyLoading.showSuccess(AppLocals.current.playerSwitchSuc);
} else if (evtCode == TXVodPlayEvent.PLAY_ERR_STREAM_SWITCH_FAIL) {
EasyLoading.dismiss();
EasyLoading.showError(AppLocals.current.playerLiveSwitchFailed);
switchUrl();
} else if (event["event"] == TXVodPlayEvent.PLAY_EVT_CHANGE_RESOLUTION) {
} else if (evtCode == TXVodPlayEvent.PLAY_EVT_CHANGE_RESOLUTION) {
LogUtils.w("PLAY_EVT_CHANGE_RESOLUTION", event);
_resizeVideo(event);
} else if(evtCode < 0 && evtCode != -100) {
EasyLoading.showError("play failed, code:$evtCode,event:$event");
}
});
playNetEventSubscription = _controller.onPlayerNetStatusBroadcast.listen((event) {
// Subscribe to status changes
});
playerStateEventSubscription = _controller.onPlayerState.listen((event) {
// Subscribe to status changes
debugPrint("Playback status ${event!.name}");
......@@ -78,8 +70,14 @@ class _DemoTXLivelayerState extends State<DemoTXLivePlayer> with WidgetsBindingO
await SuperPlayerPlugin.setConsoleEnabled(true);
await _controller.initialize();
await _controller.setConfig(FTXLivePlayConfig());
await _controller.startLivePlay(_url, playType: TXPlayType.LIVE_FLV);
if (!isLicenseSuc.isCompleted) {
SuperPlayerPlugin.setGlobalLicense(LICENSE_URL, LICENSE_KEY);
await isLicenseSuc.future;
await _controller.startLivePlay(_url);
} else {
await _controller.startLivePlay(_url);
}
}
void _resizeVideo(Map<dynamic, dynamic> event) {
......@@ -122,12 +120,17 @@ class _DemoTXLivelayerState extends State<DemoTXLivePlayer> with WidgetsBindingO
}
}
void switchUrl() {
bool switchUrl() {
bool switchStarted = true;
if (_url == "http://liteavapp.qcloud.com/live/liteavdemoplayerstreamid_demo480p.flv") {
_url = "http://liteavapp.qcloud.com/live/liteavdemoplayerstreamid_demo1080p.flv";
} else {
} else if (_url == "http://liteavapp.qcloud.com/live/liteavdemoplayerstreamid_demo1080p.flv") {
_url = "http://liteavapp.qcloud.com/live/liteavdemoplayerstreamid_demo480p.flv";
} else {
switchStarted = false;
EasyLoading.showInfo("no other steam to switch");
}
return switchStarted;
}
@override
......@@ -193,10 +196,21 @@ class _DemoTXLivelayerState extends State<DemoTXLivePlayer> with WidgetsBindingO
EasyLoading.showError(AppLocals.current.playerLiveStopTip);
return;
}
switchUrl();
List<FSteamInfo> steamInfo = await _controller.getSupportedBitrate();
if (steamInfo.isNotEmpty) {
FSteamInfo info = steamInfo[++_currentBitRateIndex % steamInfo.length];
if (info.url != null) {
_controller.switchStream(info.url!);
EasyLoading.show(status: 'loading...');
} else {
EasyLoading.showError("steam url is null");
}
} else {
if (switchUrl()) {
_controller.switchStream(_url);
EasyLoading.show(status: 'loading...');
}
}
}),
_createItem(_isMute ? AppLocals.current.playerCancelMute : AppLocals.current.playerSetMute, () async {
setState(() {
......
......@@ -11,6 +11,7 @@ import 'ui/demo_volume_slider.dart';
import 'ui/demo_speed_slider.dart';
import 'ui/demo_bitrate_checkbox.dart';
import 'ui/demo_video_slider_view.dart';
import 'common/demo_config.dart';
class DemoTXVodPlayer extends StatefulWidget {
@override
......@@ -84,10 +85,15 @@ class _DemoTXVodPlayerState extends State<DemoTXVodPlayer> with WidgetsBindingOb
});
await _controller.setLoop(true);
await _controller.enableHardwareDecode(enableHardware);
await _controller.setAudioPlayoutVolume(volume);
await _controller.setConfig(FTXVodPlayConfig());
_controller.setConfig(FTXVodPlayConfig());
if (!isLicenseSuc.isCompleted) {
SuperPlayerPlugin.setGlobalLicense(LICENSE_URL, LICENSE_KEY);
await isLicenseSuc.future;
await _controller.startVodPlay(_url);
} else {
await _controller.startVodPlay(_url);
}
}
void _resizeVideo(Map<dynamic, dynamic> event) {
......
......@@ -41,7 +41,15 @@ class _MyAppState extends State<MyApp> {
/// Sets the player license and signals [isLicenseSuc] once the SDK
/// reports that the licence has loaded successfully.
Future<void> initPlayerLicense() async {
  // The listener must be registered before setGlobalLicense so the
  // licence-loaded callback is not missed.
  SuperPlayerPlugin.instance.setSDKListener(licenceLoadedListener: (code, result) {
    // Guard with isCompleted: Completer.complete throws a StateError if the
    // SDK fires this callback more than once (e.g. licence set again later).
    if (code == 0 && !isLicenseSuc.isCompleted) {
      isLicenseSuc.complete(true);
    }
  });
  await SuperPlayerPlugin.setGlobalLicense(LICENSE_URL, LICENSE_KEY);
  // Enable flexible license validation.
  SuperPlayerPlugin.setLicenseFlexibleValid(true);
}
// Platform messages are asynchronous, so we initialize in an async method.
......
......@@ -122,4 +122,22 @@
#define EXTRA_SUBTITLE_DURATION_MS @"durationMs"
#define EXTRA_SUBTITLE_TRACK_INDEX @"trackIndex"
// player event
#define EVT_KEY_PLAYER_EVENT @"event"
#define EVT_KEY_PLAYER_NET @"net"
#define EVT_KEY_PLAYER_WIDTH @"EVT_WIDTH"
#define EVT_KEY_PLAYER_HEIGHT @"EVT_HEIGHT"
// net event
#define NET_STATUS_SYSTEM_CPU @"SYSTEM_CPU"
#define NET_STATUS_VIDEO_LOSS @"VIDEO_PACKET_LOSS"
#define NET_STATUS_AUDIO_LOSS @"AUDIO_PACKET_LOSS"
#define NET_STATUS_AUDIO_TOTAL_BLOCK_TIME @"AUDIO_TOTAL_BLOCK_TIME"
#define NET_STATUS_VIDEO_TOTAL_BLOCK_TIME @"VIDEO_TOTAL_BLOCK_TIME"
#define NET_STATUS_VIDEO_BLOCK_RATE @"VIDEO_BLOCK_RATE"
#define NET_STATUS_AUDIO_BLOCK_RATE @"AUDIO_BLOCK_RATE"
#define NET_STATUS_RTT @"RTT"
#endif // SUPERPLAYER_FLUTTER_IOS_CLASSES_FTXEVENT_H_
......@@ -99,14 +99,6 @@
}
}
- (nullable BoolMsg *)startLivePlayPlayerMsg:(nonnull StringIntPlayerMsg *)playerMsg error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
id<TXFlutterLivePlayerApi> api = self.bridge.getPlayers[playerMsg.playerId];
if(api) {
return [api startLivePlayPlayerMsg:playerMsg error:error];
}
return nil;
}
- (nullable BoolMsg *)stopIsNeedClear:(nonnull BoolPlayerMsg *)isNeedClear error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
id<TXFlutterLivePlayerApi> api = self.bridge.getPlayers[isNeedClear.playerId];
if(api) {
......@@ -123,4 +115,66 @@
return nil;
}
- (nullable NSNumber *)enablePictureInPictureMsg:(nonnull BoolPlayerMsg *)msg error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
    // Look up the live player registered under this message's player id.
    id<TXFlutterLivePlayerApi> player = self.bridge.getPlayers[msg.playerId];
    if (player == nil) {
        // No player bound to this id; nothing to forward.
        return nil;
    }
    return [player enablePictureInPictureMsg:msg error:error];
}
- (nullable NSNumber *)enableReceiveSeiMessagePlayerMsg:(nonnull PlayerMsg *)playerMsg isEnabled:(nonnull NSNumber *)isEnabled payloadType:(nonnull NSNumber *)payloadType error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
    // Forward the SEI-message toggle to the live player bound to this id.
    id<TXFlutterLivePlayerApi> player = self.bridge.getPlayers[playerMsg.playerId];
    if (player == nil) {
        return nil;
    }
    return [player enableReceiveSeiMessagePlayerMsg:playerMsg isEnabled:isEnabled payloadType:payloadType error:error];
}
- (nullable ListMsg *)getSupportedBitratePlayerMsg:(nonnull PlayerMsg *)playerMsg error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
    // Query the supported bitrate list from the player bound to this id.
    id<TXFlutterLivePlayerApi> player = self.bridge.getPlayers[playerMsg.playerId];
    if (player == nil) {
        return nil;
    }
    return [player getSupportedBitratePlayerMsg:playerMsg error:error];
}
- (nullable NSNumber *)setCacheParamsPlayerMsg:(nonnull PlayerMsg *)playerMsg minTime:(nonnull NSNumber *)minTime maxTime:(nonnull NSNumber *)maxTime error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
    // Forward the cache window to the live player bound to this id.
    id<TXFlutterLivePlayerApi> player = self.bridge.getPlayers[playerMsg.playerId];
    if (player == nil) {
        return nil;
    }
    return [player setCacheParamsPlayerMsg:playerMsg minTime:minTime maxTime:maxTime error:error];
}
- (nullable NSNumber *)setPropertyPlayerMsg:(nonnull PlayerMsg *)playerMsg key:(nonnull NSString *)key value:(nonnull id)value error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
    // Forward the key/value property to the live player bound to this id.
    id<TXFlutterLivePlayerApi> player = self.bridge.getPlayers[playerMsg.playerId];
    if (player == nil) {
        return nil;
    }
    return [player setPropertyPlayerMsg:playerMsg key:key value:value error:error];
}
- (void)showDebugViewPlayerMsg:(nonnull PlayerMsg *)playerMsg isShow:(nonnull NSNumber *)isShow error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
    // Forward only when a player exists for this id; otherwise silently no-op.
    id<TXFlutterLivePlayerApi> player = self.bridge.getPlayers[playerMsg.playerId];
    if (player != nil) {
        [player showDebugViewPlayerMsg:playerMsg isShow:isShow error:error];
    }
}
- (nullable BoolMsg *)startLivePlayPlayerMsg:(nonnull StringPlayerMsg *)playerMsg error:(FlutterError * _Nullable __autoreleasing * _Nonnull)error {
    // Start live playback on the player bound to this message's id.
    id<TXFlutterLivePlayerApi> player = self.bridge.getPlayers[playerMsg.playerId];
    if (player == nil) {
        return nil;
    }
    return [player startLivePlayPlayerMsg:playerMsg error:error];
}
@end
......@@ -18,7 +18,6 @@ NS_ASSUME_NONNULL_BEGIN
@class PipParamsPlayerMsg;
@class StringListPlayerMsg;
@class BoolPlayerMsg;
@class StringIntPlayerMsg;
@class StringPlayerMsg;
@class DoublePlayerMsg;
@class IntPlayerMsg;
......@@ -113,15 +112,6 @@ NS_ASSUME_NONNULL_BEGIN
@property(nonatomic, strong, nullable) NSNumber * value;
@end
@interface StringIntPlayerMsg : NSObject
+ (instancetype)makeWithPlayerId:(nullable NSNumber *)playerId
strValue:(nullable NSString *)strValue
intValue:(nullable NSNumber *)intValue;
@property(nonatomic, strong, nullable) NSNumber * playerId;
@property(nonatomic, copy, nullable) NSString * strValue;
@property(nonatomic, strong, nullable) NSNumber * intValue;
@end
@interface StringPlayerMsg : NSObject
+ (instancetype)makeWithPlayerId:(nullable NSNumber *)playerId
value:(nullable NSString *)value;
......@@ -750,15 +740,14 @@ NSObject<FlutterMessageCodec> *TXFlutterLivePlayerApiGetCodec(void);
/// @return `nil` only when `error != nil`.
- (nullable IntMsg *)initializeOnlyAudio:(BoolPlayerMsg *)onlyAudio error:(FlutterError *_Nullable *_Nonnull)error;
///
/// 当设置[LivePlayer] 类型播放器时,需要参数[playType]
/// 参考: [PlayType.LIVE_RTMP] ...
/// 当设置[LivePlayer] 类型播放器时
/// 10.7版本开始,startPlay变更为startLivePlay,需要通过 {@link SuperPlayerPlugin#setGlobalLicense} 设置 Licence 后方可成功播放,
/// 否则将播放失败(黑屏),全局仅设置一次即可。直播 Licence、短视频 Licence 和视频播放 Licence 均可使用,若您暂未获取上述 Licence ,
/// 可[快速免费申请测试版 Licence](https://cloud.tencent.com/act/event/License) 以正常播放,正式版 License 需[购买]
/// (https://cloud.tencent.com/document/product/881/74588#.E8.B4.AD.E4.B9.B0.E5.B9.B6.E6.96.B0.E5.BB.BA.E6.AD.A3.E5.BC.8F.E7.89.88-license)。
///
/// @return `nil` only when `error != nil`.
- (nullable BoolMsg *)startLivePlayPlayerMsg:(StringIntPlayerMsg *)playerMsg error:(FlutterError *_Nullable *_Nonnull)error;
- (nullable BoolMsg *)startLivePlayPlayerMsg:(StringPlayerMsg *)playerMsg error:(FlutterError *_Nullable *_Nonnull)error;
/// 停止播放
/// return 是否停止成功
///
......@@ -802,6 +791,17 @@ NSObject<FlutterMessageCodec> *TXFlutterLivePlayerApiGetCodec(void);
- (nullable IntMsg *)enterPictureInPictureModePipParamsMsg:(PipParamsPlayerMsg *)pipParamsMsg error:(FlutterError *_Nullable *_Nonnull)error;
/// 退出画中画,如果该播放器处于画中画模式
- (void)exitPictureInPictureModePlayerMsg:(PlayerMsg *)playerMsg error:(FlutterError *_Nullable *_Nonnull)error;
/// @return `nil` only when `error != nil`.
- (nullable NSNumber *)enableReceiveSeiMessagePlayerMsg:(PlayerMsg *)playerMsg isEnabled:(NSNumber *)isEnabled payloadType:(NSNumber *)payloadType error:(FlutterError *_Nullable *_Nonnull)error;
- (void)showDebugViewPlayerMsg:(PlayerMsg *)playerMsg isShow:(NSNumber *)isShow error:(FlutterError *_Nullable *_Nonnull)error;
/// @return `nil` only when `error != nil`.
- (nullable NSNumber *)setPropertyPlayerMsg:(PlayerMsg *)playerMsg key:(NSString *)key value:(id)value error:(FlutterError *_Nullable *_Nonnull)error;
/// @return `nil` only when `error != nil`.
- (nullable ListMsg *)getSupportedBitratePlayerMsg:(PlayerMsg *)playerMsg error:(FlutterError *_Nullable *_Nonnull)error;
/// @return `nil` only when `error != nil`.
- (nullable NSNumber *)setCacheParamsPlayerMsg:(PlayerMsg *)playerMsg minTime:(NSNumber *)minTime maxTime:(NSNumber *)maxTime error:(FlutterError *_Nullable *_Nonnull)error;
/// @return `nil` only when `error != nil`.
- (nullable NSNumber *)enablePictureInPictureMsg:(BoolPlayerMsg *)msg error:(FlutterError *_Nullable *_Nonnull)error;
@end
extern void TXFlutterLivePlayerApiSetup(id<FlutterBinaryMessenger> binaryMessenger, NSObject<TXFlutterLivePlayerApi> *_Nullable api);
......
// Copyright (c) 2022 Tencent. All rights reserved.
#ifndef SUPERPLAYER_FLUTTER_IOS_CLASSES_TOOLS_FTXV2LIVETOOLS_H_
#define SUPERPLAYER_FLUTTER_IOS_CLASSES_TOOLS_FTXV2LIVETOOLS_H_
#import <Foundation/Foundation.h>
#import "FTXLiteAVSDKHeader.h"

// Fix: NS_ASSUME_NONNULL_BEGIN previously sat *inside* the @interface body;
// the audited-region macros must wrap the declarations they cover.
NS_ASSUME_NONNULL_BEGIN

/// Static helpers for the V2TXLive player: rotation mapping and packing
/// player statistics into a dictionary keyed by the NET_STATUS_* constants.
@interface FTXV2LiveTools : NSObject

/// Maps a rotation in degrees to the matching V2TXLiveRotation bucket
/// (<=0 -> 0, <=90 -> 90, <=180 -> 180, otherwise 270).
+ (V2TXLiveRotation)transRotationFromDegree:(int)rotation;

/// Builds an NSDictionary of net-status values from the given statistics.
+ (NSDictionary *)buildNetBundle:(V2TXLivePlayerStatistics *)statistics;

@end

NS_ASSUME_NONNULL_END

#endif  // SUPERPLAYER_FLUTTER_IOS_CLASSES_TOOLS_FTXV2LIVETOOLS_H_
// Copyright (c) 2022 Tencent. All rights reserved.
#import "FTXV2LiveTools.h"
#import "FTXEvent.h"

@implementation FTXV2LiveTools

/// Maps degrees to the rotation enum. Buckets are upper-inclusive:
/// <=0 -> 0, (0,90] -> 90, (90,180] -> 180, anything larger -> 270.
/// (Cleanup: the old pre-initialization to V2TXLiveRotation270 was
/// always overwritten, so early returns are used instead.)
+ (V2TXLiveRotation)transRotationFromDegree:(int)rotation {
    if (rotation <= 0) {
        return V2TXLiveRotation0;
    } else if (rotation <= 90) {
        return V2TXLiveRotation90;
    } else if (rotation <= 180) {
        return V2TXLiveRotation180;
    }
    return V2TXLiveRotation270;
}

/// Packs the live-player statistics into a dictionary using the
/// NET_STATUS_* keys from FTXEvent.h. A nil statistics object yields a
/// dictionary of zeros (messaging nil returns 0 for scalar properties).
+ (NSDictionary *)buildNetBundle:(V2TXLivePlayerStatistics *)statistics {
    NSMutableDictionary *dic = @{}.mutableCopy;
    [dic setValue:@(statistics.appCpu) forKey:NET_STATUS_CPU_USAGE];
    [dic setValue:@(statistics.width) forKey:NET_STATUS_VIDEO_WIDTH];
    [dic setValue:@(statistics.height) forKey:NET_STATUS_VIDEO_HEIGHT];
    [dic setValue:@(statistics.netSpeed) forKey:NET_STATUS_NET_SPEED];
    [dic setValue:@(statistics.fps) forKey:NET_STATUS_VIDEO_FPS];
    [dic setValue:@(statistics.videoBitrate) forKey:NET_STATUS_VIDEO_BITRATE];
    [dic setValue:@(statistics.audioBitrate) forKey:NET_STATUS_AUDIO_BITRATE];
    [dic setValue:@(statistics.jitterBufferDelay) forKey:NET_STATUS_NET_JITTER];
    [dic setValue:@(statistics.systemCpu) forKey:NET_STATUS_SYSTEM_CPU];
    [dic setValue:@(statistics.videoPacketLoss) forKey:NET_STATUS_VIDEO_LOSS];
    [dic setValue:@(statistics.audioPacketLoss) forKey:NET_STATUS_AUDIO_LOSS];
    [dic setValue:@(statistics.audioTotalBlockTime) forKey:NET_STATUS_AUDIO_TOTAL_BLOCK_TIME];
    [dic setValue:@(statistics.videoTotalBlockTime) forKey:NET_STATUS_VIDEO_TOTAL_BLOCK_TIME];
    [dic setValue:@(statistics.videoBlockRate) forKey:NET_STATUS_VIDEO_BLOCK_RATE];
    [dic setValue:@(statistics.audioBlockRate) forKey:NET_STATUS_AUDIO_BLOCK_RATE];
    [dic setValue:@(statistics.rtt) forKey:NET_STATUS_RTT];
    // Fix: was `return dic;;` (stray empty statement).
    return dic;
}

@end
......@@ -671,13 +671,12 @@ abstract class TXFlutterLivePlayerApi {
IntMsg initialize(BoolPlayerMsg onlyAudio);
///
/// 当设置[LivePlayer] 类型播放器时,需要参数[playType]
/// 参考: [PlayType.LIVE_RTMP] ...
/// 当设置[LivePlayer] 类型播放器时
/// 10.7版本开始,startPlay变更为startLivePlay,需要通过 {@link SuperPlayerPlugin#setGlobalLicense} 设置 Licence 后方可成功播放,
/// 否则将播放失败(黑屏),全局仅设置一次即可。直播 Licence、短视频 Licence 和视频播放 Licence 均可使用,若您暂未获取上述 Licence ,
/// 可[快速免费申请测试版 Licence](https://cloud.tencent.com/act/event/License) 以正常播放,正式版 License 需[购买]
/// (https://cloud.tencent.com/document/product/881/74588#.E8.B4.AD.E4.B9.B0.E5.B9.B6.E6.96.B0.E5.BB.BA.E6.AD.A3.E5.BC.8F.E7.89.88-license)。
BoolMsg startLivePlay(StringIntPlayerMsg playerMsg);
BoolMsg startLivePlay(StringPlayerMsg playerMsg);
/// 停止播放
/// return 是否停止成功
......@@ -724,6 +723,18 @@ abstract class TXFlutterLivePlayerApi {
/// 退出画中画,如果该播放器处于画中画模式
void exitPictureInPictureMode(PlayerMsg playerMsg);
int enableReceiveSeiMessage(PlayerMsg playerMsg, bool isEnabled, int payloadType);
void showDebugView(PlayerMsg playerMsg, bool isShow);
int setProperty(PlayerMsg playerMsg, String key, Object value);
ListMsg getSupportedBitrate(PlayerMsg playerMsg);
int setCacheParams(PlayerMsg playerMsg, double minTime, double maxTime);
int enablePictureInPicture(BoolPlayerMsg msg);
}
@HostApi()
......
......@@ -3,9 +3,6 @@ part of SuperPlayer;
/// TXLivePlayer config
class FTXLivePlayConfig {
// Player cache time, in seconds, with a minimum value of 0, default value: 5.
// 播放器缓存时间,单位秒,取值需要大于0,默认值:5
double cacheTime = 5.0;
// The maximum time for automatic adjustment of player cache, in seconds, with a minimum value of 0, default value: 5
// 播放器缓存自动调整的最大时间,单位秒,取值需要大于0,默认值:5
......@@ -14,12 +11,6 @@ class FTXLivePlayConfig {
// The minimum time for automatic adjustment of player cache, in seconds, with a minimum value of 0, default value: 1
// 播放器缓存自动调整的最小时间,单位秒,取值需要大于0,默认值为1
double minAutoAdjustCacheTime = 1.0;
// The threshold for player video lag warning, in milliseconds. Only lag with a rendering interval exceeding
// this threshold will receive the PLAY_WARNING_VIDEO_PLAY_LAG notification
// 播放器视频卡顿报警阈值,单位毫秒,只有渲染间隔超过这个阈值的卡顿才会有 PLAY_WARNING_VIDEO_PLAY_LAG 通知
int videoBlockThreshold = 800;
// The number of times the SDK defaults to retry when the player encounters a network disconnection,
// with a value range of 1-10, default value: 3
// 播放器遭遇网络连接断开时 SDK 默认重试的次数,取值范围1 - 10,默认值:3。
......@@ -29,99 +20,49 @@ class FTXLivePlayConfig {
// Interval between network reconnection attempts, in seconds.
// Range: 3 - 30. Default: 3.
int connectRetryInterval = 3;
// Player cache time in seconds used when automatic cache adjustment is
// disabled; with `autoAdjustCacheTime` enabled the bounds are controlled by
// `maxAutoAdjustCacheTime` / `minAutoAdjustCacheTime` instead.
// Deprecated: this parameter is no longer effective and will be removed in a
// future version.
@deprecated
double cacheTime = 5.0;
// Stutter alarm threshold in milliseconds; only rendering gaps longer than
// this threshold trigger the PLAY_WARNING_VIDEO_PLAY_LAG notification.
// Deprecated: this parameter is no longer effective and will be removed in a
// future version.
@deprecated
int videoBlockThreshold = 800;
// Whether the player cache time is adjusted automatically. Default: true.
// Deprecated: this parameter is no longer effective and will be removed in a
// future version.
@deprecated
bool autoAdjustCacheTime = true;
// Whether acoustic echo cancellation is enabled. Default: false.
// Deprecated: this parameter is no longer effective and will be removed in a
// future version.
@deprecated
bool enableAec = false;
// Whether the message channel is enabled. Default: true.
// Deprecated: this parameter is no longer effective and will be removed in a
// future version.
@deprecated
bool enableMessage = true;
// Whether MetaData callbacks are enabled. Default: false.
// true:  the SDK delivers the stream's MetaData through the
//        EVT_PLAY_GET_METADATA event;
// false: the SDK does not deliver MetaData.
// A standard live stream carries one (customizable) MetaData header at its
// start; custom data can be set via the metaData property of TXLivePushConfig
// and received in TXLivePlayListener.onPlayEvent(EVT_PLAY_GET_METADATA).
// Each stream carries only one MetaData header, so the event fires only once
// unless the connection is dropped and re-established.
// Deprecated: this parameter is no longer effective and will be removed in a
// future version.
@deprecated
bool enableMetaData = false;
// HTTP response-header key whose value should be reported back. Default: "".
// Besides standard fields such as "content-length" and "content-type", CDN
// vendors may add non-standard response headers. For Tencent Cloud live CDN
// HTTP-FLV streams, the "X-Tlive-SpanId" header carries a random string that
// uniquely identifies one playback session; set flvSessionKey to that header
// name and the SDK parses it from the HTTP response and reports it via
// TXLivePlayListener.onPlayEvent(EVT_PLAY_GET_FLVSESSIONKEY).
// Only one flvSessionKey is parsed per stream, so the event is delivered only
// once unless the connection is dropped and re-established.
// Deprecated: this parameter is no longer effective and will be removed in a
// future version.
@deprecated
String flvSessionKey = "";
/// Serialize this live-play configuration into a JSON-compatible map.
/// Deprecated parameters are still emitted so the native side keeps
/// receiving the full historical payload.
Map<String, dynamic> toJson() {
  return <String, dynamic>{
    "cacheTime": cacheTime,
    "maxAutoAdjustCacheTime": maxAutoAdjustCacheTime,
    "minAutoAdjustCacheTime": minAutoAdjustCacheTime,
    "videoBlockThreshold": videoBlockThreshold,
    "connectRetryCount": connectRetryCount,
    "connectRetryInterval": connectRetryInterval,
    "autoAdjustCacheTime": autoAdjustCacheTime,
    "enableAec": enableAec,
    "enableMessage": enableMessage,
    "enableMetaData": enableMetaData,
    "flvSessionKey": flvSessionKey,
  };
}
/// Convert this configuration into the pigeon message object
/// (FTXLivePlayConfigPlayerMsg) used to pass it across the platform channel.
/// Deprecated parameters are still forwarded for backward compatibility.
FTXLivePlayConfigPlayerMsg toMsg() {
return FTXLivePlayConfigPlayerMsg(
cacheTime: cacheTime,
maxAutoAdjustCacheTime: maxAutoAdjustCacheTime,
minAutoAdjustCacheTime: minAutoAdjustCacheTime,
videoBlockThreshold: videoBlockThreshold,
connectRetryCount: connectRetryCount,
connectRetryInterval: connectRetryInterval,
autoAdjustCacheTime: autoAdjustCacheTime,
enableAec: enableAec,
enableMessage: enableMessage,
enableMetaData: enableMetaData,
flvSessionKey: flvSessionKey,
);
}
}
......@@ -28,6 +28,7 @@ class TXLivePlayerController extends ChangeNotifier implements ValueListenable<T
/// Broadcast stream of raw player events delivered from the native layer.
Stream<Map<dynamic, dynamic>> get onPlayerEventBroadcast => _eventStreamController.stream;
/// Broadcast stream of network-status events.
/// NOTE(review): deprecated — with the new kernel this stream no longer
/// emits any events.
@Deprecated("playerNetEvent will no longer return any events.")
Stream<Map<dynamic, dynamic>> get onPlayerNetStatusBroadcast => _netStatusStreamController.stream;
TXLivePlayerController()
......@@ -146,8 +147,6 @@ class TXLivePlayerController extends ChangeNotifier implements ValueListenable<T
return await startLivePlay(url, playType: playType);
}
/// Start playing a live stream from the given URL.
///
/// Since SDK version 10.7, `startPlay` has been renamed to `startLivePlay`.
/// A license must be configured once globally via
/// `SuperPlayerPlugin#setGlobalLicense` before playback can succeed;
/// otherwise playback fails with a black screen. Live, short-video and VOD
/// licenses are all accepted. A free trial license can be requested at
/// https://cloud.tencent.com/act/event/License ; a commercial license must be
/// purchased (see
/// https://cloud.tencent.com/document/product/881/74588#.E8.B4.AD.E4.B9.B0.E5.B9.B6.E6.96.B0.E5.BB.BA.E6.AD.A3.E5.BC.8F.E7.89.88-license).
///
/// @param url      the live stream playback address
/// @param playType deprecated — no longer used by the new kernel and will be
///                 removed in a future version
/// @return whether playback started successfully
Future<bool> startLivePlay(String url, {@deprecated int? playType}) async {
  // Wait until the native player instance and the render texture are ready
  // before issuing the play command.
  await _initPlayer.future;
  await _createTexture.future;
  _changeState(TXPlayerState.buffering);
  printVersionInfo();
  BoolMsg boolMsg = await _livePlayerApi.startLivePlay(StringPlayerMsg()
    ..value = url
    ..playerId = _playerId);
  return boolMsg.value ?? false;
}
......@@ -336,12 +338,9 @@ class TXLivePlayerController extends ChangeNotifier implements ValueListenable<T
/// Enter picture-in-picture mode.
///
/// The Android icon parameters accept only Flutter local asset image paths,
/// passed the same way as regular Flutter image resources, e.g.
/// "images/back_icon.png"; when null, the system default icons are used.
///
/// @return the native result code, or -1 on unsupported platforms / failure.
@override
Future<int> enterPictureInPictureMode(
    {String? backIconForAndroid,
    String? playIconForAndroid,
    String? pauseIconForAndroid,
    String? forwardIconForAndroid}) async {
  // Ensure the native player exists before requesting PiP.
  await _initPlayer.future;
  if (defaultTargetPlatform == TargetPlatform.android) {
    IntMsg intMsg = await _livePlayerApi.enterPictureInPictureMode(PipParamsPlayerMsg()
      ..backIconForAndroid = backIconForAndroid
      ..playIconForAndroid = playIconForAndroid
      ..pauseIconForAndroid = pauseIconForAndroid
      ..forwardIconForAndroid = forwardIconForAndroid
      ..playerId = _playerId);
    return intMsg.value ?? -1;
  } else if (defaultTargetPlatform == TargetPlatform.iOS) {
    // Background picture-in-picture for iOS live streaming is temporarily
    // disabled.
    return -1;
  } else {
    return -1;
  }
}
......@@ -359,7 +361,105 @@ class TXLivePlayerController extends ChangeNotifier implements ValueListenable<T
/// Exit picture-in-picture mode if this player is currently in it.
///
/// Fix: the previous body kept a leftover unconditional legacy call before
/// the platform dispatch, which would invoke the exit API twice on Android.
@override
Future<void> exitPictureInPictureMode() async {
  if (defaultTargetPlatform == TargetPlatform.android) {
    await _livePlayerApi.exitPictureInPictureMode(PlayerMsg()
      ..playerId = _playerId);
  } else if (defaultTargetPlatform == TargetPlatform.iOS) {
    // iOS has no dedicated exit call; disabling PiP closes the window.
    await _livePlayerApi.enablePictureInPicture(BoolPlayerMsg()
      ..value = false
      ..playerId = _playerId);
  }
}
///
/// Enable or disable reception of SEI messages.
///
/// @param isEnabled true: receive SEI messages; false: do not receive them.
///        Default: false.
/// @param payloadType payloadType used for SEI messages; 5, 242 and 243 are
///        supported, and the value must match the sender's payloadType.
/// @return the result code reported by the native layer.
///
Future<int> enableReceiveSeiMessage(bool isEnabled, int payloadType) async {
return await _livePlayerApi.enableReceiveSeiMessage(PlayerMsg(playerId: _playerId),
isEnabled, payloadType);
}
///
/// Show or hide the debug overlay with player status information.
///
/// @param isShow whether the overlay is visible. Default: false.
///
Future<void> showDebugView(bool isShow) async {
await _livePlayerApi.showDebugView(PlayerMsg(playerId: _playerId), isShow);
}
///
/// Invoke an advanced API of the underlying V2TXLivePlayer.
///
/// @note This interface is used to reach advanced features that have no
///       dedicated wrapper method.
/// @param key   key of the advanced API; see the V2TXLiveProperty
///              definitions for details.
/// @param value parameter required by the advanced API identified by [key].
/// @return a V2TXLiveCode result:
///         - 0: success.
///         - -2: operation failed, key must not be null.
///
Future<int> setProperty(String key, Object value) async {
return await _livePlayerApi.setProperty(PlayerMsg(playerId: _playerId), key, value);
}
///
/// Fetch the bitrate/stream-variant information of the current live stream.
///
/// Returns an empty list when the native layer reports nothing; null
/// entries in the native list are skipped.
///
Future<List<FSteamInfo>> getSupportedBitrate() async {
  ListMsg listMsg = await _livePlayerApi.getSupportedBitrate(PlayerMsg(playerId: _playerId));
  final rawItems = listMsg.value ?? const <Object?>[];
  return [
    for (final item in rawItems)
      if (item != null) FSteamInfo.createFromMsg(item)
  ];
}
///
/// Set the minimum and maximum time (in seconds) for automatic player cache
/// adjustment.
///
/// @param minTime minimum auto-adjusted cache time; must be greater than 0.
///        Default: 1.
/// @param maxTime maximum auto-adjusted cache time; must be greater than 0.
///        Default: 5.
/// @return a V2TXLiveCode result:
///         - 0: success.
///         - -2: operation failed, minTime and maxTime must be greater
///           than 0.
///         - -3: the player is currently playing; the cache policy cannot be
///           modified.
///
Future<int> setCacheParams(double minTime, double maxTime) async {
return await _livePlayerApi.setCacheParams(PlayerMsg(playerId: _playerId), minTime, maxTime);
}
/// Release player resource occupation.
......
......@@ -869,6 +869,26 @@ class TXSubtitleRenderModel {
}
}
/// Bitrate/stream-variant information of a live stream.
///
/// NOTE(review): the class name keeps the historical "Steam" spelling because
/// it is part of the public API.
class FSteamInfo {
  // Video width in pixels, if reported by the native layer.
  int? width;
  // Video height in pixels, if reported by the native layer.
  int? height;
  // Stream bitrate, if reported by the native layer.
  int? bitrate;
  // Stream frame rate, if reported by the native layer.
  int? frameRate;
  // Playback URL of this stream variant.
  String? url;

  /// Build an [FSteamInfo] from the raw value sent over the platform channel.
  ///
  /// Unknown shapes yield an empty instance. Numeric fields tolerate both
  /// `int` and `double` values from the native side (a raw dynamic
  /// assignment to `int?` would throw if a platform delivered a double).
  static FSteamInfo createFromMsg(Object obj) {
    FSteamInfo info = FSteamInfo();
    if (obj is Map) {
      info.width = _asInt(obj["width"]);
      info.height = _asInt(obj["height"]);
      info.bitrate = _asInt(obj["bitrate"]);
      info.frameRate = _asInt(obj["framerate"]);
      final Object? rawUrl = obj["url"];
      info.url = rawUrl is String ? rawUrl : null;
    }
    return info;
  }

  // Coerce a platform-channel value to int; returns null for non-numeric.
  static int? _asInt(Object? value) => value is num ? value.toInt() : null;
}
/// Player type.
///
/// 播放器类型
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论