diff --git a/AndroidMedia/.gitignore b/AndroidMedia/.gitignore
deleted file mode 100644
index 42afabfd..00000000
--- a/AndroidMedia/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/build
\ No newline at end of file
diff --git a/AndroidMedia/build.gradle b/AndroidMedia/build.gradle
deleted file mode 100644
index c8506643..00000000
--- a/AndroidMedia/build.gradle
+++ /dev/null
@@ -1,40 +0,0 @@
-
-apply plugin: 'com.android.library'
-//apply plugin: 'com.android.application'
-apply plugin: 'kotlin-android'
-apply plugin: 'kotlin-kapt'
-
-android {
- compileSdkVersion rootProject.ext.compileSdkVersion
- buildToolsVersion rootProject.ext.buildToolsVersion
-
- defaultConfig {
- minSdkVersion rootProject.ext.minSdkVersion
- targetSdkVersion rootProject.ext.targetSdkVersion
- versionCode rootProject.ext.versionCode
- versionName rootProject.ext.versionName
-// applicationId "com.frank.androidmedia"
- }
-
- buildTypes {
- release {
- minifyEnabled false
- proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
- }
- }
- compileOptions {
- sourceCompatibility JavaVersion.VERSION_1_8
- targetCompatibility JavaVersion.VERSION_1_8
- }
-}
-
-dependencies {
-
- implementation fileTree(include: ['*.jar'], dir: 'libs')
- implementation "androidx.appcompat:appcompat:$rootProject.appcompatVersion"
- implementation "androidx.recyclerview:recyclerview:$rootProject.recyclerviewVersion"
- implementation "androidx.core:core-ktx:$rootProject.core_ktx"
- implementation "androidx.lifecycle:lifecycle-viewmodel-ktx:$rootProject.lifecycle_ktx"
- implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
-
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioProcessController.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioProcessController.kt
deleted file mode 100644
index 1ca58820..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioProcessController.kt
+++ /dev/null
@@ -1,100 +0,0 @@
-package com.frank.androidmedia.controller
-
-import android.media.audiofx.AcousticEchoCanceler
-import android.media.audiofx.AudioEffect
-import android.media.audiofx.AutomaticGainControl
-import android.media.audiofx.NoiseSuppressor
-import android.util.Log
-import java.lang.Exception
-
-/**
- *
- * @author frank
- * @date 2022/3/23
- */
-open class AudioProcessController {
-
- /*************************************************************
- AudioEffect
- |
- ____________________|___________________
- | | |
- AcousticEchoCanceler AutomaticGainControl NoiseSuppressor
-
- **************************************************************/
-
- companion object {
- val TAG: String = AudioProcessController::class.java.simpleName
- }
-
- private var noiseSuppressor: NoiseSuppressor? = null
- private var automaticGainControl: AutomaticGainControl? = null
- private var acousticEchoCanceler: AcousticEchoCanceler? = null
-
- fun initAEC(audioSessionId: Int): Boolean {
- if (!AcousticEchoCanceler.isAvailable()) {
- Log.e(TAG, "AEC not available...")
- return false
- }
- try {
- acousticEchoCanceler = AcousticEchoCanceler.create(audioSessionId)
- } catch (e: Exception) {
- Log.e(TAG, "init AcousticEchoCanceler error=$e")
- return false
- }
- val result = acousticEchoCanceler?.setEnabled(true)
- if (result != AudioEffect.SUCCESS) {
- acousticEchoCanceler?.release()
- acousticEchoCanceler = null
- return false
- }
- return true
- }
-
- fun initAGC(audioSessionId: Int): Boolean {
- if (!AutomaticGainControl.isAvailable()) {
- Log.e(TAG, "AGC not available...")
- return false
- }
- try {
- automaticGainControl = AutomaticGainControl.create(audioSessionId)
- } catch (e: Exception) {
- Log.e(TAG, "init AutomaticGainControl error=$e")
- return false
- }
- val result = automaticGainControl?.setEnabled(true)
- if (result != AudioEffect.SUCCESS) {
- automaticGainControl?.release()
- automaticGainControl = null
- return false
- }
- return true
- }
-
- fun initNS(audioSessionId: Int): Boolean {
- if (!NoiseSuppressor.isAvailable()) {
- Log.e(TAG, "NS not available...")
- return false
- }
- try {
- noiseSuppressor = NoiseSuppressor.create(audioSessionId)
- } catch (e: Exception) {
- Log.e(TAG, "init NoiseSuppressor error=$e")
- return false
- }
- val result = noiseSuppressor?.setEnabled(true)
- if (result != AudioEffect.SUCCESS) {
- noiseSuppressor?.release()
- noiseSuppressor = null
- return false
- }
- return true
- }
-
- fun release() {
- noiseSuppressor?.release()
- acousticEchoCanceler?.release()
- automaticGainControl?.release()
- }
-
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioRecordController.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioRecordController.kt
deleted file mode 100644
index 46f1b270..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioRecordController.kt
+++ /dev/null
@@ -1,154 +0,0 @@
-package com.frank.androidmedia.controller
-
-import android.media.AudioFormat
-import android.media.AudioRecord
-import android.media.MediaRecorder
-import android.util.Log
-import com.frank.androidmedia.util.WavUtil
-import java.io.File
-import java.io.FileOutputStream
-import java.io.IOException
-import java.lang.Exception
-
-/**
- * Using AudioRecord to record an audio segment.
- * See also MediaRecord, which used to record media.
- *
- * @author frank
- * @date 2022/3/22
- */
-open class AudioRecordController {
-
- companion object {
- val TAG: String = AudioTrackController::class.java.simpleName
- }
-
- private var minBufferSize = 0
- private var mAudioRecord: AudioRecord? = null
- private var mRecordThread: RecordThread? = null
-
- private val enableAudioProcessor = false
- private var mAudioProcessController: AudioProcessController? = null
-
- private fun initAudioRecord() {
- val sampleRate = 44100
- val audioFormat = AudioFormat.ENCODING_PCM_16BIT
- val channelConfig = AudioFormat.CHANNEL_IN_STEREO
- minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat)
- mAudioRecord = AudioRecord( MediaRecorder.AudioSource.MIC,
- sampleRate,
- channelConfig,
- audioFormat,
- minBufferSize)
-
- if (enableAudioProcessor) {
- mAudioProcessController = AudioProcessController()
- var result:Boolean? = mAudioProcessController?.initAEC(mAudioRecord?.audioSessionId!!)
- Log.e(TAG, "init AEC result=$result")
- result = mAudioProcessController?.initAGC(mAudioRecord?.audioSessionId!!)
- Log.e(TAG, "init AGC result=$result")
- result = mAudioProcessController?.initNS(mAudioRecord?.audioSessionId!!)
- Log.e(TAG, "init NS result=$result")
- }
- }
-
- private class RecordThread(recordPath: String, audioRecord: AudioRecord, bufferSize: Int) : Thread() {
-
- var isRecording = false
- private val lock = Object()
- private var mPath: String? = null
- private lateinit var mData: ByteArray
- private var mBufferSize = 0
- private var mAudioRecord: AudioRecord? = null
- private var mOutputStream: FileOutputStream? = null
-
- init {
- mPath = recordPath
- isRecording = true
- mBufferSize = bufferSize
- mAudioRecord = audioRecord
- }
-
- override fun run() {
- super.run()
-
- try {
- mData = ByteArray(mBufferSize)
- mOutputStream = FileOutputStream(mPath)
- } catch (e: Exception) {
- Log.e(TAG, "open file error=$e")
- isRecording = false
- }
-
- while (isRecording) {
- synchronized(lock) {
- if (isRecording) {
- val size: Int = mAudioRecord?.read(mData, 0, mBufferSize)!!
- if (size > 0) {
- mOutputStream?.write(mData, 0, size)
- } else if (size < 0) {
- Log.e(TAG, "read data error, size=$size")
- }
- }
- }
- }
-
- if (mOutputStream != null) {
- try {
- mOutputStream?.close()
- } catch (e: IOException) {
- e.printStackTrace()
- }
- }
- // convert pcm to wav
- val wavPath = File(mPath).parent + "/test.wav"
- WavUtil.makePCMToWAVFile(mPath, wavPath, true)
- }
- }
-
- fun startRecord(recordPath: String) {
- if (mAudioRecord == null) {
- try {
- initAudioRecord()
- } catch (e: Exception) {
- Log.e(TAG, "init AudioRecord error=$e")
- return
- }
- }
-
- if (mAudioRecord!!.recordingState == AudioRecord.RECORDSTATE_RECORDING) {
- Log.e(TAG, "is recording audio...")
- return
- }
-
- let {
- Log.i(TAG, "start record...")
- mAudioRecord!!.startRecording()
- mRecordThread = RecordThread(recordPath, mAudioRecord!!, minBufferSize)
- mRecordThread!!.start()
- }
- }
-
- fun stopRecord() {
- Log.i(TAG, "stop record...")
- if (mRecordThread != null) {
- mRecordThread!!.isRecording = false
- mRecordThread!!.interrupt()
- mRecordThread = null
- }
- if (mAudioRecord != null) {
- mAudioRecord!!.stop()
- }
- }
-
- fun release() {
- if (mAudioRecord != null) {
- mAudioRecord!!.release()
- mAudioRecord = null
- }
- if (mAudioProcessController != null) {
- mAudioProcessController!!.release()
- mAudioProcessController = null
- }
- }
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioTrackController.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioTrackController.kt
deleted file mode 100644
index 31b157d6..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioTrackController.kt
+++ /dev/null
@@ -1,177 +0,0 @@
-package com.frank.androidmedia.controller
-
-import android.media.*
-import android.os.SystemClock
-import android.util.Log
-import java.lang.Exception
-import java.util.concurrent.atomic.AtomicBoolean
-
-/**
- * 1. Using MediaExtractor to demux audio.
- * 2. Using MediaCodec to decode audio.
- * 3. Using AudioTrack to play audio.
- * See also AAduio, oboe, openSL ES.
- *
- * @author frank
- * @date 2022/3/21
- */
-open class AudioTrackController {
-
- companion object {
- val TAG: String = AudioTrackController::class.java.simpleName
-
- private const val DEQUEUE_TIME = (10 * 1000).toLong()
- private const val SLEEP_TIME: Long = 20
- }
-
- private var running: AtomicBoolean? = null
- private var audioTrack: AudioTrack? = null
- private var mediaCodec: MediaCodec? = null
- private var mediaExtractor: MediaExtractor? = null
-
- private fun parseAudioFormat(path: String): MediaFormat? {
- mediaExtractor = MediaExtractor()
- try {
- mediaExtractor?.setDataSource(path)
- for (i in 0 until mediaExtractor!!.trackCount) {
- val mediaFormat = mediaExtractor!!.getTrackFormat(i)
- val mimeType = mediaFormat.getString(MediaFormat.KEY_MIME)
- if (mimeType != null && mimeType.startsWith("audio")) {
- mediaExtractor!!.selectTrack(i)
- return mediaFormat
- }
- }
- } catch (e: Exception) {
- Log.e(TAG, "parseAudioFormat err=$e")
- }
- return null
- }
-
- private fun initMediaCodec(mediaFormat: MediaFormat): Boolean {
- val mimeType = mediaFormat.getString(MediaFormat.KEY_MIME)
- mediaCodec = mimeType?.let { MediaCodec.createDecoderByType(it) }
- return try {
- mediaCodec!!.configure(mediaFormat, null, null, 0)
- mediaCodec!!.start()
- true
- } catch (e: Exception) {
- Log.e(TAG, "initMediaCodec err=$e")
- false
- }
- }
-
- private fun initAudioTrack(mediaFormat: MediaFormat): Boolean {
- val sampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE)
- val channelCount = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
- val channelConfig = if (channelCount == 1) {
- AudioFormat.CHANNEL_OUT_MONO
- } else {
- AudioFormat.CHANNEL_OUT_STEREO
- }
- val encoding = AudioFormat.ENCODING_PCM_16BIT
- val bufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding)
- Log.e(TAG, "sampleRate=$sampleRate, channelCount=$channelCount, bufferSize=$bufferSize")
-
- try {
- val audioFormat = AudioFormat.Builder()
- .setEncoding(encoding)
- .setSampleRate(sampleRate)
- .setChannelMask(channelConfig)
- .build()
- val audioAttributes = AudioAttributes.Builder()
- .setLegacyStreamType(AudioManager.STREAM_MUSIC)
- .build()
- audioTrack = AudioTrack(audioAttributes, audioFormat,
- bufferSize, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE)
- audioTrack!!.play()
- } catch (e: Exception) {
- Log.e(TAG, "initAudioTrack err=$e")
- return false
- }
- return true
- }
-
- private fun release() {
- if (mediaExtractor != null) {
- mediaExtractor!!.release()
- mediaExtractor = null
- }
- if (mediaCodec != null) {
- mediaCodec!!.release()
- mediaCodec = null
- }
- if (audioTrack != null) {
- audioTrack!!.release()
- audioTrack = null
- }
- Log.e(TAG, "release done...")
- }
-
- fun playAudio(path: String) {
- var finished = false
- val data = ByteArray(10 * 1024)
- running = AtomicBoolean(true)
- val bufferInfo = MediaCodec.BufferInfo()
- val mediaFormat = parseAudioFormat(path) ?: return release()
- var result = initMediaCodec(mediaFormat)
- if (!result) {
- return release()
- }
- result = initAudioTrack(mediaFormat)
- if (!result) {
- return release()
- }
-
- while (!finished) {
- if (!running!!.get()) {
- break
- }
- val inputIndex = mediaCodec!!.dequeueInputBuffer(DEQUEUE_TIME)
- if (inputIndex >= 0) {
- val inputBuffer = mediaCodec!!.getInputBuffer(inputIndex)
- // demux
- val sampleSize = mediaExtractor!!.readSampleData(inputBuffer!!, 0)
- // decode
- if (sampleSize < 0) {
- mediaCodec!!.queueInputBuffer(inputIndex, 0, 0,
- 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM)
- finished = true
- } else {
- mediaCodec!!.queueInputBuffer(inputIndex, 0, sampleSize,
- mediaExtractor!!.sampleTime, mediaExtractor!!.sampleFlags)
- mediaExtractor!!.advance()
- }
- }
-
- val outputIndex = mediaCodec!!.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIME)
- // play
- if (outputIndex >= 0) {
- val outputBuffer = mediaCodec!!.getOutputBuffer(outputIndex)
- val size = outputBuffer!!.limit()
- outputBuffer.get(data, outputBuffer.position(), size - outputBuffer.position())
- audioTrack!!.write(data, 0, size)
- mediaCodec!!.releaseOutputBuffer(outputIndex, false)
- SystemClock.sleep(SLEEP_TIME)
- }
- }
-
- release()
- }
-
- fun stop() {
- running?.set(false)
- }
-
- fun getAudioSessionId(): Int {
- if (audioTrack == null)
- return 0
- return audioTrack!!.audioSessionId
- }
-
- fun attachAudioEffect(effectId: Int) {
- if (audioTrack == null)
- return
- audioTrack!!.attachAuxEffect(effectId)
- }
-
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaMetadataController.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaMetadataController.kt
deleted file mode 100644
index 9843c44c..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaMetadataController.kt
+++ /dev/null
@@ -1,113 +0,0 @@
-package com.frank.androidmedia.controller
-
-import android.graphics.Bitmap
-import android.graphics.BitmapFactory
-import android.media.MediaMetadataRetriever
-import android.util.Log
-import java.lang.Exception
-
-/**
- * Retrieve media metadata from video or audio,
- * and get thumbnail/frame from video or audio
- * @author frank
- * @date 2022/3/18
- */
-
-open class MediaMetadataController {
-
- private var title: String? = null
- private var duration: Long = 0
- private var bitrate: Int = 0
-
- private var width: Int = 0
- private var height: Int = 0
- private var frameRate: Float = 0.0f
-
- private var thumbnail: Bitmap? = null
-
- private var mRetriever: MediaMetadataRetriever? = null
-
-
- fun retrieveMetadata(path: String) {
- val retriever = MediaMetadataRetriever()
- try {
- retriever.setDataSource(path)
- title = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_TITLE)
- if (title != null) {
- Log.i(TAG, "title=$title")
- }
- val durationStr = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)
- if (durationStr != null) {
- duration = durationStr.toLong()
- Log.i(TAG, "duration=$duration")
- }
- val bitrateStr = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE)
- if (bitrateStr != null) {
- bitrate = bitrateStr.toInt()
- }
- val widthStr = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH)
- if (widthStr != null) {
- width = widthStr.toInt()
- }
- val heightStr = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT)
- if (heightStr != null) {
- height = heightStr.toInt()
- Log.i(TAG, "video width=$width,height=$height")
- }
- try {
- if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
- val frameRateStr = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_CAPTURE_FRAMERATE)
- if (frameRateStr != null) {
- frameRate = frameRateStr.toFloat()
- }
- Log.i(TAG, "frameRate=$frameRate")
- }
- } catch (e: Exception) {
- Log.e(TAG, "retrieve frameRate error=$e")
- }
- val hasVideoStr = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_VIDEO)
- val hasAudioStr = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_AUDIO)
- if (hasVideoStr != null && "yes" == hasVideoStr) {
- thumbnail = retriever.getFrameAtTime(0)
- } else if (hasAudioStr != null && "yes" == hasAudioStr) {
- val byteArray = retriever.embeddedPicture
- if (byteArray != null) {
- thumbnail = BitmapFactory.decodeByteArray(byteArray, 0, byteArray.size, null)
- }
- }
- if (thumbnail != null) {
- Log.i(TAG, "thumbnail width=${thumbnail?.width}, height=${thumbnail?.height}")
- }
- } catch (e: Exception) {
- Log.e(TAG, "retrieve error=$e")
- } finally {
- retriever.release()
- }
- }
-
- fun initRetriever(path: String) {
- mRetriever = MediaMetadataRetriever()
- try {
- mRetriever?.setDataSource(path)
- } catch (e: Exception) {
- Log.e(TAG, "initRetriever error=$e")
- }
- }
-
- fun getFrameAtTime(timeUs: Long) : Bitmap? {
- if (mRetriever == null)
- return null
- return mRetriever!!.getFrameAtTime(timeUs)
- }
-
- fun releaseRetriever() {
- if (mRetriever != null) {
- mRetriever?.release()
- }
- }
-
- companion object {
- val TAG: String = MediaMetadataController::class.java.simpleName
- }
-
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaMuxController.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaMuxController.kt
deleted file mode 100644
index fd27af62..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaMuxController.kt
+++ /dev/null
@@ -1,91 +0,0 @@
-package com.frank.androidmedia.controller
-
-import android.media.MediaCodec
-import android.media.MediaExtractor
-import android.media.MediaFormat
-import android.media.MediaMuxer
-import android.util.Log
-import java.lang.Exception
-import java.nio.ByteBuffer
-
-/**
- * Using MediaExtractor to demux media format.
- * Using MediaMuxer to mux media format again.
- * @author frank
- * @date 2022/3/21
- */
-open class MediaMuxController {
-
- fun muxMediaFile(inputPath: String, outputPath: String): Boolean {
- if (inputPath.isEmpty() || outputPath.isEmpty()) {
- return false
- }
- var happenError = false
- // 1、create MediaMuxer
- val mediaMuxer = MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
- val mediaExtractor = MediaExtractor()
- try {
- var videoIndex = 0
- var audioIndex = 0
- var audioFormat: MediaFormat? = null
- var videoFormat: MediaFormat? = null
- var finished = false
- val bufferInfo = MediaCodec.BufferInfo()
- val inputBuffer = ByteBuffer.allocate(2 * 1024 * 1024)
- mediaExtractor.setDataSource(inputPath)
- // select track with mimetype
- for (i in 0 until mediaExtractor.trackCount) {
- val mediaFormat = mediaExtractor.getTrackFormat(i)
- val mimeType = mediaFormat.getString(MediaFormat.KEY_MIME)
- if (mimeType != null && mimeType.startsWith("video")) {
- videoIndex = i
- videoFormat = mediaFormat
- mediaExtractor.selectTrack(i)
- } else if (mimeType != null && mimeType.startsWith("audio") && audioFormat == null) {
- audioIndex = i
- audioFormat = mediaFormat
- mediaExtractor.selectTrack(i)
- }
- }
- // 2、add MediaFormat into track
- if (videoFormat != null) {
- mediaMuxer.addTrack(videoFormat)
- }
- if (audioFormat != null) {
- mediaMuxer.addTrack(audioFormat)
- }
- // 3、start the muxer
- mediaMuxer.start()
-
- while (!finished) {
- // demux media stream
- val sampleSize = mediaExtractor.readSampleData(inputBuffer, 0)
- if (sampleSize > 0) {
- bufferInfo.size = sampleSize
- bufferInfo.flags = mediaExtractor.sampleFlags
- bufferInfo.presentationTimeUs = mediaExtractor.sampleTime
- // 4、call MediaMuxer to mux media stream
- if (mediaExtractor.sampleTrackIndex == videoIndex) {
- mediaMuxer.writeSampleData(videoIndex, inputBuffer, bufferInfo)
- } else if (mediaExtractor.sampleTrackIndex == audioIndex) {
- mediaMuxer.writeSampleData(audioIndex, inputBuffer, bufferInfo)
- }
- inputBuffer.flip()
- mediaExtractor.advance()
- } else if (sampleSize < 0) {
- finished = true
- }
- }
-
- } catch (e: Exception) {
- Log.e("MediaMuxController", "mux error=$e")
- happenError = true
- } finally {
- // 5、release resource
- mediaMuxer.release()
- mediaExtractor.release()
- return !happenError
- }
- }
-
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaProjectionController.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaProjectionController.kt
deleted file mode 100644
index 90e2cee6..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaProjectionController.kt
+++ /dev/null
@@ -1,184 +0,0 @@
-package com.frank.androidmedia.controller
-
-import android.app.Activity
-import android.content.Context
-import android.content.Intent
-import android.graphics.Bitmap
-import android.graphics.PixelFormat
-import android.hardware.display.DisplayManager
-import android.hardware.display.VirtualDisplay
-import android.media.ImageReader
-import android.media.MediaCodec
-import android.media.MediaCodecInfo
-import android.media.MediaFormat
-import android.media.projection.MediaProjection
-import android.media.projection.MediaProjectionManager
-import android.os.Environment
-import android.util.DisplayMetrics
-import android.util.Log
-import android.view.Surface
-import android.view.WindowManager
-import com.frank.androidmedia.listener.VideoEncodeCallback
-import java.io.FileOutputStream
-import java.lang.Exception
-
-/**
- * Using MediaProjectionManager to screenshot,
- * and using MediaProjection recording screen.
- *
- * @author frank
- * @date 2022/3/25
- */
-open class MediaProjectionController(type: Int) {
-
- companion object {
- const val TYPE_SCREEN_SHOT = 0
- const val TYPE_SCREEN_RECORD = 1
- const val TYPE_SCREEN_LIVING = 2
- }
-
- private var type = TYPE_SCREEN_SHOT
- private val requestCode = 123456
- private var virtualDisplay: VirtualDisplay? = null
- private var displayMetrics: DisplayMetrics? = null
- private var mediaProjection: MediaProjection? = null
- private var mediaProjectionManager: MediaProjectionManager? = null
-
- private var encodeThread: Thread? = null
- private var videoEncoder: MediaCodec? = null
- private var isVideoEncoding = false
- private var videoEncodeData: ByteArray? = null
- private var videoEncodeCallback: VideoEncodeCallback? = null
-
- init {
- this.type = type
- }
-
- fun setVideoEncodeListener(encodeCallback: VideoEncodeCallback) {
- videoEncodeCallback = encodeCallback
- }
-
- fun startScreenRecord(context: Context) {
- val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
- displayMetrics = DisplayMetrics()
- windowManager.defaultDisplay.getMetrics(displayMetrics)
- mediaProjectionManager = context.getSystemService(Context.MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
- val intent = mediaProjectionManager?.createScreenCaptureIntent()
- (context as Activity).startActivityForResult(intent, requestCode)
- }
-
- fun createVirtualDisplay(surface: Surface) {
- virtualDisplay = mediaProjection?.createVirtualDisplay("hello", displayMetrics!!.widthPixels,
- displayMetrics!!.heightPixels, displayMetrics!!.densityDpi,
- DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
- surface, null, null)
- }
-
- private fun saveBitmap(bitmap: Bitmap?, path: String) {
- if (path.isEmpty() || bitmap == null)
- return
- var outputStream: FileOutputStream? = null
- try {
- outputStream = FileOutputStream(path)
- bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream)
- } catch (e: Exception) {
-
- } finally {
- outputStream?.close()
- }
- }
-
- private fun getBitmap() {
- val imageReader = ImageReader.newInstance(displayMetrics!!.widthPixels,
- displayMetrics!!.heightPixels, PixelFormat.RGBA_8888, 3)
- createVirtualDisplay(imageReader.surface)
- imageReader.setOnImageAvailableListener ({ reader: ImageReader ->
-
- val image = reader.acquireNextImage()
- val planes = image.planes
- val buffer = planes[0].buffer
- val pixelStride = planes[0].pixelStride
- val rowStride = planes[0].rowStride
- val rowPadding = rowStride - pixelStride * image.width
- val bitmap = Bitmap.createBitmap(image.width + rowPadding / pixelStride,
- image.height, Bitmap.Config.ARGB_8888)
- bitmap.copyPixelsFromBuffer(buffer)
- val filePath = Environment.getExternalStorageDirectory().path + "/hello.jpg"
- saveBitmap(bitmap, filePath)
- image.close()
- imageReader.close()
- }, null)
- }
-
- private fun initMediaCodec(width: Int, height: Int) {
- val mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height)
- mediaFormat.setInteger(MediaFormat.KEY_WIDTH, width)
- mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, height)
- mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20)
- mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height)
- mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3)
- mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface)
-
- videoEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
- videoEncoder?.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
- createVirtualDisplay(videoEncoder!!.createInputSurface())
- }
-
- private fun startVideoEncoder() {
- if (videoEncoder == null || isVideoEncoding)
- return
- encodeThread = Thread {
- try {
- val bufferInfo = MediaCodec.BufferInfo()
- videoEncoder?.start()
-
- while (isVideoEncoding && !Thread.currentThread().isInterrupted) {
- val outputIndex = videoEncoder!!.dequeueOutputBuffer(bufferInfo, 30 * 1000)
- if (outputIndex >= 0) {
- val byteBuffer = videoEncoder!!.getOutputBuffer(outputIndex)
- if (videoEncodeData == null || videoEncodeData!!.size < bufferInfo.size) {
- videoEncodeData = ByteArray(bufferInfo.size)
- }
- if (videoEncodeCallback != null && byteBuffer != null) {
- byteBuffer.get(videoEncodeData, bufferInfo.offset, bufferInfo.size)
- videoEncodeCallback!!.onVideoEncodeData(videoEncodeData!!, bufferInfo.size,
- bufferInfo.flags, bufferInfo.presentationTimeUs)
- }
- videoEncoder!!.releaseOutputBuffer(outputIndex, false)
- } else {
- Log.e("EncodeThread", "invalid index=$outputIndex")
- }
- }
- } catch (e: Exception) {
- isVideoEncoding = false
- Log.e("EncodeThread", "encode error=$e")
- }
- }
- isVideoEncoding = true
- encodeThread?.start()
- }
-
- fun onActivityResult(resultCode: Int, data: Intent) {
- mediaProjection = mediaProjectionManager?.getMediaProjection(resultCode, data)
- if (type == TYPE_SCREEN_SHOT) {
- getBitmap()
- } else if (type == TYPE_SCREEN_LIVING) {
- initMediaCodec(displayMetrics!!.widthPixels, displayMetrics!!.heightPixels)
- startVideoEncoder()
- }
- }
-
- fun getRequestCode(): Int {
- return requestCode
- }
-
-
- fun stopScreenRecord() {
- mediaProjection?.stop()
- virtualDisplay?.release()
- isVideoEncoding = false
- encodeThread?.interrupt()
- videoEncoder?.release()
- }
-
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaRecordController.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaRecordController.kt
deleted file mode 100644
index 9a05bae4..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaRecordController.kt
+++ /dev/null
@@ -1,141 +0,0 @@
-package com.frank.androidmedia.controller
-
-import android.content.Context
-import android.content.Intent
-import android.hardware.Camera
-import android.media.CamcorderProfile
-import android.media.MediaRecorder
-import android.util.DisplayMetrics
-import android.util.Log
-import android.view.Surface
-import android.view.WindowManager
-
-/**
- * Using MediaRecorder to record a media file.
- *
- * @author frank
- * @date 2022/3/21
- */
-open class MediaRecordController {
-
- private val usingProfile = true
- private var mCamera: Camera? = null
- private var mOutputPath: String? = null
- private var mMediaRecorder: MediaRecorder? = null
- private var mDisplayMetrics: DisplayMetrics? = null
- private var mMediaProjectionController: MediaProjectionController? = null
-
- private fun initMediaRecord(videoSource: Int, surface: Surface?, outputPath: String) {
- if (videoSource == MediaRecorder.VideoSource.CAMERA
- || videoSource == MediaRecorder.VideoSource.DEFAULT) {
- // open camera
- mCamera = Camera.open()
- mCamera!!.setDisplayOrientation(90)
- mCamera!!.unlock()
- mMediaRecorder?.setCamera(mCamera)
- }
- // Note: pay attention to calling order
- mMediaRecorder?.setVideoSource(videoSource)
- mMediaRecorder?.setAudioSource(MediaRecorder.AudioSource.MIC)
- if (usingProfile && (videoSource == MediaRecorder.VideoSource.CAMERA
- || videoSource == MediaRecorder.VideoSource.DEFAULT)) {
- // QUALITY_480P QUALITY_720P QUALITY_1080P QUALITY_2160P
- val profile = CamcorderProfile.get(CamcorderProfile.QUALITY_720P)
- mMediaRecorder?.setProfile(profile)
- } else {
- mMediaRecorder?.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
- mMediaRecorder?.setAudioEncoder(MediaRecorder.AudioEncoder.AAC)
- mMediaRecorder?.setVideoEncoder(MediaRecorder.VideoEncoder.H264)
- mMediaRecorder?.setVideoSize(mDisplayMetrics!!.widthPixels, mDisplayMetrics!!.heightPixels)
- mMediaRecorder?.setVideoEncodingBitRate(5000 * 1000)
- mMediaRecorder?.setVideoFrameRate(25)
- mMediaRecorder?.setAudioChannels(2)
- mMediaRecorder?.setAudioSamplingRate(48000)
- }
- mMediaRecorder?.setOutputFile(outputPath)
- if (surface != null && (videoSource == MediaRecorder.VideoSource.CAMERA
- || videoSource == MediaRecorder.VideoSource.DEFAULT)) {
- mMediaRecorder?.setPreviewDisplay(surface)
- }
- try {
- mMediaRecorder?.prepare()
- } catch (e: Exception) {
- Log.e("MediaRecorder", "prepare recorder error=$e")
- }
- }
-
- private fun startRecordInternal(videoSource: Int, surface: Surface?, outputPath: String) {
- initMediaRecord(videoSource, surface, outputPath)
- try {
- if (videoSource == MediaRecorder.VideoSource.SURFACE) {
- mMediaProjectionController?.createVirtualDisplay(mMediaRecorder?.surface!!)
- }
- mMediaRecorder?.start()
- } catch (e: Exception) {
- Log.e("MediaRecorder", "start recorder error=$e")
- }
- }
-
- /**
- * Start record camera or screen
- * @param videoSource the source of video, see {@link MediaRecorderVideoSource.CAMERA}
- * or {@link MediaRecorder.VideoSource.SURFACE}
- * @param surface the Surface to preview, when videoSource = MediaRecorderVideoSource.CAMERA
- * @param context the Context of Activity
- * @param outputPath the output path to save media file
- */
- fun startRecord(videoSource: Int, surface: Surface?, context: Context, outputPath: String) {
- if (mMediaRecorder == null) {
- mMediaRecorder = MediaRecorder()
- val windowManager = context.getSystemService(Context.WINDOW_SERVICE) as WindowManager
- mDisplayMetrics = DisplayMetrics()
- windowManager.defaultDisplay.getMetrics(mDisplayMetrics)
- }
-
- if (videoSource == MediaRecorder.VideoSource.SURFACE) {
- mOutputPath = outputPath
- mMediaProjectionController = MediaProjectionController(MediaProjectionController.TYPE_SCREEN_RECORD)
- mMediaProjectionController?.startScreenRecord(context)
- return
- }
-
- startRecordInternal(videoSource, surface, outputPath)
- Log.i("MediaRecorder", "startRecord...")
- }
-
- /**
- * Stop recording camera or screen,
- * and release everything.
- */
- fun stopRecord() {
- if (mMediaRecorder != null) {
- mMediaRecorder?.stop()
- mMediaRecorder?.reset()
- }
- if (mCamera != null) {
- mCamera!!.stopPreview()
- }
- if (mMediaProjectionController != null) {
- mMediaProjectionController!!.stopScreenRecord()
- }
- Log.i("MediaRecorder", "stopRecord...")
- }
-
- fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent) {
- if (requestCode == mMediaProjectionController?.getRequestCode()) {
- mMediaProjectionController?.onActivityResult(resultCode, data)
- startRecordInternal(MediaRecorder.VideoSource.SURFACE, null, mOutputPath!!)
- }
- }
-
- fun release() {
- if (mMediaRecorder != null) {
- mMediaRecorder?.release()
- mMediaRecorder = null
- }
- if (mCamera != null) {
- mCamera!!.release()
- mCamera!!.lock()
- }
- }
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/listener/VideoEncodeCallback.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/listener/VideoEncodeCallback.kt
deleted file mode 100644
index a3da3f11..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/listener/VideoEncodeCallback.kt
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.frank.androidmedia.listener
-
-/**
- * @author xufulong
- * @date 4/1/22 1:44 PM
- * @desc
- */
-interface VideoEncodeCallback {
-
- fun onVideoEncodeData(data: ByteArray, size: Int, flag: Int, timestamp: Long)
-
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/util/WavHeader.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/util/WavHeader.kt
deleted file mode 100644
index f8043f19..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/util/WavHeader.kt
+++ /dev/null
@@ -1,81 +0,0 @@
-package com.frank.androidmedia.util
-
-import java.io.ByteArrayOutputStream
-import java.io.IOException
-
-/**
- * The header of wave format
- * @author frank
- * @date 2022/3/22
- */
-internal class WavHeader {
- var riffID = charArrayOf('R', 'I', 'F', 'F')
- @JvmField
- var riffSize = 0
- var riffType = charArrayOf('W', 'A', 'V', 'E')
- var formatID = charArrayOf('f', 'm', 't', ' ')
- @JvmField
- var formatSize = 0
- @JvmField
- var formatTag: Short = 0
- @JvmField
- var numChannels: Short = 0
- @JvmField
- var sampleRate = 0
- @JvmField
- var avgBytesPerSec = 0
- @JvmField
- var blockAlign: Short = 0
- @JvmField
- var bitsPerSample: Short = 0
- var dataID = charArrayOf('d', 'a', 't', 'a')
- @JvmField
- var dataSize = 0
-
- @get:Throws(IOException::class)
- val header: ByteArray
- get() {
- val bos = ByteArrayOutputStream()
- writeChar(bos, riffID)
- writeInt(bos, riffSize)
- writeChar(bos, riffType)
- writeChar(bos, formatID)
- writeInt(bos, formatSize)
- writeShort(bos, formatTag.toInt())
- writeShort(bos, numChannels.toInt())
- writeInt(bos, sampleRate)
- writeInt(bos, avgBytesPerSec)
- writeShort(bos, blockAlign.toInt())
- writeShort(bos, bitsPerSample.toInt())
- writeChar(bos, dataID)
- writeInt(bos, dataSize)
- bos.flush()
- val r = bos.toByteArray()
- bos.close()
- return r
- }
-
- @Throws(IOException::class)
- private fun writeShort(bos: ByteArrayOutputStream, s: Int) {
- val data = ByteArray(2)
- data[1] = (s shl 16 shr 24).toByte()
- data[0] = (s shl 24 shr 24).toByte()
- bos.write(data)
- }
-
- @Throws(IOException::class)
- private fun writeInt(bos: ByteArrayOutputStream, n: Int) {
- val buf = ByteArray(4)
- buf[3] = (n shr 24).toByte()
- buf[2] = (n shl 8 shr 24).toByte()
- buf[1] = (n shl 16 shr 24).toByte()
- buf[0] = (n shl 24 shr 24).toByte()
- bos.write(buf)
- }
-
- private fun writeChar(bos: ByteArrayOutputStream, id: CharArray) {
- for (c in id) {
- bos.write(c.toInt())
- }
- }
-}
\ No newline at end of file
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/util/WavUtil.kt b/AndroidMedia/src/main/java/com/frank/androidmedia/util/WavUtil.kt
deleted file mode 100644
index 2126dc6a..00000000
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/util/WavUtil.kt
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.frank.androidmedia.util
-
-import android.util.Log
-import java.io.*
-
-/**
- * Convert pcm to wav
- *
- * @author frank
- * @date 2022/3/22
- */
-object WavUtil {
-
- fun makePCMToWAVFile(pcmPath: String?, wavPath: String?, deletePcmFile: Boolean): Boolean {
- val buffer: ByteArray
- val file = File(pcmPath)
- if (!file.exists()) {
- return false
- }
- val len = file.length().toInt()
- val header = WavHeader()
- header.riffSize = len + (44 - 8)
- header.formatSize = 16
- header.bitsPerSample = 16
- header.numChannels = 2
- header.formatTag = 0x0001
- header.sampleRate = 44100
- header.blockAlign = (header.numChannels * header.bitsPerSample / 8).toShort()
- header.avgBytesPerSec = header.blockAlign * header.sampleRate
- header.dataSize = len
- val h: ByteArray = try {
- header.header
- } catch (e1: IOException) {
- e1.message?.let { Log.e("WavUtil", it) }
- return false
- }
- if (h.size != 44) return false
- val dstFile = File(wavPath)
- if (dstFile.exists()) dstFile.delete()
- try {
- buffer = ByteArray(1024 * 4)
- val inStream: InputStream
- val ouStream: OutputStream
- ouStream = BufferedOutputStream(FileOutputStream(wavPath))
- ouStream.write(h, 0, h.size)
- inStream = BufferedInputStream(FileInputStream(file))
- var size = inStream.read(buffer)
- while (size != -1) {
- ouStream.write(buffer)
- size = inStream.read(buffer)
- }
- inStream.close()
- ouStream.close()
- } catch (e: IOException) {
- e.message?.let { Log.e("WavUtil", it) }
- return false
- }
- if (deletePcmFile) {
- file.delete()
- }
- Log.i("WavUtil", "makePCMToWAVFile success...")
- return true
- }
-}
\ No newline at end of file
diff --git a/CameraFilter/build.gradle b/CameraFilter/build.gradle
index 1adb1c6c..11e3573f 100644
--- a/CameraFilter/build.gradle
+++ b/CameraFilter/build.gradle
@@ -4,6 +4,7 @@ plugins {
android {
compileSdkVersion rootProject.ext.compileSdkVersion
+ namespace "com.frank.camerafilter"
defaultConfig {
minSdkVersion rootProject.ext.minSdkVersion
diff --git a/Live/build.gradle b/Live/build.gradle
index e3320b15..821326e3 100644
--- a/Live/build.gradle
+++ b/Live/build.gradle
@@ -3,6 +3,8 @@ apply plugin: 'com.android.library'
android {
compileSdkVersion rootProject.ext.compileSdkVersion
buildToolsVersion rootProject.ext.buildToolsVersion
+ namespace "com.frank.live"
+
defaultConfig {
minSdkVersion rootProject.ext.minSdkVersion
targetSdkVersion rootProject.ext.targetSdkVersion
diff --git a/libmp3/.gitignore b/NextPlayer/.gitignore
similarity index 100%
rename from libmp3/.gitignore
rename to NextPlayer/.gitignore
diff --git a/NextPlayer/build.gradle b/NextPlayer/build.gradle
new file mode 100644
index 00000000..d0cdf090
--- /dev/null
+++ b/NextPlayer/build.gradle
@@ -0,0 +1,65 @@
+plugins {
+ id 'com.android.library'
+ id 'maven-publish'
+}
+
+android {
+ compileSdk 34
+ namespace "com.frank.next"
+
+ defaultConfig {
+ minSdk 21
+ targetSdk 34
+
+ consumerProguardFiles "consumer-rules.pro"
+ ndk {
+ abiFilters "arm64-v8a", "armeabi-v7a"
+ }
+ externalNativeBuild {
+ cmake {
+ arguments '-DANDROID_STL=c++_shared'
+ }
+ }
+ }
+
+ sourceSets {
+ main {
+ jniLibs.srcDirs = ["libs"]
+ }
+ }
+
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+ }
+ }
+
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+
+ externalNativeBuild {
+ cmake {
+ path "../../engine/CMakeLists.txt"
+ }
+ }
+
+ composeOptions {
+ kotlinCompilerExtensionVersion '1.3.2'
+ }
+
+ publishing {
+ singleVariant('release') {
+ withSourcesJar()
+ withJavadocJar()
+ }
+ }
+}
+
+dependencies {
+ implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+ implementation "androidx.core:core-ktx:$rootProject.core_ktx"
+ implementation "androidx.appcompat:appcompat:$rootProject.appcompatVersion"
+}
diff --git a/AndroidMedia/proguard-rules.pro b/NextPlayer/proguard-rules.pro
similarity index 100%
rename from AndroidMedia/proguard-rules.pro
rename to NextPlayer/proguard-rules.pro
diff --git a/AndroidMedia/src/main/AndroidManifest.xml b/NextPlayer/src/main/AndroidManifest.xml
similarity index 75%
rename from AndroidMedia/src/main/AndroidManifest.xml
rename to NextPlayer/src/main/AndroidManifest.xml
index c5993b20..f9c7cbd8 100644
--- a/AndroidMedia/src/main/AndroidManifest.xml
+++ b/NextPlayer/src/main/AndroidManifest.xml
@@ -1,5 +1,5 @@
+ package="com.frank.next">
\ No newline at end of file
diff --git a/NextPlayer/src/main/java/com/frank/next/player/BasePlayer.java b/NextPlayer/src/main/java/com/frank/next/player/BasePlayer.java
new file mode 100644
index 00000000..1af106c2
--- /dev/null
+++ b/NextPlayer/src/main/java/com/frank/next/player/BasePlayer.java
@@ -0,0 +1,107 @@
+package com.frank.next.player;
+
+/**
+ * Note: base of player
+ * Date: 2026/1/8
+ * Author: frank
+ */
+public abstract class BasePlayer implements IPlayer {
+
+ private OnInfoListener mOnInfoListener;
+ private OnErrorListener mOnErrorListener;
+ private OnCompleteListener mOnCompleteListener;
+ private OnPreparedListener mOnPreparedListener;
+ private OnBufferUpdateListener mOnBufferUpdateListener;
+ private OnSeekCompleteListener mOnSeekCompleteListener;
+ private OnVideoSizeChangedListener mOnVideoSizeChangedListener;
+
+ public BasePlayer() {
+ }
+
+ @Override
+ public final void setOnPreparedListener(IPlayer.OnPreparedListener listener) {
+ this.mOnPreparedListener = listener;
+ }
+
+ @Override
+ public final void setOnCompletionListener(OnCompleteListener listener) {
+ this.mOnCompleteListener = listener;
+ }
+
+ @Override
+ public final void setOnBufferingUpdateListener(OnBufferUpdateListener listener) {
+ this.mOnBufferUpdateListener = listener;
+ }
+
+ @Override
+ public final void setOnSeekCompleteListener(IPlayer.OnSeekCompleteListener listener) {
+ this.mOnSeekCompleteListener = listener;
+ }
+
+ @Override
+ public final void setOnVideoSizeChangedListener(IPlayer.OnVideoSizeChangedListener listener) {
+ this.mOnVideoSizeChangedListener = listener;
+ }
+
+ @Override
+ public final void setOnErrorListener(IPlayer.OnErrorListener listener) {
+ this.mOnErrorListener = listener;
+ }
+
+ @Override
+ public final void setOnInfoListener(IPlayer.OnInfoListener listener) {
+ this.mOnInfoListener = listener;
+ }
+
+ @Override
+ public void setOnPlayingListener(OnPlayingListener listener) {
+
+ }
+
+ public void resetListeners() {
+ this.mOnInfoListener = null;
+ this.mOnErrorListener = null;
+ this.mOnPreparedListener = null;
+ this.mOnCompleteListener = null;
+ this.mOnSeekCompleteListener = null;
+ this.mOnBufferUpdateListener = null;
+ this.mOnVideoSizeChangedListener = null;
+ }
+
+ protected final void onPrepared() {
+ if (this.mOnPreparedListener != null) {
+ this.mOnPreparedListener.onPrepared(this);
+ }
+ }
+
+ protected void onVideoSizeChanged(int width, int height, int sarNum, int sarDen) {
+ if (this.mOnVideoSizeChangedListener != null) {
+ this.mOnVideoSizeChangedListener.onVideoSizeChanged( width, height);
+ }
+ }
+
+ protected void onInfo(int what, int extra) {
+ if (mOnInfoListener != null) {
+ mOnInfoListener.onInfo(what, extra);
+ }
+ }
+
+ protected void onError(int what, int extra) {
+ if (this.mOnErrorListener != null) {
+ this.mOnErrorListener.onError(what, extra);
+ }
+ }
+
+ protected void onSeekComplete() {
+ if (this.mOnSeekCompleteListener != null) {
+ this.mOnSeekCompleteListener.onSeekComplete(this);
+ }
+ }
+
+ protected final void onComplete() {
+ if (this.mOnCompleteListener != null) {
+ this.mOnCompleteListener.onComplete(this);
+ }
+ }
+
+}
diff --git a/NextPlayer/src/main/java/com/frank/next/player/IPlayer.java b/NextPlayer/src/main/java/com/frank/next/player/IPlayer.java
new file mode 100644
index 00000000..3903773e
--- /dev/null
+++ b/NextPlayer/src/main/java/com/frank/next/player/IPlayer.java
@@ -0,0 +1,179 @@
+package com.frank.next.player;
+
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Note: interface of player
+ * Date: 2026/1/6
+ * Author: frank
+ */
+public interface IPlayer {
+
+ /**********************************message begin**********************************/
+
+ int MSG_COMPONENT_OPEN = 1000;
+ int MSG_OPEN_INPUT = 1001;
+ int MSG_FIND_STREAM_INFO = 1002;
+ int MSG_VIDEO_FIRST_PACKET = 1004;
+ int MSG_VIDEO_DECODE_START = 1006;
+ int MSG_ON_PREPARED = 1007;
+ int MSG_SET_VIDEO_SIZE = 1008;
+ int MSG_SET_VIDEO_SAR = 1009;
+ int MSG_ROTATION_CHANGED = 1010;
+ int MSG_VIDEO_RENDER_START = 1011;
+ int MSG_AUDIO_RENDER_START = 1012;
+ int MSG_NO_OP = 1013; // Flush
+ int MSG_ON_ERROR = 1014;
+ int MSG_ON_COMPLETED = 1015;
+ int MSG_MEDIA_INFO = 1016;
+
+ int MSG_BUFFER_START = 2000;
+ int MSG_BUFFER_UPDATE = 2001; // progress
+ int MSG_BUFFER_BYTE_UPDATE = 2002; // cached data in bytes
+ int MSG_BUFFER_TIME_UPDATE = 2003; // cached duration in ms
+ int MSG_BUFFER_END = 2004;
+
+ int MSG_SEEK_COMPLETED = 3003;
+ int MSG_PLAY_URL_CHANGED = 3006;
+
+ /*******************************PlayControl begin*********************************/
+
+ void prepareAsync() throws IllegalStateException;
+
+ void start() throws IllegalStateException;
+
+ void pause() throws IllegalStateException;
+
+ void seekTo(long msec) throws IllegalStateException;
+
+ void stop() throws IllegalStateException;
+
+ void reset();
+
+ void release();
+
+ /***********************************Set begin************************************/
+
+ void setCachePath(String path);
+
+ void setEnableMediaCodec(boolean enable);
+
+ void setDataSource(String path) throws IOException, IllegalStateException;
+
+ void setDataSource(String path, Map headers) throws IOException, IllegalStateException;
+
+ void setSurface(Surface surface);
+
+ void setDisplay(SurfaceHolder sh);
+
+ void setSpeed(float speed);
+
+ void setVolume(float leftVolume, float rightVolume);
+
+ void setScreenOnWhilePlaying(boolean screenOn);
+
+
+ /**********************************Get begin***********************************/
+
+ String getAudioCodecInfo();
+
+ String getVideoCodecInfo();
+
+ int getVideoWidth();
+
+ int getVideoHeight();
+
+ long getCurrentPosition();
+
+ long getDuration();
+
+ String getPlayUrl() throws IllegalStateException;
+
+ int getVideoSarNum();
+
+ int getVideoSarDen();
+
+ int getVideoDecoder();
+
+ float getVideoFrameRate();
+
+ float getVideoDecodeFrameRate();
+
+ float getVideoRenderFrameRate();
+
+ long getVideoCacheTime();
+
+ long getAudioCacheTime();
+
+ long getVideoCacheSize();
+
+ long getAudioCacheSize();
+
+ long getBitRate();
+
+ long getFileSize();
+
+ boolean isPlaying();
+
+ int getPlayerState();
+
+ long getSeekCostTime();
+
+ String getDataSource();
+
+
+ /**********************************interface begin**********************************/
+
+ void setOnPreparedListener(OnPreparedListener listener);
+
+ void setOnInfoListener(OnInfoListener listener);
+
+ void setOnErrorListener(OnErrorListener listener);
+
+ void setOnBufferingUpdateListener(OnBufferUpdateListener listener);
+
+ void setOnVideoSizeChangedListener(OnVideoSizeChangedListener listener);
+
+ void setOnSeekCompleteListener(OnSeekCompleteListener listener);
+
+ void setOnCompletionListener(OnCompleteListener listener);
+
+ void setOnPlayingListener(OnPlayingListener listener);
+
+ interface OnPreparedListener {
+ void onPrepared(IPlayer mp);
+ }
+
+ interface OnBufferUpdateListener {
+ void onBufferUpdate(int progress);
+ }
+
+ interface OnVideoSizeChangedListener {
+ void onVideoSizeChanged(int width, int height);
+ }
+
+ interface OnInfoListener {
+ boolean onInfo(int what, int extra);
+ }
+
+ interface OnErrorListener {
+ boolean onError(int what, int extra);
+ }
+
+ interface OnSeekCompleteListener {
+ void onSeekComplete(IPlayer mp);
+ }
+
+ interface OnCompleteListener {
+ void onComplete(IPlayer mp);
+ }
+
+ interface OnPlayingListener {
+ void onPlaying(boolean playing);
+ }
+
+}
diff --git a/NextPlayer/src/main/java/com/frank/next/player/NextPlayer.java b/NextPlayer/src/main/java/com/frank/next/player/NextPlayer.java
new file mode 100644
index 00000000..bbcc372d
--- /dev/null
+++ b/NextPlayer/src/main/java/com/frank/next/player/NextPlayer.java
@@ -0,0 +1,493 @@
+package com.frank.next.player;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import androidx.annotation.NonNull;
+
+import com.frank.next.loader.LibraryLoader;
+
+import java.io.IOException;
+import java.lang.ref.WeakReference;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+
+
+public class NextPlayer extends BasePlayer {
+ private final static String TAG = NextPlayer.class.getName();
+
+ enum LogLevel {
+ LOG_DEFAULT,
+ LOG_DEBUG,
+ LOG_INFO,
+ LOG_WARN,
+ LOG_ERROR
+ }
+
+ private static final int gLogCallBackLevel = LogLevel.LOG_DEBUG.ordinal();
+ private static NativeLogCallback mNativeLogListener;
+
+ private int mVideoWidth;
+ private int mVideoHeight;
+ private int mVideoSarNum;
+ private int mVideoSarDen;
+
+ private String mDataSource;
+ private EventHandler mEventHandler;
+ private SurfaceHolder mSurfaceHolder;
+
+ private static volatile boolean mNativeInitialized = false;
+
+ public static void loadLibraryOnce() {
+ LibraryLoader.loadLibsOnce();
+ }
+
+ private static void initOnce() {
+ synchronized (NextPlayer.class) {
+ if (!mNativeInitialized) {
+ native_init();
+ mNativeInitialized = true;
+ }
+ }
+ }
+
+ public NextPlayer() {
+ this(null);
+ }
+
+ public NextPlayer(Context context) {
+ initPlayer(context);
+ }
+
+ private void initPlayer(Context context) {
+ loadLibraryOnce();
+ initOnce();
+
+ Looper looper;
+ if ((looper = Looper.myLooper()) != null) {
+ mEventHandler = new EventHandler(this, looper);
+ } else if ((looper = Looper.getMainLooper()) != null) {
+ mEventHandler = new EventHandler(this, looper);
+ } else {
+ mEventHandler = null;
+ }
+ native_setup(new WeakReference<>(this));
+ }
+
+ @Override
+ public void setDisplay(SurfaceHolder holder) throws IllegalStateException {
+ mSurfaceHolder = holder;
+ Surface surface;
+ if (holder != null) {
+ surface = holder.getSurface();
+ } else {
+ surface = null;
+ }
+ _setVideoSurface(surface);
+ }
+
+ @Override
+ public void setDataSource(String path, Map headers)
+ throws IOException, IllegalStateException {
+ if (headers != null && !headers.isEmpty()) {
+ StringBuilder sb = new StringBuilder();
+ for (Map.Entry entry : headers.entrySet()) {
+ sb.append(entry.getKey());
+ sb.append(":");
+ String value = entry.getValue();
+ if (!TextUtils.isEmpty(value))
+ sb.append(entry.getValue());
+ sb.append("\r\n");
+ _setHeaders(sb.toString());
+ }
+ }
+ setDataSource(path);
+ }
+
+ @Override
+ public void setDataSource(String path) throws IOException, IllegalStateException {
+ mDataSource = path;
+ _setDataSource(path);
+ }
+
+ @Override
+ public String getDataSource() {
+ return mDataSource;
+ }
+
+ @Override
+ public void prepareAsync() throws IllegalStateException {
+ _prepareAsync();
+ }
+
+ @Override
+ public void start() throws IllegalStateException {
+ _start();
+ }
+
+ @Override
+ public void stop() throws IllegalStateException {
+ _stop();
+ }
+
+ @Override
+ public void pause() throws IllegalStateException {
+ _pause();
+ }
+
+ @Override
+ public void setScreenOnWhilePlaying(boolean screenOn) {
+ if (mSurfaceHolder != null) {
+ mSurfaceHolder.setKeepScreenOn(screenOn);
+ }
+ }
+
+ @Override
+ public void setEnableMediaCodec(boolean enable) {
+ _setEnableMediaCodec(enable);
+ }
+
+ @Override
+ public void setCachePath(String path) {
+ _setVideoCacheDir(path);
+ }
+
+ @Override
+ public String getAudioCodecInfo() {
+ return _getAudioCodecInfo();
+ }
+
+ @Override
+ public String getVideoCodecInfo() {
+ return _getVideoCodecInfo();
+ }
+
+ @Override
+ public float getVideoFrameRate() {
+ return _getVideoFileFps();
+ }
+
+ @Override
+ public int getVideoWidth() {
+ return mVideoWidth;
+ }
+
+ @Override
+ public int getVideoHeight() {
+ return mVideoHeight;
+ }
+
+ @Override
+ public boolean isPlaying() {
+ return _playing();
+ }
+
+ @Override
+ public void seekTo(long msec) throws IllegalStateException {
+ _seekTo(msec);
+ }
+
+ @Override
+ public long getCurrentPosition() {
+ return _getCurrentPosition();
+ }
+
+ @Override
+ public long getDuration() {
+ return _getDuration();
+ }
+
+ @Override
+ public void release() {
+ resetListeners();
+ _release();
+ }
+
+ @Override
+ public void reset() {
+ _reset();
+ mEventHandler.removeCallbacksAndMessages(null);
+ mVideoWidth = 0;
+ mVideoHeight = 0;
+ }
+
+ @Override
+ public void setVolume(float leftVolume, float rightVolume) {
+ _setVolume(leftVolume, rightVolume);
+ }
+
+ @Override
+ public String getPlayUrl() throws IllegalStateException {
+ return _getPlayUrl();
+ }
+
+ @Override
+ public int getVideoSarNum() {
+ return mVideoSarNum;
+ }
+
+ @Override
+ public int getVideoSarDen() {
+ return mVideoSarDen;
+ }
+
+ @Override
+ public void setSurface(Surface surface) {
+ mSurfaceHolder = null;
+ _setVideoSurface(surface);
+ }
+
+ @Override
+ public void setSpeed(float speed) {
+ _setSpeed(speed);
+ }
+
+ @Override
+ public int getVideoDecoder() {
+ return _getVideoDecoder();
+ }
+
+ @Override
+ public float getVideoRenderFrameRate() {
+ return _getVideoRenderFrameRate();
+ }
+
+ @Override
+ public float getVideoDecodeFrameRate() {
+ return _getVideoDecodeFrameRate();
+ }
+
+ @Override
+ public long getVideoCacheTime() {
+ return _getVideoCachedTime();
+ }
+
+ @Override
+ public long getAudioCacheTime() {
+ return _getAudioCachedTime();
+ }
+
+ @Override
+ public long getVideoCacheSize() {
+ return _getVideoCachedSize();
+ }
+
+ @Override
+ public long getAudioCacheSize() {
+ return _getAudioCachedSize();
+ }
+
+ @Override
+ public long getFileSize() {
+ return _getFileSize();
+ }
+
+ @Override
+ public long getBitRate() {
+ return _getBitRate();
+ }
+
+ @Override
+ public long getSeekCostTime() {
+ return _getSeekCostTime();
+ }
+
+ @Override
+ public int getPlayerState() {
+ return _getPlayerState();
+ }
+
+ private static class EventHandler extends Handler {
+ private final WeakReference mWeakPlayer;
+
+ public EventHandler(NextPlayer mp, Looper looper) {
+ super(looper);
+ mWeakPlayer = new WeakReference<>(mp);
+ }
+
+ @Override
+ public void handleMessage(@NonNull Message msg) {
+ NextPlayer player = mWeakPlayer.get();
+ if (player == null) {
+ return;
+ }
+
+ switch (msg.what) {
+ case MSG_ON_PREPARED:
+ player.onPrepared();
+ return;
+
+ case MSG_ON_COMPLETED:
+ player.onComplete();
+ return;
+
+ case MSG_BUFFER_UPDATE:
+ return;
+
+ case MSG_SEEK_COMPLETED: {
+ player.onSeekComplete();
+ player.onInfo(MSG_SEEK_COMPLETED, msg.arg2);
+ return;
+ }
+
+ case MSG_SET_VIDEO_SIZE:
+ player.mVideoWidth = msg.arg1;
+ player.mVideoHeight = msg.arg2;
+ player.onVideoSizeChanged(player.mVideoWidth, player.mVideoHeight,
+ player.mVideoSarNum, player.mVideoSarDen);
+ return;
+
+ case MSG_ON_ERROR:
+ player.onError(msg.arg1, msg.arg2);
+ if (msg.arg2 >= 0) {
+ player.onComplete();
+ }
+ return;
+
+ case MSG_MEDIA_INFO:
+ switch (msg.arg1) {
+ case MSG_VIDEO_RENDER_START:
+ break;
+ case MSG_PLAY_URL_CHANGED:
+ player.onInfo(msg.arg1, (Integer) msg.obj);
+ return;
+ }
+ player.onInfo(msg.arg1, msg.arg2);
+ return;
+
+ case MSG_SET_VIDEO_SAR:
+ player.mVideoSarNum = msg.arg1;
+ player.mVideoSarDen = msg.arg2;
+ player.onVideoSizeChanged(player.mVideoWidth, player.mVideoHeight,
+ player.mVideoSarNum, player.mVideoSarDen);
+ break;
+
+ default:
+ break;
+ }
+ }
+ }
+
+ private static void postEventFromNative(Object weakThiz, int what, int arg1, int arg2, Object obj) {
+ if (weakThiz == null)
+ return;
+
+ NextPlayer mp = (NextPlayer) ((WeakReference>) weakThiz).get();
+ if (mp == null) {
+ return;
+ }
+
+ if (mp.mEventHandler != null) {
+ Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2);
+ mp.mEventHandler.sendMessage(m);
+ }
+ }
+
+ private static void onNativeLog(int level, String tag, byte[] logContent) {
+ if (level < gLogCallBackLevel) {
+ return;
+ }
+ String logStr = new String(logContent, StandardCharsets.UTF_8);
+ if (mNativeLogListener != null) {
+ mNativeLogListener.onLogOutput(level, tag, logStr);
+ } else {
+ Log.println(level, tag, logStr);
+ }
+ }
+
+ private static void log(int level, String log) {
+ if (level < gLogCallBackLevel) {
+ return;
+ }
+ if (mNativeLogListener != null) {
+ mNativeLogListener.onLogOutput(level, TAG, log);
+ } else {
+ Log.println(level, TAG, log);
+ }
+ }
+
+ public static void setNativeLogCallback(NativeLogCallback nativeLogCallback) {
+ mNativeLogListener = nativeLogCallback;
+ }
+
+ public interface NativeLogCallback {
+ void onLogOutput(int logLevel, String tag, String log);
+ }
+
+
+ private static native void native_init();
+
+ private native void native_setup(Object player);
+
+ private native void _setVideoSurface(Surface surface) throws IllegalStateException;
+
+ private native void _setHeaders(String headers);
+
+ private native void _setDataSource(String path)
+ throws IOException, IllegalArgumentException, SecurityException, IllegalStateException;
+
+ private native void _prepareAsync() throws IllegalStateException;
+
+ private native void _start() throws IllegalStateException;
+
+ private native void _stop() throws IllegalStateException;
+
+ private native void _pause() throws IllegalStateException;
+
+ private native void _setEnableMediaCodec(boolean enable);
+
+ private native void _setVideoCacheDir(String dir);
+
+ private native String _getAudioCodecInfo();
+
+ private native String _getVideoCodecInfo();
+
+ private native float _getVideoFileFps();
+
+ private native boolean _playing();
+
+ private native void _seekTo(long msec) throws IllegalStateException;
+
+ private native long _getCurrentPosition();
+
+ private native long _getDuration();
+
+ private native void _reset();
+
+ private native void _setVolume(float leftVolume, float rightVolume);
+
+ private native String _getPlayUrl();
+
+ private native void _setSpeed(float speed);
+
+ private native int _getVideoDecoder();
+
+ private native float _getVideoRenderFrameRate();
+
+ private native float _getVideoDecodeFrameRate();
+
+ private native long _getVideoCachedTime(); // ms
+
+ private native long _getAudioCachedTime();
+
+ private native long _getVideoCachedSize(); // byte
+
+ private native long _getAudioCachedSize();
+
+ private native long _getFileSize();
+
+ private native long _getBitRate();
+
+ private native long _getSeekCostTime();
+
+ private native int _getPlayerState();
+
+ private native void _release();
+
+}
diff --git a/NextPlayer/src/main/java/com/frank/next/renderview/IRenderView.java b/NextPlayer/src/main/java/com/frank/next/renderview/IRenderView.java
new file mode 100644
index 00000000..a7f4ec44
--- /dev/null
+++ b/NextPlayer/src/main/java/com/frank/next/renderview/IRenderView.java
@@ -0,0 +1,55 @@
+package com.frank.next.renderview;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.View;
+
+import com.frank.next.player.IPlayer;
+
+public interface IRenderView {
+ int RENDER_MODE_ASPECT_FIT = 0; // without clip
+ int RENDER_MODE_ASPECT_FILL = 1; // may clip
+ int RENDER_MODE_16_9 = 2;
+ int RENDER_MODE_4_3 = 3;
+ int RENDER_MODE_WRAP = 4;
+ int RENDER_MODE_MATCH = 5;
+
+ View getView();
+
+ boolean waitForResize();
+
+ void setVideoRotation(int degree);
+
+ void setAspectRatio(int aspectRatio);
+
+ void addRenderCallback(IRenderCallback callback);
+
+ void removeRenderCallback(IRenderCallback callback);
+
+ void setVideoSize(int videoWidth, int videoHeight);
+
+ void setVideoAspectRatio(int videoSarNum, int videoSarDen);
+
+ interface ISurfaceHolder {
+
+ Surface openSurface();
+
+ void bindPlayer(IPlayer mp);
+
+ IRenderView getRenderView();
+
+ SurfaceHolder getSurfaceHolder();
+
+ SurfaceTexture getSurfaceTexture();
+ }
+
+ interface IRenderCallback {
+
+ void onSurfaceCreated(ISurfaceHolder holder, int width, int height);
+
+ void onSurfaceChanged(ISurfaceHolder holder, int format, int width, int height);
+
+ void onSurfaceDestroyed(ISurfaceHolder holder);
+ }
+}
diff --git a/NextPlayer/src/main/java/com/frank/next/renderview/MeasureHelper.java b/NextPlayer/src/main/java/com/frank/next/renderview/MeasureHelper.java
new file mode 100644
index 00000000..daa94af9
--- /dev/null
+++ b/NextPlayer/src/main/java/com/frank/next/renderview/MeasureHelper.java
@@ -0,0 +1,169 @@
+package com.frank.next.renderview;
+
+import android.view.View;
+
+import java.lang.ref.WeakReference;
+
+
+public final class MeasureHelper {
+
+ private int mVideoWidth;
+ private int mVideoHeight;
+ private int mVideoSarNum;
+ private int mVideoSarDen;
+ private int mMeasuredWidth;
+ private int mMeasuredHeight;
+ private int mVideoRotationDegree;
+ private int mCurrentAspectRatio = IRenderView.RENDER_MODE_ASPECT_FIT;
+
+ private final WeakReference mWeakView;
+
+ public MeasureHelper(View view) {
+ mWeakView = new WeakReference<>(view);
+ }
+
+ public View getView() {
+ return mWeakView.get();
+ }
+
+ public int getMeasuredWidth() {
+ return mMeasuredWidth;
+ }
+
+ public int getMeasuredHeight() {
+ return mMeasuredHeight;
+ }
+
+ public void setAspectRatio(int aspectRatio) {
+ mCurrentAspectRatio = aspectRatio;
+ }
+
+ public void setVideoSize(int videoWidth, int videoHeight) {
+ mVideoWidth = videoWidth;
+ mVideoHeight = videoHeight;
+ }
+
+ public void setVideoRotation(int videoRotationDegree) {
+ mVideoRotationDegree = videoRotationDegree;
+ }
+
+ public void setVideoSampleAspectRatio(int videoSarNum, int videoSarDen) {
+ mVideoSarNum = videoSarNum;
+ mVideoSarDen = videoSarDen;
+ }
+
+ public void doMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ if (mVideoRotationDegree == 90 || mVideoRotationDegree == 270) {
+ int tempSpec = widthMeasureSpec;
+ widthMeasureSpec = heightMeasureSpec;
+ heightMeasureSpec = tempSpec;
+ }
+
+ int width = View.getDefaultSize(mVideoWidth, widthMeasureSpec);
+ int height = View.getDefaultSize(mVideoHeight, heightMeasureSpec);
+
+ if (mCurrentAspectRatio == IRenderView.RENDER_MODE_MATCH) {
+ width = widthMeasureSpec;
+ height = heightMeasureSpec;
+ } else if (mVideoWidth > 0 && mVideoHeight > 0) {
+ int widthSpecMode = View.MeasureSpec.getMode(widthMeasureSpec);
+ int widthSpecSize = View.MeasureSpec.getSize(widthMeasureSpec);
+ int heightSpecMode = View.MeasureSpec.getMode(heightMeasureSpec);
+ int heightSpecSize = View.MeasureSpec.getSize(heightMeasureSpec);
+
+ if (widthSpecMode == View.MeasureSpec.AT_MOST && heightSpecMode == View.MeasureSpec.AT_MOST) {
+ float specAspectRatio = (float) widthSpecSize / (float) heightSpecSize;
+ float displayAspectRatio;
+ switch (mCurrentAspectRatio) {
+ case IRenderView.RENDER_MODE_16_9:
+ displayAspectRatio = 16.0f / 9.0f;
+ if (mVideoRotationDegree == 90 || mVideoRotationDegree == 270)
+ displayAspectRatio = 1.0f / displayAspectRatio;
+ break;
+ case IRenderView.RENDER_MODE_4_3:
+ displayAspectRatio = 4.0f / 3.0f;
+ if (mVideoRotationDegree == 90 || mVideoRotationDegree == 270)
+ displayAspectRatio = 1.0f / displayAspectRatio;
+ break;
+ case IRenderView.RENDER_MODE_ASPECT_FIT:
+ case IRenderView.RENDER_MODE_ASPECT_FILL:
+ case IRenderView.RENDER_MODE_WRAP:
+ default:
+ displayAspectRatio = (float) mVideoWidth / (float) mVideoHeight;
+ if (mVideoSarNum > 0 && mVideoSarDen > 0)
+ displayAspectRatio = displayAspectRatio * mVideoSarNum / mVideoSarDen;
+ break;
+ }
+
+ boolean maybeWider = displayAspectRatio > specAspectRatio;
+
+ switch (mCurrentAspectRatio) {
+ case IRenderView.RENDER_MODE_ASPECT_FIT:
+ case IRenderView.RENDER_MODE_16_9:
+ case IRenderView.RENDER_MODE_4_3:
+ if (maybeWider) {
+ width = widthSpecSize;
+ height = (int) (width / displayAspectRatio);
+ } else {
+ width = (int) (height * displayAspectRatio);
+ height = heightSpecSize;
+ }
+ break;
+ case IRenderView.RENDER_MODE_ASPECT_FILL:
+ if (maybeWider) {
+ height = heightSpecSize;
+ width = (int) (height * displayAspectRatio);
+ } else {
+ width = widthSpecSize;
+ height = (int) (width / displayAspectRatio);
+ }
+ break;
+ case IRenderView.RENDER_MODE_WRAP:
+ default:
+ if (maybeWider) {
+ width = Math.min(mVideoWidth, widthSpecSize);
+ height = (int) (width / displayAspectRatio);
+ } else {
+ height = Math.min(mVideoHeight, heightSpecSize);
+ width = (int) (height * displayAspectRatio);
+ }
+ break;
+ }
+ } else if (widthSpecMode == View.MeasureSpec.EXACTLY && heightSpecMode == View.MeasureSpec.EXACTLY) {
+ width = widthSpecSize;
+ height = heightSpecSize;
+ if (mVideoWidth * height < width * mVideoHeight) {
+ width = height * mVideoWidth / mVideoHeight;
+ } else if (mVideoWidth * height > width * mVideoHeight) {
+ height = width * mVideoHeight / mVideoWidth;
+ }
+ } else if (widthSpecMode == View.MeasureSpec.EXACTLY) {
+ width = widthSpecSize;
+ height = width * mVideoHeight / mVideoWidth;
+ if (heightSpecMode == View.MeasureSpec.AT_MOST && height > heightSpecSize) {
+ height = heightSpecSize;
+ }
+ } else if (heightSpecMode == View.MeasureSpec.EXACTLY) {
+ width = height * mVideoWidth / mVideoHeight;
+ height = heightSpecSize;
+ if (widthSpecMode == View.MeasureSpec.AT_MOST && width > widthSpecSize) {
+ width = widthSpecSize;
+ }
+ } else {
+ width = mVideoWidth;
+ height = mVideoHeight;
+ if (heightSpecMode == View.MeasureSpec.AT_MOST && height > heightSpecSize) {
+ width = height * mVideoWidth / mVideoHeight;
+ height = heightSpecSize;
+ }
+ if (widthSpecMode == View.MeasureSpec.AT_MOST && width > widthSpecSize) {
+ width = widthSpecSize;
+ height = width * mVideoHeight / mVideoWidth;
+ }
+ }
+ }
+ mMeasuredWidth = width;
+ mMeasuredHeight = height;
+ }
+
+}
diff --git a/NextPlayer/src/main/java/com/frank/next/renderview/SurfaceRenderView.java b/NextPlayer/src/main/java/com/frank/next/renderview/SurfaceRenderView.java
new file mode 100644
index 00000000..b94e2606
--- /dev/null
+++ b/NextPlayer/src/main/java/com/frank/next/renderview/SurfaceRenderView.java
@@ -0,0 +1,226 @@
+package com.frank.next.renderview;
+
+import android.content.Context;
+import android.graphics.SurfaceTexture;
+import android.util.AttributeSet;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.accessibility.AccessibilityEvent;
+import android.view.accessibility.AccessibilityNodeInfo;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import com.frank.next.player.IPlayer;
+
+import java.lang.ref.WeakReference;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Render view backed by a SurfaceView.
+ */
+
+public class SurfaceRenderView extends SurfaceView implements IRenderView {
+ private MeasureHelper mMeasureHelper;
+
+ public SurfaceRenderView(Context context) {
+ super(context);
+ initView();
+ }
+
+ public SurfaceRenderView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ initView();
+ }
+
+ public SurfaceRenderView(Context context, AttributeSet attrs, int defStyleAttr) {
+ super(context, attrs, defStyleAttr);
+ initView();
+ }
+
+ private void initView() {
+ mMeasureHelper = new MeasureHelper(this);
+ mSurfaceCallback = new SurfaceCallback(this);
+ getHolder().addCallback(mSurfaceCallback);
+ getHolder().setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
+ }
+
+ @Override
+ public View getView() {
+ return this;
+ }
+
+ @Override
+ public boolean waitForResize() {
+ return true;
+ }
+
+ @Override
+ public void setVideoSize(int videoWidth, int videoHeight) {
+ if (videoWidth > 0 && videoHeight > 0) {
+ mMeasureHelper.setVideoSize(videoWidth, videoHeight);
+ getHolder().setFixedSize(videoWidth, videoHeight);
+ requestLayout();
+ }
+ }
+
+ @Override
+ public void setAspectRatio(int aspectRatio) {
+ mMeasureHelper.setAspectRatio(aspectRatio);
+ requestLayout();
+ }
+
+ @Override
+ public void setVideoAspectRatio(int videoSarNum, int videoSarDen) {
+ if (videoSarNum > 0 && videoSarDen > 0) {
+ mMeasureHelper.setVideoSampleAspectRatio(videoSarNum, videoSarDen);
+ requestLayout();
+ }
+ }
+
+ @Override
+ public void setVideoRotation(int degree) {
+
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ mMeasureHelper.doMeasure(widthMeasureSpec, heightMeasureSpec);
+ setMeasuredDimension(mMeasureHelper.getMeasuredWidth(), mMeasureHelper.getMeasuredHeight());
+ }
+
+ private static final class InternalSurfaceHolder implements IRenderView.ISurfaceHolder {
+ private final SurfaceHolder mSurfaceHolder;
+ private final SurfaceRenderView mSurfaceView;
+
+ public InternalSurfaceHolder(@NonNull SurfaceRenderView surfaceView,
+ @Nullable SurfaceHolder surfaceHolder) {
+ mSurfaceView = surfaceView;
+ mSurfaceHolder = surfaceHolder;
+ }
+
+ public void bindPlayer(IPlayer mp) {
+ if (mp != null) {
+ mp.setDisplay(mSurfaceHolder);
+ }
+ }
+
+ @NonNull
+ @Override
+ public IRenderView getRenderView() {
+ return mSurfaceView;
+ }
+
+ @Nullable
+ @Override
+ public SurfaceHolder getSurfaceHolder() {
+ return mSurfaceHolder;
+ }
+
+ @Nullable
+ @Override
+ public SurfaceTexture getSurfaceTexture() {
+ return null;
+ }
+
+ @Nullable
+ @Override
+ public Surface openSurface() {
+ if (mSurfaceHolder == null)
+ return null;
+ return mSurfaceHolder.getSurface();
+ }
+ }
+
+ @Override
+ public void addRenderCallback(IRenderCallback callback) {
+ mSurfaceCallback.addRenderCallback(callback);
+ }
+
+ @Override
+ public void removeRenderCallback(IRenderCallback callback) {
+ mSurfaceCallback.removeRenderCallback(callback);
+ }
+
+ private SurfaceCallback mSurfaceCallback;
+
+ private static final class SurfaceCallback implements SurfaceHolder.Callback {
+ private SurfaceHolder mSurfaceHolder;
+ private boolean mIsFormatChanged;
+ private int mFormat;
+ private int mWidth;
+ private int mHeight;
+
+ private final WeakReference mWeakSurfaceView;
+ private final Map mRenderCallbackMap = new ConcurrentHashMap<>();
+
+ public SurfaceCallback(@NonNull SurfaceRenderView surfaceView) {
+ mWeakSurfaceView = new WeakReference<>(surfaceView);
+ }
+
+ public void addRenderCallback(@NonNull IRenderCallback callback) {
+ mRenderCallbackMap.put(callback, callback);
+ ISurfaceHolder surfaceHolder = null;
+ if (mSurfaceHolder != null) {
+ surfaceHolder = new InternalSurfaceHolder(mWeakSurfaceView.get(), mSurfaceHolder);
+ callback.onSurfaceCreated(surfaceHolder, mWidth, mHeight);
+ }
+
+ if (mIsFormatChanged) {
+ if (surfaceHolder == null)
+ surfaceHolder = new InternalSurfaceHolder(mWeakSurfaceView.get(), mSurfaceHolder);
+ callback.onSurfaceChanged(surfaceHolder, mFormat, mWidth, mHeight);
+ }
+ }
+
+ public void removeRenderCallback(@NonNull IRenderCallback callback) {
+ mRenderCallbackMap.remove(callback);
+ }
+
+ @Override
+ public void surfaceCreated(SurfaceHolder holder) {
+ mSurfaceHolder = holder;
+ ISurfaceHolder surfaceHolder = new InternalSurfaceHolder(mWeakSurfaceView.get(), mSurfaceHolder);
+ for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
+ renderCallback.onSurfaceCreated(surfaceHolder, 0, 0);
+ }
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ mSurfaceHolder = null;
+ ISurfaceHolder surfaceHolder = new InternalSurfaceHolder(mWeakSurfaceView.get(), mSurfaceHolder);
+ for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
+ renderCallback.onSurfaceDestroyed(surfaceHolder);
+ }
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ mWidth = width;
+ mHeight = height;
+ mFormat = format;
+ mSurfaceHolder = holder;
+ mIsFormatChanged = true;
+ ISurfaceHolder surfaceHolder = new InternalSurfaceHolder(mWeakSurfaceView.get(), mSurfaceHolder);
+ for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
+ renderCallback.onSurfaceChanged(surfaceHolder, format, width, height);
+ }
+ }
+ }
+
+ @Override
+ public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
+ super.onInitializeAccessibilityEvent(event);
+ event.setClassName(SurfaceRenderView.class.getName());
+ }
+
+ @Override
+ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
+ super.onInitializeAccessibilityNodeInfo(info);
+ info.setClassName(SurfaceRenderView.class.getName());
+ }
+}
diff --git a/NextPlayer/src/main/java/com/frank/next/renderview/TextureRenderView.java b/NextPlayer/src/main/java/com/frank/next/renderview/TextureRenderView.java
new file mode 100644
index 00000000..e570bf2b
--- /dev/null
+++ b/NextPlayer/src/main/java/com/frank/next/renderview/TextureRenderView.java
@@ -0,0 +1,281 @@
+package com.frank.next.renderview;
+
+import android.content.Context;
+import android.graphics.SurfaceTexture;
+import android.util.AttributeSet;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.TextureView;
+import android.view.View;
+import android.view.accessibility.AccessibilityEvent;
+import android.view.accessibility.AccessibilityNodeInfo;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+
+import com.frank.next.player.IPlayer;
+
+import java.lang.ref.WeakReference;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Render view backed by a TextureView.
+ */
+public class TextureRenderView extends TextureView implements IRenderView {
+
+ private static final String TAG = TextureRenderView.class.getSimpleName();
+
+ private MeasureHelper mMeasureHelper;
+
+ public TextureRenderView(Context context) {
+ super(context);
+ initView();
+ }
+
+ public TextureRenderView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ initView();
+ }
+
+ public TextureRenderView(Context context, AttributeSet attrs, int defStyleAttr) {
+ super(context, attrs, defStyleAttr);
+ initView();
+ }
+
+ private void initView() {
+ mMeasureHelper = new MeasureHelper(this);
+ mSurfaceCallback = new SurfaceCallback(this);
+ setSurfaceTextureListener(mSurfaceCallback);
+ }
+
+ @Override
+ public View getView() {
+ return this;
+ }
+
+ @Override
+ public boolean waitForResize() {
+ return false;
+ }
+
+ @Override
+ protected void onDetachedFromWindow() {
+ mSurfaceCallback.willDetachFromWindow();
+ super.onDetachedFromWindow();
+ mSurfaceCallback.didDetachFromWindow();
+ }
+
+ @Override
+ public void setVideoSize(int videoWidth, int videoHeight) {
+ if (videoWidth > 0 && videoHeight > 0) {
+ mMeasureHelper.setVideoSize(videoWidth, videoHeight);
+ requestLayout();
+ }
+ }
+
+ @Override
+ public void setVideoAspectRatio(int videoSarNum, int videoSarDen) {
+ if (videoSarNum > 0 && videoSarDen > 0) {
+ mMeasureHelper.setVideoSampleAspectRatio(videoSarNum, videoSarDen);
+ requestLayout();
+ }
+ }
+
+ @Override
+ public void setVideoRotation(int degree) {
+ mMeasureHelper.setVideoRotation(degree);
+ setRotation(degree);
+ }
+
+ @Override
+ public void setAspectRatio(int aspectRatio) {
+ mMeasureHelper.setAspectRatio(aspectRatio);
+ requestLayout();
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ mMeasureHelper.doMeasure(widthMeasureSpec, heightMeasureSpec);
+ setMeasuredDimension(mMeasureHelper.getMeasuredWidth(), mMeasureHelper.getMeasuredHeight());
+ }
+
+ public IRenderView.ISurfaceHolder getSurfaceHolder() {
+ return new VideoSurfaceHolder(this, mSurfaceCallback.mSurfaceTexture);
+ }
+
+ private static final class VideoSurfaceHolder implements IRenderView.ISurfaceHolder {
+ private final TextureRenderView mTextureView;
+ private final SurfaceTexture mSurfaceTexture;
+
+ public VideoSurfaceHolder(@NonNull TextureRenderView textureView, @Nullable SurfaceTexture surfaceTexture) {
+ mTextureView = textureView;
+ mSurfaceTexture = surfaceTexture;
+ }
+
+ public void bindPlayer(IPlayer mp) {
+ if (mp == null)
+ return;
+ mp.setSurface(openSurface());
+ }
+
+ @NonNull
+ @Override
+ public IRenderView getRenderView() {
+ return mTextureView;
+ }
+
+ @Nullable
+ @Override
+ public SurfaceHolder getSurfaceHolder() {
+ return null;
+ }
+
+ @Nullable
+ @Override
+ public SurfaceTexture getSurfaceTexture() {
+ return mSurfaceTexture;
+ }
+
+ @Nullable
+ @Override
+ public Surface openSurface() {
+ if (mSurfaceTexture == null)
+ return null;
+ return new Surface(mSurfaceTexture);
+ }
+ }
+
+ @Override
+ public void addRenderCallback(IRenderCallback callback) {
+ mSurfaceCallback.addRenderCallback(callback);
+ }
+
+ @Override
+ public void removeRenderCallback(IRenderCallback callback) {
+ mSurfaceCallback.removeRenderCallback(callback);
+ }
+
+ private SurfaceCallback mSurfaceCallback;
+
+ private static final class SurfaceCallback implements SurfaceTextureListener {
+ private int mWidth;
+ private int mHeight;
+ private boolean mFormatChanged;
+ private SurfaceTexture mSurfaceTexture;
+ private boolean mOwnSurfaceTexture = true;
+ private boolean mWillDetachFromWindow = false;
+ private boolean mDidDetachFromWindow = false;
+
+ private final WeakReference mWeakRenderView;
+ private final Map mRenderCallbackMap = new ConcurrentHashMap();
+
+ public SurfaceCallback(@NonNull TextureRenderView renderView) {
+ mWeakRenderView = new WeakReference(renderView);
+ }
+
+ public void setOwnSurfaceTexture(boolean ownSurfaceTexture) {
+ mOwnSurfaceTexture = ownSurfaceTexture;
+ }
+
+ public void addRenderCallback(@NonNull IRenderCallback callback) {
+ mRenderCallbackMap.put(callback, callback);
+ ISurfaceHolder surfaceHolder = null;
+ if (mSurfaceTexture != null) {
+ surfaceHolder = new VideoSurfaceHolder(mWeakRenderView.get(), mSurfaceTexture);
+ callback.onSurfaceCreated(surfaceHolder, mWidth, mHeight);
+ }
+
+ if (mFormatChanged) {
+ if (surfaceHolder == null)
+ surfaceHolder = new VideoSurfaceHolder(mWeakRenderView.get(), mSurfaceTexture);
+ callback.onSurfaceChanged(surfaceHolder, 0, mWidth, mHeight);
+ }
+ }
+
+ public void removeRenderCallback(@NonNull IRenderCallback callback) {
+ mRenderCallbackMap.remove(callback);
+ }
+
+ @Override
+ public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
+ mSurfaceTexture = surface;
+
+ ISurfaceHolder surfaceHolder = new VideoSurfaceHolder(mWeakRenderView.get(), surface);
+ for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
+ renderCallback.onSurfaceCreated(surfaceHolder, 0, 0);
+ }
+ }
+
+ @Override
+ public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
+ mWidth = width;
+ mHeight = height;
+ mFormatChanged = true;
+ mSurfaceTexture = surface;
+
+ ISurfaceHolder surfaceHolder = new VideoSurfaceHolder(mWeakRenderView.get(), surface);
+ for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
+ renderCallback.onSurfaceChanged(surfaceHolder, 0, width, height);
+ }
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {
+ }
+
+ @Override
+ public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
+ ISurfaceHolder surfaceHolder = new VideoSurfaceHolder(mWeakRenderView.get(), surface);
+ for (IRenderCallback renderCallback : mRenderCallbackMap.keySet()) {
+ renderCallback.onSurfaceDestroyed(surfaceHolder);
+ }
+ return mOwnSurfaceTexture;
+ }
+
+ public void willDetachFromWindow() {
+ mWillDetachFromWindow = true;
+ }
+
+ public void didDetachFromWindow() {
+ mDidDetachFromWindow = true;
+ }
+
+ public void releaseSurfaceTexture(SurfaceTexture surfaceTexture) {
+ if (surfaceTexture == null) {
+ return;
+ }
+ if (mDidDetachFromWindow) {
+ if (surfaceTexture != mSurfaceTexture) {
+ surfaceTexture.release();
+ } else if (!mOwnSurfaceTexture) {
+ surfaceTexture.release();
+ }
+ } else if (mWillDetachFromWindow) {
+ if (surfaceTexture != mSurfaceTexture) {
+ surfaceTexture.release();
+ } else if (!mOwnSurfaceTexture) {
+ setOwnSurfaceTexture(true);
+ }
+ } else {
+ if (surfaceTexture != mSurfaceTexture) {
+ surfaceTexture.release();
+ } else if (!mOwnSurfaceTexture) {
+ setOwnSurfaceTexture(true);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
+ super.onInitializeAccessibilityEvent(event);
+ event.setClassName(TextureRenderView.class.getName());
+ }
+
+ @Override
+ public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
+ super.onInitializeAccessibilityNodeInfo(info);
+ info.setClassName(TextureRenderView.class.getName());
+ }
+}
diff --git a/app/build.gradle b/app/build.gradle
index eee2e2d3..0bb156b0 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -5,6 +5,8 @@ apply plugin: 'kotlin-kapt'
android {
compileSdkVersion rootProject.ext.compileSdkVersion
buildToolsVersion rootProject.ext.buildToolsVersion
+ namespace "com.frank.ffmpeg"
+
defaultConfig {
applicationId "com.frank.ffmpeg"
minSdkVersion rootProject.ext.minSdkVersion
@@ -46,12 +48,18 @@ android {
targetCompatibility = JavaVersion.VERSION_1_8
}
+ kotlinOptions {
+ jvmTarget = JavaVersion.VERSION_1_8
+ }
+
packagingOptions {
exclude 'META-INF/proguard/coroutines.pro'
- pickFirst 'lib/armeabi-v7a/libmp3lame.so'
- pickFirst 'lib/arm64-v8a/libmp3lame.so'
pickFirst 'lib/armeabi-v7a/libffmpeg.so'
pickFirst 'lib/arm64-v8a/libffmpeg.so'
+
+ jniLibs {
+ useLegacyPackaging = true
+ }
}
packagingOptions {
@@ -69,8 +77,6 @@ dependencies {
implementation "androidx.core:core-ktx:$rootProject.core_ktx"
implementation "androidx.lifecycle:lifecycle-viewmodel-ktx:$rootProject.lifecycle_ktx"
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
- implementation project(':AndroidMedia')
- //implementation "libmp3" if you need mp3-lite module
-// implementation project(':libmp3')
implementation project(':CameraFilter')
+ implementation project(':NextPlayer')
}
diff --git a/app/libs/arm64-v8a/libffmpeg.so b/app/libs/arm64-v8a/libffmpeg.so
index a5e4816c..e2784638 100755
Binary files a/app/libs/arm64-v8a/libffmpeg.so and b/app/libs/arm64-v8a/libffmpeg.so differ
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index a1fe186c..07cfdb14 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -11,7 +11,8 @@
-
+
@@ -38,9 +39,7 @@
-
-
diff --git a/app/src/main/cpp/CMakeLists.txt b/app/src/main/cpp/CMakeLists.txt
index 7a0d9962..6a217a42 100644
--- a/app/src/main/cpp/CMakeLists.txt
+++ b/app/src/main/cpp/CMakeLists.txt
@@ -11,7 +11,6 @@ cmake_minimum_required(VERSION 3.4.1)
# Gradle automatically packages shared libraries with your APK.
set(SRC_FFMPEG
- ffmpeg_cmd.c
ffmpeg/cmdutils.c
ffmpeg/ffmpeg.c
ffmpeg/ffmpeg_demux.c
@@ -51,20 +50,12 @@ add_library( # Sets the name of the library.
${SRC_FFMPEG}
${SRC_VISUALIZER}
${SRC_METADATA}
-# ${SRC_FFPLAYER}
+ ffmpeg_cmd.c
video_filter.c
- ffprobe_cmd.cpp
- video_cutting.cpp
- yuv/yuv_converter.cpp
- pcm/pcm_process.cpp
media_transcode.cpp
- ff_audio_resample.cpp
- common_media_jni.cpp
ff_audio_player.cpp
audio_player_jni.cpp
- ff_rtmp_pusher.cpp
- ffmpeg_pusher_jni.cpp
- )
+)
add_library( ffmpeg
SHARED
@@ -94,6 +85,8 @@ find_library( # Sets the name of the path variable.
log-lib
log )
+target_link_options(media-handle PRIVATE "-Wl,-z,max-page-size=16384")
+
target_link_libraries( # Specifies the target library.
media-handle
ffmpeg
diff --git a/app/src/main/cpp/audio_player_jni.cpp b/app/src/main/cpp/audio_player_jni.cpp
index e873b245..fa64098b 100644
--- a/app/src/main/cpp/audio_player_jni.cpp
+++ b/app/src/main/cpp/audio_player_jni.cpp
@@ -103,11 +103,3 @@ AUDIO_PLAYER_FUNC(void, native_1release, long context) {
audioPlayer->setExit(true);
}
-
-extern "C"
-JNIEXPORT jstring JNICALL
-Java_com_frank_ffmpeg_FFmpegCmd_getInfo(JNIEnv *env, jclass clazz) {
- const char* ffmpeg_version = av_version_info();
- LOGE("Version","ffmpeg version: %s",ffmpeg_version);
- return env->NewStringUTF( ffmpeg_version);
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/common_media_jni.cpp b/app/src/main/cpp/common_media_jni.cpp
deleted file mode 100644
index f99f30e6..00000000
--- a/app/src/main/cpp/common_media_jni.cpp
+++ /dev/null
@@ -1,20 +0,0 @@
-//
-// Created by xu fulong on 2022/9/7.
-//
-
-#include
-
-#include "ff_audio_resample.h"
-
-COMMON_MEDIA_FUNC(int, audioResample, jstring srcFile, jstring dstFile, int sampleRate) {
- const char *src_file = env->GetStringUTFChars(srcFile, JNI_FALSE);
- const char *dst_file = env->GetStringUTFChars(dstFile, JNI_FALSE);
-
- auto *audioResample = new FFAudioResample();
- int ret = audioResample->resampling(src_file, dst_file, sampleRate);
-
- delete audioResample;
- env->ReleaseStringUTFChars(dstFile, dst_file);
- env->ReleaseStringUTFChars(srcFile, src_file);
- return ret;
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/ff_audio_resample.cpp b/app/src/main/cpp/ff_audio_resample.cpp
deleted file mode 100644
index 04b571ce..00000000
--- a/app/src/main/cpp/ff_audio_resample.cpp
+++ /dev/null
@@ -1,386 +0,0 @@
-//
-// Created by xu fulong on 2022/7/12.
-//
-
-#include "ff_audio_resample.h"
-
-#define ALOGE(Format, ...) LOGE("audio_resample", Format, ##__VA_ARGS__)
-
-
-FFAudioResample::FFAudioResample() {
- resample = new AudioResample();
-}
-
-FFAudioResample::~FFAudioResample() {
- delete resample;
-}
-
-static int initOutputFrame(AudioResample **pResample) {
- AudioResample *ar = *pResample;
-
- AVFrame *frame = av_frame_alloc();
- frame->format = ar->outCodecCtx->sample_fmt;
- frame->nb_samples = ar->outCodecCtx->frame_size;
- frame->sample_rate = ar->outCodecCtx->sample_rate;
- frame->channel_layout = ar->outCodecCtx->channel_layout;
-
- int ret = av_frame_get_buffer(frame, 0);
- ar->outFrame = frame;
- *pResample = ar;
- return ret;
-}
-
-static int initResample(AudioResample **pResample) {
- AudioResample *ar = *pResample;
- SwrContext *context = swr_alloc_set_opts(nullptr,
- av_get_default_channel_layout(ar->outCodecCtx->channels),
- ar->outCodecCtx->sample_fmt,
- ar->outCodecCtx->sample_rate,
- av_get_default_channel_layout(ar->inCodecCtx->channels),
- ar->inCodecCtx->sample_fmt,
- ar->inCodecCtx->sample_rate,
- 0, nullptr);
- int ret = swr_init(context);
- ar->resampleCtx = context;
- *pResample = ar;
- return ret;
-}
-
-static int initConvertedSamples(AudioResample **pResample, uint8_t ***converted_input_samples, int frame_size) {
- int ret;
- AudioResample *ar = *pResample;
- *converted_input_samples = (uint8_t **) calloc(ar->outCodecCtx->channels, sizeof(**converted_input_samples));
-
- if ((ret = av_samples_alloc(*converted_input_samples, nullptr,
- ar->outCodecCtx->channels,
- frame_size,
- ar->outCodecCtx->sample_fmt, 0)) < 0) {
- ALOGE("av_samples_alloc error:%s", av_err2str(ret));
- av_freep(&(*converted_input_samples)[0]);
- free(*converted_input_samples);
- return ret;
- }
- return 0;
-}
-
-int FFAudioResample::openInputFile(const char *filename) {
- int ret;
- const AVCodec *input_codec;
- AVStream *audio_stream = nullptr;
-
- if ((ret = avformat_open_input(&resample->inFormatCtx, filename, nullptr,nullptr)) < 0) {
- ALOGE("Could not open input file:%s\n", av_err2str(ret));
- return ret;
- }
- avformat_find_stream_info(resample->inFormatCtx, nullptr);
-
- for (int i = 0; i < resample->inFormatCtx->nb_streams; ++i) {
- if (resample->inFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
- audio_stream = resample->inFormatCtx->streams[i];
- }
- }
- if (!(input_codec = avcodec_find_decoder(audio_stream->codecpar->codec_id))) {
- ALOGE("Could not find input codec:%s\n", avcodec_get_name(audio_stream->codecpar->codec_id));
- return -1;
- }
-
- resample->inCodecCtx = avcodec_alloc_context3(input_codec);
- avcodec_parameters_to_context(resample->inCodecCtx, audio_stream->codecpar);
-
- if ((ret = avcodec_open2(resample->inCodecCtx, input_codec, nullptr)) < 0) {
- ALOGE("Could not open input codec (error:%s)\n", av_err2str(ret));
- }
- resample->inFrame = av_frame_alloc();
-
- return 0;
-}
-
-int FFAudioResample::openOutputFile(const char *filename, int sample_rate) {
- AVIOContext *output_io_context = nullptr;
- const AVCodec *output_codec;
- int ret;
-
- if ((ret = avio_open(&output_io_context, filename, AVIO_FLAG_WRITE)) < 0) {
- ALOGE("Could not open output file:%s\n", av_err2str(ret));
- return ret;
- }
-
- resample->outFormatCtx = avformat_alloc_context();
- resample->outFormatCtx->pb = output_io_context;
- resample->outFormatCtx->url = av_strdup(filename);
- resample->outFormatCtx->oformat = av_guess_format(nullptr, filename,nullptr);
- if (!(resample->outFormatCtx->oformat)) {
- ALOGE("Could not find output file format\n");
- return -1;
- }
-
- /* Find the encoder to be used by its name. */
- if (!(output_codec = avcodec_find_encoder(resample->inCodecCtx->codec_id))) {
- ALOGE( "Could not find encoder=%s\n", resample->inCodecCtx->codec->name);
- return -1;
- }
-
- /* Create a new audio stream in the output file container. */
- AVStream *stream = avformat_new_stream(resample->outFormatCtx, nullptr);
-
- resample->outCodecCtx = avcodec_alloc_context3(output_codec);
-
- /* Set the basic encoder parameters.*/
- resample->outCodecCtx->channels = resample->inCodecCtx->channels;
- resample->outCodecCtx->channel_layout = av_get_default_channel_layout(resample->inCodecCtx->channels);
- resample->outCodecCtx->sample_rate = sample_rate;
- resample->outCodecCtx->sample_fmt = output_codec->sample_fmts[0];
-
- /* Allow the use of the experimental AAC encoder. */
- resample->outCodecCtx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
-
- /* Set the sample rate for the container. */
- stream->time_base.den = sample_rate;
- stream->time_base.num = 1;
-
- /* Some container formats (like MP4) require global headers to be present.
- * Mark the encoder so that it behaves accordingly. */
- if (resample->outFormatCtx->oformat->flags & AVFMT_GLOBALHEADER)
- resample->outCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
-
- /* Open the encoder for the audio stream to use it later. */
- if ((ret = avcodec_open2(resample->outCodecCtx, output_codec, nullptr)) < 0) {
- ALOGE("Could not open output codec (error:%s)\n", av_err2str(ret));
- return ret;
- }
-
- avcodec_parameters_from_context(stream->codecpar, resample->outCodecCtx);
- return 0;
-}
-
-int FFAudioResample::decodeAudioFrame(AVFrame *frame, int *data_present, int *finished) {
- int ret;
-
- if ((ret = av_read_frame(resample->inFormatCtx, &resample->inPacket)) < 0) {
- if (ret == AVERROR_EOF)
- *finished = 1;
- else {
- ALOGE("Could not read frame (error:%s)\n", av_err2str(ret));
- return ret;
- }
- }
- if (resample->inFormatCtx->streams[resample->inPacket.stream_index]->codecpar->codec_type
- != AVMEDIA_TYPE_AUDIO) {
- ret = 0;
- ALOGE("isn't audio packet, skip it...");
- goto cleanup;
- }
- /* Send the audio frame stored in the temporary packet to the decoder.*/
- if ((ret = avcodec_send_packet(resample->inCodecCtx, &resample->inPacket)) < 0) {
- ALOGE("Could not send packet for decoding (error:%s)\n", av_err2str(ret));
- return ret;
- }
- /* Receive one frame from the decoder. */
- ret = avcodec_receive_frame(resample->inCodecCtx, frame);
- if (ret == AVERROR(EAGAIN)) {
- ret = 0;
- goto cleanup;
- } else if (ret == AVERROR_EOF) {
- *finished = 1;
- ret = 0;
- goto cleanup;
- } else if (ret < 0) {
- ALOGE("Could not decode frame (error:%s)\n", av_err2str(ret));
- goto cleanup;
- } else {
- *data_present = 1;
- goto cleanup;
- }
-
-cleanup:
- av_packet_unref(&resample->inPacket);
- return ret;
-}
-
-/**
- * Read one audio frame from the input file, decode, convert and store
- * it in the FIFO buffer.
- *
- */
-int FFAudioResample::decodeAndConvert(int *finished) {
- uint8_t **converted_dst_samples = nullptr;
- int data_present = 0;
- int ret = AVERROR_EXIT;
-
- /* Decode one frame worth of audio samples. */
- if (decodeAudioFrame(resample->inFrame, &data_present, finished))
- goto cleanup;
- if (*finished) {
- ret = 0;
- goto cleanup;
- }
- /* If there is decoded data, convert and store it. */
- if (data_present) {
- int dst_nb_samples = (int) av_rescale_rnd(resample->inFrame->nb_samples, resample->outCodecCtx->sample_rate,
- resample->inCodecCtx->sample_rate, AV_ROUND_UP);
-
- if (initConvertedSamples(&resample, &converted_dst_samples, dst_nb_samples))
- goto cleanup;
-
- ret = swr_convert(resample->resampleCtx, converted_dst_samples, dst_nb_samples,
- (const uint8_t**)resample->inFrame->extended_data, resample->inFrame->nb_samples);
- if (ret < 0) {
- ALOGE("Could not convert input samples (error:%s)\n", av_err2str(ret));
- goto cleanup;
- }
-
- av_audio_fifo_write(resample->fifo, (void **)converted_dst_samples, ret);
- }
- ret = 0;
-
-cleanup:
- if (converted_dst_samples) {
- av_freep(&converted_dst_samples[0]);
- free(converted_dst_samples);
- }
-
- return ret;
-}
-
-int FFAudioResample::encodeAudioFrame(AVFrame *frame, int *data_present) {
- int ret;
-
- /* Set a timestamp based on the sample rate for the container. */
- if (frame) {
- frame->pts = resample->pts;
- resample->pts += frame->nb_samples;
- }
-
- ret = avcodec_send_frame(resample->outCodecCtx, frame);
- if (ret == AVERROR_EOF) {
- ret = 0;
- goto cleanup;
- } else if (ret < 0) {
- ALOGE("Could not send packet for encoding (error:%s)\n", av_err2str(ret));
- return ret;
- }
-
- ret = avcodec_receive_packet(resample->outCodecCtx, &resample->outPacket);
- if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
- ret = 0;
- goto cleanup;
- } else if (ret < 0) {
- ALOGE("Could not encode frame (error:%s)\n", av_err2str(ret));
- goto cleanup;
- } else {
- *data_present = 1;
- }
-
- /* Write one audio frame from the temporary packet to the output file. */
- if (*data_present &&
- (ret = av_write_frame(resample->outFormatCtx, &resample->outPacket)) < 0) {
- ALOGE("Could not write frame (error:%s)\n", av_err2str(ret));
- }
-
-cleanup:
- av_packet_unref(&resample->outPacket);
- return ret;
-}
-
-/**
- * Load one audio frame from the FIFO buffer, encode and write it to the
- * output file.
- *
- */
-int FFAudioResample::encodeAndWrite() {
- int data_written;
- const int frame_size = FFMIN(av_audio_fifo_size(resample->fifo),
- resample->outCodecCtx->frame_size);
-
- resample->outFrame->nb_samples = frame_size;
- if (av_audio_fifo_read(resample->fifo, (void **)resample->outFrame->data, frame_size) < frame_size) {
- ALOGE("Could not read data from FIFO\n");
- return AVERROR_EXIT;
- }
-
- if (encodeAudioFrame(resample->outFrame, &data_written)) {
- return AVERROR_EXIT;
- }
- return 0;
-}
-
-int FFAudioResample::resampling(const char *src_file, const char *dst_file, int sampleRate) {
- int ret = AVERROR_EXIT;
-
- /* Open the input file for reading. */
- if (openInputFile(src_file))
- goto cleanup;
- /* Open the output file for writing. */
- if (openOutputFile(dst_file, sampleRate))
- goto cleanup;
- /* Initialize the re-sampler to be able to convert audio sample formats. */
- if (initResample(&resample))
- goto cleanup;
- /* Initialize the FIFO buffer to store audio samples to be encoded. */
- resample->fifo = av_audio_fifo_alloc(resample->outCodecCtx->sample_fmt,
- resample->outCodecCtx->channels, 1024 * 10);
- if (initOutputFrame(&resample))
- goto cleanup;
- /* Write the header of the output file container. */
- if ((ret = avformat_write_header(resample->outFormatCtx, nullptr)) < 0) {
- ALOGE("write header error=%s", av_err2str(ret));
- }
-
- while (true) {
- int finished = 0;
- const int output_frame_size = resample->outCodecCtx->frame_size;
-
- while (av_audio_fifo_size(resample->fifo) < output_frame_size) {
- /* Decode one frame, convert sample format and put it into the FIFO buffer. */
- if (decodeAndConvert(&finished))
- goto cleanup;
-
- if (finished)
- break;
- }
-
- /* If we have enough samples for the encoder, we encode them.*/
- while (av_audio_fifo_size(resample->fifo) >= output_frame_size ||
- (finished && av_audio_fifo_size(resample->fifo) > 0))
- if (encodeAndWrite())
- goto cleanup;
-
- /* encode all the remaining samples. */
- if (finished) {
- int data_written;
- do {
- data_written = 0;
- if (encodeAudioFrame(nullptr, &data_written))
- goto cleanup;
- } while (data_written);
- break;
- }
- }
-
- /* Write the trailer of the output file container. */
- if (av_write_trailer(resample->outFormatCtx)) {
- ALOGE("write trailer error...");
- }
- ret = 0;
-
-cleanup:
- if (resample->fifo)
- av_audio_fifo_free(resample->fifo);
- swr_free(&(resample->resampleCtx));
- if (resample->outCodecCtx)
- avcodec_free_context(&(resample->outCodecCtx));
- if (resample->outFormatCtx) {
- avio_closep(&(resample->outFormatCtx->pb));
- avformat_free_context(resample->outFormatCtx);
- }
- if (resample->inCodecCtx)
- avcodec_free_context(&(resample->inCodecCtx));
- if (resample->inFormatCtx)
- avformat_close_input(&(resample->inFormatCtx));
- if (resample->inFrame)
- av_frame_free(&(resample->inFrame));
- if (resample->outFrame)
- av_frame_free(&(resample->outFrame));
-
- return ret;
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/ff_audio_resample.h b/app/src/main/cpp/ff_audio_resample.h
deleted file mode 100644
index 2a4547e9..00000000
--- a/app/src/main/cpp/ff_audio_resample.h
+++ /dev/null
@@ -1,71 +0,0 @@
-//
-// Created by xu fulong on 2022/9/7.
-//
-
-#ifndef FFMPEGANDROID_FF_AUDIO_RESAMPLE_H
-#define FFMPEGANDROID_FF_AUDIO_RESAMPLE_H
-
-#include "ffmpeg_jni_define.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-#include "libavformat/avformat.h"
-#include "libavformat/avio.h"
-
-#include "libavcodec/avcodec.h"
-
-#include "libavutil/audio_fifo.h"
-#include "libavutil/avassert.h"
-#include "libavutil/avstring.h"
-#include "libavutil/frame.h"
-#include "libavutil/opt.h"
-
-#include "libswresample/swresample.h"
-#ifdef __cplusplus
-}
-#endif
-
-struct AudioResample {
- int64_t pts = 0;
-
- AVPacket inPacket;
- AVPacket outPacket;
- AVFrame *inFrame;
- AVFrame *outFrame;
-
- SwrContext *resampleCtx;
- AVAudioFifo *fifo = nullptr;
-
- AVFormatContext *inFormatCtx;
- AVCodecContext *inCodecCtx;
- AVFormatContext *outFormatCtx;
- AVCodecContext *outCodecCtx;
-};
-
-class FFAudioResample {
-private:
-
- AudioResample *resample;
-
- int openInputFile(const char *filename);
-
- int openOutputFile(const char *filename, int sample_rate);
-
- int decodeAudioFrame(AVFrame *frame, int *data_present, int *finished);
-
- int decodeAndConvert(int *finished);
-
- int encodeAudioFrame(AVFrame *frame, int *data_present);
-
- int encodeAndWrite();
-public:
-
- FFAudioResample();
-
- ~FFAudioResample();
-
- int resampling(const char *src_file, const char *dst_file, int sampleRate);
-
-};
-#endif //FFMPEGANDROID_FF_AUDIO_RESAMPLE_H
diff --git a/app/src/main/cpp/ff_rtmp_pusher.cpp b/app/src/main/cpp/ff_rtmp_pusher.cpp
deleted file mode 100644
index 530c0ae0..00000000
--- a/app/src/main/cpp/ff_rtmp_pusher.cpp
+++ /dev/null
@@ -1,121 +0,0 @@
-//
-// Created by xu fulong on 2022/9/9.
-//
-
-#include "ff_rtmp_pusher.h"
-
-#define PUSH_TAG "ff_rtmp_pusher"
-
-int FFRtmpPusher::open(const char *inputPath, const char *outputPath) {
- int ret;
-
- avformat_network_init();
- ret = avformat_open_input(&inFormatCtx, inputPath, nullptr, nullptr);
- if (ret < 0) {
- LOGE(PUSH_TAG, "avformat_open_input err=%s", av_err2str(ret));
- return ret;
- }
- avformat_find_stream_info(inFormatCtx, nullptr);
- av_dump_format(inFormatCtx, 0, inputPath, 0);
- ret = avformat_alloc_output_context2(&outFormatCtx, nullptr, "flv", outputPath);
- if (ret < 0 || !outFormatCtx) {
- LOGE(PUSH_TAG, "alloc format_context err=%s", av_err2str(ret));
- return ret;
- }
-
- // Flv video: h264 audio: aac/mp3
- // If not h264, should transcode to h264
- for (int i = 0; i < inFormatCtx->nb_streams; ++i) {
- AVStream *in_stream = inFormatCtx->streams[i];
- const auto *codec = avcodec_find_encoder(in_stream->codecpar->codec_id);
- AVStream *out_stream = avformat_new_stream(outFormatCtx, codec);
- avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
- out_stream->codecpar->codec_tag = 0;
-
- if (in_stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
- video_index = i;
- } else if (in_stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
- if (audio_index == -1) {
- audio_index = i;
- }
- }
- }
-
- if (!(outFormatCtx->oformat->flags & AVFMT_NOFILE)) {
- ret = avio_open2(&outFormatCtx->pb, outputPath, AVIO_FLAG_WRITE, nullptr, nullptr);
- if (ret < 0) {
- LOGE(PUSH_TAG, "avio open error=%s", av_err2str(ret));
- return ret;
- }
- }
-
- ret = avformat_write_header(outFormatCtx, nullptr);
- if (ret < 0) {
- LOGE(PUSH_TAG, "avformat_write_header err=%s", av_err2str(ret));
- }
- return ret;
-}
-
-void rescale(AVFormatContext *in_format_ctx, AVFormatContext *out_format_ctx, AVPacket *packet) {
- AVStream *in_stream = in_format_ctx->streams[packet->stream_index];
- AVStream *out_stream = out_format_ctx->streams[packet->stream_index];
-
- if (in_stream->time_base.num == out_stream->time_base.num
- && in_stream->time_base.den == out_stream->time_base.den) {
- packet->pos = -1;
- return;
- }
-
- packet->pts = av_rescale_q(packet->pts, in_stream->time_base, out_stream->time_base);
- packet->dts = av_rescale_q(packet->dts, in_stream->time_base, out_stream->time_base);
- packet->duration = av_rescale_q(packet->duration, in_stream->time_base, out_stream->time_base);
- packet->pos = -1;
-}
-
-int FFRtmpPusher::push() {
- int ret;
- int64_t startTime = av_gettime();
-
- while (true) {
- ret = av_read_frame(inFormatCtx, &packet);
- if (ret < 0) {
- LOGE(PUSH_TAG, "av_read_frame err=%s", av_err2str(ret));
- break;
- }
-
- if (packet.stream_index != video_index && packet.stream_index != audio_index)
- continue;
-
- // sync
- AVRational time_base = inFormatCtx->streams[packet.stream_index]->time_base;
- int64_t pts_time = av_rescale_q(packet.pts, time_base, AV_TIME_BASE_Q);
- int64_t cur_time = av_gettime() - startTime;
- if (pts_time > cur_time) {
- av_usleep((unsigned int)(pts_time - cur_time));
- }
-
- rescale(inFormatCtx, outFormatCtx, &packet);
-
- ret = av_interleaved_write_frame(outFormatCtx, &packet);
- if (ret < 0) {
- LOGE(PUSH_TAG, "write frame err=%s", av_err2str(ret));
- break;
- }
-
- av_packet_unref(&packet);
- }
-
- return ret;
-}
-
-void FFRtmpPusher::close() {
- if (outFormatCtx) {
- av_write_trailer(outFormatCtx);
- avformat_close_input(&outFormatCtx);
- outFormatCtx = nullptr;
- }
- if (inFormatCtx) {
- avformat_close_input(&inFormatCtx);
- inFormatCtx = nullptr;
- }
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/ff_rtmp_pusher.h b/app/src/main/cpp/ff_rtmp_pusher.h
deleted file mode 100644
index 7765624d..00000000
--- a/app/src/main/cpp/ff_rtmp_pusher.h
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Created by xu fulong on 2022/9/9.
-//
-
-#ifndef FF_RTMP_PUSHER_H
-#define FF_RTMP_PUSHER_H
-
-#include "ffmpeg_jni_define.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-#include "libavformat/avformat.h"
-#include "libavcodec/avcodec.h"
-#include "libavutil/time.h"
-#ifdef __cplusplus
-}
-#endif
-
-class FFRtmpPusher {
-private:
- AVFormatContext *inFormatCtx;
- AVFormatContext *outFormatCtx;
-
- AVPacket packet;
- int video_index = -1;
- int audio_index = -1;
-
-public:
-
- int open(const char *inputPath, const char *outputPath);
-
- int push();
-
- void close();
-
-};
-
-#endif //FF_RTMP_PUSHER_H
diff --git a/app/src/main/cpp/ffmpeg_cmd.c b/app/src/main/cpp/ffmpeg_cmd.c
index e8f8201d..776e2b69 100644
--- a/app/src/main/cpp/ffmpeg_cmd.c
+++ b/app/src/main/cpp/ffmpeg_cmd.c
@@ -1,5 +1,7 @@
+
#include
#include "ffmpeg/ffmpeg.h"
+#include "ffmpeg/ffprobe.h"
#include "ffmpeg_jni_define.h"
#define FFMPEG_TAG "FFmpegCmd"
@@ -8,7 +10,6 @@
#define ALOGI(TAG, FORMAT, ...) __android_log_vprint(ANDROID_LOG_INFO, TAG, FORMAT, ##__VA_ARGS__)
#define ALOGE(TAG, FORMAT, ...) __android_log_vprint(ANDROID_LOG_ERROR, TAG, FORMAT, ##__VA_ARGS__)
-
int err_count;
JNIEnv *ff_env;
jclass ff_class;
@@ -94,4 +95,26 @@ void progress_callback(int position, int duration, int state) {
if (ff_env && ff_class && ff_method) {
(*ff_env)->CallStaticVoidMethod(ff_env, ff_class, ff_method, position, duration, state);
}
-}
\ No newline at end of file
+}
+
+FFMPEG_FUNC(jstring, handleProbe, jobjectArray commands) {
+ int argc = (*env)->GetArrayLength(env, commands);
+ char **argv = (char **) malloc(argc * sizeof(char *));
+ int i;
+ for (i = 0; i < argc; i++) {
+ jstring jstr = (jstring) (*env)->GetObjectArrayElement(env, commands, i);
+ char *temp = (char *) (*env)->GetStringUTFChars(env, jstr, 0);
+ argv[i] = (char *)(malloc(1024));
+ strcpy(argv[i], temp);
+ (*env)->ReleaseStringUTFChars(env, jstr, temp);
+ }
+ //execute ffprobe command
+ char *result = ffprobe_run(argc, argv);
+ //release memory
+ for (i = 0; i < argc; i++) {
+ free(argv[i]);
+ }
+ free(argv);
+
+ return (*env)->NewStringUTF(env, result);
+}
diff --git a/app/src/main/cpp/ffmpeg_jni_define.h b/app/src/main/cpp/ffmpeg_jni_define.h
index 3a65cbcb..d86714a5 100644
--- a/app/src/main/cpp/ffmpeg_jni_define.h
+++ b/app/src/main/cpp/ffmpeg_jni_define.h
@@ -22,26 +22,10 @@ extern "C" { \
JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_FFmpegCmd_ ## FUNC_NAME \
(JNIEnv *env, jclass thiz, ##__VA_ARGS__)\
-#define FFPROBE_FUNC(RETURN_TYPE, FUNC_NAME, ...) \
-extern "C" { \
- JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_FFmpegCmd_ ## FUNC_NAME \
- (JNIEnv *env, jclass thiz, ##__VA_ARGS__);\
-}\
- JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_FFmpegCmd_ ## FUNC_NAME \
- (JNIEnv *env, jclass thiz, ##__VA_ARGS__)\
-
#define VIDEO_PLAYER_FUNC(RETURN_TYPE, FUNC_NAME, ...) \
JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_VideoPlayer_ ## FUNC_NAME \
(JNIEnv *env, jobject thiz, ##__VA_ARGS__)\
-#define PUSHER_FUNC(RETURN_TYPE, FUNC_NAME, ...) \
-extern "C" { \
- JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_FFmpegPusher_ ## FUNC_NAME \
- (JNIEnv *env, jobject thiz, ##__VA_ARGS__);\
-} \
- JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_FFmpegPusher_ ## FUNC_NAME \
- (JNIEnv *env, jobject thiz, ##__VA_ARGS__) \
-
#define RETRIEVER_FUNC(RETURN_TYPE, FUNC_NAME, ...) \
extern "C" { \
JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_metadata_FFmpegMediaRetriever_ ## FUNC_NAME \
@@ -50,12 +34,4 @@ extern "C" { \
JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_metadata_FFmpegMediaRetriever_ ## FUNC_NAME \
(JNIEnv *env, jobject thiz, ##__VA_ARGS__)\
-#define COMMON_MEDIA_FUNC(RETURN_TYPE, FUNC_NAME, ...) \
-extern "C" { \
- JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_CommonMediaHelper_ ## FUNC_NAME \
- (JNIEnv *env, jobject thiz, ##__VA_ARGS__);\
-}\
- JNIEXPORT RETURN_TYPE JNICALL Java_com_frank_ffmpeg_CommonMediaHelper_ ## FUNC_NAME \
- (JNIEnv *env, jobject thiz, ##__VA_ARGS__)\
-
#endif //FFMPEGANDROID_FFMPEG_JNI_DEFINE_H
diff --git a/app/src/main/cpp/ffmpeg_pusher_jni.cpp b/app/src/main/cpp/ffmpeg_pusher_jni.cpp
deleted file mode 100644
index 28de1bd8..00000000
--- a/app/src/main/cpp/ffmpeg_pusher_jni.cpp
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// Created by xu fulong on 2022/9/9.
-//
-
-#include
-#include "ff_rtmp_pusher.h"
-
-PUSHER_FUNC(int, pushStream, jstring inputPath, jstring outputPath) {
- int ret;
- const char *input_path = env->GetStringUTFChars(inputPath, JNI_FALSE);
- const char *output_path = env->GetStringUTFChars(outputPath, JNI_FALSE);
- auto *rtmpPusher = new FFRtmpPusher();
- ret = rtmpPusher->open(input_path, output_path);
- if (ret < 0) {
- LOGE("ffmpeg_pusher_jni", "open error=%d", ret);
- return ret;
- }
- ret = rtmpPusher->push();
-
- rtmpPusher->close();
- env->ReleaseStringUTFChars(inputPath, input_path);
- env->ReleaseStringUTFChars(outputPath, output_path);
-
- return ret;
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/ffprobe_cmd.cpp b/app/src/main/cpp/ffprobe_cmd.cpp
deleted file mode 100644
index 624f3c9f..00000000
--- a/app/src/main/cpp/ffprobe_cmd.cpp
+++ /dev/null
@@ -1,42 +0,0 @@
-//
-// Created by frank on 2020-01-06.
-//
-
-#include
-#include
-#include
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#include "ffmpeg_jni_define.h"
-#include "ffmpeg/ffprobe.h"
-
-#ifdef __cplusplus
-}
-#endif
-
-FFPROBE_FUNC(jstring, handleProbe, jobjectArray commands) {
- int argc = env->GetArrayLength(commands);
- char **argv = (char **) malloc(argc * sizeof(char *));
- int i;
- for (i = 0; i < argc; i++) {
- jstring jstr = (jstring) env->GetObjectArrayElement( commands, i);
- char *temp = (char *) env->GetStringUTFChars(jstr, 0);
- argv[i] = static_cast(malloc(1024));
- strcpy(argv[i], temp);
- env->ReleaseStringUTFChars(jstr, temp);
- }
- //execute ffprobe command
- char *result = ffprobe_run(argc, argv);
- //release memory
- for (i = 0; i < argc; i++) {
- free(argv[i]);
- }
- free(argv);
-
- return env->NewStringUTF(result);
-}
-
-
diff --git a/app/src/main/cpp/pcm/pcm_process.cpp b/app/src/main/cpp/pcm/pcm_process.cpp
deleted file mode 100644
index 88abd340..00000000
--- a/app/src/main/cpp/pcm/pcm_process.cpp
+++ /dev/null
@@ -1,93 +0,0 @@
-//
-// Created by xu fulong on 2022/8/5.
-//
-
-#include
-#include
-#include
-#include
-
-void pcm_raise_speed(char *input_path, char *output_path)
-{
- FILE *input = fopen(input_path, "rb+");
- FILE *output = fopen(output_path, "wb+");
- if (!input && !output) {
- printf("open file fail, msg=%s\n", strerror(errno));
- return;
- }
-
- int count = 0;
- char *buf = (char*) malloc(sizeof(char) * 4);
- while(!feof(input)) {
- fread(buf, sizeof(char), 4, input);
- if (count % 2 == 0) {
- // L
- fwrite(buf, sizeof(char), 2, output);
- // R
- fwrite(buf + 2, sizeof(char), 2, output);
- }
- count++;
- }
-
- free(buf);
- fclose(output);
- fclose(input);
-}
-
-void pcm_change_volume(char *input_path, char *output_path)
-{
- FILE *input = fopen(input_path, "rb+");
- FILE *output = fopen(output_path, "wb+");
- if (!input && !output) {
- printf("open file fail, msg=%s\n", strerror(errno));
- return;
- }
-
- int count = 0;
- char *buf = (char*) malloc(sizeof(char) * 4);
- while(!feof(input)) {
- fread(buf, sizeof(char), 4, input);
- short *left = (short*) buf;
- *left /= 2;
- short *right = (short*) (buf + 2);
- *right /= 2;
- // L
- fwrite(left, sizeof(short), 1, output);
- // R
- fwrite(right, sizeof(short), 1, output);
- count++;
- }
- printf("resample count=%d\n", count);
-
- free(buf);
- fclose(output);
- fclose(input);
-}
-
-void pcm_split_channel(char *input_path, char *left_path, char *right_path)
-{
- FILE *input = fopen(input_path, "rb+");
- FILE *left = fopen(left_path, "wb+");
- FILE *right = fopen(right_path, "wb+");
- if (!input && !left && !right) {
- printf("open file fail, msg=%s\n", strerror(errno));
- return;
- }
-
- int count = 0;
- char *buf = (char*) malloc(sizeof(char) * 4);
- while(!feof(input)) {
- fread(buf, sizeof(char), 4, input);
- // L
- fwrite(buf, sizeof(char), 2, left);
- // R
- fwrite(buf+2, sizeof(char), 2, right);
- count++;
- }
- printf("resample count=%d\n", count);
-
- free(buf);
- fclose(left);
- fclose(right);
- fclose(input);
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/video_cutting.cpp b/app/src/main/cpp/video_cutting.cpp
deleted file mode 100644
index ef3d227e..00000000
--- a/app/src/main/cpp/video_cutting.cpp
+++ /dev/null
@@ -1,127 +0,0 @@
-//
-// Created by xu fulong on 2022/5/13.
-//
-
-#include
-
-#ifdef __ANDROID__
-#include
-#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "CutVideo", FORMAT, ##__VA_ARGS__)
-#else
-#include
-#define LOGE(FORMAT, ...) printf(FORMAT, ##__VA_ARGS__)
-#endif
-
-int CutVideo::open_output_file(AVFormatContext *ifmt_ctx, const char *filename)
-{
- int ret;
- avformat_alloc_output_context2(&ofmt_ctx, nullptr, nullptr, filename);
- if (!ofmt_ctx) {
- LOGE("Could not create output context\n");
- return AVERROR_UNKNOWN;
- }
-
- dts_start_offset = new int64_t [ifmt_ctx->nb_streams];
- memset(dts_start_offset, -1, sizeof(int64_t) * ifmt_ctx->nb_streams);
- pts_start_offset = new int64_t [ifmt_ctx->nb_streams];
- memset(pts_start_offset, -1, sizeof(int64_t) * ifmt_ctx->nb_streams);
-
- for (int i = 0; i < ifmt_ctx->nb_streams; i++) {
- AVStream* in_stream = ifmt_ctx->streams[i];
- AVCodecParameters* codecpar = in_stream->codecpar;
-
- const AVCodec* dec = avcodec_find_decoder(codecpar->codec_id);
- AVStream* out_stream = avformat_new_stream(ofmt_ctx, dec);
- if (!out_stream) {
- LOGE("Failed allocating output stream\n");
- ret = AVERROR_UNKNOWN;
- return ret;
- }
- avcodec_parameters_copy(out_stream->codecpar, codecpar);
- }
-
- av_dump_format(ofmt_ctx, 0, filename, 1);
- if (!(ofmt_ctx->oformat->flags & AVFMT_NOFILE)) {
- ret = avio_open(&ofmt_ctx->pb, filename, AVIO_FLAG_WRITE);
- if (ret < 0) {
- LOGE("Could not open output file %s\n", filename);
- return ret;
- }
- }
- /* init muxer, write output file header */
- ret = avformat_write_header(ofmt_ctx, nullptr);
- if (ret < 0) {
- LOGE("Error occurred when opening output file\n");
- return ret;
- }
-
- return 0;
-}
-
-void CutVideo:: setParam(int64_t start_time, int64_t duration) {
- m_startTime = start_time;
- m_duration = duration;
-}
-
-AVPacket* CutVideo::copy_packet(AVFormatContext *ifmt_ctx, AVPacket *packet) {
- auto* pkt = (AVPacket*)av_malloc(sizeof(AVPacket));
- av_new_packet(pkt, 0);
- if (0 == av_packet_ref(pkt, packet)) {
- AVStream* in_stream = ifmt_ctx->streams[pkt->stream_index];
- AVStream* out_stream = ofmt_ctx->streams[pkt->stream_index];
- if (pts_start_offset[pkt->stream_index] == -1) {
- pts_start_offset[pkt->stream_index] = pkt->pts;
- dts_start_offset[pkt->stream_index] = pkt->dts;
- }
-
- // convert pts and dts
- pkt->pts = av_rescale_q_rnd(pkt->pts, in_stream->time_base, out_stream->time_base,
- static_cast(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
- pkt->dts = av_rescale_q_rnd(pkt->dts, in_stream->time_base, out_stream->time_base,
- static_cast(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
- pkt->duration = av_rescale_q(pkt->duration, in_stream->time_base, out_stream->time_base);
- pkt->pos = -1;
- return pkt;
- }
- return nullptr;
-}
-
-int CutVideo::write_internal(AVFormatContext *ifmt_ctx, AVPacket *packet)
-{
- int ret;
- AVPacket *pkt = copy_packet(ifmt_ctx, packet);
- if (pkt == nullptr) {
- LOGE("packet is NULL\n");
- return -1;
- }
- pkt->pts = pkt->pts - pts_start_offset[pkt->stream_index];
- pkt->dts = pkt->dts - dts_start_offset[pkt->stream_index];
-// LOGE("pts=%ld, dts=%ld, stream_index=%d", pkt->pts, pkt->dts, pkt->stream_index);
- // write packet into file
- //TODO:when pts < dts, it occurs error.
- // Therefore, wo need to cache packet queue, and ascend sort by dts
- if ((ret = av_interleaved_write_frame(ofmt_ctx, pkt)) < 0) {
- LOGE("Error to mux packet, stream_index=%d, pts=%ld, dts=%ld\n", pkt->stream_index, pkt->pts, pkt->dts);
- }
- av_packet_unref(pkt);
- return ret;
-}
-
-void CutVideo::write_output_file(AVFormatContext *ifmt_ctx, AVPacket *packet) {
- int64_t timestamp = packet->pts * av_q2d(ifmt_ctx->streams[packet->stream_index]->time_base);
- if (timestamp >= m_startTime && timestamp < m_startTime + m_duration) {
- write_internal(ifmt_ctx, packet);
- }
-}
-
-void CutVideo::close_output_file() {
- if (!ofmt_ctx)
- return;
- av_write_trailer(ofmt_ctx);
- if (!(ofmt_ctx->oformat->flags & AVFMT_NOFILE)) {
- avio_close(ofmt_ctx->pb);
- }
- avformat_free_context(ofmt_ctx);
- delete pts_start_offset;
- delete dts_start_offset;
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/video_cutting.h b/app/src/main/cpp/video_cutting.h
deleted file mode 100644
index 10dcf775..00000000
--- a/app/src/main/cpp/video_cutting.h
+++ /dev/null
@@ -1,42 +0,0 @@
-//
-// Created by xu fulong on 2022/5/13.
-//
-
-#ifndef CUT_VIDEO_H
-#define CUT_VIDEO_H
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-#include "libavformat/avformat.h"
-#ifdef __cplusplus
-}
-#endif
-
-class CutVideo {
-private:
-
- int64_t m_startTime = 15;
- int64_t m_duration = 10;
-
- int64_t *dts_start_offset;
- int64_t *pts_start_offset;
-
- AVFormatContext *ofmt_ctx = nullptr;
-
- AVPacket* copy_packet(AVFormatContext *ifmt_ctx, AVPacket *packet);
-
- int write_internal(AVFormatContext *ifmt_ctx, AVPacket *packet);
-
-public:
-
- int open_output_file(AVFormatContext *ifmt_ctx, const char *filename);
-
- void setParam(int64_t start_time, int64_t duration);
-
- void write_output_file(AVFormatContext *ifmt_ctx, AVPacket *packet);
-
- void close_output_file();
-};
-
-#endif //CUT_VIDEO_H
diff --git a/app/src/main/cpp/yuv/yuv_converter.cpp b/app/src/main/cpp/yuv/yuv_converter.cpp
deleted file mode 100644
index 96f204dc..00000000
--- a/app/src/main/cpp/yuv/yuv_converter.cpp
+++ /dev/null
@@ -1,208 +0,0 @@
-//
-// Created by xu fulong on 2022/7/9.
-//
-
-#include "yuv_converter.h"
-#include
-
-// https://chromium.googlesource.com/libyuv/libyuv
-// https://mymusing.co/bt601-yuv-to-rgb-conversion-color/
-// https://www.color.org/chardata/rgb/rgb_registry.xalter
-
-// YUV to RGB
-// or (RGB:[0, 1])(UV:[-0.5, 0.5])
-// R = Y + 1.407 * V
-// G = Y - 0.345 * U - 0.716 * V
-// B = Y + 1.779 * U
-
-// Y = 0.299 * R + 0.587 * G + 0.114 * B
-// U = -0.147 * R - 0.289 * G + 0.436 * B
-// V = 0.615 * R - 0.515 * G - 0.100 * B
-
-// normalize (Y:[16, 235] UV:[16, 240])(RGB:[0, 255])
-// R = (298 * Y + 411 * V - 57344) >> 8
-// G = (298 * Y - 101 * U - 211 * V + 34739) >> 8
-// B = (298 * Y + 519 * U - 71117) >> 8
-
-// Y = (66 * R + 129 * G + 25 * B) >> 8 + 16
-// U = (-38 * R - 74 * G + 112 * B) >> 8 + 128
-// V = (112 * R - 94 * G - 18 * B) >> 8 + 128
-
-static void rgba_to_yuv420p(const int *argb, int8_t *yuv, int width, int height) {
- int frameSize = width * height;
- int index = 0;
- int yIndex = 0;
- int uIndex = frameSize;
- int vIndex = frameSize * 5 / 4;
- int R, G, B, Y, U, V;
-
- for (int j = 0; j < height; j++) {
- for (int i = 0; i < width; i++) {
- R = (argb[index] & 0xff0000) >> 16;
- G = (argb[index] & 0xff00) >> 8;
- B = (argb[index] & 0xff);
-
- // RGB to YUV algorithm
- Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
-
- // I420(YUV420p) -> YYYYYYYY UU VV
- yuv[yIndex++] = (int8_t) Y;
- if (j % 2 == 0 && i % 2 == 0) {
- U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
- V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
- yuv[uIndex++] = (int8_t) U;
- yuv[vIndex++] = (int8_t) V;
- }
- index++;
- }
- }
-}
-
-static int yuv2argb(int y, int u, int v) {
-#define max(a, b) ((a) > (b) ? (a) : (b))
-
- int r, g, b;
-
- r = y + (int) (1.407f * u);
- g = y - (int) (0.345f * v + 0.716f * u);
- b = y + (int) (1.779f * v);
- r = r > 255 ? 255 : max(r, 0);
- g = g > 255 ? 255 : max(g, 0);
- b = b > 255 ? 255 : max(b, 0);
- return 0xff000000 | (r << 16) | (g << 8) | b;
-}
-
-static void yuv420p_to_argb(const int8_t *yuv, int *argb, int width, int height) {
- int size = width * height;
- int offset = size;
- int u, v, y1, y2, y3, y4;
-
- for (int i = 0, k = 0; i < size; i += 2, k++) {
- y1 = yuv[i] & 0xff;
- y2 = yuv[i + 1] & 0xff;
- y3 = yuv[width + i] & 0xff;
- y4 = yuv[width + i + 1] & 0xff;
-
- u = yuv[offset + k] & 0xff;
- v = yuv[offset * 5 / 4 + k] & 0xff;
- u = u - 128;
- v = v - 128;
-
- argb[i] = yuv2argb(y1, u, v);
- argb[i + 1] = yuv2argb(y2, u, v);
- argb[width + i] = yuv2argb(y3, u, v);
- argb[width + i + 1] = yuv2argb(y4, u, v);
-
- if (i != 0 && (i + 2) % width == 0)
- i += width;
- }
-}
-
-static void nv21_to_yuv420p(int8_t *dst, int8_t *src, int len) {
- memcpy(dst, src, len); // y
- for (int i = 0; i < len / 4; ++i) {
- *(dst + len + i) = *(src + len + i * 2 + 1); // u
- *(dst + len * 5 / 4 + i) = *(src + len + i * 2); // v
- }
-}
-
-static void nv12_to_yuv420p(int8_t *dst, int8_t *src, int len) {
- memcpy(dst, src, len); // y
- for (int i = 0; i < len / 4; ++i) {
- *(dst + len + i) = *(src + len + i * 2); // u
- *(dst + len * 5 / 4 + i) = *(src + len + i * 2 + 1); // v
- }
-}
-
-static void yuv420p_rotate90(int8_t *dst, const int8_t *src, int width, int height) {
- int n = 0;
- int wh = width * height;
- int half_width = width / 2;
- int half_height = height / 2;
- // y
- for (int j = 0; j < width; j++) {
- for (int i = height - 1; i >= 0; i--) {
- dst[n++] = src[width * i + j];
- }
- }
- // u
- for (int i = 0; i < half_width; i++) {
- for (int j = 1; j <= half_height; j++) {
- dst[n++] = src[wh + ((half_height - j) * half_width + i)];
- }
- }
- // v
- for (int i = 0; i < half_width; i++) {
- for (int j = 1; j <= half_height; j++) {
- dst[n++] = src[wh + wh / 4 + ((half_height - j) * half_width + i)];
- }
- }
-}
-
-static void yuv420p_rotate180(int8_t *dst, const int8_t *src, int width, int height) {
- int n = 0;
- int half_width = width / 2;
- int half_height = height / 2;
- // y
- for (int j = height - 1; j >= 0; j--) {
- for (int i = width; i > 0; i--) {
- dst[n++] = src[width * j + i - 1];
- }
- }
- // u
- int offset = width * height;
- for (int j = half_height - 1; j >= 0; j--) {
- for (int i = half_width; i > 0; i--) {
- dst[n++] = src[offset + half_width * j + i - 1];
- }
- }
- // v
- offset += half_width * half_height;
- for (int j = half_height - 1; j >= 0; j--) {
- for (int i = half_width; i > 0; i--) {
- dst[n++] = src[offset + half_width * j + i - 1];
- }
- }
-}
-
-static void yuv420p_rotate270(int8_t *dst, const int8_t *src, int width, int height) {
-
- for (int j = 0; j < width; j++) {
- for (int i = 1; i <= height; i++) {
- *dst++ = *(src + i * width - j);
- }
- }
-
- auto *src_u = const_cast(src + width * height);
- for (int j = 0; j < width / 2; j++) {
- for (int i = 1; i <= height / 2; i++) {
- *dst++ = *(src_u + i * width / 2 - j);
- }
- }
-
- auto *src_v = const_cast(src + width * height * 5 / 4);
- for (int j = 0; j < width / 2; j++) {
- for (int i = 1; i <= height / 2; i++) {
- *dst++ = *(src_v + i * width / 2 - j);
- }
- }
-}
-
-static void yuv420p_rotate(int8_t *dst, int8_t *src, int width, int height, int degree) {
- switch(degree) {
- case 0:
- memcpy(dst, src, width * height * 3 / 2);
- break;
- case 90:
- yuv420p_rotate90(dst, src, width, height);
- break;
- case 180:
- yuv420p_rotate180(dst, src, width, height);
- break;
- case 270:
- yuv420p_rotate270(dst, src, width, height);
- break;
- default:
- break;
- }
-}
\ No newline at end of file
diff --git a/app/src/main/cpp/yuv/yuv_converter.h b/app/src/main/cpp/yuv/yuv_converter.h
deleted file mode 100644
index 491d312e..00000000
--- a/app/src/main/cpp/yuv/yuv_converter.h
+++ /dev/null
@@ -1,32 +0,0 @@
-//
-// Created by xu fulong on 2022/7/9.
-//
-
-#ifndef FFMPEGANDROID_YUV_CONVERTER_H
-#define FFMPEGANDROID_YUV_CONVERTER_H
-
-#include
-
-static void rgba_to_yuv420p(const int *argb, int8_t *yuv, int width, int height);
-
-static void yuv420p_to_argb(const int8_t *yuv, int *argb, int width, int height);
-
-static void yuv420p_rotate(int8_t *dst, int8_t *src, int width, int height, int degree);
-
-/**
- * convert NV21 to YUV420P
- * @param dst data of yuv420p
- * @param src data of nv21
- * @param len width*height
- */
-static void nv21_to_yuv420p(int8_t *dst, int8_t *src, int len);
-
-/**
- * convert NV12 to YUV420P
- * @param dst data of yuv420p
- * @param src data of nv12
- * @param len width*height
- */
-static void nv12_to_yuv420p(int8_t *dst, int8_t *src, int len);
-
-#endif //FFMPEGANDROID_YUV_CONVERTER_H
diff --git a/app/src/main/java/com/frank/ffmpeg/CommonMediaHelper.java b/app/src/main/java/com/frank/ffmpeg/CommonMediaHelper.java
deleted file mode 100644
index 1602134b..00000000
--- a/app/src/main/java/com/frank/ffmpeg/CommonMediaHelper.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.frank.ffmpeg;
-
-/**
- * @author xufulong
- * @date 2022/9/7 10:16 上午
- * @desc
- */
-public class CommonMediaHelper {
-
- static {
- System.loadLibrary("media-handle");
- }
-
- public native int audioResample(String inputFile, String outputFile, int sampleRate);
-
-}
diff --git a/app/src/main/java/com/frank/ffmpeg/FFmpegApplication.java b/app/src/main/java/com/frank/ffmpeg/FFmpegApplication.java
index 95e41afe..b5a837c9 100644
--- a/app/src/main/java/com/frank/ffmpeg/FFmpegApplication.java
+++ b/app/src/main/java/com/frank/ffmpeg/FFmpegApplication.java
@@ -16,4 +16,12 @@ public static FFmpegApplication getInstance() {
return context;
}
+ public boolean enableSurfaceView() {
+ return true;
+ }
+
+ public boolean useMediaCodec() {
+ return false;
+ }
+
}
diff --git a/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java b/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java
index 2d417d0a..5f553d0e 100644
--- a/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java
+++ b/app/src/main/java/com/frank/ffmpeg/FFmpegCmd.java
@@ -142,8 +142,6 @@ public static String executeProbeSynchronize(final String[] commands) {
private native static String handleProbe(String[] commands);
- public native static String getInfo();
-
public static void onProgressCallback(int position, int duration, @FFmpegState int state) {
Log.e(TAG, "onProgress position=" + position
+ "--duration=" + duration + "--state=" + state);
diff --git a/app/src/main/java/com/frank/ffmpeg/FFmpegPusher.java b/app/src/main/java/com/frank/ffmpeg/FFmpegPusher.java
deleted file mode 100644
index 54c334cc..00000000
--- a/app/src/main/java/com/frank/ffmpeg/FFmpegPusher.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.frank.ffmpeg;
-
-/**
- * Using FFmpeg to push FLV stream
- * Created by frank on 2018/2/2.
- */
-
-public class FFmpegPusher {
- static {
- System.loadLibrary("media-handle");
- }
-
- /**
- * JNI interface: select file and push to rtmp server
- *
- * @param filePath liveUrl
- * @param liveUrl the url of rtmp server
- * @return the result of pushing stream
- */
- public native int pushStream(String filePath, String liveUrl);
-
-}
diff --git a/app/src/main/java/com/frank/ffmpeg/activity/AudioEffectActivity.kt b/app/src/main/java/com/frank/ffmpeg/activity/AudioEffectActivity.kt
index 82ea0333..6566077a 100644
--- a/app/src/main/java/com/frank/ffmpeg/activity/AudioEffectActivity.kt
+++ b/app/src/main/java/com/frank/ffmpeg/activity/AudioEffectActivity.kt
@@ -11,8 +11,8 @@ import android.view.View
import android.widget.*
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
-import com.frank.androidmedia.controller.AudioEffectController
-import com.frank.androidmedia.listener.AudioEffectCallback
+import com.frank.ffmpeg.controller.AudioEffectController
+import com.frank.ffmpeg.listener.AudioEffectCallback
import com.frank.ffmpeg.R
import com.frank.ffmpeg.adapter.EqualizerAdapter
import com.frank.ffmpeg.listener.OnSeekBarListener
diff --git a/app/src/main/java/com/frank/ffmpeg/activity/CameraFilterActivity.kt b/app/src/main/java/com/frank/ffmpeg/activity/CameraFilterActivity.kt
index 00cda5ec..3898bc6b 100644
--- a/app/src/main/java/com/frank/ffmpeg/activity/CameraFilterActivity.kt
+++ b/app/src/main/java/com/frank/ffmpeg/activity/CameraFilterActivity.kt
@@ -5,7 +5,6 @@ import android.view.View
import com.frank.camerafilter.factory.BeautyFilterType
import com.frank.camerafilter.widget.BeautyCameraView
import com.frank.ffmpeg.R
-import com.frank.ffmpeg.util.FilterTypeUtil
class CameraFilterActivity : BaseActivity() {
@@ -56,7 +55,7 @@ class CameraFilterActivity : BaseActivity() {
if (index >= filterType.size)
index = 0
cameraView!!.setFilter(filterType[index])
- showToast(getString(FilterTypeUtil.filterTypeToNameId(filterType[index])))
+ showToast(getString(filterTypeToNameId(filterType[index])))
}
}
@@ -64,4 +63,22 @@ class CameraFilterActivity : BaseActivity() {
}
+ fun filterTypeToNameId(type: BeautyFilterType): Int {
+ return when (type) {
+ BeautyFilterType.NONE -> R.string.camera_filter_none
+ BeautyFilterType.BRIGHTNESS -> R.string.camera_filter_brightness
+ BeautyFilterType.SATURATION -> R.string.camera_filter_saturation
+ BeautyFilterType.CONTRAST -> R.string.camera_filter_contrast
+ BeautyFilterType.SHARPEN -> R.string.camera_filter_sharpen
+ BeautyFilterType.BLUR -> R.string.camera_filter_blur
+ BeautyFilterType.HUE -> R.string.camera_filter_hue
+ BeautyFilterType.WHITE_BALANCE -> R.string.camera_filter_balance
+ BeautyFilterType.SKETCH -> R.string.camera_filter_sketch
+ BeautyFilterType.OVERLAY -> R.string.camera_filter_overlay
+ BeautyFilterType.BREATH_CIRCLE -> R.string.camera_filter_circle
+ else -> R.string.camera_filter_none
+ }
+ }
+
+
}
\ No newline at end of file
diff --git a/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.kt b/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.kt
index 8b764a70..083269ae 100644
--- a/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.kt
+++ b/app/src/main/java/com/frank/ffmpeg/activity/MainActivity.kt
@@ -1,20 +1,18 @@
package com.frank.ffmpeg.activity
import android.content.Intent
-import android.net.Uri
import android.os.Build
import android.os.Bundle
+import android.os.Environment
import android.provider.Settings
import android.view.View
-import android.widget.TextView
-import androidx.core.content.ContentProviderCompat
import androidx.recyclerview.widget.RecyclerView
import androidx.recyclerview.widget.StaggeredGridLayoutManager
-import com.frank.ffmpeg.FFmpegCmd
import com.frank.ffmpeg.R
import com.frank.ffmpeg.adapter.WaterfallAdapter
import com.frank.ffmpeg.listener.OnItemClickListener
+import androidx.core.net.toUri
/**
* The main entrance of all Activity
@@ -22,7 +20,6 @@ import com.frank.ffmpeg.listener.OnItemClickListener
*/
class MainActivity : BaseActivity() {
- private val MANAGE_STORAGE_RC: Int = 100
override val layoutId: Int
get() = R.layout.activity_main
@@ -30,34 +27,35 @@ class MainActivity : BaseActivity() {
super.onCreate(savedInstanceState)
initView()
- getQueryPermission()
+ managerPermission()
}
- fun getQueryPermission(){
- fun isRPlus() = Build.VERSION.SDK_INT >= Build.VERSION_CODES.R
- if(!isRPlus()) {
+ private fun managerPermission() {
+ if(Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
return
}
+
+ if (Environment.isExternalStorageManager()) {
+ return
+ }
+
val packageName = this.packageName
try {
-// "queryPermission".toast(ContentProviderCompat.requireContext())
val intent = Intent(Settings.ACTION_MANAGE_APP_ALL_FILES_ACCESS_PERMISSION)
intent.addCategory("android.intent.category.DEFAULT")
- intent.data = Uri.parse("package:$packageName")
- this.startActivityForResult(intent, MANAGE_STORAGE_RC)
+ intent.data = "package:$packageName".toUri()
+ startActivity(intent)
} catch (e: Exception) {
-// "error:".toast(ContentProviderCompat.requireContext())
val intent = Intent()
intent.action = Settings.ACTION_MANAGE_ALL_FILES_ACCESS_PERMISSION
- startActivityForResult(intent, MANAGE_STORAGE_RC)
+ startActivity(intent)
}
}
+
private fun initView() {
val list = listOf(
getString(R.string.audio_handle),
getString(R.string.video_handle),
- getString(R.string.media_handle),
- getString(R.string.video_push),
getString(R.string.video_live),
getString(R.string.video_filter),
getString(R.string.video_preview),
@@ -76,7 +74,6 @@ class MainActivity : BaseActivity() {
}
})
viewWaterfall.adapter = adapter
- findViewById(R.id.tv_version).text =" Version:"+FFmpegCmd.getInfo()
}
private fun doClick(pos: Int) {
@@ -86,21 +83,17 @@ class MainActivity : BaseActivity() {
-> intent.setClass(this@MainActivity, AudioHandleActivity::class.java)
1 //handle video
-> intent.setClass(this@MainActivity, VideoHandleActivity::class.java)
- 2 //handle media
- -> intent.setClass(this@MainActivity, MediaHandleActivity::class.java)
- 3 //pushing
- -> intent.setClass(this@MainActivity, PushActivity::class.java)
- 4 //realtime living with rtmp stream
+ 2 //realtime living with rtmp stream
-> intent.setClass(this@MainActivity, LiveActivity::class.java)
- 5 //filter effect
+ 3 //filter effect
-> intent.setClass(this@MainActivity, FilterActivity::class.java)
- 6 //preview thumbnail
+ 4 //preview thumbnail
-> intent.setClass(this@MainActivity, VideoPreviewActivity::class.java)
- 7 //probe media format
+ 5 //probe media format
-> intent.setClass(this@MainActivity, ProbeFormatActivity::class.java)
- 8 //audio effect
+ 6 //audio effect
-> intent.setClass(this@MainActivity, AudioEffectActivity::class.java)
- 9 //camera filter
+ 7 //camera filter
-> intent.setClass(this@MainActivity, CameraFilterActivity::class.java)
else -> {
}
diff --git a/app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.kt b/app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.kt
deleted file mode 100644
index 5077429a..00000000
--- a/app/src/main/java/com/frank/ffmpeg/activity/MediaHandleActivity.kt
+++ /dev/null
@@ -1,200 +0,0 @@
-package com.frank.ffmpeg.activity
-
-import android.annotation.SuppressLint
-import android.os.Build
-import android.os.Bundle
-import android.os.Environment
-import android.os.Handler
-import android.os.Message
-import android.util.Log
-import android.view.View
-import android.widget.LinearLayout
-import android.widget.TextView
-
-import com.frank.ffmpeg.FFmpegCmd
-import com.frank.ffmpeg.R
-import com.frank.ffmpeg.handler.FFmpegHandler
-import com.frank.ffmpeg.util.FFmpegUtil
-import com.frank.ffmpeg.util.FileUtil
-import com.frank.ffmpeg.util.ThreadPoolUtil
-
-import java.io.File
-import java.util.Locale
-
-import com.frank.ffmpeg.handler.FFmpegHandler.MSG_BEGIN
-import com.frank.ffmpeg.handler.FFmpegHandler.MSG_FINISH
-import com.frank.ffmpeg.handler.FFmpegHandler.MSG_PROGRESS
-
-/**
- * using ffmpeg to handle media
- * Created by frank on 2018/1/23.
- */
-class MediaHandleActivity : BaseActivity() {
- private val audioFile = PATH + File.separator + "tiger.mp3"
-
- private var layoutProgress: LinearLayout? = null
- private var txtProgress: TextView? = null
- private var viewId: Int = 0
- private var layoutMediaHandle: LinearLayout? = null
- private var ffmpegHandler: FFmpegHandler? = null
-
- @SuppressLint("HandlerLeak")
- private val mHandler = object : Handler() {
- override fun handleMessage(msg: Message) {
- super.handleMessage(msg)
- when (msg.what) {
- MSG_BEGIN -> {
- layoutProgress!!.visibility = View.VISIBLE
- layoutMediaHandle!!.visibility = View.GONE
- }
- MSG_FINISH -> {
- layoutProgress!!.visibility = View.GONE
- layoutMediaHandle!!.visibility = View.VISIBLE
- if (!outputPath.isNullOrEmpty() && !this@MediaHandleActivity.isDestroyed) {
- showToast("Save to:$outputPath")
- outputPath = ""
- }
- // reset progress
- txtProgress!!.text = String.format(Locale.getDefault(), "%d%%", 0)
- }
- MSG_PROGRESS -> {
- val progress = msg.arg1
- if (progress > 0) {
- txtProgress!!.visibility = View.VISIBLE
- txtProgress!!.text = String.format(Locale.getDefault(), "%d%%", progress)
- } else {
- txtProgress!!.visibility = View.INVISIBLE
- }
- }
- else -> {
- }
- }
- }
- }
-
- override val layoutId: Int
- get() = R.layout.activity_media_handle
-
- override fun onCreate(savedInstanceState: Bundle?) {
- super.onCreate(savedInstanceState)
-
- hideActionBar()
- initView()
- ffmpegHandler = FFmpegHandler(mHandler)
- if (Build.VERSION.SDK_INT > Build.VERSION_CODES.Q) {
- PATH = cacheDir.absolutePath
- }
- }
-
- private fun initView() {
- layoutProgress = getView(R.id.layout_progress)
- txtProgress = getView(R.id.txt_progress)
- layoutMediaHandle = getView(R.id.layout_media_handle)
- initViewsWithClick(
- R.id.btn_mux,
- R.id.btn_extract_audio,
- R.id.btn_extract_video,
- R.id.btn_dubbing
- )
- }
-
- override fun onViewClick(view: View) {
- viewId = view.id
- selectFile()
- }
-
- override fun onSelectedFile(filePath: String) {
- doHandleMedia(filePath)
- }
-
- /**
- * Using ffmpeg cmd to handle media
- *
- * @param srcFile srcFile
- */
- private fun doHandleMedia(srcFile: String) {
- var commandLine: Array<String>? = null
- if (!FileUtil.checkFileExist(srcFile)) {
- return
- }
- if (!FileUtil.isVideo(srcFile)) {
- showToast(getString(R.string.wrong_video_format))
- return
- }
-
- when (viewId) {
- R.id.btn_mux//mux:pure video and pure audio
- -> {
- ThreadPoolUtil.executeSingleThreadPool (Runnable { mediaMux(srcFile) })
- return
- }
- R.id.btn_extract_audio//extract audio
- -> {
- outputPath = PATH + File.separator + "extractAudio.aac"
- commandLine = FFmpegUtil.extractAudio(srcFile, outputPath)
- }
- R.id.btn_extract_video//extract video
- -> {
- outputPath = PATH + File.separator + "extractVideo.mp4"
- commandLine = FFmpegUtil.extractVideo(srcFile, outputPath)
- }
- R.id.btn_dubbing//dubbing
- -> {
- ThreadPoolUtil.executeSingleThreadPool (Runnable{ mediaDubbing(srcFile) })
- return
- }
- else -> {
- }
- }
- if (ffmpegHandler != null) {
- ffmpegHandler!!.executeFFmpegCmd(commandLine)
- }
- }
-
- private fun muxVideoAndAudio(videoPath: String, outputPath: String) {
- var commandLine = FFmpegUtil.mediaMux(videoPath, audioFile, true, outputPath)
- var result = FFmpegCmd.executeSync(commandLine)
- if (result != 0) {
- commandLine = FFmpegUtil.mediaMux(videoPath, audioFile, false, outputPath)
- result = FFmpegCmd.executeSync(commandLine)
- Log.e(TAG, "mux audio and video result=$result")
- }
- }
-
- private fun mediaMux(srcFile: String) {
- mHandler.sendEmptyMessage(MSG_BEGIN)
- val suffix = FileUtil.getFileSuffix(srcFile)
- val muxPath = PATH + File.separator + "mux" + suffix
- Log.e(TAG, "muxPath=$muxPath")
- muxVideoAndAudio(srcFile, muxPath)
- mHandler.sendEmptyMessage(MSG_FINISH)
- }
-
- private fun mediaDubbing(srcFile: String) {
- mHandler.sendEmptyMessage(MSG_BEGIN)
- val dubbingSuffix = FileUtil.getFileSuffix(srcFile)
- val dubbingPath = PATH + File.separator + "dubbing" + dubbingSuffix
- val temp = PATH + File.separator + "temp" + dubbingSuffix
- val commandLine1 = FFmpegUtil.extractVideo(srcFile, temp)
- val dubbingResult = FFmpegCmd.executeSync(commandLine1)
- if (dubbingResult != 0) {
- Log.e(TAG, "extract video fail, result=$dubbingResult")
- return
- }
- muxVideoAndAudio(temp, dubbingPath)
- FileUtil.deleteFile(temp)
- mHandler.sendEmptyMessage(MSG_FINISH)
- }
-
- override fun onDestroy() {
- super.onDestroy()
- mHandler.removeCallbacksAndMessages(null)
- }
-
- companion object {
-
- private val TAG = MediaHandleActivity::class.java.simpleName
- private var PATH = Environment.getExternalStorageDirectory().path
- private var outputPath :String ?= null
- }
-}
diff --git a/app/src/main/java/com/frank/ffmpeg/activity/PlayerActivity.kt b/app/src/main/java/com/frank/ffmpeg/activity/PlayerActivity.kt
new file mode 100644
index 00000000..ddd0f92c
--- /dev/null
+++ b/app/src/main/java/com/frank/ffmpeg/activity/PlayerActivity.kt
@@ -0,0 +1,103 @@
+package com.frank.ffmpeg.activity
+
+import android.content.pm.ActivityInfo
+import android.content.res.Configuration
+import android.os.Bundle
+import android.view.WindowManager
+import androidx.appcompat.app.AppCompatActivity
+import com.frank.ffmpeg.R
+import com.frank.ffmpeg.presenter.PlayerPresenter
+
+/**
+ * Note: Activity of player
+ * Date: 2026/1/29 20:20
+ * Author: frank
+ */
+class PlayerActivity : AppCompatActivity() {
+
+ private var isPortrait = true
+ private var mPlayIndex = 0
+ private var mPlayUrls = arrayListOf<String>()
+
+ private var mPlayerPresenter: PlayerPresenter? = null
+
+ override fun onCreate(savedInstanceState: Bundle?) {
+ super.onCreate(savedInstanceState)
+ requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_USER
+ setContentView(R.layout.layout_activity_player)
+ initData()
+
+ mPlayerPresenter = PlayerPresenter()
+ mPlayerPresenter?.initView(window.decorView, this)
+ mPlayerPresenter?.initPlayer(mPlayUrls[0])
+ mPlayerPresenter?.setOnPresenterListener(object : PlayerPresenter.OnPresenterListener {
+ override fun onNextUrl(): String {
+ return getNextUrl()
+ }
+
+ override fun onSwitchOrientation() {
+ switchOrientation()
+ }
+ })
+ }
+
+ private fun initData() {
+ mPlayUrls.add("https://d2rkgq4wjjlyz5.cloudfront.net/e07cd0562d1271ef80032680f9ea0102/h265-ordinary-ld.m3u8")
+ mPlayUrls.add("https://sf1-cdn-tos.huoshanstatic.com/obj/media-fe/xgplayer_doc_video/mp4/xgplayer-demo-360p.mp4")
+ }
+
+ override fun onConfigurationChanged(newConfig: Configuration) {
+ super.onConfigurationChanged(newConfig)
+ if (newConfig.orientation == Configuration.ORIENTATION_LANDSCAPE) {
+ isPortrait = false
+ window.setFlags(
+ WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS,
+ WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS)
+ mPlayerPresenter?.getPlayerView()?.requestLayout()
+ } else if (newConfig.orientation == Configuration.ORIENTATION_PORTRAIT) {
+ isPortrait = true
+ val attrs = window.attributes
+ attrs.flags = attrs.flags and WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS.inv()
+ window.attributes = attrs
+ window.clearFlags(WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS)
+ mPlayerPresenter?.getPlayerView()?.requestLayout()
+ }
+ }
+
+ override fun onResume() {
+ super.onResume()
+ mPlayerPresenter?.getPlayerView()?.start()
+ }
+
+ override fun onPause() {
+ super.onPause()
+ mPlayerPresenter?.getPlayerView()?.pause()
+ }
+
+ override fun onDestroy() {
+ super.onDestroy()
+ mPlayerPresenter?.getPlayerView()?.release()
+ }
+
+
+ // next video
+ private fun getNextUrl(): String {
+ mPlayIndex += 1
+ if (mPlayIndex > (mPlayUrls.size - 1)) {
+ mPlayIndex = 0
+ }
+
+ val index = mPlayIndex % mPlayUrls.size
+ return mPlayUrls[index]
+ }
+
+ // switch orientation
+ private fun switchOrientation() {
+ requestedOrientation = if (isPortrait) {
+ ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
+ } else {
+ ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
+ }
+ }
+
+}
diff --git a/app/src/main/java/com/frank/ffmpeg/activity/PushActivity.kt b/app/src/main/java/com/frank/ffmpeg/activity/PushActivity.kt
deleted file mode 100644
index a2785b88..00000000
--- a/app/src/main/java/com/frank/ffmpeg/activity/PushActivity.kt
+++ /dev/null
@@ -1,71 +0,0 @@
-package com.frank.ffmpeg.activity
-
-import android.os.Bundle
-import android.text.TextUtils
-import android.util.Log
-import android.view.View
-import android.widget.EditText
-
-import com.frank.ffmpeg.FFmpegPusher
-import com.frank.ffmpeg.R
-
-import java.io.File
-
-/**
- * Using FFmpeg to push rtmp stream,
- * with SRS media server convert to http-flv stream
- * Created by frank on 2018/2/2.
- */
-class PushActivity : BaseActivity() {
-
- private var editInputPath: EditText? = null
-
- private var editLiveURL: EditText? = null
-
- override val layoutId: Int
- get() = R.layout.activity_push
-
- override fun onCreate(savedInstanceState: Bundle?) {
- super.onCreate(savedInstanceState)
-
- hideActionBar()
- initView()
- }
-
- private fun initView() {
- editInputPath = getView(R.id.edit_file_path)
- editLiveURL = getView(R.id.edit_live_url)
- editInputPath!!.setText(INPUT_PATH)
- editLiveURL!!.setText(LIVE_URL)
-
- initViewsWithClick(R.id.btn_push_stream)
- }
-
- private fun startPushStreaming() {
- val filePath = editInputPath!!.text.toString()
- val liveUrl = editLiveURL!!.text.toString()
-
- if (!TextUtils.isEmpty(filePath) && !TextUtils.isEmpty(filePath)) {
- Thread(Runnable {
- FFmpegPusher().pushStream(filePath, liveUrl)
- }).start()
- }
- }
-
- override fun onViewClick(view: View) {
- if (view.id == R.id.btn_push_stream) {
- startPushStreaming()
- }
- }
-
- override fun onSelectedFile(filePath: String) {
-
- }
-
- companion object {
-
- // storage/emulated/0/beyond.mp4
- private const val INPUT_PATH = "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"
- private const val LIVE_URL = "rtmp://192.168.17.168/live/stream"
- }
-}
diff --git a/app/src/main/java/com/frank/ffmpeg/activity/VideoPreviewActivity.kt b/app/src/main/java/com/frank/ffmpeg/activity/VideoPreviewActivity.kt
index 151242f7..92b349a1 100644
--- a/app/src/main/java/com/frank/ffmpeg/activity/VideoPreviewActivity.kt
+++ b/app/src/main/java/com/frank/ffmpeg/activity/VideoPreviewActivity.kt
@@ -10,8 +10,8 @@ import android.view.Surface
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.View
-import com.frank.androidmedia.controller.MediaPlayController
-import com.frank.androidmedia.listener.PlayerCallback
+import com.frank.ffmpeg.controller.MediaPlayController
+import com.frank.ffmpeg.listener.PlayerCallback
import com.frank.ffmpeg.R
import com.frank.ffmpeg.view.VideoPreviewBar
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioEffectController.kt b/app/src/main/java/com/frank/ffmpeg/controller/AudioEffectController.kt
similarity index 96%
rename from AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioEffectController.kt
rename to app/src/main/java/com/frank/ffmpeg/controller/AudioEffectController.kt
index 7bac2401..c5d96db5 100644
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/AudioEffectController.kt
+++ b/app/src/main/java/com/frank/ffmpeg/controller/AudioEffectController.kt
@@ -1,9 +1,8 @@
-package com.frank.androidmedia.controller
+package com.frank.ffmpeg.controller
import android.R
import android.content.Context
import android.media.audiofx.*
-import android.os.Build
import android.util.Log
import android.util.Pair
import android.view.View
@@ -11,9 +10,8 @@ import android.widget.AdapterView
import android.widget.ArrayAdapter
import android.widget.SeekBar
import android.widget.Spinner
-import androidx.annotation.RequiresApi
-import com.frank.androidmedia.listener.AudioEffectCallback
-import com.frank.androidmedia.wrap.AudioVisualizer
+import com.frank.ffmpeg.listener.AudioEffectCallback
+import com.frank.ffmpeg.effect.AudioVisualizer
import java.util.ArrayList
/**
@@ -132,7 +130,6 @@ open class AudioEffectController(audioEffectCallback: AudioEffectCallback) {
/**
* Setup AudioEffect of LoudnessEnhancer, which uses to enhance loudness
*/
- @RequiresApi(Build.VERSION_CODES.KITKAT)
fun setLoudnessEnhancer(audioSessionId: Int, barEnhancer: SeekBar) {
mLoudnessEnhancer = LoudnessEnhancer(audioSessionId)
mLoudnessEnhancer!!.enabled = true
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaDecodeController.kt b/app/src/main/java/com/frank/ffmpeg/controller/MediaDecodeController.kt
similarity index 78%
rename from AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaDecodeController.kt
rename to app/src/main/java/com/frank/ffmpeg/controller/MediaDecodeController.kt
index 4e1362a1..8b8938ee 100644
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaDecodeController.kt
+++ b/app/src/main/java/com/frank/ffmpeg/controller/MediaDecodeController.kt
@@ -1,4 +1,4 @@
-package com.frank.androidmedia.controller
+package com.frank.ffmpeg.controller
import android.media.MediaCodec
import android.media.MediaExtractor
@@ -18,65 +18,6 @@ import android.view.Surface
open class MediaDecodeController(val mSurface: Surface, val mFilePath: String, val mCallback: OnDataCallback?) {
- /**
- *
- * MediaExtractor extractor = new MediaExtractor();
- * extractor.setDataSource(...);
- * int numTracks = extractor.getTrackCount();
- * for (int i = 0; i <= numTracks; ++i) {
- * MediaFormat format = extractor.getTrackFormat(i);
- * String mime = format.getString(MediaFormat.KEY_MIME);
- * if (weAreInterestedInThisTrack) {
- * extractor.selectTrack(i);
- * }
- * }
- * ByteBuffer inputBuffer = ByteBuffer.allocate(...)
- * while (extractor.readSampleData(inputBuffer, ...) != 0) {
- * int trackIndex = extractor.getSampleTrackIndex();
- * long presentationTimeUs = extractor.getSampleTime();
- * ...
- * extractor.advance();
- * }
- * extractor.release();
- * extractor = null;
- */
-
-/*
- // MediaCodec is typically used like this in asynchronous mode:
- MediaCodec codec = MediaCodec.createByCodecName(name);
- MediaFormat mOutputFormat;
- codec.setCallback(new MediaCodec.Callback() {
- @Override
- void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
- ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
- codec.queueInputBuffer(inputBufferId, …);
- }
-
- @Override
- void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
- ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
- MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId);
- codec.releaseOutputBuffer(outputBufferId, …);
- }
-
- @Override
- void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
- mOutputFormat = format;
- }
-
- @Override
- void onError(…) {
-
- }
- });
- codec.configure(format, …);
- mOutputFormat = codec.getOutputFormat();
- codec.start();
- // wait for processing to complete
- codec.stop();
- codec.release();
-*/
-
private var videoDecodeThread: VideoDecodeThread? = null
interface OnDataCallback {
@@ -185,7 +126,7 @@ open class MediaDecodeController(val mSurface: Surface, val mFilePath: String, v
mediaExtractor!!.setDataSource(mFilePath)
for (i in 0 until mediaExtractor!!.trackCount) {
mediaFormat = mediaExtractor!!.getTrackFormat(i)
- mimeType = mediaFormat!!.getString(MediaFormat.KEY_MIME)
+ mimeType = mediaFormat.getString(MediaFormat.KEY_MIME)
if (mimeType != null && mimeType.startsWith("video/")) {
mediaExtractor!!.selectTrack(i)
break
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaPlayController.kt b/app/src/main/java/com/frank/ffmpeg/controller/MediaPlayController.kt
similarity index 60%
rename from AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaPlayController.kt
rename to app/src/main/java/com/frank/ffmpeg/controller/MediaPlayController.kt
index 8dcef042..ce742034 100644
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/controller/MediaPlayController.kt
+++ b/app/src/main/java/com/frank/ffmpeg/controller/MediaPlayController.kt
@@ -1,11 +1,10 @@
-package com.frank.androidmedia.controller
+package com.frank.ffmpeg.controller
import android.media.MediaPlayer
-import android.media.PlaybackParams
import android.media.TimedText
import android.util.Log
import android.view.Surface
-import com.frank.androidmedia.listener.PlayerCallback
+import com.frank.ffmpeg.listener.PlayerCallback
import java.io.IOException
/**
@@ -46,14 +45,11 @@ open class MediaPlayController(playerCallback: PlayerCallback) {
playerCallback?.onPrepare()
}
- mediaPlayer!!.setOnInfoListener { mp: MediaPlayer?, what: Int, extra: Int ->
- (
- if (what == MediaPlayer.MEDIA_INFO_VIDEO_RENDERING_START) {
- if (!renderFirstFrame) {
- renderFirstFrame = true
- playerCallback?.onRenderFirstFrame()
- }
- })
+ mediaPlayer!!.setOnInfoListener { mp: MediaPlayer?, what: Int, extra: Int -> (
+ if (what == MediaPlayer.MEDIA_INFO_VIDEO_RENDERING_START && !renderFirstFrame) {
+ renderFirstFrame = true
+ playerCallback?.onRenderFirstFrame()
+ } else {})
return@setOnInfoListener true
}
@@ -90,55 +86,6 @@ open class MediaPlayController(playerCallback: PlayerCallback) {
mediaPlayer?.seekTo(position)
}
- fun togglePlay() {
- if (mediaPlayer == null)
- return
-
- if (mediaPlayer!!.isPlaying) {
- mediaPlayer!!.pause()
- } else {
- mediaPlayer!!.start()
- }
- }
-
- fun getVideoWidth(): Int {
- return mediaPlayer!!.videoWidth
- }
-
- fun getVideoHeight(): Int {
- return mediaPlayer!!.videoHeight
- }
-
- fun mute() {
- mediaPlayer?.setVolume(0.0f, 0.0f)
- }
-
- fun setVolume(volume: Float) {
- if (volume < 0 || volume > 1)
- return
- mediaPlayer?.setVolume(volume, volume)
- }
-
- /**
- * Set playback rate
- */
- fun setSpeed(speed: Float) {
- if (speed <= 0 || speed > 8)
- return
- if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.M) {
- val params = PlaybackParams()
- params.speed = speed
- mediaPlayer?.playbackParams = params
- }
- }
-
- /**
- * Select audio or subtitle track, when there are multi tracks
- */
- fun selectTrack(trackId: Int) {
- mediaPlayer?.selectTrack(trackId)
- }
-
fun releasePlayer() {
if (mediaPlayer != null) {
mediaPlayer!!.stop()
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/wrap/AudioVisualizer.java b/app/src/main/java/com/frank/ffmpeg/effect/AudioVisualizer.java
similarity index 97%
rename from AndroidMedia/src/main/java/com/frank/androidmedia/wrap/AudioVisualizer.java
rename to app/src/main/java/com/frank/ffmpeg/effect/AudioVisualizer.java
index dc32d836..e5b577e5 100644
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/wrap/AudioVisualizer.java
+++ b/app/src/main/java/com/frank/ffmpeg/effect/AudioVisualizer.java
@@ -1,4 +1,4 @@
-package com.frank.androidmedia.wrap;
+package com.frank.ffmpeg.effect;
import android.media.audiofx.Visualizer;
import android.util.Log;
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/listener/AudioEffectCallback.kt b/app/src/main/java/com/frank/ffmpeg/listener/AudioEffectCallback.kt
similarity index 90%
rename from AndroidMedia/src/main/java/com/frank/androidmedia/listener/AudioEffectCallback.kt
rename to app/src/main/java/com/frank/ffmpeg/listener/AudioEffectCallback.kt
index 74a661f6..cddef5a4 100644
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/listener/AudioEffectCallback.kt
+++ b/app/src/main/java/com/frank/ffmpeg/listener/AudioEffectCallback.kt
@@ -1,4 +1,4 @@
-package com.frank.androidmedia.listener
+package com.frank.ffmpeg.listener
import android.util.Pair
import android.widget.SeekBar
diff --git a/AndroidMedia/src/main/java/com/frank/androidmedia/listener/PlayerCallback.kt b/app/src/main/java/com/frank/ffmpeg/listener/PlayerCallback.kt
similarity index 83%
rename from AndroidMedia/src/main/java/com/frank/androidmedia/listener/PlayerCallback.kt
rename to app/src/main/java/com/frank/ffmpeg/listener/PlayerCallback.kt
index 092c5d7f..9de38881 100644
--- a/AndroidMedia/src/main/java/com/frank/androidmedia/listener/PlayerCallback.kt
+++ b/app/src/main/java/com/frank/ffmpeg/listener/PlayerCallback.kt
@@ -1,4 +1,4 @@
-package com.frank.androidmedia.listener
+package com.frank.ffmpeg.listener
/**
*
diff --git a/app/src/main/java/com/frank/ffmpeg/presenter/PlayerPresenter.kt b/app/src/main/java/com/frank/ffmpeg/presenter/PlayerPresenter.kt
new file mode 100644
index 00000000..e2ace6cc
--- /dev/null
+++ b/app/src/main/java/com/frank/ffmpeg/presenter/PlayerPresenter.kt
@@ -0,0 +1,266 @@
+package com.frank.ffmpeg.presenter
+
+import android.annotation.SuppressLint
+import android.app.Activity
+import android.os.Handler
+import android.os.Looper
+import android.util.Log
+import android.view.ScaleGestureDetector
+import android.view.View
+import android.widget.ImageView
+import android.widget.LinearLayout
+import android.widget.RelativeLayout
+import android.widget.SeekBar
+import android.widget.SeekBar.OnSeekBarChangeListener
+import android.widget.TextView
+import com.frank.next.player.IPlayer
+import com.frank.ffmpeg.R
+import com.frank.ffmpeg.util.PlayerUtil
+import com.frank.ffmpeg.util.TimeUtil
+import com.frank.ffmpeg.view.PlayerView
+import kotlin.math.max
+import kotlin.math.min
+import androidx.core.view.isVisible
+
+/**
+ * Note: Presenter of player
+ * Date: 2026/1/28 08:08
+ * Author: frank
+ */
+class PlayerPresenter {
+
+ companion object {
+ const val TAG = "PlayerPresenter"
+ }
+
+ // UI
+ private lateinit var backView: ImageView
+ private lateinit var textPosition: TextView
+ private lateinit var textDuration: TextView
+ private lateinit var progressSeekBar: SeekBar
+ private lateinit var switchDirection: ImageView
+ private lateinit var playPauseButton: ImageView
+ private lateinit var videoPlayerView: PlayerView
+ private lateinit var videoTopLayout: RelativeLayout
+ private lateinit var videoFrameLayout: RelativeLayout
+ private lateinit var videoBottomLayout: RelativeLayout
+
+ // debug information
+ private lateinit var debugInfoLayout: LinearLayout
+ private lateinit var debugInfoNameView: TextView
+ private lateinit var debugContentLayout: View
+ private lateinit var debugInfoValueView: TextView
+
+ // scale
+ private var scaleFactor = 1.0f
+ private var oldScaleFactor = 1.0f
+
+ private lateinit var mContext: Activity
+ private lateinit var mListener: OnPresenterListener
+
+ interface OnPresenterListener {
+
+ fun onNextUrl(): String
+
+ fun onSwitchOrientation()
+
+ }
+
+ fun initView(view: View, context: Activity) {
+ mContext = context
+
+ initDebugInfoView(view)
+ initVideoViewScale(view)
+ initVideoProgressView(view)
+ initVideoControllerView(view)
+ }
+
+ fun setOnPresenterListener(listener: OnPresenterListener) {
+ mListener = listener
+ }
+
+ fun initPlayer(url: String) {
+ videoPlayerView.setVideoPath(url)
+ }
+
+ fun getPlayerView(): PlayerView {
+ return videoPlayerView
+ }
+
+ private fun initVideoControllerView(view: View) {
+ backView = view.findViewById(R.id.img_player_back)
+ textPosition = view.findViewById(R.id.tv_player_position)
+ textDuration = view.findViewById(R.id.tv_player_duration)
+ videoTopLayout = view.findViewById(R.id.layout_player_top)
+ playPauseButton = view.findViewById(R.id.img_player_pause)
+ switchDirection = view.findViewById(R.id.img_screen_switch)
+ videoPlayerView = view.findViewById(R.id.view_player_controller)
+ videoBottomLayout = view.findViewById(R.id.layout_player_bottom)
+
+ backView.setOnClickListener {
+ mContext.finish()
+ }
+ switchDirection.setOnClickListener {
+ mListener.onSwitchOrientation()
+ }
+ playPauseButton.setOnClickListener {
+ if (videoPlayerView.isPlaying()) {
+ videoPlayerView.pause()
+ playPauseButton.setBackgroundResource(R.drawable.ic_play)
+ } else {
+ videoPlayerView.start()
+ playPauseButton.setBackgroundResource(R.drawable.ic_pause)
+ }
+ }
+
+ registerPlayerListener()
+ }
+
+ private fun initVideoProgressView(view: View) {
+ progressSeekBar = view.findViewById(R.id.seekbar_player_progress)
+ progressSeekBar.setOnSeekBarChangeListener(object : OnSeekBarChangeListener {
+ override fun onStartTrackingTouch(p0: SeekBar?) {
+ }
+
+ override fun onStopTrackingTouch(seekBar: SeekBar?) {
+ videoPlayerView.seekTo(seekBar!!.progress)
+ }
+
+ override fun onProgressChanged(
+ seekBar: SeekBar?,
+ progress: Int,
+ fromUser: Boolean
+ ) {
+ if (!fromUser)
+ return
+ progressSeekBar.progress = progress
+ }
+ })
+
+ val handler = Handler(Looper.getMainLooper())
+ val updateSeekBar = object : Runnable {
+ override fun run() {
+ val position = videoPlayerView.getCurrentPosition()
+ if (position > 0) {
+ progressSeekBar.progress = position
+ }
+ textPosition.text = TimeUtil.getVideoTime(position.toLong())
+ videoPlayerView.getPlayer()?.let { updateDebugInfo(it) }
+ handler.postDelayed(this, 1000)
+ }
+ }
+ handler.post(updateSeekBar)
+ }
+
+ @SuppressLint("ClickableViewAccessibility")
+ private fun initVideoViewScale(view: View) {
+ videoFrameLayout = view.findViewById(R.id.layout_player_root)
+ val scaleGestureDetector = ScaleGestureDetector(
+ mContext,
+ object : ScaleGestureDetector.SimpleOnScaleGestureListener() {
+ override fun onScale(detector: ScaleGestureDetector): Boolean {
+ oldScaleFactor = scaleFactor
+ scaleFactor *= detector.scaleFactor
+ scaleFactor = max(1.0f, min(scaleFactor, 5.0f))
+ videoPlayerView.setViewScale(scaleFactor)
+ return true
+ }
+ })
+
+ videoFrameLayout.setOnTouchListener { _, event ->
+ scaleGestureDetector.onTouchEvent(event)
+ true
+ }
+ }
+
+ private fun initDebugInfoView(view: View) {
+ debugInfoLayout = view.findViewById(R.id.layout_realtime_info)
+ debugInfoNameView = view.findViewById(R.id.tv_info_name)
+ debugInfoValueView = view.findViewById(R.id.tv_info_value)
+ debugContentLayout = view.findViewById(R.id.layout_player_info)
+ debugContentLayout.visibility = View.VISIBLE
+ debugContentLayout.setOnClickListener {
+ debugInfoLayout.visibility = if (debugInfoLayout.isVisible)
+ View.INVISIBLE
+ else
+ View.VISIBLE
+ }
+ }
+
+ private fun updateDebugInfo(mediaPlayer: IPlayer) {
+ val debugInfo = PlayerUtil.getDebugInfo(mediaPlayer, videoPlayerView.getRenderViewType())
+ debugInfoNameView.text = debugInfo.first
+ debugInfoValueView.text = debugInfo.second
+ }
+
+ private fun registerPlayerListener() {
+ videoPlayerView.setOnPreparedListener { mp ->
+ Log.i(TAG, "onPrepared, duration=" + mp.duration)
+ }
+ videoPlayerView.setOnInfoListener { what, extra ->
+ handleVideoInfoEvent(what, extra)
+ }
+ videoPlayerView.setOnPlayingListener {playing ->
+ if (playing) {
+ playPauseButton.setBackgroundResource(R.drawable.ic_pause)
+ } else {
+ playPauseButton.setBackgroundResource(R.drawable.ic_play)
+ }
+ }
+ videoPlayerView.setOnVideoSizeChangedListener { width, height ->
+ Log.i(TAG, "onVideoSizeChanged, width=$width, height=$height")
+ }
+ videoPlayerView.setOnErrorListener { what, extra ->
+ run {
+ Log.i(TAG, "onError, what=$what, extra=$extra")
+ return@run true
+ }
+ }
+ videoPlayerView.setOnSeekCompleteListener {
+ Log.i(TAG, "onSeekComplete...")
+ }
+ videoPlayerView.setOnCompleteListener {
+ playPauseButton.setBackgroundResource(R.drawable.ic_play)
+ handleVideoPlayCompleteEvent()
+ }
+ }
+
+ private fun handleVideoPlayCompleteEvent() {
+ videoPlayerView.seekTo(0)
+ val autoPlayNext = true
+ val autoLoop = false
+ when {
+ autoPlayNext -> {
+ val nextUrl = mListener.onNextUrl()
+ videoPlayerView.switchVideo(nextUrl)
+ }
+
+ autoLoop -> {
+ videoPlayerView.start()
+ }
+ }
+ }
+
+ private fun handleVideoInfoEvent(what: Int, extra: Int): Boolean {
+ when (what) {
+ IPlayer.MSG_OPEN_INPUT -> {
+
+ }
+ IPlayer.MSG_FIND_STREAM_INFO -> {
+
+ }
+ IPlayer.MSG_VIDEO_FIRST_PACKET -> {
+
+ }
+ IPlayer.MSG_VIDEO_DECODE_START -> {
+
+ }
+ IPlayer.MSG_VIDEO_RENDER_START -> {
+ textDuration.text = TimeUtil.getVideoTime(videoPlayerView.getDuration().toLong())
+ progressSeekBar.max = videoPlayerView.getDuration()
+ }
+ }
+ return true
+ }
+
+}
diff --git a/app/src/main/java/com/frank/ffmpeg/tool/LrcParser.kt b/app/src/main/java/com/frank/ffmpeg/tool/LrcParser.kt
index 0b55bee1..b5a12912 100644
--- a/app/src/main/java/com/frank/ffmpeg/tool/LrcParser.kt
+++ b/app/src/main/java/com/frank/ffmpeg/tool/LrcParser.kt
@@ -28,7 +28,7 @@ class LrcParser {
try {
var charsetName: String? = getCharsetName(path)
inputStream = FileInputStream(path)
- if (charsetName!!.toLowerCase(Locale.getDefault()) == "utf-8") {
+ if (charsetName!!.lowercase(Locale.getDefault()) == "utf-8") {
inputStream = UnicodeInputStream(inputStream, charsetName)
charsetName = inputStream.getEncoding()
}
diff --git a/app/src/main/java/com/frank/ffmpeg/util/FileUtil.kt b/app/src/main/java/com/frank/ffmpeg/util/FileUtil.kt
index 614540f0..93a02bd2 100644
--- a/app/src/main/java/com/frank/ffmpeg/util/FileUtil.kt
+++ b/app/src/main/java/com/frank/ffmpeg/util/FileUtil.kt
@@ -4,9 +4,9 @@ import android.text.TextUtils
import android.util.Log
import java.io.File
-import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.IOException
+import java.util.Locale.getDefault
/**
* file tool
@@ -39,55 +39,6 @@ object FileUtil {
private const val TYPE_MOV = "mov"
private const val TYPE_MPG = "mpg"
- fun concatFile(srcFilePath: String, appendFilePath: String, concatFilePath: String): Boolean {
- if (TextUtils.isEmpty(srcFilePath)
- || TextUtils.isEmpty(appendFilePath)
- || TextUtils.isEmpty(concatFilePath)) {
- return false
- }
- val srcFile = File(srcFilePath)
- if (!srcFile.exists()) {
- return false
- }
- val appendFile = File(appendFilePath)
- if (!appendFile.exists()) {
- return false
- }
- var outputStream: FileOutputStream? = null
- var inputStream1: FileInputStream? = null
- var inputStream2: FileInputStream? = null
- try {
- inputStream1 = FileInputStream(srcFile)
- inputStream2 = FileInputStream(appendFile)
- outputStream = FileOutputStream(File(concatFilePath))
- val data = ByteArray(1024)
- var len = 0
- while (len >= 0) {
- len = inputStream1.read(data)
- if (len > 0) outputStream.write(data, 0, len)
- }
- outputStream.flush()
- len = 0
- while (len >= 0) {
- len = inputStream2.read(data)
- if (len > 0) outputStream.write(data, 0, len)
- }
- outputStream.flush()
- } catch (e: IOException) {
- e.printStackTrace()
- } finally {
- try {
- inputStream1?.close()
- inputStream2?.close()
- outputStream?.close()
- } catch (e: IOException) {
- e.printStackTrace()
- }
-
- }
- return true
- }
-
/**
* check the file exist or not
*
@@ -111,7 +62,7 @@ object FileUtil {
if (TextUtils.isEmpty(path)) {
return false
}
- path = path.toLowerCase()
+ path = path.lowercase(getDefault())
return (path.endsWith(TYPE_MP3)
|| path.endsWith(TYPE_AAC)
|| path.endsWith(TYPE_AMR)
@@ -130,7 +81,7 @@ object FileUtil {
if (TextUtils.isEmpty(path)) {
return false
}
- path = path.toLowerCase()
+ path = path.lowercase(getDefault())
return (path.endsWith(TYPE_MP4)
|| path.endsWith(TYPE_MKV)
|| path.endsWith(TYPE_WEBM)
@@ -150,18 +101,6 @@ object FileUtil {
} else fileName.substring(fileName.lastIndexOf("."))
}
- fun getFilePath(filePath: String): String? {
- return if (TextUtils.isEmpty(filePath) || !filePath.contains("/")) {
- null
- } else filePath.substring(0, filePath.lastIndexOf("/"))
- }
-
- fun getFileName(filePath: String): String? {
- return if (TextUtils.isEmpty(filePath) || !filePath.contains("/")) {
- null
- } else filePath.substring(filePath.lastIndexOf("/") + 1)
- }
-
fun createListFile(listPath: String, fileList: List?): String? {
if (TextUtils.isEmpty(listPath) || fileList == null || fileList.isEmpty()) {
return null
@@ -169,7 +108,7 @@ object FileUtil {
var outputStream: FileOutputStream? = null
try {
val listFile = File(listPath)
- if (!listFile.parentFile.exists()) {
+ if (listFile.parentFile?.exists() == false) {
if (!listFile.mkdirs()) {
return null
}
@@ -227,7 +166,7 @@ object FileUtil {
return tempFile.mkdir()
}
if (tempFile.isDirectory && tempFile.listFiles() != null) {
- val files = tempFile.listFiles()
+ val files = tempFile.listFiles() ?: return false
for (file in files) {
result = result and file.delete()
}
diff --git a/app/src/main/java/com/frank/ffmpeg/util/FilterTypeUtil.kt b/app/src/main/java/com/frank/ffmpeg/util/FilterTypeUtil.kt
deleted file mode 100644
index 4a9a7d39..00000000
--- a/app/src/main/java/com/frank/ffmpeg/util/FilterTypeUtil.kt
+++ /dev/null
@@ -1,30 +0,0 @@
-package com.frank.ffmpeg.util
-
-import com.frank.camerafilter.factory.BeautyFilterType
-import com.frank.ffmpeg.R
-
-/**
- * @author xufulong
- * @date 2022/10/17 5:39 下午
- * @desc
- */
-object FilterTypeUtil {
-
- fun filterTypeToNameId(type: BeautyFilterType): Int {
- return when (type) {
- BeautyFilterType.NONE -> R.string.camera_filter_none
- BeautyFilterType.BRIGHTNESS -> R.string.camera_filter_brightness
- BeautyFilterType.SATURATION -> R.string.camera_filter_saturation
- BeautyFilterType.CONTRAST -> R.string.camera_filter_contrast
- BeautyFilterType.SHARPEN -> R.string.camera_filter_sharpen
- BeautyFilterType.BLUR -> R.string.camera_filter_blur
- BeautyFilterType.HUE -> R.string.camera_filter_hue
- BeautyFilterType.WHITE_BALANCE -> R.string.camera_filter_balance
- BeautyFilterType.SKETCH -> R.string.camera_filter_sketch
- BeautyFilterType.OVERLAY -> R.string.camera_filter_overlay
- BeautyFilterType.BREATH_CIRCLE -> R.string.camera_filter_circle
- else -> R.string.camera_filter_none
- }
- }
-
-}
\ No newline at end of file
diff --git a/app/src/main/java/com/frank/ffmpeg/util/ImageConverter.java b/app/src/main/java/com/frank/ffmpeg/util/ImageConverter.java
deleted file mode 100644
index 856dbc9b..00000000
--- a/app/src/main/java/com/frank/ffmpeg/util/ImageConverter.java
+++ /dev/null
@@ -1,164 +0,0 @@
-package com.frank.ffmpeg.util;
-
-import android.graphics.Bitmap;
-import android.graphics.BitmapFactory;
-import android.graphics.ImageFormat;
-import android.graphics.Matrix;
-import android.graphics.Rect;
-import android.graphics.YuvImage;
-import android.util.Log;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-
-public class ImageConverter {
-
- private byte[] yuv;
- private int[] argb;
- private Rect mRect;
- private int[] pixels;
-
- private final Matrix mMatrix;
- private ByteArrayOutputStream mOutputStream;
- private final static boolean useSystem = false;
-
- public ImageConverter() {
- mMatrix = new Matrix();
- }
-
- public Bitmap yuv2bitmap(byte[] data, int width, int height) {
- if (useSystem) {
- return yuv2bitmapSystem(data, width, height);
- } else {
- return yuvToBitmapFormula(data, width, height);
- }
- }
-
- private Bitmap yuv2bitmapSystem(byte[] data, int width, int height) {
- Bitmap bmp = null;
- try {
- YuvImage image = new YuvImage(data, ImageFormat.NV21, width, height, null);
- if (mRect == null) {
- mRect = new Rect(0, 0, width, height);
- }
- if (mOutputStream == null) {
- mOutputStream = new ByteArrayOutputStream();
- } else {
- mOutputStream.reset();
- }
- image.compressToJpeg(mRect, 100, mOutputStream);
- bmp = BitmapFactory.decodeByteArray(mOutputStream.toByteArray(), 0, mOutputStream.size());
- } catch (Exception ex) {
- Log.e("ImageConverter", "Error:" + ex.getMessage());
- }
- return bmp;
- }
-
- private int yuvToARGB(int y, int u, int v) {
- int r, g, b;
-
- r = y + (int) (1.402f * u);
- g = y - (int) (0.344f * v + 0.714f * u);
- b = y + (int) (1.772f * v);
- r = r > 255 ? 255 : Math.max(r, 0);
- g = g > 255 ? 255 : Math.max(g, 0);
- b = b > 255 ? 255 : Math.max(b, 0);
- return 0xff000000 | (r << 16) | (g << 8) | b;
- }
-
- private Bitmap yuvToBitmapFormula(byte[] data, int width, int height) {
- if (pixels == null) {
- pixels = new int[width * height];
- }
- int size = width * height;
- int offset = size;
- int u, v, y1, y2, y3, y4;
-
- for (int i = 0, k = 0; i < size; i += 2, k += 2) {
- y1 = data[i] & 0xff;
- y2 = data[i + 1] & 0xff;
- y3 = data[width + i] & 0xff;
- y4 = data[width + i + 1] & 0xff;
-
- u = data[offset + k] & 0xff;
- v = data[offset + k + 1] & 0xff;
- u = u - 128;
- v = v - 128;
-
- pixels[i] = yuvToARGB(y1, u, v);
- pixels[i + 1] = yuvToARGB(y2, u, v);
- pixels[width + i] = yuvToARGB(y3, u, v);
- pixels[width + i + 1] = yuvToARGB(y4, u, v);
-
- if (i != 0 && (i + 2) % width == 0)
- i += width;
- }
- return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.ARGB_8888);
- }
-
- public Bitmap rotateBitmap(Bitmap source, float angle) {
- mMatrix.reset();
- mMatrix.postRotate(angle);
- return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(),
- mMatrix, true);
- }
-
- public Bitmap rotateAndFlipBitmap(Bitmap source, float angle, boolean xFlip, boolean yFlip) {
- mMatrix.reset();
- mMatrix.postRotate(angle);
- mMatrix.postScale(xFlip ? -1 : 1, yFlip ? -1 : 1,
- source.getWidth() / 2f, source.getHeight() / 2f);
- return Bitmap.createBitmap(source, 0, 0,
- source.getWidth(), source.getHeight(), mMatrix, true);
- }
-
-
- public byte[] rgbaToYUV420p(Bitmap bitmap, int width, int height) {
- final int frameSize = width * height;
- int index = 0;
- int yIndex = 0;
- int uIndex = frameSize;
- int vIndex = frameSize * 5 / 4;
- int R, G, B, Y, U, V;
-
- if (argb == null) {
- argb = new int[width * height];
- }
- if (yuv == null) {
- yuv = new byte[width * height * 3 / 2];
- }
- bitmap.getPixels(argb, 0, width, 0, 0, width, height);
-
- for (int j = 0; j < height; j++) {
- for (int i = 0; i < width; i++) {
- R = (argb[index] & 0xff0000) >> 16;
- G = (argb[index] & 0xff00) >> 8;
- B = (argb[index] & 0xff);
-
- // RGB to YUV algorithm
- Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
-
- yuv[yIndex++] = (byte) Y; // -128~127
- if (j % 2 == 0 && i % 2 == 0) {
- U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
- V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
- yuv[uIndex++] = (byte) U;
- yuv[vIndex++] = (byte) V;
- }
- index++;
- }
- }
- return yuv;
- }
-
- public void close() {
- if (mOutputStream != null) {
- try {
- mOutputStream.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
- }
-
-}
diff --git a/app/src/main/java/com/frank/ffmpeg/util/PlayerUtil.kt b/app/src/main/java/com/frank/ffmpeg/util/PlayerUtil.kt
new file mode 100644
index 00000000..8854a490
--- /dev/null
+++ b/app/src/main/java/com/frank/ffmpeg/util/PlayerUtil.kt
@@ -0,0 +1,84 @@
+package com.frank.ffmpeg.util
+
+import com.frank.next.player.IPlayer
+import com.frank.ffmpeg.view.PlayerView.Companion.RENDER_TYPE_SURFACE_VIEW
+import com.frank.ffmpeg.view.PlayerView.Companion.RENDER_TYPE_TEXTURE_VIEW
+
+import java.util.Locale
+
+/**
+ * Note: debug util of player
+ * Date: 2026/1/26 21:21
+ * Author: frank
+ */
+
+object PlayerUtil {
+
+ private fun formatDuration(duration: Long): String {
+ return when {
+ duration >= 1000 -> {
+ String.format(Locale.US, "%.2f sec", duration.toFloat() / 1000)
+ }
+
+ else -> {
+ String.format(Locale.US, "%d msec", duration)
+ }
+ }
+ }
+
+ private fun formatSize(bytes: Long): String {
+ return when {
+ bytes >= 100 * 1024 -> {
+ String.format(Locale.US, "%.2f MB", bytes.toFloat() / 1024 / 1024)
+ }
+
+ bytes >= 100 -> {
+ String.format(Locale.US, "%.1f KB", bytes.toFloat() / 1024)
+ }
+
+ else -> {
+ String.format(Locale.US, "%d B", bytes)
+ }
+ }
+ }
+
+ fun getDebugInfo(player: IPlayer, renderViewType: Int): Pair {
+ val videoCodec = player.videoCodecInfo
+ val audioCodec = player.audioCodecInfo
+ val resolution =
+ String.format(Locale.US, "%d * %d", player.videoWidth, player.videoHeight)
+ val fps = String.format(
+ Locale.ENGLISH,
+ "%d / %d / %d",
+ player.videoDecodeFrameRate.toInt(),
+ player.videoRenderFrameRate.toInt(),
+ player.videoFrameRate.toInt()
+ )
+ val bitRate = String.format(Locale.US, "%.2f kbps", player.bitRate / 1000f)
+ val vCache = String.format(
+ Locale.US,
+ "%s / %s",
+ formatDuration(player.videoCacheTime),
+ formatSize(player.getVideoCacheSize()))
+ val aCache = String.format(
+ Locale.US,
+ "%s / %s",
+ formatDuration(player.audioCacheTime),
+ formatSize(player.getAudioCacheSize())
+ )
+ val seekTime = player.seekCostTime
+ val surfaceType = if (renderViewType == RENDER_TYPE_SURFACE_VIEW) {
+ "SurfaceView"
+ } else if (renderViewType == RENDER_TYPE_TEXTURE_VIEW) {
+ "TextureView"
+ } else {
+ "Unknown"
+ }
+ val url = player.playUrl
+ val debugInfoName =
+ "video_codec\n" + "audio_codec\n" + "resolution\n" + "fps\n" + "bitrate\n" + "v_cache\n" + "a_cache\n" + "seek_time\n" + "surface\n" + "url"
+ val debugInfoValue =
+ "${videoCodec}\n" + "${audioCodec}\n" + "${resolution}\n" + "${fps}\n" + "${bitRate}\n" + "${vCache}\n" + "${aCache}\n" + "${seekTime}ms\n" + "${surfaceType}\n" + url
+ return Pair(debugInfoName, debugInfoValue)
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/frank/ffmpeg/view/PlayerView.kt b/app/src/main/java/com/frank/ffmpeg/view/PlayerView.kt
new file mode 100644
index 00000000..eccba4a0
--- /dev/null
+++ b/app/src/main/java/com/frank/ffmpeg/view/PlayerView.kt
@@ -0,0 +1,417 @@
+package com.frank.ffmpeg.view
+
+import android.content.Context
+import android.media.AudioManager
+import android.media.MediaPlayer
+import android.util.AttributeSet
+import android.util.Log
+import android.view.Gravity
+import android.widget.FrameLayout
+import com.frank.ffmpeg.FFmpegApplication
+import com.frank.next.player.IPlayer
+import com.frank.next.player.NextPlayer
+import com.frank.next.renderview.IRenderView
+import com.frank.next.renderview.IRenderView.IRenderCallback
+import com.frank.next.renderview.IRenderView.ISurfaceHolder
+import com.frank.next.renderview.SurfaceRenderView
+import com.frank.next.renderview.TextureRenderView
+
+/**
+ * Note: View Controller layer of player
+ * Date: 2026/1/27 20:20
+ * Author: frank
+ */
+class PlayerView : FrameLayout {
+
+ companion object {
+ private const val TAG = "PlayerView"
+
+ private const val STATE_ERROR = -1
+ private const val STATE_IDLE = 0
+ private const val STATE_PREPARING = 1
+ private const val STATE_PREPARED = 2
+ private const val STATE_PLAYING = 3
+ private const val STATE_PAUSED = 4
+ private const val STATE_COMPLETED = 5
+
+ const val RENDER_TYPE_SURFACE_VIEW = 1
+ const val RENDER_TYPE_TEXTURE_VIEW = 2
+
+ private val scaleModeList = intArrayOf(
+ IRenderView.RENDER_MODE_ASPECT_FIT,
+ IRenderView.RENDER_MODE_ASPECT_FILL,
+ IRenderView.RENDER_MODE_WRAP,
+ IRenderView.RENDER_MODE_16_9,
+ IRenderView.RENDER_MODE_4_3
+ )
+
+ }
+
+ private var mVideoWidth = 0
+ private var mVideoHeight = 0
+ private var mSurfaceWidth = 0
+ private var mSurfaceHeight = 0
+ private var mSeekWhenPrepared = 0
+
+ private var mUrl: String? = null
+ private var mCurrentState = STATE_IDLE
+ private var mAppContext: Context? = null
+ private var mRenderView: IRenderView? = null
+ private var mStudioPlayer: IPlayer? = null
+ private var mSurfaceHolder: ISurfaceHolder? = null
+ private val mCurrentAspectRatio = scaleModeList[0]
+
+ private var mOnInfoListener: IPlayer.OnInfoListener? = null
+ private var mOnErrorListener: IPlayer.OnErrorListener? = null
+ private var mOnPlayingListener: IPlayer.OnPlayingListener? = null
+ private var mOnPreparedListener: IPlayer.OnPreparedListener? = null
+ private var mOnCompleteListener: IPlayer.OnCompleteListener? = null
+ private var mOnSeekCompleteListener: IPlayer.OnSeekCompleteListener? = null
+ private var mOnBufferingUpdateListener: IPlayer.OnBufferUpdateListener? = null
+ private var mOnVideoSizeChangedListener: IPlayer.OnVideoSizeChangedListener? = null
+
+ constructor(context: Context) : super(context) {
+ initVideoView(context)
+ }
+
+ constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
+ initVideoView(context)
+ }
+
+ constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int)
+ : super(context, attrs, defStyleAttr) {
+ initVideoView(context)
+ }
+
+ private fun initVideoView(context: Context) {
+ mAppContext = context.applicationContext
+ mVideoWidth = 0
+ mVideoHeight = 0
+ mCurrentState = STATE_IDLE
+ requestFocus()
+ initRender()
+ }
+
+ private fun setRenderView(renderView: IRenderView?) {
+ if (renderView == null)
+ return
+ if (mRenderView != null) {
+ if (mStudioPlayer != null)
+ mStudioPlayer!!.setDisplay(null)
+ val renderUIView = mRenderView!!.view
+ mRenderView!!.removeRenderCallback(mSurfaceCallback)
+ mRenderView = null
+ removeView(renderUIView)
+ }
+ mRenderView = renderView
+ renderView.setAspectRatio(mCurrentAspectRatio)
+ if (mVideoWidth > 0 && mVideoHeight > 0)
+ renderView.setVideoSize(mVideoWidth, mVideoHeight)
+ val lp = LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT, Gravity.CENTER)
+ mRenderView!!.view.layoutParams = lp
+ addView(mRenderView!!.view)
+ mRenderView!!.addRenderCallback(mSurfaceCallback)
+ }
+
+ private fun initRender() {
+ // use SurfaceView or TextureView
+ if (FFmpegApplication.getInstance().enableSurfaceView()) {
+ val renderView = SurfaceRenderView(context)
+ setRenderView(renderView)
+ } else {
+ val renderView = TextureRenderView(context)
+ if (mStudioPlayer != null) {
+ renderView.surfaceHolder.bindPlayer(mStudioPlayer)
+ renderView.setVideoSize(mStudioPlayer!!.videoWidth, mStudioPlayer!!.videoHeight)
+ renderView.setVideoAspectRatio(
+ mStudioPlayer!!.videoSarNum,
+ mStudioPlayer!!.videoSarDen
+ )
+ renderView.setAspectRatio(mCurrentAspectRatio)
+ }
+ setRenderView(renderView)
+ }
+ }
+
+ private fun bindSurfaceHolder(mp: IPlayer?, holder: ISurfaceHolder?) {
+ if (mp == null)
+ return
+ if (holder == null) {
+ mp.setDisplay(null)
+ return
+ }
+ holder.bindPlayer(mp)
+ }
+
+ private var mSurfaceCallback: IRenderCallback = object : IRenderCallback {
+ override fun onSurfaceCreated(holder: ISurfaceHolder, width: Int, height: Int) {
+ mSurfaceHolder = holder
+ if (mStudioPlayer != null) {
+ bindSurfaceHolder(mStudioPlayer, holder)
+ } else {
+ openVideo()
+ }
+ }
+
+ override fun onSurfaceChanged(holder: ISurfaceHolder, format: Int, w: Int, h: Int) {
+ mSurfaceWidth = w
+ mSurfaceHeight = h
+ }
+
+ override fun onSurfaceDestroyed(holder: ISurfaceHolder) {
+ mSurfaceHolder = null
+ mStudioPlayer?.setDisplay(null)
+ }
+ }
+
+ private fun createMediaPlayer(): IPlayer {
+ val nextPlayer: IPlayer = NextPlayer()
+ nextPlayer.setEnableMediaCodec(FFmpegApplication.getInstance().useMediaCodec())
+ nextPlayer.setLiveMode(false)
+ return nextPlayer
+ }
+
+ private fun openVideo() {
+ if (mUrl!!.isEmpty() || mSurfaceHolder == null) {
+ return
+ }
+ release()
+ val am = mAppContext!!.getSystemService(Context.AUDIO_SERVICE) as AudioManager
+ am.requestAudioFocus(null, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN)
+ try {
+ mStudioPlayer = createMediaPlayer()
+ // set listener
+ mStudioPlayer!!.setOnPreparedListener(mPreparedListener)
+ mStudioPlayer!!.setOnVideoSizeChangedListener(mSizeChangedListener)
+ mStudioPlayer!!.setOnCompletionListener(mCompleteListener)
+ mStudioPlayer!!.setOnErrorListener(mErrorListener)
+ mStudioPlayer!!.setOnInfoListener(mInfoListener)
+ mStudioPlayer!!.setOnBufferingUpdateListener(mBufferingUpdateListener)
+ mStudioPlayer!!.setOnSeekCompleteListener(mSeekCompleteListener)
+ // set data source
+ mStudioPlayer!!.dataSource = mUrl
+ bindSurfaceHolder(mStudioPlayer, mSurfaceHolder)
+ mStudioPlayer!!.setScreenOnWhilePlaying(true)
+ // prepare
+ mStudioPlayer!!.prepareAsync()
+ mCurrentState = STATE_PREPARING
+ } catch (e: Exception) {
+ e.printStackTrace()
+ mCurrentState = STATE_ERROR
+ mErrorListener.onError(MediaPlayer.MEDIA_ERROR_UNKNOWN, 0)
+ }
+ }
+
+ private fun isInPlaybackState(): Boolean {
+ return (mStudioPlayer != null &&
+ mCurrentState != STATE_ERROR &&
+ mCurrentState != STATE_IDLE &&
+ mCurrentState != STATE_PREPARING)
+ }
+
+ /******************************player listener***********************************/
+
+ private val mPreparedListener: IPlayer.OnPreparedListener = object : IPlayer.OnPreparedListener {
+ override fun onPrepared(mp: IPlayer) {
+ mCurrentState = STATE_PREPARED
+ mVideoWidth = mp.videoWidth
+ mVideoHeight = mp.videoHeight
+ mOnPreparedListener?.onPrepared(mStudioPlayer)
+ if (mSeekWhenPrepared > 0) {
+ seekTo(mSeekWhenPrepared)
+ mSeekWhenPrepared = 0
+ }
+ if (mRenderView != null) {
+ mRenderView!!.setVideoSize(mVideoWidth, mVideoHeight)
+ }
+
+ start()
+ }
+ }
+
+ private val mSizeChangedListener = IPlayer.OnVideoSizeChangedListener { width, height ->
+ mVideoWidth = width
+ mVideoHeight = height
+ if (mVideoWidth != 0 && mVideoHeight != 0) {
+ if (mRenderView != null) {
+ mRenderView!!.setVideoSize(mVideoWidth, mVideoHeight)
+ }
+ requestLayout()
+ }
+ if (mOnVideoSizeChangedListener != null) {
+ mOnVideoSizeChangedListener!!.onVideoSizeChanged(width, height)
+ }
+ }
+
+ private val mInfoListener = IPlayer.OnInfoListener {arg1, arg2 ->
+ mOnInfoListener?.onInfo(arg1, arg2)
+ when (arg1) {
+ IPlayer.MSG_AUDIO_RENDER_START -> Log.i(TAG, "onAudioRenderFirstFrame")
+ IPlayer.MSG_VIDEO_RENDER_START -> Log.i(TAG, "onVideoRenderFirstFrame")
+ IPlayer.MSG_BUFFER_START -> Log.i(TAG, "onBufferStart")
+ IPlayer.MSG_BUFFER_END -> Log.i(TAG, "onBufferEnd")
+ IPlayer.MSG_ROTATION_CHANGED -> {
+ Log.d(TAG, "onRotationChanged: $arg2")
+ mRenderView?.setVideoRotation(arg2)
+ }
+ }
+ true
+ }
+
+ private val mErrorListener: IPlayer.OnErrorListener = object : IPlayer.OnErrorListener {
+ override fun onError(kernelError: Int, sdkError: Int): Boolean {
+ Log.e(TAG, "Error: $kernelError, $sdkError")
+ mCurrentState = STATE_ERROR
+ mOnErrorListener?.onError(kernelError, sdkError)
+ return true
+ }
+ }
+
+ private val mBufferingUpdateListener = IPlayer.OnBufferUpdateListener { progress ->
+ mOnBufferingUpdateListener?.onBufferUpdate(progress)
+ }
+
+ private val mSeekCompleteListener = IPlayer.OnSeekCompleteListener { mp: IPlayer? ->
+ mOnSeekCompleteListener?.onSeekComplete(mp)
+ }
+
+ private val mCompleteListener: IPlayer.OnCompleteListener = object : IPlayer.OnCompleteListener {
+ override fun onComplete(mp: IPlayer) {
+ mCurrentState = STATE_COMPLETED
+ mOnCompleteListener?.onComplete(mStudioPlayer)
+ }
+ }
+
+ fun setOnPreparedListener(onPreparedListener: IPlayer.OnPreparedListener?) {
+ mOnPreparedListener = onPreparedListener
+ }
+
+ fun setOnInfoListener(onInfoListener: IPlayer.OnInfoListener?) {
+ mOnInfoListener = onInfoListener
+ }
+
+ fun setOnBufferUpdateListener(onBufferUpdateListener: IPlayer.OnBufferUpdateListener?) {
+ mOnBufferingUpdateListener = onBufferUpdateListener
+ }
+
+ fun setOnVideoSizeChangedListener(onVideoSizeChangedListener: IPlayer.OnVideoSizeChangedListener?) {
+ mOnVideoSizeChangedListener = onVideoSizeChangedListener
+ }
+
+ fun setOnErrorListener(onErrorListener: IPlayer.OnErrorListener?) {
+ mOnErrorListener = onErrorListener
+ }
+
+ fun setOnSeekCompleteListener(onSeekCompleteListener: IPlayer.OnSeekCompleteListener?) {
+ mOnSeekCompleteListener = onSeekCompleteListener
+ }
+
+ fun setOnCompleteListener(onCompleteListener: IPlayer.OnCompleteListener?) {
+ mOnCompleteListener = onCompleteListener
+ }
+
+ fun setOnPlayingListener(onPlayingListener: IPlayer.OnPlayingListener) {
+ mOnPlayingListener = onPlayingListener
+ }
+
+ /******************************player control***********************************/
+
+ fun setVideoPath(path: String?) {
+ mUrl = path
+ mSeekWhenPrepared = 0
+ openVideo()
+ }
+
+ fun switchVideo(path: String?) {
+ setVideoPath(path)
+ }
+
+ fun start() {
+ if (!isInPlaybackState())
+ return
+ mStudioPlayer!!.start()
+ mCurrentState = STATE_PLAYING
+ mOnPlayingListener?.onPlaying(true)
+ }
+
+ fun pause() {
+ if (!isInPlaybackState())
+ return
+ if (mStudioPlayer!!.isPlaying) {
+ mStudioPlayer!!.pause()
+ mCurrentState = STATE_PAUSED
+ mOnPlayingListener?.onPlaying(false)
+ }
+ }
+
+ fun seekTo(msec: Int) {
+ if (isInPlaybackState()) {
+ mStudioPlayer!!.seekTo(msec.toLong())
+ mSeekWhenPrepared = 0
+ } else {
+ mSeekWhenPrepared = msec
+ }
+ }
+
+ fun stop() {
+ if (!isInPlaybackState())
+ return
+ mStudioPlayer?.stop()
+ mCurrentState = STATE_IDLE
+ }
+
+ fun release() {
+ if (mStudioPlayer != null) {
+ mStudioPlayer!!.pause()
+ mStudioPlayer!!.release()
+ mStudioPlayer = null
+ }
+ mCurrentState = STATE_IDLE
+ val am = mAppContext!!.getSystemService(Context.AUDIO_SERVICE) as AudioManager
+ am.abandonAudioFocus(null)
+ }
+
+ /******************************get method***********************************/
+
+ fun isPlaying(): Boolean {
+ return isInPlaybackState() && mStudioPlayer!!.isPlaying
+ }
+
+ fun getCurrentPosition(): Int {
+ if (!isInPlaybackState()) {
+ return 0
+ }
+ return mStudioPlayer!!.currentPosition.toInt()
+ }
+
+ fun getDuration(): Int {
+ if (!isInPlaybackState()) {
+ return -1
+ }
+ return mStudioPlayer!!.duration.toInt()
+ }
+
+ fun getPlayer(): IPlayer? {
+ return mStudioPlayer
+ }
+
+ fun getRenderViewType(): Int {
+ return when (mRenderView) {
+ is SurfaceRenderView -> RENDER_TYPE_SURFACE_VIEW
+ is TextureRenderView -> RENDER_TYPE_TEXTURE_VIEW
+ else -> -1
+ }
+ }
+
+ /******************************set method***********************************/
+
+ fun setViewScale(scale: Float) {
+ mRenderView!!.view.scaleX = scale
+ mRenderView!!.view.scaleY = scale
+ }
+
+ fun setSpeed(speed: Float) {
+ mStudioPlayer?.setSpeed(speed)
+ }
+
+}
diff --git a/app/src/main/java/com/frank/ffmpeg/view/VideoPreviewBar.kt b/app/src/main/java/com/frank/ffmpeg/view/VideoPreviewBar.kt
index f33317c2..02ee6542 100644
--- a/app/src/main/java/com/frank/ffmpeg/view/VideoPreviewBar.kt
+++ b/app/src/main/java/com/frank/ffmpeg/view/VideoPreviewBar.kt
@@ -13,7 +13,7 @@ import android.widget.TextView
import com.frank.ffmpeg.R
import com.frank.ffmpeg.util.ScreenUtil
import com.frank.ffmpeg.util.TimeUtil
-import com.frank.androidmedia.controller.MediaDecodeController
+import com.frank.ffmpeg.controller.MediaDecodeController
/**
* the custom view of preview SeekBar
diff --git a/app/src/main/res/drawable-xhdpi/ic_back.png b/app/src/main/res/drawable-xhdpi/ic_back.png
new file mode 100644
index 00000000..4e6f24cc
Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/ic_back.png differ
diff --git a/app/src/main/res/drawable-xhdpi/ic_screen_switch.png b/app/src/main/res/drawable-xhdpi/ic_screen_switch.png
new file mode 100644
index 00000000..dc6e2933
Binary files /dev/null and b/app/src/main/res/drawable-xhdpi/ic_screen_switch.png differ
diff --git a/app/src/main/res/drawable/btn_circle.xml b/app/src/main/res/drawable/btn_circle.xml
deleted file mode 100644
index 37739e11..00000000
--- a/app/src/main/res/drawable/btn_circle.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/app/src/main/res/drawable/btn_point.xml b/app/src/main/res/drawable/btn_point.xml
deleted file mode 100644
index 80a6f3f6..00000000
--- a/app/src/main/res/drawable/btn_point.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/app/src/main/res/drawable/item_background_gray.xml b/app/src/main/res/drawable/item_background_gray.xml
new file mode 100644
index 00000000..b147a89e
--- /dev/null
+++ b/app/src/main/res/drawable/item_background_gray.xml
@@ -0,0 +1,9 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/app/src/main/res/drawable/white_background.xml b/app/src/main/res/drawable/white_background.xml
deleted file mode 100644
index aba4e8ae..00000000
--- a/app/src/main/res/drawable/white_background.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/app/src/main/res/layout/activity_main.xml b/app/src/main/res/layout/activity_main.xml
index adf528f8..c81c6349 100644
--- a/app/src/main/res/layout/activity_main.xml
+++ b/app/src/main/res/layout/activity_main.xml
@@ -1,23 +1,14 @@
-
-
+
-
+
diff --git a/app/src/main/res/layout/activity_media_handle.xml b/app/src/main/res/layout/activity_media_handle.xml
deleted file mode 100644
index aa9c4c7c..00000000
--- a/app/src/main/res/layout/activity_media_handle.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/app/src/main/res/layout/activity_push.xml b/app/src/main/res/layout/activity_push.xml
deleted file mode 100644
index 3e1cc890..00000000
--- a/app/src/main/res/layout/activity_push.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
diff --git a/app/src/main/res/layout/layout_activity_main.xml b/app/src/main/res/layout/layout_activity_main.xml
new file mode 100644
index 00000000..76b0fb92
--- /dev/null
+++ b/app/src/main/res/layout/layout_activity_main.xml
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/app/src/main/res/layout/layout_activity_player.xml b/app/src/main/res/layout/layout_activity_player.xml
new file mode 100644
index 00000000..ce193b9a
--- /dev/null
+++ b/app/src/main/res/layout/layout_activity_player.xml
@@ -0,0 +1,43 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/res/layout/layout_bottom_item.xml b/app/src/main/res/layout/layout_bottom_item.xml
new file mode 100644
index 00000000..b29e8f43
--- /dev/null
+++ b/app/src/main/res/layout/layout_bottom_item.xml
@@ -0,0 +1,59 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/src/main/res/layout/layout_player_info.xml b/app/src/main/res/layout/layout_player_info.xml
new file mode 100644
index 00000000..cc8d844e
--- /dev/null
+++ b/app/src/main/res/layout/layout_player_info.xml
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/app/src/main/res/layout/layout_top_item.xml b/app/src/main/res/layout/layout_top_item.xml
new file mode 100644
index 00000000..3e272796
--- /dev/null
+++ b/app/src/main/res/layout/layout_top_item.xml
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
diff --git a/app/src/main/res/values-en/strings.xml b/app/src/main/res/values-en/strings.xml
index 0637fd52..0e52c2d8 100644
--- a/app/src/main/res/values-en/strings.xml
+++ b/app/src/main/res/values-en/strings.xml
@@ -31,18 +31,12 @@
Uncle
Audio handle
- Media handle
Video handle
- ffmpeg push
RTMP living
- Media mux
- Audio extract
- Video extract
Media play
Probe media info
Retrieve media info
- Media dubbing
Video cut
Video concat
diff --git a/app/src/main/res/values/colors.xml b/app/src/main/res/values/colors.xml
index 54aa92ae..26ed61e5 100644
--- a/app/src/main/res/values/colors.xml
+++ b/app/src/main/res/values/colors.xml
@@ -3,10 +3,9 @@
#3F51B5
#303F9F
#FF4081
- #FFFFFF
#D7D7D7
- #FF2C29
- #00DDFF
- #BEBEBE
- #FF9800
+
+ #FFFFFF
+ #000000
+ #00DDFF
diff --git a/app/src/main/res/values/dimens.xml b/app/src/main/res/values/dimens.xml
deleted file mode 100644
index fb296eca..00000000
--- a/app/src/main/res/values/dimens.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-
-
- 16sp
- 16sp
- 22sp
- 9sp
-
- 0sp
- 20sp
- 10sp
- 5sp
- 2sp
- 5sp
- 10sp
- 15sp
- 5dp
- 3dp
- 10dp
- 15dp
- 0dp
-
- 2sp
- 1sp
- 16sp
- 24sp
- 20sp
- 12sp
- 9sp
- 10sp
- 11sp
- 16sp
- 14sp
- 20sp
- 30sp
- 22sp
- 17sp
-
- 10dp
- 12dp
- 4dp
- 24dp
- 12dp
-
-
-
-
-
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
index aff342fc..872a429f 100644
--- a/app/src/main/res/values/strings.xml
+++ b/app/src/main/res/values/strings.xml
@@ -31,18 +31,12 @@
大叔
音频处理
- 音视频处理
视频处理
- ffmpeg推流
- rtmp推流直播
+ rtmp直播
- 音视频合成
- 提取音频
- 提取视频
音视频播放
- probe多媒体信息
- retrieve多媒体信息
- 配音
+ probe信息
+ retrieve信息
视频剪切
视频拼接
diff --git a/build.gradle b/build.gradle
index ddd39979..e1a80b5c 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,7 +1,7 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
- ext.kotlin_version = '1.8.10'
+ ext.kotlin_version = '2.1.0'
repositories {
gradlePluginPortal()
mavenCentral()
@@ -11,7 +11,7 @@ buildscript {
}
}
dependencies {
- classpath 'com.android.tools.build:gradle:7.2.1'
+ classpath 'com.android.tools.build:gradle:8.8.0'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
diff --git a/engine/common/CMakeLists.txt b/engine/common/CMakeLists.txt
new file mode 100644
index 00000000..52c9b292
--- /dev/null
+++ b/engine/common/CMakeLists.txt
@@ -0,0 +1,38 @@
+cmake_minimum_required(VERSION 3.10.2)
+
+project(common)
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -g -Wall")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror -Wno-deprecated")
+set(CMAKE_CXX_FLAGS_DEBUG "-O0")
+set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG ")
+
+set(SRC_LIST src/NextDictionary.cpp src/NextLog.cpp)
+
+if (CMAKE_SYSTEM_NAME STREQUAL "Android")
+ set(CMAKE_ANDROID_NDK $ENV{ANDROID_NDK})
+ set(CMAKE_SYSTEM_VERSION 21)
+ aux_source_directory(src/android SRC_LIST_ANDROID)
+ set(SRC_LIST
+ ${SRC_LIST}
+ ${SRC_LIST_ANDROID})
+elseif (CMAKE_SYSTEM_NAME STREQUAL "OHOS")
+ add_definitions(-D__HARMONY__)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-command-line-argument")
+else ()
+ message(FATAL_ERROR "Don't support ${CMAKE_SYSTEM_NAME}!")
+endif ()
+
+include_directories("${CMAKE_CURRENT_SOURCE_DIR}/include")
+
+add_library(common SHARED ${SRC_LIST})
+
+if (CMAKE_SYSTEM_NAME STREQUAL "Android")
+ find_library(log-lib log)
+elseif (CMAKE_SYSTEM_NAME STREQUAL "OHOS")
+ find_library(log-lib hilog_ndk.z)
+else ()
+ message(FATAL_ERROR "Don't support ${CMAKE_SYSTEM_NAME}!")
+endif ()
+
+target_link_libraries(common ${log-lib})
diff --git a/engine/common/include/CommonUtil.h b/engine/common/include/CommonUtil.h
new file mode 100644
index 00000000..ce0d8eeb
--- /dev/null
+++ b/engine/common/include/CommonUtil.h
@@ -0,0 +1,31 @@
/**
 * Note: Common Util
 * Date: 2025/11/28
 * Author: frank
 */

#ifndef COMMON_UTIL_H
#define COMMON_UTIL_H

// NOTE(review): the original include targets were lost in extraction
// (bare `#include` lines); <cstdint> and <chrono> cover every name used here.
#include <cstdint>
#include <chrono>

// Read a big-endian (network order) 32-bit unsigned integer from a 4-byte buffer.
// The first byte is widened to uint32_t before shifting so that values with the
// top bit set do not shift into the sign bit of a promoted int (UB before C++20).
static inline uint32_t ByteToInt(const uint8_t *src) {
    return ((uint32_t) (src[0] & 0xFF) << 24) |
           ((uint32_t) (src[1] & 0xFF) << 16) |
           ((uint32_t) (src[2] & 0xFF) << 8) |
           ((uint32_t) (src[3] & 0xFF));
}

// Wall-clock time since the Unix epoch, in microseconds (system_clock, not monotonic).
static inline int64_t CurrentTimeUs() {
    return std::chrono::duration_cast<std::chrono::microseconds>(
            std::chrono::system_clock::now().time_since_epoch()).count();
}

// Wall-clock time since the Unix epoch, in milliseconds (system_clock, not monotonic).
static inline int64_t CurrentTimeMs() {
    return std::chrono::duration_cast<std::chrono::milliseconds>(
            std::chrono::system_clock::now().time_since_epoch()).count();
}

#endif //COMMON_UTIL_H
diff --git a/engine/common/include/NalUnitParser.h b/engine/common/include/NalUnitParser.h
new file mode 100644
index 00000000..35eb2781
--- /dev/null
+++ b/engine/common/include/NalUnitParser.h
@@ -0,0 +1,77 @@
+/**
+ * Note: Parser of NALUnit
+ * Date: 2025/11/28
+ * Author: frank
+ */
+
+#ifndef NAL_UNIT_PARSER_H
+#define NAL_UNIT_PARSER_H
+
+#include "NextDefine.h"
+
+/** H264 NAL结构
+---------------------------------
+|0|1|2|3|4|5|6|7|
+---------------------------------
+|F|Idc| Type |
+*/
+
+/** HEVC NAL结构
+---------------------------------
+|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+---------------------------------
+|F| Type | LayerId | Tid |
+*/
+
+class NALUnitParser {
+public:
+ // 获取h264的nal类型
+ static inline int get_h264_nal_unit_type(const uint8_t *data) {
+ return data[4] & 0x1F;
+ }
+
+ // 获取hevc的nal类型
+ static inline int get_hevc_nal_unit_type(const uint8_t *data) {
+ return (data[4] & 0x7E) >> 1;
+ }
+
+ // 判断h264是否idr帧
+ static inline int is_h264_idr(int type) {
+ return type == NAL_IDR_SLICE;
+ }
+
+ // 判断hevc是否idr帧
+ static inline int is_hevc_idr(int type) {
+ return type == HEVC_NAL_BLA_W_LP ||
+ type == HEVC_NAL_BLA_W_RADL ||
+ type == HEVC_NAL_BLA_N_LP ||
+ type == HEVC_NAL_IDR_W_RADL ||
+ type == HEVC_NAL_IDR_N_LP ||
+ type == HEVC_NAL_CRA_NUT;
+ }
+
+ // 判断hevc是否为非参考帧
+ static inline bool is_hevc_no_ref(int type) {
+ return type == HEVC_NAL_TRAIL_N ||
+ type == HEVC_NAL_TSA_N ||
+ type == HEVC_NAL_STSA_N ||
+ type == HEVC_NAL_RADL_N ||
+ type == HEVC_NAL_RASL_N ||
+ type == HEVC_NAL_VCL_N10 ||
+ type == HEVC_NAL_VCL_N12 ||
+ type == HEVC_NAL_VCL_N14;
+ }
+
+ // 获取h264的ref_idc
+ static inline int get_h264_ref_idc(const uint8_t *data) {
+ return (data[4] >> 5) & 0x03;
+ }
+
+ // 获取hevc的编码层级nuh_layer_id
+ static inline int get_hevc_nuh_layer_id(const uint8_t *data) {
+ return (data[4] & 0x1 << 5) + (data[5] >> 3) & 0x1F;
+ }
+
+};
+
+#endif //NAL_UNIT_PARSER_H
diff --git a/engine/common/include/NextDefine.h b/engine/common/include/NextDefine.h
new file mode 100644
index 00000000..b80d353d
--- /dev/null
+++ b/engine/common/include/NextDefine.h
@@ -0,0 +1,152 @@
/**
 * Note: common define of next player
 * Date: 2025/11/25
 * Author: frank
 */

#ifndef NEXT_DEFINE_H
#define NEXT_DEFINE_H

// Tunables shared by the demux/decode/render pipeline.
#define MAX_DEVIATION 60000
#define MIN_MIN_FRAMES 2
#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
#define QUEUE_WAIT_TIMEOUT 100
#define DEFAULT_MIN_FRAMES 50000
#define AV_NO_SYNC_THRESHOLD 100.0
#define BUFFERING_CHECK_PERIOD 500
#define MAX_ACCURATE_SEEK_TIMEOUT 5000

// Pixel formats a video frame may arrive in, including the platform
// hardware-decoder outputs (VideoToolbox / MediaCodec / Harmony).
enum VideoPixelFormat {
    PIXEL_FORMAT_UNKNOWN = 0,
    PIXEL_FORMAT_YUV420P = 1,
    PIXEL_FORMAT_YUV420SP = 2,
    PIXEL_FORMAT_VIDEOTOOLBOX = 3,
    PIXEL_FORMAT_YUV420P10LE = 4,
    PIXEL_FORMAT_YUVJ420P = 5,
    PIXEL_FORMAT_MEDIACODEC = 6,
    PIXEL_FORMAT_RGB565 = 7,
    PIXEL_FORMAT_RGB888 = 8,
    PIXEL_FORMAT_RGBA8888 = 9,
    PIXEL_FORMAT_HARMONY = 10
};

// Capacities of the various frame/sample queues.
enum FrameQueueSize {
    FRAME_QUEUE_SIZE = 16,
    SAMPLE_QUEUE_SIZE = 9,
    SUBTITLE_PICTURE_QUEUE_SIZE = 16,
    VIDEO_PICTURE_QUEUE_SIZE_MIN = 3,
    VIDEO_PICTURE_QUEUE_SIZE_MAX = 16,
    VIDEO_PICTURE_QUEUE_SIZE_DEFAULT = 3
};

// Buffering watermarks (milliseconds unless noted).
enum BufferWaterMark {
    DEFAULT_FIRST_HIGH_WATER_MARK_IN_MS = 500,
    DEFAULT_NEXT_HIGH_WATER_MARK_IN_MS = 1000,
    DEFAULT_LAST_HIGH_WATER_MARK_IN_MS = 5000,
    DEFAULT_HIGH_WATER_MARK_IN_BYTES = 256 * 1024
};

// Option keys, grouped by value type: 1xxx float, 2xxx int64, 3xxx string.
enum PlayerOption {
    OPTION_FLOAT_AV_DIFF = 1001,
    OPTION_FLOAT_AV_DELAY = 1002,
    OPTION_FLOAT_PLAYBACK_RATE = 1003,
    OPTION_FLOAT_PLAYBACK_VOLUME = 1004,
    OPTION_FLOAT_DROP_FRAME_RATE = 1005,
    OPTION_FLOAT_DROP_PACKET_RATE = 1006,
    OPTION_FLOAT_VIDEO_FRAME_RATE = 1007,
    OPTION_FLOAT_VIDEO_DECODE_RATE = 1008,
    OPTION_FLOAT_VIDEO_RENDER_RATE = 1009,

    OPTION_INT64_BIT_RATE = 2001,
    OPTION_INT64_FILE_SIZE = 2002,
    OPTION_INT64_TCP_SPEED = 2003,
    OPTION_INT64_PIXEL_FORMAT = 2004,
    OPTION_INT64_VIDEO_DECODER = 2005,
    OPTION_INT64_AUDIO_DECODER = 2006,
    OPTION_INT64_LAST_TCP_SPEED = 2007,
    OPTION_INT64_CACHE_SIZE = 2008,
    OPTION_INT64_CACHE_POSITION = 2009, // cache relative position
    OPTION_INT64_CACHE_FILE_POS = 2010, // cache position in file
    OPTION_INT64_SEEK_LOAD_TIME = 2011,
    OPTION_INT64_TRANSFER_BYTES = 2012,
    OPTION_INT64_MAX_BUFFER_SIZE = 2013,
    OPTION_INT64_CUR_VIDEO_STREAM = 2014,
    OPTION_INT64_CUR_AUDIO_STREAM = 2015,
    OPTION_INT64_VIDEO_CACHE_PKT = 2016,
    OPTION_INT64_AUDIO_CACHE_PKT = 2017,
    OPTION_INT64_VIDEO_CACHE_DUR = 2018,
    OPTION_INT64_AUDIO_CACHE_DUR = 2019,
    OPTION_INT64_VIDEO_CACHE_BYTES = 2020,
    OPTION_INT64_AUDIO_CACHE_BYTES = 2021,

    OPTION_STR_DECODER_AVCODEC = 3001, // ffmpeg avcodec
    OPTION_STR_DECODER_MEDIACODEC = 3002, // android MediaCodec
    OPTION_STR_DECODER_VTB = 3003, // iOS VideoToolBox
    OPTION_STR_DECODER_HARMONY = 3004, // HarmonyOS decoder

};

// NAL unit type
enum NALUnitType {
    NAL_SLICE = 1,
    NAL_DPA = 2,
    NAL_DPB = 3,
    NAL_DPC = 4,
    NAL_IDR_SLICE = 5,
    NAL_SEI = 6,
    NAL_SPS = 7,
    NAL_PPS = 8,
    NAL_AUD = 9,
    NAL_END_SEQUENCE = 10,
    NAL_END_STREAM = 11,
    NAL_FILLER_DATA = 12,
    NAL_SPS_EXT = 13,
    NAL_AUXILIARY_SLICE = 19
};

// HEVC frame type
enum HEVCNALUnitType {
    HEVC_NAL_TRAIL_N = 0,
    HEVC_NAL_TRAIL_R = 1,
    HEVC_NAL_TSA_N = 2,
    HEVC_NAL_TSA_R = 3,
    HEVC_NAL_STSA_N = 4,
    HEVC_NAL_STSA_R = 5,
    HEVC_NAL_RADL_N = 6,
    HEVC_NAL_RADL_R = 7,
    HEVC_NAL_RASL_N = 8,
    HEVC_NAL_RASL_R = 9,
    HEVC_NAL_VCL_N10 = 10,
    HEVC_NAL_VCL_R11 = 11,
    HEVC_NAL_VCL_N12 = 12,
    HEVC_NAL_VCL_R13 = 13,
    HEVC_NAL_VCL_N14 = 14,
    HEVC_NAL_VCL_R15 = 15,
    HEVC_NAL_BLA_W_LP = 16,
    HEVC_NAL_BLA_W_RADL = 17,
    HEVC_NAL_BLA_N_LP = 18,
    HEVC_NAL_IDR_W_RADL = 19,
    HEVC_NAL_IDR_N_LP = 20,
    HEVC_NAL_CRA_NUT = 21,
    HEVC_NAL_IRAP_VCL22 = 22,
    HEVC_NAL_IRAP_VCL23 = 23,
    HEVC_NAL_RSV_VCL24 = 24,
    HEVC_NAL_RSV_VCL25 = 25,
    HEVC_NAL_RSV_VCL26 = 26,
    HEVC_NAL_RSV_VCL27 = 27,
    HEVC_NAL_RSV_VCL28 = 28,
    HEVC_NAL_RSV_VCL29 = 29,
    HEVC_NAL_RSV_VCL30 = 30,
    HEVC_NAL_RSV_VCL31 = 31,
    HEVC_NAL_VPS = 32,
    HEVC_NAL_SPS = 33,
    HEVC_NAL_PPS = 34,
    HEVC_NAL_AUD = 35,
    HEVC_NAL_EOS_NUT = 36,
    HEVC_NAL_EOB_NUT = 37,
    HEVC_NAL_FD_NUT = 38,
    HEVC_NAL_SEI_PREFIX = 39,
    HEVC_NAL_SEI_SUFFIX = 40
};

#endif
diff --git a/engine/common/include/NextDictionary.h b/engine/common/include/NextDictionary.h
new file mode 100644
index 00000000..c6e0cbb3
--- /dev/null
+++ b/engine/common/include/NextDictionary.h
@@ -0,0 +1,61 @@
+
#ifndef NEXT_DICTIONARY_H
#define NEXT_DICTIONARY_H

// NOTE(review): the original include targets were stripped in extraction;
// these four cover every name used in this header and its .cpp.
#include <cstdint>
#include <cstddef>
#include <string>
#include <vector>

// Discriminator for the value stored in a dictionary item.
enum ValueType {
    VALUE_TYPE_UNKNOWN = 0,
    VALUE_TYPE_INT32 = 1,
    VALUE_TYPE_INT64 = 2,
    VALUE_TYPE_STRING = 3
};

// A small string-keyed dictionary holding int64 or string values.
// Lookup is a linear scan (see FindItemIndex), so it is intended for small
// option/metadata sets, not hot paths.  No internal locking.
class NextDictionary {
public:
    NextDictionary() = default;

    virtual ~NextDictionary();

    // Removes all items and frees their name buffers and values.
    void Clear();

    // Inserts `name` with an int64 value, overwriting any existing value.
    void SetInt64(const char *name, int64_t value);

    // Returns true and writes *value when `name` exists with int64 type.
    bool FindInt64(const char *name, int64_t *value) const;

    // Returns the stored int64, or `defaultValue` when absent or mistyped.
    int64_t GetInt64(const char *name, int64_t defaultValue) const;

    // Inserts `name` with a copied string value, overwriting any existing value.
    void SetString(const char *name, const std::string &s);

    // Returns the stored string, or *defaultValue ("" when null) when absent.
    std::string GetString(const char *name, std::string *defaultValue) const;

    // Number of stored items.
    size_t GetSize() const;

    // Name of the item at `index` (type reported via *type), nullptr if out of range.
    const char *GetEntryNameAt(size_t index, ValueType *type) const;

protected:
    struct Item {
        // TODO: memory align
        // NOTE(review): the original intended these two to share a union
        // (commented out in the source); kept as plain members pending review.
        int64_t val_int64;
        std::string *val_string;
        const char *item_name;  // heap copy owned by the item (new char[])
        size_t name_len;
        ValueType type;

        void SetName(const char *name, size_t len);
    };

    // Owned item pointers; released in Clear().
    // Reconstructed element type: the .cpp dereferences entries as pointers
    // (m_items[i]->name_len, delete item), the original `<Item *>` was stripped.
    std::vector<Item *> m_items;

    Item *AllocateItem(const char *name);

    static void FreeItemValue(Item *item);

    const Item *FindItem(const char *name, ValueType type) const;

    size_t FindItemIndex(const char *name, size_t len) const;
};

#endif
diff --git a/engine/common/include/NextErrorCode.h b/engine/common/include/NextErrorCode.h
new file mode 100644
index 00000000..dfe910d3
--- /dev/null
+++ b/engine/common/include/NextErrorCode.h
@@ -0,0 +1,100 @@
/**
 * Note: error code of next player
 * Date: 2025/11/27
 * Author: frank
 */

#ifndef NEXT_ERROR_CODE_H
#define NEXT_ERROR_CODE_H

/********************** Success **********************/
#define RESULT_OK 0 // success

/********************** URL errors **********************/
#define ERROR_URL_INVALID (-1001) // invalid URL
#define ERROR_URL_TIMESTAMP (-1002) // timestamp expired
#define ERROR_URL_AUTH (-1003) // authentication failed

/********************** Network errors **********************/
#define ERROR_NET_DNS (-2001) // DNS resolution failed
#define ERROR_NET_NO_NETWORK (-2002) // no network available
#define ERROR_NET_CON_TIMEOUT (-2003) // connection timed out
#define ERROR_NET_READ_TIMEOUT (-2004) // read timed out
#define ERROR_NET_CHANGED (-2005) // network changed, e.g. WiFi to cellular
#define ERROR_NET_HTTP401 (-2006) // HTTP 401 unauthorized
#define ERROR_NET_HTTP403 (-2007) // HTTP 403 forbidden
#define ERROR_NET_HTTP404 (-2008) // HTTP 404 not found
#define ERROR_NET_SERVER_INNER (-2009) // server internal error
#define ERROR_NET_NO_MATCH_STREAM (-2010) // no stream matches the requested quality
#define ERROR_NET_VIDEO_NOT_FOUND (-2011) // video does not exist
#define ERROR_NET_REDIRECT (-2012) // URL redirected

/********************** Parse/demux errors **********************/
#define ERROR_PARSE_OPEN (-3001) // open failed
#define ERROR_PARSE_FIND_STREAM (-3002) // finding stream info failed
#define ERROR_PARSE_FORMAT (-3003) // unsupported format
#define ERROR_PARSE_INVALID_DATA (-3004) // invalid data
#define ERROR_PARSE_STREAM_OPEN (-3005) // stream open failed
#define ERROR_PARSE_READ_FRAME (-3006) // reading data failed
#define ERROR_PARSE_SWITCH_VIDEO (-3007) // switching video stream failed
#define ERROR_PARSE_SWITCH_AUDIO (-3008) // switching audio track failed
#define ERROR_PARSE_SWITCH_SUB (-3009) // switching subtitle track failed
#define ERROR_PARSE_NOT_INIT (-3010) // parser not initialized
#define ERROR_PARSE_METADATA (-3011) // metadata parsing failed

/********************** Decode errors **********************/
#define ERROR_DECODE_VIDEO_NONE (-4001) // unsupported video codec
#define ERROR_DECODE_AUDIO_NONE (-4002) // unsupported audio codec
#define ERROR_DECODE_SUB_NONE (-4003) // unsupported subtitle codec
#define ERROR_DECODE_VIDEO_OPEN (-4004) // video decoder open failed
#define ERROR_DECODE_AUDIO_OPEN (-4005) // audio decoder open failed
#define ERROR_DECODE_SUB_OPEN (-4006) // subtitle decoder open failed
#define ERROR_DECODE_INVALID (-4007) // init validation: invalid argument
#define ERROR_DECODE_VIDEO_DEC (-4008) // video decoding failed
#define ERROR_DECODE_AUDIO_DEC (-4009) // audio decoding failed
#define ERROR_DECODE_SUB_DEC (-4010) // subtitle decoding failed
#define ERROR_DECODE_NOT_INIT (-4011) // decoder not initialized
#define ERROR_DECODE_BAD_DATA (-4012) // corrupted data
#define ERROR_DECODE_MISS_REF (-4013) // missing reference frame
#define ERROR_DECODE_SESSION (-4014) // session invalidated, e.g. fg/bg switch
#define ERROR_DECODE_PTS_ORDER (-4015) // pts ordering error
#define ERROR_DECODER_UNSUPPORTED (-4016) // decoder exists but parameters unsupported

/********************** Render errors **********************/
#define ERROR_RENDER_VIDEO_INIT (-5001) // video renderer init failed
#define ERROR_RENDER_AUDIO_INIT (-5002) // audio init failed
#define ERROR_RENDER_HANDLE (-5003) // render processing failed
#define ERROR_RENDER_INPUT (-5004) // input frame handling failed
#define ERROR_RENDER_PIP (-5005) // picture-in-picture switch failed
#define ERROR_RENDER_FILTER (-5006) // filter processing failed
#define ERROR_RENDER_HDR (-5007) // HDR rendering failed
#define ERROR_RENDER_SUBTITLE (-5008) // subtitle rendering failed
#define ERROR_RENDER_AUDIO_SWR (-5009) // audio resampling failed
#define ERROR_RENDER_VIDEO_SWS (-5010) // pixel format conversion failed
#define ERROR_RENDER_VIDEO_SUR (-5011) // setting surface failed
#define ERROR_RENDER_VIDEO_CTX (-5012) // invalid render context

/********************** Decrypt errors **********************/
#define ERROR_DECRYPT_SECRET_KEY (-6001) // fetching secret key failed
#define ERROR_DECRYPT_LICENSE (-6002) // fetching license failed
#define ERROR_DECRYPT_VERSION_ (-6003) // version mismatch
#define ERROR_DECRYPT_VALIDATE (-6004) // validation failed
#define ERROR_DECRYPT_FAIL (-6005) // decryption failed

/********************** Preload errors **********************/
#define ERROR_PRELOAD_FAIL (-7001) // preload failed

/********************** Player errors **********************/
#define ERROR_PLAYER_NOT_INIT (-8001) // player not initialized
#define ERROR_PLAYER_INIT_FAIL (-8002) // player initialization failed
#define ERROR_PLAYER_TRY_AGAIN (-8003) // retry
#define ERROR_PLAYER_EOF (-8004) // end of file
#define ERROR_PLAYER_STATE (-8005) // wrong player state
#define ERROR_PLAYER_UNSUPPORTED (-8006) // unsupported operation

/********************** Other errors **********************/
#define ERROR_OTHER_UNKNOWN (-9999) // unknown error
#define ERROR_OTHER_OOM (-9001) // out of memory
#define ERROR_OTHER_PERMISSION (-9002) // permission denied

#endif
diff --git a/engine/common/include/NextLog.h b/engine/common/include/NextLog.h
new file mode 100644
index 00000000..c3a28468
--- /dev/null
+++ b/engine/common/include/NextLog.h
@@ -0,0 +1,26 @@
#ifndef NEXT_LOG_H
#define NEXT_LOG_H

// Log severity levels; a larger value means more severe.
#define AV_LEVEL_FATAL 128
#define AV_LEVEL_ERROR 64
#define AV_LEVEL_WARN 32
#define AV_LEVEL_INFO 16
#define AV_LEVEL_DEBUG 8


// Formats one log line and hands it to the installed callback.
// Messages whose level exceeds the configured threshold are dropped.
// Returns 0, or vsnprintf's non-positive result on a formatting failure.
int LogPrint(int level, const char *tag, const char *fmt, ...);
// Sets the threshold; only messages with level <= this value are emitted.
void SetLogLevel(int level);
// Installs the sink invoked as callback(userdata, level, formatted_line).
void SetLogCallback(void (*callback)(void *, int, const char *), void *userdata);

// Process-wide logging state (threshold + optional sink).
typedef struct LogContext {
    int level;      // threshold level
    void *userdata; // opaque pointer handed back to the callback
    void (*callback)(void *arg, int level, const char *buf);
} LogContext;

// Convenience macros: NEXT_LOGx(TAG, fmt, ...)
#define NEXT_LOGD(...) LogPrint(AV_LEVEL_DEBUG, __VA_ARGS__)
#define NEXT_LOGI(...) LogPrint(AV_LEVEL_INFO, __VA_ARGS__)
#define NEXT_LOGW(...) LogPrint(AV_LEVEL_WARN, __VA_ARGS__)
#define NEXT_LOGE(...) LogPrint(AV_LEVEL_ERROR, __VA_ARGS__)

#endif
diff --git a/engine/common/include/NextMessage.h b/engine/common/include/NextMessage.h
new file mode 100644
index 00000000..41dd79d9
--- /dev/null
+++ b/engine/common/include/NextMessage.h
@@ -0,0 +1,54 @@
/**
 * Note: message of next player
 * Date: 2025/11/26
 * Author: frank
 */

#ifndef NEXT_MESSAGE_H
#define NEXT_MESSAGE_H

// Requests sent from the app layer into the player.
enum PlayerRequest {
    REQUEST_START = 10001,
    REQUEST_PAUSE = 10002,
    REQUEST_SEEK = 10003,
    REQUEST_KERNEL_PAUSE = 10004,
    REQUEST_PLAY_SPEED = 10005
};

// Notifications sent from the player back to the app layer.
// 1xxx: open/prepare lifecycle, 2xxx: buffering, 3xxx: seek/state.
enum PlayerMsg {
    MSG_COMPONENT_OPEN = 1000,
    MSG_OPEN_INPUT = 1001,
    MSG_FIND_STREAM_INFO = 1002,
    MSG_VIDEO_DECODER_OPEN = 1003,
    MSG_VIDEO_FIRST_PACKET = 1004,
    MSG_AUDIO_DECODE_START = 1005,
    MSG_VIDEO_DECODE_START = 1006,
    MSG_ON_PREPARED = 1007,
    MSG_VIDEO_SIZE_CHANGED = 1008, // arg1 = width, arg2 = height
    MSG_SAR_CHANGED = 1009, // arg1 = num, arg2 = den
    MSG_ROTATION_CHANGED = 1010, // arg1 = rotate degree
    MSG_VIDEO_RENDER_START = 1011,
    MSG_AUDIO_RENDER_START = 1012,
    MSG_ON_FLUSH = 1013,
    MSG_ON_ERROR = 1014, //arg1 = error msg
    MSG_ON_COMPLETED = 1015,
    MSG_MEDIA_INFO = 1016,

    MSG_BUFFER_START = 2000, // arg1: 1=seek 2=network 3=decode
    MSG_BUFFER_UPDATE = 2001, // arg1 = progress percent
    MSG_BUFFER_BYTE_UPDATE = 2002, // arg1 = cached data in bytes
    MSG_BUFFER_TIME_UPDATE = 2003, // arg1 = cached duration in ms
    MSG_BUFFER_END = 2004,

    MSG_VIDEO_SEEK_RENDER_START = 3000,
    MSG_AUDIO_SEEK_RENDER_START = 3001,
    MSG_SEEK_LOOP_START = 3002, // arg1 = loop count
    MSG_SEEK_COMPLETE = 3003, // arg1 = seek position
    MSG_ACCURATE_SEEK_COMPLETE = 3004,
    MSG_PLAY_STATE_CHANGED = 3005,
    MSG_PLAY_URL_CHANGED = 3006,
    MSG_SUBTITLE_UPDATE = 3007

};

#endif
diff --git a/engine/common/include/NextStructDefine.h b/engine/common/include/NextStructDefine.h
new file mode 100644
index 00000000..4ba101e4
--- /dev/null
+++ b/engine/common/include/NextStructDefine.h
@@ -0,0 +1,72 @@
/**
 * Note: define of struct
 * Date: 2025/11/29
 * Author: frank
 */

#ifndef NEXT_STRUCT_DEFINE_H
#define NEXT_STRUCT_DEFINE_H

// NOTE(review): the original include targets were stripped in extraction;
// these cover std::function, std::shared_ptr, std::vector and the fixed-width ints.
#include <cstdint>
#include <functional>
#include <memory>
#include <vector>

// Shorthand for std::shared_ptr (macro kept for source compatibility).
#define sp std::shared_ptr

// NOTE(review): the template arguments of these std::function aliases were
// lost in extraction. LogCallback mirrors the SetLogCallback() signature in
// NextLog.h; the other two are reconstructions — confirm against the
// original sources before relying on them.
using LogCallback = std::function<void(void *, int, const char *)>;
using NotifyCallback = std::function<void(int, int, int)>; // TODO confirm signature
using InjectCallback = std::function<int(int, void *)>;    // TODO confirm signature

// Per-stream description (one entry per audio/video/subtitle track).
struct TrackInfo {
    int codec_id = 0;
    int stream_type = -1;   // -1 = unset
    int stream_index = -1;  // index within the container, -1 = unset
    int codec_profile = 0;
    int64_t bit_rate = 0;

    // Video
    int width = 0;
    int height = 0;
    int sar_num = 0;        // sample aspect ratio numerator
    int sar_den = 1;
    int fps_num = 0;        // frame rate as a rational
    int fps_den = 1;
    int tbr_num = 0;
    int tbr_den = 1;
    int rotation = 0;       // rotation in degrees
    int pixel_fmt = -1;
    uint8_t color_space = 0;
    uint8_t color_range = 0;
    uint8_t color_transfer = 0;
    uint8_t color_primaries = 0;

    // Audio
    int channels = 0;
    int sample_fmt = 0;
    int sample_rate = 0;
    int time_base_num = 0;
    int time_base_den = 1;
    int extra_data_size = 0;
    uint8_t *extra_data = nullptr; // owned; freed by MetaData's destructor (new[])
};

// Container-level metadata plus the list of tracks.
struct MetaData {
    // Frees the codec extradata owned by each track entry.
    ~MetaData() {
        for (auto & it : track_info) {
            if (it.extra_data) {
                delete[] it.extra_data;
                it.extra_data = nullptr;
            }
        }
    }

    int64_t duration = 0;
    int64_t bit_rate = 0;
    int64_t start_time = 0;
    int audio_index = -1;    // default stream indices, -1 = none
    int video_index = -1;
    int subtitle_index = -1;

    // Reconstructed element type (original `<TrackInfo>` stripped in extraction).
    std::vector<TrackInfo> track_info;
};

#endif //NEXT_STRUCT_DEFINE_H
diff --git a/engine/common/include/android/JniEnv.h b/engine/common/include/android/JniEnv.h
new file mode 100644
index 00000000..bfe467b4
--- /dev/null
+++ b/engine/common/include/android/JniEnv.h
@@ -0,0 +1,32 @@
#ifndef JNI_ENV_H
#define JNI_ENV_H

#if defined(__ANDROID__)

// Restored include target (stripped in extraction): JNIEnv/JavaVM come from jni.h.
#include <jni.h>

// RAII accessor for the current thread's JNIEnv.  Attaches the thread to the
// JVM on first use; detachment happens via the pthread key destructor
// registered in GlobalInit(), not in this class's destructor.
class JniEnvPtr {
public:
    JniEnvPtr();

    ~JniEnvPtr();

    // Convenience: call JNI methods directly through the wrapper.
    JNIEnv *operator->();

    // Raw env pointer; may be nullptr if GlobalInit() was never called
    // or attachment failed.
    JNIEnv *Env() const;

    // Must be called once with the JavaVM (typically from JNI_OnLoad)
    // before any JniEnvPtr is constructed.
    static void GlobalInit(JavaVM *vm);

private:
    // Non-copyable: the wrapper represents per-thread state.
    JniEnvPtr(const JniEnvPtr &) = delete;

    JniEnvPtr &operator=(const JniEnvPtr &) = delete;

    // pthread key destructor: detaches the dying thread from the JVM.
    static void JniThreadDestroyed(void *value);

private:
    JNIEnv *mEnv = nullptr;
};

#endif // __ANDROID__
#endif // JNI_ENV_H
diff --git a/engine/common/include/android/JniUtil.h b/engine/common/include/android/JniUtil.h
new file mode 100644
index 00000000..b114838a
--- /dev/null
+++ b/engine/common/include/android/JniUtil.h
@@ -0,0 +1,25 @@
#ifndef JNI_UTIL_H
#define JNI_UTIL_H

#if defined(__ANDROID__)

// Restored include targets (stripped in extraction): jni.h for JNI types,
// <string> for std::string, <cstdint> for fixed-width ints.
#include <jni.h>
#include <string>
#include <cstdint>

// Reads the device API level from the ro.build.version.sdk system property.
int JniGetApiLevel();
// Returns true if a Java exception is pending (describes it, does NOT clear it).
bool JniCheckException(JNIEnv *env);
// Returns true if a Java exception was pending; describes and clears it.
bool JniCheckExceptionClear(JNIEnv *env);

// FindClass + NewGlobalRef; returns nullptr when the class is not found.
jclass JniGetClassGlobalRef(JNIEnv *env, const char *name);

// Copies a jstring into a std::string ("" for null input).
std::string JniGetStringUTFChars(JNIEnv *env, jstring str);


// Allocates a Java byte[] and returns a global ref; nullptr on failure
// (clears any pending allocation exception).
jbyteArray JniNewByteArrayGlobalRefCatch(JNIEnv *env, jsize capacity);

// Delete a local/global ref through a pointer and null it out.
void JniDeleteLocalRefP(JNIEnv *env, jobject *obj);
void JniDeleteGlobalRefP(JNIEnv *env, jobject *obj);

#endif // __ANDROID__
#endif // JNI_UTIL_H
diff --git a/engine/common/src/NextDictionary.cpp b/engine/common/src/NextDictionary.cpp
new file mode 100644
index 00000000..64ad319f
--- /dev/null
+++ b/engine/common/src/NextDictionary.cpp
@@ -0,0 +1,141 @@
+/**
+ * Note: operation of dictionary
+ * Date: 2025/12/2
+ * Author: frank
+ */
+
+#include "NextDictionary.h"
+
+#include
+
+NextDictionary::~NextDictionary() { Clear(); }
+
+void NextDictionary::Clear() {
+ for (auto item: m_items) {
+ if (item) {
+ if (item->item_name) {
+ delete[] item->item_name;
+ item->item_name = nullptr;
+ }
+ FreeItemValue(item);
+ delete item;
+ }
+ }
+ m_items.clear();
+}
+
+void NextDictionary::SetInt64(const char *name, int64_t value) {
+ Item *item = AllocateItem(name);
+ if (item) {
+ item->type = VALUE_TYPE_INT64;
+ item->val_int64 = value;
+ }
+}
+
+bool NextDictionary::FindInt64(const char *name, int64_t *value) const {
+ const Item *item = FindItem(name, VALUE_TYPE_INT64);
+ if (item) {
+ *value = item->val_int64;
+ return true;
+ }
+ return false;
+}
+
+int64_t NextDictionary::GetInt64(const char *name, int64_t defaultValue) const {
+ const Item *item = FindItem(name, VALUE_TYPE_INT64);
+ if (item) {
+ return item->val_int64;
+ }
+ return defaultValue;
+}
+
+
+void NextDictionary::Item::SetName(const char *name, size_t len) {
+ name_len = len;
+ item_name = new char[len + 1];
+ memcpy((void *) item_name, name, len + 1);
+}
+
+NextDictionary::Item *NextDictionary::AllocateItem(const char *name) {
+ size_t len = strlen(name);
+ size_t i = FindItemIndex(name, len);
+ Item *item;
+
+ if (i < m_items.size()) {
+ item = m_items[i];
+ FreeItemValue(item);
+ } else {
+ item = new Item();
+ if (!item) {
+ return item;
+ }
+ item->SetName(name, len);
+ m_items.emplace_back(item);
+ }
+
+ return item;
+}
+
+void NextDictionary::FreeItemValue(Item *item) {
+ switch (item->type) {
+ case VALUE_TYPE_STRING:
+ delete item->val_string;
+ break;
+ default:
+ break;
+ }
+}
+
+const NextDictionary::Item *NextDictionary::FindItem(const char *name, ValueType type) const {
+ size_t i = FindItemIndex(name, strlen(name));
+
+ if (i < m_items.size()) {
+ const Item *item = m_items[i];
+ return item->type == type ? item : nullptr;
+ }
+
+ return nullptr;
+}
+
+size_t NextDictionary::FindItemIndex(const char *name, size_t len) const {
+ size_t i = 0;
+ for (; i < m_items.size(); i++) {
+ if (len != m_items[i]->name_len) {
+ continue;
+ }
+ if (!memcmp(m_items[i]->item_name, name, len)) {
+ break;
+ }
+ }
+ return i;
+}
+
+void NextDictionary::SetString(const char *name, const std::string &s) {
+ Item *item = AllocateItem(name);
+ if (item) {
+ item->type = VALUE_TYPE_STRING;
+ item->val_string = new std::string(s);
+ }
+}
+
+std::string NextDictionary::GetString(const char *name, std::string *defaultValue) const {
+ const Item *item = FindItem(name, VALUE_TYPE_STRING);
+ if (item) {
+ return *(item->val_string);
+ }
+ return defaultValue ? (*defaultValue) : "";
+}
+
+size_t NextDictionary::GetSize() const {
+ return m_items.size();
+}
+
+const char *NextDictionary::GetEntryNameAt(size_t index, ValueType *type) const {
+ if (index >= m_items.size()) {
+ *type = VALUE_TYPE_INT32;
+
+ return nullptr;
+ }
+ *type = m_items[index]->type;
+ return m_items[index]->item_name;
+}
diff --git a/engine/common/src/NextLog.cpp b/engine/common/src/NextLog.cpp
new file mode 100644
index 00000000..a5b1a26b
--- /dev/null
+++ b/engine/common/src/NextLog.cpp
@@ -0,0 +1,120 @@
+#include "NextLog.h"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#define TIME_BUF_SIZE 32
+#define PRINT_BUF_SIZE 1024
+#define OUTPUT_BUF_SIZE 1256
+
+static LogContext kLogContext = {0};
+
+#if defined(__APPLE__)
+static void GetLocalTimeString(char *buf, int buf_size) {
+ struct timeval t;
+ gettimeofday(&t, NULL);
+ struct tm *ptm = localtime(&t.tv_sec);
+ snprintf(buf, buf_size, "%02d-%02d %02d:%02d:%02d.%03d", ptm->tm_mon + 1,
+ ptm->tm_mday, ptm->tm_hour, ptm->tm_min, ptm->tm_sec,
+ static_cast(t.tv_usec / 1000));
+ return;
+}
+
+static char GetLogLevelChar(int lev) {
+ switch (lev) {
+ case AV_LEVEL_INFO:
+ return 'I';
+ case AV_LEVEL_DEBUG:
+ return 'D';
+ case AV_LEVEL_WARN:
+ return 'W';
+ case AV_LEVEL_ERROR:
+ return 'E';
+ case AV_LEVEL_FATAL:
+ return 'F';
+ default:
+ return ' ';
+ }
+ return ' ';
+}
+#endif
+
+static void FormatLog(int level, const char *tag, int id, char *input_buf,
+ char *output_buf, int out_buf_size) {
+#if defined(__APPLE__) || defined(__HARMONY__)
+ int pid = getpid();
+#endif
+#if defined(__APPLE__)
+ uint64_t atid;
+ pthread_threadid_np(NULL, &atid);
+ char level = GetLogLevelChar(level);
+ char time_buf[TIME_BUF_SIZE] = {0};
+ GetLocalTimeString(time_buf, TIME_BUF_SIZE);
+#elif defined(__HARMONY__)
+ pid_t tid = syscall(SYS_gettid);
+#endif
+ if (id > 0) {
+#if defined(__APPLE__)
+ snprintf(output_buf, out_buf_size,
+ "%s %d 0x%" PRIu64 " %c [%s]: [id @ %04d] %s", time_buf, pid, atid,
+ level, tag, id, input_buf);
+#elif defined(__HARMONY__)
+ snprintf(output_buf, out_buf_size, "%d %d [%s]: [id @ %04d] %s", pid, tid, tag,
+ id, input_buf);
+#else
+ snprintf(output_buf, out_buf_size, "[%s]: [id @ %04d] %s", tag, id, input_buf);
+#endif
+ } else {
+#if defined(__APPLE__)
+ snprintf(output_buf, out_buf_size, "%s %d 0x%" PRIu64 " %c [%s]: %s", time_buf,
+ pid, atid, level, tag, input_buf);
+#elif defined(__HARMONY__)
+ snprintf(output_buf, out_buf_size, "%d %d [%s]: %s", pid, tid, tag, input_buf);
+#else
+ snprintf(output_buf, out_buf_size, "[%s]: %s", tag, input_buf);
+#endif
+ }
+
+}
+
+int LogPrint(int level, const char *tag, const char *fmt, ...) {
+ if (kLogContext.level < level) {
+ return 0;
+ }
+
+ char print_buf[PRINT_BUF_SIZE] = {0};
+ char output_buf[OUTPUT_BUF_SIZE] = {0};
+ va_list args;
+ int printed;
+ va_start(args, fmt);
+ printed = vsnprintf(print_buf, PRINT_BUF_SIZE - 1, fmt, args);
+ va_end(args);
+
+ if (printed <= 0) {
+ return printed;
+ }
+
+ FormatLog(level, tag, 0, print_buf, output_buf, OUTPUT_BUF_SIZE);
+ if (kLogContext.callback) {
+ kLogContext.callback(kLogContext.userdata, level, output_buf);
+ return 0;
+ }
+
+ return 0;
+}
+
+void SetLogLevel(int level) {
+ kLogContext.level = level;
+}
+
+void SetLogCallback(void (*callback)(void *, int, const char *), void *userdata) {
+ kLogContext.callback = callback;
+ kLogContext.userdata = userdata;
+}
diff --git a/engine/common/src/android/JniEnv.cpp b/engine/common/src/android/JniEnv.cpp
new file mode 100644
index 00000000..1d440b82
--- /dev/null
+++ b/engine/common/src/android/JniEnv.cpp
@@ -0,0 +1,51 @@
#if defined(__ANDROID__)

#include "android/JniEnv.h"

// Restored include target (stripped in extraction): pthread TLS key API.
#include <pthread.h>

static pthread_key_t kThreadKey;
JavaVM *kJvm = nullptr;

// Binds the current thread to a JNIEnv: reuses an existing attachment, or
// attaches and registers the env in TLS so JniThreadDestroyed can detach later.
JniEnvPtr::JniEnvPtr() {
    if (!kJvm) {
        // GlobalInit() was never called; Env() will stay nullptr.
        return;
    }

    // Already attached (GetEnv fills mEnv on JNI_OK); nothing more to do.
    if (kJvm->GetEnv(reinterpret_cast<void **>(&mEnv), JNI_VERSION_1_6) !=
        JNI_EDETACHED) {
        return;
    }

    if (kJvm->AttachCurrentThread(&mEnv, nullptr) < 0) {
        return;
    }

    // Remember the attachment so the key destructor detaches this thread on exit.
    pthread_setspecific(kThreadKey, mEnv);
}

// Intentionally empty: detachment is deferred to thread death (TLS destructor).
JniEnvPtr::~JniEnvPtr() {}

JNIEnv *JniEnvPtr::operator->() {
    return mEnv;
}

JNIEnv *JniEnvPtr::Env() const {
    return mEnv;
}

// pthread key destructor: detaches a dying thread that was attached by us.
void JniEnvPtr::JniThreadDestroyed(void *value) {
    auto *env = reinterpret_cast<JNIEnv *>(value);

    if (env != nullptr && kJvm != nullptr) {
        kJvm->DetachCurrentThread();
        pthread_setspecific(kThreadKey, nullptr);
    }
}

// Call once (e.g. from JNI_OnLoad) before constructing any JniEnvPtr.
void JniEnvPtr::GlobalInit(JavaVM *vm) {
    kJvm = vm;
    pthread_key_create(&kThreadKey, JniThreadDestroyed);
}

#endif // __ANDROID__
diff --git a/engine/common/src/android/JniUtil.cpp b/engine/common/src/android/JniUtil.cpp
new file mode 100644
index 00000000..f3c29ad8
--- /dev/null
+++ b/engine/common/src/android/JniUtil.cpp
@@ -0,0 +1,91 @@
#if defined(__ANDROID__)

#include "android/JniUtil.h"

// NOTE(review): original include targets stripped in extraction; this set
// covers __system_property_get, atoi and the string helpers — confirm.
#include <sys/system_properties.h>
#include <cstdlib>
#include <cstring>

// Reads the device API level from ro.build.version.sdk (0 if unreadable).
int JniGetApiLevel() {
    int ret = 0;
    char value[1024] = {0};
    __system_property_get("ro.build.version.sdk", value);
    ret = atoi(value);
    return ret;
}

// True if a Java exception is pending; describes it but leaves it pending.
bool JniCheckException(JNIEnv *env) {
    if (env->ExceptionCheck()) {
        env->ExceptionDescribe();
        return true;
    }

    return false;
}

// True if a Java exception was pending; describes AND clears it.
bool JniCheckExceptionClear(JNIEnv *env) {
    if (env->ExceptionCheck()) {
        env->ExceptionDescribe();
        env->ExceptionClear();
        return true;
    }

    return false;
}

// FindClass + promotion to a global ref (local ref released). nullptr on failure.
jclass JniGetClassGlobalRef(JNIEnv *env, const char *name) {
    jclass clazz = env->FindClass(name);
    if (!clazz) {
        return clazz;
    }
    auto clazz_global = (jclass) env->NewGlobalRef(clazz);
    JniDeleteLocalRefP(env, reinterpret_cast<jobject *>(&clazz));

    return clazz_global;
}

// Copies a jstring into a std::string ("" for null input or failed access).
std::string JniGetStringUTFChars(JNIEnv *env, jstring str) {
    std::string result;
    if (!str)
        return result;

    // Bug fix: the second argument is an optional jboolean* "isCopy"
    // out-parameter; the original passed JNI_FALSE (an integer 0) there.
    // Also only release when GetStringUTFChars actually succeeded.
    const char *data = env->GetStringUTFChars(str, nullptr);
    if (data) {
        result = data;
        env->ReleaseStringUTFChars(str, data);
    }

    return result;
}

// Allocates a Java byte[] and promotes it to a global ref; any allocation
// exception is cleared and nullptr returned.
jbyteArray JniNewByteArrayGlobalRefCatch(JNIEnv *env, jsize capacity) {
    if (capacity <= 0) {
        return nullptr;
    }
    jbyteArray local = env->NewByteArray(capacity);
    if (JniCheckExceptionClear(env)) {
        return nullptr;
    }
    if (!local) {
        return nullptr;
    }
    auto global = (jbyteArray) env->NewGlobalRef((jobject) local);

    JniDeleteLocalRefP(env, reinterpret_cast<jobject *>(&local));

    return global;
}

// Deletes a local ref through a pointer and nulls the caller's variable.
void JniDeleteLocalRefP(JNIEnv *env, jobject *obj) {
    if (!obj)
        return;
    env->DeleteLocalRef(*obj);
    *obj = nullptr;
}

// Deletes a global ref through a pointer and nulls the caller's variable.
void JniDeleteGlobalRefP(JNIEnv *env, jobject *obj) {
    if (!obj)
        return;
    env->DeleteGlobalRef(*obj);
    *obj = nullptr;
}

#endif // __ANDROID__
diff --git a/engine/decode/AudioDecoder.h b/engine/decode/AudioDecoder.h
new file mode 100644
index 00000000..2d3f12c2
--- /dev/null
+++ b/engine/decode/AudioDecoder.h
@@ -0,0 +1,59 @@
+/**
+ * Note: interface of Audio decoder
+ * Date: 2025/12/7
+ * Author: frank
+ */
+
+#ifndef AUDIO_DECODER_H
+#define AUDIO_DECODER_H
+
+#include
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "libavcodec/packet.h"
+#include "libavutil/frame.h"
+#ifdef __cplusplus
+}
+#endif
+
+struct AudioCodecConfig {
+ int profile = -1;
+ int codec_id = -1;
+ int channels = 0;
+ int sample_rate = 0;
+ int extradata_size = 0;
+ uint8_t *extradata = nullptr;
+ char *codec_name = nullptr;
+};
+
+class AudioDecodeCallback {
+public:
+ virtual int OnDecodedFrame(AVFrame *frame) = 0;
+
+ virtual void OnDecodeError(int error) = 0;
+
+ virtual ~AudioDecodeCallback() = default;
+};
+
+class AudioDecoder {
+public:
+
+ virtual ~AudioDecoder() = default;
+
+ virtual int Init(AudioCodecConfig &config) = 0;
+
+ virtual int Decode(const AVPacket *pkt) = 0;
+
+ virtual int Flush() = 0;
+
+ virtual int Release() = 0;
+
+ virtual void SetDecodeCallback(AudioDecodeCallback *callback) = 0;
+
+protected:
+ AudioDecodeCallback *mAudioDecodedCallback = nullptr;
+};
+
+#endif
diff --git a/engine/decode/CMakeLists.txt b/engine/decode/CMakeLists.txt
new file mode 100644
index 00000000..0f485e23
--- /dev/null
+++ b/engine/decode/CMakeLists.txt
@@ -0,0 +1,70 @@
cmake_minimum_required(VERSION 3.10.2)

project(decode)

# Base C++ flags: C++17 with debug info; all warnings promoted to errors.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -g -Wall")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror -Wno-deprecated")
set(CMAKE_CXX_FLAGS_DEBUG "-O0")
set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG ")

# Layout: sibling modules live next to this one; prebuilt FFmpeg under ../extra.
set(ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR})
set(EXTRA_DIR "${ROOT_DIR}/../extra")
set(COMMON_DIR "${ROOT_DIR}/../common")
set(EXTRA_COMMON_DIR "${COMMON_DIR}")
set(EXTRA_FFMPEG_ROOT "${EXTRA_DIR}/ffmpeg")

set(SRC_LIST
        common/nal_convert.c
        common/MixedBuffer.cpp
        common/h26x_parser.c
        VideoDecoder.cpp
        VideoDecoderFactory.cpp
        FFmpegVideoDecoder.cpp
        FFmpegAudioDecoder.cpp
)

if (CMAKE_SYSTEM_NAME STREQUAL "Android")
    # Android: pick the FFmpeg prebuilt matching the target ABI.
    set(TARGET_PLATFORM android)
    set(CMAKE_SYSTEM_VERSION 21)
    set(EXTRA_FFMPEG_DIR
            "${EXTRA_DIR}/ffmpeg/${TARGET_PLATFORM}/${CMAKE_ANDROID_ARCH_ABI}")
    set(CMAKE_ANDROID_NDK $ENV{ANDROID_NDK})
#    set(SRC_LIST ${SRC_LIST}
#            android/MediacodecDecoder.cpp)
elseif (CMAKE_SYSTEM_NAME STREQUAL "OHOS")
    # HarmonyOS: define the __HARMONY__ guard and pick the OHOS_ARCH prebuilt.
    add_definitions(-D__HARMONY__)
    set(TARGET_PLATFORM harmony)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-command-line-argument")
    set(EXTRA_FFMPEG_DIR "${EXTRA_DIR}/ffmpeg/${TARGET_PLATFORM}/${OHOS_ARCH}")
else ()
    message(FATAL_ERROR "Don't support ${CMAKE_SYSTEM_NAME}!")
endif ()

include_directories(
        "${EXTRA_FFMPEG_ROOT}/include"
        "${EXTRA_COMMON_DIR}/include"
        "${CMAKE_CURRENT_SOURCE_DIR}/..")

link_directories("${EXTRA_FFMPEG_DIR}")

add_library(decode SHARED ${SRC_LIST})

# Platform-specific media/system libraries plus the shared ffmpeg/common libs.
if (CMAKE_SYSTEM_NAME STREQUAL "Android")
    target_link_libraries(
            decode
            mediandk
            android
            common
            ffmpeg
    )
elseif (CMAKE_SYSTEM_NAME STREQUAL "OHOS")
    target_link_libraries(
            decode
            ffmpeg
            common
            native_media_vdec
            native_media_core
            native_media_codecbase
            native_window
    )
endif ()
diff --git a/engine/decode/FFmpegAudioDecoder.cpp b/engine/decode/FFmpegAudioDecoder.cpp
new file mode 100644
index 00000000..b4edfc5d
--- /dev/null
+++ b/engine/decode/FFmpegAudioDecoder.cpp
@@ -0,0 +1,110 @@
+/**
+ * Note: audio decoder with FFmpeg
+ * Date: 2025/12/7
+ * Author: frank
+ */
+
+#include "FFmpegAudioDecoder.h"
+#include "NextErrorCode.h"
+#include "NextLog.h"
+
+#define FFMPEG_AUDIO_TAG "FFmpegAudioDec"
+
+FFmpegAudioDecoder::FFmpegAudioDecoder() {}
+
+FFmpegAudioDecoder::~FFmpegAudioDecoder() {
+ NEXT_LOGD(FFMPEG_AUDIO_TAG, "~FFmpegAudioDecoder destructor");
+}
+
+int FFmpegAudioDecoder::Init(AudioCodecConfig &config) {
+ int ret = RESULT_OK;
+ if (!config.channels || !config.sample_rate) {
+ return ERROR_DECODE_INVALID;
+ }
+ mCodecContext = avcodec_alloc_context3(nullptr);
+
+ mCodecContext->profile = config.profile;
+ mCodecContext->codec_id = (AVCodecID) config.codec_id;
+ mCodecContext->time_base = {1, 1000}; // TODO: timebase
+ mCodecContext->codec_type = AVMEDIA_TYPE_AUDIO;
+ mCodecContext->sample_rate = config.sample_rate;
+ mCodecContext->thread_count = 1;
+ mCodecContext->ch_layout.nb_channels = config.channels;
+
+ if (config.extradata_size > 0) {
+ mCodecContext->extradata = reinterpret_cast<uint8_t *>(
+ av_malloc(config.extradata_size + AV_INPUT_BUFFER_PADDING_SIZE));
+ mCodecContext->extradata_size = config.extradata_size;
+ memcpy(mCodecContext->extradata, config.extradata, config.extradata_size);
+ }
+
+ auto *codec = const_cast<AVCodec *>(avcodec_find_decoder(mCodecContext->codec_id));
+ if (!codec) {
+ NEXT_LOGE(FFMPEG_AUDIO_TAG, "avcodec_find_decoder fail, name=%s",
+ avcodec_get_name(mCodecContext->codec_id));
+ Release();
+ return ERROR_DECODE_AUDIO_OPEN;
+ }
+
+ AVDictionary *opts = nullptr;
+ av_dict_set(&opts, "refcounted_frames", "1", 0);
+ if ((ret = avcodec_open2(mCodecContext, codec, nullptr)) < 0) {
+ NEXT_LOGE(FFMPEG_AUDIO_TAG, "avcodec_open2 fail, msg=%s", av_err2str(ret));
+ av_dict_free(&opts);
+ Release();
+ return ERROR_DECODE_AUDIO_OPEN;
+ }
+ av_dict_free(&opts);
+
+ return RESULT_OK;
+}
+
+int FFmpegAudioDecoder::Decode(const AVPacket *pkt) {
+ if (!pkt || !mCodecContext || !mAudioDecodedCallback) {
+ return ERROR_DECODE_NOT_INIT;
+ }
+
+ int ret = avcodec_send_packet(mCodecContext, pkt);
+ if (ret < 0) {
+ return ret;
+ }
+
+ AVFrame *frame = av_frame_alloc();
+ ret = avcodec_receive_frame(mCodecContext, frame);
+ if (ret < 0) {
+ av_frame_free(&frame);
+ switch (ret) {
+ case AVERROR(EAGAIN):
+ return ERROR_PLAYER_TRY_AGAIN;
+ case AVERROR_EOF:
+ NEXT_LOGI(FFMPEG_AUDIO_TAG, "avcodec_receive_frame EOF...");
+ return ERROR_PLAYER_EOF;
+ default:
+ return ret;
+ }
+ }
+
+ mAudioDecodedCallback->OnDecodedFrame(frame);
+
+ return RESULT_OK;
+}
+
+int FFmpegAudioDecoder::Flush() {
+ if (mCodecContext) {
+ avcodec_flush_buffers(mCodecContext);
+ }
+ return RESULT_OK;
+}
+
+int FFmpegAudioDecoder::Release() {
+ NEXT_LOGI(FFMPEG_AUDIO_TAG, "Release...");
+ if (mCodecContext) {
+ avcodec_free_context(&mCodecContext);
+ mCodecContext = nullptr;
+ }
+ return RESULT_OK;
+}
+
+void FFmpegAudioDecoder::SetDecodeCallback(AudioDecodeCallback *callback) {
+ mAudioDecodedCallback = callback;
+}
diff --git a/engine/decode/FFmpegAudioDecoder.h b/engine/decode/FFmpegAudioDecoder.h
new file mode 100644
index 00000000..e90771ed
--- /dev/null
+++ b/engine/decode/FFmpegAudioDecoder.h
@@ -0,0 +1,31 @@
+#ifndef FFMPEG_AUDIO_DECODER_H
+#define FFMPEG_AUDIO_DECODER_H
+
+#include "AudioDecoder.h"
+
+extern "C" {
+#include "libavcodec/avcodec.h"
+}
+
+class FFmpegAudioDecoder : public AudioDecoder {
+public:
+ FFmpegAudioDecoder();
+
+ ~FFmpegAudioDecoder() override;
+
+ int Init(AudioCodecConfig &config) override;
+
+ int Decode(const AVPacket *pkt) override;
+
+ int Flush() override;
+
+ int Release() override;
+
+ void SetDecodeCallback(AudioDecodeCallback *callback) override;
+
+private:
+ AVCodecContext *mCodecContext;
+
+};
+
+#endif
diff --git a/engine/decode/FFmpegVideoDecoder.cpp b/engine/decode/FFmpegVideoDecoder.cpp
new file mode 100644
index 00000000..017d6d74
--- /dev/null
+++ b/engine/decode/FFmpegVideoDecoder.cpp
@@ -0,0 +1,199 @@
+/**
+ * Note: video decoder with FFmpeg
+ * Date: 2025/12/8
+ * Author: frank
+ */
+
+#include "decode/FFmpegVideoDecoder.h"
+
+#include "NextLog.h"
+
+#define FFMPEG_VIDEO_TAG "FFmpegVideoDec"
+
+static void ReleaseFrame(FFmpegBufferContext *context) {
+ if (context && context->av_frame) {
+ auto *frame = reinterpret_cast<AVFrame *>(context->av_frame);
+ av_frame_unref(frame);
+ av_frame_free(&frame);
+ }
+}
+
+FFmpegVideoDecoder::FFmpegVideoDecoder(int codecId)
+ : VideoDecoder(codecId) {}
+
+FFmpegVideoDecoder::~FFmpegVideoDecoder() {
+ NEXT_LOGD(FFMPEG_VIDEO_TAG, "~FFmpegVideoDecoder destructor");
+}
+
+int FFmpegVideoDecoder::Init(const MetaData *metadata) {
+ mCodecContext = avcodec_alloc_context3(nullptr);
+
+ mCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
+ mCodecContext->thread_count = 0; // auto
+ mCodecContext->thread_type = FF_THREAD_FRAME;
+ mCodecContext->time_base = {1, 1000};
+
+ mCodecContext->codec_id = (AVCodecID) mCodecId;
+
+ auto *codec = const_cast<AVCodec *>(avcodec_find_decoder(mCodecContext->codec_id));
+ if (!codec) {
+ NEXT_LOGE(FFMPEG_VIDEO_TAG, "avcodec_find_decoder fail, name=%s",
+ avcodec_get_name(mCodecContext->codec_id));
+ Release();
+ return ERROR_DECODE_VIDEO_OPEN;
+ }
+
+ AVDictionary *opts = nullptr;
+ if (avcodec_open2(mCodecContext, codec, &opts) < 0) {
+ NEXT_LOGE(FFMPEG_VIDEO_TAG, "avcodec_open2 fail, name=%s, w=%d, h=%d",
+ avcodec_get_name(mCodecContext->codec_id),
+ mCodecContext->width, mCodecContext->height);
+ Release();
+ av_dict_free(&opts);
+ return ERROR_DECODE_VIDEO_OPEN;
+ }
+ av_dict_free(&opts);
+
+ return RESULT_OK;
+}
+
+int FFmpegVideoDecoder::Decode(const AVPacket *pkt) {
+ if (!mCodecContext || !mVideoDecodeCallback) {
+ return ERROR_DECODE_NOT_INIT;
+ }
+
+ AVFrame *frame = av_frame_alloc();
+ int ret = avcodec_receive_frame(mCodecContext, frame);
+
+ if (ret >= 0) {
+ size_t bufferSize = 1;
+ std::unique_ptr<MediaBuffer> output_buffer =
+ std::make_unique<MediaBuffer>(BufferType::BUFFER_VIDEO_FRAME, bufferSize);
+
+ VideoFrameMetadata *meta = output_buffer->GetVideoFrameMetadata();
+ meta->width = frame->width;
+ meta->height = frame->height;
+ meta->stride_y = frame->linesize[0];
+ meta->stride_u = frame->linesize[1];
+ meta->stride_v = frame->linesize[2];
+
+ meta->buffer_y = frame->data[0];
+ meta->buffer_u = frame->data[1];
+ meta->buffer_v = frame->data[2];
+
+ meta->buffer_context = reinterpret_cast<void *>(new FFmpegBufferContext{
+ .av_frame = frame,
+ .release_frame = ReleaseFrame,
+ });
+
+ switch (frame->format) {
+ case AV_PIX_FMT_YUVJ420P:
+ meta->pixel_format = VideoPixelFormat::PIXEL_FORMAT_YUVJ420P;
+ break;
+ case AV_PIX_FMT_YUV420P10LE:
+ meta->pixel_format = VideoPixelFormat::PIXEL_FORMAT_YUV420P10LE;
+ break;
+ case AV_PIX_FMT_YUV420P:
+ meta->pixel_format = VideoPixelFormat::PIXEL_FORMAT_YUV420P;
+ break;
+ default:
+ meta->pixel_format = VideoPixelFormat::PIXEL_FORMAT_YUV420P;
+ NEXT_LOGW(FFMPEG_VIDEO_TAG, "unsupported pixel format %d", frame->format);
+ break;
+ }
+
+ meta->pts = frame->best_effort_timestamp;
+ meta->dts = frame->pkt_dts;
+
+ mVideoDecodeCallback->OnDecodedFrame(std::move(output_buffer));
+ } else if (ret == AVERROR_EOF) {
+ av_frame_unref(frame);
+ av_frame_free(&frame);
+ return ERROR_PLAYER_EOF;
+ } else {
+ av_frame_unref(frame);
+ av_frame_free(&frame);
+ }
+
+ AVPacket packet;
+ av_init_packet(&packet);
+ packet.data = pkt->data;
+ packet.size = pkt->size;
+ packet.pts = pkt->pts;
+ packet.dts = pkt->dts;
+
+ if (!bFlushState) {
+ if (packet.size == 0) {
+ bFlushState = true;
+ }
+ ret = avcodec_send_packet(mCodecContext, bFlushState ? nullptr : &packet);
+ if (ret == AVERROR(EAGAIN)) {
+ return ERROR_PLAYER_TRY_AGAIN;
+ }
+ }
+
+ return RESULT_OK;
+}
+
+int FFmpegVideoDecoder::SetVideoFormat(const MetaData *metadata) {
+ if (!metadata || metadata->video_index < 0) {
+ NEXT_LOGE(FFMPEG_VIDEO_TAG, "metadata is invalid");
+ return ERROR_DECODE_INVALID;
+ }
+
+ if (mCodecContext == nullptr) {
+ NEXT_LOGE(FFMPEG_VIDEO_TAG, "alloc codec_context failed...");
+ return ERROR_DECODE_NOT_INIT;
+ }
+
+ auto trackInfo = metadata->track_info[metadata->video_index];
+
+ if (trackInfo.extra_data && trackInfo.extra_data_size > 0) {
+ mCodecContext->extradata = reinterpret_cast<uint8_t *>(
+ av_malloc(trackInfo.extra_data_size + AV_INPUT_BUFFER_PADDING_SIZE));
+ mCodecContext->extradata_size = trackInfo.extra_data_size;
+ memcpy(mCodecContext->extradata, trackInfo.extra_data, trackInfo.extra_data_size);
+ }
+
+ auto *codec = const_cast<AVCodec *>(avcodec_find_decoder(mCodecContext->codec_id));
+ if (!codec) {
+ Release();
+ return ERROR_DECODE_VIDEO_OPEN;
+ }
+
+ int ret = 0;
+ if ((ret = avcodec_close(mCodecContext)) < 0) {
+ Release();
+ return ERROR_DECODE_VIDEO_OPEN;
+ }
+
+ AVDictionary *opts = nullptr;
+ av_dict_set(&opts, "threads", "auto", 0);
+ av_dict_set(&opts, "refcounted_frames", "1", 0);
+
+ if ((ret = avcodec_open2(mCodecContext, codec, &opts)) < 0) {
+ av_dict_free(&opts);
+ Release();
+ return ERROR_DECODE_VIDEO_OPEN;
+ }
+ av_dict_free(&opts);
+
+ return RESULT_OK;
+}
+
+int FFmpegVideoDecoder::Flush() {
+ bFlushState = false;
+ if (mCodecContext) {
+ avcodec_flush_buffers(mCodecContext);
+ }
+ return RESULT_OK;
+}
+
+int FFmpegVideoDecoder::Release() {
+ NEXT_LOGD(FFMPEG_VIDEO_TAG, "Release...");
+ if (mCodecContext) {
+ avcodec_free_context(&mCodecContext);
+ mCodecContext = nullptr;
+ }
+ return RESULT_OK;
+}
diff --git a/engine/decode/FFmpegVideoDecoder.h b/engine/decode/FFmpegVideoDecoder.h
new file mode 100644
index 00000000..01845fd1
--- /dev/null
+++ b/engine/decode/FFmpegVideoDecoder.h
@@ -0,0 +1,37 @@
+#ifndef FFMPEG_VIDEO_DECODER_H
+#define FFMPEG_VIDEO_DECODER_H
+
+#include "decode/common/VideoCodecInfo.h"
+#include "decode/VideoDecoder.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "libavcodec/avcodec.h"
+#ifdef __cplusplus
+}
+#endif
+
+class FFmpegVideoDecoder : public VideoDecoder {
+public:
+ explicit FFmpegVideoDecoder(int codecId);
+
+ ~FFmpegVideoDecoder() override;
+
+ int Init(const MetaData *metadata) override;
+
+ int Decode(const AVPacket *pkt) override;
+
+ int Flush() override;
+
+ int SetVideoFormat(const MetaData *metadata) override;
+
+ int Release() override;
+
+private:
+ bool bFlushState = false;
+ AVCodecContext *mCodecContext = nullptr;
+
+};
+
+#endif
diff --git a/engine/decode/VideoDecoder.cpp b/engine/decode/VideoDecoder.cpp
new file mode 100644
index 00000000..82426e44
--- /dev/null
+++ b/engine/decode/VideoDecoder.cpp
@@ -0,0 +1,16 @@
+/**
+ * Note: interface of video decoder
+ * Date: 2025/12/21
+ * Author: frank
+ */
+
+#include "decode/VideoDecoder.h"
+
+VideoDecoder::VideoDecoder(int codecId)
+ : mCodecId(codecId) {}
+
+VideoDecoder::~VideoDecoder() = default;
+
+void VideoDecoder::SetDecodeCallback(VideoDecodeCallback *callback) {
+ mVideoDecodeCallback = callback;
+}
diff --git a/engine/decode/VideoDecoder.h b/engine/decode/VideoDecoder.h
new file mode 100644
index 00000000..3c3ad0e0
--- /dev/null
+++ b/engine/decode/VideoDecoder.h
@@ -0,0 +1,73 @@
+#ifndef VIDEO_DECODER_H
+#define VIDEO_DECODER_H
+
+#include <memory>