Mirror of https://github.com/Shabinder/SpotiFlyer.git, synced 2024-11-22 17:14:32 +01:00

Commit 24c0002bb0: libmp3fixes
Parent: df6e969a56
@@ -103,6 +103,7 @@ dependencies {
     implementation(project(":common:data-models"))
     implementation(project(":common:core-components"))
     implementation(project(":common:providers"))
+    implementation(project(":ffmpeg:android-ffmpeg"))

     // Koin
     implementation(Koin.android)
@@ -77,6 +77,7 @@ import kotlinx.coroutines.flow.MutableSharedFlow
 import kotlinx.coroutines.flow.conflate
 import kotlinx.coroutines.flow.emitAll
 import kotlinx.coroutines.launch
+import nl.bravobit.ffmpeg.*
 import org.koin.android.ext.android.inject
 import org.koin.core.parameter.parametersOf
 import java.io.File
@@ -107,13 +108,37 @@ class MainActivity : ComponentActivity() {
         // This app draws behind the system bars, so we want to handle fitting system windows
         WindowCompat.setDecorFitsSystemWindows(window, false)
         rootComponent = spotiFlyerRoot(defaultComponentContext())
+        val ffmpeg = FFmpeg.getInstance(this@MainActivity)
+        val ffprobe = FFprobe.getInstance(this@MainActivity)
         lifecycleScope.launch {
             Log.d("FFmpeg", "init")
-            AndroidMediaConverter().convertAudioFile("/storage/emulated/0/Music/SpotiFlyer/Playlists/Sing-along_Punjabi/Kya_Baat_Ay.mp3","/storage/emulated/0/Music/SpotiFlyer/Playlists/Sing-along_Punjabi/Kya_Baat_Ay.temp.mp3").fold({
+            FFmpegConfig.versionFFmpeg(this@MainActivity)
+            FFmpegConfig.codecsFFmpeg(this@MainActivity)
+            FFmpegConfig.versionFFprobe(this@MainActivity)
+            Log.d("FFmpeg Support", ffmpeg.isSupported.toString())
+            val inputFilePath = "/storage/emulated/0/Music/SpotiFlyer/Playlists/Sing-along_Punjabi/Kya_Baat_Ay.mp3"
+            val outputFilePath = "/storage/emulated/0/Music/SpotiFlyer/Playlists/Sing-along_Punjabi/Kya_Baat_Ay.temp.mp3"
+            val kbpsArg = "-b:a 192k"
+            ffmpeg.execute(arrayOf("-i", inputFilePath, /*"-acodec", "libmp3lame",*/ "-vn", outputFilePath), object : ExecuteBinaryResponseHandler() {
+                override fun onSuccess(message: String?) {
+                    Log.d("FFmpeg Command", "Success $message")
+                }
+
+                override fun onProgress(message: String?) {
+                    Log.d("FFmpeg Command", "Progress $message")
+                }
+
+                override fun onFailure(message: String?) {
+                    Log.d("FFmpeg Command", "Failed $message")
+                }
+            })
+
+            /* AndroidMediaConverter().convertAudioFile("/storage/emulated/0/Music/SpotiFlyer/Playlists/Sing-along_Punjabi/Kya_Baat_Ay.mp3","/storage/emulated/0/Music/SpotiFlyer/Playlists/Sing-along_Punjabi/Kya_Baat_Ay.temp.mp3").fold({
                 Log.d("FFmpeg Success",it)
             }){
                 it.printStackTrace()
-            }
+            }*/
         }
         /*FFmpeg.testInit()*/
         setContent {
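Note on the test block above: the callback-style ffmpeg.execute(...) call already runs inside lifecycleScope.launch, so it can be bridged into a suspending call. A minimal sketch, assuming only the nl.bravobit.ffmpeg types added later in this commit; the extension function itself is hypothetical glue, not part of the commit:

import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
import kotlinx.coroutines.suspendCancellableCoroutine
import nl.bravobit.ffmpeg.ExecuteBinaryResponseHandler
import nl.bravobit.ffmpeg.FFmpeg

suspend fun FFmpeg.executeAwait(cmd: Array<String>): String =
    suspendCancellableCoroutine { cont ->
        val task = execute(cmd, object : ExecuteBinaryResponseHandler() {
            override fun onSuccess(message: String?) { cont.resume(message.orEmpty()) }
            override fun onFailure(message: String?) {
                cont.resumeWithException(RuntimeException(message ?: "ffmpeg failed"))
            }
        })
        // Ask the ffmpeg process to quit if the coroutine is cancelled.
        cont.invokeOnCancellation { task.sendQuitSignal() }
    }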
@@ -19,7 +19,7 @@ kotlin {
         dependencies {
             implementation(Extras.mp3agic)
             implementation(Extras.Android.countly)
-            implementation(project(":ffmpeg:ffmpeg-kit-android-lib"))
+            implementation(project(":ffmpeg:android-ffmpeg"))
             // implementation("com.arthenica:ffmpeg-kit-audio:4.4.LTS")
             //api(files("$rootDir/libs/mobile-ffmpeg.aar"))
         }
@@ -1,6 +1,5 @@
 package com.shabinder.common.core_components.media_converter

-import com.shabinder.spotiflyer.ffmpeg.AndroidFFmpeg.runTranscode
 import com.shabinder.common.models.AudioQuality
 import org.koin.dsl.bind
 import org.koin.dsl.module
@@ -14,8 +13,12 @@ class AndroidMediaConverter : MediaConverter() {
         progressCallbacks: (Long) -> Unit,
     ) = executeSafelyInPool {
         // 192 is Default
-        val audioBitrate = if (audioQuality == AudioQuality.UNKNOWN) 192 else audioQuality.kbps.toIntOrNull() ?: 192
-        runTranscode(inputFilePath,outputFilePath,audioBitrate).toString()
+        val audioBitrate =
+            if (audioQuality == AudioQuality.UNKNOWN) 192 else audioQuality.kbps.toIntOrNull()
+                ?: 192
+
+        ""
+        //runTranscode(inputFilePath,outputFilePath,audioBitrate).toString()
         /*val kbpsArg = if (audioQuality == AudioQuality.UNKNOWN) {
             val mediaInformation = FFprobeKit.getMediaInformation(inputFilePath)
             val bitrate = ((mediaInformation.mediaInformation.bitrate).toFloat()/1000).roundToInt()
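With the JNI call disabled, convertAudioFile now computes a bitrate and returns an empty string. Once a transcode backend is wired back in, the body would presumably hand that bitrate to it; a hedged sketch of that shape, where the runTranscode signature and the full parameter list of convertAudioFile are assumptions based on what this diff shows:

// Sketch only: assumes runTranscode(input, output, kbps) returns a status convertible to String.
override fun convertAudioFile(
    inputFilePath: String,
    outputFilePath: String,
    audioQuality: AudioQuality,
    progressCallbacks: (Long) -> Unit,
) = executeSafelyInPool {
    val audioBitrate =
        if (audioQuality == AudioQuality.UNKNOWN) 192 else audioQuality.kbps.toIntOrNull() ?: 192
    runTranscode(inputFilePath, outputFilePath, audioBitrate).toString()
}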
ffmpeg/android-ffmpeg/.gitignore (new file, 1 line, vendored)
@@ -0,0 +1 @@
/build
@@ -13,24 +13,12 @@ android {

         minSdk = Versions.minSdkVersion
         targetSdk = Versions.targetSdkVersion
+        // versionCode = Versions.versionCode
+        // versionName = Versions.versionName

-        /*versionCode = Versions.versionCode
-        versionName = Versions.versionName*/
-
-        ndk {
+        /*ndk {
             abiFilters.addAll(setOf("x86", "x86_64", "armeabi-v7a", "arm64-v8a"))
-        }
-        }
+        }*/
-
-        sourceSets {
-            named("main") {
-                jniLibs.srcDir("../ffmpeg-android-maker/output/lib")
-            }
-        }
-        externalNativeBuild {
-            cmake {
-                path("CMakeLists.txt")
-            }
         }

     buildTypes {
@@ -61,3 +49,4 @@ android {
 }

 dependencies { /**/ }
+
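Design note: FileUtils (added below) resolves the ffmpeg/ffprobe binaries from applicationInfo.nativeLibraryDir, while this module now ships them under src/main/resources/lib. A hedged Gradle Kotlin DSL sketch of the jniLibs route that would land prebuilt binaries in nativeLibraryDir; the directory layout and lib prefix are assumptions, not what this commit does:

android {
    sourceSets {
        named("main") {
            // Package prebuilt binaries so the installer copies them into nativeLibraryDir.
            // Assumed layout: src/main/jniLibs/<abi>/libffmpeg.so, libffprobe.so
            jniLibs.srcDir("src/main/jniLibs")
        }
    }
}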
ffmpeg/android-ffmpeg/gradle.properties (new file, empty)

ffmpeg/android-ffmpeg/proguard-rules.pro (new file, 21 lines, vendored)
@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.kts.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html

# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
#   public *;
#}

# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable

# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

ffmpeg/android-ffmpeg/src/main/AndroidManifest.xml (new file, 1 line)
@@ -0,0 +1 @@
<manifest package="bravobit.nl.ffmpegandroid" />
@@ -0,0 +1,30 @@ (new file: nl.bravobit.ffmpeg.CommandResult)
package nl.bravobit.ffmpeg;

class CommandResult {
    final String output;
    final boolean success;

    CommandResult(boolean success, String output) {
        this.success = success;
        this.output = output;
    }

    static CommandResult getDummyFailureResponse() {
        return new CommandResult(false, "");
    }

    static CommandResult getOutputFromProcess(Process process) {
        String output;
        if (success(process.exitValue())) {
            output = Util.convertInputStreamToString(process.getInputStream());
        } else {
            output = Util.convertInputStreamToString(process.getErrorStream());
        }
        return new CommandResult(success(process.exitValue()), output);
    }

    static boolean success(Integer exitValue) {
        return exitValue != null && exitValue == 0;
    }
}
@@ -0,0 +1,29 @@ (new file: nl.bravobit.ffmpeg.ExecuteBinaryResponseHandler)
package nl.bravobit.ffmpeg;

public class ExecuteBinaryResponseHandler implements FFcommandExecuteResponseHandler {

    @Override
    public void onSuccess(String message) {
    }

    @Override
    public void onProgress(String message) {
    }

    @Override
    public void onFailure(String message) {
    }

    @Override
    public void onStart() {
    }

    @Override
    public void onFinish() {
    }
}
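Because ExecuteBinaryResponseHandler provides empty bodies for every callback, Kotlin call sites only override what they care about, as the MainActivity test above does. For example:

val handler = object : ExecuteBinaryResponseHandler() {
    override fun onSuccess(message: String?) = android.util.Log.d("FFmpeg", "done: $message")
    override fun onFailure(message: String?) = android.util.Log.e("FFmpeg", "failed: $message")
}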
@@ -0,0 +1,8 @@ (new file: nl.bravobit.ffmpeg.FFbinaryContextProvider)
package nl.bravobit.ffmpeg;

import android.content.Context;

public interface FFbinaryContextProvider {

    Context provide();
}
@@ -0,0 +1,55 @@ (new file: nl.bravobit.ffmpeg.FFbinaryInterface)
package nl.bravobit.ffmpeg;

import java.util.Map;

interface FFbinaryInterface {

    /**
     * Executes a command
     *
     * @param environmentVars Environment variables
     * @param cmd command to execute
     * @param ffcommandExecuteResponseHandler {@link FFcommandExecuteResponseHandler}
     * @return the task
     */
    FFtask execute(Map<String, String> environmentVars, String[] cmd, FFcommandExecuteResponseHandler ffcommandExecuteResponseHandler);

    /**
     * Executes a command
     *
     * @param cmd command to execute
     * @param ffcommandExecuteResponseHandler {@link FFcommandExecuteResponseHandler}
     * @return the task
     */
    FFtask execute(String[] cmd, FFcommandExecuteResponseHandler ffcommandExecuteResponseHandler);

    /**
     * Checks if FF binary is supported on this device
     *
     * @return true if FF binary is supported on this device
     */
    boolean isSupported();

    /**
     * Checks if a command with given task is currently running
     *
     * @param task - the task that you want to check
     * @return true if a command is running
     */
    boolean isCommandRunning(FFtask task);

    /**
     * Kill given running process
     *
     * @param task - the task to kill
     * @return true if process is killed successfully
     */
    boolean killRunningProcesses(FFtask task);

    /**
     * Timeout for binary process, should be minimum of 10 seconds
     *
     * @param timeout in milliseconds
     */
    void setTimeout(long timeout);
}
@@ -0,0 +1,6 @@ (new file: nl.bravobit.ffmpeg.FFbinaryObserver)
package nl.bravobit.ffmpeg;

public interface FFbinaryObserver extends Runnable {

    void cancel();
}
@@ -0,0 +1,142 @@ (new file: nl.bravobit.ffmpeg.FFcommandExecuteAsyncTask)
package nl.bravobit.ffmpeg;

import android.os.AsyncTask;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.Map;
import java.util.concurrent.TimeoutException;

class FFcommandExecuteAsyncTask extends AsyncTask<Void, String, CommandResult> implements FFtask {

    private final String[] cmd;
    private Map<String, String> environment;
    private final FFcommandExecuteResponseHandler ffmpegExecuteResponseHandler;
    private final ShellCommand shellCommand;
    private final long timeout;
    private long startTime;
    private Process process;
    private String output = "";
    private boolean quitPending;

    FFcommandExecuteAsyncTask(String[] cmd, Map<String, String> environment, long timeout, FFcommandExecuteResponseHandler ffmpegExecuteResponseHandler) {
        this.cmd = cmd;
        this.timeout = timeout;
        this.environment = environment;
        this.ffmpegExecuteResponseHandler = ffmpegExecuteResponseHandler;
        this.shellCommand = new ShellCommand();
    }

    @Override
    protected void onPreExecute() {
        startTime = System.currentTimeMillis();
        if (ffmpegExecuteResponseHandler != null) {
            ffmpegExecuteResponseHandler.onStart();
        }
    }

    @Override
    protected CommandResult doInBackground(Void... params) {
        try {
            process = shellCommand.run(cmd, environment);
            if (process == null) {
                return CommandResult.getDummyFailureResponse();
            }
            Log.d("Running publishing updates method");
            checkAndUpdateProcess();
            return CommandResult.getOutputFromProcess(process);
        } catch (TimeoutException e) {
            Log.e("FFmpeg binary timed out", e);
            return new CommandResult(false, e.getMessage());
        } catch (Exception e) {
            Log.e("Error running FFmpeg binary", e);
        } finally {
            Util.destroyProcess(process);
        }
        return CommandResult.getDummyFailureResponse();
    }

    @Override
    protected void onProgressUpdate(String... values) {
        if (values != null && values[0] != null && ffmpegExecuteResponseHandler != null) {
            ffmpegExecuteResponseHandler.onProgress(values[0]);
        }
    }

    @Override
    protected void onPostExecute(CommandResult commandResult) {
        if (ffmpegExecuteResponseHandler != null) {
            output += commandResult.output;
            if (commandResult.success) {
                ffmpegExecuteResponseHandler.onSuccess(output);
            } else {
                ffmpegExecuteResponseHandler.onFailure(output);
            }
            ffmpegExecuteResponseHandler.onFinish();
        }
    }

    private void checkAndUpdateProcess() throws TimeoutException, InterruptedException {
        while (!Util.isProcessCompleted(process)) {

            // checking if process is completed
            if (Util.isProcessCompleted(process)) {
                return;
            }

            // Handling timeout
            if (timeout != Long.MAX_VALUE && System.currentTimeMillis() > startTime + timeout) {
                throw new TimeoutException("FFmpeg binary timed out");
            }

            try {
                String line;
                BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
                while ((line = reader.readLine()) != null) {
                    if (isCancelled()) {
                        process.destroy();
                        process.waitFor();
                        return;
                    }

                    if (quitPending) {
                        sendQ();
                        process = null;
                        return;
                    }

                    output += line + "\n";
                    publishProgress(line);
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    public boolean isProcessCompleted() {
        return Util.isProcessCompleted(process);
    }

    @Override
    public boolean killRunningProcess() {
        return Util.killAsync(this);
    }

    @Override
    public void sendQuitSignal() {
        quitPending = true;
    }

    private void sendQ() {
        OutputStream outputStream = process.getOutputStream();
        try {
            outputStream.write("q\n".getBytes());
            outputStream.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
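FFcommandExecuteAsyncTask publishes each stderr line through onProgress and delivers the accumulated output once at the end. SpotiFlyer's progressCallbacks expect a number, so the caller has to parse ffmpeg's progress lines itself; a small illustrative Kotlin helper, where the regex over the standard "time=HH:MM:SS.cc" token is an assumption rather than part of this library:

// Derive a rough position in milliseconds from one ffmpeg stderr progress line.
val timeRegex = Regex("""time=(\d+):(\d+):(\d+)\.(\d+)""")

fun parseProgressMillis(line: String): Long? =
    timeRegex.find(line)?.destructured?.let { (h, m, s, cs) ->
        (h.toLong() * 3600 + m.toLong() * 60 + s.toLong()) * 1000 + cs.toLong() * 10
    }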
@@ -0,0 +1,26 @@ (new file: nl.bravobit.ffmpeg.FFcommandExecuteResponseHandler)
package nl.bravobit.ffmpeg;

public interface FFcommandExecuteResponseHandler extends ResponseHandler {

    /**
     * on Success
     *
     * @param message complete output of the binary command
     */
    void onSuccess(String message);

    /**
     * on Progress
     *
     * @param message current output of binary command
     */
    void onProgress(String message);

    /**
     * on Failure
     *
     * @param message complete output of the binary command
     */
    void onFailure(String message);
}
@@ -0,0 +1,88 @@ (new file: nl.bravobit.ffmpeg.FFmpeg)
package nl.bravobit.ffmpeg;

import android.content.Context;
import android.os.AsyncTask;

import java.io.File;
import java.util.Map;

public class FFmpeg implements FFbinaryInterface {

    private final FFbinaryContextProvider context;

    private static final long MINIMUM_TIMEOUT = 10 * 1000;
    private long timeout = Long.MAX_VALUE;

    private static FFmpeg instance = null;

    private FFmpeg(FFbinaryContextProvider context) {
        this.context = context;
        Log.setDebug(Util.isDebug(this.context.provide()));
    }

    public static FFmpeg getInstance(final Context context) {
        if (instance == null) {
            instance = new FFmpeg(new FFbinaryContextProvider() {
                @Override
                public Context provide() {
                    return context;
                }
            });
        }
        return instance;
    }

    @Override
    public boolean isSupported() {

        // get ffmpeg file
        File ffmpeg = FileUtils.getFFmpeg(context.provide());

        // check if ffmpeg can be executed
        if (!ffmpeg.canExecute()) {
            // try to make executable
            Log.e("ffmpeg cannot execute");
            return false;
        }

        Log.d("ffmpeg is ready!");

        return true;
    }

    @Override
    public FFtask execute(Map<String, String> environvenmentVars, String[] cmd, FFcommandExecuteResponseHandler ffmpegExecuteResponseHandler) {
        if (cmd.length != 0) {
            final String[] command = new String[cmd.length + 1];
            command[0] = FileUtils.getFFmpeg(context.provide()).getAbsolutePath();
            System.arraycopy(cmd, 0, command, 1, cmd.length);
            FFcommandExecuteAsyncTask task = new FFcommandExecuteAsyncTask(command, environvenmentVars, timeout, ffmpegExecuteResponseHandler);
            task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
            return task;
        } else {
            throw new IllegalArgumentException("shell command cannot be empty");
        }
    }

    @Override
    public FFtask execute(String[] cmd, FFcommandExecuteResponseHandler ffmpegExecuteResponseHandler) {
        return execute(null, cmd, ffmpegExecuteResponseHandler);
    }

    @Override
    public boolean isCommandRunning(FFtask task) {
        return task != null && !task.isProcessCompleted();
    }

    @Override
    public boolean killRunningProcesses(FFtask task) {
        return task != null && task.killRunningProcess();
    }

    @Override
    public void setTimeout(long timeout) {
        if (timeout >= MINIMUM_TIMEOUT) {
            this.timeout = timeout;
        }
    }
}
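Typical use of the singleton from Kotlin, mirroring the MainActivity test earlier in this commit; the paths here are placeholders:

val ffmpeg = FFmpeg.getInstance(context)
if (ffmpeg.isSupported) {
    ffmpeg.setTimeout(60_000L) // values below 10 s are ignored (MINIMUM_TIMEOUT)
    val task = ffmpeg.execute(
        arrayOf("-i", "/path/in.mp3", "-b:a", "192k", "-vn", "/path/out.mp3"),
        object : ExecuteBinaryResponseHandler() {
            override fun onFailure(message: String?) {
                android.util.Log.e("FFmpeg", "failed: $message")
            }
        }
    )
    // Cancel later if needed:
    // ffmpeg.killRunningProcesses(task)
}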
@@ -0,0 +1,42 @@ (new file: nl.bravobit.ffmpeg.FFmpegConfig)
package nl.bravobit.ffmpeg

import android.content.Context

object FFmpegConfig {
    fun versionFFmpeg(context: Context) {
        FFmpeg.getInstance(context).execute(arrayOf("-version"), object : ExecuteBinaryResponseHandler() {
            override fun onSuccess(message: String) {
                Log.d(message)
            }

            override fun onProgress(message: String) {
                Log.d(message)
            }
        })
    }

    fun codecsFFmpeg(context: Context) {
        FFmpeg.getInstance(context).execute(arrayOf("-codecs"), object : ExecuteBinaryResponseHandler() {
            override fun onSuccess(message: String) {
                Log.d(message)
            }

            override fun onProgress(message: String) {
                Log.d(message)
            }
        })
    }

    fun versionFFprobe(context: Context) {
        Log.d("version ffprobe")
        FFprobe.getInstance(context).execute(arrayOf("-version"), object : ExecuteBinaryResponseHandler() {
            override fun onSuccess(message: String) {
                Log.d(message)
            }

            override fun onProgress(message: String) {
                Log.d(message)
            }
        })
    }
}
@@ -0,0 +1,85 @@ (new file: nl.bravobit.ffmpeg.FFprobe)
package nl.bravobit.ffmpeg;

import android.content.Context;
import android.os.AsyncTask;

import java.io.File;
import java.util.Map;

public class FFprobe implements FFbinaryInterface {

    private final FFbinaryContextProvider context;

    private static final long MINIMUM_TIMEOUT = 10 * 1000;
    private long timeout = Long.MAX_VALUE;

    private static FFprobe instance = null;

    private FFprobe(FFbinaryContextProvider context) {
        this.context = context;
        Log.setDebug(Util.isDebug(this.context.provide()));
    }

    public static FFprobe getInstance(final Context context) {
        if (instance == null) {
            instance = new FFprobe(new FFbinaryContextProvider() {
                @Override
                public Context provide() {
                    return context;
                }
            });
        }
        return instance;
    }

    @Override
    public boolean isSupported() {
        // get ffprobe file
        File ffprobe = FileUtils.getFFprobe(context.provide());

        // check if ffprobe can be executed
        if (!ffprobe.canExecute()) {
            Log.e("ffprobe cannot execute");
            return false;
        }

        Log.d("ffprobe is ready!");

        return true;
    }

    @Override
    public FFtask execute(Map<String, String> environvenmentVars, String[] cmd, FFcommandExecuteResponseHandler ffcommandExecuteResponseHandler) {
        if (cmd.length != 0) {
            final String[] command = new String[cmd.length + 1];
            command[0] = FileUtils.getFFprobe(context.provide()).getAbsolutePath();
            System.arraycopy(cmd, 0, command, 1, cmd.length);
            FFcommandExecuteAsyncTask task = new FFcommandExecuteAsyncTask(command, environvenmentVars, timeout, ffcommandExecuteResponseHandler);
            task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
            return task;
        } else {
            throw new IllegalArgumentException("shell command cannot be empty");
        }
    }

    @Override
    public FFtask execute(String[] cmd, FFcommandExecuteResponseHandler ffcommandExecuteResponseHandler) {
        return execute(null, cmd, ffcommandExecuteResponseHandler);
    }

    public boolean isCommandRunning(FFtask task) {
        return task != null && !task.isProcessCompleted();
    }

    @Override
    public boolean killRunningProcesses(FFtask task) {
        return task != null && task.killRunningProcess();
    }

    @Override
    public void setTimeout(long timeout) {
        if (timeout >= MINIMUM_TIMEOUT) {
            this.timeout = timeout;
        }
    }
}
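FFprobe shares the same execution plumbing, so reading container metadata is just another command whose text output the caller parses; a small sketch, where the -show_format output keys are standard ffprobe behaviour rather than something this wrapper adds:

FFprobe.getInstance(context).execute(
    arrayOf("-v", "quiet", "-show_format", "/path/in.mp3"),
    object : ExecuteBinaryResponseHandler() {
        override fun onSuccess(message: String?) {
            // Output contains lines such as "bit_rate=192000" and "duration=213.55".
            val bitrate = message?.lineSequence()
                ?.firstOrNull { it.startsWith("bit_rate=") }
                ?.substringAfter('=')?.toLongOrNull()
            android.util.Log.d("FFprobe", "bitrate=$bitrate")
        }
    }
)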
@@ -0,0 +1,22 @@ (new file: nl.bravobit.ffmpeg.FFtask)
package nl.bravobit.ffmpeg;

public interface FFtask {

    /**
     * Sends 'q' to the ff binary running process asynchronously
     */
    void sendQuitSignal();

    /**
     * Checks if process is completed
     * @return <code>true</code> if a process is running
     */
    boolean isProcessCompleted();

    /**
     * Kill given running process
     *
     * @return true if process is killed successfully
     */
    boolean killRunningProcess();
}
@@ -0,0 +1,20 @@ (new file: nl.bravobit.ffmpeg.FileUtils)
package nl.bravobit.ffmpeg;

import android.content.Context;

import java.io.File;

class FileUtils {
    private static final String FFMPEG_FILE_NAME = "ffmpeg";
    private static final String FFPROBE_FILE_NAME = "ffprobe";

    static File getFFmpeg(Context context) {
        File folder = new File(context.getApplicationInfo().nativeLibraryDir);
        return new File(folder, FFMPEG_FILE_NAME);
    }

    static File getFFprobe(Context context) {
        File folder = new File(context.getApplicationInfo().nativeLibraryDir);
        return new File(folder, FFPROBE_FILE_NAME);
    }
}
@@ -0,0 +1,58 @@ (new file: nl.bravobit.ffmpeg.Log)
package nl.bravobit.ffmpeg;

class Log {

    private static String TAG = FFmpeg.class.getSimpleName();
    private static boolean DEBUG = false;

    public static void setDebug(boolean debug) {
        Log.DEBUG = debug;
    }

    public static void setTag(String tag) {
        Log.TAG = tag;
    }

    static void d(Object obj) {
        if (DEBUG) {
            android.util.Log.d(TAG, obj != null ? obj.toString() : "");
        }
    }

    static void e(Object obj) {
        if (DEBUG) {
            android.util.Log.e(TAG, obj != null ? obj.toString() : "");
        }
    }

    static void w(Object obj) {
        if (DEBUG) {
            android.util.Log.w(TAG, obj != null ? obj.toString() : "");
        }
    }

    static void i(Object obj) {
        if (DEBUG) {
            android.util.Log.i(TAG, obj != null ? obj.toString() : "");
        }
    }

    static void v(Object obj) {
        if (DEBUG) {
            android.util.Log.v(TAG, obj != null ? obj.toString() : "");
        }
    }

    static void e(Object obj, Throwable throwable) {
        if (DEBUG) {
            android.util.Log.e(TAG, obj != null ? obj.toString() : "", throwable);
        }
    }

    static void e(Throwable throwable) {
        if (DEBUG) {
            android.util.Log.e(TAG, "", throwable);
        }
    }
}
@@ -0,0 +1,15 @@ (new file: nl.bravobit.ffmpeg.ResponseHandler)
package nl.bravobit.ffmpeg;

public interface ResponseHandler {

    /**
     * on Start
     */
    void onStart();

    /**
     * on Finish
     */
    void onFinish();
}
@@ -0,0 +1,22 @@ (new file: nl.bravobit.ffmpeg.ShellCommand)
package nl.bravobit.ffmpeg;

import java.util.Arrays;
import java.util.Map;

class ShellCommand {

    Process run(String[] commandString, Map<String, String> environment) {
        Process process = null;
        try {
            ProcessBuilder processBuilder = new ProcessBuilder(commandString);
            if (environment != null) {
                processBuilder.environment().putAll(environment);
            }
            process = processBuilder.start();
        } catch (Throwable t) {
            Log.e("Exception while trying to run: " + Arrays.toString(commandString), t);
        }
        return process;
    }
}
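ShellCommand plus CommandResult amount to "start a ProcessBuilder, then read stdout or stderr depending on the exit code". The same flow condensed into a few lines of Kotlin, for reference only (this helper is not part of the library):

fun runAndCapture(cmd: List<String>): Pair<Boolean, String> {
    val process = ProcessBuilder(cmd).start()
    val exit = process.waitFor()
    // Success: read stdout; failure: read stderr, like CommandResult.getOutputFromProcess.
    val stream = if (exit == 0) process.inputStream else process.errorStream
    return (exit == 0) to stream.bufferedReader().readText()
}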
ffmpeg/android-ffmpeg/src/main/java/nl/bravobit/ffmpeg/Util.java (new file, 105 lines)
@@ -0,0 +1,105 @@
package nl.bravobit.ffmpeg;

import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.os.AsyncTask;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

class Util {

    static boolean isDebug(Context context) {
        return (context.getApplicationContext().getApplicationInfo().flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0;
    }

    static String convertInputStreamToString(InputStream inputStream) {
        try {
            BufferedReader r = new BufferedReader(new InputStreamReader(inputStream));
            String str;
            StringBuilder sb = new StringBuilder();
            while ((str = r.readLine()) != null) {
                sb.append(str);
            }
            return sb.toString();
        } catch (IOException e) {
            Log.e("error converting input stream to string", e);
        }
        return null;
    }

    static void destroyProcess(Process process) {
        if (process != null) {
            try {
                process.destroy();
            } catch (Exception e) {
                Log.e("progress destroy error", e);
            }
        }
    }

    static boolean killAsync(AsyncTask asyncTask) {
        return asyncTask != null && !asyncTask.isCancelled() && asyncTask.cancel(true);
    }

    static boolean isProcessCompleted(Process process) {
        try {
            if (process == null) return true;
            process.exitValue();
            return true;
        } catch (IllegalThreadStateException e) {
            // do nothing
        }
        return false;
    }

    public interface ObservePredicate {
        Boolean isReadyToProceed();
    }

    static FFbinaryObserver observeOnce(final ObservePredicate predicate, final Runnable run, final int timeout) {
        final android.os.Handler observer = new android.os.Handler();

        final FFbinaryObserver observeAction = new FFbinaryObserver() {
            private boolean canceled = false;
            private int timeElapsed = 0;

            @Override
            public void run() {
                if (timeElapsed + 40 > timeout) cancel();
                timeElapsed += 40;

                if (canceled) return;

                boolean readyToProceed = false;
                try {
                    readyToProceed = predicate.isReadyToProceed();
                } catch (Exception e) {
                    Log.v("Observing " + e.getMessage());
                    observer.postDelayed(this, 40);
                    return;
                }

                if (readyToProceed) {
                    Log.v("Observed");
                    run.run();
                } else {
                    Log.v("Observing");
                    observer.postDelayed(this, 40);
                }
            }

            @Override
            public void cancel() {
                canceled = true;
            }
        };

        observer.post(observeAction);

        return observeAction;
    }
}
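Util.observeOnce polls a predicate on a Handler every 40 ms until it reports ready or the timeout is exceeded. Roughly the same idea expressed directly in Kotlin, as an illustration of the polling loop rather than a drop-in replacement (Util itself is package-private):

import android.os.Handler

fun observeOnce(handler: Handler, timeoutMs: Int, isReady: () -> Boolean, action: () -> Unit) {
    var elapsed = 0
    lateinit var tick: Runnable
    tick = Runnable {
        if (elapsed + 40 > timeoutMs) return@Runnable // give up, like the Java version
        elapsed += 40
        if (isReady()) action() else handler.postDelayed(tick, 40)
    }
    handler.post(tick)
}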
ffmpeg/android-ffmpeg/src/main/res/values/strings.xml (new file, 3 lines)
@@ -0,0 +1,3 @@
<resources>
    <string name="app_name">FFMpegAndroid</string>
</resources>
New binary files (content not shown):
ffmpeg/android-ffmpeg/src/main/resources/lib/arm64-v8a/ffmpeg
ffmpeg/android-ffmpeg/src/main/resources/lib/arm64-v8a/ffprobe
ffmpeg/android-ffmpeg/src/main/resources/lib/armeabi-v7a/ffmpeg
ffmpeg/android-ffmpeg/src/main/resources/lib/armeabi-v7a/ffprobe
ffmpeg/android-ffmpeg/src/main/resources/lib/x86/ffmpeg
ffmpeg/android-ffmpeg/src/main/resources/lib/x86/ffprobe
ffmpeg/android-ffmpeg/src/main/resources/lib/x86_64/ffmpeg
ffmpeg/android-ffmpeg/src/main/resources/lib/x86_64/ffprobe
@@ -1,61 +0,0 @@ (deleted file)
cmake_minimum_required(VERSION 3.4.1)

set(ffmpeg_dir ${CMAKE_SOURCE_DIR}/../ffmpeg-android-maker/output)
set(ffmpeg_libs ${ffmpeg_dir}/lib/${ANDROID_ABI})

include_directories(${ffmpeg_dir}/include/${ANDROID_ABI})

set(
        # List variable name
        ffmpeg_libs_names
        # Values in the list
        avutil avformat avcodec swresample avdevice avfilter swscale
)

foreach (ffmpeg_lib_name ${ffmpeg_libs_names})
    add_library(
            ${ffmpeg_lib_name}
            SHARED
            IMPORTED
    )
    set_target_properties(
            ${ffmpeg_lib_name}
            PROPERTIES
            IMPORTED_LOCATION
            ${ffmpeg_libs}/lib${ffmpeg_lib_name}.so
    )
endforeach ()

add_library(
        # Name for a library to build
        spotiflyer-ffmpeg
        # Type of a library
        SHARED
        # All cpp files to compile
        # mobile-ffmpeg
        src/main/cpp/doc_examples_transcode_aac.c

        # ffmpeg-kit
        # src/main/cpp/ffmpegkit.c
        # src/main/cpp/ffprobekit.c
        # src/main/cpp/ffmpegkit_exception.c
        # src/main/cpp/fftools_cmdutils.c
        # src/main/cpp/fftools_ffmpeg.c
        # src/main/cpp/fftools_ffprobe.c
        # src/main/cpp/fftools_ffmpeg_opt.c
        # src/main/cpp/fftools_ffmpeg_hw.c
        # src/main/cpp/fftools_ffmpeg_filter.c
        # src/main/cpp/saf_wrapper.c
)

target_link_libraries(
        # Library to link
        spotiflyer-ffmpeg
        # List of libraries to link against:
        # Library for writing messages in LogCat
        log
        # Library for processing Bitmap objects
        jnigraphics
        # FFmpeg libraries
        ${ffmpeg_libs_names}
)
ffmpeg/ffmpeg-kit-android-lib/proguard-rules.pro (deleted file, 17 lines, vendored)
@@ -1,17 +0,0 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.kts.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html

-keep class com.arthenica.ffmpegkit.FFmpegKitConfig {
    native <methods>;
    void log(long, int, byte[]);
    void statistics(long, int, float, float, long , int, double, double);
    void closeParcelFileDescriptor(int);
}

-keep class com.arthenica.ffmpegkit.AbiDetect {
    native <methods>;
}
@@ -1,4 +0,0 @@ (deleted file)
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.shabinder.spotiflyer.ffmpeg">

</manifest>
@@ -1,2 +0,0 @@ (deleted file)
/android_lts_support.o
/libandroidltssupport.a
@ -1,915 +0,0 @@
|
|||||||
/*
|
|
||||||
* Copyright (c) 2013-2018 Andreas Unterweger
|
|
||||||
*
|
|
||||||
* This file is part of FFmpeg.
|
|
||||||
*
|
|
||||||
* FFmpeg is free software; you can redistribute it and/or
|
|
||||||
* modify it under the terms of the GNU Lesser General Public
|
|
||||||
* License as published by the Free Software Foundation; either
|
|
||||||
* version 2.1 of the License, or (at your option) any later version.
|
|
||||||
*
|
|
||||||
* FFmpeg is distributed in the hope that it will be useful,
|
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
||||||
* Lesser General Public License for more details.
|
|
||||||
*
|
|
||||||
* You should have received a copy of the GNU Lesser General Public
|
|
||||||
* License along with FFmpeg; if not, write to the Free Software
|
|
||||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @file
|
|
||||||
* Simple audio converter
|
|
||||||
*
|
|
||||||
* @example transcode_aac.c
|
|
||||||
* Convert an input audio file to AAC in an MP4 container using FFmpeg.
|
|
||||||
* Formats other than MP4 are supported based on the output file extension.
|
|
||||||
* @author Andreas Unterweger (dustsigns@gmail.com)
|
|
||||||
*/
|
|
||||||
|
|
||||||
#include <stdio.h>
|
|
||||||
#include <jni.h>
|
|
||||||
#include <android/log.h>
|
|
||||||
|
|
||||||
#include "libavformat/avformat.h"
|
|
||||||
#include "libavformat/avio.h"
|
|
||||||
|
|
||||||
#include "libavcodec/avcodec.h"
|
|
||||||
|
|
||||||
#include "libavutil/audio_fifo.h"
|
|
||||||
#include "libavutil/avassert.h"
|
|
||||||
#include "libavutil/avstring.h"
|
|
||||||
#include "libavutil/frame.h"
|
|
||||||
#include "libavutil/opt.h"
|
|
||||||
|
|
||||||
#include "libswresample/swresample.h"
|
|
||||||
#include <jni.h>
|
|
||||||
|
|
||||||
/* The number of output channels */
|
|
||||||
#define OUTPUT_CHANNELS 2
|
|
||||||
/* The index of audio stream that will be transcoded */
|
|
||||||
static int audio_stream_idx = -1;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Open an input file and the required decoder.
|
|
||||||
* @param filename File to be opened
|
|
||||||
* @param[out] input_format_context Format context of opened file
|
|
||||||
* @param[out] input_codec_context Codec context of opened file
|
|
||||||
* @return Error code (0 if successful)
|
|
||||||
*/
|
|
||||||
static int open_input_file(const char *filename,
|
|
||||||
AVFormatContext **input_format_context,
|
|
||||||
AVCodecContext **input_codec_context)
|
|
||||||
{
|
|
||||||
AVCodecContext *avctx;
|
|
||||||
AVCodec *input_codec;
|
|
||||||
int error;
|
|
||||||
|
|
||||||
/* Open the input file to read from it. */
|
|
||||||
if ((error = avformat_open_input(input_format_context, filename, NULL,
|
|
||||||
NULL)) < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not open input file '%s' (error '%s')\n",
|
|
||||||
filename, av_err2str(error));
|
|
||||||
*input_format_context = NULL;
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Get information on the input file (number of streams etc.). */
|
|
||||||
if ((error = avformat_find_stream_info(*input_format_context, NULL)) < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not open find stream info (error '%s')\n",
|
|
||||||
av_err2str(error));
|
|
||||||
avformat_close_input(input_format_context);
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (audio_stream_idx = 0; audio_stream_idx < (*input_format_context)->nb_streams; audio_stream_idx++) {
|
|
||||||
if ((*input_format_context)->streams[audio_stream_idx]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
|
|
||||||
break;
|
|
||||||
|
|
||||||
__android_log_print(ANDROID_LOG_INFO, "transcode_aac", "Skip non-audio input stream %d\n", audio_stream_idx);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Make sure that there is at least one audio stream in the input file. */
|
|
||||||
if (audio_stream_idx >= (*input_format_context)->nb_streams) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not find an audio (error '%s')\n",
|
|
||||||
av_err2str(error));
|
|
||||||
avformat_close_input(input_format_context);
|
|
||||||
return AVERROR_EXIT;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Find a decoder for the audio stream. */
|
|
||||||
if (!(input_codec = avcodec_find_decoder((*input_format_context)->streams[audio_stream_idx]->codecpar->codec_id))) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not find input codec\n");
|
|
||||||
avformat_close_input(input_format_context);
|
|
||||||
return AVERROR_EXIT;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Allocate a new decoding context. */
|
|
||||||
avctx = avcodec_alloc_context3(input_codec);
|
|
||||||
if (!avctx) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate a decoding context\n");
|
|
||||||
avformat_close_input(input_format_context);
|
|
||||||
return AVERROR(ENOMEM);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Initialize the stream parameters with demuxer information. */
|
|
||||||
error = avcodec_parameters_to_context(avctx, (*input_format_context)->streams[audio_stream_idx]->codecpar);
|
|
||||||
if (error < 0) {
|
|
||||||
avformat_close_input(input_format_context);
|
|
||||||
avcodec_free_context(&avctx);
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Open the decoder for the audio stream to use it later. */
|
|
||||||
if ((error = avcodec_open2(avctx, input_codec, NULL)) < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not open input codec (error '%s')\n",
|
|
||||||
av_err2str(error));
|
|
||||||
avcodec_free_context(&avctx);
|
|
||||||
avformat_close_input(input_format_context);
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Save the decoder context for easier access later. */
|
|
||||||
*input_codec_context = avctx;
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Open an output file and the required encoder.
|
|
||||||
* Also set some basic encoder parameters.
|
|
||||||
* Some of these parameters are based on the input file's parameters.
|
|
||||||
* @param filename File to be opened
|
|
||||||
* @param input_codec_context Codec context of input file
|
|
||||||
* @param[out] output_format_context Format context of output file
|
|
||||||
* @param[out] output_codec_context Codec context of output file
|
|
||||||
* @return Error code (0 if successful)
|
|
||||||
*/
|
|
||||||
static int open_output_file(const char *filename,
|
|
||||||
AVCodecContext *input_codec_context,
|
|
||||||
AVFormatContext **output_format_context,
|
|
||||||
AVCodecContext **output_codec_context,
|
|
||||||
int audioBitrate
|
|
||||||
)
|
|
||||||
{
|
|
||||||
AVCodecContext *avctx = NULL;
|
|
||||||
AVIOContext *output_io_context = NULL;
|
|
||||||
AVStream *stream = NULL;
|
|
||||||
AVCodec *output_codec = NULL;
|
|
||||||
int error;
|
|
||||||
|
|
||||||
/* Open the output file to write to it. */
|
|
||||||
if ((error = avio_open(&output_io_context, filename,
|
|
||||||
AVIO_FLAG_WRITE)) < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not open output file '%s' (error '%s')\n",
|
|
||||||
filename, av_err2str(error));
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Create a new format context for the output container format. */
|
|
||||||
if (!(*output_format_context = avformat_alloc_context())) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate output format context\n");
|
|
||||||
return AVERROR(ENOMEM);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Associate the output file (pointer) with the container format context. */
|
|
||||||
(*output_format_context)->pb = output_io_context;
|
|
||||||
|
|
||||||
/* Guess the desired container format based on the file extension. */
|
|
||||||
if (!((*output_format_context)->oformat = av_guess_format(NULL, filename,
|
|
||||||
NULL))) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not find output file format\n");
|
|
||||||
goto cleanup;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!((*output_format_context)->url = av_strdup(filename))) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate url.\n");
|
|
||||||
error = AVERROR(ENOMEM);
|
|
||||||
goto cleanup;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Find the encoder to be used by its name. */
|
|
||||||
if (!(output_codec = avcodec_find_encoder((*output_format_context)->oformat->audio_codec))) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not find an encoder for %s(%d).\n",
|
|
||||||
(*output_format_context)->oformat->long_name,
|
|
||||||
(*output_format_context)->oformat->audio_codec);
|
|
||||||
goto cleanup;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Create a new audio stream in the output file container. */
|
|
||||||
if (!(stream = avformat_new_stream(*output_format_context, NULL))) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not create new stream\n");
|
|
||||||
error = AVERROR(ENOMEM);
|
|
||||||
goto cleanup;
|
|
||||||
}
|
|
||||||
|
|
||||||
avctx = avcodec_alloc_context3(output_codec);
|
|
||||||
if (!avctx) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate an encoding context\n");
|
|
||||||
error = AVERROR(ENOMEM);
|
|
||||||
goto cleanup;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Set the basic encoder parameters.
|
|
||||||
* The input file's sample rate is used to avoid a sample rate conversion. */
|
|
||||||
avctx->channels = OUTPUT_CHANNELS;
|
|
||||||
avctx->channel_layout = av_get_default_channel_layout(OUTPUT_CHANNELS);
|
|
||||||
avctx->sample_rate = input_codec_context->sample_rate;
|
|
||||||
avctx->sample_fmt = output_codec->sample_fmts[0];
|
|
||||||
avctx->bit_rate = audioBitrate;
|
|
||||||
|
|
||||||
/* Allow the use of the experimental AAC encoder. */
|
|
||||||
avctx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
|
|
||||||
|
|
||||||
/* Set the sample rate for the container. */
|
|
||||||
stream->time_base.den = input_codec_context->sample_rate;
|
|
||||||
stream->time_base.num = 1;
|
|
||||||
|
|
||||||
/* Some container formats (like MP4) require global headers to be present.
|
|
||||||
* Mark the encoder so that it behaves accordingly. */
|
|
||||||
if ((*output_format_context)->oformat->flags & AVFMT_GLOBALHEADER)
|
|
||||||
avctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
|
|
||||||
|
|
||||||
/* Open the encoder for the audio stream to use it later. */
|
|
||||||
if ((error = avcodec_open2(avctx, output_codec, NULL)) < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not open output codec (error '%s')\n",
|
|
||||||
av_err2str(error));
|
|
||||||
goto cleanup;
|
|
||||||
}
|
|
||||||
|
|
||||||
error = avcodec_parameters_from_context(stream->codecpar, avctx);
|
|
||||||
if (error < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not initialize stream parameters\n");
|
|
||||||
goto cleanup;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Save the encoder context for easier access later. */
|
|
||||||
*output_codec_context = avctx;
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
|
|
||||||
cleanup:
|
|
||||||
avcodec_free_context(&avctx);
|
|
||||||
avio_closep(&(*output_format_context)->pb);
|
|
||||||
avformat_free_context(*output_format_context);
|
|
||||||
*output_format_context = NULL;
|
|
||||||
return error < 0 ? error : AVERROR_EXIT;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize one data packet for reading or writing.
|
|
||||||
* @param packet Packet to be initialized
|
|
||||||
*/
|
|
||||||
static void init_packet(AVPacket *packet)
|
|
||||||
{
|
|
||||||
av_init_packet(packet);
|
|
||||||
/* Set the packet data and size so that it is recognized as being empty. */
|
|
||||||
packet->data = NULL;
|
|
||||||
packet->size = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize one audio frame for reading from the input file.
|
|
||||||
* @param[out] frame Frame to be initialized
|
|
||||||
* @return Error code (0 if successful)
|
|
||||||
*/
|
|
||||||
static int init_input_frame(AVFrame **frame)
|
|
||||||
{
|
|
||||||
if (!(*frame = av_frame_alloc())) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate input frame\n");
|
|
||||||
return AVERROR(ENOMEM);
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize the audio resampler based on the input and output codec settings.
|
|
||||||
* If the input and output sample formats differ, a conversion is required
|
|
||||||
* libswresample takes care of this, but requires initialization.
|
|
||||||
* @param input_codec_context Codec context of the input file
|
|
||||||
* @param output_codec_context Codec context of the output file
|
|
||||||
* @param[out] resample_context Resample context for the required conversion
|
|
||||||
* @return Error code (0 if successful)
|
|
||||||
*/
|
|
||||||
static int init_resampler(AVCodecContext *input_codec_context,
|
|
||||||
AVCodecContext *output_codec_context,
|
|
||||||
SwrContext **resample_context)
|
|
||||||
{
|
|
||||||
int error;
|
|
||||||
|
|
||||||
/*
|
|
||||||
* Create a resampler context for the conversion.
|
|
||||||
* Set the conversion parameters.
|
|
||||||
* Default channel layouts based on the number of channels
|
|
||||||
* are assumed for simplicity (they are sometimes not detected
|
|
||||||
* properly by the demuxer and/or decoder).
|
|
||||||
*/
|
|
||||||
*resample_context = swr_alloc_set_opts(NULL,
|
|
||||||
av_get_default_channel_layout(output_codec_context->channels),
|
|
||||||
output_codec_context->sample_fmt,
|
|
||||||
output_codec_context->sample_rate,
|
|
||||||
av_get_default_channel_layout(input_codec_context->channels),
|
|
||||||
input_codec_context->sample_fmt,
|
|
||||||
input_codec_context->sample_rate,
|
|
||||||
0, NULL);
|
|
||||||
if (!*resample_context) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate resample context\n");
|
|
||||||
return AVERROR(ENOMEM);
|
|
||||||
}
|
|
||||||
/*
|
|
||||||
* Perform a sanity check so that the number of converted samples is
|
|
||||||
* not greater than the number of samples to be converted.
|
|
||||||
* If the sample rates differ, this case has to be handled differently
|
|
||||||
*/
|
|
||||||
av_assert0(output_codec_context->sample_rate == input_codec_context->sample_rate);
|
|
||||||
|
|
||||||
/* Open the resampler with the specified parameters. */
|
|
||||||
if ((error = swr_init(*resample_context)) < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not open resample context\n");
|
|
||||||
swr_free(resample_context);
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize a FIFO buffer for the audio samples to be encoded.
|
|
||||||
* @param[out] fifo Sample buffer
|
|
||||||
* @param output_codec_context Codec context of the output file
|
|
||||||
* @return Error code (0 if successful)
|
|
||||||
*/
|
|
||||||
static int init_fifo(AVAudioFifo **fifo, AVCodecContext *output_codec_context)
|
|
||||||
{
|
|
||||||
/* Create the FIFO buffer based on the specified output sample format. */
|
|
||||||
if (!(*fifo = av_audio_fifo_alloc(output_codec_context->sample_fmt,
|
|
||||||
output_codec_context->channels, 1))) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate FIFO\n");
|
|
||||||
return AVERROR(ENOMEM);
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write the header of the output file container.
|
|
||||||
* @param output_format_context Format context of the output file
|
|
||||||
* @return Error code (0 if successful)
|
|
||||||
*/
|
|
||||||
static int write_output_file_header(AVFormatContext *output_format_context)
|
|
||||||
{
|
|
||||||
int error;
|
|
||||||
if ((error = avformat_write_header(output_format_context, NULL)) < 0) {
|
|
||||||
__android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write output file header (error '%s')\n",
|
|
||||||
av_err2str(error));
|
|
||||||
return error;
|
|
||||||
}
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Decode one audio frame from the input file.
 * @param      frame                Audio frame to be decoded
 * @param      input_format_context Format context of the input file
 * @param      input_codec_context  Codec context of the input file
 * @param[out] data_present         Indicates whether data has been decoded
 * @param[out] finished             Indicates whether the end of file has
 *                                  been reached and all data has been
 *                                  decoded. If this flag is false, there
 *                                  is more data to be decoded, i.e., this
 *                                  function has to be called again.
 * @return Error code (0 if successful)
 */
static int decode_audio_frame(AVFrame *frame,
                              AVFormatContext *input_format_context,
                              AVCodecContext *input_codec_context,
                              int *data_present, int *finished)
{
    /* Packet used for temporary storage. */
    AVPacket input_packet;
    int error;
    init_packet(&input_packet);

    /* Read one audio frame from the input file into a temporary packet. */
    if ((error = av_read_frame(input_format_context, &input_packet)) < 0) {
        /* If we are at the end of the file, flush the decoder below. */
        if (error == AVERROR_EOF)
            *finished = 1;
        else {
            __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not read frame (error '%s')\n",
                                av_err2str(error));
            return error;
        }
    }

    if (error != AVERROR_EOF && input_packet.stream_index != audio_stream_idx) {
        goto cleanup;
    }

    /* Send the audio frame stored in the temporary packet to the decoder.
     * The input audio stream decoder is used to do this. */
    if ((error = avcodec_send_packet(input_codec_context, &input_packet)) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not send packet for decoding (error '%s')\n",
                            av_err2str(error));
        return error;
    }

    /* Receive one frame from the decoder. */
    error = avcodec_receive_frame(input_codec_context, frame);
    /* If the decoder asks for more data to be able to decode a frame,
     * return indicating that no data is present. */
    if (error == AVERROR(EAGAIN)) {
        error = 0;
        goto cleanup;
    /* If the end of the input file is reached, stop decoding. */
    } else if (error == AVERROR_EOF) {
        *finished = 1;
        error = 0;
        goto cleanup;
    } else if (error < 0) {
        __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not decode frame (error '%s')\n",
                            av_err2str(error));
        goto cleanup;
    /* Default case: Return decoded data. */
    } else {
        *data_present = 1;
        goto cleanup;
    }

cleanup:
    av_packet_unref(&input_packet);
    return error;
}

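decode_audio_frame() above pulls at most one frame per call and reports its state through data_present and finished. For reference, the general send/receive pattern keeps calling avcodec_receive_frame() until the decoder reports AVERROR(EAGAIN) or AVERROR_EOF; a minimal sketch of that loop, independent of this file's helpers (the on_frame callback is purely illustrative):

#include <libavcodec/avcodec.h>

/* Sketch: generic decode-drain loop for one packet. */
static int sketch_decode_packet(AVCodecContext *dec, const AVPacket *pkt,
                                int (*on_frame)(const AVFrame *))
{
    AVFrame *frame = av_frame_alloc();
    int ret;

    if (!frame)
        return AVERROR(ENOMEM);
    ret = avcodec_send_packet(dec, pkt);
    while (ret >= 0) {
        ret = avcodec_receive_frame(dec, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            ret = 0;               /* decoder needs more input, or is fully drained */
            break;
        }
        if (ret < 0)
            break;                 /* real decoding error */
        ret = on_frame(frame);     /* hand the decoded frame to the caller */
        av_frame_unref(frame);
    }
    av_frame_free(&frame);
    return ret;
}
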
/**
 * Initialize a temporary storage for the specified number of audio samples.
 * The conversion requires temporary storage due to the different format.
 * The number of audio samples to be allocated is specified in frame_size.
 * @param[out] converted_input_samples Array of converted samples. The
 *                                     dimensions are reference, channel
 *                                     (for multi-channel audio), sample.
 * @param      output_codec_context    Codec context of the output file
 * @param      frame_size              Number of samples to be converted in
 *                                     each round
 * @return Error code (0 if successful)
 */
static int init_converted_samples(uint8_t ***converted_input_samples,
                                  AVCodecContext *output_codec_context,
                                  int frame_size)
{
    int error;

    /* Allocate as many pointers as there are audio channels.
     * Each pointer will later point to the audio samples of the corresponding
     * channels (although it may be NULL for interleaved formats).
     */
    if (!(*converted_input_samples = calloc(output_codec_context->channels,
                                            sizeof(**converted_input_samples)))) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not allocate converted input sample pointers\n");
        return AVERROR(ENOMEM);
    }

    /* Allocate memory for the samples of all channels in one consecutive
     * block for convenience. */
    if ((error = av_samples_alloc(*converted_input_samples, NULL,
                                  output_codec_context->channels,
                                  frame_size,
                                  output_codec_context->sample_fmt, 0)) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac",
                            "Could not allocate converted input samples (error '%s')\n",
                            av_err2str(error));
        av_freep(&(*converted_input_samples)[0]);
        free(*converted_input_samples);
        return error;
    }
    return 0;
}

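The calloc() plus av_samples_alloc() pair above can also be written as a single libavutil call. A sketch of that alternative, with assumed channel count, frame size and sample format:

#include <libavutil/samplefmt.h>

/* Sketch: allocate the channel-pointer array and the sample buffer in one call.
 * Returns a negative AVERROR on failure. Free with:
 *   av_freep(&samples[0]); av_freep(&samples); */
static int sketch_alloc_converted_samples(uint8_t ***samples)
{
    int linesize;
    return av_samples_alloc_array_and_samples(samples, &linesize,
                                              2    /* channels, assumed */,
                                              1024 /* frame_size, assumed */,
                                              AV_SAMPLE_FMT_S16 /* assumed */, 0);
}
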
/**
 * Convert the input audio samples into the output sample format.
 * The conversion happens on a per-frame basis, the size of which is
 * specified by frame_size.
 * @param      input_data       Decoded samples to be converted. The dimensions
 *                              are channel (for multi-channel audio), sample.
 * @param[out] converted_data   Converted samples. The dimensions are channel
 *                              (for multi-channel audio), sample.
 * @param      frame_size       Number of samples to be converted
 * @param      resample_context Resample context for the conversion
 * @return Error code (0 if successful)
 */
static int convert_samples(const uint8_t **input_data,
                           uint8_t **converted_data, const int frame_size,
                           SwrContext *resample_context)
{
    int error;

    /* Convert the samples using the resampler. */
    if ((error = swr_convert(resample_context,
                             converted_data, frame_size,
                             input_data, frame_size)) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not convert input samples (error '%s')\n",
                            av_err2str(error));
        return error;
    }

    return 0;
}

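Because the sample rate is left unchanged here (see the av_assert0 above), swr_convert() can produce exactly frame_size samples per call and nothing should remain buffered. When input and output rates differ, the resampler may hold samples back, and they have to be drained at end of stream. A sketch of that drain step, assuming an already configured SwrContext and a sufficiently large output buffer:

#include <libswresample/swresample.h>

/* Sketch: flush samples still buffered inside the resampler (NULL input).
 * out_data must be able to hold at least out_capacity samples per channel. */
static int sketch_flush_resampler(SwrContext *ctx, uint8_t **out_data, int out_capacity)
{
    int buffered = swr_get_out_samples(ctx, 0);
    if (buffered <= 0)
        return buffered;               /* nothing pending, or an error code */
    if (buffered > out_capacity)
        buffered = out_capacity;       /* stay within the caller's buffer */
    return swr_convert(ctx, out_data, buffered, NULL, 0);
}
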
/**
 * Add converted input audio samples to the FIFO buffer for later processing.
 * @param fifo                    Buffer to add the samples to
 * @param converted_input_samples Samples to be added. The dimensions are channel
 *                                (for multi-channel audio), sample.
 * @param frame_size              Number of samples to be added
 * @return Error code (0 if successful)
 */
static int add_samples_to_fifo(AVAudioFifo *fifo,
                               uint8_t **converted_input_samples,
                               const int frame_size)
{
    int error;

    /* Make the FIFO as large as it needs to be to hold both
     * the old and the new samples. */
    if ((error = av_audio_fifo_realloc(fifo, av_audio_fifo_size(fifo) + frame_size)) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not reallocate FIFO\n");
        return error;
    }

    /* Store the new samples in the FIFO buffer. */
    if (av_audio_fifo_write(fifo, (void **)converted_input_samples,
                            frame_size) < frame_size) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write data to FIFO\n");
        return AVERROR_EXIT;
    }
    return 0;
}

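The FIFO is what decouples the decoder's frame size from the encoder's: samples are appended as they arrive and later read back in encoder-sized chunks. A self-contained round-trip sketch (the interleaved 16-bit stereo format and the sample counts are assumptions for illustration):

#include <libavutil/audio_fifo.h>
#include <libavutil/common.h>
#include <libavutil/error.h>
#include <libavutil/mem.h>
#include <libavutil/samplefmt.h>

/* Sketch: write 600 samples of silence into an AVAudioFifo, then read them
 * back in chunks of at most 256, as the encoding loop in this file does. */
static int sketch_fifo_round_trip(void)
{
    AVAudioFifo *fifo = av_audio_fifo_alloc(AV_SAMPLE_FMT_S16, 2, 1);
    uint8_t *buf[1] = { NULL };
    int ret = AVERROR(ENOMEM);

    if (!fifo)
        return ret;
    if (av_samples_alloc(buf, NULL, 2, 600, AV_SAMPLE_FMT_S16, 0) < 0)
        goto end;
    av_samples_set_silence(buf, 0, 600, 2, AV_SAMPLE_FMT_S16);

    if (av_audio_fifo_write(fifo, (void **)buf, 600) < 600)
        goto end;
    while (av_audio_fifo_size(fifo) > 0) {
        int chunk = FFMIN(av_audio_fifo_size(fifo), 256);
        if (av_audio_fifo_read(fifo, (void **)buf, chunk) < chunk)
            goto end;
    }
    ret = 0;
end:
    if (buf[0])
        av_freep(&buf[0]);
    av_audio_fifo_free(fifo);
    return ret;
}
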
/**
 * Read one audio frame from the input file, decode, convert and store
 * it in the FIFO buffer.
 * @param      fifo                 Buffer used for temporary storage
 * @param      input_format_context Format context of the input file
 * @param      input_codec_context  Codec context of the input file
 * @param      output_codec_context Codec context of the output file
 * @param      resampler_context    Resample context for the conversion
 * @param[out] finished             Indicates whether the end of file has
 *                                  been reached and all data has been
 *                                  decoded. If this flag is false,
 *                                  there is more data to be decoded,
 *                                  i.e., this function has to be called
 *                                  again.
 * @return Error code (0 if successful)
 */
static int read_decode_convert_and_store(AVAudioFifo *fifo,
                                         AVFormatContext *input_format_context,
                                         AVCodecContext *input_codec_context,
                                         AVCodecContext *output_codec_context,
                                         SwrContext *resampler_context,
                                         int *finished)
{
    /* Temporary storage of the input samples of the frame read from the file. */
    AVFrame *input_frame = NULL;
    /* Temporary storage for the converted input samples. */
    uint8_t **converted_input_samples = NULL;
    int data_present = 0;
    int ret = AVERROR_EXIT;

    /* Initialize temporary storage for one input frame. */
    if (init_input_frame(&input_frame))
        goto cleanup;
    /* Decode one frame worth of audio samples. */
    if (decode_audio_frame(input_frame, input_format_context,
                           input_codec_context, &data_present, finished))
        goto cleanup;
    /* If we are at the end of the file and there are no more samples
     * in the decoder which are delayed, we are actually finished.
     * This must not be treated as an error. */
    if (*finished) {
        ret = 0;
        goto cleanup;
    }
    /* If there is decoded data, convert and store it. */
    if (data_present) {
        /* Initialize the temporary storage for the converted input samples. */
        if (init_converted_samples(&converted_input_samples, output_codec_context,
                                   input_frame->nb_samples))
            goto cleanup;

        /* Convert the input samples to the desired output sample format.
         * This requires a temporary storage provided by converted_input_samples. */
        if (convert_samples((const uint8_t **)input_frame->extended_data, converted_input_samples,
                            input_frame->nb_samples, resampler_context))
            goto cleanup;

        /* Add the converted input samples to the FIFO buffer for later processing. */
        if (add_samples_to_fifo(fifo, converted_input_samples,
                                input_frame->nb_samples))
            goto cleanup;
        ret = 0;
    }
    ret = 0;

cleanup:
    if (converted_input_samples) {
        av_freep(&converted_input_samples[0]);
        free(converted_input_samples);
    }
    av_frame_free(&input_frame);

    return ret;
}

/**
 * Initialize one output frame for writing to the output file.
 * The frame will be exactly frame_size samples large.
 * @param[out] frame                Frame to be initialized
 * @param      output_codec_context Codec context of the output file
 * @param      frame_size           Size of the frame
 * @return Error code (0 if successful)
 */
static int init_output_frame(AVFrame **frame,
                             AVCodecContext *output_codec_context,
                             int frame_size)
{
    int error;

    /* Create a new frame to store the audio samples. */
    if (!(*frame = av_frame_alloc())) {
        __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not allocate output frame\n");
        return AVERROR_EXIT;
    }

    /* Set the frame's parameters, especially its size and format.
     * av_frame_get_buffer needs this to allocate memory for the
     * audio samples of the frame.
     * Default channel layouts based on the number of channels
     * are assumed for simplicity. */
    (*frame)->nb_samples     = frame_size;
    (*frame)->channel_layout = output_codec_context->channel_layout;
    (*frame)->format         = output_codec_context->sample_fmt;
    (*frame)->sample_rate    = output_codec_context->sample_rate;

    /* Allocate the samples of the created frame. This call will make
     * sure that the audio frame can hold as many samples as specified. */
    if ((error = av_frame_get_buffer(*frame, 0)) < 0) {
        __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not allocate output frame samples (error '%s')\n",
                            av_err2str(error));
        av_frame_free(frame);
        return error;
    }

    return 0;
}

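av_frame_get_buffer() sizes the frame's data planes from nb_samples, format and channel layout. The same arithmetic is exposed by av_samples_get_buffer_size(); a small worked sketch with assumed values (1024 samples, stereo, planar float):

#include <stdio.h>
#include <libavutil/samplefmt.h>

int main(void)
{
    /* Sketch: how many bytes does a 1024-sample, stereo, planar-float frame need? */
    int size = av_samples_get_buffer_size(NULL, 2, 1024, AV_SAMPLE_FMT_FLTP, 0);
    if (size < 0)
        return 1;
    printf("buffer size: %d bytes\n", size);   /* 1024 samples * 2 channels * 4 bytes = 8192 */
    return 0;
}
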
/* Global timestamp for the audio frames. */
static int64_t pts = 0;

/**
 * Encode one frame worth of audio to the output file.
 * @param      frame                 Samples to be encoded
 * @param      output_format_context Format context of the output file
 * @param      output_codec_context  Codec context of the output file
 * @param[out] data_present          Indicates whether data has been
 *                                   encoded
 * @return Error code (0 if successful)
 */
static int encode_audio_frame(AVFrame *frame,
                              AVFormatContext *output_format_context,
                              AVCodecContext *output_codec_context,
                              int *data_present)
{
    /* Packet used for temporary storage. */
    AVPacket output_packet;
    int error;
    init_packet(&output_packet);

    /* Set a timestamp based on the sample rate for the container. */
    if (frame) {
        frame->pts = pts;
        pts += frame->nb_samples;
    }

    /* Send the audio frame stored in the temporary packet to the encoder.
     * The output audio stream encoder is used to do this. */
    error = avcodec_send_frame(output_codec_context, frame);
    /* The encoder signals that it has nothing more to encode. */
    if (error == AVERROR_EOF) {
        error = 0;
        goto cleanup;
    } else if (error < 0) {
        __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not send packet for encoding (error '%s')\n",
                            av_err2str(error));
        return error;
    }

    /* Receive one encoded frame from the encoder. */
    error = avcodec_receive_packet(output_codec_context, &output_packet);
    /* If the encoder asks for more data to be able to provide an
     * encoded frame, return indicating that no data is present. */
    if (error == AVERROR(EAGAIN)) {
        error = 0;
        goto cleanup;
    /* If the last frame has been encoded, stop encoding. */
    } else if (error == AVERROR_EOF) {
        error = 0;
        goto cleanup;
    } else if (error < 0) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not encode frame (error '%s')\n",
                            av_err2str(error));
        goto cleanup;
    /* Default case: Return encoded data. */
    } else {
        *data_present = 1;
    }

    /* Write one audio frame from the temporary packet to the output file. */
    if (*data_present &&
        (error = av_write_frame(output_format_context, &output_packet)) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write frame (error '%s')\n",
                            av_err2str(error));
        goto cleanup;
    }

cleanup:
    av_packet_unref(&output_packet);
    return error;
}

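The global pts counts raw samples, so if the output stream's time base is 1/sample_rate (which the comment above implies), a frame's timestamp maps directly to seconds. A tiny worked example with assumed numbers:

#include <stdio.h>
#include <stdint.h>
#include <libavutil/rational.h>

int main(void)
{
    /* Sketch: assumed output time base of 1/44100 and a pts counted in samples. */
    AVRational time_base = { 1, 44100 };
    int64_t pts_in_samples = 441000;       /* ten seconds worth of samples */
    printf("presentation time: %.2f s\n", pts_in_samples * av_q2d(time_base));
    return 0;                              /* prints "presentation time: 10.00 s" */
}
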
/**
 * Load one audio frame from the FIFO buffer, encode and write it to the
 * output file.
 * @param fifo                  Buffer used for temporary storage
 * @param output_format_context Format context of the output file
 * @param output_codec_context  Codec context of the output file
 * @return Error code (0 if successful)
 */
static int load_encode_and_write(AVAudioFifo *fifo,
                                 AVFormatContext *output_format_context,
                                 AVCodecContext *output_codec_context)
{
    /* Temporary storage of the output samples of the frame written to the file. */
    AVFrame *output_frame;
    /* Use the maximum number of possible samples per frame.
     * If there is less than the maximum possible frame size in the FIFO
     * buffer use this number. Otherwise, use the maximum possible frame size. */
    const int frame_size = FFMIN(av_audio_fifo_size(fifo),
                                 output_codec_context->frame_size);
    int data_written;

    /* Initialize temporary storage for one output frame. */
    if (init_output_frame(&output_frame, output_codec_context, frame_size))
        return AVERROR_EXIT;

    /* Read as many samples from the FIFO buffer as required to fill the frame.
     * The samples are stored in the frame temporarily. */
    if (av_audio_fifo_read(fifo, (void **)output_frame->data, frame_size) < frame_size) {
        __android_log_print(ANDROID_LOG_WARN, "transcode_aac", "Could not read data from FIFO\n");
        av_frame_free(&output_frame);
        return AVERROR_EXIT;
    }

    /* Encode one frame worth of audio samples. */
    if (encode_audio_frame(output_frame, output_format_context,
                           output_codec_context, &data_written)) {
        av_frame_free(&output_frame);
        return AVERROR_EXIT;
    }
    av_frame_free(&output_frame);
    return 0;
}

/**
 * Write the trailer of the output file container.
 * @param output_format_context Format context of the output file
 * @return Error code (0 if successful)
 */
static int write_output_file_trailer(AVFormatContext *output_format_context)
{
    int error;
    if ((error = av_write_trailer(output_format_context)) < 0) {
        __android_log_print(ANDROID_LOG_ERROR, "transcode_aac", "Could not write output file trailer (error '%s')\n",
                            av_err2str(error));
        return error;
    }
    return 0;
}

JNIEXPORT jint JNICALL Java_com_shabinder_spotiflyer_ffmpeg_AndroidFFmpeg_runTranscode(
        JNIEnv *env, jobject c,
        jstring inFilename,
        jstring outFilename,
        jint audioBitrate
) {
    AVFormatContext *input_format_context = NULL, *output_format_context = NULL;
    AVCodecContext *input_codec_context = NULL, *output_codec_context = NULL;
    SwrContext *resample_context = NULL;
    AVAudioFifo *fifo = NULL;
    int ret = AVERROR_EXIT;

    const char *in_filename  = (*env)->GetStringUTFChars(env, inFilename, 0);
    const char *out_filename = (*env)->GetStringUTFChars(env, outFilename, 0);

    __android_log_print(ANDROID_LOG_INFO, "transcode_aac", "Bitrate:%d :: %s -> %s\n", audioBitrate, in_filename, out_filename);

    /* Open the input file for reading. */
    if (open_input_file(in_filename, &input_format_context,
                        &input_codec_context))
        goto cleanup;

    __android_log_print(ANDROID_LOG_INFO, "transcode_aac", "Input format: %s.\n",
                        input_format_context->iformat->long_name);

    /* Open the output file for writing. */
    if (open_output_file(out_filename, input_codec_context,
                         &output_format_context, &output_codec_context, (audioBitrate * 1000)))
        goto cleanup;

    __android_log_print(ANDROID_LOG_INFO, "transcode_aac", "Output format: %s.\n",
                        output_format_context->oformat->long_name);

    /* Initialize the resampler to be able to convert audio sample formats. */
    if (init_resampler(input_codec_context, output_codec_context,
                       &resample_context))
        goto cleanup;
    /* Initialize the FIFO buffer to store audio samples to be encoded. */
    if (init_fifo(&fifo, output_codec_context))
        goto cleanup;
    /* Write the header of the output file container. */
    if (write_output_file_header(output_format_context))
        goto cleanup;

    /* Loop as long as we have input samples to read or output samples
     * to write; abort as soon as we have neither. */
    while (1) {
        /* Use the encoder's desired frame size for processing. */
        const int output_frame_size = output_codec_context->frame_size;
        int finished = 0;

        /* Make sure that there is one frame worth of samples in the FIFO
         * buffer so that the encoder can do its work.
         * Since the decoder's and the encoder's frame size may differ, we
         * need the FIFO buffer to store as many frames worth of input samples
         * as are required to make up at least one frame worth of output samples. */
        while (av_audio_fifo_size(fifo) < output_frame_size) {
            /* Decode one frame worth of audio samples, convert it to the
             * output sample format and put it into the FIFO buffer. */
            if (read_decode_convert_and_store(fifo, input_format_context,
                                              input_codec_context,
                                              output_codec_context,
                                              resample_context, &finished))
                goto cleanup;

            /* If we are at the end of the input file, we continue
             * encoding the remaining audio samples to the output file. */
            if (finished)
                break;
        }

        /* If we have enough samples for the encoder, we encode them.
         * At the end of the file, we pass the remaining samples to
         * the encoder. */
        while (av_audio_fifo_size(fifo) >= output_frame_size ||
               (finished && av_audio_fifo_size(fifo) > 0))
            /* Take one frame worth of audio samples from the FIFO buffer,
             * encode it and write it to the output file. */
            if (load_encode_and_write(fifo, output_format_context,
                                      output_codec_context))
                goto cleanup;

        /* If we are at the end of the input file and have encoded
         * all remaining samples, we can exit this loop and finish. */
        if (finished) {
            int data_written;
            /* Flush the encoder as it may have delayed frames. */
            do {
                data_written = 0;
                if (encode_audio_frame(NULL, output_format_context,
                                       output_codec_context, &data_written))
                    goto cleanup;
            } while (data_written);
            break;
        }
    }

    /* Write the trailer of the output file container. */
    if (write_output_file_trailer(output_format_context))
        goto cleanup;
    ret = 0;

cleanup:
    if (fifo)
        av_audio_fifo_free(fifo);
    swr_free(&resample_context);
    if (output_codec_context)
        avcodec_free_context(&output_codec_context);
    if (output_format_context) {
        avio_closep(&output_format_context->pb);
        avformat_free_context(output_format_context);
    }
    if (input_codec_context)
        avcodec_free_context(&input_codec_context);
    if (input_format_context)
        avformat_close_input(&input_format_context);

    /* Release the JNI copies of the path strings obtained above. */
    if (in_filename)
        (*env)->ReleaseStringUTFChars(env, inFilename, in_filename);
    if (out_filename)
        (*env)->ReleaseStringUTFChars(env, outFilename, out_filename);

    return ret;
}
@@ -1,27 +0,0 @@
package com.shabinder.spotiflyer.ffmpeg

import android.util.Log

object AndroidFFmpeg {
    /**
     *
     * Run transcode_aac from doc/examples.
     *
     * @return zero if transcoding was successful
     */
    @JvmStatic
    external fun runTranscode(inFilename: String?, outFilename: String?, audioBitrate: Int): Int

    init {
        Log.i("FFmpeg", "Loading mobile-ffmpeg.")
        System.loadLibrary("avutil")
        System.loadLibrary("swscale")
        System.loadLibrary("swresample")
        System.loadLibrary("avcodec")
        System.loadLibrary("avformat")
        System.loadLibrary("avfilter")
        System.loadLibrary("avdevice")
        //System.loadLibrary("avresample")
        System.loadLibrary("spotiflyer-ffmpeg")
    }
}
@@ -27,7 +27,7 @@ include(
     ":common:providers",
     ":common:core-components",
     ":common:dependency-injection",
-    ":ffmpeg:ffmpeg-kit-android-lib",
+    ":ffmpeg:android-ffmpeg",
     ":android",
     ":desktop",
     ":web-app",