decoderWrapperEntry : mDecoderWrappers.entrySet()) {
+ decoderWrapperEntry.getValue().release();
+ }
+ }
+ @Override
+ public void releaseEncoder() {
+ TLog.d(TAG, "ReleaseEncoder");
+ if (mEncoder != null) {
+ if (mEncoderStarted) mEncoder.stop();
+ mEncoder.release();
+ mEncoder = null;
+ }
+ }
+ @Override
+ public void release() {
+ releaseDecoders();
+ releaseEncoder();
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/InputSurface.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/InputSurface.java
new file mode 100644
index 0000000..9793c89
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/InputSurface.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/InputSurface.java
+// blob: 157ed88d143229e4edb6889daf18fb73aa2fc5a5
+package net.ypresto.androidtranscoder.engine;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.view.Surface;
+/**
+ * Holds state associated with a Surface used for MediaCodec encoder input.
+ *
+ * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that
+ * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent
+ * to the video encoder.
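+ *
+ * A minimal usage sketch (illustrative; {@code format} and {@code frameTimeNs} are supplied by the caller):
+ * <pre>{@code
+ * MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
+ * encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+ * InputSurface inputSurface = new InputSurface(encoder.createInputSurface());
+ * inputSurface.makeCurrent();
+ * encoder.start();
+ * // ... draw a frame with GL ...
+ * inputSurface.setPresentationTime(frameTimeNs);
+ * inputSurface.swapBuffers(); // submits the frame to the encoder
+ * }</pre>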
+ */
+class InputSurface {
+ private static final String TAG = "InputSurface";
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+ private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+ private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+ private Surface mSurface;
+ /**
+ * Creates an InputSurface from a Surface.
+ */
+ public InputSurface(Surface surface) {
+ if (surface == null) {
+ throw new NullPointerException();
+ }
+ mSurface = surface;
+ eglSetup();
+ }
+ /**
+ * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
+ */
+ private void eglSetup() {
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+ mEGLDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
+ }
+ // Configure EGL for recordable and OpenGL ES 2.0. We want enough RGB bits
+ // to minimize artifacts from possible YUV conversion.
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+ }
+ // Configure context for OpenGL ES 2.0.
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+ mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+ attrib_list, 0);
+ checkEglError("eglCreateContext");
+ if (mEGLContext == null) {
+ throw new RuntimeException("null context");
+ }
+ // Create a window surface, and attach it to the Surface we received.
+ int[] surfaceAttribs = {
+ EGL14.EGL_NONE
+ };
+ mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
+ surfaceAttribs, 0);
+ checkEglError("eglCreateWindowSurface");
+ if (mEGLSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ }
+ /**
+ * Discard all resources held by this class, notably the EGL context. Also releases the
+ * Surface that was passed to our constructor.
+ */
+ public void release() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+ mSurface.release();
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ mEGLSurface = EGL14.EGL_NO_SURFACE;
+ mSurface = null;
+ }
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent() {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+ public void makeUnCurrent() {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ */
+ public boolean swapBuffers() {
+ return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
+ }
+ /**
+ * Returns the Surface that the MediaCodec receives buffers from.
+ */
+ public Surface getSurface() {
+ return mSurface;
+ }
+ /**
+ * Queries the surface's width.
+ */
+ public int getWidth() {
+ int[] value = new int[1];
+ EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_WIDTH, value, 0);
+ return value[0];
+ }
+ /**
+ * Queries the surface's height.
+ */
+ public int getHeight() {
+ int[] value = new int[1];
+ EGL14.eglQuerySurface(mEGLDisplay, mEGLSurface, EGL14.EGL_HEIGHT, value, 0);
+ return value[0];
+ }
+ /**
+ * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
+ */
+ public void setPresentationTime(long nsecs) {
+ EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
+ }
+ /**
+ * Checks for EGL errors.
+ */
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/InvalidOutputFormatException.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/InvalidOutputFormatException.java
new file mode 100644
index 0000000..4c6bbd9
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/InvalidOutputFormatException.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2015 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.engine;
+
+public class InvalidOutputFormatException extends RuntimeException {
+ public InvalidOutputFormatException(String detailMessage) {
+ super(detailMessage);
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/MediaFormatValidator.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/MediaFormatValidator.java
new file mode 100644
index 0000000..755a9fb
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/MediaFormatValidator.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2015 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.engine;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+
+public class MediaFormatValidator {
+ // Refer: http://en.wikipedia.org/wiki/H.264/MPEG-4_AVC#Profiles
+ private static final byte PROFILE_IDC_BASELINE = 66;
+
+
+ public static void validateVideoOutputFormat(MediaFormat format) {
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ // Refer: http://developer.android.com/guide/appendix/media-formats.html#core
+ // Refer: http://en.wikipedia.org/wiki/MPEG-4_Part_14#Data_streams
+ if (!validateEncoderMimeType(mime))
+ throw new InvalidOutputFormatException("Video codecs not supported, actual mime type: " + mime);
+ }
+
+ public static void validateAudioOutputFormat(MediaFormat format) {
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ if (!validateEncoderMimeType(mime))
+ throw new InvalidOutputFormatException("Audio codecs not supported, actual mime type: " + mime);
+ }
+
+ private static boolean validateEncoderMimeType(String mime) {
+ // See https://developer.android.com/reference/android/media/MediaCodecInfo
+ // Code on that page was updated to use getCodecInfos rather than the deprecated getCodecInfoAt()
+ MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
+ MediaCodecInfo[] codecInfos = list.getCodecInfos();
+ for (MediaCodecInfo info : codecInfos) {
+ if (info.isEncoder()) {
+ String[] types = info.getSupportedTypes();
+ for (int j = 0; j < types.length; j++) {
+ if (types[j].equalsIgnoreCase(mime)) {
+ return true;
+ }
+ }
+ }
+ }
+ return false;
+ }
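+ /**
+ * Returns true if any encoder on the device advertises video capabilities
+ * that support the given frame size.
+ */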
+ public static boolean validateResolution(int width, int height) {
+ // See https://developer.android.com/reference/android/media/MediaCodecInfo
+ // Code on that page was updated to use getCodecInfos rather than the deprecated getCodecInfoAt()
+ MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
+ MediaCodecInfo[] codecInfos = list.getCodecInfos();
+ for (MediaCodecInfo info : codecInfos) {
+ if (info.isEncoder()) {
+ String[] types = info.getSupportedTypes();
+ for (int j = 0; j < types.length; j++) {
+ MediaCodecInfo.CodecCapabilities cap = info.getCapabilitiesForType(types[j]);
+ MediaCodecInfo.VideoCapabilities vcap = cap != null ? cap.getVideoCapabilities() : null;
+ if (vcap != null && vcap.isSizeSupported(width, height))
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java
new file mode 100644
index 0000000..67c893b
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/MediaTranscoderEngine.java
@@ -0,0 +1,424 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.engine;
+
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.MediaMetadataRetriever;
+import android.media.MediaMuxer;
+import net.ypresto.androidtranscoder.TLog;
+
+import net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+import net.ypresto.androidtranscoder.utils.MediaExtractorUtils;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Internal engine, do not use this directly.
+ */
+// TODO: treat encrypted data
+public class MediaTranscoderEngine {
+ private static final String TAG = "MediaTranscoderEngine";
+ private static final double PROGRESS_UNKNOWN = -1.0;
+ private static final long SLEEP_TO_WAIT_TRACK_TRANSCODERS = 10;
+ private static final long PROGRESS_INTERVAL_STEPS = 10;
+ private FileDescriptor mFirstFileDescriptorWithVideo;
+ private TrackTranscoder mVideoTrackTranscoder;
+ private TrackTranscoder mAudioTrackTranscoder;
+ private LinkedHashMap<String, MediaExtractor> mVideoExtractor;
+ private LinkedHashMap<String, MediaExtractor> mAudioExtractor;
+ private MediaMuxer mMuxer;
+ private volatile double mProgress;
+ private ProgressCallback mProgressCallback;
+ private long mDurationUs;
+ private long mOutputPresentationTimeUs = 0L;
+ int mOutputRotation = 0;
+ int mOutputWidth = 0;
+ int mOutputHeight = 0;
+
+ /**
+ * The throttle ensures that one encoder doesn't overrun another and produce output
+ * timestamps that are too far apart from one another. A low-water mark is kept for the
+ * presentation time, and all decoder actions must yield a presentation time at least that
+ * high, or they must re-queue the buffer until they catch up.
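+ *
+ * A sketch of the intended driving loop (channel names, times and flags are illustrative):
+ * <pre>{@code
+ * throttle.startSegment();
+ * throttle.participate("video");
+ * throttle.participate("audio");
+ * while (!done) {
+ *     if (throttle.canProceed("video", videoPtsUs, videoEos)) {
+ *         stepVideoPipeline(); // hypothetical helper
+ *     }
+ *     if (throttle.canProceed("audio", audioPtsUs, audioEos)) {
+ *         stepAudioPipeline(); // hypothetical helper
+ *     }
+ *     throttle.step();
+ *     if (throttle.shouldCancel()) {
+ *         throw new IllegalStateException("Timed out waiting for buffer");
+ *     }
+ * }
+ * }</pre>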
+ */
+ private long ThrottleLimit = 250000L;
+ private long ThrottleSeed = 24L * 60L * 60L * 1000000L;
+ private long maxBlockTime = 5000L;
+ //private long maxBlockTime = 500000l;
+ public class TranscodeThrottle {
+ private long mPresentationThreshold = ThrottleLimit;
+ private Date mBlockedStartTime = null;
+ private boolean mBufferProcessed = false;
+ private boolean mShouldCancel = false;
+ LinkedHashMap<String, Long> mLowestPresentationTime;
+
+ public void participate (String channel) {
+ mLowestPresentationTime.put(channel, null);
+ }
+ public void departicipate(String channel) { mLowestPresentationTime.remove(channel);}
+ public void startSegment() {
+ mLowestPresentationTime = new LinkedHashMap<>();
+ }
+
+ public boolean canProceed(String channel, long presentationTime, boolean endOfStream) {
+
+ mLowestPresentationTime.put(channel, endOfStream ? -1 : presentationTime);
+
+ // If not too far ahead of target allow processing
+ return presentationTime <= mPresentationThreshold;
+ }
+
+ public void step() {
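+ // Advance the low-water mark: once every participating channel has reported,
+ // move the threshold to the slowest channel's time plus ThrottleLimit, and
+ // track how long the pipeline has been blocked so stalls can be cancelled.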
+ long newPresentationThreshold = ThrottleSeed;
+ boolean allChannelsReporting = true;
+ for (Map.Entry<String, Long> entry : mLowestPresentationTime.entrySet()) {
+
+ if (entry.getValue() == null)
+ allChannelsReporting = false;
+ else if (entry.getValue() < 1)
+ continue;
+ else if (entry.getValue() < newPresentationThreshold)
+ newPresentationThreshold = entry.getValue();
+ }
+ if (allChannelsReporting) {
+ if (mPresentationThreshold != newPresentationThreshold + ThrottleLimit)
+ mBufferProcessed = true;
+ mPresentationThreshold = newPresentationThreshold + ThrottleLimit;
+ for (Map.Entry<String, Long> entry : mLowestPresentationTime.entrySet()) {
+ entry.setValue(null);
+ }
+
+ }
+ if (!mBufferProcessed) {
+ if (mBlockedStartTime == null)
+ mBlockedStartTime = new Date();
+ else
+ mShouldCancel = ((new Date()).getTime() > (mBlockedStartTime.getTime() + maxBlockTime));
+ } else {
+ mShouldCancel = false;
+ mBlockedStartTime = null;
+ }
+ mBufferProcessed = false;
+ }
+ public boolean shouldCancel() {
+ return mShouldCancel;
+ }
+ public void log() {
+ TLog.e(TAG, "Threshold " + mPresentationThreshold);
+ for (Map.Entry<String, Long> entry : mLowestPresentationTime.entrySet()) {
+ TLog.e(TAG, "Channel " + entry.getKey() + " PT:" + entry.getValue());
+ }
+ }
+ }
+ private TranscodeThrottle mThrottle = new TranscodeThrottle();
+
+ /**
+ * Do not use this constructor unless you know what you are doing.
+ */
+ public MediaTranscoderEngine() {
+ mAudioExtractor = new LinkedHashMap<>();
+ mVideoExtractor = new LinkedHashMap<>();
+ }
+
+ public ProgressCallback getProgressCallback() {
+ return mProgressCallback;
+ }
+
+ public void setProgressCallback(ProgressCallback progressCallback) {
+ mProgressCallback = progressCallback;
+ }
+
+ /**
+ * NOTE: This method is thread safe.
+ */
+ public double getProgress() {
+ return mProgress;
+ }
+
+ /**
+ * Run video transcoding. Blocks the current thread.
+ * Audio data will not be transcoded; the original stream will be written to the output file.
+ *
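+ * A minimal usage sketch (illustrative; {@code timeLine}, {@code outputPath} and
+ * {@code formatStrategy} are built by the caller):
+ * <pre>{@code
+ * MediaTranscoderEngine engine = new MediaTranscoderEngine();
+ * engine.setProgressCallback(progress -> TLog.d("Demo", "progress=" + progress));
+ * engine.transcodeVideo(timeLine, outputPath, formatStrategy); // blocks until done
+ * }</pre>
+ *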
+ * @param timeLine Time line of segments
+ * @param outputPath File path to output transcoded video file.
+ * @param formatStrategy Output format strategy.
+ * @throws IOException when input or output file could not be opened.
+ * @throws InvalidOutputFormatException when output format is not supported.
+ * @throws InterruptedException when the transcode is cancelled.
+ */
+ public void transcodeVideo(TimeLine timeLine, String outputPath, MediaFormatStrategy formatStrategy) throws IOException, InterruptedException {
+
+ timeLine.prepare();
+ if (outputPath == null) {
+ throw new NullPointerException("Output path cannot be null.");
+ }
+ try {
+ mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
+ setupTrackTranscoders(timeLine, formatStrategy);
+ if (mFirstFileDescriptorWithVideo == null) {
+ throw new IllegalStateException("Data source is not set.");
+ }
+ try {
+ mMuxer.setOrientationHint(mOutputRotation);
+ } catch (IllegalArgumentException e) {
+ TLog.e(TAG, "Unable to set orientation in Muxer");
+ }
+
+ runPipelines(timeLine);
+ mMuxer.stop();
+ TLog.d(TAG, "Muxer Stopped");
+ } finally {
+ try {
+ if (mVideoTrackTranscoder != null) {
+ //mVideoTrackTranscoder.release();
+ mVideoTrackTranscoder = null;
+ }
+ if (mAudioTrackTranscoder != null) {
+ //mAudioTrackTranscoder.release();
+ mAudioTrackTranscoder = null;
+ }
+ for (Map.Entry<String, MediaExtractor> entry : mAudioExtractor.entrySet()) {
+ entry.getValue().release();
+ }
+ for (Map.Entry<String, MediaExtractor> entry : mVideoExtractor.entrySet()) {
+ entry.getValue().release();
+ }
+ } catch (RuntimeException e) {
+ // Too fatal to make alive the app, because it may leak native resources.
+ //noinspection ThrowFromFinallyBlock
+ throw new Error("Could not shutdown extractor, codecs and muxer pipeline.", e);
+ }
+ try {
+ if (mMuxer != null) {
+ mMuxer.release();
+ mMuxer = null;
+ }
+ } catch (RuntimeException e) {
+ TLog.e(TAG, "Failed to release muxer.", e);
+ }
+ }
+ }
+
+
+ /**
+ * Set up MediaExtractors for every possible case in each output segment, but defer connecting
+ * the decoders until they are needed. There is a limit, based on device resources, to how many
+ * decoders can run at the same time, and this keeps that number to the absolute minimum needed.
+ *
+ * Invoke the extractor to get track information, which is used to determine the high-level
+ * output format details. Set up a QueuedMuxer, which delays muxing until the encoder has
+ * enough information to call its setOutputFormat method and set the detailed output format.
+ *
+ * @param timeLine
+ * @param formatStrategy
+ * @throws IOException
+ */
+ private void setupTrackTranscoders(TimeLine timeLine, MediaFormatStrategy formatStrategy) throws IOException {
+
+ // Setup all extractors for all segments, finding the first video and audio track to establish an interim output format
+ MediaFormat videoOutputFormat = null;
+ MediaFormat audioOutputFormat = null;
+ MediaExtractorUtils.TrackResult trackResult = null;
+ boolean allowPassthru = false;//timeLine.getChannels().size() == 1;
+ for (Map.Entry<String, TimeLine.InputChannel> inputChannelEntry : timeLine.getChannels().entrySet()) {
+
+ TimeLine.InputChannel inputChannel = inputChannelEntry.getValue();
+ String channelName = inputChannelEntry.getKey();
+ FileDescriptor fileDescriptor = inputChannel.mInputFileDescriptor;
+ if (inputChannel.mChannelType == TimeLine.ChannelType.VIDEO || inputChannel.mChannelType == TimeLine.ChannelType.AUDIO_VIDEO) {
+ MediaExtractor videoExtractor = new MediaExtractor();
+ try {
+ videoExtractor.setDataSource(fileDescriptor);
+ } catch (IOException e) {
+ TLog.w(TAG, "Transcode failed: input file (fd: " + fileDescriptor.toString() + ") not found");
+ throw e;
+ }
+ trackResult = MediaExtractorUtils.getFirstVideoAndAudioTrack(videoExtractor);
+ if (trackResult.mVideoTrackFormat != null) {
+ videoExtractor.selectTrack(trackResult.mVideoTrackIndex);
+ mVideoExtractor.put(channelName, videoExtractor);
+ if (videoOutputFormat == null) {
+ videoOutputFormat = formatStrategy.createVideoOutputFormat(trackResult.mVideoTrackFormat, allowPassthru);
+ mFirstFileDescriptorWithVideo = fileDescriptor;
+ }
+ MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
+ mediaMetadataRetriever.setDataSource(fileDescriptor);
+ Long duration;
+ try {
+ duration = Long.parseLong(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)) * 1000;
+ } catch (NumberFormatException e) {
+ duration = -1L;
+ }
+ TLog.d(TAG, "Duration of " + channelName + ": (us): " + duration);
+ inputChannel.mLengthUs = duration;
+ MediaFormat format = videoExtractor.getTrackFormat(trackResult.mVideoTrackIndex);
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ if (mime.startsWith("video/")) {
+ if (format.containsKey(MediaFormat.KEY_FRAME_RATE)) {
+ long frameLength = 1000000 / format.getInteger(MediaFormat.KEY_FRAME_RATE);
+ TLog.d(TAG, "Frame Length of " + channelName + ": " + frameLength);
+ inputChannel.mVideoFrameLength = frameLength;
+ }
+ }
+ }
+ }
+ if (inputChannel.mChannelType == TimeLine.ChannelType.AUDIO || inputChannel.mChannelType == TimeLine.ChannelType.AUDIO_VIDEO) {
+ MediaExtractor audioExtractor = new MediaExtractor();
+ try {
+ audioExtractor.setDataSource(fileDescriptor);
+ } catch (IOException e) {
+ TLog.w(TAG, "Transcode failed: input file (fd: " + fileDescriptor.toString() + ") not found");
+ throw e;
+ }
+ trackResult = MediaExtractorUtils.getFirstVideoAndAudioTrack(audioExtractor);
+ if (trackResult.mAudioTrackFormat != null) {
+ audioExtractor.selectTrack(trackResult.mAudioTrackIndex);
+ mAudioExtractor.put(inputChannelEntry.getKey(), audioExtractor);
+ if (audioOutputFormat == null) {
+ audioOutputFormat = formatStrategy.createAudioOutputFormat(trackResult.mAudioTrackFormat, allowPassthru);
+ }
+ }
+ }
+ }
+ mDurationUs = timeLine.getDuration();
+ TLog.d(TAG, "Total duration " + mDurationUs);
+ if (videoOutputFormat == null && audioOutputFormat == null) {
+ throw new InvalidOutputFormatException("MediaFormatStrategy returned pass-through for both video and audio. No transcoding is necessary.");
+ }
+ QueuedMuxer queuedMuxer = new QueuedMuxer(mMuxer, mVideoExtractor.keySet().size() > 0, mAudioExtractor.keySet().size() > 0,
+ new QueuedMuxer.Listener() {
+ @Override
+ public void onDetermineOutputFormat() {
+ if (mVideoTrackTranscoder != null)
+ MediaFormatValidator.validateVideoOutputFormat(mVideoTrackTranscoder.getDeterminedFormat());
+ if (mAudioTrackTranscoder != null)
+ MediaFormatValidator.validateAudioOutputFormat(mAudioTrackTranscoder.getDeterminedFormat());
+ }
+ }
+ );
+
+ if (mVideoExtractor.keySet().size() > 0) {
+ if (videoOutputFormat == null && trackResult != null) {
+ mVideoTrackTranscoder = new PassThroughTrackTranscoder(mVideoExtractor.entrySet().iterator().next().getValue(),
+ trackResult.mVideoTrackIndex, queuedMuxer, QueuedMuxer.SampleType.VIDEO);
+ } else {
+ mVideoTrackTranscoder = new VideoTrackTranscoder(mVideoExtractor, videoOutputFormat, queuedMuxer);
+ }
+
+ MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
+ mediaMetadataRetriever.setDataSource(mFirstFileDescriptorWithVideo);
+ mOutputRotation = Integer.parseInt(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION));
+ mOutputHeight = Integer.parseInt(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT));
+ mOutputWidth = Integer.parseInt(mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH));
+ mVideoTrackTranscoder.setupEncoder();
+ }
+
+ if (mAudioExtractor.keySet().size() > 0) {
+ if (audioOutputFormat == null) {
+ mAudioTrackTranscoder = new PassThroughTrackTranscoder(mAudioExtractor.entrySet().iterator().next().getValue(),
+ trackResult.mAudioTrackIndex, queuedMuxer, QueuedMuxer.SampleType.AUDIO);
+ } else {
+ mAudioTrackTranscoder = new AudioTrackTranscoder(mAudioExtractor, audioOutputFormat, queuedMuxer);
+ }
+ mAudioTrackTranscoder.setupEncoder();
+ }
+ }
+
+ private void runPipelines(TimeLine timeLine) throws IOException, InterruptedException {
+ long loopCount = 0;
+
+ long outputSyncTimeUs = 0L;
+ double lastProgress = -1;
+ if (mDurationUs <= 0) {
+ double progress = PROGRESS_UNKNOWN;
+ mProgress = progress;
+ if (mProgressCallback != null)
+ mProgressCallback.onProgress(progress); // unknown
+ }
+ for (TimeLine.Segment outputSegment : timeLine.getSegments()) {
+ outputSegment.start(mOutputPresentationTimeUs,
+ mVideoTrackTranscoder != null ? mVideoTrackTranscoder.getOutputPresentationTimeDecodedUs() : 0L,
+ mAudioTrackTranscoder != null ? mAudioTrackTranscoder.getOutputPresentationTimeDecodedUs() : 0L,
+ mVideoTrackTranscoder != null ? mVideoTrackTranscoder.getOutputPresentationTimeEncodedUs() : 0L,
+ mAudioTrackTranscoder != null ? mAudioTrackTranscoder.getOutputPresentationTimeEncodedUs() : 0L);
+ mThrottle.startSegment();
+ if (mAudioTrackTranscoder != null)
+ mAudioTrackTranscoder.setupDecoders(outputSegment, mThrottle, mOutputRotation, mOutputWidth, mOutputHeight);
+ if (mVideoTrackTranscoder != null)
+ mVideoTrackTranscoder.setupDecoders(outputSegment, mThrottle, mOutputRotation, mOutputWidth, mOutputHeight);
+ while (!((mVideoTrackTranscoder != null ? mVideoTrackTranscoder.isSegmentFinished() : true) &&
+ (mAudioTrackTranscoder != null ? mAudioTrackTranscoder.isSegmentFinished() : true))) {
+
+ boolean videoStepped = mVideoTrackTranscoder != null ? mVideoTrackTranscoder.stepPipeline(outputSegment, mThrottle) : true;
+ boolean audioStepped = mAudioTrackTranscoder != null ? mAudioTrackTranscoder.stepPipeline(outputSegment, mThrottle) : true;
+ boolean stepped = videoStepped || audioStepped;
+ mOutputPresentationTimeUs = Math.max(
+ mVideoTrackTranscoder != null ? mVideoTrackTranscoder.getOutputPresentationTimeDecodedUs() : 0L,
+ mAudioTrackTranscoder != null ? mAudioTrackTranscoder.getOutputPresentationTimeDecodedUs() : 0L);
+ loopCount++;
+
+
+ if (mVideoTrackTranscoder != null && mDurationUs > 0 && loopCount % PROGRESS_INTERVAL_STEPS == 0) {
+ double progress = Math.min(1.0, (double) mVideoTrackTranscoder.getOutputPresentationTimeDecodedUs() / mDurationUs);
+ mProgress = progress;
+ double roundedProgress = Math.round(progress * 100);
+ if (mProgressCallback != null && roundedProgress != lastProgress) mProgressCallback.onProgress(progress);
+ lastProgress = roundedProgress;
+ }
+
+ if (!stepped) {
+ try {
+ Thread.sleep(SLEEP_TO_WAIT_TRACK_TRANSCODERS);
+ } catch (InterruptedException e) {
+ // nothing to do
+ }
+ }
+ mThrottle.step();
+ if (mThrottle.shouldCancel()) {
+ TLog.d(TAG, "Cancel because of waiting for buffer");
+ mThrottle.log();
+ throw new IllegalStateException("Timed out waiting for buffer");
+ }
+ }
+
+ }
+ TLog.d(TAG, "Releasing transcoders");
+ if (mVideoTrackTranscoder != null) {
+ mVideoTrackTranscoder.release();
+ TLog.d(TAG, "Video PT: " + mVideoTrackTranscoder.getOutputPresentationTimeDecodedUs() +
+ " Time " + mVideoTrackTranscoder.getOutputPresentationTimeEncodedUs());
+ }
+ if (mAudioTrackTranscoder != null) {
+ mAudioTrackTranscoder.release();
+ TLog.d(TAG, " -- Audio PT:" + mAudioTrackTranscoder.getOutputPresentationTimeDecodedUs() +
+ " Time " + mAudioTrackTranscoder.getOutputPresentationTimeEncodedUs());
+ }
+ }
+
+ public interface ProgressCallback {
+ /**
+ * Called to notify progress. Same thread which initiated transcode is used.
+ *
+ * @param progress Progress in [0.0, 1.0] range, or negative value if progress is unknown.
+ */
+ void onProgress(double progress);
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/OutputSurface.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/OutputSurface.java
new file mode 100644
index 0000000..58babaf
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/OutputSurface.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/OutputSurface.java
+// blob: fc8ad9cd390c5c311f015d3b7c1359e4d295bc52
+// modified: change TIMEOUT_MS from 500 to 10000
+package net.ypresto.androidtranscoder.engine;
+import android.graphics.RectF;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+import net.ypresto.androidtranscoder.TLog;
+import android.view.Surface;
+
+/**
+ * Holds state associated with a Surface used for MediaCodec decoder output.
+ *
+ * The (width,height) constructor creates a SurfaceTexture with that default buffer size,
+ * and then creates a Surface for that SurfaceTexture. The Surface can be passed to
+ * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
+ * texture with updateTexImage, then render the texture with GL. (In this modified
+ * version the constructor's pbuffer/EGL setup is disabled; the caller's EGL context
+ * is used instead.)
+ *
+ * The no-arg constructor skips the GL preparation step and doesn't allocate a pbuffer.
+ * Instead, it just creates the Surface and SurfaceTexture, and when a frame arrives
+ * we just draw it on whatever surface is current.
+ *
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
+ * can potentially drop frames.
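+ *
+ * A decoder-side usage sketch (illustrative; {@code decoder} and {@code inputFormat} come from the caller):
+ * <pre>{@code
+ * OutputSurface outputSurface = new OutputSurface(width, height);
+ * decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
+ * decoder.start();
+ * // after releasing a decoder output buffer with render == true:
+ * outputSurface.awaitNewImage(); // waits for onFrameAvailable, then latches the texture
+ * }</pre>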
+ */
+class OutputSurface implements SurfaceTexture.OnFrameAvailableListener {
+ private static final String TAG = "OutputSurface";
+ private static final boolean VERBOSE = false;
+ private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+ private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+ private SurfaceTexture mSurfaceTexture;
+ private Surface mSurface;
+ private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
+ private boolean mFrameAvailable;
+ private boolean mTextureReady = false;
+ private boolean mEndOfStream;
+ private int mTextureID = -12345;
+ private float mAlpha = 1.0f;
+ private int mRotation = 0;
+ private int mSourceRotation = 0;
+ private RectF mOriginalSourceRect;
+ private RectF mSourceRect;
+ private RectF mDestRect;
+ private long mExtraTextureRenders = 0L;
+ private boolean mIsDuplicateTexture = false;
+ /**
+ * Creates an OutputSurface with the specified default buffer dimensions. In this
+ * modified version no new EGL context or pbuffer is created; the caller's current
+ * EGL context is used. Creates a Surface that can be passed to MediaCodec.configure().
+ */
+ public OutputSurface(int width, int height) {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException();
+ }
+ //eglSetup(width, height);
+ //makeCurrent();
+ setup(width, height);
+ }
+ /**
+ * Creates an OutputSurface using the current EGL context (rather than establishing a
+ * new one). Creates a Surface that can be passed to MediaCodec.configure().
+ */
+ public OutputSurface() {
+ setup(0, 0);
+ }
+ /**
+ * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
+ * with the SurfaceTexture.
+ */
+ private void setup(int width, int height) {
+
+ // Generate a texture ID
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ mTextureID = textures[0];
+
+ // Even if we don't access the SurfaceTexture after the constructor returns, we
+ // still need to keep a reference to it. The Surface doesn't retain a reference
+ // at the Java level, so if we don't either then the object can get GCed, which
+ // causes the native finalizer to run.
+ if (VERBOSE) TLog.d(TAG, "textureID=" + mTextureID);
+ mSurfaceTexture = new SurfaceTexture(mTextureID);
+ // This doesn't work if OutputSurface is created on the thread that CTS started for
+ // these test cases.
+ //
+ // The CTS-created thread has a Looper, and the SurfaceTexture constructor will
+ // create a Handler that uses it. The "frame available" message is delivered
+ // there, but since we're not a Looper-based thread we'll never see it. For
+ // this to do anything useful, OutputSurface must be created on a thread without
+ // a Looper, so that SurfaceTexture uses the main application Looper instead.
+ //
+ // Java language note: passing "this" out of a constructor is generally unwise,
+ // but we should be able to get away with it here.
+ mSurfaceTexture.setOnFrameAvailableListener(this);
+ if (width > 0)
+ mSurfaceTexture.setDefaultBufferSize(width, height);
+ mSurface = new Surface(mSurfaceTexture);
+ }
+ public int getTextureID () {
+ return mTextureID;
+ }
+ SurfaceTexture getSurfaceTexture () {
+ return mSurfaceTexture;
+ }
+ /**
+ * Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
+ */
+ private void eglSetup(int width, int height) {
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+ mEGLDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
+ }
+ // Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
+ // to be able to tell if the frame is reasonable.
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
+ }
+ // Configure context for OpenGL ES 2.0.
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+ mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+ attrib_list, 0);
+ checkEglError("eglCreateContext");
+ if (mEGLContext == null) {
+ throw new RuntimeException("null context");
+ }
+ // Create a pbuffer surface. By using this for output, we can use glReadPixels
+ // to test values in the output.
+ int[] surfaceAttribs = {
+ EGL14.EGL_WIDTH, width,
+ EGL14.EGL_HEIGHT, height,
+ EGL14.EGL_NONE
+ };
+ mEGLSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, configs[0], surfaceAttribs, 0);
+ checkEglError("eglCreatePbufferSurface");
+ if (mEGLSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ }
+ /**
+ * Discard all resources held by this class, notably the EGL context.
+ */
+ public void release() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+ mSurface.release();
+ // this causes a bunch of warnings that appear harmless but might confuse someone:
+ // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
+ mSurfaceTexture.release();
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ mEGLSurface = EGL14.EGL_NO_SURFACE;
+ mSurface = null;
+ mSurfaceTexture = null;
+ }
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent() {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+ /**
+ * Returns the Surface that we draw onto.
+ */
+ public Surface getSurface() {
+ return mSurface;
+ }
+ public boolean isTextureReady() {
+ return mTextureReady;
+ }
+ public boolean isDuplicateTexture() { return mIsDuplicateTexture;}
+ public void duplicateTextures(long extraTextureRenders) {
+ mExtraTextureRenders = extraTextureRenders;
+ }
+
+ public boolean isExtraTextures () {
+ return mExtraTextureRenders > 0;
+ }
+ public boolean consumeDuplicateTexture () {
+ if (mExtraTextureRenders > 0) {
+ --mExtraTextureRenders;
+ return true;
+ } else
+ return false;
+ }
+ public void setDuplicateTextureReady() {
+ mTextureReady = true;
+ mIsDuplicateTexture = true;
+ }
+ public void clearTextureReady() { mTextureReady = false;}
+
+ public float getAlpha () {return mAlpha;}
+ public void setAlpha(float alpha) {
+ mAlpha = alpha;
+ }
+ public int getRotation () {return mRotation;}
+ public void setRotation(int rotation) {mRotation = rotation;}
+ public int getSourceRotation () {return mSourceRotation;}
+ public void setSourceRotation(int rotation) {mSourceRotation = rotation;}
+ public RectF getOriginalSourceRect () {return mOriginalSourceRect;}
+ public void setOriginalSourceRect(RectF rect) { mOriginalSourceRect = rect;}
+ public RectF getSourceRect () {return mSourceRect;}
+ public void setSourceRect(RectF rect) { mSourceRect = rect;}
+ public RectF getDestRect () {return mDestRect;}
+ public void setDestRect(RectF rect) { mDestRect = rect;}
+ /**
+ * Latches the next buffer into the texture. Must be called from the thread that created
+ * the OutputSurface object, after the onFrameAvailable callback has signaled that new
+ * data is available.
+ */
+ public void awaitNewImage() {
+ final int TIMEOUT_MS = 10000;
+ synchronized (mFrameSyncObject) {
+ while (!mFrameAvailable) {
+ try {
+ // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
+ // stalling the test if it doesn't arrive.
+ mFrameSyncObject.wait(TIMEOUT_MS);
+ if (!mFrameAvailable) {
+ // TODO: if "spurious wakeup", continue while loop
+ throw new RuntimeException("Surface frame wait timed out");
+ }
+ } catch (InterruptedException ie) {
+ // shouldn't happen
+ throw new RuntimeException(ie);
+ }
+ }
+ mFrameAvailable = false;
+ }
+ // Latch the data.
+
+ updateTexture();
+
+ }
+ public void updateTexture () {
+ this.checkGlError("before updateTexImage");
+ mSurfaceTexture.updateTexImage();
+ mTextureReady = true;
+ mIsDuplicateTexture = false;
+ }
+ @Override
+ public void onFrameAvailable(SurfaceTexture st) {
+ if (VERBOSE) TLog.d(TAG, "new frame available");
+ synchronized (mFrameSyncObject) {
+ //if (mFrameAvailable) {
+ // throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
+ //}
+ mFrameAvailable = true;
+ mFrameSyncObject.notifyAll();
+ }
+ }
+ /**
+ * Checks for EGL errors.
+ */
+ public void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+ public void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ TLog.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+ public boolean isEndOfInputStream() {
+ return mEndOfStream;
+ }
+ public void signalEndOfInputStream () {
+ mEndOfStream = true;
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java
new file mode 100644
index 0000000..a3b63fe
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/PassThroughTrackTranscoder.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.engine;
+
+import android.annotation.SuppressLint;
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
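+/**
+ * A TrackTranscoder that copies samples straight from the MediaExtractor to the
+ * muxer without decoding or re-encoding. Used when the format strategy requests
+ * pass-through for a track.
+ */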
+public class PassThroughTrackTranscoder implements TrackTranscoder {
+ private final MediaExtractor mExtractor;
+ private final int mTrackIndex;
+ private final QueuedMuxer mMuxer;
+ private final QueuedMuxer.SampleType mSampleType;
+ private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
+ private int mBufferSize;
+ private ByteBuffer mBuffer;
+ private boolean mIsEOS;
+ private MediaFormat mActualOutputFormat;
+ private long mOutputPresentationTimeExtractedUs;
+ private long mOutputPresentationTimeEncodedUs = 0;
+
+ public PassThroughTrackTranscoder(MediaExtractor extractor, int trackIndex,
+ QueuedMuxer muxer, QueuedMuxer.SampleType sampleType) {
+ mExtractor = extractor;
+ mTrackIndex = trackIndex;
+ mMuxer = muxer;
+ mSampleType = sampleType;
+
+ mActualOutputFormat = mExtractor.getTrackFormat(mTrackIndex);
+ mMuxer.setOutputFormat(mSampleType, mActualOutputFormat);
+ mBufferSize = mActualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
+ mBuffer = ByteBuffer.allocateDirect(mBufferSize).order(ByteOrder.nativeOrder());
+ }
+
+ @Override
+ public void setupDecoders(TimeLine.Segment segment, MediaTranscoderEngine.TranscodeThrottle throttle, int outputRotation, int width, int height) {
+ }
+
+ @Override
+ public void setupEncoder () {
+
+ }
+
+ @Override
+ public MediaFormat getDeterminedFormat() {
+ return mActualOutputFormat;
+ }
+
+ @SuppressLint("Assert")
+ @Override
+ public boolean stepPipeline(TimeLine.Segment segment, MediaTranscoderEngine.TranscodeThrottle throttle) {
+ if (mIsEOS) return false;
+ int trackIndex = mExtractor.getSampleTrackIndex();
+ if (trackIndex < 0) {
+ mBuffer.clear();
+ mBufferInfo.set(0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+ mMuxer.writeSampleData(mSampleType, mBuffer, mBufferInfo);
+ mOutputPresentationTimeEncodedUs = mBufferInfo.presentationTimeUs;
+ mIsEOS = true;
+ return true;
+ }
+ if (trackIndex != mTrackIndex) return false;
+
+ mBuffer.clear();
+ int sampleSize = mExtractor.readSampleData(mBuffer, 0);
+ assert sampleSize <= mBufferSize;
+ boolean isKeyFrame = (mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
+ int flags = isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0;
+ mBufferInfo.set(0, sampleSize, mExtractor.getSampleTime(), flags);
+ mMuxer.writeSampleData(mSampleType, mBuffer, mBufferInfo);
+ mOutputPresentationTimeEncodedUs = mBufferInfo.presentationTimeUs;
+ mOutputPresentationTimeExtractedUs = mBufferInfo.presentationTimeUs;
+
+ mExtractor.advance();
+ return true;
+ }
+
+ @Override
+ public long getOutputPresentationTimeDecodedUs() { return mOutputPresentationTimeExtractedUs; }
+
+ @Override
+ public long getOutputPresentationTimeEncodedUs() {return mOutputPresentationTimeEncodedUs;}
+
+ @Override
+ public void setOutputPresentationTimeDecodedUs(long presentationTimeDecodedUs) {
+ mOutputPresentationTimeExtractedUs = presentationTimeDecodedUs;
+ }
+ @Override
+ public boolean isSegmentFinished() {
+ return mIsEOS;
+ }
+
+ @Override
+ public void releaseEncoder() {
+ }
+ @Override
+ public void releaseDecoders() {
+ }
+ @Override
+ public void release() {
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java
new file mode 100644
index 0000000..07f8043
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/QueuedMuxer.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2015 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.engine;
+
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import net.ypresto.androidtranscoder.TLog;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This class queues encoded samples until all output track formats are determined, then
+ * adds the tracks, starts the muxer, and flushes everything that was queued.
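+ *
+ * A usage sketch (illustrative; {@code muxer} is a MediaMuxer that has not been started,
+ * and {@code validateFormats()} is a hypothetical callback):
+ * <pre>{@code
+ * QueuedMuxer queued = new QueuedMuxer(muxer, true, true, () -> validateFormats());
+ * // Samples written before both formats are known are buffered in memory:
+ * queued.writeSampleData(QueuedMuxer.SampleType.VIDEO, buffer, bufferInfo);
+ * // Once each track reports its determined format, tracks are added, the muxer
+ * // starts, and the buffered samples are flushed:
+ * queued.setOutputFormat(QueuedMuxer.SampleType.VIDEO, videoFormat);
+ * queued.setOutputFormat(QueuedMuxer.SampleType.AUDIO, audioFormat);
+ * }</pre>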
+ */
+public class QueuedMuxer {
+ private static final String TAG = "QueuedMuxer";
+ private static final int BUFFER_SIZE = 512 * 1024; // I have no idea whether this value is appropriate or not...
+ private final MediaMuxer mMuxer;
+ private final Listener mListener;
+ private MediaFormat mVideoFormat;
+ private MediaFormat mAudioFormat;
+ private int mVideoTrackIndex;
+ private int mAudioTrackIndex;
+ private ByteBuffer mByteBuffer;
+ private final List<SampleInfo> mSampleInfoList;
+ private boolean mStarted;
+ private boolean mHasVideo;
+ private boolean mHasAudio;
+
+ public QueuedMuxer(MediaMuxer muxer, boolean hasVideo, boolean hasAudio, Listener listener) {
+ mHasAudio = hasAudio;
+ mHasVideo = hasVideo;
+ mMuxer = muxer;
+ mListener = listener;
+ mSampleInfoList = new ArrayList<>();
+ }
+
+ public void setOutputFormat(SampleType sampleType, MediaFormat format) {
+ switch (sampleType) {
+ case VIDEO:
+ mVideoFormat = format;
+ break;
+ case AUDIO:
+ mAudioFormat = format;
+ break;
+ default:
+ throw new AssertionError();
+ }
+ onSetOutputFormat();
+ }
+
+ private void onSetOutputFormat() {
+ if ((mHasVideo && mVideoFormat == null) || (mHasAudio && mAudioFormat == null))
+ return;
+ mListener.onDetermineOutputFormat();
+
+ if (mHasVideo) {
+ mVideoTrackIndex = mMuxer.addTrack(mVideoFormat);
+ TLog.v(TAG, "Added track #" + mVideoTrackIndex + " with " + mVideoFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
+ }
+ if (mHasAudio) {
+ mAudioTrackIndex = mMuxer.addTrack(mAudioFormat);
+ TLog.v(TAG, "Added track #" + mAudioTrackIndex + " with " + mAudioFormat.getString(MediaFormat.KEY_MIME) + " to muxer");
+ }
+ mMuxer.start();
+ mStarted = true;
+
+ if (mByteBuffer == null) {
+ mByteBuffer = ByteBuffer.allocate(0);
+ }
+ mByteBuffer.flip();
+ TLog.v(TAG, "Output format determined, writing " + mSampleInfoList.size() +
+ " samples / " + mByteBuffer.limit() + " bytes to muxer.");
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ int offset = 0;
+ for (SampleInfo sampleInfo : mSampleInfoList) {
+ sampleInfo.writeToBufferInfo(bufferInfo, offset);
+ mMuxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.mSampleType), mByteBuffer, bufferInfo);
+ offset += sampleInfo.mSize;
+ }
+ mSampleInfoList.clear();
+ mByteBuffer = null;
+ }
+
+ public void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
+ if (mStarted) {
+ mMuxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
+ return;
+ }
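+ // Muxer not started yet: copy the sample into the holding buffer and record
+ // its metadata so it can be replayed once the muxer starts.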
+ byteBuf.limit(bufferInfo.offset + bufferInfo.size);
+ byteBuf.position(bufferInfo.offset);
+ if (mByteBuffer == null) {
+ mByteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
+ }
+ mByteBuffer.put(byteBuf);
+ mSampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
+ }
+
+ private int getTrackIndexForSampleType(SampleType sampleType) {
+ switch (sampleType) {
+ case VIDEO:
+ return mVideoTrackIndex;
+ case AUDIO:
+ return mAudioTrackIndex;
+ default:
+ throw new AssertionError();
+ }
+ }
+
+ public enum SampleType {VIDEO, AUDIO}
+
+ private static class SampleInfo {
+ private final SampleType mSampleType;
+ private final int mSize;
+ private final long mPresentationTimeUs;
+ private final int mFlags;
+
+ private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
+ mSampleType = sampleType;
+ mSize = size;
+ mPresentationTimeUs = bufferInfo.presentationTimeUs;
+ mFlags = bufferInfo.flags;
+ }
+
+ private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
+ bufferInfo.set(offset, mSize, mPresentationTimeUs, mFlags);
+ }
+ }
+
+ public interface Listener {
+ void onDetermineOutputFormat();
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TextureRender.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TextureRender.java
new file mode 100644
index 0000000..c9ee882
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TextureRender.java
@@ -0,0 +1,284 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/TextureRender.java
+// blob: 4125dcfcfed6ed7fddba5b71d657dec0d433da6a
+// modified: removed unused method bodies
+// modified: use GL_LINEAR for GL_TEXTURE_MIN_FILTER to improve quality.
+package net.ypresto.androidtranscoder.engine;
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import net.ypresto.androidtranscoder.TLog;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import static android.opengl.GLES20.GL_COLOR_BUFFER_BIT;
+
+/**
+ * Code for rendering a texture onto a surface using OpenGL ES 2.0.
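+ *
+ * A usage sketch (illustrative; must run on the thread that owns the EGL context):
+ * <pre>{@code
+ * TextureRender renderer = new TextureRender(outputSurfaces, null); // no overlay
+ * renderer.surfaceCreated(); // compiles the shaders and configures each texture
+ * // per frame, once every OutputSurface has a latched texture:
+ * renderer.drawFrame();
+ * }</pre>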
+ */
+class TextureRender {
+ private static final String TAG = "TextureRender";
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+ private final float[] mTriangleVerticesData = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 0.f,
+ 1.0f, -1.0f, 0, 1.f, 0.f,
+ -1.0f, 1.0f, 0, 0.f, 1.f,
+ 1.0f, 1.0f, 0, 1.f, 1.f,
+ };
+ private FloatBuffer mTriangleVertices;
+ private static final String VERTEX_SHADER =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" + // highp here doesn't seem to matter
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "uniform float uAlpha;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ " gl_FragColor.a *= uAlpha;\n" +
+ "}\n";
+ private float[] mMVPMatrix = new float[16];
+ private float[] mSTMatrix = new float[16];
+ private int mProgram;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+ private int muAlphaHandle;
+
+ List<OutputSurface> mOutputSurfaces;
+
+
+ public TextureRender(List<OutputSurface> outputSurfaces, OutputSurface overlaySurface) {
+
+ mOutputSurfaces = new ArrayList<>(outputSurfaces);
+ if (overlaySurface != null)
+ mOutputSurfaces.add(overlaySurface);
+
+ mTriangleVertices = ByteBuffer.allocateDirect(
+ mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mTriangleVertices.put(mTriangleVerticesData).position(0);
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+
+ public void drawFrame() {
+
+ checkGlError("onDrawFrame start");
+
+ // Get first surface texture which will be used as the reference orientation
+
+
+ // Set up GLES
+ GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+ GLES20.glUniform1f(muAlphaHandle, 0.5f);
+
+ // Allow transparent blending
+ GLES20.glEnable(GLES20.GL_BLEND);
+ GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+
+ // Setup vertices
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+ //if (mOutputSurfaces.get(0).getFlip())
+ // Matrix.rotateM(mMVPMatrix, 0, 180, 0, 0, 1);
+
+
+ // Draw each texture
+ for (int textureIndex = 0; textureIndex < mOutputSurfaces.size(); ++textureIndex) {
+
+ OutputSurface outputSurface = mOutputSurfaces.get(textureIndex);
+
+ // The default matrix when rotating stretches the image to full width or full height,
+ // so we have to un-stretch it to the correct aspect ratio.
+ Matrix.setIdentityM(mMVPMatrix, 0);
+
+ if (outputSurface.getDestRect().width() != outputSurface.getSourceRect().width()) {
+ float inputWidth = outputSurface.getSourceRect().width();
+ float inputHeight = outputSurface.getSourceRect().height();
+ float outputWidth = outputSurface.getDestRect().width();
+ float outputHeight = outputSurface.getDestRect().height();
+ if (outputSurface.getDestRect().width() > outputSurface.getDestRect().height()) {
+ float aspectRatio = inputWidth / inputHeight;
+ float width = outputHeight * aspectRatio;
+ float widthScale = width / outputWidth;
+ Matrix.scaleM(mMVPMatrix, 0, widthScale, 1.0f, 1.0f);
+ } else {
+ float aspectRatio = inputWidth / inputHeight;
+ float height = outputWidth * aspectRatio;
+ float heightScale = height / outputHeight;
+ Matrix.scaleM(mMVPMatrix, 0, 1.0F, heightScale, 1.0f);
+ }
+ }
+ outputSurface.getSurfaceTexture().getTransformMatrix(mSTMatrix);
+
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, outputSurface.getTextureID());
+ GLES20.glUniform1f(muAlphaHandle, mOutputSurfaces.get(textureIndex).getAlpha());
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkGlError("glDrawArrays");
+ }
+
+ GLES20.glDisable(GLES20.GL_BLEND);
+ GLES20.glFinish();
+
+ for (int textureIndex = 0; textureIndex < mOutputSurfaces.size(); ++textureIndex) {
+ mOutputSurfaces.get(textureIndex).clearTextureReady();
+ }
+ }
+ /**
+ * Initializes GL state. Call this after the EGL surface has been created and made current.
+ */
+ public void surfaceCreated() {
+ mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+ if (mProgram == 0) {
+ throw new RuntimeException("failed creating program");
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aTextureCoord");
+ }
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+ }
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uSTMatrix");
+ }
+ muAlphaHandle = GLES20.glGetUniformLocation(mProgram, "uAlpha");
+ checkGlError("glGetUniformLocation uAlpha");
+ if (muAlphaHandle == -1) {
+ throw new RuntimeException("Could not get uniform location for uAlpha");
+ }
+
+ for (int textureIndex = 0; textureIndex < mOutputSurfaces.size(); ++textureIndex) {
+
+ OutputSurface outputSurface = mOutputSurfaces.get(textureIndex);
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, outputSurface.getTextureID());
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ }
+ }
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ TLog.e(TAG, "Could not compile shader " + shaderType + ":");
+ TLog.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+ }
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+ if (program == 0) {
+ TLog.e(TAG, "Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ TLog.e(TAG, "Could not link program: ");
+ TLog.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+ }
+ public void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ TLog.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+ /**
+ * Saves the current frame to disk as a PNG image. Frame starts from (0,0).
+ *
+ * Useful for debugging. Not implemented in this port.
+ */
+ public static void saveFrame(String filename, int width, int height) {
+ throw new UnsupportedOperationException("Not implemented.");
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TimeLine.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TimeLine.java
new file mode 100644
index 0000000..ef932f1
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TimeLine.java
@@ -0,0 +1,512 @@
+package net.ypresto.androidtranscoder.engine;
+
+import net.ypresto.androidtranscoder.TLog;
+
+import java.io.FileDescriptor;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Represents the wiring for a time sequence in terms of input channels, output channels and filters
+ *
+ TimeLine timeline = new TimeLine(LogLevelForTests)
+ .addChannel("A", in1.getFileDescriptor())
+ .addChannel("B", in1.getFileDescriptor())
+ .addChannel("C", in1.getFileDescriptor())
+ .addAudioOnlyChannel("D", in2.getFileDescriptor())
+ .createSegment()
+ .output("C")
+ .output("D")
+ .duration(1000)
+ .timeLine().createSegment()
+ .output("C", TimeLine.Filter.OPACITY_DOWN_RAMP)
+ .output("A", TimeLine.Filter.OPACITY_UP_RAMP)
+ .output("D")
+ .duration(2000)
+ .timeLine().createSegment()
+ .duration(1500)
+ .output("A")
+ .output("D")
+ .text("Hello World").top(50).left(10).size(5).width(90).color(255,255,255,255)
+ .timeLine().createSegment()
+ .seek("B", 1000)
+ .output("B")
+ .duration(1500)
+ .output("D")
+ .timeLine();
+ */
+
+/**
+ * TimeLine contains a list of segments which defines each sequential segment of time that
+ * has a unique configuration of channels to be combined and output during that time range.
+ * TimeLine also contains inputChannels which may be assigned to each segment
+ */
+public class TimeLine {
+ private static final String TAG = "TimeLine";
+
+ static long TO_END_OF_FILE = -1;
+ private List<Segment> mSegments = new ArrayList<Segment>();
+ private LinkedHashMap<String, InputChannel> mTimeLineChannels = new LinkedHashMap<String, InputChannel>();
+ public TimeLine () {}
+ public TimeLine (int logLevel) {
+ TLog.setLevel(logLevel);
+ }
+ public TimeLine (int logLevel, String tags) {
+ TLog.setLevel(logLevel);
+ TLog.setTags(tags);
+ }
+
+ // Demote AUDIO_VIDEO channels whose audio is never used to VIDEO so that encoder setup
+ // can omit the audio track; otherwise the muxer complains when it receives no audio
+ public void prepare() {
+ HashMap<String, Boolean> trackHasAudio = new HashMap<String, Boolean>();
+
+ for (Segment segment : mSegments) {
+ for (HashMap.Entry<String, SegmentChannel> segmentChannelEntry : segment.mSegmentChannels.entrySet()) {
+
+ SegmentChannel segmentChannel = segmentChannelEntry.getValue();
+ if (segmentChannel.mTimeScale == null && segmentChannel.mFilter != Filter.MUTE)
+ trackHasAudio.put(segmentChannelEntry.getKey(), true);
+ }
+ }
+ for (HashMap.Entry<String, InputChannel> channelEntry : mTimeLineChannels.entrySet()) {
+ if (trackHasAudio.get(channelEntry.getKey()) == null && channelEntry.getValue().mChannelType == ChannelType.AUDIO_VIDEO)
+ channelEntry.getValue().mChannelType = ChannelType.VIDEO;
+ }
+
+ }
+
+ public Segment createSegment() {
+ TLog.i(TAG, "createSegment: ");
+ for (Segment segment : mSegments)
+ segment.isLastSegment = false;
+ Segment segment = new Segment(this);
+ mSegments.add(segment);
+ return segment;
+ }
+
+ /**
+ * Get a List of all segments
+ * @return list of all segments in timeline order
+ */
+ public List<Segment> getSegments() {
+ return mSegments;
+ }
+
+ /**
+ * Get a List of all channels used for creating the master list of extractors
+ * @return map of channel name to InputChannel
+ */
+ public LinkedHashMap<String, InputChannel> getChannels() {return mTimeLineChannels;}
+
+
+ /**
+ * Add a video/audio input and assign it to a channel
+ *
+ * @param inputChannel name of the channel
+ * @param inputFileDescriptor file descriptor of the video/audio input
+ * @return this TimeLine for chaining
+ */
+ public TimeLine addChannel(String inputChannel, FileDescriptor inputFileDescriptor) {
+ TLog.i(TAG, "addChannel: " + inputChannel + ":" + inputFileDescriptor.toString());
+ mTimeLineChannels.put(inputChannel, new InputChannel(inputFileDescriptor, ChannelType.AUDIO_VIDEO));
+ return this;
+ }
+
+
+ /**
+ * Add a still-image input and assign it to a channel
+ *
+ * @param inputChannel name of the channel
+ * @param inputFileDescriptor file descriptor of the image input
+ * @return this TimeLine for chaining
+ */
+ public TimeLine addImageChannel(String inputChannel, FileDescriptor inputFileDescriptor) {
+ TLog.i(TAG, "addChannel (image): " + inputChannel + ":" + inputFileDescriptor.toString());
+ mTimeLineChannels.put(inputChannel, new InputChannel(inputFileDescriptor, ChannelType.IMAGE));
+ return this;
+ }
+
+ /**
+ * Add a video-only input and assign it to a channel
+ *
+ * @param inputChannel name of the channel
+ * @param inputFileDescriptor file descriptor of the video input
+ * @return this TimeLine for chaining
+ */
+ public TimeLine addVideoOnlyChannel(String inputChannel, FileDescriptor inputFileDescriptor) {
+ TLog.i(TAG, "addChannel (video only): " + inputChannel + ":" + inputFileDescriptor.toString());
+ mTimeLineChannels.put(inputChannel, new InputChannel(inputFileDescriptor, ChannelType.VIDEO));
+ return this;
+ }
+
+ /**
+ * Add an audio-only input and assign it to a channel
+ *
+ * @param inputChannel name of the channel
+ * @param inputFileDescriptor file descriptor of the audio input
+ * @return this TimeLine for chaining
+ */
+ public TimeLine addAudioOnlyChannel(String inputChannel, FileDescriptor inputFileDescriptor) {
+ TLog.i(TAG, "addChannel (audio only): " + inputChannel + ":" + inputFileDescriptor.toString());
+ mTimeLineChannels.put(inputChannel, new InputChannel(inputFileDescriptor, ChannelType.AUDIO));
+ return this;
+ }
+
+ /**
+ * Add an input of an explicit channel type and assign it to a channel
+ *
+ * @param inputChannel name of the channel
+ * @param inputFileDescriptor file descriptor of the input
+ * @param channelType type of the channel (VIDEO, AUDIO, AUDIO_VIDEO or IMAGE)
+ * @return this TimeLine for chaining
+ */
+ public TimeLine addChannel(String inputChannel, FileDescriptor inputFileDescriptor, ChannelType channelType) {
+ TLog.i(TAG, "addChannel (" + channelType + "): " + inputChannel + ":" + inputFileDescriptor.toString());
+ mTimeLineChannels.put(inputChannel, new InputChannel(inputFileDescriptor, channelType));
+ return this;
+ }
+
+ /**
+ * Get the entire timeline duration
+ * @return total duration of all segments in microseconds
+ */
+ public Long getDuration () {
+ long durationUs = 0l;
+ for (Segment segment : getSegments()) {
+ durationUs += segment.getDuration();
+ }
+ return durationUs;
+ }
+
+ /**
+ * Represents a mapping of one or two input channels to an output channel, optionally
+ * applying a filter.
+ */
+ public class SegmentChannel {
+ public InputChannel mChannel;
+ public Filter mFilter;
+ public Long mTimeScale;
+ public Long mSeek;
+ boolean mAudioSeekRequested = false;
+ boolean mVideoSeekRequested = false;
+
+ public Long getVideoSeek() {
+ return mVideoSeekRequested ? null : mSeek;
+ }
+ public Long getAudioSeek() {
+ return mAudioSeekRequested ? null : mSeek;
+ }
+ public void seekRequestedVideo() {
+ mVideoSeekRequested = true;
+ }
+ public void seekRequestedAudio() {
+ mAudioSeekRequested = true;
+ }
+
+ SegmentChannel(InputChannel input, Filter filter) {
+ mChannel = input;
+ mFilter = filter;
+ }
+ }
+
+ public enum Filter {OPACITY_UP_RAMP, OPACITY_DOWN_RAMP, MUTE, SUPPRESS};
+ public enum ChannelType {VIDEO, AUDIO, AUDIO_VIDEO, IMAGE}
+
+ /**
+ * An input file / start time combination
+ */
+ public class InputChannel {
+ public Long mLengthUs; // Length based on metadata
+ public Long mVideoInputStartTimeUs = 0l;
+ public Long mAudioInputStartTimeUs = 0l;
+ public Long mInputEndTimeUs = 0l;
+ public Long mVideoInputOffsetUs = 0l;
+ public Long mAudioInputOffsetUs = 0l;
+ public Long mVideoInputAcutalEndTimeUs =0l;
+ public Long mAudioInputAcutalEndTimeUs =0l;
+ public long mVideoFrameLength = 1000000 / 24;
+ public long mSeekShortage = 0l;
+ public long mDurationShortage = 0l;
+ public Filter mFilter;
+ public ChannelType mChannelType;
+ public FileDescriptor mInputFileDescriptor = null;
+ public long mTimeToCut = 0l;
+ public long mTimeAlreadyCut = 0l;
+ public long mTimeToAdd = 0l;
+ public long mTimeAlreadyAdded = 0l;
+ public boolean mFrameWasCut = false;
+ public long mLastBufferPresentationTime = 0;
+ public boolean mMuteAudio = false;
+ InputChannel() {
+ }
+
+ InputChannel(FileDescriptor inputFileDescriptor, ChannelType channelType) {
+ mInputFileDescriptor = inputFileDescriptor;
+ mChannelType = channelType;
+ }
+ }
+
+ public class Segment {
+ private TimeLine mTimeLine;
+ private LinkedHashMap<String, SegmentChannel> mSegmentChannels = new LinkedHashMap<String, SegmentChannel>();
+ private SegmentChannel mLastSegmentChannel = null;
+ private HashMap<String, Long> mSeeks = new HashMap<String, Long>();
+ private Long mDuration;
+ public Long mOutputStartTimeUs;
+ public boolean isLastSegment = true;
+
+ public Long getDuration () {
+ if (mDuration != null)
+ return mDuration;
+
+ HashMap.Entry<String, SegmentChannel> firstChannelEntry = mSegmentChannels.entrySet().iterator().next();
+ return firstChannelEntry.getValue().mChannel.mLengthUs -
+ (mSeeks.get(firstChannelEntry.getKey()) == null ? 0l : mSeeks.get(firstChannelEntry.getKey()));
+ }
+ public SegmentChannel getSegmentChannel(String channel) {
+ return mSegmentChannels.get(channel);
+ }
+ public void start (Long presentationTime, Long videoPresentationTime, Long audioPresentationTime, Long videoEncodedTime, Long audioEncodedTime) {
+
+ mOutputStartTimeUs = presentationTime;
+
+ for (HashMap.Entry<String, SegmentChannel> segmentChannelEntry : mSegmentChannels.entrySet()) {
+
+ SegmentChannel segmentChannel = segmentChannelEntry.getValue();
+ String channelName = segmentChannelEntry.getKey();
+ InputChannel inputChannel = segmentChannel.mChannel;
+
+ // If we are at the start of a stream align the presentation times
+ if (inputChannel.mInputEndTimeUs == 0l) {
+ Long maxPresentation = Math.max(videoPresentationTime, audioPresentationTime);
+ videoPresentationTime = maxPresentation;
+ audioPresentationTime = maxPresentation;
+ }
+
+ // Round seeks down to a whole frame
+ Long actualSeek = mSeeks.get(channelName) != null ? mSeeks.get(channelName) : 0l;
+ Long seek = (actualSeek / inputChannel.mVideoFrameLength) * inputChannel.mVideoFrameLength;
+
+ // Calculate how much this rounding will affect the segment and add it to an ongoing error accumulator.
+ // If the error grows to the size of a frame, add it to the seek and remove it from the accumulator
+ inputChannel.mSeekShortage += (actualSeek - seek);
+ Long seekAddition = (inputChannel.mSeekShortage / inputChannel.mVideoFrameLength) * inputChannel.mVideoFrameLength;
+ inputChannel.mSeekShortage -= seekAddition;
+ seek += seekAddition;
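+ // Worked example (hypothetical numbers): at 24fps mVideoFrameLength is 41666us, so a
+ // requested seek of 100000us rounds down to 83332us; the 16668us shortage accumulates
+ // and is applied once it reaches a full frame, keeping long timelines from drifting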
+
+ // Get the requested track duration, which is either the segment duration or, in the
+ // case of time scaling up, the duration of the track
+ Long segmentDuration = getDuration();
+ Long actualTrackDuration = segmentChannel.mTimeScale != null ? segmentChannel.mTimeScale : segmentDuration;
+
+ // As with seeks, we round down to a whole frame, accumulate the error, and add it back once it is as big as a frame
+ Long trackDuration = (actualTrackDuration / inputChannel.mVideoFrameLength) * inputChannel.mVideoFrameLength;
+ inputChannel.mDurationShortage += (actualTrackDuration - trackDuration);
+ Long durationAddition = (inputChannel.mDurationShortage / inputChannel.mVideoFrameLength) * inputChannel.mVideoFrameLength;
+ inputChannel.mDurationShortage -= durationAddition;
+ trackDuration += durationAddition;
+
+ // The starting point in the track is where we left off plus the amount of the seek
+ inputChannel.mVideoInputStartTimeUs = seek + inputChannel.mInputEndTimeUs;
+ inputChannel.mAudioInputStartTimeUs = seek + inputChannel.mInputEndTimeUs;
+
+ // The amount we must add to the input time stamp to get the output time stamp is where we left off with
+ // the output time stamp minus where we left off with the input time stamp plus the amount we are seeking
+ inputChannel.mVideoInputOffsetUs = videoPresentationTime - (seek + inputChannel.mVideoInputAcutalEndTimeUs);
+ inputChannel.mAudioInputOffsetUs = audioPresentationTime - (seek + inputChannel.mAudioInputAcutalEndTimeUs);
+
+ // Calculate the time to be used to know when we end the segment and seed the actual
+ // end times which will be updated during transcoding
+ inputChannel.mInputEndTimeUs = inputChannel.mInputEndTimeUs + seek + trackDuration;
+ inputChannel.mAudioInputAcutalEndTimeUs = inputChannel.mInputEndTimeUs;
+ inputChannel.mVideoInputAcutalEndTimeUs = inputChannel.mInputEndTimeUs;
+
+ // Keep the seek time in the segment channel so if we revisit this track
+ // in subsequent segments we know where we left off
+ segmentChannel.mSeek = (seek > 0) ? inputChannel.mVideoInputStartTimeUs : null;
+
+ inputChannel.mFilter = segmentChannel.mFilter;
+ inputChannel.mMuteAudio = inputChannel.mFilter == Filter.MUTE || segmentChannel.mTimeScale != null;
+
+ if (segmentChannel.mTimeScale != null) {
+
+ // For time scaling up we figure out how much time should be cut and setup an
+ // accumulator the transcoder can use to dole out the cuts evenly over the segment
+ // this will be used to determine how many frames should be cut
+ if (trackDuration > segmentDuration) {
+ inputChannel.mTimeToCut = trackDuration - segmentDuration;
+ inputChannel.mTimeAlreadyCut = 0;
+ }
+ // For time scaling down we figure out how much time should be added and setup an
+ // accumulator the transcoder can use to dole out the additions evenly over the segment
+ // this will be used to determine how many duplicate frames should be inserted
+ if (trackDuration < segmentDuration) {
+ inputChannel.mTimeToAdd = segmentDuration - trackDuration;
+ inputChannel.mTimeAlreadyAdded = 0;
+ }
+ inputChannel.mAudioInputOffsetUs -= trackDuration;
+ inputChannel.mAudioInputOffsetUs += segmentDuration;
+ }
+ inputChannel.mFrameWasCut = false;
+
+ TLog.d(TAG, "Segment Channel " + channelName + " PT: " + presentationTime +
+ " VStart: " + inputChannel.mVideoInputStartTimeUs +
+ " AStart: " + inputChannel.mAudioInputStartTimeUs +
+ " Add: " + inputChannel.mTimeToAdd +
+ " VOff: " + inputChannel.mVideoInputOffsetUs +
+ " AOff: " + inputChannel.mAudioInputOffsetUs +
+ " duration: " + trackDuration +
+ " seek: " + seek + " ASeek: " +
+ " Cut: " + inputChannel.mTimeToCut +
+ " End: " + inputChannel.mInputEndTimeUs +
+ " VPT:" + videoPresentationTime +
+ " APT:" + audioPresentationTime +
+ " VET:" + videoEncodedTime +
+ " AET:" + audioEncodedTime +
+ " drift:" + (videoPresentationTime - audioPresentationTime));
+ }
+ }
+
+ public TimeLine timeLine () {return mTimeLine;}
+
+ /**
+ * Get all channels that participate in this segment
+ * @return map of channel name to InputChannel
+ */
+ public LinkedHashMap<String, InputChannel> getChannels() {
+ LinkedHashMap<String, InputChannel> channels = new LinkedHashMap<String, InputChannel>();
+ for (Map.Entry<String, SegmentChannel> entry : mSegmentChannels.entrySet())
+ channels.put(entry.getKey(), entry.getValue().mChannel);
+ return channels;
+ }
+
+ /**
+ * Get all image channels that participate in this segment
+ * @return map of channel name to InputChannel
+ */
+ public LinkedHashMap<String, InputChannel> getImageChannels() {
+ LinkedHashMap<String, InputChannel> channels = new LinkedHashMap<String, InputChannel>();
+ for (Map.Entry<String, SegmentChannel> entry : mSegmentChannels.entrySet())
+ if (entry.getValue().mChannel.mChannelType == ChannelType.IMAGE)
+ channels.put(entry.getKey(), entry.getValue().mChannel);
+ return channels;
+ }
+
+ /**
+ * Get all video channels that participate in this segment
+ * @return map of channel name to InputChannel
+ */
+ public LinkedHashMap<String, InputChannel> getVideoChannels() {
+ LinkedHashMap<String, InputChannel> channels = new LinkedHashMap<String, InputChannel>();
+ for (Map.Entry<String, SegmentChannel> entry : mSegmentChannels.entrySet())
+ if (entry.getValue().mChannel.mChannelType == ChannelType.VIDEO || entry.getValue().mChannel.mChannelType == ChannelType.AUDIO_VIDEO)
+ channels.put(entry.getKey(), entry.getValue().mChannel);
+ return channels;
+ }
+
+ /**
+ * Get all audio channels that participate in this segment
+ * @return map of channel name to InputChannel
+ */
+ public LinkedHashMap<String, InputChannel> getAudioChannels() {
+ LinkedHashMap<String, InputChannel> channels = new LinkedHashMap<String, InputChannel>();
+ for (Map.Entry<String, SegmentChannel> entry : mSegmentChannels.entrySet())
+ if (entry.getValue().mChannel.mChannelType == ChannelType.AUDIO || entry.getValue().mChannel.mChannelType == ChannelType.AUDIO_VIDEO)
+ channels.put(entry.getKey(), entry.getValue().mChannel);
+ return channels;
+ }
+
+ /**
+ * Private constructor - use TimeLine.createSegment() to create a segment
+ */
+ private Segment(TimeLine timeLine) {
+ mTimeLine = timeLine;
+ }
+
+ /**
+ * Set the duration of this segment in milliseconds; if not set, the segment runs to end of stream
+ * @param time duration in milliseconds
+ * @return this Segment for chaining
+ */
+ public Segment duration(long time) {
+ TLog.i(TAG, "duration: " + time);
+ this.mDuration = time * 1000l;
+ return this;
+ }
+
+ /**
+ * Set the start time of an input channel in milliseconds; if not set, the channel resumes where it left off
+ * @param channel name of the channel to seek
+ * @param time seek position in milliseconds
+ * @return this Segment for chaining
+ */
+ public Segment seek(String channel, long time) {
+ TLog.i(TAG, "seek: " + channel + " " + time);
+ this.mSeeks.put(channel, time * 1000l);
+ return this;
+ }
+
+ /**
+ * Add a single channel routed directly to the encoder
+ *
+ * @param inputChannelName
+ */
+ public Segment output(String inputChannelName) {
+ TLog.i(TAG, "output: " + inputChannelName);
+ InputChannel inputChannel = mTimeLineChannels.get(inputChannelName);
+ //if (inputChannel.mChannelType != ChannelType.AUDIO)
+ mLastSegmentChannel = new SegmentChannel(inputChannel, null);
+ mSegmentChannels.put(inputChannelName, mLastSegmentChannel);
+ return this;
+ }
+
+ /**
+ * Add a single channel input that is filtered before being sent to the encoder
+ *
+ * @param inputChannelName
+ * @param filter to be applied of type Filter
+ */
+ public Segment output(String inputChannelName, Filter filter) {
+ TLog.i(TAG, "output: " + inputChannelName + " with " + filter);
+ InputChannel inputChannel = mTimeLineChannels.get(inputChannelName);
+ mLastSegmentChannel = new SegmentChannel(inputChannel, filter);
+ mSegmentChannels.put(inputChannelName, mLastSegmentChannel );
+ return this;
+ }
+
+ /**
+ * Add a filter to the previous segment channel
+ * @param filter to be applied of type Filter
+ * @return Segment
+ */
+ public Segment filter(Filter filter) {
+ mLastSegmentChannel.mFilter = filter;
+ return this;
+ }
+
+ /**
+ * Scale the input so it matches the duration of the segment.
+ * Only values larger than the segment duration are currently supported (for speed-ups)
+ * @param timeScale input duration in milliseconds to be fit into the segment
+ * @return this Segment for chaining
+ */
+ public Segment timeScale(long timeScale) {
+ mLastSegmentChannel.mTimeScale = timeScale * 1000l;
+ return this;
+ }
+
+
+ int getChannelCount() {
+ return mSegmentChannels.size();
+ }
+
+ LinkedHashMap<String, SegmentChannel> getSegmentChannels() {
+ return mSegmentChannels;
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TrackTranscoder.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TrackTranscoder.java
new file mode 100644
index 0000000..832936c
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/TrackTranscoder.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.engine;
+
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+
+import java.util.LinkedHashMap;
+
+public interface TrackTranscoder {
+
+ void setupEncoder();
+ void setupDecoders(TimeLine.Segment segment, MediaTranscoderEngine.TranscodeThrottle throttle, int outputRotation, int width, int height);
+
+ /**
+ * Get the actual MediaFormat which is used to write to the muxer.
+ * The format is only determined after stepPipeline() has been called several times.
+ *
+ * @return Actual output format determined by the encoder, or {@code null} if not yet determined.
+ */
+ MediaFormat getDeterminedFormat();
+
+ /**
+ * Step the pipeline if output is available in any stage of it.
+ * It assumes the muxer has been started, so you should call muxer.start() first.
+ *
+ * @return true if data moved in pipeline.
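+ *
+ * <p>A typical drive loop (sketch only; assumes the muxer has already been started):
+ * <pre>
+ * while (!transcoder.isSegmentFinished()) {
+ * if (!transcoder.stepPipeline(segment, throttle)) {
+ * // no data moved anywhere in the pipeline; the caller may yield briefly
+ * }
+ * }
+ * </pre>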
+ */
+ boolean stepPipeline(TimeLine.Segment segment, MediaTranscoderEngine.TranscodeThrottle throttle);
+
+ /**
+ * Get the presentation time of the last sample written to the muxer.
+ *
+ * @return Presentation time in microseconds. The return value is undefined once writing has finished.
+ */
+ long getOutputPresentationTimeDecodedUs();
+ long getOutputPresentationTimeEncodedUs();
+
+ void setOutputPresentationTimeDecodedUs(long presentationTimeDecodedUs);
+ boolean isSegmentFinished();
+
+ void releaseDecoders();
+ void releaseEncoder();
+ void release();
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java
new file mode 100644
index 0000000..4c33204
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/engine/VideoTrackTranscoder.java
@@ -0,0 +1,777 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.engine;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.PorterDuff;
+import android.graphics.RectF;
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.view.Surface;
+
+import net.ypresto.androidtranscoder.TLog;
+
+import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants;
+import net.ypresto.androidtranscoder.utils.MediaExtractorUtils;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+// Refer: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/ExtractDecodeEditEncodeMuxTest.java
+public class VideoTrackTranscoder implements TrackTranscoder {
+
+ /**
+ * Wraps an extractor -> decoder -> output surface chain that corresponds to an input channel.
+ * The extractor is passed in at creation time and start() should be called when a segment
+ * is found that needs the wrapper.
+ */
+ private class DecoderWrapper {
+ private boolean mIsExtractorEOS;
+ private boolean mIsDecoderEOS;
+ private boolean mIsSegmentEOS;
+ private boolean mDecoderStarted;
+ private MediaExtractor mExtractor;
+ private MediaCodec mDecoder;
+ private ByteBuffer[] mDecoderInputBuffers;
+ private OutputSurface mOutputSurface;
+ private Integer mTrackIndex;
+ boolean mBufferRequeued;
+ int mResult;
+ private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
+ DecoderWrapper(MediaExtractor mediaExtractor) {
+ mExtractor = mediaExtractor;
+ }
+
+ public void start(int outputRotation, int outputWidth, int outputHeight) {
+ mOutputSurface = new OutputSurface();
+ MediaExtractorUtils.TrackResult trackResult = MediaExtractorUtils.getFirstVideoAndAudioTrack(mExtractor);
+ if (trackResult.mVideoTrackFormat != null) {
+ int trackIndex = trackResult.mVideoTrackIndex;
+ mTrackIndex = trackIndex;
+ mExtractor.selectTrack(trackIndex);
+ MediaFormat inputFormat = mExtractor.getTrackFormat(trackIndex);
+ int clipRotation = 0;
+ if (inputFormat.containsKey(MediaFormatExtraConstants.KEY_ROTATION_DEGREES))
+ clipRotation = inputFormat.getInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES);
+ mOutputSurface.setSourceRotation(clipRotation);
+ // Decoded video is rotated automatically in Android 5.0 lollipop.
+ // Turn off here because we don't want to encode rotated one.
+ // refer: https://android.googlesource.com/platform/frameworks/av/+blame/lollipop-release/media/libstagefright/Utils.cpp
+ int rotation = clipRotation - outputRotation;
+ if (rotation < 0)
+ rotation = 360 + rotation;
+ inputFormat.setInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES, rotation);
+ mOutputSurface.setRotation(rotation);
+ int clipWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
+ int clipHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
+ if (rotation == 90 || rotation == 270)
+ mOutputSurface.setSourceRect(new RectF(0, 0, clipHeight, clipWidth));
+ else
+ mOutputSurface.setSourceRect(new RectF(0, 0, clipWidth, clipHeight));
+ mOutputSurface.setOriginalSourceRect(new RectF(0, 0, clipWidth, clipHeight));
+ mOutputSurface.setDestRect(new RectF(0, 0, outputWidth, outputHeight));
+
+ try {
+ mDecoder = MediaCodec.createDecoderByType(inputFormat.getString(MediaFormat.KEY_MIME));
+ } catch (IOException e) {
+ throw new IllegalStateException(e);
+ }
+ mDecoder.configure(inputFormat, mOutputSurface.getSurface(), null, 0);
+ mDecoder.start();
+ mDecoderStarted = true;
+ mDecoderInputBuffers = mDecoder.getInputBuffers();
+ }
+ }
+ private float mPresentationTimeus;
+ private float mDurationUs;
+ private TimeLine.Filter mFilter;
+ private void setFilter(TimeLine.Filter filter, long presentationTimeUs, long durationUs) {
+ mFilter = filter;
+ mPresentationTimeus = presentationTimeUs;
+ mDurationUs = durationUs;
+
+ }
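+ // filterTick() below computes the ramp alpha each frame; for example, OPACITY_UP_RAMP
+ // over a 2-second segment gives alpha = (t - segmentStart) / 2e6, fading in from 0 to 1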
+ private void filterTick (float presentationTimeUs) {
+ if (mFilter == TimeLine.Filter.OPACITY_UP_RAMP) {
+ mOutputSurface.setAlpha((presentationTimeUs - mPresentationTimeus) / mDurationUs);
+ }
+ if (mFilter == TimeLine.Filter.OPACITY_DOWN_RAMP) {
+ mOutputSurface.setAlpha(1.0f - (presentationTimeUs - mPresentationTimeus) / mDurationUs);
+ }
+ }
+ private int dequeueOutputBuffer(long timeoutUs) {
+ if (!mBufferRequeued)
+ mResult = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
+ mBufferRequeued = false;
+ return mResult;
+ }
+ private void requeueOutputBuffer() {
+ mBufferRequeued = true;
+ }
+
+ private void release() {
+ if (mOutputSurface != null) {
+ mOutputSurface.release();
+ mOutputSurface = null;
+ }
+ if (mDecoder != null) {
+ mDecoder.stop();
+ mDecoder.release();
+ mDecoder = null;
+ }
+ }
+
+ };
+ /**
+ * Wraps an output surface with logic to write on its canvas. It must be passed the media
+ * extractor of the first video track so that it can coordinate its rotation with the video
+ * that it will ultimately be blended with in the TextureRender
+ */
+ private class CanvasWrapper {
+ private MediaExtractor mExtractor;
+ private OutputSurface mOutputSurface;
+ private boolean mDrawn = false;
+ CanvasWrapper(MediaExtractor mediaExtractor) {
+ mExtractor = mediaExtractor;
+ }
+
+ public void start(int outputRotation, int outputWidth, int outputHeight) {
+
+ MediaExtractorUtils.TrackResult trackResult = MediaExtractorUtils.getFirstVideoAndAudioTrack(mExtractor);
+ if (trackResult.mVideoTrackFormat != null) {
+ int trackIndex = trackResult.mVideoTrackIndex;
+ mExtractor.selectTrack(trackIndex);
+ MediaFormat inputFormat = mExtractor.getTrackFormat(trackIndex);
+
+ // Determine rotation of this particular video base on meta tag
+ int clipRotation = 0;
+ if (inputFormat.containsKey(MediaFormatExtraConstants.KEY_ROTATION_DEGREES))
+ clipRotation = inputFormat.getInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES);
+
+ // Decoded video is rotated automatically in Android 5.0 lollipop and above (our target)
+ // Turn off here because we don't want to rotate the video but rather preserve the meta tag for rotation
+ int rotation = clipRotation - outputRotation; // Subsequent videos may have to rotated to align
+ if (rotation < 0)
+ rotation = 360 + rotation;
+ inputFormat.setInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES, rotation);
+
+ // width & height of clip though this may be swapped if the clip has to be rotated
+ int clipWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
+ int clipHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
+
+ mOutputSurface = new OutputSurface(clipWidth, clipHeight);
+ mOutputSurface.setRotation(rotation); // Actual rotation in fragment shader
+ mOutputSurface.setSourceRotation(clipRotation); // Original rotation
+
+ // Compute rectangle of rotated video
+ if (rotation == 90 || rotation == 270)
+ mOutputSurface.setSourceRect(new RectF(0, 0, clipHeight, clipWidth));
+ else
+ mOutputSurface.setSourceRect(new RectF(0, 0, clipWidth, clipHeight));
+
+ // Original pre-rotated rectangle
+ mOutputSurface.setOriginalSourceRect(new RectF(0, 0, clipWidth, clipHeight));
+
+ // Rectangle of output
+ mOutputSurface.setDestRect(new RectF(0, 0, outputWidth, outputHeight));
+
+ }
+ }
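+ // Draws the given string three times (top, middle, bottom) on the canvas surface,
+ // rotating the canvas so the text matches the orientation of the source clip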
+ void draw (String str) {
+ if (str == null)
+ return;
+ int pivotX = 0;
+ int pivotY = 0;
+ int rotation = 0;
+ int width = Math.round(mOutputSurface.getOriginalSourceRect().width());
+ int height = Math.round(mOutputSurface.getOriginalSourceRect().height());
+ int outputHeight = height;
+ int outputWidth = width;
+
+ int fontSize = outputWidth / 40;
+ int offsetX = 0;
+
+ switch (mOutputSurface.getSourceRotation()) {
+ case 0:
+ pivotX = width / 2;
+ pivotY = height / 2;
+ break;
+ case 90:
+ pivotX = height / 2;
+ pivotY = height / 2;
+ rotation = -90;
+ outputHeight = width;
+ outputWidth = height;
+ break;
+ case 180:
+ pivotX = width / 2;
+ pivotY = height / 2;
+ rotation = 180;
+ break;
+ case 270:
+ pivotX = width - height / 2;
+ pivotY = height / 2;
+ rotation = -270;
+ offsetX = width - height;
+ outputHeight = width;
+ outputWidth = height;
+ break;
+ }
+ Surface surface = mOutputSurface.getSurface();
+ Canvas canvas = surface.lockCanvas(null);
+
+ Paint textPaint = new Paint();
+ //textPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_OVER));
+ textPaint.setStyle(Paint.Style.FILL);
+ textPaint.setTextSize(fontSize);
+ textPaint.setAntiAlias(true);
+ textPaint.setARGB(0xff, 0xff, 0xff, 0xff);
+ // Removed when upgrading to 28 canvas.save(Canvas.MATRIX_SAVE_FLAG);
+ canvas.save();
+ canvas.rotate(rotation, pivotX, pivotY);
+ canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
+ canvas.drawText(str, 10 + offsetX,10 + fontSize, textPaint);
+ canvas.drawText(str, 10 + offsetX,outputHeight / 2, textPaint);
+ canvas.drawText(str, 10 + offsetX,outputHeight - 10, textPaint);
+ canvas.restore();
+ surface.unlockCanvasAndPost(canvas);
+
+ }
+ private float mPresentationTimeus;
+ private float mDurationUs;
+ private TimeLine.Filter mFilter;
+ private void setFilter(TimeLine.Filter filter, long presentationTimeUs, long durationUs) {
+ mFilter = filter;
+ mPresentationTimeus = presentationTimeUs;
+ mDurationUs = durationUs;
+
+ }
+ private void filterTick (float presentationTimeUs) {
+ if (mFilter == TimeLine.Filter.OPACITY_UP_RAMP) {
+ mOutputSurface.setAlpha((presentationTimeUs - mPresentationTimeus) / mDurationUs);
+ }
+ if (mFilter == TimeLine.Filter.OPACITY_DOWN_RAMP) {
+ mOutputSurface.setAlpha(1.0f - (presentationTimeUs - mPresentationTimeus) / mDurationUs);
+ }
+ }
+
+
+ private void release() {
+ if (mOutputSurface != null) {
+ mOutputSurface.release();
+ mOutputSurface = null;
+ }
+ }
+
+ };
+ LinkedHashMap<String, DecoderWrapper> mDecoderWrappers = new LinkedHashMap<String, DecoderWrapper>();
+ CanvasWrapper mCanvasWrapper = null;
+
+ private static final String TAG = "VideoTrackTranscoder";
+ private static final long BUFFER_LEAD_TIME = 0;//100000; // Amount we will let other decoders get ahead
+ private static final int DRAIN_STATE_NONE = 0;
+ private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
+ private static final int DRAIN_STATE_CONSUMED = 2;
+ private final LinkedHashMap<String, MediaExtractor> mExtractors;
+ private final MediaFormat mOutputFormat;
+ private final QueuedMuxer mMuxer;
+ private MediaCodec mEncoder;
+ private ByteBuffer[] mEncoderOutputBuffers;
+ private MediaFormat mActualOutputFormat;
+ private InputSurface mEncoderInputSurfaceWrapper;
+ private boolean mIsEncoderEOS;
+ private boolean mIsSegmentFinished;
+ private boolean mEncoderStarted;
+ private int mTexturesReady = 0;
+ private int mTextures = 0;
+ private long mOutputPresentationTimeDecodedUs = 0l;
+ private long mOutputPresentationTimeEncodedUs = 0;
+ private long mLastBufferPresentationTime = 0l;
+ private long mFrameLength = 0l;
+ private TextureRender mTextureRender;
+ private boolean mIsLastSegment = false;
+ private final MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
+
+ public VideoTrackTranscoder(LinkedHashMap<String, MediaExtractor> extractors,
+ MediaFormat outputFormat, QueuedMuxer muxer) {
+ mOutputFormat = outputFormat;
+ mMuxer = muxer;
+ mExtractors = extractors;
+ }
+
+ @Override
+ public void setupEncoder() {
+ try {
+ mEncoder = MediaCodec.createEncoderByType(mOutputFormat.getString(MediaFormat.KEY_MIME));
+ } catch (IOException e) {
+ throw new IllegalStateException(e);
+ }
+ mEncoder.configure(mOutputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+ mEncoderInputSurfaceWrapper = new InputSurface(mEncoder.createInputSurface());
+ mEncoderInputSurfaceWrapper.makeCurrent();
+ mEncoder.start();
+ mEncoderStarted = true;
+ mEncoderOutputBuffers = mEncoder.getOutputBuffers();
+ }
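+ /**
+ * Free a decoder slot: when two or more decoders are already open, release one that
+ * is not used by this segment so a new decoder can take its place
+ */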
+ private void createWrapperSlot (TimeLine.Segment segment) {
+
+ if (mDecoderWrappers.keySet().size() < 2)
+ return;
+
+ // Release any inactive decoders
+ Iterator<Map.Entry<String, DecoderWrapper>> iterator = mDecoderWrappers.entrySet().iterator();
+ while (iterator.hasNext()) {
+ Map.Entry<String, DecoderWrapper> decoderWrapperEntry = iterator.next();
+ if (!segment.getVideoChannels().containsKey(decoderWrapperEntry.getKey())) {
+ decoderWrapperEntry.getValue().release();
+ segment.timeLine().getChannels().get(decoderWrapperEntry.getKey()).mInputEndTimeUs = 0l;
+ iterator.remove();
+ TLog.d(TAG, "setupDecoders Releasing Decoder " + decoderWrapperEntry.getKey());
+ return;
+ }
+ }
+
+ }
+ /**
+ * Setup all decoders and texture renderers needed for this segment - called at start of segment processing
+ * We also close any ones not needed for this segment that may have been opened in a previous segment
+ * @param segment
+ */
+ @Override
+ public void setupDecoders(TimeLine.Segment segment, MediaTranscoderEngine.TranscodeThrottle throttle, int outputRotation, int width, int height) {
+ if (mCanvasWrapper != null) {
+ mCanvasWrapper.release();
+ mCanvasWrapper = null;
+ }
+ // Start any decoders being opened for the first time
+
+ for (Map.Entry<String, TimeLine.InputChannel> entry : segment.getVideoChannels().entrySet()) {
+ TimeLine.InputChannel inputChannel = entry.getValue();
+ String channelName = entry.getKey();
+ DecoderWrapper decoderWrapper = mDecoderWrappers.get(channelName);
+ if (decoderWrapper == null) {
+ createWrapperSlot(segment);
+ decoderWrapper = new DecoderWrapper(mExtractors.get(channelName));
+ mDecoderWrappers.put(channelName, decoderWrapper);
+ }
+ decoderWrapper.mIsSegmentEOS = false;
+ if (!decoderWrapper.mDecoderStarted) {
+ TLog.d(TAG, "setupDecoders starting decoder for " + channelName);
+ decoderWrapper.start(outputRotation, width, height);
+ }
+
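+ // Canvas text overlay is currently disabled; flip 'false' to true to draw the
+ // debug info string over the output while troubleshooting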
+ if (false && mCanvasWrapper == null) {
+ mCanvasWrapper = new CanvasWrapper(mExtractors.get(channelName));
+ mCanvasWrapper.start(outputRotation, width, height);
+ }
+
+ }
+
+
+ // Create array of texture renderers for each patch in the segment
+
+ ArrayList<OutputSurface> outputSurfaces = new ArrayList<OutputSurface>(2);
+ for (Map.Entry<String, TimeLine.InputChannel> inputChannelEntry : segment.getVideoChannels().entrySet()) {
+ String channelName = inputChannelEntry.getKey();
+ TimeLine.InputChannel inputChannel = inputChannelEntry.getValue();
+ DecoderWrapper decoderWrapper = mDecoderWrappers.get(channelName);
+ decoderWrapper.mOutputSurface.setAlpha(1.0f);
+ if (!decoderWrapper.mIsDecoderEOS) {
+ outputSurfaces.add(decoderWrapper.mOutputSurface);
+ decoderWrapper.setFilter(inputChannel.mFilter, mOutputPresentationTimeDecodedUs, segment.getDuration());
+ throttle.participate("Video" + channelName);
+ } else
+ decoderWrapper.mIsSegmentEOS = true;
+ }
+ if (mCanvasWrapper != null)
+ mTextureRender = new TextureRender(outputSurfaces, mCanvasWrapper.mOutputSurface);
+ else
+ mTextureRender = new TextureRender(outputSurfaces, null);
+ mTextureRender.surfaceCreated();
+ TLog.d(TAG, "Surface Texture Created for " + outputSurfaces.size() + " surfaces");
+ mTextures = outputSurfaces.size();
+ mIsSegmentFinished = false;
+ mIsEncoderEOS = false;
+ mIsLastSegment = segment.isLastSegment;
+ mTexturesReady = 0;
+ if (mCanvasWrapper != null)
+ mCanvasWrapper.draw(null);
+ }
+
+ @Override
+ public MediaFormat getDeterminedFormat() {
+ return mActualOutputFormat;
+ }
+
+ @Override
+ public boolean stepPipeline(TimeLine.Segment outputSegment, MediaTranscoderEngine.TranscodeThrottle throttle) {
+ boolean stepped = false;
+ int status;
+ while (drainEncoder(0) != DRAIN_STATE_NONE) stepped = true;
+ do {
+ status = drainDecoders(outputSegment, 0, throttle);
+ if (status != DRAIN_STATE_NONE) stepped = true;
+ // NOTE: not repeating to keep from deadlock when encoder is full.
+ } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
+ while (drainExtractors(outputSegment, 0) != DRAIN_STATE_NONE) stepped = true;
+
+ return stepped;
+ }
+
+
+ @Override
+ public long getOutputPresentationTimeDecodedUs() {
+ return mOutputPresentationTimeDecodedUs;
+ }
+
+ @Override
+ public long getOutputPresentationTimeEncodedUs() {return mOutputPresentationTimeEncodedUs;}
+
+ @Override
+ public void setOutputPresentationTimeDecodedUs(long presentationTimeDecodedUs) {
+ mOutputPresentationTimeDecodedUs = presentationTimeDecodedUs;
+ }
+
+ @Override
+ public boolean isSegmentFinished() {
+ return mIsSegmentFinished;
+ }
+
+ // TODO: CloseGuard
+ @Override
+ public void releaseEncoder() {
+ TLog.d(TAG, "ReleaseEncoder");
+ if (mEncoderInputSurfaceWrapper != null) {
+ mEncoderInputSurfaceWrapper.release();
+ mEncoderInputSurfaceWrapper = null;
+ }
+ if (mEncoder != null) {
+ if (mEncoderStarted) mEncoder.stop();
+ mEncoder.release();
+ mEncoder = null;
+ }
+ //mTextureRender.surfaceFinished();
+ }
+
+ /**
+ * Release all decoders
+ */
+ @Override
+ public void releaseDecoders() {
+ for (Map.Entry<String, DecoderWrapper> decoderWrapperEntry : mDecoderWrappers.entrySet()) {
+ decoderWrapperEntry.getValue().release();
+ }
+ }
+
+ /**
+ * Release encoder and any lingering decoders
+ */
+ @Override
+ public void release () {
+ releaseDecoders();
+ releaseEncoder();
+ }
+
+ /**
+ * Drain extractors
+ * @param segment
+ * @param timeoutUs
+ * @return DRAIN_STATE_CONSUMED - pipeline has been stepped, DRAIN_STATE_NONE - could not step
+ */
+ private int drainExtractors(TimeLine.Segment segment, long timeoutUs) {
+
+ boolean sampleProcessed = false;
+
+ for (Map.Entry<String, TimeLine.InputChannel> inputChannelEntry : segment.getVideoChannels().entrySet()) {
+
+ String channelName = inputChannelEntry.getKey();
+ DecoderWrapper decoderWrapper = mDecoderWrappers.get(channelName);
+ if (!decoderWrapper.mIsExtractorEOS && !decoderWrapper.mOutputSurface.isExtraTextures()) {
+
+ // Find out which track the extractor has samples for next
+ int trackIndex = decoderWrapper.mExtractor.getSampleTrackIndex();
+
+ // Sample is for a different track (like audio), ignore it
+ if (trackIndex >= 0 && trackIndex != decoderWrapper.mTrackIndex) {
+ if (inputChannelEntry.getValue().mChannelType == TimeLine.ChannelType.AUDIO)
+ decoderWrapper.mExtractor.advance(); // Skip video
+ continue;
+ }
+
+ // Get buffer index to be filled
+ int result = decoderWrapper.mDecoder.dequeueInputBuffer(timeoutUs);
+
+ // If no buffers available ignore
+ if (result < 0)
+ continue;
+
+ // If end of stream
+ if (trackIndex < 0) {
+ decoderWrapper.mIsExtractorEOS = true;
+ decoderWrapper.mDecoder.queueInputBuffer(result, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
+ continue;
+ }
+
+ // Get the sample into the buffer
+ int sampleSize = decoderWrapper.mExtractor.readSampleData(decoderWrapper.mDecoderInputBuffers[result], 0);
+ long sampleTime = decoderWrapper.mExtractor.getSampleTime();
+ boolean isKeyFrame = (decoderWrapper.mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
+ decoderWrapper.mDecoder.queueInputBuffer(result, 0, sampleSize, sampleTime, isKeyFrame ? MediaCodec.BUFFER_FLAG_SYNC_FRAME : 0);
+ decoderWrapper.mExtractor.advance();
+ sampleProcessed = true;
+
+ // Seek at least to the previous key frame if needed because it's a lot faster
+ TimeLine.SegmentChannel segmentChannel = segment.getSegmentChannel(channelName);
+ Long seek = segmentChannel.getVideoSeek();
+ if (seek != null && (sampleTime + 500000) < seek) {
+ decoderWrapper.mExtractor.seekTo(seek, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
+ segmentChannel.seekRequestedVideo(); // So we don't repeat
+ TLog.d(TAG, "Extractor Seek " + seek);
+ }
+ }
+ }
+ return sampleProcessed ? DRAIN_STATE_CONSUMED : DRAIN_STATE_NONE;
+ }
+
+ /**
+ * We have to drain all decoders
+ * @param segment
+ * @param timeoutUs
+ * @return DRAIN_STATE_CONSUMED if a buffer was consumed, DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY on format/buffer changes, otherwise DRAIN_STATE_NONE
+ */
+ private int drainDecoders(TimeLine.Segment segment, long timeoutUs, MediaTranscoderEngine.TranscodeThrottle throttle) {
+ boolean consumed = false;
+ String info = "";
+
+ // Go through each decoder in the segment and get its frame into a texture
+ for (Map.Entry<String, TimeLine.InputChannel> inputChannelEntry : segment.getVideoChannels().entrySet()) {
+
+ String channelName = inputChannelEntry.getKey();
+ TimeLine.InputChannel inputChannel = inputChannelEntry.getValue();
+ DecoderWrapper decoderWrapper = mDecoderWrappers.get(channelName);
+
+ // Only process if we have not reached end of stream for this decoder or extractor
+ if (throttle.canProceed("Video" + channelName, mLastBufferPresentationTime, decoderWrapper.mIsDecoderEOS) &&
+ !decoderWrapper.mIsDecoderEOS && !decoderWrapper.mIsSegmentEOS) {
+
+ info += channelName + ": " + (decoderWrapper.mOutputSurface.isExtraTextures() ? "duplicate" :
+ decoderWrapper.mOutputSurface.isTextureReady() ? "deferred" : "fresh");
+
+ if (!decoderWrapper.mOutputSurface.isTextureReady() && decoderWrapper.mOutputSurface.consumeDuplicateTexture()) {
+ inputChannel.mVideoInputOffsetUs += mFrameLength;
+ mOutputPresentationTimeDecodedUs = Math.max(mOutputPresentationTimeDecodedUs, decoderWrapper.mBufferInfo.presentationTimeUs + inputChannel.mVideoInputOffsetUs);
+ decoderWrapper.filterTick(mOutputPresentationTimeDecodedUs);
+ ++mTexturesReady;
+ TLog.v(TAG, "Duplicate Texture ready " + mOutputPresentationTimeDecodedUs + " (" + decoderWrapper.mBufferInfo.presentationTimeUs + ")" + " for decoder " + channelName);
+
+ consumed = true;
+ decoderWrapper.mOutputSurface.setDuplicateTextureReady();
+ } else if (!decoderWrapper.mOutputSurface.isTextureReady() && !decoderWrapper.mOutputSurface.isEndOfInputStream()) {
+
+ int result = decoderWrapper.dequeueOutputBuffer(timeoutUs);
+ switch (result) {
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ continue;
+ case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+ case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+ TLog.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED for decoder " + channelName);
+ return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
+ }
+ consumed = true;
+ mFrameLength = decoderWrapper.mBufferInfo.presentationTimeUs - inputChannel.mLastBufferPresentationTime;
+ if (mFrameLength == 0)
+ mFrameLength = inputChannel.mVideoFrameLength;
+ if (inputChannel.mFrameWasCut) {
+ inputChannel.mFrameWasCut = false;
+ inputChannel.mVideoInputOffsetUs -= mFrameLength;
+ inputChannel.mTimeAlreadyCut += mFrameLength;
+ }
+
+ long bufferInputStartTime = decoderWrapper.mBufferInfo.presentationTimeUs;
+ long bufferInputEndTime = bufferInputStartTime + mFrameLength;
+ long bufferOutputTime = bufferInputStartTime + inputChannel.mVideoInputOffsetUs;
+ long bufferOutputEndTime = bufferInputEndTime + inputChannel.mVideoInputOffsetUs;
+ inputChannel.mLastBufferPresentationTime = bufferInputStartTime;
+ mLastBufferPresentationTime = bufferOutputTime;
+
+ TLog.v(TAG, "Processing Video Buffer on channel " + channelName +
+ " bufferInputStartTime=" + bufferInputStartTime +
+ " mFrameLength= " + mFrameLength +
+ " bufferOutputTime=" + bufferOutputTime +
+ " mVideoInputOffsetUs=" + inputChannel.mVideoInputOffsetUs +
+ " mOutputPresentationTimeDecodedUs=" + mOutputPresentationTimeDecodedUs);
+
+ // See if encoder is end-of-stream and propagate to output surface
+ if ((decoderWrapper.mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ decoderWrapper.mBufferInfo.size = 0;
+ decoderWrapper.mOutputSurface.signalEndOfInputStream();
+ decoderWrapper.mIsDecoderEOS = true;
+ TLog.d(TAG, "End of video stream on channel " + channelName);
+ mTextures = 1; // Write if there is a texture
+ decoderWrapper.mDecoder.releaseOutputBuffer(result, false);
+ } else {
+
+ boolean doRender = (decoderWrapper.mBufferInfo.size > 0);
+ // NOTE: doRender will block if buffer (of encoder) is full.
+ // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
+
+ // End of Segment
+ if (doRender && inputChannel.mInputEndTimeUs != null && bufferInputStartTime >= inputChannel.mInputEndTimeUs) {
+ decoderWrapper.requeueOutputBuffer();
+ decoderWrapper.mIsSegmentEOS = true;
+ TLog.d(TAG, "End of video Segment on channel " + channelName );
+ mTextures = 1; // Write if there is a texture
+
+ } else if (doRender && bufferInputStartTime >= inputChannel.mVideoInputStartTimeUs) {
+
+ // Determine whether time scaling down progress thus far dictates cutting a frame
+ boolean cutFrame = false;
+ if (inputChannel.mTimeToCut > inputChannel.mTimeAlreadyCut) {
+ double processed = decoderWrapper.mBufferInfo.presentationTimeUs - inputChannel.mVideoInputStartTimeUs;
+ double trackDuration = inputChannel.mInputEndTimeUs - inputChannel.mVideoInputStartTimeUs;
+ double progress = processed / trackDuration;
+ if (Math.round(progress * inputChannel.mTimeToCut) > inputChannel.mTimeAlreadyCut)
+ cutFrame = true;
+ }
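+ // Worked example (hypothetical numbers): cutting 1s from a 4s track, halfway
+ // through progress is 0.5 and Math.round(0.5 * 1000000) = 500000us of cut time
+ // should have accrued, so a frame is dropped whenever mTimeAlreadyCut lags that target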
+
+ // Determine whether time scaling up progress thus far dictates adding frames
+ if (inputChannel.mTimeToAdd > inputChannel.mTimeAlreadyAdded) {
+ double progress = ((double) decoderWrapper.mBufferInfo.presentationTimeUs) /
+ ((double) (inputChannel.mInputEndTimeUs - inputChannel.mVideoInputStartTimeUs));
+ if (Math.round(progress * inputChannel.mTimeToAdd) > inputChannel.mTimeAlreadyAdded) {
+ double timeLeftToAdd = inputChannel.mTimeToAdd - inputChannel.mTimeAlreadyAdded;
+ double timeLeft = inputChannel.mInputEndTimeUs - decoderWrapper.mBufferInfo.presentationTimeUs;
+ long framesToAdd = Math.round(((double) timeLeftToAdd) / ((double) timeLeft));
+ if (framesToAdd > 0) {
+ decoderWrapper.mOutputSurface.duplicateTextures(framesToAdd);
+ TLog.v(TAG, "Scaling up channel " + channelName + " framesToAdd=" + framesToAdd );
+ inputChannel.mTimeAlreadyAdded += framesToAdd * mFrameLength;
+ }
+ }
+ }
+
+ // If we are cutting a frame make adjustments to the offset and cut amounts
+ if (cutFrame) {
+ TLog.v(TAG, "Scaling down channel " + channelName + " skipping buffer");
+ inputChannel.mFrameWasCut = true;
+ inputChannel.mVideoInputAcutalEndTimeUs = bufferInputEndTime;
+ decoderWrapper.mDecoder.releaseOutputBuffer(result, false);
+
+ // Otherwise prepare the texture for rendering
+ } else {
+ decoderWrapper.mDecoder.releaseOutputBuffer(result, true);
+ decoderWrapper.mOutputSurface.awaitNewImage();
+ decoderWrapper.filterTick(mOutputPresentationTimeDecodedUs);
+ ++mTexturesReady;
+ consumed = true;
+ mOutputPresentationTimeDecodedUs = bufferOutputTime;
+ TLog.v(TAG, "Texture ready channel " + channelName + " mOutputPresentationTimeDecodedUs=" + mOutputPresentationTimeDecodedUs);
+ inputChannel.mVideoInputAcutalEndTimeUs = bufferInputEndTime;
+ }
+
+ // Seeking - release it without rendering
+ } else {
+ TLog.v(TAG, "Skipping video on channel" + channelName);
+ decoderWrapper.mDecoder.releaseOutputBuffer(result, false);
+ inputChannel.mVideoInputAcutalEndTimeUs = bufferInputEndTime;
+ mOutputPresentationTimeDecodedUs = bufferOutputEndTime;
+
+ }
+ }
+ }
+ }
+ }
+
+
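+ // Once every decoder has reached stream or segment EOS the segment is complete;
+ // on the last segment, propagate end-of-stream to the encoder exactly once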
+ if (allDecodersEndOfStream()) {
+ if (mIsLastSegment && !mIsSegmentFinished)
+ mEncoder.signalEndOfInputStream();
+ mIsSegmentFinished = true;
+ }
+
+
+ // If all textures have been accumulated draw the image and send it to the encoder
+ if (mTexturesReady >= mTextures && mTextures > 0) {
+ // Wait for Canvas
+ if (mCanvasWrapper != null && !mCanvasWrapper.mOutputSurface.isTextureReady()) {
+ mCanvasWrapper.draw(info);
+ mCanvasWrapper.mOutputSurface.updateTexture();
+ }
+ mTextureRender.drawFrame();
+
+ TLog.v(TAG, "Encoded video " + mOutputPresentationTimeDecodedUs + " for decoder ");
+ mEncoderInputSurfaceWrapper.setPresentationTime(mOutputPresentationTimeDecodedUs * 1000); // microseconds -> nanoseconds for EGL
+ mEncoderInputSurfaceWrapper.swapBuffers();
+ mTexturesReady = 0;
+ mOutputPresentationTimeEncodedUs += mFrameLength;
+ mOutputPresentationTimeDecodedUs = Math.max(mFrameLength + mOutputPresentationTimeDecodedUs, mOutputPresentationTimeDecodedUs);
+ }
+
+ return consumed ? DRAIN_STATE_CONSUMED : DRAIN_STATE_NONE;
+ }
+
+ boolean allDecodersEndOfStream () {
+ boolean isDecoderEndOfStream = true;
+ for (Map.Entry<String, DecoderWrapper> decoderWrapperEntry : mDecoderWrappers.entrySet()) {
+ if (!(decoderWrapperEntry.getValue().mIsDecoderEOS || decoderWrapperEntry.getValue().mIsSegmentEOS))
+ isDecoderEndOfStream = false;
+ }
+ return isDecoderEndOfStream;
+ }
+
+ private int drainEncoder(long timeoutUs) {
+ if (mIsEncoderEOS) return DRAIN_STATE_NONE;
+ int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
+ switch (result) {
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ return DRAIN_STATE_NONE;
+ case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+ if (mActualOutputFormat != null)
+ throw new RuntimeException("Video output format changed twice.");
+ mActualOutputFormat = mEncoder.getOutputFormat();
+ mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
+ return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
+ case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+ mEncoderOutputBuffers = mEncoder.getOutputBuffers();
+ return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
+ }
+ if (mActualOutputFormat == null) {
+ throw new RuntimeException("Could not determine actual output format.");
+ }
+
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ mIsEncoderEOS = true;
+ mIsSegmentFinished = true;
+ mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
+ }
+ if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
+ // SPS or PPS, which should be passed by MediaFormat.
+ mEncoder.releaseOutputBuffer(result, false);
+ return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
+ }
+
+ mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
+ mEncoder.releaseOutputBuffer(result, false);
+ return DRAIN_STATE_CONSUMED;
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/Android16By9FormatStrategy.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/Android16By9FormatStrategy.java
new file mode 100644
index 0000000..9ac8d68
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/Android16By9FormatStrategy.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.util.Log;
+
+public class Android16By9FormatStrategy implements MediaFormatStrategy {
+
+ public static final int AUDIO_BITRATE_AS_IS = -1;
+ public static final int AUDIO_CHANNELS_AS_IS = -1;
+ private static final String TAG = "Android16By9FmtStrategy";
+ private final int mPSize;
+ private final int mVideoBitrate;
+ private final int mAudioBitrate;
+ private final int mAudioChannels;
+
+ public Android16By9FormatStrategy(int scale, int videoBitrate) {
+ this(scale, videoBitrate, AUDIO_BITRATE_AS_IS, AUDIO_CHANNELS_AS_IS);
+ }
+
+ public Android16By9FormatStrategy(int pSize, int videoBitrate, int audioBitrate, int audioChannels) {
+ mPSize = pSize;
+ mVideoBitrate = videoBitrate;
+ mAudioBitrate = audioBitrate;
+ mAudioChannels = audioChannels;
+ }
+
+ @Override
+ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
+ int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
+ int targetShorter = mPSize;
+
+ // WJ: force a 16:9 aspect ratio - this stretches the video when the source
+ // is not 16:9. If you do not want to force an aspect ratio, use
+ // createAndroidStrategy720P instead.
+ int targetLonger = mPSize * 16 / 9;
+
+ int longer, shorter, outWidth, outHeight;
+ if (width >= height) {
+ longer = width;
+ shorter = height;
+ outWidth = targetLonger;
+ outHeight = targetShorter;
+ } else {
+ shorter = width;
+ longer = height;
+ outWidth = targetShorter;
+ outHeight = targetLonger;
+ }
+ if (allowPassthru && shorter <= targetShorter) {
+ Log.d(TAG, "This video's height is less or equal to " + targetShorter + ",pass-through. (" + width + "x"
+ + height + ")");
+ return null;
+ }
+ MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
+ // From Nexus 4 Camera in 720p
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+ return format;
+ }
+
+ @Override
+ public MediaFormat createAudioOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ if (allowPassthru && mAudioBitrate == AUDIO_BITRATE_AS_IS && mAudioChannels == AUDIO_CHANNELS_AS_IS)
+ return null;
+
+ // Use original sample rate, as resampling is not supported yet.
+ final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
+ inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
+ mAudioChannels == AUDIO_CHANNELS_AS_IS ? inputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
+ : mAudioChannels);
+ format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
+ return format;
+ }
+}
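
A short usage sketch for the strategy above: with pSize = 720 it always emits 1280x720 (or 720x1280 for portrait input), stretching any source that is not 16:9. The bitrate here is illustrative only.

import android.media.MediaFormat;
import net.ypresto.androidtranscoder.format.Android16By9FormatStrategy;
import net.ypresto.androidtranscoder.format.MediaFormatStrategy;

class SixteenByNineExample {
    static MediaFormat pick(MediaFormat input) {
        // 720-pixel short side, 8 Mbps video; audio is copied as-is.
        MediaFormatStrategy strategy = new Android16By9FormatStrategy(720, 8 * 1000 * 1000);
        // null means the input is already small enough to pass through untouched.
        return strategy.createVideoOutputFormat(input, true);
    }
}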
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/Android720pFormatStrategy.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/Android720pFormatStrategy.java
new file mode 100644
index 0000000..67606e7
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/Android720pFormatStrategy.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.util.Log;
+
+public class Android720pFormatStrategy implements MediaFormatStrategy {
+ public static final int AUDIO_BITRATE_AS_IS = -1;
+ public static final int AUDIO_CHANNELS_AS_IS = -1;
+ private static final String TAG = "720pFormatStrategy";
+ private static final int LONGER_LENGTH = 1280;
+ private static final int SHORTER_LENGTH = 720;
+ public static final int DEFAULT_VIDEO_BITRATE = 8000 * 1000; // From Nexus 4 Camera in 720p
+ private final int mVideoBitrate;
+ private final int mAudioBitrate;
+ private final int mAudioChannels;
+
+ public Android720pFormatStrategy() {
+ this(DEFAULT_VIDEO_BITRATE);
+ }
+
+ public Android720pFormatStrategy(int videoBitrate) {
+ this(videoBitrate, AUDIO_BITRATE_AS_IS, AUDIO_CHANNELS_AS_IS);
+ }
+
+ public Android720pFormatStrategy(int videoBitrate, int audioBitrate, int audioChannels) {
+ mVideoBitrate = videoBitrate;
+ mAudioBitrate = audioBitrate;
+ mAudioChannels = audioChannels;
+ }
+
+ @Override
+ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
+ int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
+ int longer, shorter, outWidth, outHeight;
+ if (width >= height) {
+ longer = width;
+ shorter = height;
+ outWidth = LONGER_LENGTH;
+ outHeight = SHORTER_LENGTH;
+ } else {
+ shorter = width;
+ longer = height;
+ outWidth = SHORTER_LENGTH;
+ outHeight = LONGER_LENGTH;
+ }
+ if (longer * 9 != shorter * 16) {
+ throw new OutputFormatUnavailableException(
+ "This video is not 16:9 and cannot be transcoded. (" + width + "x" + height + ")");
+ }
+ if (allowPassthru && shorter <= SHORTER_LENGTH) {
+ Log.d(TAG, "This video is less or equal to 720p, pass-through. (" + width + "x" + height + ")");
+ return null;
+ }
+ MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
+ // From Nexus 4 Camera in 720p
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+ return format;
+ }
+
+ @Override
+ public MediaFormat createAudioOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ if (allowPassthru && mAudioBitrate == AUDIO_BITRATE_AS_IS && mAudioChannels == AUDIO_CHANNELS_AS_IS)
+ return null;
+ // Use original sample rate, as resampling is not supported yet.
+ final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
+ inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
+ mAudioChannels == AUDIO_CHANNELS_AS_IS ? inputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
+ : mAudioChannels);
+ format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
+ return format;
+ }
+}
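
Because this strategy throws OutputFormatUnavailableException for anything that is not exactly 16:9, a caller will likely want a fallback. One possible sketch; the choice of AndroidFormatStrategy as the fallback is an assumption, not something this diff prescribes:

import android.media.MediaFormat;
import net.ypresto.androidtranscoder.format.Android720pFormatStrategy;
import net.ypresto.androidtranscoder.format.AndroidFormatStrategy;
import net.ypresto.androidtranscoder.format.OutputFormatUnavailableException;

class Strict720pExample {
    static MediaFormat pick(MediaFormat input) {
        try {
            return new Android720pFormatStrategy().createVideoOutputFormat(input, true);
        } catch (OutputFormatUnavailableException e) {
            // Fall back to the aspect-preserving strategy for non-16:9 sources.
            return new AndroidFormatStrategy(720, Android720pFormatStrategy.DEFAULT_VIDEO_BITRATE)
                    .createVideoOutputFormat(input, true);
        }
    }
}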
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/AndroidFormatStrategy.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/AndroidFormatStrategy.java
new file mode 100644
index 0000000..fa40101
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/AndroidFormatStrategy.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.util.Log;
+import net.ypresto.androidtranscoder.engine.MediaFormatValidator;
+
+public class AndroidFormatStrategy implements MediaFormatStrategy {
+
+ public static final int AUDIO_BITRATE_AS_IS = -1;
+ public static final int AUDIO_CHANNELS_AS_IS = -1;
+ private static final String TAG = "AndroidFormatStrategy";
+ private int mPSize;
+ private final int mVideoBitrate;
+ private final int mAudioBitrate;
+ private final int mAudioChannels;
+
+ public AndroidFormatStrategy(int scale, int videoBitrate) {
+ this(scale, videoBitrate, AUDIO_BITRATE_AS_IS, AUDIO_CHANNELS_AS_IS);
+ }
+
+ public AndroidFormatStrategy(int pSize, int videoBitrate, int audioBitrate, int audioChannels) {
+ mPSize = pSize;
+ mVideoBitrate = videoBitrate;
+ mAudioBitrate = audioBitrate;
+ mAudioChannels = audioChannels;
+ }
+
+ @Override
+ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
+ int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
+ int longer, shorter, outWidth, outHeight;
+ if (width >= height) {
+ longer = width;
+ shorter = height;
+ outWidth = mPSize * width / height;
+ outHeight = mPSize;
+ } else {
+ shorter = width;
+ longer = height;
+ outWidth = mPSize;
+ outHeight = mPSize * height / width;
+ }
+
+ boolean isSupported = MediaFormatValidator.validateResolution(outWidth, outHeight);
+
+ // Ultrawide sources are often only supported at 1080p, so retry at 1080 if 720 is not
+ // supported (currently disabled; isSupported is unused while this block stays commented out):
+ // if (!isSupported)
+ // if (mPSize == 720) {
+ // mPSize = 1080;
+ // return createVideoOutputFormat(inputFormat, allowPassthru);
+ // } else
+ // throw new OutputFormatUnavailableException("The output height (" + outHeight
+ // + ") and width (" + outWidth + ") cannot be transcoded");
+
+ if (allowPassthru && shorter <= mPSize) {
+ Log.d(TAG, "This video's height is less or equal to " + mPSize + ", pass-through. (" + width + "x" + height
+ + ")");
+ return null;
+ }
+ MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
+ // From Nexus 4 Camera in 720p
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+ return format;
+ }
+
+ @Override
+ public MediaFormat createAudioOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ if (allowPassthru && mAudioBitrate == AUDIO_BITRATE_AS_IS && mAudioChannels == AUDIO_CHANNELS_AS_IS)
+ return null;
+
+ // Use original sample rate, as resampling is not supported yet.
+ final MediaFormat format = MediaFormat.createAudioFormat(MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC,
+ inputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
+ mAudioChannels == AUDIO_CHANNELS_AS_IS ? inputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT)
+ : mAudioChannels);
+ format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mAudioBitrate);
+ return format;
+ }
+}
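
The integer math above pins the shorter side to mPSize and scales the longer side proportionally. A standalone sketch of the same arithmetic with two worked examples (the helper class is illustrative):

class AspectScaleSketch {
    // Mirrors AndroidFormatStrategy.createVideoOutputFormat(): the shorter
    // side becomes pSize and the longer side is scaled proportionally.
    static int[] scale(int width, int height, int pSize) {
        if (width >= height) {
            return new int[] { pSize * width / height, pSize };
        }
        return new int[] { pSize, pSize * height / width };
    }

    public static void main(String[] args) {
        // 1920x1080 -> 1280x720; 1440x1080 (4:3) -> 960x720.
        System.out.println(java.util.Arrays.toString(scale(1920, 1080, 720)));
        System.out.println(java.util.Arrays.toString(scale(1440, 1080, 720)));
    }
}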
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/ExportPreset960x540Strategy.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/ExportPreset960x540Strategy.java
new file mode 100644
index 0000000..19eec40
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/ExportPreset960x540Strategy.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+import android.media.MediaFormat;
+import android.util.Log;
+
+ /**
+ * Created by yuya.tanaka on 2014/11/20.
+ */
+class ExportPreset960x540Strategy implements MediaFormatStrategy {
+ private static final String TAG = "ExportPst960x540Strat";
+
+ @Override
+ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ // TODO: detect non-baseline profile and throw exception
+ int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
+ int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
+ MediaFormat outputFormat = MediaFormatPresets.getExportPreset960x540(width, height);
+ int outWidth = outputFormat.getInteger(MediaFormat.KEY_WIDTH);
+ int outHeight = outputFormat.getInteger(MediaFormat.KEY_HEIGHT);
+ Log.d(TAG, String.format("inputFormat: %dx%d => outputFormat: %dx%d", width, height, outWidth, outHeight));
+ return outputFormat;
+ }
+
+ @Override
+ public MediaFormat createAudioOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
+ // TODO
+ return null;
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatExtraConstants.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatExtraConstants.java
new file mode 100644
index 0000000..7f34adc
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatExtraConstants.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+public class MediaFormatExtraConstants {
+ // from MediaFormat of API level >= 21, but might be usable in older APIs as native code implementation exists.
+ // https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/ACodec.cpp#2621
+ // NOTE: native code enforces baseline profile.
+ // https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/ACodec.cpp#2638
+ /** For encoder parameter. Use value of MediaCodecInfo.CodecProfileLevel.AVCProfile* . */
+ public static final String KEY_PROFILE = "profile";
+
+ // from https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/ACodec.cpp#2623
+ /** For encoder parameter. Use value of MediaCodecInfo.CodecProfileLevel.AVCLevel* . */
+ public static final String KEY_LEVEL = "level";
+
+ // from https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/MediaCodec.cpp#2197
+ /** Included in MediaFormat from {@link android.media.MediaExtractor#getTrackFormat(int)}. Value is {@link java.nio.ByteBuffer}. */
+ public static final String KEY_AVC_SPS = "csd-0";
+ /** Included in MediaFormat from {@link android.media.MediaExtractor#getTrackFormat(int)}. Value is {@link java.nio.ByteBuffer}. */
+ public static final String KEY_AVC_PPS = "csd-1";
+
+ /**
+ * For decoder parameter and included in MediaFormat from {@link android.media.MediaExtractor#getTrackFormat(int)}.
+ * Decoder rotates specified degrees before rendering video to surface.
+ * NOTE: Only included in track format of API >= 21.
+ */
+ public static final String KEY_ROTATION_DEGREES = "rotation-degrees";
+
+ // Video formats
+ // from MediaFormat of API level >= 21
+ public static final String MIMETYPE_VIDEO_AVC = "video/avc";
+ public static final String MIMETYPE_VIDEO_H263 = "video/3gpp";
+ public static final String MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
+
+ // Audio formats
+ // from MediaFormat of API level >= 21
+ public static final String MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";
+
+ private MediaFormatExtraConstants() {
+ throw new RuntimeException();
+ }
+}
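
A small sketch of reading one of these keys off an extractor's track format; the containsKey guard matters because, as noted above, rotation-degrees only appears in track formats on API >= 21 (the RotationProbe class is illustrative):

import android.media.MediaExtractor;
import android.media.MediaFormat;
import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants;

class RotationProbe {
    // rotation-degrees is only present in track formats on API >= 21,
    // so guard with containsKey() before reading it.
    static int rotationOf(MediaExtractor extractor, int trackIndex) {
        MediaFormat format = extractor.getTrackFormat(trackIndex);
        return format.containsKey(MediaFormatExtraConstants.KEY_ROTATION_DEGREES)
                ? format.getInteger(MediaFormatExtraConstants.KEY_ROTATION_DEGREES)
                : 0;
    }
}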
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatPresets.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatPresets.java
new file mode 100644
index 0000000..086bcd3
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatPresets.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+
+// Refer for example: https://gist.github.com/wobbals/3990442
+// Refer for preferred parameters: https://developer.apple.com/library/ios/documentation/networkinginternet/conceptual/streamingmediaguide/UsingHTTPLiveStreaming/UsingHTTPLiveStreaming.html#//apple_ref/doc/uid/TP40008332-CH102-SW8
+// Refer for available keys: (ANDROID ROOT)/media/libstagefright/ACodec.cpp
+public class MediaFormatPresets {
+ private static final int LONGER_LENGTH_960x540 = 960;
+
+ private MediaFormatPresets() {
+ }
+
+ // preset similar to iOS SDK's AVAssetExportPreset960x540
+ @Deprecated
+ public static MediaFormat getExportPreset960x540() {
+ MediaFormat format = MediaFormat.createVideoFormat("video/avc", 960, 540);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, 5500 * 1000);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+ return format;
+ }
+
+ /**
+ * Preset similar to iOS SDK's AVAssetExportPreset960x540.
+ * Note that the encoding resolutions of this preset are not supported on all devices, e.g. the Nexus 4.
+ * On unsupported devices the encoded video stream will be broken without any exception being thrown.
+ * @param originalWidth Input video width.
+ * @param originalHeight Input video height.
+ * @return MediaFormat instance, or null if pass through.
+ */
+ public static MediaFormat getExportPreset960x540(int originalWidth, int originalHeight) {
+ int longerLength = Math.max(originalWidth, originalHeight);
+ int shorterLength = Math.min(originalWidth, originalHeight);
+
+ if (longerLength <= LONGER_LENGTH_960x540) return null; // don't upscale
+
+ int residue = LONGER_LENGTH_960x540 * shorterLength % longerLength;
+ if (residue != 0) {
+ double ambiguousShorter = (double) LONGER_LENGTH_960x540 * shorterLength / longerLength;
+ throw new OutputFormatUnavailableException(String.format(
+ "Could not fit to integer, original: (%d, %d), scaled: (%d, %f)",
+ longerLength, shorterLength, LONGER_LENGTH_960x540, ambiguousShorter));
+ }
+
+ int scaledShorter = LONGER_LENGTH_960x540 * shorterLength / longerLength;
+ int width, height;
+ if (originalWidth >= originalHeight) {
+ width = LONGER_LENGTH_960x540;
+ height = scaledShorter;
+ } else {
+ width = scaledShorter;
+ height = LONGER_LENGTH_960x540;
+ }
+
+ MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, 5500 * 1000);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
+ return format;
+ }
+}
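
A usage sketch for the preset above, showing both the null pass-through result and the integer-fit exception; treating the failure as pass-through is just one possible policy, and the wrapper is illustrative:

import android.media.MediaFormat;
import net.ypresto.androidtranscoder.format.MediaFormatPresets;
import net.ypresto.androidtranscoder.format.OutputFormatUnavailableException;

class PresetExample {
    static MediaFormat preset(int width, int height) {
        try {
            // Returns null when the longer side is already <= 960 (no upscaling);
            // e.g. 1280x720 scales cleanly to 960x540.
            return MediaFormatPresets.getExportPreset960x540(width, height);
        } catch (OutputFormatUnavailableException e) {
            // The scaled shorter side was fractional; fall back to pass-through here.
            return null;
        }
    }
}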
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatStrategy.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatStrategy.java
new file mode 100644
index 0000000..2a6b465
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatStrategy.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+import android.media.MediaFormat;
+
+public interface MediaFormatStrategy {
+
+ /**
+ * Returns preferred video format for encoding.
+ *
+ * @param inputFormat MediaFormat from MediaExtractor, contains csd-0/csd-1.
+ * @return null for passthrough.
+ * @throws OutputFormatUnavailableException if input could not be transcoded because of restrictions.
+ */
+ public MediaFormat createVideoOutputFormat(MediaFormat inputFormat, boolean allowPassthru);
+
+ /**
+ * Caution: this method should return null currently.
+ *
+ * @return null for passthrough.
+ * @throws OutputFormatUnavailableException if input could not be transcoded because of restrictions.
+ */
+ public MediaFormat createAudioOutputFormat(MediaFormat inputFormat, boolean allowPassthru);
+
+}
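
For illustration, a minimal custom implementation of this interface under the same contract (null means pass-through); the 640x360 output size and 2 Mbps bitrate are arbitrary choices, not recommendations from this diff:

import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import net.ypresto.androidtranscoder.format.MediaFormatStrategy;

class FixedSizeStrategy implements MediaFormatStrategy {
    @Override
    public MediaFormat createVideoOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
        MediaFormat format = MediaFormat.createVideoFormat("video/avc", 640, 360);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 2 * 1000 * 1000);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        return format;
    }

    @Override
    public MediaFormat createAudioOutputFormat(MediaFormat inputFormat, boolean allowPassthru) {
        return null; // pass audio through, per the caution above
    }
}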
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatStrategyPresets.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatStrategyPresets.java
new file mode 100644
index 0000000..4efc454
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/MediaFormatStrategyPresets.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+public class MediaFormatStrategyPresets {
+ /**
+ * @deprecated Use {@link #createExportPreset960x540Strategy()}.
+ */
+ @Deprecated
+ public static final MediaFormatStrategy EXPORT_PRESET_960x540 = new ExportPreset960x540Strategy();
+
+ /**
+ * Preset based on Nexus 4 camera recording with 720p quality.
+ * This preset is ensured to work on any Android >=4.3 device by Android CTS
+ * (if the codec is available).
+ * Default bitrate is 8Mbps; use {@link #createAndroid720pStrategy(int)} to
+ * specify a different bitrate.
+ */
+ public static MediaFormatStrategy createAndroid720pStrategy() {
+ return new Android720pFormatStrategy();
+ }
+
+ /**
+ * Preset based on Nexus 4 camera recording with 720p quality.
+ * This preset is ensured to work on any Android >=4.3 device by Android CTS
+ * (if the codec is available).
+ * Audio track will be copied as-is.
+ *
+ * @param bitrate Preferred bitrate for video encoding.
+ */
+ public static MediaFormatStrategy createAndroid720pStrategy(int bitrate) {
+ return new Android720pFormatStrategy(bitrate);
+ }
+
+ /**
+ * Preset based on Nexus 4 camera recording with 720p quality.
+ * This preset is ensured to work on any Android >=4.3 device by Android CTS
+ * (if the codec is available).
+ * Note: audio transcoding is an experimental feature.
+ *
+ * @param bitrate Preferred bitrate for video encoding.
+ * @param audioBitrate Preferred bitrate for audio encoding.
+ * @param audioChannels Output audio channels.
+ */
+ public static MediaFormatStrategy createAndroid720pStrategy(int bitrate, int audioBitrate, int audioChannels) {
+ return new Android720pFormatStrategy(bitrate, audioBitrate, audioChannels);
+ }
+
+ /**
+ * Creates a 720p strategy that forces a 16:9 aspect ratio (8Mbps video).
+ * @param audioBitrate Preferred bitrate for audio encoding.
+ * @param audioChannels Output audio channels.
+ */
+ public static MediaFormatStrategy createAndroid16x9Strategy720P(int audioBitrate, int audioChannels) {
+ return new Android16By9FormatStrategy(720, 8000000, audioBitrate, audioChannels);
+ }
+
+ /**
+ * Creates a 1080p strategy that forces a 16:9 aspect ratio (10Mbps video).
+ * @param audioBitrate Preferred bitrate for audio encoding.
+ * @param audioChannels Output audio channels.
+ */
+ public static MediaFormatStrategy createAndroid16x9Strategy1080P(int audioBitrate, int audioChannels) {
+ return new Android16By9FormatStrategy(1080, 10000000, audioBitrate, audioChannels);
+ }
+
+ /**
+ * Creates an aspect-preserving strategy with a 720-pixel shorter side (8Mbps video).
+ * @param audioBitrate Preferred bitrate for audio encoding.
+ * @param audioChannels Output audio channels.
+ */
+ public static MediaFormatStrategy createAndroidStrategy720P(int audioBitrate, int audioChannels) {
+ return new AndroidFormatStrategy(720, 8000000, audioBitrate, audioChannels);
+ }
+
+ /**
+ * Creates an aspect-preserving strategy with a 1080-pixel shorter side (10Mbps video).
+ * @param audioBitrate Preferred bitrate for audio encoding.
+ * @param audioChannels Output audio channels.
+ */
+ public static MediaFormatStrategy createAndroidStrategy1080P(int audioBitrate, int audioChannels) {
+ return new AndroidFormatStrategy(1080, 10000000, audioBitrate, audioChannels);
+ }
+
+ /**
+ * Preset similar to iOS SDK's AVAssetExportPreset960x540.
+ * Note that the encoding resolutions of this preset are not supported on all
+ * devices, e.g. the Nexus 4.
+ * On unsupported devices the encoded video stream will be broken without any
+ * exception being thrown.
+ */
+ public static MediaFormatStrategy createExportPreset960x540Strategy() {
+ return new ExportPreset960x540Strategy();
+ }
+
+ private MediaFormatStrategyPresets() {
+ }
+}
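
These factory methods produce the MediaFormatStrategy that is handed to MediaTranscoder.transcodeVideo(), as declared in the typings later in this diff. A brief sketch; the paths and listener are supplied by the caller:

import java.io.IOException;
import net.ypresto.androidtranscoder.MediaTranscoder;
import net.ypresto.androidtranscoder.format.MediaFormatStrategy;
import net.ypresto.androidtranscoder.format.MediaFormatStrategyPresets;

class TranscodeExample {
    static void run(String inPath, String outPath, MediaTranscoder.Listener listener) throws IOException {
        MediaFormatStrategy strategy = MediaFormatStrategyPresets.createAndroid720pStrategy();
        // transcodeVideo(String, String, MediaFormatStrategy, Listener) returns a
        // Future that can be used to cancel the job; ignored in this sketch.
        MediaTranscoder.getInstance().transcodeVideo(inPath, outPath, strategy, listener);
    }
}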
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/OutputFormatUnavailableException.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/OutputFormatUnavailableException.java
new file mode 100644
index 0000000..4fb9b33
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/format/OutputFormatUnavailableException.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.format;
+
+public class OutputFormatUnavailableException extends RuntimeException {
+ public OutputFormatUnavailableException(String detailMessage) {
+ super(detailMessage);
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/AvcCsdUtils.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/AvcCsdUtils.java
new file mode 100644
index 0000000..d458f66
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/AvcCsdUtils.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2015 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.utils;
+
+import android.media.MediaFormat;
+
+import net.ypresto.androidtranscoder.format.MediaFormatExtraConstants;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
+public class AvcCsdUtils {
+ // Refer: https://android.googlesource.com/platform/frameworks/av/+/lollipop-release/media/libstagefright/MediaCodec.cpp#2198
+ // Refer: http://stackoverflow.com/a/2861340
+ private static final byte[] AVC_START_CODE_3 = {0x00, 0x00, 0x01};
+ private static final byte[] AVC_START_CODE_4 = {0x00, 0x00, 0x00, 0x01};
+ // Refer: http://www.cardinalpeak.com/blog/the-h-264-sequence-parameter-set/
+ private static final byte AVC_SPS_NAL = 103; // 0<<7 + 3<<5 + 7<<0
+
+ /**
+ * @return ByteBuffer contains SPS without NAL header.
+ */
+ public static ByteBuffer getSpsBuffer(MediaFormat format) {
+ ByteBuffer sourceBuffer = format.getByteBuffer(MediaFormatExtraConstants.KEY_AVC_SPS).asReadOnlyBuffer(); // might be direct buffer
+ ByteBuffer prefixedSpsBuffer = ByteBuffer.allocate(sourceBuffer.limit()).order(sourceBuffer.order());
+ prefixedSpsBuffer.put(sourceBuffer);
+ prefixedSpsBuffer.flip();
+
+ skipStartCode(prefixedSpsBuffer);
+ if (prefixedSpsBuffer.get() != AVC_SPS_NAL) {
+ throw new IllegalStateException("Got non SPS NAL data.");
+ }
+ return prefixedSpsBuffer.slice();
+ }
+
+ private static void skipStartCode(ByteBuffer prefixedSpsBuffer) {
+ byte[] prefix3 = new byte[3];
+ prefixedSpsBuffer.get(prefix3);
+ if (Arrays.equals(prefix3, AVC_START_CODE_3)) return;
+
+ byte[] prefix4 = Arrays.copyOf(prefix3, 4);
+ prefix4[3] = prefixedSpsBuffer.get();
+ if (Arrays.equals(prefix4, AVC_START_CODE_4)) return;
+ throw new IllegalStateException("AVC NAL start code does not found in csd.");
+ }
+
+ private AvcCsdUtils() {
+ throw new RuntimeException();
+ }
+}
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/AvcSpsUtils.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/AvcSpsUtils.java
new file mode 100644
index 0000000..3eafbed
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/AvcSpsUtils.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2016 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.utils;
+
+import java.nio.ByteBuffer;
+
+public class AvcSpsUtils {
+ public static byte getProfileIdc(ByteBuffer spsBuffer) {
+ // Refer: http://www.cardinalpeak.com/blog/the-h-264-sequence-parameter-set/
+ // profile_idc is the first byte of the SPS payload (the NAL header has already been stripped).
+ return spsBuffer.get(0);
+ }
+}
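
Putting AvcCsdUtils and AvcSpsUtils together: extract the SPS from a video track's MediaFormat and inspect profile_idc. The value 66 (AVC Baseline) comes from the H.264 spec, not from this diff:

import android.media.MediaFormat;
import java.nio.ByteBuffer;
import net.ypresto.androidtranscoder.utils.AvcCsdUtils;
import net.ypresto.androidtranscoder.utils.AvcSpsUtils;

class SpsProbe {
    static boolean isBaselineProfile(MediaFormat videoTrackFormat) {
        // getSpsBuffer() strips the start code and NAL header, so the
        // buffer begins directly at profile_idc.
        ByteBuffer sps = AvcCsdUtils.getSpsBuffer(videoTrackFormat);
        // profile_idc 66 == AVC Baseline (H.264 spec constant).
        return AvcSpsUtils.getProfileIdc(sps) == 66;
    }
}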
diff --git a/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/MediaExtractorUtils.java b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/MediaExtractorUtils.java
new file mode 100644
index 0000000..9197ca2
--- /dev/null
+++ b/packages/nativescript-transcoder/platforms/android/java/net/ypresto/androidtranscoder/utils/MediaExtractorUtils.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2014 Yuya Tanaka
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package net.ypresto.androidtranscoder.utils;
+
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+
+public class MediaExtractorUtils {
+
+ private MediaExtractorUtils() {
+ }
+
+ public static class TrackResult {
+
+ private TrackResult() {
+ }
+
+ public int mVideoTrackIndex;
+ public String mVideoTrackMime;
+ public MediaFormat mVideoTrackFormat;
+ public int mAudioTrackIndex;
+ public String mAudioTrackMime;
+ public MediaFormat mAudioTrackFormat;
+ }
+
+ public static TrackResult getFirstVideoAndAudioTrack(MediaExtractor extractor) {
+ TrackResult trackResult = new TrackResult();
+ trackResult.mVideoTrackIndex = -1;
+ trackResult.mAudioTrackIndex = -1;
+ int trackCount = extractor.getTrackCount();
+ for (int i = 0; i < trackCount; i++) {
+ MediaFormat format = extractor.getTrackFormat(i);
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ if (trackResult.mVideoTrackIndex < 0 && mime.startsWith("video/")) {
+ trackResult.mVideoTrackIndex = i;
+ trackResult.mVideoTrackMime = mime;
+ trackResult.mVideoTrackFormat = format;
+ } else if (trackResult.mAudioTrackIndex < 0 && mime.startsWith("audio/")) {
+ trackResult.mAudioTrackIndex = i;
+ trackResult.mAudioTrackMime = mime;
+ trackResult.mAudioTrackFormat = format;
+ }
+ if (trackResult.mVideoTrackIndex >= 0 && trackResult.mAudioTrackIndex >= 0)
+ break;
+ }
+ if (trackResult.mVideoTrackIndex < 0 && trackResult.mAudioTrackIndex < 0) {
+ throw new IllegalArgumentException("extractor does not contain video and/or audio tracks.");
+ }
+ return trackResult;
+ }
+}
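
Typical use of the helper above: configure a MediaExtractor, let getFirstVideoAndAudioTrack() locate the tracks, then check the -1 sentinels before selecting. Releasing the extractor is left to the caller in this sketch:

import android.media.MediaExtractor;
import java.io.IOException;
import net.ypresto.androidtranscoder.utils.MediaExtractorUtils;

class TrackProbe {
    static MediaExtractorUtils.TrackResult probe(String path) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        extractor.setDataSource(path);
        // Throws IllegalArgumentException when neither a video nor an audio track exists.
        MediaExtractorUtils.TrackResult tracks = MediaExtractorUtils.getFirstVideoAndAudioTrack(extractor);
        if (tracks.mVideoTrackIndex >= 0) extractor.selectTrack(tracks.mVideoTrackIndex);
        if (tracks.mAudioTrackIndex >= 0) extractor.selectTrack(tracks.mAudioTrackIndex);
        return tracks; // caller keeps the extractor and releases it when done
    }
}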
diff --git a/packages/nativescript-transcoder/platforms/android/nativescript_transcoder.aar b/packages/nativescript-transcoder/platforms/android/nativescript_transcoder.aar
new file mode 100644
index 0000000..46faccd
Binary files /dev/null and b/packages/nativescript-transcoder/platforms/android/nativescript_transcoder.aar differ
diff --git a/packages/nativescript-transcoder/project.json b/packages/nativescript-transcoder/project.json
new file mode 100644
index 0000000..b8dd094
--- /dev/null
+++ b/packages/nativescript-transcoder/project.json
@@ -0,0 +1,73 @@
+{
+ "name": "nativescript-transcoder",
+ "$schema": "../../node_modules/nx/schemas/project-schema.json",
+ "projectType": "library",
+ "sourceRoot": "packages/nativescript-transcoder",
+ "targets": {
+ "build": {
+ "executor": "@nrwl/js:tsc",
+ "options": {
+ "outputPath": "dist/packages/nativescript-transcoder",
+ "tsConfig": "packages/nativescript-transcoder/tsconfig.json",
+ "packageJson": "packages/nativescript-transcoder/package.json",
+ "main": "packages/nativescript-transcoder/index.d.ts",
+ "assets": [
+ "packages/nativescript-transcoder/*.md",
+ "packages/nativescript-transcoder/index.d.ts",
+ "LICENSE",
+ {
+ "glob": "**/*",
+ "input": "packages/nativescript-transcoder/platforms/",
+ "output": "./platforms/"
+ }
+ ],
+ "dependsOn": [
+ {
+ "target": "build.all",
+ "projects": "dependencies"
+ }
+ ]
+ }
+ },
+ "build.all": {
+ "executor": "nx:run-commands",
+ "options": {
+ "commands": [
+ "node tools/scripts/build-finish.ts nativescript-transcoder"
+ ],
+ "parallel": false
+ },
+ "outputs": [
+ "dist/packages/nativescript-transcoder"
+ ],
+ "dependsOn": [
+ {
+ "target": "build.all",
+ "projects": "dependencies"
+ },
+ {
+ "target": "build",
+ "projects": "self"
+ }
+ ]
+ },
+ "focus": {
+ "executor": "nx:run-commands",
+ "options": {
+ "commands": [
+ "nx g @nativescript/plugin-tools:focus-packages nativescript-transcoder"
+ ],
+ "parallel": false
+ }
+ },
+ "lint": {
+ "executor": "@nrwl/linter:eslint",
+ "options": {
+ "lintFilePatterns": [
+ "packages/nativescript-transcoder/**/*.ts"
+ ]
+ }
+ }
+ },
+ "tags": []
+}
diff --git a/packages/nativescript-transcoder/references.d.ts b/packages/nativescript-transcoder/references.d.ts
new file mode 100644
index 0000000..170d476
--- /dev/null
+++ b/packages/nativescript-transcoder/references.d.ts
@@ -0,0 +1,2 @@
+///
+///
diff --git a/packages/nativescript-transcoder/tsconfig.json b/packages/nativescript-transcoder/tsconfig.json
new file mode 100644
index 0000000..aed7323
--- /dev/null
+++ b/packages/nativescript-transcoder/tsconfig.json
@@ -0,0 +1,9 @@
+{
+ "extends": "../../tsconfig.base.json",
+ "compilerOptions": {
+ "outDir": "../../dist/out-tsc",
+ "rootDir": "."
+ },
+ "exclude": ["**/*.spec.ts", "**/*.test.ts", "angular"],
+ "include": ["**/*.ts", "references.d.ts"]
+}
diff --git a/packages/nativescript-transcoder/typings/android.d.ts b/packages/nativescript-transcoder/typings/android.d.ts
new file mode 100644
index 0000000..2dbbd68
--- /dev/null
+++ b/packages/nativescript-transcoder/typings/android.d.ts
@@ -0,0 +1,860 @@
+///
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export class BuildConfig {
+ public static class: java.lang.Class;
+ public static DEBUG: boolean;
+ public static LIBRARY_PACKAGE_NAME: string;
+ public static APPLICATION_ID: string;
+ public static BUILD_TYPE: string;
+ public static FLAVOR: string;
+ public static VERSION_CODE: number;
+ public static VERSION_NAME: string;
+ public constructor();
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export class MediaTranscoder {
+ public static class: java.lang.Class;
+ public transcodeVideo(
+ param0: string,
+ param1: string,
+ param2: net.ypresto.androidtranscoder.format.MediaFormatStrategy,
+ param3: net.ypresto.androidtranscoder.MediaTranscoder.Listener
+ ): java.util.concurrent.Future;
+ /** @deprecated */
+ public transcodeVideo(param0: java.io.FileDescriptor, param1: string, param2: net.ypresto.androidtranscoder.MediaTranscoder.Listener): java.util.concurrent.Future;
+ public transcodeVideo(
+ param0: net.ypresto.androidtranscoder.engine.TimeLine,
+ param1: string,
+ param2: net.ypresto.androidtranscoder.format.MediaFormatStrategy,
+ param3: net.ypresto.androidtranscoder.MediaTranscoder.Listener
+ ): java.util.concurrent.Future;
+ public transcodeVideo(
+ param0: java.io.FileDescriptor,
+ param1: string,
+ param2: net.ypresto.androidtranscoder.format.MediaFormatStrategy,
+ param3: net.ypresto.androidtranscoder.MediaTranscoder.Listener
+ ): java.util.concurrent.Future;
+ public static getInstance(): net.ypresto.androidtranscoder.MediaTranscoder;
+ }
+ export module MediaTranscoder {
+ export class Listener {
+ public static class: java.lang.Class;
+ /**
+ * Constructs a new instance of the net.ypresto.androidtranscoder.MediaTranscoder$Listener interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
+ */
+ public constructor(implementation: {
+ onTranscodeProgress(param0: number): void;
+ onTranscodeCompleted(): void;
+ onTranscodeCanceled(): void;
+ onTranscodeFailed(param0: java.lang.Exception): void;
+ });
+ public constructor();
+ public onTranscodeCompleted(): void;
+ public onTranscodeProgress(param0: number): void;
+ public onTranscodeFailed(param0: java.lang.Exception): void;
+ public onTranscodeCanceled(): void;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export class TLog {
+ public static class: java.lang.Class;
+ public static d(param0: string, param1: string, param2: java.lang.Throwable): void;
+ public static v(param0: string, param1: string): void;
+ public static setLevel(param0: number): void;
+ public static w(param0: string, param1: string): void;
+ public static d(param0: string, param1: string): void;
+ public static wtf(param0: string, param1: string): void;
+ public constructor();
+ public static e(param0: string, param1: string, param2: java.lang.Throwable): void;
+ public static e(param0: string, param1: string): void;
+ public static i(param0: string, param1: string, param2: java.lang.Throwable): void;
+ public static v(param0: string, param1: string, param2: java.lang.Throwable): void;
+ public static i(param0: string, param1: string): void;
+ public static w(param0: string, param1: string, param2: java.lang.Throwable): void;
+ public static wtf(param0: string, param1: string, param2: java.lang.Throwable): void;
+ public static setTags(param0: string): void;
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module compat {
+ export class MediaCodecBufferCompatWrapper {
+ public static class: java.lang.Class;
+ public getOutputBuffer(param0: number): java.nio.ByteBuffer;
+ public constructor(param0: globalAndroid.media.MediaCodec);
+ public getInputBuffer(param0: number): java.nio.ByteBuffer;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module compat {
+ export class MediaCodecListCompat {
+ public static class: java.lang.Class;
+ public static REGULAR_CODECS: number;
+ public static ALL_CODECS: number;
+ public getCodecInfos(): androidNative.Array;
+ public findEncoderForFormat(param0: globalAndroid.media.MediaFormat): string;
+ public constructor(param0: number);
+ public findDecoderForFormat(param0: globalAndroid.media.MediaFormat): string;
+ }
+ export module MediaCodecListCompat {
+ export class MediaCodecInfoIterator extends java.util.Iterator {
+ public static class: java.lang.Class;
+ public next(): globalAndroid.media.MediaCodecInfo;
+ public hasNext(): boolean;
+ public remove(): void;
+ }
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class AudioChannel {
+ public static class: java.lang.Class;
+ public static BUFFER_INDEX_END_OF_STREAM: number;
+ public setEndOfSegment(param0: string): void;
+ public finalize(): void;
+ public createFromExisting(
+ param0: java.util.LinkedHashMap,
+ param1: globalAndroid.media.MediaCodec,
+ param2: globalAndroid.media.MediaFormat
+ ): net.ypresto.androidtranscoder.engine.AudioChannel;
+ public sampleCountToOutputDurationUs(param0: number): number;
+ public getDeterminedFormat(): globalAndroid.media.MediaFormat;
+ public getBufferDurationUs(param0: string, param1: number): number;
+ public sampleCountToInputDurationUs(param0: number): number;
+ public constructor(param0: java.util.LinkedHashMap, param1: globalAndroid.media.MediaCodec, param2: globalAndroid.media.MediaFormat);
+ public removeBuffers(param0: string): void;
+ public drainDecoderBufferAndQueue(param0: string, param1: number, param2: java.lang.Long, param3: java.lang.Long, param4: number, param5: number): void;
+ public feedEncoder(param0: number): java.lang.Long;
+ public setMute(param0: string): void;
+ public setActualDecodedFormat(param0: globalAndroid.media.MediaFormat): void;
+ }
+ export module AudioChannel {
+ export class AudioBuffer {
+ public static class: java.lang.Class;
+ }
+ export class RemixResult {
+ public static class: java.lang.Class;
+ public mPresentationTime: number;
+ public mDuration: number;
+ public mBufferPosition: number;
+ public mBufferOverflowPosition: number;
+ }
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class AudioRemixer {
+ public static class: java.lang.Class;
+ public constructor();
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class AudioTrackTranscoder extends net.ypresto.androidtranscoder.engine.TrackTranscoder {
+ public static class: java.lang.Class;
+ public setupDecoders(
+ param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment,
+ param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle,
+ param2: number,
+ param3: number,
+ param4: number
+ ): void;
+ public getDeterminedFormat(): globalAndroid.media.MediaFormat;
+ public getOutputPresentationTimeEncodedUs(): number;
+ public releaseEncoder(): void;
+ public releaseDecoders(): void;
+ public getOutputPresentationTimeDecodedUs(): number;
+ public isSegmentFinished(): boolean;
+ public release(): void;
+ public setupEncoder(): void;
+ public setOutputPresentationTimeDecodedUs(param0: number): void;
+ public stepPipeline(param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment, param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle): boolean;
+ public constructor(
+ param0: java.util.LinkedHashMap,
+ param1: globalAndroid.media.MediaFormat,
+ param2: net.ypresto.androidtranscoder.engine.QueuedMuxer
+ );
+ }
+ export module AudioTrackTranscoder {
+ export class DecoderWrapper {
+ public static class: java.lang.Class;
+ }
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class InputSurface {
+ public static class: java.lang.Class;
+ public swapBuffers(): boolean;
+ public getHeight(): number;
+ public makeCurrent(): void;
+ public makeUnCurrent(): void;
+ public getWidth(): number;
+ public constructor(param0: globalAndroid.view.Surface);
+ public release(): void;
+ public getSurface(): globalAndroid.view.Surface;
+ public setPresentationTime(param0: number): void;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class InvalidOutputFormatException {
+ public static class: java.lang.Class;
+ public constructor(param0: string);
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class MediaFormatValidator {
+ public static class: java.lang.Class;
+ public static validateAudioOutputFormat(param0: globalAndroid.media.MediaFormat): void;
+ public constructor();
+ public static validateVideoOutputFormat(param0: globalAndroid.media.MediaFormat): void;
+ public static validateResolution(param0: number, param1: number): boolean;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class MediaTranscoderEngine {
+ public static class: java.lang.Class;
+ public getProgress(): number;
+ public constructor();
+ public transcodeVideo(param0: net.ypresto.androidtranscoder.engine.TimeLine, param1: string, param2: net.ypresto.androidtranscoder.format.MediaFormatStrategy): void;
+ public getProgressCallback(): net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.ProgressCallback;
+ public setProgressCallback(param0: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.ProgressCallback): void;
+ }
+ export module MediaTranscoderEngine {
+ export class ProgressCallback {
+ public static class: java.lang.Class;
+ /**
+ * Constructs a new instance of the net.ypresto.androidtranscoder.engine.MediaTranscoderEngine$ProgressCallback interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
+ */
+ public constructor(implementation: { onProgress(param0: number): void });
+ public constructor();
+ public onProgress(param0: number): void;
+ }
+ export class TranscodeThrottle {
+ public static class: java.lang.Class;
+ public step(): void;
+ public departicipate(param0: string): void;
+ public log(): void;
+ public participate(param0: string): void;
+ public constructor(param0: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine);
+ public canProceed(param0: string, param1: number, param2: boolean): boolean;
+ public startSegment(): void;
+ public shouldCancel(): boolean;
+ }
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class OutputSurface {
+ public static class: java.lang.Class;
+ public signalEndOfInputStream(): void;
+ public consumeDuplicateTexture(): boolean;
+ public isEndOfInputStream(): boolean;
+ public checkGlError(param0: string): void;
+ public getSurface(): globalAndroid.view.Surface;
+ public getOriginalSourceRect(): globalAndroid.graphics.RectF;
+ public getSourceRect(): globalAndroid.graphics.RectF;
+ public isDuplicateTexture(): boolean;
+ public onFrameAvailable(param0: globalAndroid.graphics.SurfaceTexture): void;
+ public setRotation(param0: number): void;
+ public setSourceRect(param0: globalAndroid.graphics.RectF): void;
+ public constructor();
+ public makeCurrent(): void;
+ public setSourceRotation(param0: number): void;
+ public setOriginalSourceRect(param0: globalAndroid.graphics.RectF): void;
+ public setDuplicateTextureReady(): void;
+ public updateTexture(): void;
+ public getTextureID(): number;
+ public constructor(param0: number, param1: number);
+ public getAlpha(): number;
+ public release(): void;
+ public setAlpha(param0: number): void;
+ public duplicateTextures(param0: number): void;
+ public getSourceRotation(): number;
+ public getDestRect(): globalAndroid.graphics.RectF;
+ public checkEglError(param0: string): void;
+ public getRotation(): number;
+ public isExtraTextures(): boolean;
+ public isTextureReady(): boolean;
+ public clearTextureReady(): void;
+ public setDestRect(param0: globalAndroid.graphics.RectF): void;
+ public awaitNewImage(): void;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class PassThroughTrackTranscoder extends net.ypresto.androidtranscoder.engine.TrackTranscoder {
+ public static class: java.lang.Class;
+ public setupDecoders(
+ param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment,
+ param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle,
+ param2: number,
+ param3: number,
+ param4: number
+ ): void;
+ public getDeterminedFormat(): globalAndroid.media.MediaFormat;
+ public getOutputPresentationTimeEncodedUs(): number;
+ public releaseEncoder(): void;
+ public releaseDecoders(): void;
+ public getOutputPresentationTimeDecodedUs(): number;
+ public constructor(
+ param0: globalAndroid.media.MediaExtractor,
+ param1: number,
+ param2: net.ypresto.androidtranscoder.engine.QueuedMuxer,
+ param3: net.ypresto.androidtranscoder.engine.QueuedMuxer.SampleType
+ );
+ public isSegmentFinished(): boolean;
+ public release(): void;
+ public setupEncoder(): void;
+ public setOutputPresentationTimeDecodedUs(param0: number): void;
+ public stepPipeline(param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment, param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle): boolean;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class QueuedMuxer {
+ public static class: java.lang.Class;
+ public setOutputFormat(param0: net.ypresto.androidtranscoder.engine.QueuedMuxer.SampleType, param1: globalAndroid.media.MediaFormat): void;
+ public writeSampleData(param0: net.ypresto.androidtranscoder.engine.QueuedMuxer.SampleType, param1: java.nio.ByteBuffer, param2: globalAndroid.media.MediaCodec.BufferInfo): void;
+ public constructor(param0: globalAndroid.media.MediaMuxer, param1: boolean, param2: boolean, param3: net.ypresto.androidtranscoder.engine.QueuedMuxer.Listener);
+ }
+ export module QueuedMuxer {
+ export class Listener {
+ public static class: java.lang.Class;
+ /**
+ * Constructs a new instance of the net.ypresto.androidtranscoder.engine.QueuedMuxer$Listener interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
+ */
+ public constructor(implementation: { onDetermineOutputFormat(): void });
+ public constructor();
+ public onDetermineOutputFormat(): void;
+ }
+ export class SampleInfo {
+ public static class: java.lang.Class;
+ }
+ export class SampleType {
+ public static class: java.lang.Class;
+ public static VIDEO: net.ypresto.androidtranscoder.engine.QueuedMuxer.SampleType;
+ public static AUDIO: net.ypresto.androidtranscoder.engine.QueuedMuxer.SampleType;
+ public static valueOf(param0: string): net.ypresto.androidtranscoder.engine.QueuedMuxer.SampleType;
+ public static values(): androidNative.Array;
+ }
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class TextureRender {
+ public static class: java.lang.Class;
+ public static saveFrame(param0: string, param1: number, param2: number): void;
+ public surfaceCreated(): void;
+ public checkGlError(param0: string): void;
+ public constructor(param0: java.util.List, param1: net.ypresto.androidtranscoder.engine.OutputSurface);
+ public drawFrame(): void;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class TimeLine {
+ public static class: java.lang.Class;
+ public getDuration(): java.lang.Long;
+ public addImageChannel(param0: string, param1: java.io.FileDescriptor): net.ypresto.androidtranscoder.engine.TimeLine;
+ public constructor(param0: number, param1: string);
+ public addAudioOnlyChannel(param0: string, param1: java.io.FileDescriptor): net.ypresto.androidtranscoder.engine.TimeLine;
+ public getChannels(): java.util.LinkedHashMap;
+ public addChannel(param0: string, param1: java.io.FileDescriptor, param2: net.ypresto.androidtranscoder.engine.TimeLine.ChannelType): net.ypresto.androidtranscoder.engine.TimeLine;
+ public getSegments(): java.util.List;
+ public constructor();
+ public createSegment(): net.ypresto.androidtranscoder.engine.TimeLine.Segment;
+ public addChannel(param0: string, param1: java.io.FileDescriptor): net.ypresto.androidtranscoder.engine.TimeLine;
+ public addVideoOnlyChannel(param0: string, param1: java.io.FileDescriptor): net.ypresto.androidtranscoder.engine.TimeLine;
+ public constructor(param0: number);
+ public prepare(): void;
+ }
+ export module TimeLine {
+ export class ChannelType {
+ public static class: java.lang.Class;
+ public static VIDEO: net.ypresto.androidtranscoder.engine.TimeLine.ChannelType;
+ public static AUDIO: net.ypresto.androidtranscoder.engine.TimeLine.ChannelType;
+ public static AUDIO_VIDEO: net.ypresto.androidtranscoder.engine.TimeLine.ChannelType;
+ public static IMAGE: net.ypresto.androidtranscoder.engine.TimeLine.ChannelType;
+ public static valueOf(param0: string): net.ypresto.androidtranscoder.engine.TimeLine.ChannelType;
+ public static values(): androidNative.Array;
+ }
+ export class Filter {
+ public static class: java.lang.Class;
+ public static OPACITY_UP_RAMP: net.ypresto.androidtranscoder.engine.TimeLine.Filter;
+ public static OPACITY_DOWN_RAMP: net.ypresto.androidtranscoder.engine.TimeLine.Filter;
+ public static MUTE: net.ypresto.androidtranscoder.engine.TimeLine.Filter;
+ public static SUPPRESS: net.ypresto.androidtranscoder.engine.TimeLine.Filter;
+ public static values(): androidNative.Array;
+ public static valueOf(param0: string): net.ypresto.androidtranscoder.engine.TimeLine.Filter;
+ }
+ export class InputChannel {
+ public static class: java.lang.Class;
+ public mLengthUs: java.lang.Long;
+ public mVideoInputStartTimeUs: java.lang.Long;
+ public mAudioInputStartTimeUs: java.lang.Long;
+ public mInputEndTimeUs: java.lang.Long;
+ public mVideoInputOffsetUs: java.lang.Long;
+ public mAudioInputOffsetUs: java.lang.Long;
+ public mVideoInputAcutalEndTimeUs: java.lang.Long; // [sic] mirrors the Java field's spelling
+ public mAudioInputAcutalEndTimeUs: java.lang.Long; // [sic] mirrors the Java field's spelling
+ public mVideoFrameLength: number;
+ public mSeekShortage: number;
+ public mDurationShortage: number;
+ public mFilter: net.ypresto.androidtranscoder.engine.TimeLine.Filter;
+ public mChannelType: net.ypresto.androidtranscoder.engine.TimeLine.ChannelType;
+ public mInputFileDescriptor: java.io.FileDescriptor;
+ public mTimeToCut: number;
+ public mTimeAlreadyCut: number;
+ public mTimeToAdd: number;
+ public mTimeAlreadyAdded: number;
+ public mFrameWasCut: boolean;
+ public mLastBufferPresentationTime: number;
+ public mMuteAudio: boolean;
+ }
+ export class Segment {
+ public static class: java.lang.Class;
+ public mOutputStartTimeUs: java.lang.Long;
+ public isLastSegment: boolean;
+ public seek(param0: string, param1: number): net.ypresto.androidtranscoder.engine.TimeLine.Segment;
+ public getDuration(): java.lang.Long;
+ public timeScale(param0: number): net.ypresto.androidtranscoder.engine.TimeLine.Segment;
+ public output(param0: string): net.ypresto.androidtranscoder.engine.TimeLine.Segment;
+ public timeLine(): net.ypresto.androidtranscoder.engine.TimeLine;
+ public getChannels(): java.util.LinkedHashMap;
+ public output(param0: string, param1: net.ypresto.androidtranscoder.engine.TimeLine.Filter): net.ypresto.androidtranscoder.engine.TimeLine.Segment;
+ public filter(param0: net.ypresto.androidtranscoder.engine.TimeLine.Filter): net.ypresto.androidtranscoder.engine.TimeLine.Segment;
+ public getAudioChannels(): java.util.LinkedHashMap;
+ public duration(param0: number): net.ypresto.androidtranscoder.engine.TimeLine.Segment;
+ public getImageChannels(): java.util.LinkedHashMap;
+ public getVideoChannels(): java.util.LinkedHashMap;
+ public start(param0: java.lang.Long, param1: java.lang.Long, param2: java.lang.Long, param3: java.lang.Long, param4: java.lang.Long): void;
+ public getSegmentChannel(param0: string): net.ypresto.androidtranscoder.engine.TimeLine.SegmentChannel;
+ }
+ export class SegmentChannel {
+ public static class: java.lang.Class;
+ public mChannel: net.ypresto.androidtranscoder.engine.TimeLine.InputChannel;
+ public mFilter: net.ypresto.androidtranscoder.engine.TimeLine.Filter;
+ public mTimeScale: java.lang.Long;
+ public mSeek: java.lang.Long;
+ public getAudioSeek(): java.lang.Long;
+ public seekRequestedAudio(): void;
+ public getVideoSeek(): java.lang.Long;
+ public seekRequestedVideo(): void;
+ }
+ }
+ }
+ }
+ }
+}
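+
+// Illustrative sketch (not generated): piecing together the fluent signatures
+// above, a minimal one-segment timeline might be built like this. `fd` is an
+// assumed java.io.FileDescriptor, and the duration unit (the *_Us fields
+// suggest microseconds) is unverified.
+//
+//   const timeline = new net.ypresto.androidtranscoder.engine.TimeLine()
+//     .addChannel('main', fd);
+//   timeline.createSegment()
+//     .output('main')
+//     .duration(5000000);
+//   timeline.prepare();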
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class TrackTranscoder {
+ public static class: java.lang.Class;
+ /**
+ * Constructs a new instance of the net.ypresto.androidtranscoder.engine.TrackTranscoder interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
+ */
+ public constructor(implementation: {
+ setupEncoder(): void;
+ setupDecoders(
+ param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment,
+ param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle,
+ param2: number,
+ param3: number,
+ param4: number
+ ): void;
+ getDeterminedFormat(): globalAndroid.media.MediaFormat;
+ stepPipeline(param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment, param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle): boolean;
+ getOutputPresentationTimeDecodedUs(): number;
+ getOutputPresentationTimeEncodedUs(): number;
+ setOutputPresentationTimeDecodedUs(param0: number): void;
+ isSegmentFinished(): boolean;
+ releaseDecoders(): void;
+ releaseEncoder(): void;
+ release(): void;
+ });
+ public constructor();
+ public setupDecoders(
+ param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment,
+ param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle,
+ param2: number,
+ param3: number,
+ param4: number
+ ): void;
+ public getDeterminedFormat(): globalAndroid.media.MediaFormat;
+ public getOutputPresentationTimeEncodedUs(): number;
+ public releaseEncoder(): void;
+ public releaseDecoders(): void;
+ public getOutputPresentationTimeDecodedUs(): number;
+ public isSegmentFinished(): boolean;
+ public release(): void;
+ public setupEncoder(): void;
+ public setOutputPresentationTimeDecodedUs(param0: number): void;
+ public stepPipeline(param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment, param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle): boolean;
+ }
+ }
+ }
+ }
+}
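+
+// Illustrative sketch (not generated): TrackTranscoder is declared with the
+// NativeScript implementation-object constructor, so a JS-side stub that
+// satisfies every member could look like the following.
+//
+//   const stub = new net.ypresto.androidtranscoder.engine.TrackTranscoder({
+//     setupEncoder() {},
+//     setupDecoders(segment, throttle, a, b, c) {},
+//     getDeterminedFormat() { return null; },
+//     stepPipeline(segment, throttle) { return false; },
+//     getOutputPresentationTimeDecodedUs() { return 0; },
+//     getOutputPresentationTimeEncodedUs() { return 0; },
+//     setOutputPresentationTimeDecodedUs(us) {},
+//     isSegmentFinished() { return true; },
+//     releaseDecoders() {},
+//     releaseEncoder() {},
+//     release() {},
+//   });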
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module engine {
+ export class VideoTrackTranscoder extends net.ypresto.androidtranscoder.engine.TrackTranscoder {
+ public static class: java.lang.Class;
+ public setupDecoders(
+ param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment,
+ param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle,
+ param2: number,
+ param3: number,
+ param4: number
+ ): void;
+ public getDeterminedFormat(): globalAndroid.media.MediaFormat;
+ public getOutputPresentationTimeEncodedUs(): number;
+ public releaseEncoder(): void;
+ public releaseDecoders(): void;
+ public getOutputPresentationTimeDecodedUs(): number;
+ public isSegmentFinished(): boolean;
+ public release(): void;
+ public setupEncoder(): void;
+ public setOutputPresentationTimeDecodedUs(param0: number): void;
+ public stepPipeline(param0: net.ypresto.androidtranscoder.engine.TimeLine.Segment, param1: net.ypresto.androidtranscoder.engine.MediaTranscoderEngine.TranscodeThrottle): boolean;
+ public constructor(
+ param0: java.util.LinkedHashMap,
+ param1: globalAndroid.media.MediaFormat,
+ param2: net.ypresto.androidtranscoder.engine.QueuedMuxer
+ );
+ }
+ export module VideoTrackTranscoder {
+ export class CanvasWrapper {
+ public static class: java.lang.Class;
+ public start(param0: number, param1: number, param2: number): void;
+ }
+ export class DecoderWrapper {
+ public static class: java.lang.Class;
+ public start(param0: number, param1: number, param2: number): void;
+ }
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class Android16By9FormatStrategy extends net.ypresto.androidtranscoder.format.MediaFormatStrategy {
+ public static class: java.lang.Class;
+ public static AUDIO_BITRATE_AS_IS: number;
+ public static AUDIO_CHANNELS_AS_IS: number;
+ public constructor(param0: number, param1: number, param2: number, param3: number);
+ public createAudioOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ public createVideoOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ public constructor(param0: number, param1: number);
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class Android720pFormatStrategy extends net.ypresto.androidtranscoder.format.MediaFormatStrategy {
+ public static class: java.lang.Class;
+ public static AUDIO_BITRATE_AS_IS: number;
+ public static AUDIO_CHANNELS_AS_IS: number;
+ public static DEFAULT_VIDEO_BITRATE: number;
+ public createAudioOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ public constructor();
+ public constructor(param0: number, param1: number, param2: number);
+ public constructor(param0: number);
+ public createVideoOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class AndroidFormatStrategy extends net.ypresto.androidtranscoder.format.MediaFormatStrategy {
+ public static class: java.lang.Class;
+ public static AUDIO_BITRATE_AS_IS: number;
+ public static AUDIO_CHANNELS_AS_IS: number;
+ public constructor(param0: number, param1: number, param2: number, param3: number);
+ public createAudioOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ public createVideoOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ public constructor(param0: number, param1: number);
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class ExportPreset960x540Strategy extends net.ypresto.androidtranscoder.format.MediaFormatStrategy {
+ public static class: java.lang.Class;
+ public createAudioOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ public createVideoOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class MediaFormatExtraConstants {
+ public static class: java.lang.Class;
+ public static KEY_PROFILE: string;
+ public static KEY_LEVEL: string;
+ public static KEY_AVC_SPS: string;
+ public static KEY_AVC_PPS: string;
+ public static KEY_ROTATION_DEGREES: string;
+ public static MIMETYPE_VIDEO_AVC: string;
+ public static MIMETYPE_VIDEO_H263: string;
+ public static MIMETYPE_VIDEO_VP8: string;
+ public static MIMETYPE_AUDIO_AAC: string;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class MediaFormatPresets {
+ public static class: java.lang.Class;
+ /** @deprecated */
+ public static getExportPreset960x540(): globalAndroid.media.MediaFormat;
+ public static getExportPreset960x540(param0: number, param1: number): globalAndroid.media.MediaFormat;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class MediaFormatStrategy {
+ public static class: java.lang.Class;
+ /**
+ * Constructs a new instance of the net.ypresto.androidtranscoder.format.MediaFormatStrategy interface with the provided implementation. An empty constructor exists calling super() when extending the interface class.
+ */
+ public constructor(implementation: {
+ createVideoOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ createAudioOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ });
+ public constructor();
+ public createAudioOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ public createVideoOutputFormat(param0: globalAndroid.media.MediaFormat, param1: boolean): globalAndroid.media.MediaFormat;
+ }
+ }
+ }
+ }
+}
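+
+// Illustrative sketch (not generated): a custom strategy can likewise be
+// supplied from JS. In upstream android-transcoder, returning null from
+// createVideoOutputFormat conventionally means "pass the track through
+// unchanged"; that convention, and the `allowPassthru` parameter name, are
+// assumptions not confirmed by these typings.
+//
+//   const passthrough = new net.ypresto.androidtranscoder.format.MediaFormatStrategy({
+//     createVideoOutputFormat(inputFormat, allowPassthru) { return null; },
+//     createAudioOutputFormat(inputFormat, allowPassthru) { return null; },
+//   });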
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class MediaFormatStrategyPresets {
+ public static class: java.lang.Class;
+ public static EXPORT_PRESET_960x540: net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createAndroidStrategy1080P(param0: number, param1: number): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createAndroidStrategy720P(param0: number, param1: number): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createExportPreset960x540Strategy(): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createAndroid720pStrategy(param0: number): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createAndroid720pStrategy(param0: number, param1: number, param2: number): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createAndroid720pStrategy(): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createAndroid16x9Strategy1080P(param0: number, param1: number): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ public static createAndroid16x9Strategy720P(param0: number, param1: number): net.ypresto.androidtranscoder.format.MediaFormatStrategy;
+ }
+ }
+ }
+ }
+}
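+
+// Illustrative sketch (not generated): typical callers would use a preset
+// factory rather than a custom strategy. The bitrate argument is an arbitrary
+// example value, and its unit (bps is assumed) is not stated in the typings.
+//
+//   const strategy = net.ypresto.androidtranscoder.format.MediaFormatStrategyPresets
+//     .createAndroid720pStrategy(8000000);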
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module format {
+ export class OutputFormatUnavailableException {
+ public static class: java.lang.Class;
+ public constructor(param0: string);
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module utils {
+ export class AvcCsdUtils {
+ public static class: java.lang.Class;
+ public static getSpsBuffer(param0: globalAndroid.media.MediaFormat): java.nio.ByteBuffer;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module utils {
+ export class AvcSpsUtils {
+ public static class: java.lang.Class;
+ public constructor();
+ public static getProfileIdc(param0: java.nio.ByteBuffer): number;
+ }
+ }
+ }
+ }
+}
+
+declare module net {
+ export module ypresto {
+ export module androidtranscoder {
+ export module utils {
+ export class MediaExtractorUtils {
+ public static class: java.lang.Class;
+ public static getFirstVideoAndAudioTrack(param0: globalAndroid.media.MediaExtractor): net.ypresto.androidtranscoder.utils.MediaExtractorUtils.TrackResult;
+ }
+ export module MediaExtractorUtils {
+ export class TrackResult {
+ public static class: java.lang.Class;
+ public mVideoTrackIndex: number;
+ public mVideoTrackMime: string;
+ public mVideoTrackFormat: globalAndroid.media.MediaFormat;
+ public mAudioTrackIndex: number;
+ public mAudioTrackMime: string;
+ public mAudioTrackFormat: globalAndroid.media.MediaFormat;
+ }
+ }
+ }
+ }
+ }
+}
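+
+// Illustrative sketch (not generated): probing a source for its first A/V
+// tracks. The extractor setup is an assumption; `fd` stands for an assumed
+// java.io.FileDescriptor.
+//
+//   const extractor = new android.media.MediaExtractor();
+//   extractor.setDataSource(fd);
+//   const tracks = net.ypresto.androidtranscoder.utils.MediaExtractorUtils
+//     .getFirstVideoAndAudioTrack(extractor);
+//   console.log(tracks.mVideoTrackMime, tracks.mAudioTrackMime);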
+
+//Generics information:
diff --git a/tools/assets/App_Resources/iOS/Podfile b/tools/assets/App_Resources/iOS/Podfile
index 3c8210b..f8d6b05 100644
--- a/tools/assets/App_Resources/iOS/Podfile
+++ b/tools/assets/App_Resources/iOS/Podfile
@@ -3,7 +3,9 @@ platform :ios, '13.0'
post_install do |installer|
installer.pods_project.targets.each do |target|
target.build_configurations.each do |config|
- config.build_settings.delete 'IPHONEOS_DEPLOYMENT_TARGET'
+ config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '13.0'
+
+ # Pinned here (rather than deleted) so every pod builds against the app's iOS 13.0 target.
end
end
end
diff --git a/tools/demo/index.ts b/tools/demo/index.ts
index 094d5db..deba546 100644
--- a/tools/demo/index.ts
+++ b/tools/demo/index.ts
@@ -5,3 +5,4 @@ export * from './nativescript-camera';
export * from './nativescript-custom-rotors';
export * from './nativescript-downloader';
export * from './nativescript-filepicker';
+export * from './nativescript-transcoder';
diff --git a/tools/demo/nativescript-transcoder/index.ts b/tools/demo/nativescript-transcoder/index.ts
new file mode 100644
index 0000000..59ea34e
--- /dev/null
+++ b/tools/demo/nativescript-transcoder/index.ts
@@ -0,0 +1,8 @@
+import { DemoSharedBase } from '../utils';
+import {} from '@voicethread/nativescript-transcoder';
+
+export class DemoSharedNativescriptTranscoder extends DemoSharedBase {
+ testIt() {
+ console.log('test nativescript-transcoder!');
+ }
+}
diff --git a/tools/workspace-scripts.js b/tools/workspace-scripts.js
index eb0c288..3d6f389 100644
--- a/tools/workspace-scripts.js
+++ b/tools/workspace-scripts.js
@@ -96,6 +96,13 @@ module.exports = {
description: '@voicethread/nativescript-audio-recorder: Build',
},
},
+ // @voicethread/nativescript-transcoder
+ 'nativescript-transcoder': {
+ build: {
+ script: 'nx run nativescript-transcoder:build.all',
+ description: '@voicethread/nativescript-transcoder: Build',
+ },
+ },
// @voicethread/nativescript-camera
'nativescript-camera': {
build: {
@@ -133,6 +140,10 @@ module.exports = {
script: 'nx run nativescript-audio-recorder:focus',
description: 'Focus on @voicethread/nativescript-audio-recorder',
},
+ 'nativescript-transcoder': {
+ script: 'nx run nativescript-transcoder:focus',
+ description: 'Focus on @voicethread/nativescript-transcoder',
+ },
'nativescript-camera': {
script: 'nx run nativescript-camera:focus',
description: 'Focus on @voicethread/nativescript-camera',
diff --git a/tsconfig.base.json b/tsconfig.base.json
index 3d6c85a..015ce18 100644
--- a/tsconfig.base.json
+++ b/tsconfig.base.json
@@ -44,6 +44,9 @@
"@voicethread/nativescript-audio-recorder": [
"packages/nativescript-audio-recorder/index.d.ts"
],
+ "@voicethread/nativescript-transcoder": [
+ "packages/nativescript-transcoder/index.d.ts"
+ ],
"@voicethread/nativescript-camera": [
"packages/nativescript-camera/index.d.ts"
]
@@ -53,4 +56,4 @@
"node_modules",
"tmp"
]
-}
+}
\ No newline at end of file