Loading media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java +23 −0 Original line number Diff line number Diff line Loading @@ -56,6 +56,7 @@ import java.io.IOException; import android.util.Log; import android.util.Size; import android.widget.RadioGroup; import android.widget.TextView; import android.widget.Toast; import java.lang.ref.WeakReference; Loading @@ -80,6 +81,14 @@ public class MainActivity extends AppCompatActivity private static final int VIDEO_BITRATE = 8000000 /* 8 Mbps */; private static final int VIDEO_FRAMERATE = 30; /** * Constant values to frame types assigned here are internal to this app. * These values does not correspond to the actual values defined in avc/hevc specifications. */ public static final int FRAME_TYPE_I = 0; public static final int FRAME_TYPE_P = 1; public static final int FRAME_TYPE_B = 2; private String mMime = MediaFormat.MIMETYPE_VIDEO_AVC; private String mOutputVideoPath = null; Loading @@ -89,6 +98,7 @@ public class MainActivity extends AppCompatActivity private boolean mIsRecording; private AutoFitTextureView mTextureView; private TextView mTextView; private CameraDevice mCameraDevice; private CameraCaptureSession mPreviewSession; private CaptureRequest.Builder mPreviewBuilder; Loading @@ -101,6 +111,8 @@ public class MainActivity extends AppCompatActivity private Button mStartButton; private int[] mFrameTypeOccurrences; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Loading Loading @@ -129,6 +141,8 @@ public class MainActivity extends AppCompatActivity final CheckBox checkBox_mr = findViewById(R.id.checkBox_media_recorder); final CheckBox checkBox_mc = findViewById(R.id.checkBox_media_codec); mTextureView = findViewById(R.id.texture); mTextView = findViewById(R.id.textViewResults); checkBox_mr.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Loading Loading @@ -162,6 +176,7 @@ 
public class MainActivity extends AppCompatActivity @Override public void onClick(View v) { if (v.getId() == R.id.start_button) { mTextView.setText(null); if (mIsMediaRecorder) { if (mIsRecording) { stopRecordingVideo(); Loading Loading @@ -198,6 +213,7 @@ public class MainActivity extends AppCompatActivity mainActivity.mOutputVideoPath); try { encodingStatus = codecSurfaceEncoder.startEncodingSurface(); mainActivity.mFrameTypeOccurrences = codecSurfaceEncoder.getFrameTypes(); } catch (IOException | InterruptedException e) { e.printStackTrace(); } Loading @@ -211,6 +227,13 @@ public class MainActivity extends AppCompatActivity if (encodingStatus == 0) { Toast.makeText(mainActivity.getApplicationContext(), "Encoding Completed", Toast.LENGTH_SHORT).show(); mainActivity.mTextView.append("\n Encoded stream contains: "); mainActivity.mTextView.append("\n Number of I-Frames: " + mainActivity.mFrameTypeOccurrences[FRAME_TYPE_I]); mainActivity.mTextView.append("\n Number of P-Frames: " + mainActivity.mFrameTypeOccurrences[FRAME_TYPE_P]); mainActivity.mTextView.append("\n Number of B-Frames: " + mainActivity.mFrameTypeOccurrences[FRAME_TYPE_B]); } else { Toast.makeText(mainActivity.getApplicationContext(), "Error occurred while " + "encoding", Toast.LENGTH_SHORT).show(); Loading media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java +31 −2 Original line number Diff line number Diff line Loading @@ -31,10 +31,14 @@ import android.view.Surface; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P; public class MediaCodecSurfaceEncoder { private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName(); private static 
final boolean DEBUG = false; private static final int VIDEO_BITRATE = 8000000 /*8 Mbps*/; private static final int VIDEO_FRAMERATE = 30; Loading @@ -44,6 +48,8 @@ public class MediaCodecSurfaceEncoder { private final String mMime; private final String mOutputPath; private int mTrackID = -1; private int mFrameNum = 0; private int[] mFrameTypeOccurrences = {0, 0, 0}; private Surface mSurface; private MediaExtractor mExtractor; Loading Loading @@ -128,8 +134,10 @@ public class MediaCodecSurfaceEncoder { mEncoder.reset(); mSurface.release(); mSurface = null; Log.i(TAG, "Number of I-frames = " + mFrameTypeOccurrences[FRAME_TYPE_I]); Log.i(TAG, "Number of P-frames = " + mFrameTypeOccurrences[FRAME_TYPE_P]); Log.i(TAG, "Number of B-frames = " + mFrameTypeOccurrences[FRAME_TYPE_B]); } mEncoder.release(); mDecoder.release(); mExtractor.release(); Loading Loading @@ -193,6 +201,8 @@ public class MediaCodecSurfaceEncoder { mSawEncOutputEOS = false; mDecOutputCount = 0; mEncOutputCount = 0; mFrameNum = 0; Arrays.fill(mFrameTypeOccurrences, 0); } private void configureCodec(MediaFormat decFormat, MediaFormat encFormat) { Loading Loading @@ -336,6 +346,21 @@ public class MediaCodecSurfaceEncoder { } if (info.size > 0) { ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex); // Parse the buffer to get the frame type if (DEBUG) Log.d(TAG, "[ Frame : " + (mFrameNum++) + " ]"); int frameTypeResult = -1; if (mMime == MediaFormat.MIMETYPE_VIDEO_AVC) { frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromAVC(buf); } else if (mMime == MediaFormat.MIMETYPE_VIDEO_HEVC){ frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromHEVC(buf); } else { Log.e(TAG, "Mime type " + mMime + " is not supported."); return; } if (frameTypeResult != -1) { mFrameTypeOccurrences[frameTypeResult]++; } if (mMuxer != null) { if (mTrackID == -1) { mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat()); Loading @@ -353,4 +378,8 @@ public class MediaCodecSurfaceEncoder { private boolean hasSeenError() { 
return mAsyncHandleDecoder.hasSeenError() || mAsyncHandleEncoder.hasSeenError(); } public int[] getFrameTypes() { return mFrameTypeOccurrences; } } media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/NalUnitUtil.java 0 → 100644 +168 −0 Original line number Diff line number Diff line /* * Copyright (C) 2020 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.media.samplevideoencoder; import android.util.Log; import java.nio.ByteBuffer; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P; public class NalUnitUtil { private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName(); private static final boolean DEBUG = false; public static int findNalUnit(byte[] dataArray, int pos, int limit) { int startOffset = 0; if (limit - pos < 4) { return startOffset; } if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 1) { startOffset = 3; } else { if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 0 && dataArray[pos + 3] == 1) { startOffset = 4; } } return startOffset; } private static int getAVCNalUnitType(byte[] dataArray, int nalUnitOffset) { return dataArray[nalUnitOffset] & 0x1F; } private static int parseAVCNALUnitData(byte[] dataArray, int offset, int limit) { 
ParsableBitArray bitArray = new ParsableBitArray(dataArray); bitArray.reset(dataArray, offset, limit); bitArray.skipBit(); // forbidden_zero_bit bitArray.readBits(2); // nal_ref_idc bitArray.skipBits(5); // nal_unit_type bitArray.readUEV(); // first_mb_in_slice if (!bitArray.canReadUEV()) { return -1; } int sliceType = bitArray.readUEV(); if (DEBUG) Log.d(TAG, "slice_type = " + sliceType); if (sliceType == 0) { return FRAME_TYPE_P; } else if (sliceType == 1) { return FRAME_TYPE_B; } else if (sliceType == 2) { return FRAME_TYPE_I; } else { return -1; } } private static int getHEVCNalUnitType(byte[] dataArray, int nalUnitOffset) { return (dataArray[nalUnitOffset] & 0x7E) >> 1; } private static int parseHEVCNALUnitData(byte[] dataArray, int offset, int limit, int nalUnitType) { // nal_unit_type values from H.265/HEVC Table 7-1. final int BLA_W_LP = 16; final int RSV_IRAP_VCL23 = 23; ParsableBitArray bitArray = new ParsableBitArray(dataArray); bitArray.reset(dataArray, offset, limit); bitArray.skipBit(); // forbidden zero bit bitArray.readBits(6); // nal_unit_header bitArray.readBits(6); // nuh_layer_id bitArray.readBits(3); // nuh_temporal_id_plus1 // Parsing slice_segment_header values from H.265/HEVC Table 7.3.6.1 boolean first_slice_segment = bitArray.readBit(); // first_slice_segment_in_pic_flag if (!first_slice_segment) return -1; if (nalUnitType >= BLA_W_LP && nalUnitType <= RSV_IRAP_VCL23) { bitArray.readBit(); // no_output_of_prior_pics_flag } bitArray.readUEV(); // slice_pic_parameter_set_id // Assume num_extra_slice_header_bits element of PPS data to be 0 int sliceType = bitArray.readUEV(); if (DEBUG) Log.d(TAG, "slice_type = " + sliceType); if (sliceType == 0) { return FRAME_TYPE_B; } else if (sliceType == 1) { return FRAME_TYPE_P; } else if (sliceType == 2) { return FRAME_TYPE_I; } else { return -1; } } public static int getStandardizedFrameTypesFromAVC(ByteBuffer buf) { int limit = buf.limit(); byte[] dataArray = new byte[buf.remaining()]; 
buf.get(dataArray); int frameType = -1; for (int pos = 0; pos + 3 < limit; ) { int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit); if (startOffset != 0) { int nalUnitType = getAVCNalUnitType(dataArray, (pos + startOffset)); if (DEBUG) { Log.d(TAG, "NalUnitOffset = " + (pos + startOffset)); Log.d(TAG, "NalUnitType = " + nalUnitType); } // SLICE_NAL = 1; IDR_SLICE_NAL = 5 if (nalUnitType == 1 || nalUnitType == 5) { frameType = parseAVCNALUnitData(dataArray, (pos + startOffset), (limit - pos - startOffset)); break; } pos += 3; } else { pos++; } } return frameType; } public static int getStandardizedFrameTypesFromHEVC(ByteBuffer buf) { int limit = buf.limit(); byte[] dataArray = new byte[buf.remaining()]; buf.get(dataArray); int frameType = -1; for (int pos = 0; pos + 3 < limit; ) { int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit); if (startOffset != 0) { int nalUnitType = NalUnitUtil.getHEVCNalUnitType(dataArray, (pos + startOffset)); if (DEBUG) { Log.d(TAG, "NalUnitOffset = " + (pos + startOffset)); Log.d(TAG, "NalUnitType = " + nalUnitType); } // Parse NALUnits containing slice_headers which lies in the range of 0 to 21 if (nalUnitType >= 0 && nalUnitType <= 21) { frameType = parseHEVCNALUnitData(dataArray, (pos + startOffset), (limit - pos - startOffset), nalUnitType); break; } pos += 3; } else { pos++; } } return frameType; } } media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/ParsableBitArray.java 0 → 100644 +128 −0 Original line number Diff line number Diff line /* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.media.samplevideoencoder; public class ParsableBitArray { public byte[] data; private int byteOffset; private int bitOffset; private int byteLimit; public ParsableBitArray(byte[] dataArray) { this(dataArray, dataArray.length); } public ParsableBitArray(byte[] dataArray, int limit) { this.data = dataArray; byteLimit = limit; } public void reset(byte[] data, int offset, int limit) { this.data = data; byteOffset = offset; bitOffset = 0; byteLimit = limit; } public void skipBit() { if (++bitOffset == 8) { bitOffset = 0; byteOffset++; } } public void skipBits(int numBits) { int numBytes = numBits / 8; byteOffset += numBytes; bitOffset += numBits - (numBytes * 8); if (bitOffset > 7) { byteOffset++; bitOffset -= 8; } } public boolean readBit() { boolean returnValue = (data[byteOffset] & (0x80 >> bitOffset)) != 0; skipBit(); return returnValue; } public int readBits(int numBits) { if (numBits == 0) { return 0; } int returnValue = 0; bitOffset += numBits; while (bitOffset > 8) { bitOffset -= 8; returnValue |= (data[byteOffset++] & 0xFF) << bitOffset; } returnValue |= (data[byteOffset] & 0xFF) >> (8 - bitOffset); returnValue &= 0xFFFFFFFF >>> (32 - numBits); if (bitOffset == 8) { bitOffset = 0; byteOffset++; } return returnValue; } public boolean canReadUEV() { int initialByteOffset = byteOffset; int initialBitOffset = bitOffset; int leadingZeros = 0; while (byteOffset < byteLimit && !readBit()) { leadingZeros++; } boolean hitLimit = byteOffset == byteLimit; byteOffset = initialByteOffset; bitOffset = initialBitOffset; return !hitLimit && 
canReadBits(leadingZeros * 2 + 1); } public int readUEV() { int leadingZeros = 0; while (!readBit()) { leadingZeros++; } return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0); } public boolean canReadBits(int numBits) { int oldByteOffset = byteOffset; int numBytes = numBits / 8; int newByteOffset = byteOffset + numBytes; int newBitOffset = bitOffset + numBits - (numBytes * 8); if (newBitOffset > 7) { newByteOffset++; newBitOffset -= 8; } for (int i = oldByteOffset + 1; i <= newByteOffset && newByteOffset < byteLimit; i++) { if (shouldSkipByte(i)) { // Skip the byte and check three bytes ahead. newByteOffset++; i += 2; } } return newByteOffset < byteLimit || (newByteOffset == byteLimit && newBitOffset == 0); } private boolean shouldSkipByte(int offset) { return (2 <= offset && offset < byteLimit && data[offset] == (byte) 0x03 && data[offset - 2] == (byte) 0x00 && data[offset - 1] == (byte) 0x00); } } media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml +11 −0 Original line number Diff line number Diff line Loading @@ -124,4 +124,15 @@ </FrameLayout> <TextView android:id="@+id/textViewResults" android:layout_width="wrap_content" android:layout_height="wrap_content" android:layout_marginTop="10dp" android:fontFamily="sans-serif-medium" android:textSize="18sp" android:textStyle="normal" app:layout_constraintStart_toStartOf="parent" app:layout_constraintTop_toBottomOf = "@+id/frameLayout2" /> </androidx.constraintlayout.widget.ConstraintLayout> Loading
media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MainActivity.java +23 −0 Original line number Diff line number Diff line Loading @@ -56,6 +56,7 @@ import java.io.IOException; import android.util.Log; import android.util.Size; import android.widget.RadioGroup; import android.widget.TextView; import android.widget.Toast; import java.lang.ref.WeakReference; Loading @@ -80,6 +81,14 @@ public class MainActivity extends AppCompatActivity private static final int VIDEO_BITRATE = 8000000 /* 8 Mbps */; private static final int VIDEO_FRAMERATE = 30; /** * Constant values to frame types assigned here are internal to this app. * These values does not correspond to the actual values defined in avc/hevc specifications. */ public static final int FRAME_TYPE_I = 0; public static final int FRAME_TYPE_P = 1; public static final int FRAME_TYPE_B = 2; private String mMime = MediaFormat.MIMETYPE_VIDEO_AVC; private String mOutputVideoPath = null; Loading @@ -89,6 +98,7 @@ public class MainActivity extends AppCompatActivity private boolean mIsRecording; private AutoFitTextureView mTextureView; private TextView mTextView; private CameraDevice mCameraDevice; private CameraCaptureSession mPreviewSession; private CaptureRequest.Builder mPreviewBuilder; Loading @@ -101,6 +111,8 @@ public class MainActivity extends AppCompatActivity private Button mStartButton; private int[] mFrameTypeOccurrences; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Loading Loading @@ -129,6 +141,8 @@ public class MainActivity extends AppCompatActivity final CheckBox checkBox_mr = findViewById(R.id.checkBox_media_recorder); final CheckBox checkBox_mc = findViewById(R.id.checkBox_media_codec); mTextureView = findViewById(R.id.texture); mTextView = findViewById(R.id.textViewResults); checkBox_mr.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Loading Loading @@ -162,6 +176,7 @@ public class 
MainActivity extends AppCompatActivity @Override public void onClick(View v) { if (v.getId() == R.id.start_button) { mTextView.setText(null); if (mIsMediaRecorder) { if (mIsRecording) { stopRecordingVideo(); Loading Loading @@ -198,6 +213,7 @@ public class MainActivity extends AppCompatActivity mainActivity.mOutputVideoPath); try { encodingStatus = codecSurfaceEncoder.startEncodingSurface(); mainActivity.mFrameTypeOccurrences = codecSurfaceEncoder.getFrameTypes(); } catch (IOException | InterruptedException e) { e.printStackTrace(); } Loading @@ -211,6 +227,13 @@ public class MainActivity extends AppCompatActivity if (encodingStatus == 0) { Toast.makeText(mainActivity.getApplicationContext(), "Encoding Completed", Toast.LENGTH_SHORT).show(); mainActivity.mTextView.append("\n Encoded stream contains: "); mainActivity.mTextView.append("\n Number of I-Frames: " + mainActivity.mFrameTypeOccurrences[FRAME_TYPE_I]); mainActivity.mTextView.append("\n Number of P-Frames: " + mainActivity.mFrameTypeOccurrences[FRAME_TYPE_P]); mainActivity.mTextView.append("\n Number of B-Frames: " + mainActivity.mFrameTypeOccurrences[FRAME_TYPE_B]); } else { Toast.makeText(mainActivity.getApplicationContext(), "Error occurred while " + "encoding", Toast.LENGTH_SHORT).show(); Loading
media/tests/SampleVideoEncoder/app/src/main/java/com/android/media/samplevideoencoder/MediaCodecSurfaceEncoder.java +31 −2 Original line number Diff line number Diff line Loading @@ -31,10 +31,14 @@ import android.view.Surface; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I; import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P; public class MediaCodecSurfaceEncoder { private static final String TAG = MediaCodecSurfaceEncoder.class.getSimpleName(); private static final boolean DEBUG = false; private static final int VIDEO_BITRATE = 8000000 /*8 Mbps*/; private static final int VIDEO_FRAMERATE = 30; Loading @@ -44,6 +48,8 @@ public class MediaCodecSurfaceEncoder { private final String mMime; private final String mOutputPath; private int mTrackID = -1; private int mFrameNum = 0; private int[] mFrameTypeOccurrences = {0, 0, 0}; private Surface mSurface; private MediaExtractor mExtractor; Loading Loading @@ -128,8 +134,10 @@ public class MediaCodecSurfaceEncoder { mEncoder.reset(); mSurface.release(); mSurface = null; Log.i(TAG, "Number of I-frames = " + mFrameTypeOccurrences[FRAME_TYPE_I]); Log.i(TAG, "Number of P-frames = " + mFrameTypeOccurrences[FRAME_TYPE_P]); Log.i(TAG, "Number of B-frames = " + mFrameTypeOccurrences[FRAME_TYPE_B]); } mEncoder.release(); mDecoder.release(); mExtractor.release(); Loading Loading @@ -193,6 +201,8 @@ public class MediaCodecSurfaceEncoder { mSawEncOutputEOS = false; mDecOutputCount = 0; mEncOutputCount = 0; mFrameNum = 0; Arrays.fill(mFrameTypeOccurrences, 0); } private void configureCodec(MediaFormat decFormat, MediaFormat encFormat) { Loading Loading @@ -336,6 +346,21 @@ public class MediaCodecSurfaceEncoder { } if (info.size > 0) { ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex); // Parse 
the buffer to get the frame type if (DEBUG) Log.d(TAG, "[ Frame : " + (mFrameNum++) + " ]"); int frameTypeResult = -1; if (mMime == MediaFormat.MIMETYPE_VIDEO_AVC) { frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromAVC(buf); } else if (mMime == MediaFormat.MIMETYPE_VIDEO_HEVC){ frameTypeResult = NalUnitUtil.getStandardizedFrameTypesFromHEVC(buf); } else { Log.e(TAG, "Mime type " + mMime + " is not supported."); return; } if (frameTypeResult != -1) { mFrameTypeOccurrences[frameTypeResult]++; } if (mMuxer != null) { if (mTrackID == -1) { mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat()); Loading @@ -353,4 +378,8 @@ public class MediaCodecSurfaceEncoder { private boolean hasSeenError() { return mAsyncHandleDecoder.hasSeenError() || mAsyncHandleEncoder.hasSeenError(); } public int[] getFrameTypes() { return mFrameTypeOccurrences; } }
/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.media.samplevideoencoder;

import android.util.Log;

import java.nio.ByteBuffer;

import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_B;
import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_I;
import static com.android.media.samplevideoencoder.MainActivity.FRAME_TYPE_P;

/**
 * Helpers that scan an encoded AVC/HEVC access unit (Annex-B byte stream) for the
 * first slice NAL unit and classify it as an I-, P-, or B-frame using the app-internal
 * FRAME_TYPE_* constants declared in {@link MainActivity}.
 */
public class NalUnitUtil {
    // Fix: TAG previously used MediaCodecSurfaceEncoder.class.getSimpleName(), which
    // misattributed every log line from this class to MediaCodecSurfaceEncoder.
    private static final String TAG = NalUnitUtil.class.getSimpleName();
    private static final boolean DEBUG = false;

    /**
     * Returns the length of the Annex-B start code beginning at {@code pos}:
     * 3 for 00 00 01, 4 for 00 00 00 01, or 0 if no start code begins there
     * (including when fewer than 4 bytes remain before {@code limit}).
     */
    public static int findNalUnit(byte[] dataArray, int pos, int limit) {
        int startOffset = 0;
        if (limit - pos < 4) {
            return startOffset;
        }
        if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 1) {
            startOffset = 3;
        } else {
            if (dataArray[pos] == 0 && dataArray[pos + 1] == 0 && dataArray[pos + 2] == 0
                    && dataArray[pos + 3] == 1) {
                startOffset = 4;
            }
        }
        return startOffset;
    }

    /** Extracts nal_unit_type (low 5 bits of the NAL header byte, H.264 spec 7.3.1). */
    private static int getAVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
        return dataArray[nalUnitOffset] & 0x1F;
    }

    /**
     * Parses an AVC slice header starting at {@code offset} and maps slice_type to an
     * app-internal frame type, or -1 if the slice_type is absent or unrecognized.
     *
     * NOTE(review): slice_type values 5..7 (P/B/I with "all slices of this type") are
     * not mapped and fall through to -1 — confirm the encoder under test never emits
     * them before relying on the counts.
     */
    private static int parseAVCNALUnitData(byte[] dataArray, int offset, int limit) {
        // NOTE(review): callers pass a remaining-length as {@code limit}, but
        // ParsableBitArray.reset() treats it as an absolute end index; this only lines
        // up when the slice NAL starts near the front of the buffer — confirm intent.
        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
        bitArray.reset(dataArray, offset, limit);

        bitArray.skipBit(); // forbidden_zero_bit
        bitArray.readBits(2); // nal_ref_idc
        bitArray.skipBits(5); // nal_unit_type

        bitArray.readUEV(); // first_mb_in_slice
        if (!bitArray.canReadUEV()) {
            return -1;
        }
        int sliceType = bitArray.readUEV();
        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
        if (sliceType == 0) {
            return FRAME_TYPE_P;
        } else if (sliceType == 1) {
            return FRAME_TYPE_B;
        } else if (sliceType == 2) {
            return FRAME_TYPE_I;
        } else {
            return -1;
        }
    }

    /** Extracts nal_unit_type (bits 1..6 of the first NAL header byte, H.265 spec 7.3.1.2). */
    private static int getHEVCNalUnitType(byte[] dataArray, int nalUnitOffset) {
        return (dataArray[nalUnitOffset] & 0x7E) >> 1;
    }

    /**
     * Parses an HEVC slice_segment_header starting at {@code offset} and maps
     * slice_type to an app-internal frame type. Returns -1 for dependent slice
     * segments (first_slice_segment_in_pic_flag == 0) or unrecognized slice_type.
     * Assumes num_extra_slice_header_bits in the active PPS is 0.
     */
    private static int parseHEVCNALUnitData(byte[] dataArray, int offset, int limit,
                                            int nalUnitType) {
        // nal_unit_type values from H.265/HEVC Table 7-1.
        final int BLA_W_LP = 16;
        final int RSV_IRAP_VCL23 = 23;

        ParsableBitArray bitArray = new ParsableBitArray(dataArray);
        bitArray.reset(dataArray, offset, limit);

        bitArray.skipBit(); // forbidden zero bit
        bitArray.readBits(6); // nal_unit_header
        bitArray.readBits(6); // nuh_layer_id
        bitArray.readBits(3); // nuh_temporal_id_plus1

        // Parsing slice_segment_header values from H.265/HEVC Table 7.3.6.1
        boolean first_slice_segment = bitArray.readBit(); // first_slice_segment_in_pic_flag
        if (!first_slice_segment) return -1;
        if (nalUnitType >= BLA_W_LP && nalUnitType <= RSV_IRAP_VCL23) {
            bitArray.readBit(); // no_output_of_prior_pics_flag
        }
        bitArray.readUEV(); // slice_pic_parameter_set_id
        // Assume num_extra_slice_header_bits element of PPS data to be 0
        int sliceType = bitArray.readUEV();
        if (DEBUG) Log.d(TAG, "slice_type = " + sliceType);
        if (sliceType == 0) {
            return FRAME_TYPE_B;
        } else if (sliceType == 1) {
            return FRAME_TYPE_P;
        } else if (sliceType == 2) {
            return FRAME_TYPE_I;
        } else {
            return -1;
        }
    }

    /**
     * Scans {@code buf} for the first AVC slice NAL (types 1/5) and returns its
     * app-internal frame type, or -1 if none is found. Consumes the buffer's
     * remaining bytes (the ByteBuffer position advances to its limit).
     */
    public static int getStandardizedFrameTypesFromAVC(ByteBuffer buf) {
        int limit = buf.limit();
        byte[] dataArray = new byte[buf.remaining()];
        buf.get(dataArray);
        int frameType = -1;
        for (int pos = 0; pos + 3 < limit; ) {
            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
            if (startOffset != 0) {
                int nalUnitType = getAVCNalUnitType(dataArray, (pos + startOffset));
                if (DEBUG) {
                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
                    Log.d(TAG, "NalUnitType = " + nalUnitType);
                }
                // SLICE_NAL = 1; IDR_SLICE_NAL = 5
                if (nalUnitType == 1 || nalUnitType == 5) {
                    frameType = parseAVCNALUnitData(dataArray, (pos + startOffset),
                            (limit - pos - startOffset));
                    break;
                }
                pos += 3;
            } else {
                pos++;
            }
        }
        return frameType;
    }

    /**
     * Scans {@code buf} for the first HEVC VCL NAL (types 0..21) and returns its
     * app-internal frame type, or -1 if none is found. Consumes the buffer's
     * remaining bytes (the ByteBuffer position advances to its limit).
     */
    public static int getStandardizedFrameTypesFromHEVC(ByteBuffer buf) {
        int limit = buf.limit();
        byte[] dataArray = new byte[buf.remaining()];
        buf.get(dataArray);
        int frameType = -1;
        for (int pos = 0; pos + 3 < limit; ) {
            int startOffset = NalUnitUtil.findNalUnit(dataArray, pos, limit);
            if (startOffset != 0) {
                int nalUnitType = NalUnitUtil.getHEVCNalUnitType(dataArray, (pos + startOffset));
                if (DEBUG) {
                    Log.d(TAG, "NalUnitOffset = " + (pos + startOffset));
                    Log.d(TAG, "NalUnitType = " + nalUnitType);
                }
                // Parse NALUnits containing slice_headers which lies in the range of 0 to 21
                if (nalUnitType >= 0 && nalUnitType <= 21) {
                    frameType = parseHEVCNALUnitData(dataArray, (pos + startOffset),
                            (limit - pos - startOffset), nalUnitType);
                    break;
                }
                pos += 3;
            } else {
                pos++;
            }
        }
        return frameType;
    }
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.media.samplevideoencoder;

/**
 * A bit-granularity reader over a byte array, used to parse AVC/HEVC slice headers.
 *
 * State is a cursor of (byteOffset, bitOffset) plus an exclusive byte limit.
 * NOTE(review): read/skip methods do not bounds-check against byteLimit themselves;
 * callers are expected to gate reads with canReadUEV()/canReadBits() — confirm before
 * reusing this class elsewhere.
 */
public class ParsableBitArray {
    public byte[] data;          // backing buffer being parsed
    private int byteOffset;      // index of the byte the cursor is in
    private int bitOffset;       // bit position within that byte, 0 (MSB) .. 7 (LSB)
    private int byteLimit;       // exclusive end index used by canReadUEV/canReadBits

    /** Wraps the whole array; cursor starts at byte 0, bit 0. */
    public ParsableBitArray(byte[] dataArray) {
        this(dataArray, dataArray.length);
    }

    /** Wraps the array with an explicit exclusive byte limit. */
    public ParsableBitArray(byte[] dataArray, int limit) {
        this.data = dataArray;
        byteLimit = limit;
    }

    /**
     * Repoints the reader at {@code data}, placing the cursor at byte {@code offset}
     * (bit 0) with exclusive end index {@code limit}.
     */
    public void reset(byte[] data, int offset, int limit) {
        this.data = data;
        byteOffset = offset;
        bitOffset = 0;
        byteLimit = limit;
    }

    /** Advances the cursor by one bit, rolling into the next byte after bit 7. */
    public void skipBit() {
        if (++bitOffset == 8) {
            bitOffset = 0;
            byteOffset++;
        }
    }

    /** Advances the cursor by {@code numBits} bits without reading them. */
    public void skipBits(int numBits) {
        int numBytes = numBits / 8;
        byteOffset += numBytes;
        bitOffset += numBits - (numBytes * 8);
        if (bitOffset > 7) {
            byteOffset++;
            bitOffset -= 8;
        }
    }

    /** Reads one bit (true == 1) and advances the cursor. */
    public boolean readBit() {
        boolean returnValue = (data[byteOffset] & (0x80 >> bitOffset)) != 0;
        skipBit();
        return returnValue;
    }

    /**
     * Reads {@code numBits} (0..32) as an unsigned big-endian value and advances
     * the cursor. Returns 0 for numBits == 0.
     */
    public int readBits(int numBits) {
        if (numBits == 0) {
            return 0;
        }
        int returnValue = 0;
        // Accumulate whole bytes first, then the trailing partial byte.
        bitOffset += numBits;
        while (bitOffset > 8) {
            bitOffset -= 8;
            returnValue |= (data[byteOffset++] & 0xFF) << bitOffset;
        }
        returnValue |= (data[byteOffset] & 0xFF) >> (8 - bitOffset);
        // Mask off bits above the requested width.
        returnValue &= 0xFFFFFFFF >>> (32 - numBits);
        if (bitOffset == 8) {
            bitOffset = 0;
            byteOffset++;
        }
        return returnValue;
    }

    /**
     * Returns whether a full Exp-Golomb (ue(v)) value can be read from the current
     * position without passing byteLimit. Restores the cursor before returning.
     */
    public boolean canReadUEV() {
        int initialByteOffset = byteOffset;
        int initialBitOffset = bitOffset;
        int leadingZeros = 0;
        // Count leading zero bits of the Exp-Golomb prefix, stopping at the limit.
        while (byteOffset < byteLimit && !readBit()) {
            leadingZeros++;
        }
        boolean hitLimit = byteOffset == byteLimit;
        byteOffset = initialByteOffset;
        bitOffset = initialBitOffset;
        // A ue(v) with k leading zeros occupies 2k + 1 bits in total.
        return !hitLimit && canReadBits(leadingZeros * 2 + 1);
    }

    /**
     * Reads an unsigned Exp-Golomb (ue(v)) value and advances the cursor.
     * No bounds checking — call canReadUEV() first.
     */
    public int readUEV() {
        int leadingZeros = 0;
        while (!readBit()) {
            leadingZeros++;
        }
        return (1 << leadingZeros) - 1 + (leadingZeros > 0 ? readBits(leadingZeros) : 0);
    }

    /**
     * Returns whether {@code numBits} more bits fit before byteLimit, accounting
     * for NAL emulation-prevention bytes (00 00 03) that must be skipped and thus
     * consume extra buffer space. Does not move the cursor.
     */
    public boolean canReadBits(int numBits) {
        int oldByteOffset = byteOffset;
        int numBytes = numBits / 8;
        int newByteOffset = byteOffset + numBytes;
        int newBitOffset = bitOffset + numBits - (numBytes * 8);
        if (newBitOffset > 7) {
            newByteOffset++;
            newBitOffset -= 8;
        }
        for (int i = oldByteOffset + 1; i <= newByteOffset && newByteOffset < byteLimit; i++) {
            if (shouldSkipByte(i)) {
                // Skip the byte and check three bytes ahead.
                newByteOffset++;
                i += 2;
            }
        }
        return newByteOffset < byteLimit || (newByteOffset == byteLimit && newBitOffset == 0);
    }

    /** True if data[offset] is the 0x03 of a 00 00 03 emulation-prevention sequence. */
    private boolean shouldSkipByte(int offset) {
        return (2 <= offset && offset < byteLimit && data[offset] == (byte) 0x03
                && data[offset - 2] == (byte) 0x00 && data[offset - 1] == (byte) 0x00);
    }
}
media/tests/SampleVideoEncoder/app/src/main/res/layout/activity_main.xml +11 −0 Original line number Diff line number Diff line Loading @@ -124,4 +124,15 @@ </FrameLayout> <TextView android:id="@+id/textViewResults" android:layout_width="wrap_content" android:layout_height="wrap_content" android:layout_marginTop="10dp" android:fontFamily="sans-serif-medium" android:textSize="18sp" android:textStyle="normal" app:layout_constraintStart_toStartOf="parent" app:layout_constraintTop_toBottomOf = "@+id/frameLayout2" /> </androidx.constraintlayout.widget.ConstraintLayout>