diff --git a/app/src/main/java/com/openipc/pixelpilot/VideoActivity.java b/app/src/main/java/com/openipc/pixelpilot/VideoActivity.java
index 9136845..5642d4f 100644
--- a/app/src/main/java/com/openipc/pixelpilot/VideoActivity.java
+++ b/app/src/main/java/com/openipc/pixelpilot/VideoActivity.java
@@ -21,15 +21,19 @@
import android.util.Base64;
import android.util.Log;
import android.view.MenuItem;
+import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.View;
import android.view.WindowManager;
import android.widget.PopupMenu;
+import android.widget.SeekBar;
+import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.Toast;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.constraintlayout.widget.ConstraintLayout;
+import androidx.constraintlayout.widget.ConstraintSet;
import androidx.documentfile.provider.DocumentFile;
import com.github.mikephil.charting.charts.PieChart;
@@ -87,6 +91,24 @@ public void run() {
private OSDManager osdManager;
private ParcelFileDescriptor dvrFd = null;
private Timer dvrIconTimer = null;
+ private Timer recordTimer = null;
+ private int seconds = 0;
+ private boolean isVRMode = false;
+ private boolean isStreaming = false;
+ private ConstraintLayout constraintLayout;
+ private ConstraintSet constraintSet;
+
+ public boolean getVRSetting() {
+ return getSharedPreferences("general", Context.MODE_PRIVATE).getBoolean("vr-mode", false);
+ }
+
+ public void setVRSetting(boolean v) {
+ SharedPreferences prefs = getSharedPreferences("general", Context.MODE_PRIVATE);
+ SharedPreferences.Editor editor = prefs.edit();
+ editor.putBoolean("vr-mode", v);
+ editor.apply();
+ }
public static int getChannel(Context context) {
return context.getSharedPreferences("general",
@@ -119,6 +141,19 @@ public static String bytesToHex(byte[] bytes) {
return hexString.toString();
}
+ private void resetApp() {
+ // Restart the app
+ Intent intent = getPackageManager().getLaunchIntentForPackage(getPackageName());
+ if (intent != null) {
+ intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK);
+ startActivity(intent);
+ finish();
+ System.exit(0); // Ensure the app is fully restarted
+ }
+ }
+
+ @SuppressLint("ClickableViewAccessibility")
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.d(TAG, "lifecycle onCreate");
@@ -139,7 +174,84 @@ protected void onCreate(Bundle savedInstanceState) {
setContentView(binding.getRoot());
videoPlayer = new VideoPlayer(this);
videoPlayer.setIVideoParamsChanged(this);
- binding.mainVideo.getHolder().addCallback(videoPlayer.configure1());
+ isVRMode = getVRSetting();
+ if(isVRMode) {
+ binding.mainVideo.setVisibility(View.GONE);
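+ // VR mode: hide the single full-screen surface and decode into two side-by-side surfaces (index 0 = left eye, index 1 = right eye).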
+ binding.surfaceViewLeft.getHolder().addCallback(videoPlayer.configure1(0));
+ binding.surfaceViewRight.getHolder().addCallback(videoPlayer.configure1(1));
+
+ SeekBar seekBar = binding.seekBar;
+ // Retrieve saved progress value
+ SharedPreferences sharedPreferences = getSharedPreferences("SeekBarPrefs", MODE_PRIVATE);
+ int savedProgress = sharedPreferences.getInt("seekBarProgress", 0); // Default to 0 if no value is found
+ seekBar.setProgress(savedProgress);
+ seekBar.setVisibility(View.VISIBLE);
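+ // The SeekBar adjusts the end/start margins of the two eye views so their horizontal spacing can be tuned; the chosen value is persisted in SeekBarPrefs.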
+ constraintLayout = binding.frameLayout;
+ constraintSet = new ConstraintSet();
+ constraintSet.clone(constraintLayout);
+
+ // Apply the saved margin
+ int margin = savedProgress * 10; // Adjust the multiplier as needed
+ constraintSet.setMargin(R.id.surfaceViewLeft, ConstraintSet.END, margin);
+ constraintSet.setMargin(R.id.surfaceViewRight, ConstraintSet.START, margin);
+ constraintSet.applyTo(constraintLayout);
+
+ // Hide SeekBar after 3 seconds
+ handler.postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ seekBar.setVisibility(View.GONE);
+ }
+ }, 3000);
+
+ // Show SeekBar when touched
+ constraintLayout.setOnTouchListener(new View.OnTouchListener() {
+ @Override
+ public boolean onTouch(View v, MotionEvent event) {
+ if (event.getAction() == MotionEvent.ACTION_DOWN) {
+ seekBar.setVisibility(View.VISIBLE);
+ // Hide SeekBar again after 3 seconds of inactivity
+ handler.postDelayed(new Runnable() {
+ @Override
+ public void run() {
+ seekBar.setVisibility(View.GONE);
+ }
+ }, 3000);
+ }
+ return false;
+ }
+ });
+
+ seekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
+ @Override
+ public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
+ int margin = progress * 10; // Adjust the multiplier as needed
+ constraintSet.setMargin(R.id.surfaceViewLeft, ConstraintSet.END, margin);
+ constraintSet.setMargin(R.id.surfaceViewRight, ConstraintSet.START, margin);
+ constraintSet.applyTo(constraintLayout);
+ // Save progress value
+ SharedPreferences sharedPreferences = getSharedPreferences("SeekBarPrefs", MODE_PRIVATE);
+ SharedPreferences.Editor editor = sharedPreferences.edit();
+ editor.putInt("seekBarProgress", progress);
+ editor.apply();
+ }
+
+ @Override
+ public void onStartTrackingTouch(SeekBar seekBar) {
+
+ }
+
+ @Override
+ public void onStopTrackingTouch(SeekBar seekBar) {
+
+ }
+ });
+ }
+ else {
+ binding.surfaceViewRight.setVisibility(View.GONE);
+ binding.surfaceViewLeft.setVisibility(View.GONE);
+ binding.mainVideo.getHolder().addCallback(videoPlayer.configure1(0));
+ }
osdManager = new OSDManager(this, binding);
osdManager.setUp();
@@ -160,8 +272,37 @@ protected void onCreate(Bundle savedInstanceState) {
PieData noData = new PieData(new PieDataSet(new ArrayList<>(), ""));
chart.setData(noData);
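+ // On-screen record button: mirrors the "Recording" menu entry - ignored while no stream is active, starts the DVR if a folder is configured (otherwise asks the user to pick one), or stops an active recording.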
+ binding.imgBtnRecord.setOnClickListener(item -> {
+ if(!isStreaming) return;
+
+ if (dvrFd == null) {
+ Uri dvrUri = openDvrFile();
+ if (dvrUri != null) {
+ startDvr(dvrUri);
+ } else {
+ Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT_TREE);
+ intent.addCategory(Intent.CATEGORY_DEFAULT);
+ startActivityForResult(intent, PICK_DVR_REQUEST_CODE);
+ }
+ } else {
+ stopDvr();
+ }
+ });
+
binding.btnSettings.setOnClickListener(v -> {
PopupMenu popup = new PopupMenu(this, v);
+ SubMenu vrMenu = popup.getMenu().addSubMenu("VR mode");
+ MenuItem vrItem = vrMenu.add(getVRSetting() ? "On" : "Off");
+ vrItem.setOnMenuItemClickListener(item -> {
+ isVRMode = !getVRSetting();
+ setVRSetting(isVRMode);
+ vrItem.setTitle(isVRMode ? "On" : "Off");
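+ // Collapse-action-view trick to keep the popup open on click; the app is then restarted so the new surface layout takes effect.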
+ item.setShowAsAction(MenuItem.SHOW_AS_ACTION_COLLAPSE_ACTION_VIEW);
+ item.setActionView(new View(this));
+ resetApp();
+ return false;
+ });
+
SubMenu chnMenu = popup.getMenu().addSubMenu("Channel");
int channelPref = getChannel(this);
chnMenu.setHeaderTitle("Current: " + channelPref);
@@ -211,6 +352,7 @@ protected void onCreate(Bundle savedInstanceState) {
SubMenu recording = popup.getMenu().addSubMenu("Recording");
MenuItem dvrBtn = recording.add(dvrFd == null ? "Start" : "Stop");
dvrBtn.setOnMenuItemClickListener(item -> {
+ if(!isStreaming) return false;
if (dvrFd == null) {
Uri dvrUri = openDvrFile();
if (dvrUri != null) {
@@ -285,6 +427,7 @@ private Uri openDvrFile() {
String dvrFolder = getSharedPreferences("general",
Context.MODE_PRIVATE).getString("dvr_folder_", "");
if (dvrFolder.isEmpty()) {
+ Log.e(TAG, "dvrFolder is empty");
return null;
}
Uri uri = Uri.parse(dvrFolder);
@@ -297,6 +440,8 @@ private Uri openDvrFile() {
String filename = "pixelpilot_" + formattedNow + ".mp4";
DocumentFile newFile = pickedDir.createFile("video/mp4", filename);
Toast.makeText(this, "Recording to " + filename, Toast.LENGTH_SHORT).show();
+ if(newFile == null)
+ Log.e(TAG, "dvr newFile null");
return newFile != null ? newFile.getUri() : null;
}
return null;
@@ -309,10 +454,26 @@ private void startDvr(Uri dvrUri) {
try {
dvrFd = getContentResolver().openFileDescriptor(dvrUri, "rw");
videoPlayer.startDvr(dvrFd.getFd(), getDvrMP4());
+ binding.imgBtnRecord.setImageResource(R.drawable.recording);
} catch (IOException e) {
Log.e(TAG, "Failed to open dvr file ", e);
dvrFd = null;
}
+
+ binding.txtRecordLabel.setVisibility(View.VISIBLE);
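+ // Show the recording duration label and update it (mm:ss) once per second while the DVR is running.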
+ recordTimer = new Timer();
+ recordTimer.schedule(new TimerTask() {
+ @Override
+ public void run() {
+ int minutes = seconds / 60;
+ int secs = seconds % 60;
+
+ String timeFormatted = String.format("%02d:%02d", minutes, secs);
+ runOnUiThread(() -> binding.txtRecordLabel.setText(timeFormatted));
+ seconds++;
+ }
+ }, 0, 1000);
+
dvrIconTimer = new Timer();
dvrIconTimer.schedule(new TimerTask() {
@Override
@@ -328,10 +489,20 @@ private void stopDvr() {
return;
}
binding.imgRecIndicator.setVisibility(View.INVISIBLE);
+ binding.imgBtnRecord.setImageResource(R.drawable.record);
videoPlayer.stopDvr();
- dvrIconTimer.cancel();
- dvrIconTimer.purge();
- dvrIconTimer = null;
+ if(recordTimer != null) {
+ recordTimer.cancel();
+ recordTimer.purge();
+ recordTimer = null;
+ seconds = 0;
+ binding.txtRecordLabel.setVisibility(View.GONE);
+ }
+ if(dvrIconTimer != null) {
+ dvrIconTimer.cancel();
+ dvrIconTimer.purge();
+ dvrIconTimer = null;
+ }
try {
dvrFd.close();
} catch (IOException e) {
@@ -511,9 +682,6 @@ public void onDecodingInfoChanged(final DecodingInfo decodingInfo) {
runOnUiThread(() -> {
if (lastCodec != decodingInfo.nCodec) {
lastCodec = decodingInfo.nCodec;
- videoPlayer.stopAndRemoveReceiverDecoder();
- videoPlayer.addAndStartDecoderReceiver(binding.mainVideo.getHolder().getSurface());
- videoPlayer.start();
}
if (decodingInfo.currentFPS > 0) {
binding.tvMessage.setVisibility(View.GONE);
@@ -574,9 +742,13 @@ public void onWfbNgStatsChanged(WfbNGStats data) {
paddedDigits(data.count_p_dec_ok, 6),
paddedDigits(data.count_p_fec_recovered, 6),
paddedDigits(data.count_p_lost, 6)));
+ isStreaming = true;
}
} else {
binding.tvLinkStatus.setText("No wfb-ng data.");
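+ // No wfb-ng stats means the link is down: mark the stream inactive, reset the record icon and stop any active recording.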
+ isStreaming = false;
+ binding.imgBtnRecord.setImageResource(R.drawable.record);
+ stopDvr();
}
});
}
diff --git a/app/src/main/java/com/openipc/pixelpilot/osd/MovableLayout.java b/app/src/main/java/com/openipc/pixelpilot/osd/MovableLayout.java
index 7d49e0a..74b8846 100644
--- a/app/src/main/java/com/openipc/pixelpilot/osd/MovableLayout.java
+++ b/app/src/main/java/com/openipc/pixelpilot/osd/MovableLayout.java
@@ -42,6 +42,15 @@ private void init(Context context) {
defaultY = (float) displaySize.y / 2 - ((float) displaySize.y / 4);
}
+ @Override
+ public boolean onInterceptTouchEvent(MotionEvent ev) {
+ // While movable, steal touch events from child views so this layout's onTouchEvent can handle the drag.
+ return isMovable;
+ }
+
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!isMovable) {
diff --git a/app/src/main/java/com/openipc/pixelpilot/osd/OSDManager.java b/app/src/main/java/com/openipc/pixelpilot/osd/OSDManager.java
index 9e78f3a..719feee 100644
--- a/app/src/main/java/com/openipc/pixelpilot/osd/OSDManager.java
+++ b/app/src/main/java/com/openipc/pixelpilot/osd/OSDManager.java
@@ -91,6 +91,7 @@ public void onFinish() {
listOSDItems.add(new OSDElement("Pitch", binding.itemPitch));
listOSDItems.add(new OSDElement("RC Link", binding.itemRCLink));
listOSDItems.add(new OSDElement("Recording Indicator", binding.itemRecIndicator));
+ listOSDItems.add(new OSDElement("Recording Button", binding.btnRecord));
listOSDItems.add(new OSDElement("Roll", binding.itemRoll));
listOSDItems.add(new OSDElement("Satellites", binding.itemSat));
listOSDItems.add(new OSDElement("Status", binding.itemStatus));
diff --git a/app/src/main/res/drawable/record.png b/app/src/main/res/drawable/record.png
new file mode 100644
index 0000000..2bc447c
Binary files /dev/null and b/app/src/main/res/drawable/record.png differ
diff --git a/app/src/main/res/drawable/recording.png b/app/src/main/res/drawable/recording.png
new file mode 100644
index 0000000..19a202a
Binary files /dev/null and b/app/src/main/res/drawable/recording.png differ
diff --git a/app/src/main/res/layout/activity_video.xml b/app/src/main/res/layout/activity_video.xml
index 79ee930..a0e3a09 100644
--- a/app/src/main/res/layout/activity_video.xml
+++ b/app/src/main/res/layout/activity_video.xml
@@ -15,6 +15,38 @@
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
+    <!-- Views added here (full markup omitted): the surfaceViewLeft / surfaceViewRight SurfaceViews used in VR mode,
+         the seekBar that adjusts the spacing between the two eye views, and the imgBtnRecord button with its
+         txtRecordLabel recording-duration label. -->
diff --git a/app/videonative/src/main/cpp/VideoDecoder.cpp b/app/videonative/src/main/cpp/VideoDecoder.cpp
index 9aa6fa5..e7ded3a 100644
--- a/app/videonative/src/main/cpp/VideoDecoder.cpp
+++ b/app/videonative/src/main/cpp/VideoDecoder.cpp
@@ -21,34 +21,35 @@ VideoDecoder::VideoDecoder(JNIEnv *env) {
resetStatistics();
}
-void VideoDecoder::setOutputSurface(JNIEnv *env, jobject surface) {
+void VideoDecoder::setOutputSurface(JNIEnv *env, jobject surface, jint idx) {
if (surface == nullptr) {
MLOGD << "Set output null surface";
//assert(decoder.window!=nullptr);
- if (decoder.window == nullptr) {
+ if (decoder.window[idx] == nullptr || decoder.codec[idx] == nullptr) {
//MLOGD<<"Decoder window is already null";
return;
}
std::lock_guard lock(mMutexInputPipe);
inputPipeClosed = true;
- if (decoder.configured) {
- AMediaCodec_stop(decoder.codec);
- AMediaCodec_delete(decoder.codec);
+ if (decoder.configured[idx]) {
+ AMediaCodec_stop(decoder.codec[idx]);
+ AMediaCodec_delete(decoder.codec[idx]);
+ decoder.codec[idx] = nullptr;
mKeyFrameFinder.reset();
- decoder.configured = false;
- if (mCheckOutputThread->joinable()) {
- mCheckOutputThread->join();
- mCheckOutputThread.reset();
+ decoder.configured[idx] = false;
+ if (mCheckOutputThread[idx]->joinable()) {
+ mCheckOutputThread[idx]->join();
+ mCheckOutputThread[idx].reset();
}
}
- ANativeWindow_release(decoder.window);
- decoder.window = nullptr;
+ ANativeWindow_release(decoder.window[idx]);
+ decoder.window[idx] = nullptr;
resetStatistics();
} else {
MLOGD << "Set output non-null surface";
// Throw warning if the surface is set without clearing it first
- assert(decoder.window == nullptr);
- decoder.window = ANativeWindow_fromSurface(env, surface);
+ assert(decoder.window[idx] == nullptr);
+ decoder.window[idx] = ANativeWindow_fromSurface(env, surface);
// open the input pipe - now the decoder will start as soon as enough data is available
inputPipeClosed = false;
}
@@ -83,8 +84,9 @@ void VideoDecoder::interpretNALU(const NALU &nalu) {
mKeyFrameFinder.saveIfKeyFrame(nalu);
return;
}
- if (decoder.configured) {
- feedDecoder(nalu);
+ if (decoder.configured[0] || decoder.configured[1]) {
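+        // Feed the same NALU to both decoder instances; feedDecoder() returns early for an index whose codec is not configured.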
+ feedDecoder(nalu, 0);
+ feedDecoder(nalu, 1);
decodingInfo.nNALUSFeeded++;
// manually feeding AUDs doesn't seem to change anything for high latency streams
// Only for the x264 sw encoded example stream it might improve latency slightly
@@ -98,14 +100,17 @@ void VideoDecoder::interpretNALU(const NALU &nalu) {
mKeyFrameFinder.saveIfKeyFrame(nalu);
if (mKeyFrameFinder.allKeyFramesAvailable(IS_H265)) {
MLOGD << "Configuring decoder...";
- configureStartDecoder();
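+ // Configure both decoder instances; configureStartDecoder() bails out for an index whose output window is not set (only index 0 is used outside VR mode).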
+ configureStartDecoder(0);
+ configureStartDecoder(1);
}
}
}
-void VideoDecoder::configureStartDecoder() {
+void VideoDecoder::configureStartDecoder(int idx) {
+ if(decoder.window[idx] == nullptr)
+ return;
const std::string MIME = IS_H265 ? "video/hevc" : "video/avc";
- decoder.codec = AMediaCodec_createDecoderByType(MIME.c_str());
+ decoder.codec[idx] = AMediaCodec_createDecoderByType(MIME.c_str());
AMediaFormat *format = AMediaFormat_new();
AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, MIME.c_str());
@@ -127,7 +132,7 @@ void VideoDecoder::configureStartDecoder() {
MLOGD << "Configuring decoder:" << AMediaFormat_toString(format);
- auto status = AMediaCodec_configure(decoder.codec, format, decoder.window, nullptr, 0);
+ auto status = AMediaCodec_configure(decoder.codec[idx], format, decoder.window[idx], nullptr, 0);
AMediaFormat_delete(format);
switch (status) {
@@ -161,26 +166,28 @@ void VideoDecoder::configureStartDecoder() {
}
- if (decoder.codec == nullptr) {
+ if (decoder.codec[idx] == nullptr) {
MLOGD << "Cannot configure decoder";
//set csd-0 and csd-1 back to 0, maybe they were just faulty but we have better luck with the next ones
//mKeyFrameFinder.reset();
return;
}
- AMediaCodec_start(decoder.codec);
- mCheckOutputThread = std::make_unique<std::thread>(&VideoDecoder::checkOutputLoop, this);
- NDKThreadHelper::setName(mCheckOutputThread->native_handle(), "LLDCheckOutput");
- decoder.configured = true;
+ AMediaCodec_start(decoder.codec[idx]);
+ mCheckOutputThread[idx] = std::make_unique<std::thread>(&VideoDecoder::checkOutputLoop, this, idx);
+ NDKThreadHelper::setName(mCheckOutputThread[idx]->native_handle(), "LLDCheckOutput");
+ decoder.configured[idx] = true;
}
-void VideoDecoder::feedDecoder(const NALU &nalu) {
+void VideoDecoder::feedDecoder(const NALU &nalu, int idx) {
+ if(!decoder.codec[idx])
+ return;
const auto now = std::chrono::steady_clock::now();
const auto deltaParsing = now - nalu.creationTime;
while (true) {
- const auto index = AMediaCodec_dequeueInputBuffer(decoder.codec, BUFFER_TIMEOUT_US);
+ const auto index = AMediaCodec_dequeueInputBuffer(decoder.codec[idx], BUFFER_TIMEOUT_US);
if (index >= 0) {
size_t inputBufferSize;
- uint8_t *buf = AMediaCodec_getInputBuffer(decoder.codec, (size_t) index,
+ uint8_t *buf = AMediaCodec_getInputBuffer(decoder.codec[idx], (size_t) index,
&inputBufferSize);
// I have not seen any case where the input buffer returned by MediaCodec is too small to hold the NALU
// But better be safe than crashing with a memory exception
@@ -194,7 +201,7 @@ void VideoDecoder::feedDecoder(const NALU &nalu) {
std::memcpy(buf, nalu.getData(), (size_t) nalu.getSize());
const uint64_t presentationTimeUS = (uint64_t) duration_cast<microseconds>(
        steady_clock::now().time_since_epoch()).count();
- AMediaCodec_queueInputBuffer(decoder.codec, (size_t) index, 0, (size_t) nalu.getSize(),
+ AMediaCodec_queueInputBuffer(decoder.codec[idx], (size_t) index, 0, (size_t) nalu.getSize(),
presentationTimeUS, flag);
waitForInputB.add(steady_clock::now() - now);
parsingTime.add(deltaParsing);
@@ -217,13 +224,15 @@ void VideoDecoder::feedDecoder(const NALU &nalu) {
}
}
-void VideoDecoder::checkOutputLoop() {
+void VideoDecoder::checkOutputLoop(int idx) {
NDKThreadHelper::setProcessThreadPriorityAttachDetach(javaVm, -16, "DecoderCheckOutput");
AMediaCodecBufferInfo info;
bool decoderSawEOS = false;
bool decoderProducedUnknown = false;
while (!decoderSawEOS && !decoderProducedUnknown) {
- const ssize_t index = AMediaCodec_dequeueOutputBuffer(decoder.codec, &info,
+ if(!decoder.codec[idx])
+ break;
+ const ssize_t index = AMediaCodec_dequeueOutputBuffer(decoder.codec[idx], &info,
BUFFER_TIMEOUT_US);
if (index >= 0) {
const auto now = steady_clock::now();
@@ -234,22 +243,27 @@ void VideoDecoder::checkOutputLoop() {
//-> renderOutputBufferAndRelease which is in https://android.googlesource.com/platform/frameworks/av/+/3fdb405/media/libstagefright/MediaCodec.cpp
//-> Message kWhatReleaseOutputBuffer -> onReleaseOutputBuffer
// also https://android.googlesource.com/platform/frameworks/native/+/5c1139f/libs/gui/SurfaceTexture.cpp
- AMediaCodec_releaseOutputBuffer(decoder.codec, (size_t) index, true);
+ if(!decoder.codec[idx])
+ break;
+ AMediaCodec_releaseOutputBuffer(decoder.codec[idx], (size_t) index, true);
//but the presentationTime is in US
- decodingTime.add(std::chrono::microseconds(nowUS - info.presentationTimeUs));
- nDecodedFrames.add(1);
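+ // Only decoder 0 contributes to the decoding statistics so frames are not counted twice in VR mode.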
+ if(idx == 0)
+ {
+ decodingTime.add(std::chrono::microseconds(nowUS - info.presentationTimeUs));
+ nDecodedFrames.add(1);
+ }
if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
MLOGD << "Decoder saw EOS";
decoderSawEOS = true;
continue;
}
} else if (index == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
- auto format = AMediaCodec_getOutputFormat(decoder.codec);
+ auto format = AMediaCodec_getOutputFormat(decoder.codec[idx]);
int width = 0, height = 0;
AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_WIDTH, &width);
AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_HEIGHT, &height);
MLOGD << "Actual Width and Height in output " << width << "," << height;
- if (onDecoderRatioChangedCallback != nullptr && width != 0 && height != 0) {
+ if (idx == 0 && onDecoderRatioChangedCallback != nullptr && width != 0 && height != 0) {
onDecoderRatioChangedCallback({width, height});
}
MLOGD << "AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED " << width << " " << height << " "
@@ -267,7 +281,7 @@ void VideoDecoder::checkOutputLoop() {
//every 2 seconds recalculate the current fps and bitrate
const auto now = steady_clock::now();
const auto delta = now - decodingInfo.lastCalculation;
- if (delta > DECODING_INFO_RECALCULATION_INTERVAL) {
+ if (idx == 0 && delta > DECODING_INFO_RECALCULATION_INTERVAL) {
decodingInfo.lastCalculation = steady_clock::now();
decodingInfo.currentFPS = (float) nDecodedFrames.getDeltaSinceLastCall() /
(float) duration_cast<seconds>(delta).count();
diff --git a/app/videonative/src/main/cpp/VideoDecoder.h b/app/videonative/src/main/cpp/VideoDecoder.h
index 2645290..c90bdee 100644
--- a/app/videonative/src/main/cpp/VideoDecoder.h
+++ b/app/videonative/src/main/cpp/VideoDecoder.h
@@ -60,9 +60,9 @@ struct VideoRatio {
class VideoDecoder {
private:
struct Decoder {
- bool configured = false;
- AMediaCodec *codec = nullptr;
- ANativeWindow *window = nullptr;
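+        // One slot per output surface: index 0 = main/left-eye surface, index 1 = right-eye surface (VR mode).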
+ bool configured[2] = {false, false};
+ AMediaCodec *codec[2] = {nullptr, nullptr};
+ ANativeWindow *window[2] = {nullptr, nullptr};
};
public:
//Make sure to do no heavy lifting on this callback, since it is called from the low-latency mCheckOutputThread thread (best to copy values and leave processing to another thread)
@@ -80,7 +80,7 @@ class VideoDecoder {
// After acquiring the surface, the decoder will be started as soon as enough configuration data was passed to it
// When releasing the surface, the decoder will be stopped if running and any resources will be freed
// After releasing the surface it is safe for the android os to delete it
- void setOutputSurface(JNIEnv *env, jobject surface);
+ void setOutputSurface(JNIEnv *env, jobject surface, jint idx);
//register the specified callbacks. Only one can be registered at a time
void registerOnDecoderRatioChangedCallback(DECODER_RATIO_CHANGED decoderRatioChangedC);
@@ -96,20 +96,20 @@ class VideoDecoder {
private:
//Initialize decoder with SPS / PPS data from KeyFrameFinder
//Set Decoder.configured to true on success
- void configureStartDecoder();
+ void configureStartDecoder(int idx);
//Wait for input buffer to become available before feeding NALU
- void feedDecoder(const NALU &nalu);
+ void feedDecoder(const NALU &nalu, int idx);
//Runs until EOS arrives at output buffer or decoder is stopped
- void checkOutputLoop();
+ void checkOutputLoop(int idx);
//Debug log
void printAvgLog();
void resetStatistics();
- std::unique_ptr<std::thread> mCheckOutputThread = nullptr;
+ std::unique_ptr<std::thread> mCheckOutputThread[2] = {nullptr, nullptr};
bool USE_SW_DECODER_INSTEAD = false;
//Holds the AMediaCodec instance, as well as the state (configured or not configured)
Decoder decoder{};
diff --git a/app/videonative/src/main/cpp/VideoPlayer.cpp b/app/videonative/src/main/cpp/VideoPlayer.cpp
index bcd090e..0ec079f 100644
--- a/app/videonative/src/main/cpp/VideoPlayer.cpp
+++ b/app/videonative/src/main/cpp/VideoPlayer.cpp
@@ -38,6 +38,12 @@ void VideoPlayer::processQueue() {
MP4E_mux_t *mux = MP4E_open(0 /*sequential_mode*/, dvr_mp4_fragmentation, fout, write_callback);
mp4_h26x_writer_t mp4wr;
float framerate = 0;
+ if(mux == nullptr)
+ {
+ __android_log_print(ANDROID_LOG_ERROR, TAG,
+ "dvr open failed");
+ return;
+ }
while (true) {
last_dvr_write = get_time_ms();
@@ -104,11 +110,11 @@ void VideoPlayer::onNewNALU(const NALU &nalu) {
enqueueNALU(nalu_);
}
-void VideoPlayer::setVideoSurface(JNIEnv *env, jobject surface) {
+void VideoPlayer::setVideoSurface(JNIEnv *env, jobject surface, jint i) {
//reset the parser so the statistics start again from 0
// mParser.reset();
//set the jni object for settings
- videoDecoder.setOutputSurface(env, surface);
+ videoDecoder.setOutputSurface(env, surface, i);
}
@@ -159,6 +165,7 @@ void VideoPlayer::startDvr(JNIEnv *env, jint fd, jint dvr_fmp4_enabled) {
}
void VideoPlayer::stopDvr() {
+ __android_log_print(ANDROID_LOG_DEBUG, TAG, "Stop dvr");
stopProcessing();
}
@@ -204,8 +211,8 @@ JNI_METHOD(void, nativeStop)
}
JNI_METHOD(void, nativeSetVideoSurface)
-(JNIEnv *env, jclass jclass1, jlong videoPlayerN, jobject surface) {
- native(videoPlayerN)->setVideoSurface(env, surface);
+(JNIEnv *env, jclass jclass1, jlong videoPlayerN, jobject surface, jint index) {
+ native(videoPlayerN)->setVideoSurface(env, surface, index);
}
JNI_METHOD(jstring, getVideoInfoString)
diff --git a/app/videonative/src/main/cpp/VideoPlayer.h b/app/videonative/src/main/cpp/VideoPlayer.h
index b07f913..b2f6c5a 100644
--- a/app/videonative/src/main/cpp/VideoPlayer.h
+++ b/app/videonative/src/main/cpp/VideoPlayer.h
@@ -27,7 +27,7 @@ class VideoPlayer {
* Set the surface the decoder can be configured with. When @param surface==nullptr
* It is guaranteed that the surface is not used by the decoder anymore when this call returns
*/
- void setVideoSurface(JNIEnv *env, jobject surface);
+ void setVideoSurface(JNIEnv *env, jobject surface, jint i);
/*
* Start the receiver and ground recorder if enabled
diff --git a/app/videonative/src/main/java/com/openipc/videonative/VideoPlayer.java b/app/videonative/src/main/java/com/openipc/videonative/VideoPlayer.java
index 6048285..06d0382 100644
--- a/app/videonative/src/main/java/com/openipc/videonative/VideoPlayer.java
+++ b/app/videonative/src/main/java/com/openipc/videonative/VideoPlayer.java
@@ -47,7 +47,7 @@ public VideoPlayer(final AppCompatActivity parent) {
public static native void nativeStop(long nativeInstance, Context context);
- public static native void nativeSetVideoSurface(long nativeInstance, Surface surface);
+ public static native void nativeSetVideoSurface(long nativeInstance, Surface surface, int index);
public static native void nativeStartDvr(long nativeInstance, int fd, int fmp4_enabled);
@@ -78,9 +78,9 @@ public void setIVideoParamsChanged(final IVideoParamsChanged iVideoParamsChanged
mVideoParamsChanged = iVideoParamsChanged;
}
- private void setVideoSurface(final @Nullable Surface surface) {
+ private void setVideoSurface(final @Nullable Surface surface, int index) {
verifyApplicationThread();
- nativeSetVideoSurface(nativeVideoPlayer, surface);
+ nativeSetVideoSurface(nativeVideoPlayer, surface, index);
}
public synchronized void start() {
@@ -128,8 +128,8 @@ public void stopDvr() {
* d) Receiving Data from a file in the phone file system
* e) and more
*/
- public void addAndStartDecoderReceiver(Surface surface) {
- setVideoSurface(surface);
+ public void addAndStartDecoderReceiver(Surface surface, int index) {
+ setVideoSurface(surface, index);
}
/**
@@ -137,9 +137,9 @@ public void addAndStartDecoderReceiver(Surface surface) {
* Stop the Decoder
* Free resources
*/
- public void stopAndRemoveReceiverDecoder() {
+ public void stopAndRemoveReceiverDecoder(int index) {
stop();
- setVideoSurface(null);
+ setVideoSurface(null, index);
}
/**
@@ -148,11 +148,11 @@ public void stopAndRemoveReceiverDecoder() {
*
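+     * @param index decoder output surface index (0 = main/left eye, 1 = right eye in VR mode)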
* @return Callback that should be added to SurfaceView.Holder
*/
- public SurfaceHolder.Callback configure1() {
+ public SurfaceHolder.Callback configure1(int index) {
return new SurfaceHolder.Callback() {
@Override
public void surfaceCreated(SurfaceHolder holder) {
- addAndStartDecoderReceiver(holder.getSurface());
+ addAndStartDecoderReceiver(holder.getSurface(), index);
}
@Override
@@ -162,30 +162,30 @@ public void surfaceChanged(SurfaceHolder holder, int format, int width, int heig
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
- stopAndRemoveReceiverDecoder();
+ stopAndRemoveReceiverDecoder(index);
}
};
}
- /**
- * Configure for use with VideoSurfaceHolder (OpenGL)
- * The callback will handle the lifecycle of the video player
- *
- * @return Callback that should be added to VideoSurfaceHolder
- */
- public ISurfaceTextureAvailable configure2() {
- return new ISurfaceTextureAvailable() {
- @Override
- public void surfaceTextureCreated(SurfaceTexture surfaceTexture, Surface surface) {
- addAndStartDecoderReceiver(surface);
- }
-
- @Override
- public void surfaceTextureDestroyed() {
- stopAndRemoveReceiverDecoder();
- }
- };
- }
+// /**
+// * Configure for use with VideoSurfaceHolder (OpenGL)
+// * The callback will handle the lifecycle of the video player
+// *
+// * @return Callback that should be added to VideoSurfaceHolder
+// */
+// public ISurfaceTextureAvailable configure2() {
+// return new ISurfaceTextureAvailable() {
+// @Override
+// public void surfaceTextureCreated(SurfaceTexture surfaceTexture, Surface surface) {
+// addAndStartDecoderReceiver(surface, index);
+// }
+//
+// @Override
+// public void surfaceTextureDestroyed() {
+// stopAndRemoveReceiverDecoder(index);
+// }
+// };
+// }
public long getNativeInstance() {
return nativeVideoPlayer;