Skip to content

How to get the full preview on screen; after taking the photo, the saved photo is changed to landscape and stretched #1828

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
Prabhat-mobilefirst opened this issue May 20, 2025 · 1 comment

Comments

@Prabhat-mobilefirst
Copy link

Initially the preview is small and centered on the screen, and there are black bars at the top and bottom of the camera preview.

Image

Image

But after taking the photo, the preview changes and covers the full screen.

Image

After taking the photo, the photo that is saved to the gallery is stretched into landscape orientation.

Image

How can I solve these issues?

Here is my code

`
package com.nepo.lifeprotectorapp

import android.net.Uri
import android.content.Context
import android.graphics.Bitmap
import android.hardware.Sensor
import android.hardware.SensorEvent
import android.hardware.SensorEventListener
import android.hardware.SensorManager
import android.hardware.camera2.CameraAccessException
import android.hardware.camera2.CameraManager
import android.util.AttributeSet
import android.util.Log
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.widget.FrameLayout
import android.widget.Toast
import com.pedro.library.rtmp.RtmpOnlyAudio
import com.pedro.common.ConnectChecker
import com.pedro.encoder.input.sources.video.Camera1Source
import com.pedro.encoder.input.sources.video.Camera2Source
import com.pedro.library.base.recording.RecordController
import com.pedro.library.rtmp.RtmpStream
import com.pedro.library.util.BitrateAdapter
import com.pedro.library.view.TakePhotoCallback
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.SupervisorJob
import kotlinx.coroutines.cancel
import kotlinx.coroutines.delay
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
import java.text.SimpleDateFormat
import java.util.Date
import java.util.Locale
import kotlin.math.log
import com.facebook.react.bridge.Callback
import android.util.Range

// Custom view hosting a camera preview SurfaceView and driving RTMP
// streaming, recording, and photo capture through the Pedro RTMP stack.
// Fix: Kotlin annotation names are case-sensitive -- the pasted
// `@jvmoverloads` / `@volatile` do not compile and are restored to
// `@JvmOverloads` / `@Volatile`.
class GenericStreamerView @JvmOverloads constructor(
    context: Context,
    attrs: AttributeSet? = null,
    defStyleAttr: Int = 0
) : FrameLayout(context, attrs, defStyleAttr), SurfaceHolder.Callback, ConnectChecker,
SensorEventListener {
private val TAG = "GenericStreamerView"

// True when the next photo capture should use the torch.
@Volatile
private var flashOn = false

// Preview target.
// NOTE(review): a plain SurfaceView stretches content to its own bounds; if
// its aspect ratio differs from the 640x480 stream the preview/photo can look
// stretched -- likely related to the reported issue, confirm.
private var surfaceView: SurfaceView = SurfaceView(context)

// Lazily-built RTMP stream; `this` receives ConnectChecker callbacks.
private val rtmpStream: RtmpStream by lazy {
    RtmpStream(context, this).apply {
        getGlInterface().autoHandleOrientation = true // Let Pedro handle basic orientation
        getStreamClient().setBitrateExponentialFactor(0.5f)
        getStreamClient().forceIncrementalTs(true)
        getStreamClient().setReTries(10) // Set retries
    }
}

// Absolute path of the last photo saved to disk.
private var photoUri: String? = null

// Video settings: 640x480 @ 1.2 Mbps.
private val width = 640
private val height = 480
val vBitrate = 1200 * 1000
private var rotation = 0

// private val sampleRate = 32000
private val sampleRate = 44100

// private val isStereo = true
private val isStereo = false

// private val aBitrate = 128 * 1000 // For stereo
private val aBitrate = 64 * 1000
private var recordPath = ""

// Bitrate adapter used to change the bitrate on the fly depending on bandwidth.
private val bitrateAdapter = BitrateAdapter {
    rtmpStream.setVideoBitrateOnFly(it)
}.apply {
    setMaxBitrate(vBitrate + aBitrate)
}
private var rtmpUrl: String? = null
private var autoStartPreview = true // Start preview when surface is ready

// For audio-only streaming.
private var isRtmpOnlyAudioPrepared = false
private val rtmpOnlyAudio: RtmpOnlyAudio by lazy {
    RtmpOnlyAudio(this).apply {
    }
}
private var rtmpOnlyAudioUrl: String? = null

// ---  Auto Flash State and Sensor Members ---
private var isAutoFlashEnabled = false
private lateinit var sensorManager: SensorManager
private var lightSensor: Sensor? = null
// Below this ambient light level (lux) the auto-flash logic turns the torch on.
private val FLASH_THRESHOLD_LUX = 30f

// Guards against overlapping flash-photo sequences.
@Volatile
private var isTakingPhoto = false // Keep this flag

// --- Photo Flash Settings ---
private val PHOTO_FLASH_DELAY_MS = 1200L // Milliseconds delay for flash to illuminate

// Scope for UI-side async work; cancelled permanently in onDetachedFromWindow.
private val viewScope = CoroutineScope(Dispatchers.Main + SupervisorJob())

// --- State Variables ---
@Volatile
private var isStreaming = false
@Volatile
private var isRecording = false
@Volatile
private var isPreviewing = false
@Volatile
private var isPrepared = false
// Wires up the preview surface and the ambient-light sensor.
init {

    // Add the preview SurfaceView to the layout, filling this FrameLayout.
    // NOTE(review): original comment said "OpenGlView", but this is a plain
    // SurfaceView -- it will not letterbox a mismatched aspect ratio.
    addView(surfaceView, LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT))
    surfaceView.holder.addCallback(this)

    // Light sensor used by the auto-flash feature; may be absent on some devices.
    sensorManager = context.getSystemService(Context.SENSOR_SERVICE) as SensorManager
    lightSensor = sensorManager.getDefaultSensor(Sensor.TYPE_LIGHT)
    if (lightSensor == null) {
        Log.w(TAG, "No light sensor available, Auto Flash will be disabled.")
        // You might want a callback to inform the containing Activity/Fragment
        // so it can disable the UI button for auto flash.
    }
}

// Re-initializes the stream, preview, and light-sensor listener whenever the
// view (re)enters the window.
override fun onAttachedToWindow() {
    super.onAttachedToWindow()
    // Make sure the stream is prepared before anything else.
    if (!isPrepared) prepareStreamInternal()

    val surfaceReady = surfaceView.holder.surface.isValid
    if (autoStartPreview && surfaceReady && isPrepared && !isPreviewing) {
        startPreviewInternal()
    }
    // Restore the light-sensor listener if auto flash was active when detached.
    if (isAutoFlashEnabled && lightSensor != null) {
        sensorManager.registerListener(this, lightSensor, SensorManager.SENSOR_DELAY_NORMAL)
        Log.d(TAG, "Light sensor listener registered on attach.")
    }
}

// Stores the RTMP endpoint used by startStream(); null/empty disables streaming.
fun setRtmpUrl(url: String?) {
    rtmpUrl = url
    Log.d(TAG, "RTMP URL set to: $url")
}

// Toggles automatic preview startup; starts immediately when everything is ready.
fun setAutoStartPreview(autoStart: Boolean) {
    autoStartPreview = autoStart
    val canStartNow = autoStart && isAttachedToWindow &&
        surfaceView.holder.surface.isValid && isPrepared && !isPreviewing
    if (canStartNow) {
        startPreviewInternal()
    }
}

// Remembers whether the next photo capture should use the torch.
fun setFlash(flashMode: Boolean) {
    flashOn = flashMode
}

// Dispatches a photo request to the flash or plain capture path based on the
// flashOn flag set via setFlash().
fun onTakePhoto(callback: Callback?) {
    when {
        flashOn -> takePhotoWithFlash(callback)
        else -> takePhoto(callback)
    }
}

/**
 * Sets an absolute camera zoom level.
 *
 * Camera1 zoom is index-based (Int); Camera2 zoom is a magnification factor
 * (Float). Values outside the supported range are logged and ignored.
 *
 * Fix: removed the unreachable `zoom !is Int` guard -- `zoom` is declared as
 * `Int` (result of `toInt()`), so the check could never be true.
 */
fun setCameraZoom(zoomLevel: Number) {
    Log.d(TAG, "setCameraZoom: zoomLevel $zoomLevel")
    val zoom = zoomLevel.toInt()
    Log.d(TAG, "setCameraZoom:zoom $zoom")
    if (!rtmpStream.isOnPreview && !rtmpStream.isStreaming) {
        Log.w(TAG, "Cannot set zoom: Stream is not previewing or streaming.")
        return
    }

    val source = rtmpStream.videoSource ?: run {
        Log.w(TAG, "Cannot set zoom: Video source is null.")
        return
    }

    try {
        when (source) {
            is Camera1Source -> {
                // Camera1 exposes a zoom index range; set the index if in range.
                val maxZoomIndex = source.getZoomRange()
                if (zoom in maxZoomIndex) {
                    source.setZoom(zoom)
                    Log.i(
                        TAG,
                        "Camera1Source: Zoom set to index $zoom (Max Index: $maxZoomIndex)"
                    )
                } else {
                    Log.w(
                        TAG,
                        "Camera1Source: Zoom index $zoom out of range (0-$maxZoomIndex)."
                    )
                }
            }

            is Camera2Source -> {
                val floatZoom = zoomLevel.toFloat()
                // Null range means zoom is not supported on this camera.
                val zoomRange: Range<Float>? = source.getZoomRange()
                if (zoomRange == null) {
                    Log.w(
                        TAG,
                        "Camera2Source: Zoom range not available (zoom might not be supported)."
                    )
                    return
                }
                val minZoomSupported = zoomRange.lower
                val maxZoomSupported = zoomRange.upper

                if (floatZoom >= minZoomSupported && floatZoom <= maxZoomSupported) {
                    source.setZoom(floatZoom)
                    Log.i(
                        TAG,
                        "Camera2Source: Zoom set to level $floatZoom (Range: $minZoomSupported - $maxZoomSupported)"
                    )
                } else {
                    Log.w(
                        TAG,
                        "Camera2Source: Zoom level $floatZoom out of range ($minZoomSupported - $maxZoomSupported)."
                    )
                }
            }

            else -> {
                Log.w(
                    TAG,
                    "Zoom not supported for this video source type: ${source::class.java.simpleName}"
                )
            }
        }
    } catch (e: Exception) {
        Log.e(TAG, "Error setting camera zoom", e)
    }
}

/**
 * Increases zoom by [step] (Int index steps for Camera1, Float magnification
 * steps for Camera2); defaults to 1 index / 0.1x respectively.
 *
 * Bug fix: the Camera1 branch previously clamped the new index against
 * `getZoomRange().lower` (the MINIMUM index) while calling it the max, so
 * zooming in could never advance. It now clamps against `upper`.
 */
fun zoomIn(step: Number? = null) {
    val source = rtmpStream.videoSource ?: return
    when (source) {
        is Camera1Source -> {
            val currentZoomIndex = source.getZoom() // Current zoom INDEX
            val maxZoomIndex = source.getZoomRange().upper // Max zoom INDEX
            val zoomStep = if (step is Int) step.coerceAtLeast(1) else 1
            val newZoomIndex = (currentZoomIndex + zoomStep).coerceAtMost(maxZoomIndex)
            if (newZoomIndex != currentZoomIndex) {
                setCameraZoom(newZoomIndex)
            }
        }

        is Camera2Source -> {
            val zoomRange = source.getZoomRange() ?: return
            val currentZoom = source.getZoom() // Current zoom magnification
            val maxZoom = zoomRange.upper // Max zoom magnification
            val zoomStep = if (step is Float) step.coerceAtLeast(0.01f) else 0.1f
            val newZoom = (currentZoom + zoomStep).coerceAtMost(maxZoom)
            if (newZoom > currentZoom && (newZoom - currentZoom) > 0.001f) {
                setCameraZoom(newZoom)
            }
        }

        else -> Log.w(TAG, "Zoom In not supported for this source.")
    }
}

/**
 * Decreases zoom by [step] (Int index steps for Camera1, Float magnification
 * steps for Camera2); defaults to 1 index / 0.1x respectively.
 */
fun zoomOut(step: Number? = null) {
    val source = rtmpStream.videoSource ?: return
    when (source) {
        is Camera1Source -> {
            // Camera1 zoom is index based; the minimum index is always 0.
            val current = source.getZoom()
            val decrement = (step as? Int)?.coerceAtLeast(1) ?: 1
            val target = (current - decrement).coerceAtLeast(0)
            if (target != current) {
                setCameraZoom(target)
            }
        }

        is Camera2Source -> {
            val range = source.getZoomRange() ?: return
            val current = source.getZoom()
            val decrement = (step as? Float)?.coerceAtLeast(0.01f) ?: 0.1f
            val target = (current - decrement).coerceAtLeast(range.lower)
            // Only apply a meaningful decrease.
            if (current > target && (current - target) > 0.001f) {
                setCameraZoom(target)
            }
        }

        else -> Log.w(TAG, "Zoom Out not supported for this source.")
    }
}

/**
 * Starts streaming to the configured RTMP URL, preparing the stream and
 * preview first if needed. Logs and returns early on any precondition failure.
 */
fun startStream() {
    if (isStreaming) {
        Log.w(TAG, "Start stream ignored: Already streaming.")
        return
    }
    if (rtmpUrl.isNullOrEmpty()) {
        Log.e(TAG, "Start stream failed: RTMP URL is not set.")
        return
    }
    if (!isPrepared && !prepareStreamInternal()) {
        Log.e(TAG, "Start stream failed: Preparation failed.")
        return
    }
    if (!isPreviewing) {
        startPreviewInternal()
        if (!isPreviewing) {
            Log.e(TAG, "Start stream failed: Could not start preview.")
            return
        }
    }
    Log.d(TAG, "Attempting to start stream to: $rtmpUrl")
    rtmpStream.startStream(rtmpUrl!!)
}

/*fun stopStream() {
    if (!isStreaming) {
        Log.w(TAG, "Stop stream ignored: Not streaming.")
        return
    }
    Log.d(TAG, "Attempting to stop stream.")
    rtmpStream.stopStream()
}*/
// Stops the RTMP stream if the client reports it as active; otherwise a no-op.
fun stopStream() {
    if (!rtmpStream.isStreaming) {
        Log.d(TAG, "stopStream: not streaming")
        return
    }
    rtmpStream.stopStream()
    isStreaming = false
    Log.d(TAG, "stopStream: stream stopped")
}

/**
 * Starts recording the stream to a timestamped MP4 under PathUtils.getRecordPath().
 * Prepares the stream and preview first if needed. The isRecording flag is
 * driven by the RecordController status callback rather than set eagerly here.
 */
fun startVideoRecord() {
    if (isRecording) {
        Log.w(TAG, "Start record ignored: Already recording.")
        return
    }
    if (!isPrepared) {
        if (!prepareStreamInternal()) {
            Log.e(TAG, "Start record failed: Preparation failed.")
            return
        }
    }
    if (!isPreviewing) {
        startPreviewInternal()
        if (!isPreviewing) {
            Log.e(TAG, "Start record failed: Could not start preview.")
            return
        }
    }

    // Build the output path: <recordDir>/yyyyMMdd_HHmmss.mp4
    val folder = PathUtils.getRecordPath()
    if (!folder.exists() && !folder.mkdirs()) {
        Log.e(TAG, "Failed to create recording directory: ${folder.absolutePath}")
        return
    }
    val sdf = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault())
    recordPath = "${folder.absolutePath}/${sdf.format(Date())}.mp4"
    Log.d(TAG, "Attempting to start recording to: $recordPath")

    try {
        rtmpStream.startRecord(recordPath) { status ->
            viewScope.launch { // Ensure event sending is on Main thread
                when (status) {
                    // RECORDING: first transition flips our flag on.
                    RecordController.Status.RECORDING -> {
                        if (!isRecording) {
                            isRecording = true
                            Log.i(TAG, "Recording started successfully.")
                        }
                    }

                    // STOPPED while we believed we were recording means the
                    // controller stopped on its own (error, disk full, ...).
                    RecordController.Status.STOPPED -> {
                        if (isRecording) {
                            isRecording = false
                            Log.i(TAG, "Recording stopped unexpectedly via status update.")
                        }
                    }

                    else -> {} // Ignore STARTING, PAUSED for now
                }
            }
        }
    } catch (e: Exception) {
        Log.e(TAG, "Failed to initiate recording", e)
        isRecording = false
    }
}

/**
 * Stops an active recording, clears the recording flag, and registers the
 * finished file with the media gallery.
 */
fun stopVideoRecord() {
    if (!isRecording) {
        Log.w(TAG, "Stop record ignored: Not recording.")
        return
    }
    Log.d(TAG, "Attempting to stop recording.")
    try {
        rtmpStream.stopRecord()
        // The early-return guard above guarantees we were recording here.
        isRecording = false
        Log.i(TAG, "Recording stopped. File potentially saved to: $recordPath")
        PathUtils.updateGallery(context, recordPath)
    } catch (e: Exception) {
        Log.e(TAG, "Error stopping recording", e)
        isRecording = false
    }
}

// --- Take Photo Implementation ---
/**
 * Captures the current GL frame as a photo (no flash) and saves it via
 * saveBitmapToFileAsync. The React callback receives (null, path) on success
 * or ("error", message) if initiating the capture throws.
 */
private fun takePhoto(reactCallback: Callback?) {
    if (!isPreviewing) {
        // NOTE(review): unlike takePhotoWithFlash this does NOT return here --
        // the capture is still attempted. Confirm whether the missing `return`
        // is intentional (isPreviewing is not set in surfaceCreated, so this
        // warning may fire even while a preview is actually running).
        Log.w(TAG, "Cannot take photo: Preview not running.")
    }
    val callback = object : TakePhotoCallback {
        override fun onTakePhoto(bitmap: Bitmap) {
            Log.d(TAG, "Photo captured successfully via GlInterface.")
            saveBitmapToFileAsync(bitmap, reactCallback)
        }
    }
    try {
        Log.d(TAG, "Requesting photo capture via GlInterface...")
        // NOTE(review): if getGlInterface() is null the React callback is never
        // invoked -- the JS caller would hang. Verify nullability of the API.
        rtmpStream.getGlInterface()?.takePhoto(callback)
    } catch (e: Exception) {
        Log.e(TAG, "Error initiating takePhoto via GlInterface", e)
        reactCallback?.invoke("error", e.message)

    }

}

// --- Take Photo WITH Flash Assist ---
/**
 * Captures a photo with torch assist: turns the flash on, waits
 * PHOTO_FLASH_DELAY_MS for it to illuminate, requests the capture, then turns
 * the flash off in the finally block. Guarded by [isTakingPhoto], which is
 * expected to be reset by saveBitmapToFileAsync after the save attempt.
 */
fun takePhotoWithFlash(reactCallback: Callback?) {
    if (!isPreviewing) {
        Log.w(TAG, "Cannot take photo with flash: Preview not running.");
        return
    }
    if (isTakingPhoto) {
        Log.w(TAG, "Take Photo with Flash ignored: Already in progress."); return
    }

    // Flash control only works for the Camera1/Camera2 sources.
    val source = rtmpStream.videoSource
    val flashControlPossible = source is Camera1Source || source is Camera2Source
    if (!flashControlPossible) {
        Log.w(TAG, "Cannot take photo with flash: Flash control not supported.");
        Toast.makeText(context, "Flash not available", Toast.LENGTH_SHORT).show();
        return
    }

    isTakingPhoto = true
    Log.d(TAG, "Starting takePhotoWithFlash sequence...")

    viewScope.launch { // Use the existing CoroutineScope
        var flashTurnedOnSuccessfully = false
        try {
            // --- Turn Flash ON ---
            withContext(Dispatchers.Main) { // Ensure camera ops on main thread if needed
                Log.d(TAG, "Take Photo Flash: Turning flash ON.")
                flashTurnedOnSuccessfully = setFlashState(true)
                if (!flashTurnedOnSuccessfully) {
                    Log.e(TAG, "Take Photo Flash: Failed to turn flash ON.")
                    // Proceed anyway; the photo may still work without flash.
                }
            }

            // --- Delay and Capture ---
            // Only wait for illumination if the flash actually turned on.
            if (flashTurnedOnSuccessfully) {
                delay(PHOTO_FLASH_DELAY_MS)
            } else {
                Log.w(
                    TAG,
                    "Take Photo Flash: Proceeding without delay as flash failed to turn on."
                )
            }

            Log.d(TAG, "Take Photo Flash: Requesting photo capture via GlInterface...")
            // NOTE(review): takePhoto(...) returns Unit (non-null), so the
            // `?: run { ... }` error branch below can never execute -- the
            // IOException path is dead code. Confirm the intended null check
            // (probably `getGlInterface()?.takePhoto { ... }`).
            rtmpStream.getGlInterface().takePhoto { bitmap ->
                Log.d(TAG, "Take Photo Flash: Photo captured successfully via GlInterface.")


                saveBitmapToFileAsync(bitmap, reactCallback)

                // Save photo (will reset flag and send event)
                // The 'finally' block below will turn the flash off *after* capture request
            } ?: run {
                // Handle case where getGlInterface() is null or takePhoto fails immediately
                Log.e(TAG, "Take Photo Flash: Failed to initiate takePhoto via GlInterface.")
                throw IOException("Failed to get GL interface or call takePhoto")
            }

        } catch (e: Exception) {
            Log.e(TAG, "Error during takePhotoWithFlash sequence (before saving)", e)
            // Ensure flash is turned off and flag is reset even on error *before* saving starts
            withContext(Dispatchers.Main.immediate) { // Use immediate to run quickly
                Log.d(TAG, "Take Photo Flash: Turning flash OFF after error.")
                setFlashState(false)
            }
            isTakingPhoto = false // Reset flag here if saving didn't start
        } finally {
            // --- Turn Flash OFF (Always runs *after* takePhoto request returns or throws) ---
            // This might run slightly before the photo is fully processed by the camera,
            // but usually after the capture command is accepted.
            withContext(Dispatchers.Main.immediate) {
                if (flashTurnedOnSuccessfully) { // Only turn off if we successfully turned it on
                    Log.d(TAG, "Take Photo Flash: Turning flash OFF in finally block.")
                    setFlashState(false)
                } else {
                    Log.d(
                        TAG,
                        "Take Photo Flash: Skipping flash OFF in finally block (was not turned on)."
                    )
                }
            }
            // DO NOT reset isTakingPhoto flag here - let saveBitmapToFileAsync handle it
            // after the save attempt.
        }
    }
}

// --- saveBitmapToFileAsync (Handles flag reset and event sending) ---
/**
 * Saves [bitmap] as a timestamped JPEG under PathUtils.getPhotoPath() on the
 * IO dispatcher, registers it with the gallery, and reports the result to
 * [callback] as (error, path).
 *
 * Fixes: the React callback is now also invoked on failure paths (previously
 * it was only called on success, leaving JS callers hanging), and
 * [isTakingPhoto] is reset here -- takePhotoWithFlash explicitly defers that
 * reset to this method, but the original never performed it, so every flash
 * photo after the first was blocked.
 */
private fun saveBitmapToFileAsync(bitmap: Bitmap, callback: Callback?) {
    viewScope.launch(Dispatchers.IO) {
        try {
            val photoDir: File = PathUtils.getPhotoPath()
            if (!photoDir.exists() && !photoDir.mkdirs()) {
                Log.e(TAG, "Failed to create photo directory: ${photoDir.absolutePath}")
                withContext(Dispatchers.Main) {
                    callback?.invoke("error", "Failed to create photo directory")
                }
                return@launch
            }

            val sdf = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault())
            val fileName = "IMG_${sdf.format(Date())}.jpg"
            val photoFile = File(photoDir, fileName)
            val filePath = photoFile.absolutePath

            Log.d(TAG, "Attempting to save photo to: $filePath")
            var success = false
            try {
                FileOutputStream(photoFile).use { out ->
                    bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out)
                    out.flush()
                }
                Log.d(TAG, "Bitmap saved successfully.")
                PathUtils.updateGallery(context, filePath)
                success = true
            } catch (e: IOException) {
                Log.e(TAG, "Error saving bitmap to file", e)
            }

            if (success) {
                photoUri = filePath
                withContext(Dispatchers.Main) {
                    Log.d(TAG, "saveBitmapToFileAsync: ${photoFile.absolutePath}")
                    callback?.invoke(null, photoFile.absolutePath) // success
                    Toast.makeText(context, "Saved to gallery", Toast.LENGTH_SHORT).show()
                }
            } else {
                withContext(Dispatchers.Main) {
                    callback?.invoke("error", "Failed to save photo")
                }
            }
        } finally {
            // Allow the next takePhotoWithFlash() sequence to run.
            isTakingPhoto = false
        }
    }
}

/*private fun saveBitmapToFileAsync(bitmap: Bitmap) {
    viewScope.launch(Dispatchers.IO) {
        val photoDir: File = PathUtils.getPhotoPath()
        if (!photoDir.exists() && !photoDir.mkdirs()) {
            Log.e(TAG, "Failed to create photo directory: ${photoDir.absolutePath}")
            return@launch
        }

        val sdf = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.getDefault())
        val fileName = "IMG_${sdf.format(Date())}.jpg"
        val photoFile = File(photoDir, fileName)
        val filePath = photoFile.absolutePath

        Log.d(TAG, "Attempting to save photo to: $filePath")
        var success = false
        try {
            FileOutputStream(photoFile).use { out ->
                bitmap.compress(Bitmap.CompressFormat.JPEG, 90, out)
                out.flush()
            }
            Log.d(TAG, "Bitmap saved successfully.")
            PathUtils.updateGallery(context, filePath)
            success = true
        } catch (e: IOException) {
            Log.e(TAG, "Error saving bitmap to file", e)
        }

        if (success) {
            withContext(Dispatchers.Main) {
                Toast.makeText(context, "Saved to gallery", Toast.LENGTH_SHORT).show()
            }
        }
    }
}*/

//switch camera
// Inside StreamerView class

// Returns the number of cameras reported by the system CameraManager,
// or 0 when the camera service cannot be queried.
fun getCameraCount(): Int {
    val manager = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
    return try {
        manager.cameraIdList.size
    } catch (e: CameraAccessException) {
        e.printStackTrace()
        0
    }
}

/**
 * Switches to the camera with the given facing ordinal (typically 0 = back,
 * 1 = front) by toggling, but only when the device has more than one camera
 * and the current facing differs from the desired one.
 */
fun switchCamera(desiredFacing: Int) {
    if (!rtmpStream.isOnPreview && !rtmpStream.isStreaming) {
        Log.w(TAG, "Cannot switch camera: Stream is not previewing or streaming.")
        return
    }

    val source = rtmpStream.videoSource ?: run {
        Log.w(TAG, "Cannot switch camera: Video source is null.")
        return
    }

    try {
        val cameraCount = getCameraCount()
        val canSwitch = cameraCount > 1
        Log.d(TAG, "Number of cameras detected: $cameraCount. Can switch: $canSwitch")

        if (!canSwitch) {
            Log.w(TAG, "Switch Camera: Only one camera available.")
            return
        }

        // Ordinal of the currently active facing, per source type.
        val currentFacingOrdinal = when (source) {
            is Camera1Source -> source.getCameraFacing().ordinal
            is Camera2Source -> source.getCameraFacing().ordinal
            else -> {
                Log.w(TAG, "Switch camera not supported for this video source type.")
                return
            }
        }

        Log.d(
            TAG,
            "Switch Camera Request: Desired=$desiredFacing, Current=$currentFacingOrdinal"
        )

        if (currentFacingOrdinal != desiredFacing) {
            Log.i(TAG, "Attempting to switch camera (toggle) to reach desired facing...")
            when (source) {
                is Camera1Source -> source.switchCamera()
                is Camera2Source -> source.switchCamera()
            }
        } else {
            Log.i(
                TAG,
                "Camera already facing the desired direction ($desiredFacing). No switch needed."
            )
        }
    } catch (e: Exception) {
        Log.e(TAG, "Error during camera switch process", e)
    }
}


/**
 * Manually sets the torch to [state], disabling auto-flash mode first.
 *
 * Fixes: the log previously claimed the flash was being set to
 * `!currentFlashOn` while the code actually applies [state]; the message now
 * reports [state]. Also dropped the redundant safe call on the non-null
 * `rtmpStream`.
 */
fun toggleFlash(state: Boolean) {
    disableAutoFlash() // Disable auto mode on manual toggle

    val source = rtmpStream.videoSource ?: return

    // Read the current torch state; kept so unsupported sources (else branch)
    // and read failures still abort before attempting to change the torch.
    var currentFlashOn = false
    try {
        when (source) {
            is Camera1Source -> currentFlashOn = source.isLanternEnabled()
            is Camera2Source -> currentFlashOn = source.isLanternEnabled()
            else -> {
                return
            }
        }
    } catch (e: Exception) {
        Log.e(TAG, "Error getting current flash state", e)
        return
    }

    Log.d(TAG, "Manual Toggle Flash: Current state=$currentFlashOn. Setting to $state")
    if (setFlashState(state)) {
        // Optional: Update UI immediately based on the *intended* new state.
        // The UI update is better handled by the container based on the
        // return value or a callback.
    }
}


//when photo is it will automatically on flash based on condition
/**
 * Toggles auto-flash mode: when enabled, the light sensor drives the torch
 * via applyAutoFlashLogic(); when disabled, the listener is unregistered.
 * Shows Toasts to reflect the new mode; no-op when no light sensor exists.
 */
fun toggleAutoFlashMode() {
    val sensor = lightSensor
    if (sensor == null) {
        post {
            Toast.makeText(
                context,
                "Auto Flash not available (no light sensor)",
                Toast.LENGTH_SHORT
            ).show()
        }
        return
    }

    isAutoFlashEnabled = !isAutoFlashEnabled

    if (!isAutoFlashEnabled) {
        Log.i(TAG, "Auto Flash DISABLED")
        post { Toast.makeText(context, "Auto Flash OFF", Toast.LENGTH_SHORT).show() }
        sensorManager.unregisterListener(this)
        // Button state updates belong to the hosting Activity/Fragment.
        return
    }

    Log.i(TAG, "Auto Flash ENABLED")
    post { Toast.makeText(context, "Auto Flash ON", Toast.LENGTH_SHORT).show() }
    val registered = sensorManager.registerListener(
        this,
        sensor,
        SensorManager.SENSOR_DELAY_NORMAL
    )
    if (!registered) {
        // Roll the mode back if the system refused the listener.
        Log.e(TAG, "Failed to register light sensor listener!")
        isAutoFlashEnabled = false
        post {
            Toast.makeText(context, "Failed to enable Auto Flash", Toast.LENGTH_SHORT)
                .show()
        }
    }
}

// Internal method to disable auto flash when manual flash is used
// Turns auto-flash mode off (and unregisters the sensor listener) when the
// user takes a manual flash action; no-op if the mode is already off.
private fun disableAutoFlash() {
    if (!isAutoFlashEnabled) return
    Log.d(TAG, "Disabling Auto Flash due to manual action.")
    isAutoFlashEnabled = false
    sensorManager.unregisterListener(this)
}

// Internal logic application
// Drives the torch from the ambient light reading: on below the threshold,
// off above it. Only issues a change when the current state differs.
private fun applyAutoFlashLogic(lux: Float) {
    val desiredFlashOn = lux < FLASH_THRESHOLD_LUX

    val source = rtmpStream.videoSource
    val currentFlashOn = try {
        when (source) {
            is Camera1Source -> source.isLanternEnabled()
            is Camera2Source -> source.isLanternEnabled()
            else -> return // Flash not controllable or source not ready
        }
    } catch (e: Exception) {
        Log.e(TAG, "Error checking current flash state in auto flash", e)
        return // Avoid trying to change state if we can't read it
    }

    if (desiredFlashOn != currentFlashOn) {
        Log.i(
            TAG,
            "Auto Flash: Light=$lux lux. Threshold=$FLASH_THRESHOLD_LUX. Changing flash to ${if (desiredFlashOn) "ON" else "OFF"}"
        )
        setFlashState(desiredFlashOn)
    }
}

// Helper function to set flash to a specific state (ON or OFF)
// Make this public if the container needs to set flash directly sometimes
/**
 * Sets the torch to a specific state. Returns true when the operation was
 * supported and attempted, false when the source is missing, unsupported,
 * or the camera call throws.
 */
fun setFlashState(enable: Boolean): Boolean {
    val source = rtmpStream.videoSource
    if (source == null) {
        Log.w(TAG, "Cannot set flash state: Video source is null")
        return false
    }

    return try {
        val supported = when (source) {
            is Camera1Source -> {
                if (enable) source.enableLantern() else source.disableLantern()
                true
            }

            is Camera2Source -> {
                if (enable) source.enableLantern() else source.disableLantern()
                true
            }

            else -> {
                Log.w(TAG, "setFlashState: Flash control not supported for this video source.")
                false
            }
        }
        if (supported) {
            Log.d(TAG, "Flash state set to $enable")
        }
        supported
    } catch (e: Exception) {
        Log.e(TAG, "Error setting flash state to $enable via service stream", e)
        false
    }
}

// Surface is ready: start the Pedro preview on our SurfaceView.
// NOTE(review): this bypasses startPreviewInternal(), so the isPreviewing
// flag is never set true here -- verify against the guards in startStream /
// takePhoto that read isPreviewing.
override fun surfaceCreated(holder: SurfaceHolder) {
    if (!rtmpStream.isOnPreview) rtmpStream.startPreview(surfaceView)

}

// Keep the GL preview resolution in sync with the surface size.
// NOTE(review): the stream is prepared at 640x480 while the surface is
// usually a different aspect ratio; with a plain SurfaceView this mismatch
// plausibly explains the stretched preview/photo from the issue -- confirm.
override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
    rtmpStream.getGlInterface().setPreviewResolution(width, height)
}

// Surface gone: stop the preview (stream resources stay alive).
override fun surfaceDestroyed(holder: SurfaceHolder) {

// if (rtmpStream.isOnPreview) rtmpStream.stopPreview()
stopPreviewInternal()
// releaseStreamResources() // Ensure all resources are cleaned up

}

// --- ConnectChecker callbacks (invoked by the Pedro RTMP client) ---

// Authentication rejected by the server: stop streaming.
override fun onAuthError() {
    isStreaming = false
    rtmpStream.stopStream()
}

// Authentication accepted; nothing extra to do.
override fun onAuthSuccess() {

}

// Connection dropped/failed: ask the client to retry within 5s; if the retry
// budget is exhausted, give up and stop the stream.
// NOTE(review): isStreaming is not cleared here -- presumably onDisconnect
// fires after stopStream(); confirm with the library's callback ordering.
override fun onConnectionFailed(reason: String) {
    if (rtmpStream.getStreamClient().reTry(5000, reason, null)) {

// toast("Retry")
} else {
rtmpStream.stopStream()
// bStartStop.setImageResource(R.drawable.stream_icon)
// toast("Failed: $reason")
}
}

// Connection attempt initiated; nothing to do yet.
override fun onConnectionStarted(url: String) {

}

// Connected: mark streaming and reset the adaptive-bitrate state.
override fun onConnectionSuccess() {
    isStreaming = true
    bitrateAdapter.reset()
}

// Disconnected: clear the streaming flag.
override fun onDisconnect() {
    isStreaming = false
}

// Light-sensor updates feed the auto-flash logic when the mode is enabled.
override fun onSensorChanged(event: SensorEvent?) {
    val sensorEvent = event ?: return
    if (sensorEvent.sensor?.type != Sensor.TYPE_LIGHT) return

    val lux = sensorEvent.values[0]
    // Log less frequently if needed: if (System.currentTimeMillis() % 1000 < 50) Log.d(TAG, ...)
    Log.d(TAG, "Light sensor value: $lux lux")
    if (isAutoFlashEnabled) {
        applyAutoFlashLogic(lux)
    }
}

// Required by SensorEventListener; accuracy changes are not relevant here.
override fun onAccuracyChanged(sensor: Sensor?, accuracy: Int) {

}

/**
 * Tears everything down when the view leaves the window: unregisters the
 * light-sensor listener, removes the surface callback, releases stream
 * resources, and cancels the coroutine scope.
 *
 * Fix: removed the `surfaceView != null` check -- `surfaceView` is a
 * non-nullable property, so the condition was always true.
 * NOTE(review): `viewScope.cancel()` is permanent; if this view is ever
 * re-attached, coroutine-based work (photo saving, record status updates)
 * will no longer run. Confirm the view is never reused after detach.
 */
override fun onDetachedFromWindow() {
    super.onDetachedFromWindow()
    if (lightSensor != null) {
        sensorManager.unregisterListener(this)
        Log.d(TAG, "Light sensor listener unregistered on detach.")
    }
    surfaceView.holder.removeCallback(this)
    releaseStreamResources()
    viewScope.cancel()
}

// Feeds the measured bitrate into the adapter so the video bitrate adapts
// to network congestion reported by the stream client.
override fun onNewBitrate(bitrate: Long) {
    super.onNewBitrate(bitrate)
    bitrateAdapter.adaptBitrate(bitrate, rtmpStream.getStreamClient().hasCongestion())

}

// Renamed for clarity - focuses on releasing Pedro stream resources
/**
 * Releases all Pedro stream resources: stops streaming, recording and
 * preview, and unregisters the light sensor listener. Safe to call
 * multiple times; each step is guarded by its own state flag.
 */
fun releaseStreamResources() {
    Log.d(TAG, "Releasing stream resources...")
    if (isStreaming) {
        try {
            rtmpStream.stopStream()
        } catch (e: Exception) {
            Log.e(TAG, "Exception stopping stream", e)
        }
        isStreaming = false
    }
    if (isRecording) {
        try {
            rtmpStream.stopRecord()
        } catch (e: Exception) {
            Log.e(TAG, "Exception stopping record", e)
        }
        isRecording = false
    }
    stopPreviewInternal() // Handles the isOnPreview check internally

    // Fixed: the original nested the same `lightSensor != null` check twice,
    // and its log message wrongly said "on detach".
    if (lightSensor != null) {
        sensorManager.unregisterListener(this)
        Log.d(TAG, "Light sensor listener unregistered during resource release.")
    }
    isPrepared = false
    Log.d(TAG, "Stream resources released.")
}

// Made public for potential command from RN
// Public entry point (callable as a command from RN); delegates to the
// internal preparation routine.
fun prepareStream(): Boolean = prepareStreamInternal()

/**
 * Prepares video + audio encoders for the RTMP stream.
 * Aborts (returning the current prepared state) when streaming, recording
 * or previewing is already in progress; skips work when already prepared.
 */
private fun prepareStreamInternal(): Boolean {
    stopAudioOnlyStream()
    if (isStreaming || isRecording || isPreviewing) {
        Log.w(TAG, "Prepare aborted: Already active.")
        return isPrepared
    }
    if (isPrepared) {
        Log.d(TAG, "Prepare skipped: Already prepared.")
        return true
    }

    // Both encoders must prepare successfully; any exception counts as failure.
    isPrepared = try {
        rtmpStream.prepareVideo(width, height, vBitrate) &&
            rtmpStream.prepareAudio(sampleRate, isStereo, aBitrate)
    } catch (e: Exception) {
        Log.e(TAG, "Error preparing stream", e)
        false
    }

    if (isPrepared) {
        Log.d(TAG, "Stream prepared successfully.")
    } else {
        Log.e(TAG, "Stream preparation failed.")
    }
    return isPrepared
}


// Made public for potential command from RN
// Public entry point (callable as a command from RN); delegates to the
// internal preview routine.
fun startPreview() = startPreviewInternal()

/*private fun startPreviewInternal() {

    if (isPreviewing) return

    if (!isPrepared) {
        Log.w(TAG,"Cannot start preview: Stream not prepared.")
        if (!prepareStreamInternal()) {
            return
        }
    }
    if (!isPreviewing) {
        try {
            Log.d(TAG, "Starting preview...")
            rtmpStream.startPreview(surfaceView)
            isPreviewing = true
            Log.d(TAG, "Preview started.")
        } catch (e: Exception) {
            Log.e(TAG, "Error starting preview", e)
            isPreviewing = false
        }
    } else {
        Log.d(TAG, "Preview already started.")
    }
}*/
/**
 * Starts the camera preview on [surfaceView], preparing the stream first
 * if needed. Syncs the local [isPreviewing] flag when the library already
 * has a preview running.
 */
private fun startPreviewInternal() {
    // Library already previewing: just sync our flag.
    if (rtmpStream.isOnPreview) {
        isPreviewing = true
        return
    }
    if (isPreviewing) return

    if (!isPrepared) {
        Log.w(TAG, "Cannot start preview: Stream not prepared.")
        if (!prepareStreamInternal()) {
            return
        }
    }
    // Fixed: the original wrapped this in `if (!isPreviewing)` with an else
    // branch, but the early return above makes that condition always true —
    // the else branch was dead code.
    try {
        Log.d(TAG, "Starting preview...")
        rtmpStream.startPreview(surfaceView)
        isPreviewing = true
        Log.d(TAG, "Preview started.")
    } catch (e: Exception) {
        Log.e(TAG, "Error starting preview", e)
        isPreviewing = false
    }
}

// Made public for potential command from RN
// Public entry point (callable as a command from RN); delegates to the
// internal stop routine.
fun stopPreview() = stopPreviewInternal()

// Stops any active streaming/recording on the main RTMP pipeline.
private fun releaseRtmResource() {
    if (rtmpStream.isStreaming) rtmpStream.stopStream()
    if (rtmpStream.isRecording) rtmpStream.stopRecord()
}

/**
 * Stops the camera preview if the library reports one active, and always
 * clears the local [isPreviewing] flag.
 */
private fun stopPreviewInternal() {
    if (rtmpStream.isOnPreview) {
        try {
            Log.d(TAG, "Stopping preview...")
            rtmpStream.stopPreview()
            Log.d(TAG, "Preview stopped.")
        } catch (e: Exception) {
            Log.e(TAG, "Error stopping preview", e)
        }
    }
    // Fixed: the original only cleared the flag inside the isOnPreview branch
    // (leaving it stale otherwise) and declared an unused `wasPreviewing` local.
    isPreviewing = false
}


// Prepares the audio-only RTMP pipeline after shutting down the main
// video stream/recording. (Name typo "Audion" is kept: it is part of the
// public API and callers depend on it.)
fun prepareAudionOnlyStream(): Boolean {
    stopStream()
    releaseRtmResource()
    return prepareAudioStreamInternal()
}

/**
 * Prepares the audio-only encoder. Skips (returning the current prepared
 * state) when the audio-only stream is already running.
 */
private fun prepareAudioStreamInternal(): Boolean {
    if (rtmpOnlyAudio.isStreaming) {
        Log.d(TAG, "prepareAudioStreamInternal: Already prepared return")
        return isRtmpOnlyAudioPrepared
    }
    // Fixed: assign the actual result. The original only ever set the flag
    // to true, so a failed prepareAudio() could leave a stale `true` and
    // make this method report success.
    isRtmpOnlyAudioPrepared = rtmpOnlyAudio.prepareAudio()
    return isRtmpOnlyAudioPrepared
}

//step 2
// Stores the RTMP endpoint to be used by the audio-only stream.
fun setRtmpOnlyAudioUrl(rtmpOnlyAudioUrl: String) {
    this.rtmpOnlyAudioUrl = rtmpOnlyAudioUrl
}

// step 3
// Public entry point; delegates to the internal audio-only start routine.
fun startAudioOnlyStream() = startAudioOnlyStreamInternal()

/**
 * Starts the audio-only RTMP stream against [rtmpOnlyAudioUrl].
 * No-op when already streaming or when no URL has been set.
 */
private fun startAudioOnlyStreamInternal() {
    if (rtmpOnlyAudio.isStreaming) {
        Log.d(TAG, "startAudioOnlyStream: Already streaming return")
        return
    }
    // Capture once — also removes the original's redundant second
    // `rtmpOnlyAudioUrl != null` test after this guard.
    val url = rtmpOnlyAudioUrl
    if (url == null) {
        Log.d(TAG, "startAudioOnlyStream: rtmOnlyAudioUrl null")
        return
    }
    if (rtmpOnlyAudio.prepareAudio()) {
        // Fixed: a successful prepareAudio() here now marks the stream
        // prepared. The original required isRtmpOnlyAudioPrepared to already
        // be true, so a direct start without a prior prepare call could
        // never begin streaming even though prepare succeeded.
        isRtmpOnlyAudioPrepared = true
        Log.d(TAG, "startAudioOnlyStreamInternal: started")
        rtmpOnlyAudio.startStream(url)
    }
}

// step 4
/**
 * Stops the audio-only RTMP stream and clears its URL. No-op when not
 * streaming.
 */
fun stopAudioOnlyStream() {
    if (!rtmpOnlyAudio.isStreaming) {
        Log.d(TAG, "stopAudioOnlyStream: not streaming return")
        return
    }
    // Fixed: the original re-tested isStreaming here; the guard above
    // already guarantees it is true.
    rtmpOnlyAudioUrl = null
    rtmpOnlyAudio.stopStream()
    Log.d(TAG, "stopAudioOnlyStream: stopped")
}

}
`

@Prabhat-mobilefirst
Copy link
Author

@pedroSG94 Please guide me so that I can solve this problem.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

1 participant