Open
Description
Steps to reproduce
- Run the app
- Select the front camera
- Tap the pause preview button (calls pausePreview)
- Tap the button again (calls resumePreview)

Expected results
The preview resumes.
Actual results
The preview remains paused forever (see the minimal call sketch below).
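For reference, the failing sequence boils down to the sketch below (a minimal, hypothetical helper, not part of the example; it assumes a CameraController already created for the front camera whose initialize() has completed). The full reproduction code follows under Code sample.

// Minimal sketch (hypothetical helper): assumes `controller` targets the
// front camera and has already been initialized successfully.
Future<void> togglePreviewOnce(CameraController controller) async {
  await controller.pausePreview();  // preview freezes, value.isPreviewPaused becomes true
  await controller.resumePreview(); // expected: preview resumes; actual: it stays frozen
}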
Code sample
Demo code copy-pasted from https://pub.dev/packages/camera/example:
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:io';
import 'package:camera/camera.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:video_player/video_player.dart';
/// Camera example home widget.
class CameraExampleHome extends StatefulWidget {
/// Default Constructor
const CameraExampleHome({super.key});
@override
State<CameraExampleHome> createState() {
return _CameraExampleHomeState();
}
}
/// Returns a suitable camera icon for [direction].
IconData getCameraLensIcon(CameraLensDirection direction) {
switch (direction) {
case CameraLensDirection.back:
return Icons.camera_rear;
case CameraLensDirection.front:
return Icons.camera_front;
case CameraLensDirection.external:
return Icons.camera;
}
// This enum is from a different package, so a new value could be added at
// any time. The example should keep working if that happens.
// ignore: dead_code
return Icons.camera;
}
void _logError(String code, String? message) {
// ignore: avoid_print
print('Error: $code${message == null ? '' : '\nError Message: $message'}');
}
class _CameraExampleHomeState extends State<CameraExampleHome> with WidgetsBindingObserver, TickerProviderStateMixin {
CameraController? controller;
XFile? imageFile;
XFile? videoFile;
VideoPlayerController? videoController;
VoidCallback? videoPlayerListener;
bool enableAudio = true;
double _minAvailableExposureOffset = 0.0;
double _maxAvailableExposureOffset = 0.0;
double _currentExposureOffset = 0.0;
late final AnimationController _flashModeControlRowAnimationController;
late final CurvedAnimation _flashModeControlRowAnimation;
late final AnimationController _exposureModeControlRowAnimationController;
late final CurvedAnimation _exposureModeControlRowAnimation;
late final AnimationController _focusModeControlRowAnimationController;
late final CurvedAnimation _focusModeControlRowAnimation;
double _minAvailableZoom = 1.0;
double _maxAvailableZoom = 1.0;
double _currentScale = 1.0;
double _baseScale = 1.0;
// Counting pointers (number of user fingers on screen)
int _pointers = 0;
@override
void initState() {
super.initState();
WidgetsBinding.instance.addObserver(this);
_flashModeControlRowAnimationController = AnimationController(duration: const Duration(milliseconds: 300), vsync: this);
_flashModeControlRowAnimation = CurvedAnimation(parent: _flashModeControlRowAnimationController, curve: Curves.easeInCubic);
_exposureModeControlRowAnimationController = AnimationController(duration: const Duration(milliseconds: 300), vsync: this);
_exposureModeControlRowAnimation = CurvedAnimation(parent: _exposureModeControlRowAnimationController, curve: Curves.easeInCubic);
_focusModeControlRowAnimationController = AnimationController(duration: const Duration(milliseconds: 300), vsync: this);
_focusModeControlRowAnimation = CurvedAnimation(parent: _focusModeControlRowAnimationController, curve: Curves.easeInCubic);
}
@override
void dispose() {
WidgetsBinding.instance.removeObserver(this);
_flashModeControlRowAnimationController.dispose();
_flashModeControlRowAnimation.dispose();
_exposureModeControlRowAnimationController.dispose();
_exposureModeControlRowAnimation.dispose();
_focusModeControlRowAnimationController.dispose();
_focusModeControlRowAnimation.dispose();
super.dispose();
}
// #docregion AppLifecycle
@override
void didChangeAppLifecycleState(AppLifecycleState state) {
final CameraController? cameraController = controller;
// App state changed before we got the chance to initialize.
if (cameraController == null || !cameraController.value.isInitialized) {
return;
}
if (state == AppLifecycleState.inactive) {
cameraController.dispose();
} else if (state == AppLifecycleState.resumed) {
_initializeCameraController(cameraController.description);
}
}
// #enddocregion AppLifecycle
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(title: const Text('Camera example')),
body: Column(
children: <Widget>[
Expanded(
child: Container(
decoration: BoxDecoration(
color: Colors.black,
border: Border.all(color: controller != null && controller!.value.isRecordingVideo ? Colors.redAccent : Colors.grey, width: 3.0),
),
child: Padding(
padding: const EdgeInsets.all(1.0),
child: Center(child: _cameraPreviewWidget()),
),
),
),
_captureControlRowWidget(),
_modeControlRowWidget(),
Padding(
padding: const EdgeInsets.all(5.0),
child: Row(children: <Widget>[_cameraTogglesRowWidget(), _thumbnailWidget()]),
),
],
),
);
}
/// Display the preview from the camera (or a message if the preview is not available).
Widget _cameraPreviewWidget() {
final CameraController? cameraController = controller;
if (cameraController == null || !cameraController.value.isInitialized) {
return const Text(
'Tap a camera',
style: TextStyle(color: Colors.white, fontSize: 24.0, fontWeight: FontWeight.w900),
);
} else {
return Listener(
onPointerDown: (_) => _pointers++,
onPointerUp: (_) => _pointers--,
child: CameraPreview(
controller!,
child: LayoutBuilder(
builder: (BuildContext context, BoxConstraints constraints) {
return GestureDetector(behavior: HitTestBehavior.opaque, onScaleStart: _handleScaleStart, onScaleUpdate: _handleScaleUpdate, onTapDown: (TapDownDetails details) => onViewFinderTap(details, constraints));
},
),
),
);
}
}
void _handleScaleStart(ScaleStartDetails details) {
_baseScale = _currentScale;
}
Future<void> _handleScaleUpdate(ScaleUpdateDetails details) async {
// When there are not exactly two fingers on screen don't scale
if (controller == null || _pointers != 2) {
return;
}
_currentScale = (_baseScale * details.scale).clamp(_minAvailableZoom, _maxAvailableZoom);
await controller!.setZoomLevel(_currentScale);
}
/// Display the thumbnail of the captured image or video.
Widget _thumbnailWidget() {
final VideoPlayerController? localVideoController = videoController;
return Expanded(
child: Align(
alignment: Alignment.centerRight,
child: Row(
mainAxisSize: MainAxisSize.min,
children: <Widget>[
if (localVideoController == null && imageFile == null)
Container()
else
SizedBox(
width: 64.0,
height: 64.0,
child: (localVideoController == null)
? (
// The captured image on the web contains a network-accessible URL
// pointing to a location within the browser. It may be displayed
// either with Image.network or Image.memory after loading the image
// bytes to memory.
kIsWeb ? Image.network(imageFile!.path) : Image.file(File(imageFile!.path)))
: Container(
decoration: BoxDecoration(border: Border.all(color: Colors.pink)),
child: Center(
child: AspectRatio(aspectRatio: localVideoController.value.aspectRatio, child: VideoPlayer(localVideoController)),
),
),
),
],
),
),
);
}
/// Display a bar with buttons to change the flash and exposure modes
Widget _modeControlRowWidget() {
return Column(
children: <Widget>[
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
IconButton(icon: const Icon(Icons.flash_on), color: Colors.blue, onPressed: controller != null ? onFlashModeButtonPressed : null),
// The exposure and focus mode are currently not supported on the web.
...!kIsWeb ? <Widget>[IconButton(icon: const Icon(Icons.exposure), color: Colors.blue, onPressed: controller != null ? onExposureModeButtonPressed : null), IconButton(icon: const Icon(Icons.filter_center_focus), color: Colors.blue, onPressed: controller != null ? onFocusModeButtonPressed : null)] : <Widget>[],
IconButton(icon: Icon(enableAudio ? Icons.volume_up : Icons.volume_mute), color: Colors.blue, onPressed: controller != null ? onAudioModeButtonPressed : null),
IconButton(icon: Icon(controller?.value.isCaptureOrientationLocked ?? false ? Icons.screen_lock_rotation : Icons.screen_rotation), color: Colors.blue, onPressed: controller != null ? onCaptureOrientationLockButtonPressed : null),
],
),
_flashModeControlRowWidget(),
_exposureModeControlRowWidget(),
_focusModeControlRowWidget(),
],
);
}
Widget _flashModeControlRowWidget() {
return SizeTransition(
sizeFactor: _flashModeControlRowAnimation,
child: ClipRect(
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
IconButton(icon: const Icon(Icons.flash_off), color: controller?.value.flashMode == FlashMode.off ? Colors.orange : Colors.blue, onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.off) : null),
IconButton(icon: const Icon(Icons.flash_auto), color: controller?.value.flashMode == FlashMode.auto ? Colors.orange : Colors.blue, onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.auto) : null),
IconButton(icon: const Icon(Icons.flash_on), color: controller?.value.flashMode == FlashMode.always ? Colors.orange : Colors.blue, onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.always) : null),
IconButton(icon: const Icon(Icons.highlight), color: controller?.value.flashMode == FlashMode.torch ? Colors.orange : Colors.blue, onPressed: controller != null ? () => onSetFlashModeButtonPressed(FlashMode.torch) : null),
],
),
),
);
}
Widget _exposureModeControlRowWidget() {
final ButtonStyle styleAuto = TextButton.styleFrom(foregroundColor: controller?.value.exposureMode == ExposureMode.auto ? Colors.orange : Colors.blue);
final ButtonStyle styleLocked = TextButton.styleFrom(foregroundColor: controller?.value.exposureMode == ExposureMode.locked ? Colors.orange : Colors.blue);
return SizeTransition(
sizeFactor: _exposureModeControlRowAnimation,
child: ClipRect(
child: ColoredBox(
color: Colors.grey.shade50,
child: Column(
children: <Widget>[
const Center(child: Text('Exposure Mode')),
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
TextButton(
style: styleAuto,
onPressed: controller != null ? () => onSetExposureModeButtonPressed(ExposureMode.auto) : null,
onLongPress: () {
if (controller != null) {
controller!.setExposurePoint(null);
showInSnackBar('Resetting exposure point');
}
},
child: const Text('AUTO'),
),
TextButton(style: styleLocked, onPressed: controller != null ? () => onSetExposureModeButtonPressed(ExposureMode.locked) : null, child: const Text('LOCKED')),
TextButton(style: styleLocked, onPressed: controller != null ? () => controller!.setExposureOffset(0.0) : null, child: const Text('RESET OFFSET')),
],
),
const Center(child: Text('Exposure Offset')),
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
Text(_minAvailableExposureOffset.toString()),
Slider(value: _currentExposureOffset, min: _minAvailableExposureOffset, max: _maxAvailableExposureOffset, label: _currentExposureOffset.toString(), onChanged: _minAvailableExposureOffset == _maxAvailableExposureOffset ? null : setExposureOffset),
Text(_maxAvailableExposureOffset.toString()),
],
),
],
),
),
),
);
}
Widget _focusModeControlRowWidget() {
final ButtonStyle styleAuto = TextButton.styleFrom(foregroundColor: controller?.value.focusMode == FocusMode.auto ? Colors.orange : Colors.blue);
final ButtonStyle styleLocked = TextButton.styleFrom(foregroundColor: controller?.value.focusMode == FocusMode.locked ? Colors.orange : Colors.blue);
return SizeTransition(
sizeFactor: _focusModeControlRowAnimation,
child: ClipRect(
child: ColoredBox(
color: Colors.grey.shade50,
child: Column(
children: <Widget>[
const Center(child: Text('Focus Mode')),
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
TextButton(
style: styleAuto,
onPressed: controller != null ? () => onSetFocusModeButtonPressed(FocusMode.auto) : null,
onLongPress: () {
if (controller != null) {
controller!.setFocusPoint(null);
}
showInSnackBar('Resetting focus point');
},
child: const Text('AUTO'),
),
TextButton(style: styleLocked, onPressed: controller != null ? () => onSetFocusModeButtonPressed(FocusMode.locked) : null, child: const Text('LOCKED')),
],
),
],
),
),
),
);
}
/// Display the control bar with buttons to take pictures and record videos.
Widget _captureControlRowWidget() {
final CameraController? cameraController = controller;
return Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
IconButton(icon: const Icon(Icons.camera_alt), color: Colors.blue, onPressed: cameraController != null && cameraController.value.isInitialized && !cameraController.value.isRecordingVideo ? onTakePictureButtonPressed : null),
IconButton(icon: const Icon(Icons.videocam), color: Colors.blue, onPressed: cameraController != null && cameraController.value.isInitialized && !cameraController.value.isRecordingVideo ? onVideoRecordButtonPressed : null),
IconButton(
icon: cameraController != null && cameraController.value.isRecordingPaused ? const Icon(Icons.play_arrow) : const Icon(Icons.pause),
color: Colors.blue,
onPressed: cameraController != null && cameraController.value.isInitialized && cameraController.value.isRecordingVideo
? cameraController.value.isRecordingPaused
? onResumeButtonPressed
: onPauseButtonPressed
: null,
),
IconButton(icon: const Icon(Icons.stop), color: Colors.red, onPressed: cameraController != null && cameraController.value.isInitialized && cameraController.value.isRecordingVideo ? onStopButtonPressed : null),
IconButton(icon: const Icon(Icons.pause_presentation), color: cameraController != null && cameraController.value.isPreviewPaused ? Colors.red : Colors.blue, onPressed: cameraController == null ? null : onPausePreviewButtonPressed),
],
);
}
/// Display a row of toggle to select the camera (or a message if no camera is available).
Widget _cameraTogglesRowWidget() {
final List<Widget> toggles = <Widget>[];
void onChanged(CameraDescription? description) {
if (description == null) {
return;
}
onNewCameraSelected(description);
}
if (_cameras.isEmpty) {
SchedulerBinding.instance.addPostFrameCallback((_) async {
showInSnackBar('No camera found.');
});
return const Text('None');
} else {
for (final CameraDescription cameraDescription in _cameras) {
toggles.add(
SizedBox(
width: 90.0,
child: RadioListTile<CameraDescription>(title: Icon(getCameraLensIcon(cameraDescription.lensDirection)), groupValue: controller?.description, value: cameraDescription, onChanged: onChanged),
),
);
}
}
return Row(children: toggles);
}
String timestamp() => DateTime.now().millisecondsSinceEpoch.toString();
void showInSnackBar(String message) {
ScaffoldMessenger.of(context).showSnackBar(SnackBar(content: Text(message)));
}
void onViewFinderTap(TapDownDetails details, BoxConstraints constraints) {
if (controller == null) {
return;
}
final CameraController cameraController = controller!;
final Offset offset = Offset(details.localPosition.dx / constraints.maxWidth, details.localPosition.dy / constraints.maxHeight);
cameraController.setExposurePoint(offset);
cameraController.setFocusPoint(offset);
}
Future<void> onNewCameraSelected(CameraDescription cameraDescription) async {
if (controller != null) {
return controller!.setDescription(cameraDescription);
} else {
return _initializeCameraController(cameraDescription);
}
}
Future<void> _initializeCameraController(CameraDescription cameraDescription) async {
final CameraController cameraController = CameraController(cameraDescription, kIsWeb ? ResolutionPreset.max : ResolutionPreset.medium, enableAudio: enableAudio, imageFormatGroup: ImageFormatGroup.jpeg);
controller = cameraController;
// If the controller is updated then update the UI.
cameraController.addListener(() {
if (mounted) {
setState(() {});
}
if (cameraController.value.hasError) {
showInSnackBar('Camera error ${cameraController.value.errorDescription}');
}
});
try {
await cameraController.initialize();
await Future.wait(<Future<Object?>>[
// The exposure mode is currently not supported on the web.
...!kIsWeb ? <Future<Object?>>[cameraController.getMinExposureOffset().then((double value) => _minAvailableExposureOffset = value), cameraController.getMaxExposureOffset().then((double value) => _maxAvailableExposureOffset = value)] : <Future<Object?>>[],
cameraController.getMaxZoomLevel().then((double value) => _maxAvailableZoom = value),
cameraController.getMinZoomLevel().then((double value) => _minAvailableZoom = value),
]);
} on CameraException catch (e) {
switch (e.code) {
case 'CameraAccessDenied':
showInSnackBar('You have denied camera access.');
case 'CameraAccessDeniedWithoutPrompt':
// iOS only
showInSnackBar('Please go to Settings app to enable camera access.');
case 'CameraAccessRestricted':
// iOS only
showInSnackBar('Camera access is restricted.');
case 'AudioAccessDenied':
showInSnackBar('You have denied audio access.');
case 'AudioAccessDeniedWithoutPrompt':
// iOS only
showInSnackBar('Please go to Settings app to enable audio access.');
case 'AudioAccessRestricted':
// iOS only
showInSnackBar('Audio access is restricted.');
default:
_showCameraException(e);
}
}
if (mounted) {
setState(() {});
}
}
void onTakePictureButtonPressed() {
takePicture().then((XFile? file) {
if (mounted) {
setState(() {
imageFile = file;
videoController?.dispose();
videoController = null;
});
if (file != null) {
showInSnackBar('Picture saved to ${file.path}');
}
}
});
}
void onFlashModeButtonPressed() {
if (_flashModeControlRowAnimationController.value == 1) {
_flashModeControlRowAnimationController.reverse();
} else {
_flashModeControlRowAnimationController.forward();
_exposureModeControlRowAnimationController.reverse();
_focusModeControlRowAnimationController.reverse();
}
}
void onExposureModeButtonPressed() {
if (_exposureModeControlRowAnimationController.value == 1) {
_exposureModeControlRowAnimationController.reverse();
} else {
_exposureModeControlRowAnimationController.forward();
_flashModeControlRowAnimationController.reverse();
_focusModeControlRowAnimationController.reverse();
}
}
void onFocusModeButtonPressed() {
if (_focusModeControlRowAnimationController.value == 1) {
_focusModeControlRowAnimationController.reverse();
} else {
_focusModeControlRowAnimationController.forward();
_flashModeControlRowAnimationController.reverse();
_exposureModeControlRowAnimationController.reverse();
}
}
void onAudioModeButtonPressed() {
enableAudio = !enableAudio;
if (controller != null) {
onNewCameraSelected(controller!.description);
}
}
Future<void> onCaptureOrientationLockButtonPressed() async {
try {
if (controller != null) {
final CameraController cameraController = controller!;
if (cameraController.value.isCaptureOrientationLocked) {
await cameraController.unlockCaptureOrientation();
showInSnackBar('Capture orientation unlocked');
} else {
await cameraController.lockCaptureOrientation();
showInSnackBar('Capture orientation locked to ${cameraController.value.lockedCaptureOrientation.toString().split('.').last}');
}
}
} on CameraException catch (e) {
_showCameraException(e);
}
}
void onSetFlashModeButtonPressed(FlashMode mode) {
setFlashMode(mode).then((_) {
if (mounted) {
setState(() {});
}
showInSnackBar('Flash mode set to ${mode.toString().split('.').last}');
});
}
void onSetExposureModeButtonPressed(ExposureMode mode) {
setExposureMode(mode).then((_) {
if (mounted) {
setState(() {});
}
showInSnackBar('Exposure mode set to ${mode.toString().split('.').last}');
});
}
void onSetFocusModeButtonPressed(FocusMode mode) {
setFocusMode(mode).then((_) {
if (mounted) {
setState(() {});
}
showInSnackBar('Focus mode set to ${mode.toString().split('.').last}');
});
}
void onVideoRecordButtonPressed() {
startVideoRecording().then((_) {
if (mounted) {
setState(() {});
}
});
}
void onStopButtonPressed() {
stopVideoRecording().then((XFile? file) {
if (mounted) {
setState(() {});
}
if (file != null) {
showInSnackBar('Video recorded to ${file.path}');
videoFile = file;
_startVideoPlayer();
}
});
}
Future<void> onPausePreviewButtonPressed() async {
final CameraController? cameraController = controller;
if (cameraController == null || !cameraController.value.isInitialized) {
showInSnackBar('Error: select a camera first.');
return;
}
if (cameraController.value.isPreviewPaused) {
await cameraController.resumePreview();
} else {
await cameraController.pausePreview();
}
if (mounted) {
setState(() {});
}
}
void onPauseButtonPressed() {
pauseVideoRecording().then((_) {
if (mounted) {
setState(() {});
}
showInSnackBar('Video recording paused');
});
}
void onResumeButtonPressed() {
resumeVideoRecording().then((_) {
if (mounted) {
setState(() {});
}
showInSnackBar('Video recording resumed');
});
}
Future<void> startVideoRecording() async {
final CameraController? cameraController = controller;
if (cameraController == null || !cameraController.value.isInitialized) {
showInSnackBar('Error: select a camera first.');
return;
}
if (cameraController.value.isRecordingVideo) {
// A recording is already started, do nothing.
return;
}
try {
await cameraController.startVideoRecording();
} on CameraException catch (e) {
_showCameraException(e);
return;
}
}
Future<XFile?> stopVideoRecording() async {
final CameraController? cameraController = controller;
if (cameraController == null || !cameraController.value.isRecordingVideo) {
return null;
}
try {
return cameraController.stopVideoRecording();
} on CameraException catch (e) {
_showCameraException(e);
return null;
}
}
Future<void> pauseVideoRecording() async {
final CameraController? cameraController = controller;
if (cameraController == null || !cameraController.value.isRecordingVideo) {
return;
}
try {
await cameraController.pauseVideoRecording();
} on CameraException catch (e) {
_showCameraException(e);
rethrow;
}
}
Future<void> resumeVideoRecording() async {
final CameraController? cameraController = controller;
if (cameraController == null || !cameraController.value.isRecordingVideo) {
return;
}
try {
await cameraController.resumeVideoRecording();
} on CameraException catch (e) {
_showCameraException(e);
rethrow;
}
}
Future<void> setFlashMode(FlashMode mode) async {
if (controller == null) {
return;
}
try {
await controller!.setFlashMode(mode);
} on CameraException catch (e) {
_showCameraException(e);
rethrow;
}
}
Future<void> setExposureMode(ExposureMode mode) async {
if (controller == null) {
return;
}
try {
await controller!.setExposureMode(mode);
} on CameraException catch (e) {
_showCameraException(e);
rethrow;
}
}
Future<void> setExposureOffset(double offset) async {
if (controller == null) {
return;
}
setState(() {
_currentExposureOffset = offset;
});
try {
offset = await controller!.setExposureOffset(offset);
} on CameraException catch (e) {
_showCameraException(e);
rethrow;
}
}
Future<void> setFocusMode(FocusMode mode) async {
if (controller == null) {
return;
}
try {
await controller!.setFocusMode(mode);
} on CameraException catch (e) {
_showCameraException(e);
rethrow;
}
}
Future<void> _startVideoPlayer() async {
if (videoFile == null) {
return;
}
final VideoPlayerController vController = kIsWeb ? VideoPlayerController.networkUrl(Uri.parse(videoFile!.path)) : VideoPlayerController.file(File(videoFile!.path));
videoPlayerListener = () {
if (videoController != null) {
// Refreshing the state to update video player with the correct ratio.
if (mounted) {
setState(() {});
}
videoController!.removeListener(videoPlayerListener!);
}
};
vController.addListener(videoPlayerListener!);
await vController.setLooping(true);
await vController.initialize();
await videoController?.dispose();
if (mounted) {
setState(() {
imageFile = null;
videoController = vController;
});
}
await vController.play();
}
Future<XFile?> takePicture() async {
final CameraController? cameraController = controller;
if (cameraController == null || !cameraController.value.isInitialized) {
showInSnackBar('Error: select a camera first.');
return null;
}
if (cameraController.value.isTakingPicture) {
// A capture is already pending, do nothing.
return null;
}
try {
final XFile file = await cameraController.takePicture();
return file;
} on CameraException catch (e) {
_showCameraException(e);
return null;
}
}
void _showCameraException(CameraException e) {
_logError(e.code, e.description);
showInSnackBar('Error: ${e.code}\n${e.description}');
}
}
/// CameraApp is the Main Application.
class CameraApp extends StatelessWidget {
/// Default Constructor
const CameraApp({super.key});
@override
Widget build(BuildContext context) {
return const MaterialApp(home: CameraExampleHome());
}
}
List<CameraDescription> _cameras = <CameraDescription>[];
Future<void> main() async {
// Fetch the available cameras before initializing the app.
try {
WidgetsFlutterBinding.ensureInitialized();
_cameras = await availableCameras();
} on CameraException catch (e) {
_logError(e.code, e.description);
}
runApp(const CameraApp());
}
Screenshots or Video
Logs
Launching lib/main.dart on sdk gphone64 arm64 in debug mode...
Running Gradle task 'assembleDebug'...
✓ Built build/app/outputs/flutter-apk/app-debug.apk
Installing build/app/outputs/flutter-apk/app-debug.apk...
I/flutter ( 5886): [IMPORTANT:flutter/shell/platform/android/android_context_gl_impeller.cc(94)] Using the Impeller rendering backend (OpenGLES).
Debug service listening on ws://127.0.0.1:54039/MKGxDy3-ywo=/ws
Syncing files to device sdk gphone64 arm64...
D/QuirkSettingsLoader( 5886): QuirkSettings$MetadataHolderService is not found.
D/CameraX ( 5886): QuirkSettings from app metadata: null
D/CameraX ( 5886): QuirkSettings by default: QuirkSettings{enabledWhenDeviceHasQuirk=true, forceEnabledQuirks=[], forceDisabledQuirks=[]}
D/DeviceQuirks( 5886): camera2 DeviceQuirks = CaptureSessionShouldUseMrirQuirk
I/CameraManagerGlobal( 5886): Connecting to camera service
D/CameraRepository( 5886): Added camera: 1
D/CameraQuirks( 5886): camera2 CameraQuirks =
I/Camera2CameraInfo( 5886): Device Level: INFO_SUPPORTED_HARDWARE_LEVEL_FULL
D/CameraRepository( 5886): Added camera: 10
D/CameraQuirks( 5886): camera2 CameraQuirks =
I/Camera2CameraInfo( 5886): Device Level: INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED
D/CameraValidator( 5886): Verifying camera lens facing on emu64a, lensFacingInteger: null
D/EGL_emulation( 5886): app_time_stats: avg=801.84ms min=34.76ms max=1568.92ms count=2
D/DeviceQuirks( 5886): video DeviceQuirks = MediaCodecDefaultDataSpaceQuirk
D/Recorder( 5886): mRequiredFreeStorageBytes = 50 MB
D/DeviceQuirks( 5886): core DeviceQuirks = SurfaceOrderQuirk
D/ResolvedFeatureCombination( 5886): resolveFeatureCombination: sessionConfig = androidx.camera.core.LegacySessionConfig@8844adb, lensFacing = 0
D/CameraUseCaseAdapter( 5886): addUseCases: appUseCasesToAdd = [Preview:androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0, ImageCapture:androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e363, ImageAnalysis:androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d307], featureCombinationnull
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = Preview:androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = ImageCapture:androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e363
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = ImageAnalysis:androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d307
D/DynamicRangeResolver( 5886): Resolved dynamic range for use case androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0 to no compatible HDR dynamic ranges.
D/DynamicRangeResolver( 5886): DynamicRange@570588d{encoding=UNSPECIFIED, bitDepth=0}
D/DynamicRangeResolver( 5886): ->
D/DynamicRangeResolver( 5886): DynamicRange@6f90524{encoding=SDR, bitDepth=8}
D/CameraQuirks( 5886): camera2 CameraQuirks =
D/CameraQuirks( 5886): camera2 CameraQuirks =
D/CameraQuirks( 5886): camera2 CameraQuirks =
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = Preview:androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0
D/DeferrableSurface( 5886): Surface created[total_surfaces=1, used_surfaces=0](androidx.camera.core.processing.SurfaceEdge$SettableSurface@204588e}
D/DeferrableSurface( 5886): Surface created[total_surfaces=2, used_surfaces=0](androidx.camera.core.SurfaceRequest$2@7f16f9a}
D/DeferrableSurface( 5886): New surface in use[total_surfaces=2, used_surfaces=1](androidx.camera.core.SurfaceRequest$2@7f16f9a}
D/DeferrableSurface( 5886): use count+1, useCount=1 androidx.camera.core.SurfaceRequest$2@7f16f9a
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = ImageCapture:androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e363
D/ImageCapture( 5886): createPipeline(cameraId: 1, streamSpec: StreamSpec{resolution=720x480, originalConfiguredResolution=720x480, dynamicRange=DynamicRange@6f90524{encoding=SDR, bitDepth=8}, sessionType=0, expectedFrameRateRange=[0, 0], implementationOptions=androidx.camera.camera2.impl.Camera2ImplConfig@76f7ea7, zslDisabled=false})
D/DeferrableSurface( 5886): Surface created[total_surfaces=3, used_surfaces=1](androidx.camera.core.impl.ImmediateSurface@f39ff54}
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = ImageAnalysis:androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d307
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201 ACTIVE
D/UseCaseAttachState( 5886): Active and attached use case: [] for camera: 1
D/DeferrableSurface( 5886): Surface created[total_surfaces=4, used_surfaces=1](androidx.camera.core.impl.ImmediateSurface@e8b5e43}
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475 ACTIVE
D/ImageCapture( 5886): onCameraControlReady
D/UseCaseAttachState( 5886): Active and attached use case: [] for camera: 1
D/Camera2CameraControlImp( 5886): setFlashMode: mFlashMode = 2
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201 ACTIVE
D/UseCaseAttachState( 5886): Active and attached use case: [] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407 INACTIVE
D/UseCaseAttachState( 5886): Active and attached use case: [] for camera: 1
D/UseCaseAttachState( 5886): Active and attached use case: [] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use cases [androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475, androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407] now ATTACHED
D/Camera2CameraControlImp( 5886): setActive: isActive = true
D/UseCaseAttachState( 5886): All use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407] for camera: 1
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Resetting Capture Session
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Skipping Capture Session state check due to current camera state: INITIALIZED and previous session status: false
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Releasing session in state INITIALIZED
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Attempting to force open the camera.
D/CameraStateRegistry( 5886): tryOpenCamera(Camera@f799ded[id=1]) [Available Cameras: 1, Already Open: false (Previous state: null)] --> SUCCESS
D/CameraStateRegistry( 5886): Recalculating open cameras:
D/CameraStateRegistry( 5886): Camera State
D/CameraStateRegistry( 5886): -------------------------------------------------------------------
D/CameraStateRegistry( 5886): Camera@f799ded[id=1] OPENING
D/CameraStateRegistry( 5886): Camera@b524f9c[id=10] UNKNOWN
D/CameraStateRegistry( 5886): -------------------------------------------------------------------
D/CameraStateRegistry( 5886): Open count: 1 (Max allowed: 1)
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Opening camera.
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Transitioning camera internal state: INITIALIZED --> OPENING
D/CameraStateMachine( 5886): New public camera state CameraState{type=OPENING, error=null} from OPENING and null
D/CameraStateMachine( 5886): Publishing new public camera state CameraState{type=OPENING, error=null}
D/UseCaseAttachState( 5886): All use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475 ACTIVE
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201 ACTIVE
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407 INACTIVE
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} CameraDevice.onOpened()
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Transitioning camera internal state: OPENING --> OPENED
D/CameraStateRegistry( 5886): Recalculating open cameras:
D/CameraStateRegistry( 5886): Camera State
D/CameraStateRegistry( 5886): -------------------------------------------------------------------
D/CameraStateRegistry( 5886): Camera@f799ded[id=1] OPEN
D/CameraStateRegistry( 5886): Camera@b524f9c[id=10] UNKNOWN
D/CameraStateRegistry( 5886): -------------------------------------------------------------------
D/CameraStateRegistry( 5886): Open count: 1 (Max allowed: 1)
D/CameraStateMachine( 5886): New public camera state CameraState{type=OPEN, error=null} from OPEN and null
D/CameraStateMachine( 5886): Publishing new public camera state CameraState{type=OPEN, error=null}
D/UseCaseAttachState( 5886): All use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407] for camera: 1
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/SyncCaptureSessionBase( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@3ee494c] getSurface done with results: [Surface(name=null mNativeObject=-5476376659118543520)/@0x485e584, Surface(name=null mNativeObject=-5476376659118726640)/@0xde6f69e, Surface(name=null mNativeObject=-5476376659118587120)/@0x6af557f]
D/CaptureSession( 5886): Opening capture session.
D/Camera2CaptureRequestBuilder( 5886): template type = 1
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@3ee494c] start openCaptureSession
D/DeferrableSurface( 5886): use count+1, useCount=2 androidx.camera.core.SurfaceRequest$2@7f16f9a
D/DeferrableSurface( 5886): New surface in use[total_surfaces=4, used_surfaces=2](androidx.camera.core.impl.ImmediateSurface@f39ff54}
D/DeferrableSurface( 5886): use count+1, useCount=1 androidx.camera.core.impl.ImmediateSurface@f39ff54
D/DeferrableSurface( 5886): New surface in use[total_surfaces=4, used_surfaces=3](androidx.camera.core.impl.ImmediateSurface@e8b5e43}
D/DeferrableSurface( 5886): use count+1, useCount=1 androidx.camera.core.impl.ImmediateSurface@e8b5e43
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@3ee494c] Session onConfigured()
D/CaptureSession( 5886): Attempting to send capture request onConfigured
D/CaptureSession( 5886): Issuing request for session.
D/Camera2CaptureRequestBuilder( 5886): createCaptureRequest
D/CaptureSession( 5886): CameraCaptureSession.onConfigured() mState=OPENED
D/CaptureSession( 5886): CameraCaptureSession.onReady() OPENED
D/EGL_emulation( 5886): app_time_stats: avg=34.43ms min=1.48ms max=149.27ms count=29
D/DeferrableSurface( 5886): surface closed, useCount=2 closed=true androidx.camera.core.SurfaceRequest$2@7f16f9a
D/DeferrableSurface( 5886): surface closed, useCount=0 closed=true androidx.camera.core.processing.SurfaceEdge$SettableSurface@204588e
D/DeferrableSurface( 5886): Surface terminated[total_surfaces=3, used_surfaces=3](androidx.camera.core.processing.SurfaceEdge$SettableSurface@204588e}
D/DeferrableSurface( 5886): use count-1, useCount=1 closed=true androidx.camera.core.SurfaceRequest$2@7f16f9a
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use cases [androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] now DETACHED for camera
D/UseCaseAttachState( 5886): All use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407] for camera: 1
D/Camera2CameraImpl( 5886): No need to remove a previous mMeteringRepeating, SessionConfig Surfaces: 2, CaptureConfig Surfaces: 1
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201] for camera: 1
D/CaptureSession( 5886): Attempting to submit CaptureRequest after setting
D/CaptureSession( 5886): Skipping issueRepeatingCaptureRequests for no surface.
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Resetting Capture Session
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@3ee494c] Session call close()
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Releasing session in state OPENED
D/UseCaseAttachState( 5886): All use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407] for camera: 1
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@3ee494c] Session call super.close()
D/CaptureSession( 5886): onSessionFinished()
D/SyncCaptureSessionBase( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@ca05dac] getSurface done with results: [Surface(name=null mNativeObject=-5476376659118726640)/@0xde6f69e, Surface(name=null mNativeObject=-5476376659118587120)/@0x6af557f]
D/CaptureSession( 5886): Opening capture session.
D/Camera2CaptureRequestBuilder( 5886): template type = 1
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@ca05dac] start openCaptureSession
D/DeferrableSurface( 5886): use count+1, useCount=2 androidx.camera.core.impl.ImmediateSurface@f39ff54
D/DeferrableSurface( 5886): use count+1, useCount=2 androidx.camera.core.impl.ImmediateSurface@e8b5e43
D/EGL_emulation( 5886): app_time_stats: avg=31.40ms min=7.55ms max=66.75ms count=32
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@ca05dac] Session onConfigured()
D/DeferrableSurface( 5886): use count-1, useCount=0 closed=true androidx.camera.core.SurfaceRequest$2@7f16f9a
D/DeferrableSurface( 5886): Surface no longer in use[total_surfaces=3, used_surfaces=2](androidx.camera.core.SurfaceRequest$2@7f16f9a}
D/DeferrableSurface( 5886): Surface terminated[total_surfaces=2, used_surfaces=2](androidx.camera.core.SurfaceRequest$2@7f16f9a}
D/DeferrableSurface( 5886): use count-1, useCount=1 closed=false androidx.camera.core.impl.ImmediateSurface@f39ff54
D/DeferrableSurface( 5886): use count-1, useCount=1 closed=false androidx.camera.core.impl.ImmediateSurface@e8b5e43
D/CaptureSession( 5886): Attempting to send capture request onConfigured
D/CaptureSession( 5886): Skipping issueRepeatingCaptureRequests for no surface.
D/CaptureSession( 5886): CameraCaptureSession.onConfigured() mState=OPENED
D/CaptureSession( 5886): CameraCaptureSession.onReady() OPENED
D/CaptureSession( 5886): CameraCaptureSession.onReady() OPENED
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@3ee494c] onClosed()
D/EGL_emulation( 5886): app_time_stats: avg=44.79ms min=14.44ms max=833.56ms count=29
D/ProfileInstaller( 5886): Installing profile for com.example.cameraissue
D/ResolvedFeatureCombination( 5886): resolveFeatureCombination: sessionConfig = androidx.camera.core.LegacySessionConfig@188f5f3, lensFacing = 0
D/CameraUseCaseAdapter( 5886): addUseCases: appUseCasesToAdd = [Preview:androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0], featureCombinationnull
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = Preview:androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0
D/DynamicRangeResolver( 5886): Resolved dynamic range for use case androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0 to no compatible HDR dynamic ranges.
D/DynamicRangeResolver( 5886): DynamicRange@570588d{encoding=UNSPECIFIED, bitDepth=0}
D/DynamicRangeResolver( 5886): ->
D/DynamicRangeResolver( 5886): DynamicRange@6f90524{encoding=SDR, bitDepth=8}
D/CameraQuirks( 5886): camera2 CameraQuirks =
D/UseCase ( 5886): applyFeaturesToConfig: mFeatures = null, this = Preview:androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201 UPDATED
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201] for camera: 1
D/DeferrableSurface( 5886): Surface created[total_surfaces=3, used_surfaces=2](androidx.camera.core.processing.SurfaceEdge$SettableSurface@2c32529}
D/CaptureSession( 5886): Attempting to submit CaptureRequest after setting
D/CaptureSession( 5886): Skipping issueRepeatingCaptureRequests for no surface.
D/DeferrableSurface( 5886): Surface created[total_surfaces=4, used_surfaces=2](androidx.camera.core.SurfaceRequest$2@1bfeae5}
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407 UPDATED
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201] for camera: 1
D/DeferrableSurface( 5886): New surface in use[total_surfaces=4, used_surfaces=3](androidx.camera.core.SurfaceRequest$2@1bfeae5}
D/DeferrableSurface( 5886): use count+1, useCount=1 androidx.camera.core.SurfaceRequest$2@1bfeae5
D/CaptureSession( 5886): Attempting to submit CaptureRequest after setting
D/CaptureSession( 5886): Skipping issueRepeatingCaptureRequests for no surface.
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use cases [androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] now ATTACHED
D/UseCaseAttachState( 5886): All use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201] for camera: 1
D/CaptureSession( 5886): Attempting to submit CaptureRequest after setting
D/CaptureSession( 5886): Skipping issueRepeatingCaptureRequests for no surface.
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Resetting Capture Session
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@ca05dac] Session call close()
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Releasing session in state OPENED
D/UseCaseAttachState( 5886): All use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.ImageAnalysis-99c5b4c2-be13-4b3e-88a4-0a985876d30768584407, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/Camera2CameraImpl( 5886): {Camera@f799ded[id=1]} Use case androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475 ACTIVE
D/UseCaseAttachState( 5886): Active and attached use case: [androidx.camera.core.ImageCapture-c51fec5a-f76a-4ca0-bbcd-d6a69351e36312025201, androidx.camera.core.Preview-2fadf7fa-1a31-4731-ae6b-00260f87aec0191476475] for camera: 1
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@ca05dac] Session call super.close()
D/CaptureSession( 5886): onSessionFinished()
D/SyncCaptureSessionImpl( 5886): [androidx.camera.camera2.internal.SynchronizedCaptureSessionImpl@ca05dac] onClosed()
D/DeferrableSurface( 5886): use count-1, useCount=0 closed=false androidx.camera.core.impl.ImmediateSurface@f39ff54
D/DeferrableSurface( 5886): Surface no longer in use[total_surfaces=4, used_surfaces=2](androidx.camera.core.impl.ImmediateSurface@f39ff54}
D/DeferrableSurface( 5886): use count-1, useCount=0 closed=false androidx.camera.core.impl.ImmediateSurface@e8b5e43
D/DeferrableSurface( 5886): Surface no longer in use[total_surfaces=4, used_surfaces=1](androidx.camera.core.impl.ImmediateSurface@e8b5e43}
Flutter Doctor output
[✓] Flutter (Channel stable, 3.32.0, on macOS 15.5 24F74 darwin-arm64, locale fr-FR) [1 056ms]
• Flutter version 3.32.0 on channel stable at /Users/earminjon/fvm/versions/3.32.0
• Upstream repository https://github.com/flutter/flutter.git
• Framework revision be698c48a6 (8 days ago), 2025-05-19 12:59:14 -0700
• Engine revision 1881800949
• Dart version 3.8.0
• DevTools version 2.45.1
[✓] Android toolchain - develop for Android devices (Android SDK version 35.0.1) [1 673ms]
• Android SDK at /Users/earminjon/Library/Android/sdk
• Platform android-35, build-tools 35.0.1
• ANDROID_HOME = /Users/earminjon/Library/Android/sdk
• Java binary at: /Users/earminjon/Library/Java/JavaVirtualMachines/corretto-17.0.14/Contents/Home/bin/java
This JDK is specified in your Flutter configuration.
To change the current JDK, run: `flutter config --jdk-dir="path/to/jdk"`.
• Java version OpenJDK Runtime Environment Corretto-17.0.14.7.1 (build 17.0.14+7-LTS)
• All Android licenses accepted.
[!] Xcode - develop for iOS and macOS (Xcode 16.3) [884ms]
• Xcode at /Applications/Xcode.app/Contents/Developer
• Build 16E140
✗ CocoaPods installed but not working.
You appear to have CocoaPods installed but it is not working.
This can happen if the version of Ruby that CocoaPods was installed with is different from the one being used to invoke it.
This can usually be fixed by re-installing CocoaPods.
For re-installation instructions, see https://guides.cocoapods.org/using/getting-started.html#installation
[✓] Chrome - develop for the web [81ms]
• Chrome at /Applications/Google Chrome.app/Contents/MacOS/Google Chrome
[✓] Android Studio (version 2024.3) [80ms]
• Android Studio at /Users/earminjon/Applications/Android Studio.app/Contents
• Flutter plugin can be installed from:
🔨 https://plugins.jetbrains.com/plugin/9212-flutter
• Dart plugin can be installed from:
🔨 https://plugins.jetbrains.com/plugin/6351-dart
• Java version OpenJDK Runtime Environment (build 21.0.6+-13368085-b895.109)
[✓] IntelliJ IDEA Ultimate Edition (version 2025.1.1.1) [79ms]
• IntelliJ at /Users/earminjon/Applications/IntelliJ IDEA Ultimate.app
• Flutter plugin version 85.3.2
• Dart plugin version 251.25410.28
[✓] Connected device (3 available) [6,5s]
• sdk gphone64 arm64 (mobile) • emulator-5554 • android-arm64 • Android 15 (API 35) (emulator)
• macOS (desktop) • macos • darwin-arm64 • macOS 15.5 24F74 darwin-arm64
• Chrome (web) • chrome • web-javascript • Google Chrome 136.0.7103.114
! Error: Browsing on the local area network for iPhone de Romain. Ensure the device is unlocked and attached with a cable or
associated with the same local area network as this Mac.
The device must be opted into Developer Mode to connect wirelessly. (code -27)
! Error: Browsing on the local area network for iPhone d’Enguerrand. Ensure the device is unlocked and attached with a cable or
associated with the same local area network as this Mac.
The device must be opted into Developer Mode to connect wirelessly. (code -27)
[✓] Network resources [240ms]
• All expected network resources are available.
Metadata
Assignees
Labels
- High-priority issues at the top of the work list
- Found to occur in 3.32
- Found to occur in 3.33
- The issue has been confirmed reproducible and is ready to work on
- The camera plugin
- flutter/packages repository. See also p: labels.
- Android applications specifically
- Owned by Android platform team
- Triaged by Android platform team