Skip to content

Commit

Permalink
Merge pull request #1903 from TalaoDAO/google_mlkit_barcode_scanning
Browse files Browse the repository at this point in the history
Google mlkit barcode scanning
  • Loading branch information
bibash28 authored Sep 14, 2023
2 parents d81498d + 601e3db commit a0eedda
Show file tree
Hide file tree
Showing 7 changed files with 385 additions and 162 deletions.
18 changes: 16 additions & 2 deletions ios/Podfile
Original file line number Diff line number Diff line change
Expand Up @@ -38,17 +38,31 @@ target 'Runner' do
flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))
end


$iOSVersion = '14.0'

post_install do |installer|
# Google's ML Kit Barcode Scanning setup
installer.pods_project.build_configurations.each do |config|
config.build_settings["EXCLUDED_ARCHS[sdk=*]"] = "armv7"
config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = $iOSVersion
end

installer.pods_project.targets.each do |target|
flutter_additional_ios_build_settings(target)
# polygonid-setup
# polygonid-setup
target.build_configurations.each do |config|
cflags = config.build_settings['OTHER_CFLAGS'] || ['$(inherited)']
cflags << '-fembed-bitcode'
config.build_settings['OTHER_CFLAGS'] = cflags
config.build_settings['SWIFT_VERSION'] = '5.0'
config.build_settings['ENABLE_BITCODE'] = 'NO'
config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '14.0'
config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = $iOSVersion

# Google's ML Kit Barcode Scanning setup
if Gem::Version.new($iOSVersion) > Gem::Version.new(config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'])
config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = $iOSVersion
end
end
if target.name == "Pods-Runner"
puts "Updating #{target.name} OTHER_LDFLAGS"
Expand Down
1 change: 1 addition & 0 deletions lib/dashboard/qr_code/qr_code_scan/qr_code_scan.dart
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
export 'cubit/qr_code_scan_cubit.dart';
export 'view/qr_camera_view.dart';
export 'view/qr_code_scan_page.dart';
export 'view/qr_scanner_page.dart';
263 changes: 263 additions & 0 deletions lib/dashboard/qr_code/qr_code_scan/view/qr_camera_view.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,263 @@
import 'dart:async';
import 'dart:io';

import 'package:altme/app/app.dart';
import 'package:altme/theme/theme.dart';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:google_mlkit_commons/google_mlkit_commons.dart';

/// Live camera preview that feeds every frame to Google ML Kit as an
/// [InputImage], used for barcode / QR scanning.
class QrCameraView extends StatefulWidget {
  const QrCameraView({
    super.key,
    required this.title,
    required this.onImage,
    this.onCameraFeedReady,
    this.onDetectorViewModeChanged,
    this.onCameraLensDirectionChanged,
    this.initialCameraLensDirection = CameraLensDirection.back,
  });

  /// Title displayed in the app bar above the preview.
  final String title;

  /// Called for each camera frame after conversion to an ML Kit [InputImage].
  ///
  /// Typed `void Function(...)` rather than a raw `Function(...)` so the
  /// return type is not an implicit `dynamic`; any callback with a matching
  /// parameter list remains assignable.
  final void Function(InputImage inputImage) onImage;

  /// Invoked once the camera image stream has successfully started.
  final VoidCallback? onCameraFeedReady;

  /// NOTE(review): declared but never invoked anywhere in this file —
  /// confirm whether external callers rely on it before removing.
  final VoidCallback? onDetectorViewModeChanged;

  /// Reports the lens direction actually used by the live feed.
  final void Function(CameraLensDirection direction)?
      onCameraLensDirectionChanged;

  /// Lens preferred when the widget first initializes the camera.
  final CameraLensDirection initialCameraLensDirection;

  @override
  State<QrCameraView> createState() => _QrCameraViewState();
}

class _QrCameraViewState extends State<QrCameraView> {
  // Cached statically so the device's cameras are enumerated only once
  // per app session, not per widget instance.
  static List<CameraDescription> _cameras = [];

  CameraController? _controller;

  // Index into [_cameras] matching the requested lens direction;
  // -1 means no matching camera was found.
  int _cameraIndex = -1;

  // Zoom / exposure ranges captured at start-up. They are written but not
  // read anywhere in this widget yet — presumably reserved for future
  // zoom/exposure UI controls; confirm before removing.
  double _currentZoomLevel = 1.0;
  double _minAvailableZoom = 1.0;
  double _maxAvailableZoom = 1.0;
  double _minAvailableExposureOffset = 0.0;
  double _maxAvailableExposureOffset = 0.0;
  double _currentExposureOffset = 0.0;

  // When true the preview is hidden while the lens is being switched.
  bool _changingCameraLens = false;

  @override
  void initState() {
    super.initState();
    // Fire-and-forget: camera start-up must not block initState.
    unawaited(_initialize());
  }

  /// Enumerates cameras (once), picks the one matching
  /// [QrCameraView.initialCameraLensDirection] and starts the live feed.
  ///
  /// Returns a Future (not `void async`) so errors propagate to callers
  /// that choose to await it.
  Future<void> _initialize() async {
    if (_cameras.isEmpty) {
      _cameras = await availableCameras();
    }
    for (var i = 0; i < _cameras.length; i++) {
      if (_cameras[i].lensDirection == widget.initialCameraLensDirection) {
        _cameraIndex = i;
        break;
      }
    }
    if (_cameraIndex != -1) {
      unawaited(_startLiveFeed());
    }
  }

  @override
  void dispose() {
    // Deliberately not awaited: dispose() must stay synchronous.
    unawaited(_stopLiveFeed());
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    if (_cameras.isEmpty) return Container();
    if (_controller == null) return Container();
    if (_controller?.value.isInitialized == false) return Container();
    return Scaffold(
      backgroundColor: Theme.of(context).colorScheme.background,
      appBar: AppBar(
        title: Text(
          widget.title,
          textAlign: TextAlign.center,
          style: Theme.of(context).textTheme.appBar,
        ),
        leading: const BackLeadingButton(),
        backgroundColor: Theme.of(context).colorScheme.background,
      ),
      body: ColoredBox(
        color: Theme.of(context).colorScheme.background,
        child: Center(
          child: _changingCameraLens
              ? Container()
              : CameraPreview(
                  _controller!,
                  // Overlay dims everything except the central scan window.
                  child: CustomPaint(
                    painter: BlurPainter(),
                    child: Container(),
                  ),
                ),
        ),
      ),
    );
  }

  /// Creates the [CameraController], starts the image stream and fires the
  /// widget's ready/lens-direction callbacks.
  ///
  /// Rewritten from nested `.then` chains to sequential `await`s so that
  /// zoom/exposure ranges are fully populated (and errors surface) before
  /// the stream starts and the final rebuild is requested.
  Future<void> _startLiveFeed() async {
    final camera = _cameras[_cameraIndex];
    final controller = CameraController(
      camera,
      // ResolutionPreset.max is known not to work on some phones; use high.
      ResolutionPreset.high,
      enableAudio: false,
      // ML Kit only accepts nv21 frames on Android and bgra8888 on iOS.
      imageFormatGroup: Platform.isAndroid
          ? ImageFormatGroup.nv21
          : ImageFormatGroup.bgra8888,
    );
    _controller = controller;

    await controller.initialize();
    if (!mounted) return;

    _currentZoomLevel = await controller.getMinZoomLevel();
    _minAvailableZoom = _currentZoomLevel;
    _maxAvailableZoom = await controller.getMaxZoomLevel();
    _currentExposureOffset = 0.0;
    _minAvailableExposureOffset = await controller.getMinExposureOffset();
    _maxAvailableExposureOffset = await controller.getMaxExposureOffset();

    await controller.startImageStream(_processCameraImage);
    widget.onCameraFeedReady?.call();
    widget.onCameraLensDirectionChanged?.call(camera.lensDirection);

    if (mounted) setState(() {});
  }

  /// Stops the image stream (if one is running) and disposes the controller.
  ///
  /// Guards with [CameraValue.isStreamingImages]: `stopImageStream` throws
  /// a CameraException when called while no stream is active, e.g. if the
  /// widget is disposed before [_startLiveFeed] reaches startImageStream.
  Future<void> _stopLiveFeed() async {
    final controller = _controller;
    _controller = null;
    if (controller == null) return;
    if (controller.value.isStreamingImages) {
      await controller.stopImageStream();
    }
    await controller.dispose();
  }

  /// Stream callback: converts each [CameraImage] and forwards it upstream.
  void _processCameraImage(CameraImage image) {
    final inputImage = _inputImageFromCameraImage(image);
    if (inputImage == null) return;
    widget.onImage(inputImage);
  }

  // Device orientation → rotation degrees, used for the Android
  // rotation-compensation computation below.
  final _orientations = {
    DeviceOrientation.portraitUp: 0,
    DeviceOrientation.landscapeLeft: 90,
    DeviceOrientation.portraitDown: 180,
    DeviceOrientation.landscapeRight: 270,
  };

  /// Converts a raw [CameraImage] into an ML Kit [InputImage].
  ///
  /// Returns null when the rotation cannot be determined or the frame
  /// format is not one ML Kit supports (nv21/Android, bgra8888/iOS).
  InputImage? _inputImageFromCameraImage(CameraImage image) {
    if (_controller == null) return null;

    // get image rotation
    // it is used in android to convert the InputImage from Dart to Java: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/google_mlkit_commons/android/src/main/java/com/google_mlkit_commons/InputImageConverter.java
    // `rotation` is not used in iOS to convert the InputImage from Dart to Obj-C: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/google_mlkit_commons/ios/Classes/MLKVisionImage%2BFlutterPlugin.m
    // in both platforms `rotation` and `camera.lensDirection` can be used to compensate `x` and `y` coordinates on a canvas: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/example/lib/vision_detector_views/painters/coordinates_translator.dart
    final camera = _cameras[_cameraIndex];
    final sensorOrientation = camera.sensorOrientation;
    InputImageRotation? rotation;
    if (Platform.isIOS) {
      rotation = InputImageRotationValue.fromRawValue(sensorOrientation);
    } else if (Platform.isAndroid) {
      var rotationCompensation =
          _orientations[_controller!.value.deviceOrientation];
      if (rotationCompensation == null) return null;
      if (camera.lensDirection == CameraLensDirection.front) {
        // front-facing
        rotationCompensation = (sensorOrientation + rotationCompensation) % 360;
      } else {
        // back-facing
        rotationCompensation =
            (sensorOrientation - rotationCompensation + 360) % 360;
      }
      rotation = InputImageRotationValue.fromRawValue(rotationCompensation);
    }
    if (rotation == null) return null;

    // get image format
    final format = InputImageFormatValue.fromRawValue(image.format.raw as int);
    // validate format depending on platform
    // only supported formats:
    // * nv21 for Android
    // * bgra8888 for iOS
    if (format == null ||
        (Platform.isAndroid && format != InputImageFormat.nv21) ||
        (Platform.isIOS && format != InputImageFormat.bgra8888)) return null;

    // since format is constraint to nv21 or bgra8888, both only have one plane
    if (image.planes.length != 1) return null;
    final plane = image.planes.first;

    // compose InputImage using bytes
    return InputImage.fromBytes(
      bytes: plane.bytes,
      metadata: InputImageMetadata(
        size: Size(image.width.toDouble(), image.height.toDouble()),
        rotation: rotation, // used only in Android
        format: format, // used only in iOS
        bytesPerRow: plane.bytesPerRow, // used only in iOS
      ),
    );
  }
}

/// Paints a semi-transparent black scrim over the whole canvas while
/// leaving a clear square "scan window" in the centre.
class BlurPainter extends CustomPainter {
  @override
  void paint(Canvas canvas, Size size) {
    // Scrim colour: 50% black over everything outside the window.
    final scrimPaint = Paint()..color = Colors.black.withOpacity(0.5);

    // The clear window is a centred square spanning 80% of the width.
    final windowSide = size.width * 0.8;
    final window = Rect.fromLTWH(
      (size.width - windowSide) / 2,
      (size.height - windowSide) / 2,
      windowSide,
      windowSide,
    );

    // Scrim region = full canvas minus the central window.
    final scrimRegion = Path.combine(
      PathOperation.difference,
      Path()..addRect(Offset.zero & size),
      Path()..addRect(window),
    );

    canvas.drawPath(scrimRegion, scrimPaint);
  }

  /// The painter holds no state, so it never needs to repaint.
  @override
  bool shouldRepaint(covariant CustomPainter oldDelegate) => false;
}
Loading

0 comments on commit a0eedda

Please sign in to comment.