add structure

2020-05-29 07:45:27 +06:30
parent 4c851d9971
commit bad27ba5c4
272 changed files with 36065 additions and 174 deletions

lib/face/face_detection_camera.dart (new file, 177 lines)

@@ -0,0 +1,177 @@
import 'smile_painter.dart';
import 'package:flutter/material.dart';
import 'package:camera/camera.dart';
import 'package:firebase_ml_vision/firebase_ml_vision.dart';
import 'package:flutter/foundation.dart';
import 'dart:ui' as ui show Image;
import 'utils.dart';
class FaceDetectionFromLiveCamera extends StatefulWidget {
FaceDetectionFromLiveCamera({Key key}) : super(key: key);
@override
_FaceDetectionFromLiveCameraState createState() =>
_FaceDetectionFromLiveCameraState();
}
class _FaceDetectionFromLiveCameraState
extends State<FaceDetectionFromLiveCamera> {
final FaceDetector faceDetector = FirebaseVision.instance.faceDetector();
List<Face> faces;
CameraController _camera;
bool _isDetecting = false;
CameraLensDirection _direction = CameraLensDirection.back;
@override
void initState() {
super.initState();
_initializeCamera();
}
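  // Picks the camera matching _direction, converts its sensor orientation to
  // an ImageRotation, and starts streaming preview frames. _isDetecting makes
  // sure only one frame is analysed at a time; every processed frame updates
  // `faces` via setState.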
void _initializeCamera() async {
CameraDescription description = await getCamera(_direction);
ImageRotation rotation = rotationIntToImageRotation(
description.sensorOrientation,
);
_camera = CameraController(
description,
defaultTargetPlatform == TargetPlatform.iOS
? ResolutionPreset.low
: ResolutionPreset.medium,
);
await _camera.initialize();
_camera.startImageStream((CameraImage image) {
if (_isDetecting) return;
_isDetecting = true;
detect(
image,
FirebaseVision.instance
.faceDetector(FaceDetectorOptions(
mode: FaceDetectorMode.accurate,
enableClassification: true))
.processImage,
rotation)
.then(
(dynamic result) {
setState(() {
faces = result;
});
_isDetecting = false;
},
).catchError(
(_) {
_isDetecting = false;
},
);
});
}
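  // Returns a status message until exactly one face with a smiling
  // probability of at least 0.8 is detected; only then paints the smiley
  // overlay with SmilePainterLiveCamera.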
Widget _buildResults() {
    const Text noResultsText = Text('No results!');
    const Text multipleFaceText = Text('Multiple faces!');
    const Text pleaseSmileText = Text('Please smile!');
if (faces == null || _camera == null || !_camera.value.isInitialized) {
return noResultsText;
}
CustomPainter painter;
final Size imageSize = Size(
_camera.value.previewSize.height,
_camera.value.previewSize.width,
);
    if (faces.isEmpty) return noResultsText;
if (faces.length > 1) return multipleFaceText;
var face = faces[0];
if (face.smilingProbability == null || face.smilingProbability < 0.8) {
return pleaseSmileText;
}
painter = SmilePainterLiveCamera(imageSize, faces);
return CustomPaint(
painter: painter,
);
}
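  // Shows an "Initializing Camera..." placeholder until the controller
  // exists, then stacks the camera preview, the smiley overlay and a bottom
  // strip listing each face's bounding-box centre and smile probability.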
Widget _buildImage() {
return Container(
constraints: const BoxConstraints.expand(),
child: _camera == null
? const Center(
child: Text(
'Initializing Camera...',
style: TextStyle(
color: Colors.green,
fontSize: 30.0,
),
),
)
: Stack(
fit: StackFit.expand,
children: <Widget>[
CameraPreview(_camera),
_buildResults(),
Positioned(
bottom: 0.0,
left: 0.0,
right: 0.0,
child: Container(
color: Colors.white,
height: 50.0,
child: ListView(
                      children: (faces ?? <Face>[])
                          .map((face) => Text(
                              "${face.boundingBox.center}, Smile: ${face.smilingProbability}"))
                          .toList(),
),
),
),
],
),
);
}
void _toggleCameraDirection() async {
if (_direction == CameraLensDirection.back) {
_direction = CameraLensDirection.front;
} else {
_direction = CameraLensDirection.back;
}
await _camera.stopImageStream();
await _camera.dispose();
setState(() {
_camera = null;
});
_initializeCamera();
}
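  // A minimal dispose sketch (an addition, not in the original code): release
  // the controller and the detector when the widget is removed. Assumes the
  // CameraController.dispose() and FaceDetector.close() APIs of the camera
  // and firebase_ml_vision packages.
  @override
  void dispose() {
    _camera?.dispose();
    faceDetector.close();
    super.dispose();
  }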
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text("Face Detection with Smile"),
),
body: _buildImage(),
floatingActionButton: FloatingActionButton(
onPressed: _toggleCameraDirection,
child: _direction == CameraLensDirection.back
? const Icon(Icons.camera_front)
: const Icon(Icons.camera_rear),
),
);
}
}

lib/face/face_detection_image.dart (new file, 59 lines)

@@ -0,0 +1,59 @@
import 'package:flutter/material.dart';
import 'dart:io';
import 'smile_painter.dart';
import 'package:firebase_ml_vision/firebase_ml_vision.dart';
import 'dart:ui' as ui show Image;
import 'package:image_picker/image_picker.dart';
class FaceDetectionFromImage extends StatefulWidget {
@override
_FaceDetectionFromImageState createState() => _FaceDetectionFromImageState();
}
class _FaceDetectionFromImageState extends State<FaceDetectionFromImage> {
bool loading = true;
ui.Image image;
List<Face> faces;
final FaceDetector faceDetector = FirebaseVision.instance.faceDetector();
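  // Decodes the picked file's bytes into a dart:ui Image so it can be drawn
  // onto a Canvas by SmilePainter.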
Future<ui.Image> _loadImage(File file) async {
final data = await file.readAsBytes();
return await decodeImageFromList(data);
}
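  // Lets the user pick a gallery photo, runs face detection on it, decodes it
  // for painting, and clears the loading flag so the result is rebuilt.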
  void pickAndProcessImage() async {
    final File file = await ImagePicker.pickImage(source: ImageSource.gallery);
    // The picker returns null when the user cancels; bail out in that case.
    if (file == null) return;
    final FirebaseVisionImage visionImage = FirebaseVisionImage.fromFile(file);
    faces = await faceDetector.processImage(visionImage);
    image = await _loadImage(file);
    setState(() {
      loading = false;
    });
  }
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text('Face detection with Smile'),
),
body: Center(
child: loading
            ? Text('Press the floating action button to load an image.')
: FittedBox(
child: SizedBox(
width: image.width.toDouble(),
height: image.height.toDouble(),
child: FacePaint(
painter: SmilePainter(image, faces),
),
),
),
),
floatingActionButton: FloatingActionButton(
onPressed: pickAndProcessImage,
child: Icon(Icons.image),
),
);
}
}

lib/face/home.dart (new file, 40 lines)

@@ -0,0 +1,40 @@
import 'face_detection_camera.dart';
import 'face_detection_image.dart';
import 'package:flutter/material.dart';
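// A simple launcher screen: two buttons that push the still-image flow and
// the live-camera flow respectively.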
class HomeScreen extends StatelessWidget {
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text('Smile To Face App'),
),
body: Center(
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
RaisedButton(
child: Text('Add Smile to Face from Image'),
onPressed: () {
Navigator.of(context).push(
MaterialPageRoute(
builder: (context) => FaceDetectionFromImage(),
),
);
}),
RaisedButton(
child: Text('Add Smile to Face from Live Camera'),
onPressed: () {
Navigator.of(context).push(
MaterialPageRoute(
builder: (context) => FaceDetectionFromLiveCamera(),
),
);
}),
],
),
),
);
}
}

lib/face/main.dart (new file, 17 lines)

@@ -0,0 +1,17 @@
import 'package:flutter/material.dart';
import 'home.dart';
void main() => runApp(MyApp());
class MyApp extends StatelessWidget {
@override
Widget build(BuildContext context) {
return MaterialApp(
title: 'Flutter Demo',
theme: ThemeData(
primarySwatch: Colors.blue,
),
home: HomeScreen(),
);
}
}

lib/face/smile_painter.dart (new file, 138 lines)

@@ -0,0 +1,138 @@
import 'dart:ui' as ui show Image;
import 'dart:math' as Math;
import 'package:firebase_ml_vision/firebase_ml_vision.dart';
import 'package:flutter/material.dart';
class FacePaint extends CustomPaint {
final CustomPainter painter;
FacePaint({this.painter}) : super(painter: painter);
}
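// Paints the decoded image first, then for each face: a red rectangle around
// the bounding box, a yellow circle sized to half the smaller box dimension,
// a smile arc over the lower half of the face, and two eye dots.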
class SmilePainter extends CustomPainter {
final ui.Image image;
final List<Face> faces;
SmilePainter(this.image, this.faces);
@override
void paint(Canvas canvas, Size size) {
if (image != null) {
canvas.drawImage(image, Offset.zero, Paint());
}
final paintRectStyle = Paint()
..color = Colors.red
..strokeWidth = 30.0
..style = PaintingStyle.stroke;
//Draw Body
final paint = Paint()..color = Colors.yellow;
for (var i = 0; i < faces.length; i++) {
final radius =
Math.min(faces[i].boundingBox.width, faces[i].boundingBox.height) / 2;
final center = faces[i].boundingBox.center;
final smilePaint = Paint()
..style = PaintingStyle.stroke
..strokeWidth = radius / 8;
canvas.drawRect(faces[i].boundingBox, paintRectStyle);
canvas.drawCircle(center, radius, paint);
canvas.drawArc(
Rect.fromCircle(
center: center.translate(0, radius / 8), radius: radius / 2),
0,
Math.pi,
false,
smilePaint);
//Draw the eyes
canvas.drawCircle(Offset(center.dx - radius / 2, center.dy - radius / 2),
radius / 8, Paint());
canvas.drawCircle(Offset(center.dx + radius / 2, center.dy - radius / 2),
radius / 8, Paint());
}
}
@override
bool shouldRepaint(SmilePainter oldDelegate) {
return image != oldDelegate.image || faces != oldDelegate.faces;
}
}
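// Same smiley drawing as SmilePainter, but the detector reports bounding
// boxes in camera-image coordinates, so each box is first mapped into widget
// coordinates with _scaleRect (the rectangle border is left commented out).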
class SmilePainterLiveCamera extends CustomPainter {
final Size imageSize;
final List<Face> faces;
SmilePainterLiveCamera(this.imageSize, this.faces);
@override
void paint(Canvas canvas, Size size) {
// final paintRectStyle = Paint()
// ..color = Colors.red
// ..strokeWidth = 10.0
// ..style = PaintingStyle.stroke;
final paint = Paint()..color = Colors.yellow;
for (var i = 0; i < faces.length; i++) {
//Scale rect to image size
final rect = _scaleRect(
rect: faces[i].boundingBox,
imageSize: imageSize,
widgetSize: size,
);
//Radius for smile circle
final radius = Math.min(rect.width, rect.height) / 2;
//Center of face rect
final Offset center = rect.center;
final smilePaint = Paint()
..style = PaintingStyle.stroke
..strokeWidth = radius / 8;
//Draw rect border
//canvas.drawRect(rect, paintRectStyle);
//Draw body
canvas.drawCircle(center, radius, paint);
//Draw mouth
canvas.drawArc(
Rect.fromCircle(
center: center.translate(0, radius / 8), radius: radius / 2),
0,
Math.pi,
false,
smilePaint);
//Draw the eyes
canvas.drawCircle(Offset(center.dx - radius / 2, center.dy - radius / 2),
radius / 8, Paint());
canvas.drawCircle(Offset(center.dx + radius / 2, center.dy - radius / 2),
radius / 8, Paint());
}
}
@override
bool shouldRepaint(SmilePainterLiveCamera oldDelegate) {
return imageSize != oldDelegate.imageSize || faces != oldDelegate.faces;
}
}
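// Maps a rectangle from image coordinates to widget coordinates by scaling
// each edge with the per-axis ratio of widget size to image size.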
Rect _scaleRect({
@required Rect rect,
@required Size imageSize,
@required Size widgetSize,
}) {
final double scaleX = widgetSize.width / imageSize.width;
final double scaleY = widgetSize.height / imageSize.height;
return Rect.fromLTRB(
rect.left.toDouble() * scaleX,
rect.top.toDouble() * scaleY,
rect.right.toDouble() * scaleX,
rect.bottom.toDouble() * scaleY,
);
}

lib/face/utils.dart (new file, 69 lines)

@@ -0,0 +1,69 @@
import 'dart:async';
import 'dart:typed_data';
import 'dart:ui';
import 'package:camera/camera.dart';
import 'package:firebase_ml_vision/firebase_ml_vision.dart';
import 'package:flutter/foundation.dart';
typedef HandleDetection = Future<List<Face>> Function(FirebaseVisionImage image);
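// Returns the first available camera that faces the requested direction.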
Future<CameraDescription> getCamera(CameraLensDirection dir) async {
return await availableCameras().then(
(List<CameraDescription> cameras) => cameras.firstWhere(
(CameraDescription camera) => camera.lensDirection == dir,
),
);
}
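// ML Vision expects the frame as a single byte buffer, so the image planes
// delivered by the camera plugin (typically Y, U and V on Android) are
// concatenated in order.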
Uint8List concatenatePlanes(List<Plane> planes) {
final WriteBuffer allBytes = WriteBuffer();
planes.forEach((Plane plane) => allBytes.putUint8List(plane.bytes));
return allBytes.done().buffer.asUint8List();
}
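// Describes the raw frame for ML Vision: pixel format, frame size, rotation,
// and per-plane row stride and dimensions.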
FirebaseVisionImageMetadata buildMetaData(
CameraImage image,
ImageRotation rotation,
) {
return FirebaseVisionImageMetadata(
rawFormat: image.format.raw,
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
planeData: image.planes.map(
(Plane plane) {
return FirebaseVisionImagePlaneMetadata(
bytesPerRow: plane.bytesPerRow,
height: plane.height,
width: plane.width,
);
},
).toList(),
);
}
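// Wraps a streamed camera frame into a FirebaseVisionImage and runs the
// supplied detection callback (the face detector's processImage in this app).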
Future<List<Face>> detect(
CameraImage image,
HandleDetection handleDetection,
ImageRotation rotation,
) async {
return handleDetection(
FirebaseVisionImage.fromBytes(
concatenatePlanes(image.planes),
buildMetaData(image, rotation),
),
);
}
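// Converts the sensor orientation reported in degrees into the ImageRotation
// enum; only 0, 90, 180 and 270 are expected.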
ImageRotation rotationIntToImageRotation(int rotation) {
switch (rotation) {
case 0:
return ImageRotation.rotation0;
case 90:
return ImageRotation.rotation90;
case 180:
return ImageRotation.rotation180;
default:
assert(rotation == 270);
return ImageRotation.rotation270;
}
}