This repository has been archived by the owner on Jan 4, 2022. It is now read-only.

Flutter ml_vision plugin updated
Code updated: the ML Kit text-recognition logic moves out of lib/main.dart into a new OcrManager class (lib/ocr_engine.dart), the camera stream drops from ResolutionPreset.medium to ResolutionPreset.low, and the image-stream callback now skips frames while a scan is in flight instead of scanning every frame.
lewix committed Feb 18, 2019
1 parent 0192387 commit c968b77
Showing 3 changed files with 115 additions and 86 deletions.
8 changes: 4 additions & 4 deletions ios/Runner.xcodeproj/project.pbxproj
@@ -185,7 +185,7 @@
TargetAttributes = {
97C146ED1CF9000F007C117D = {
CreatedOnToolsVersion = 7.3.1;
-DevelopmentTeam = EWA7KKRUCC;
+DevelopmentTeam = G8T8XQ7RF6;
ProvisioningStyle = Automatic;
};
};
@@ -401,7 +401,7 @@
CODE_SIGN_IDENTITY = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
-DEVELOPMENT_TEAM = EWA7KKRUCC;
+DEVELOPMENT_TEAM = G8T8XQ7RF6;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@@ -531,7 +531,7 @@
CODE_SIGN_IDENTITY = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
-DEVELOPMENT_TEAM = EWA7KKRUCC;
+DEVELOPMENT_TEAM = G8T8XQ7RF6;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@@ -559,7 +559,7 @@
CODE_SIGN_IDENTITY = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
-DEVELOPMENT_TEAM = EWA7KKRUCC;
+DEVELOPMENT_TEAM = G8T8XQ7RF6;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
114 changes: 32 additions & 82 deletions lib/main.dart
@@ -1,7 +1,7 @@
import 'package:flutter/material.dart';
import 'package:camera/camera.dart';
import 'dart:async';
-import 'package:firebase_ml_vision/firebase_ml_vision.dart';
+import 'ocr_engine.dart';

List<CameraDescription> cameras;

@@ -37,7 +37,7 @@ class _CameraAppState extends State<CameraPage> {
@override
void initState() {
super.initState();
-    controller = CameraController(cameras[0], ResolutionPreset.medium);
+    controller = CameraController(cameras[0], ResolutionPreset.low);
controller.initialize().then((_) {
if (!mounted) {
return;
@@ -63,41 +63,37 @@ class _CameraAppState extends State<CameraPage> {
if (!controller.value.isInitialized) {
return Container();
}
-    return Column(
-      children: [
-        Expanded(
-          child: _cameraPreviewWidget()
-        ),
-        Row(
-          mainAxisAlignment: MainAxisAlignment.spaceEvenly,
-          children: <Widget>[
-            MaterialButton(
-              child: Text("Start Scanning"),
-              textColor: Colors.white,
-              color: Colors.blue,
-              onPressed: () async {
-                await controller.startImageStream((CameraImage availableImage) {
-                  //controller.stopImageStream();
-
-                  if (!_isScanBusy)
-                    _scanText(availableImage);
-                });
-              }
-            ),
-            MaterialButton(
-              child: Text("Stop Scanning"),
-              textColor: Colors.white,
-              color: Colors.red,
-              onPressed: () async => await controller.stopImageStream()
-            )
-          ]
-        )
-      ]
-    );

+    return Column(children: [
+      Expanded(child: _cameraPreviewWidget()),
+      Row(mainAxisAlignment: MainAxisAlignment.spaceEvenly, children: <Widget>[
+        MaterialButton(
+            child: Text("Start Scanning"),
+            textColor: Colors.white,
+            color: Colors.blue,
+            onPressed: () async {
+              await controller.startImageStream((CameraImage availableImage) async {
+                //controller.stopImageStream();
+                if (_isScanBusy) {
+                  print("1.5 -------- isScanBusy, skipping...");
+                  return;
+                }
+
+                print("1 -------- isScanBusy = true");
+                _isScanBusy = true;
+
+                _isScanBusy = await OcrManager.scanText(availableImage);
+              });
+            }),
+        MaterialButton(
+            child: Text("Stop Scanning"),
+            textColor: Colors.white,
+            color: Colors.red,
+            onPressed: () async => await controller.stopImageStream())
+      ])
+    ]);
}

-  Widget _cameraPreviewWidget() {
+  Widget _cameraPreviewWidget() {
if (controller == null || !controller.value.isInitialized) {
return const Text(
'Tap a camera',
@@ -114,50 +110,4 @@ class _CameraAppState extends State<CameraPage> {
);
}
}

-  void _scanText(CameraImage availableImage) async {
-    _isScanBusy = true;
-
-    print("scanning!...");
-
-    /*
-     * https://firebase.google.com/docs/ml-kit/android/recognize-text
-     * .setWidth(480)   // 480x360 is typically sufficient for
-     * .setHeight(360)  // image recognition
-     */
-
-    final FirebaseVisionImageMetadata metadata = FirebaseVisionImageMetadata(
-      rawFormat: availableImage.format.raw,
-      size: Size(availableImage.width.toDouble(), availableImage.height.toDouble()),
-      planeData: availableImage.planes.map((currentPlane) => FirebaseVisionImagePlaneMetadata(
-        bytesPerRow: currentPlane.bytesPerRow,
-        height: currentPlane.height,
-        width: currentPlane.width
-      )).toList(),
-      rotation: ImageRotation.rotation90
-    );
-
-    final FirebaseVisionImage visionImage = FirebaseVisionImage.fromBytes(availableImage.planes[0].bytes, metadata);
-    final TextRecognizer textRecognizer = FirebaseVision.instance.textRecognizer();
-    final VisionText visionText = await textRecognizer.processImage(visionImage);
-
-    print("--------------------visionText:${visionText.text}");
-    for (TextBlock block in visionText.blocks) {
-      // final Rectangle<int> boundingBox = block.boundingBox;
-      // final List<Point<int>> cornerPoints = block.cornerPoints;
-      print(block.text);
-      final List<RecognizedLanguage> languages = block.recognizedLanguages;
-
-      for (TextLine line in block.lines) {
-        // Same getters as TextBlock
-        print(line.text);
-        for (TextElement element in line.elements) {
-          // Same getters as TextBlock
-          print(element.text);
-        }
-      }
-    }
-
-    _isScanBusy = false;
-  }
-}
}
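
The change to main.dart boils down to a frame-skipping pattern: the image-stream callback drops incoming frames while a scan is in flight, then re-arms itself from scanText's return value. A minimal sketch of the pattern in isolation (the _onFrame name is illustrative, not from the commit; it assumes OcrManager.scanText resolves to false once recognition finishes):

  bool _isScanBusy = false;

  Future<void> _onFrame(CameraImage availableImage) async {
    if (_isScanBusy) return;  // a scan is still running; drop this frame
    _isScanBusy = true;       // block further frames
    // scanText resolves to false when recognition completes, re-arming the flag
    _isScanBusy = await OcrManager.scanText(availableImage);
  }

Passing _onFrame to controller.startImageStream reproduces the behavior of the inline closure above.
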
79 changes: 79 additions & 0 deletions lib/ocr_engine.dart
@@ -0,0 +1,79 @@
+import 'package:flutter/material.dart';
+import 'package:camera/camera.dart';
+import 'package:firebase_ml_vision/firebase_ml_vision.dart';
+import 'dart:typed_data';
+import 'package:flutter/foundation.dart';
+
+class OcrManager {
+
+  static Future<bool> scanText(CameraImage availableImage) async {
+
+    print("scanning!...");
+
+    /*
+     * https://firebase.google.com/docs/ml-kit/android/recognize-text
+     * .setWidth(480)   // 480x360 is typically sufficient for
+     * .setHeight(360)  // image recognition
+     */
+
+    final FirebaseVisionImageMetadata metadata = FirebaseVisionImageMetadata(
+      rawFormat: availableImage.format.raw,
+      size: Size(availableImage.width.toDouble(), availableImage.height.toDouble()),
+      planeData: availableImage.planes.map((currentPlane) => FirebaseVisionImagePlaneMetadata(
+        bytesPerRow: currentPlane.bytesPerRow,
+        height: currentPlane.height,
+        width: currentPlane.width
+      )).toList(),
+      rotation: ImageRotation.rotation90
+    );
+
+    final FirebaseVisionImage visionImage = FirebaseVisionImage.fromBytes(availableImage.planes[0].bytes, metadata);
+    final TextRecognizer textRecognizer = FirebaseVision.instance.textRecognizer();
+    final VisionText visionText = await textRecognizer.processImage(visionImage);
+
+    print("--------------------visionText:${visionText.text}");
+    for (TextBlock block in visionText.blocks) {
+      // final Rectangle<int> boundingBox = block.boundingBox;
+      // final List<Point<int>> cornerPoints = block.cornerPoints;
+      print(block.text);
+      //final List<RecognizedLanguage> languages = block.recognizedLanguages;
+
+      for (TextLine line in block.lines) {
+        // Same getters as TextBlock
+        print(line.text);
+        for (TextElement element in line.elements) {
+          // Same getters as TextBlock
+          print(element.text);
+        }
+      }
+    }
+
+    return false;
+  }
+
+  /*
+   * code by
+   * https://github.com/bparrishMines/mlkit_demo/blob/master/lib/main.dart
+   */
+
+  Uint8List concatenatePlanes(List<Plane> planes) {
+    final WriteBuffer allBytes = WriteBuffer();
+    planes.forEach((Plane plane) => allBytes.putUint8List(plane.bytes));
+    return allBytes.done().buffer.asUint8List();
+  }
+
+  FirebaseVisionImageMetadata buildMetaData(CameraImage image) {
+    return FirebaseVisionImageMetadata(
+      rawFormat: image.format.raw,
+      size: Size(image.width.toDouble(), image.height.toDouble()),
+      rotation: ImageRotation.rotation270,
+      planeData: image.planes.map((Plane plane) {
+        return FirebaseVisionImagePlaneMetadata(
+          bytesPerRow: plane.bytesPerRow,
+          height: plane.height,
+          width: plane.width,
+        );
+      }).toList(),
+    );
+  }
+}
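
One thing to note: concatenatePlanes and buildMetaData (credited above to bparrishMines' mlkit_demo) are added but never called in this commit; scanText builds its own metadata and hands only planes[0].bytes to FirebaseVisionImage.fromBytes. A sketch of how the helpers could be wired in if every image plane were wanted (scanTextAllPlanes is hypothetical and not part of the commit):

  // Hypothetical variant built on the otherwise-unused helpers. They are
  // instance methods in the committed code, so an OcrManager instance is needed.
  Future<String> scanTextAllPlanes(CameraImage image) async {
    final OcrManager ocr = OcrManager();
    final FirebaseVisionImage visionImage = FirebaseVisionImage.fromBytes(
      ocr.concatenatePlanes(image.planes),  // every plane, not just planes[0]
      ocr.buildMetaData(image),             // rotation270 metadata from the demo
    );
    final TextRecognizer textRecognizer = FirebaseVision.instance.textRecognizer();
    final VisionText visionText = await textRecognizer.processImage(visionImage);
    return visionText.text;
  }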
