
Commit

Demo UI++
EddyVerbruggen committed Apr 10, 2019
1 parent 5ead746 commit b0f999c
Showing 6 changed files with 29 additions and 18 deletions.
1 change: 1 addition & 0 deletions demo-ng/app/app.css
@@ -79,6 +79,7 @@ Label.mlkit-result {
   margin: 5;
   text-align: left;
   color: white;
+  font-size: 17;
 }
 
 .mask {
@@ -40,7 +40,7 @@
       </GridLayout>
       <Label row="0" col="1" class="text-center c-white" textWrap="true" color="white" verticalAlignment="center" text="The scanner has been configured to detect faces every 30th frame (default is 10). You can tweak this in your usage of the plugin."></Label>
       <StackLayout row="2" col="0" colSpan="3">
-        <Label [text]="mlKitAllOK" textWrap="true" class="m-t-5 c-purple"></Label>
+        <Label [text]="mlKitAllOK" textWrap="true" class="m-t-5 font-weight-bold c-white" [class.c-blue]="allSmilingAndEyesOpen"></Label>
         <GridLayout row="auto" columns="60, *, *, *" class="m-t-5">
           <Label row="0" col="0" class="mlkit-result font-weight-bold" textWrap="true" text="ID"></Label>
           <Label row="0" col="1" class="mlkit-result font-weight-bold" textWrap="true" text="Smiling"></Label>
@@ -11,19 +11,20 @@ export class FaceDetectionComponent extends AbstractMLKitViewComponent {
   faces: Array<MLKitDetectFacesResultFace>;
 
   mlKitAllOK: string;
+  allSmilingAndEyesOpen: boolean = false;
 
   onFaceDetectionResult(scanResult: any): any {
     const value: MLKitDetectFacesOnDeviceResult = scanResult.value;
     if (value.faces.length > 0) {
       this.faces = value.faces;
       console.log("this.faces: " + JSON.stringify(this.faces));
 
-      let allSmilingAndEyesOpen = true;
+      this.allSmilingAndEyesOpen = true;
       value.faces.forEach(face => {
-        allSmilingAndEyesOpen = allSmilingAndEyesOpen && face.smilingProbability && face.leftEyeOpenProbability && face.rightEyeOpenProbability &&
+        this.allSmilingAndEyesOpen = this.allSmilingAndEyesOpen && face.smilingProbability && face.leftEyeOpenProbability && face.rightEyeOpenProbability &&
          face.smilingProbability > 0.7 && face.leftEyeOpenProbability > 0.7 && face.rightEyeOpenProbability > 0.7;
       });
-      this.mlKitAllOK = `All smiling and eyes open? ${allSmilingAndEyesOpen ? 'Yes!' : 'Nope'}`;
+      this.mlKitAllOK = `All smiling and eyes open? ${this.allSmilingAndEyesOpen ? 'Yes!' : 'Nope'}`;
     }
   }
 }
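
Note that the chained && above mixes raw probabilities with boolean comparisons, so allSmilingAndEyesOpen can end up holding undefined rather than a strict false when a probability is missing; the [class.c-blue] binding still treats that as falsy, so the UI works, but the typing is loose. A minimal sketch of an equivalent, strictly boolean check, using the same MLKitDetectFacesResultFace shape shown above (isFaceOK and THRESHOLD are hypothetical names, not part of this commit):

// Sketch only (not part of this commit): an equivalent "all smiling and eyes open"
// check that always yields a strict boolean.
const THRESHOLD = 0.7; // hypothetical constant mirroring the 0.7 used in the diff

function isFaceOK(face: MLKitDetectFacesResultFace): boolean {
  return face.smilingProbability !== undefined && face.smilingProbability > THRESHOLD
      && face.leftEyeOpenProbability !== undefined && face.leftEyeOpenProbability > THRESHOLD
      && face.rightEyeOpenProbability !== undefined && face.rightEyeOpenProbability > THRESHOLD;
}

// Usage inside onFaceDetectionResult would then reduce to:
//   this.allSmilingAndEyesOpen = value.faces.every(isFaceOK);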
@@ -7,6 +7,7 @@
     width="100%"
     height="100%"
     confidenceThreshold="0.6"
+    [torchOn]="torchOn"
     (scanResult)="onImageLabelingResult($event)">
   </MLKitImageLabeling>
 
@@ -13,12 +13,6 @@ export class ImageLabelingComponent extends AbstractMLKitViewComponent {
     confidence: number;
   }>;
 
-  constructor() {
-    super();
-    // let's start with the torch on, just for show
-    this.torchOn = true;
-  }
-
   onImageLabelingResult(scanResult: any): void {
     const value: MLKitImageLabelingOnDeviceResult = scanResult.value;
     this.labels = value.labels;
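
With the constructor removed, the torch is no longer forced on at page load; the new [torchOn]="torchOn" binding in the template above drives it instead, presumably via the torchOn field inherited from AbstractMLKitViewComponent (the deleted constructor set this.torchOn without declaring it locally). A minimal sketch of how a page could now toggle the torch at runtime; the toggleTorch() method and the button binding are assumptions for illustration, not code from this commit:

// Sketch only: a component-side flag plus a toggle the template could bind to,
// e.g. [torchOn]="torchOn" on the ML Kit view and (tap)="toggleTorch()" on a button.
// toggleTorch() is a hypothetical helper, not part of this commit.
export class TorchToggleSketch {
  torchOn: boolean = false;

  toggleTorch(): void {
    this.torchOn = !this.torchOn;
  }
}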
30 changes: 22 additions & 8 deletions demo-ng/app/tabs/mlkit/mlkit.component.ts
@@ -40,7 +40,7 @@ export class MLKitComponent {
     "Custom model",
     "Landmark recognition (cloud)",
     "Language identification",
-    "Smart Reply"
+    "Smart reply"
   ];
 
   private mlkitOnDeviceFeatures: Array<string> = [
@@ -268,22 +268,36 @@
     firebase.mlkit.textrecognition.recognizeTextOnDevice({
       image: imageSource
     }).then((result: MLKitRecognizeTextResult) => {
-      const messages: Array<MLKitSmartReplyConversationMessage> = [];
-      result.blocks.forEach(block => messages.push({
+      const conversation: Array<MLKitSmartReplyConversationMessage> = [];
+      let personId = 0;
+
+      // just faking a conversation based on the text blocks we got from the image :)
+      result.blocks.forEach(block => conversation.push({
         text: block.text,
-        userId: "abc",
-        localUser: false,
-        timestamp: new Date().getTime()
+        userId: "person" + (++personId % 2 === 0 ? 1 : 2),
+        localUser: (personId % 2 === 0),
+        timestamp: new Date().getTime() - 500000 + (personId * 5000)
       }));
 
+      console.log(JSON.stringify(conversation));
+
       firebase.mlkit.smartreply.suggestReplies({
-        messages
+        conversation
       }).then((result: Array<string>) => {
+        console.log("result1: " + JSON.stringify(result));
         alert({
           title: `Suggestions`,
           message: JSON.stringify(result),
           okButtonText: "OK"
         });
-      }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
+      }).catch(errorMessage => {
+        console.log("ML Kit error: " + errorMessage);
+        alert({
+          title: `Error getting suggestions`,
+          message: errorMessage,
+          okButtonText: "Pity.."
+        });
+      });
     }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
   }
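
The reworked block above runs on-device text recognition on the picked image and then fakes a two-person conversation from the recognized blocks: ++personId alternates userId between "person2" (odd) and "person1" (even), localUser is true for the even-numbered messages, and timestamps start roughly eight minutes in the past and advance five seconds per message before the list is handed to firebase.mlkit.smartreply.suggestReplies. A minimal sketch of just that conversation-building step in isolation, assuming the same MLKitSmartReplyConversationMessage shape (buildFakeConversation is a hypothetical helper, not part of this commit):

// Sketch only: the alternating fake-conversation construction from the diff,
// pulled out into a hypothetical standalone helper for clarity.
function buildFakeConversation(blockTexts: Array<string>): Array<MLKitSmartReplyConversationMessage> {
  const conversation: Array<MLKitSmartReplyConversationMessage> = [];
  let personId = 0;
  blockTexts.forEach(text => conversation.push({
    text,
    // odd personId -> "person2", even personId -> "person1" (same ternary as the diff)
    userId: "person" + (++personId % 2 === 0 ? 1 : 2),
    // the even-numbered messages are attributed to the local user
    localUser: (personId % 2 === 0),
    // start ~8 minutes ago and step forward 5 seconds per message
    timestamp: new Date().getTime() - 500000 + (personId * 5000)
  }));
  return conversation;
}

// Usage (hypothetical), mirroring the call in the diff:
//   firebase.mlkit.smartreply.suggestReplies({ conversation: buildFakeConversation(result.blocks.map(b => b.text)) })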

