Face SDK v3.23.0
nk2033 committed Jul 25, 2024
1 parent 766cbca commit 647e78f
Showing 72 changed files with 3,383 additions and 319 deletions.
6 changes: 6 additions & 0 deletions .idea/inspectionProfiles/profiles_settings.xml

Some generated files are not rendered by default.

105 changes: 105 additions & 0 deletions conf/facerec/video_worker_fdatracker_ssyv_pb_fda.xml
@@ -0,0 +1,105 @@
<?xml version="1.0"?>
<opencv_storage>

<video_worker_impl_name>libface_video_worker</video_worker_impl_name>

<tracker_type>fdatracker</tracker_type>



<fitter_type>pb_ff</fitter_type>

<fda_max_bad_count_wait>5</fda_max_bad_count_wait>
<fda_max_weak_count_wait>30</fda_max_weak_count_wait>
<fda_good_threshold>0.7</fda_good_threshold>
<fda_continue_tracking_threshold>0.2</fda_continue_tracking_threshold>
<!--<base_angle>0</base_angle>-->

<iris_enabled>0</iris_enabled>
<iris_config_filepath>conf/facerec/iris_fitter.xml</iris_config_filepath>

<min_detection_period>0.1</min_detection_period>
<max_detection_period>0</max_detection_period>
<move_detection_threshold>8</move_detection_threshold>
<max_processed_width>640</max_processed_width>
<max_processed_height>640</max_processed_height>


<detector_sub_type>pb_fd</detector_sub_type>
<modification>ssyv</modification>
<version>4</version>
<num_threads>1</num_threads>

<iou_threshold>0.45</iou_threshold>
<confidence_threshold>0.4</confidence_threshold>

<use_cuda>0</use_cuda>
<gpu_index>0</gpu_index>
<use_advanced_multithreading>0</use_advanced_multithreading>

<depth_data_flag>0</depth_data_flag>
<depth_liveness_config>conf/facerec/depth_liveness_estimator_cnn.xml</depth_liveness_config>
<timestamp_distance_threshold_in_microsecs>5000</timestamp_distance_threshold_in_microsecs>
<max_frames_number_to_synch_depth>150</max_frames_number_to_synch_depth>
<max_frames_queue_size>3</max_frames_queue_size>

<face_quality_model>share/face_quality/fda_fqm.bin</face_quality_model>

<consecutive_match_count_for_match_found_callback>2</consecutive_match_count_for_match_found_callback>

<recognition_yaw_min_threshold> -40 </recognition_yaw_min_threshold>
<recognition_yaw_max_threshold> 40 </recognition_yaw_max_threshold>

<recognition_pitch_min_threshold> -40 </recognition_pitch_min_threshold>
<recognition_pitch_max_threshold> 40 </recognition_pitch_max_threshold>

<consequent_detector_rejection_count_remove_limit>5</consequent_detector_rejection_count_remove_limit>

<max_detector_confirm_wait_time>3</max_detector_confirm_wait_time>

<max_frame_delay_store>10</max_frame_delay_store>

<good_light_dark_threshold>32</good_light_dark_threshold>
<good_light_range_threshold>32</good_light_range_threshold>
<good_blur_threshold>0.20</good_blur_threshold>

<single_match_mode>0</single_match_mode>

<delayed_samples_in_tracking_callback>0</delayed_samples_in_tracking_callback>

<weak_tracks_in_tracking_callback>0</weak_tracks_in_tracking_callback>

<search_k>1</search_k>


<matching_queue_size_limit>10000</matching_queue_size_limit>
<processing_queue_size_limit>1000</processing_queue_size_limit>

<recognizer_processing_less_memory_consumption>0</recognizer_processing_less_memory_consumption>

<not_found_match_found_callback>0</not_found_match_found_callback>

<store_original_frame_in_raw_sample>0</store_original_frame_in_raw_sample>



<squeeze_match_found_callback_groups>0</squeeze_match_found_callback_groups>

<debug_log_enabled>0</debug_log_enabled>

<downscale_rawsamples_to_preferred_size>1</downscale_rawsamples_to_preferred_size>

<min_template_generation_face_size>-1</min_template_generation_face_size>
<min_tracking_face_size>-1</min_tracking_face_size>
<max_tracking_face_size>-1</max_tracking_face_size>

<need_stable_results>0</need_stable_results>


<age_gender_estimator_config>conf/facerec/age_gender_estimator.xml</age_gender_estimator_config>
<emotions_estimator_config>conf/facerec/emotions_estimator.xml</emotions_estimator_config>

<enable_active_liveness>0</enable_active_liveness>
<active_liveness_estimator_config>conf/facerec/active_liveness_estimator.xml</active_liveness_estimator_config>

</opencv_storage>
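
The file above is a new VideoWorker preset for the ssyv detector modification. For orientation only, here is a minimal sketch of how such a preset is normally selected when a VideoWorker is created through the C++ pbio API; the library/config paths and the createService, createVideoWorker and overrideParameter calls are taken from other Face SDK samples and should be checked against the SDK headers, as they are not part of this commit.

--- illustrative sketch (not part of this commit) ---
#include <facerec/import.h>
#include <facerec/libfacerec.h>

int main()
{
    // Assumed layout: libfacerec.so and conf/facerec relative to the binary (hypothetical paths).
    const pbio::FacerecService::Ptr service = pbio::FacerecService::createService(
        "../lib/libfacerec.so",
        "../conf/facerec");

    // Select the new ssyv preset; values from the xml (e.g. confidence_threshold)
    // can be overridden at runtime instead of editing the file.
    pbio::Config vw_config("video_worker_fdatracker_ssyv_pb_fda.xml");
    vw_config.overrideParameter("confidence_threshold", 0.4);

    const pbio::VideoWorker::Ptr video_worker = service->createVideoWorker(
        pbio::VideoWorker::Params()
            .video_worker_config(vw_config)
            .streams_count(1)
            .processing_threads_count(1)
            .matching_threads_count(0));

    (void)video_worker; // frames would be fed with addVideoFrame in a real application
    return 0;
}
--- end of sketch ---
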
@@ -16,7 +16,7 @@
android:id="@+id/aboutText1"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Video recognition with depth and IR liveness demo for Telpo (v 0.2)\n\nFace SDK version 3.22.1\n"
android:text="Video recognition with depth and IR liveness demo for Telpo (v 0.2)\n\nFace SDK version 3.23.0\n"
android:layout_below="@+id/aboutLogo"
/>

48 changes: 43 additions & 5 deletions examples/cpp/processing_block/processing_block.cpp
@@ -150,7 +150,7 @@ void drawEmotions(const pbio::Context& data, cv::Mat& image)
text_point.y += text_line_height / 3;
}
}
};
}

void drawAgeGenderMaskQuality(const pbio::Context& data, cv::Mat& image, const std::string& className)
{
@@ -196,7 +196,42 @@ void drawAgeGenderMaskQuality(const pbio::Context& data, cv::Mat& image, const std::string& className)
}
}
}
};
}

void drawEyeOpenness(const pbio::Context& data, cv::Mat& image)
{
int width = image.cols;
int heigth = image.rows;
cv::Mat image_copy = image.clone();

drawObjects(data, image, std::string("face"));

for(const pbio::Context& object : data.at("objects"))
{
if(object.at("class").getString().compare("face"))
{
continue;
}

cv::Point textPoint
(
std::min(static_cast<int>(object.at("bbox")[2].getDouble() * width), width),
std::max(static_cast<int>(object.at("bbox")[1].getDouble() * heigth), 0) + 15
);

putTextWithRightExpansion
(
image, "Is left eye open: " + std::to_string(object.at("is_left_eye_open").at("value").getBool()),
textPoint, cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(0, 0, 255), 1, false
);

putTextWithRightExpansion
(
image, "Is right eye open: " + std::to_string(object.at("is_right_eye_open").at("value").getBool()),
cv::Point(textPoint.x, textPoint.y + 15), cv::FONT_HERSHEY_DUPLEX, 0.5, cv::Scalar(0, 0, 255), 1, false
);
}
}

void drawLiveness(const pbio::Context& data, cv::Mat& image)
{
@@ -222,7 +257,7 @@ void drawLiveness(const pbio::Context& data, cv::Mat& image)
}
}
}
};
}

const std::map<std::string, std::string> unitTypes {
{"body", "HUMAN_BODY_DETECTOR"},
@@ -233,6 +268,7 @@ const std::map<std::string, std::string> unitTypes {
{"age", "AGE_ESTIMATOR"},
{"gender","GENDER_ESTIMATOR"},
{"mask", "MASK_ESTIMATOR"},
{"eye_openness", "EYE_OPENNESS_ESTIMATOR"},
{"liveness", "LIVENESS_ESTIMATOR"},
{"quality", "QUALITY_ASSESSMENT_ESTIMATOR"},
{"pose", "HUMAN_POSE_ESTIMATOR"},
@@ -243,7 +279,7 @@ int main(int argc, char **argv)
// print usage
std::cout << "usage: " << argv[0] <<
" [--input_image <path to image>]"
" [--unit_type body|face|face_keypoint|pose|objects|emotions|age|gender|mask|liveness|quality]"
" [--unit_type body|face|face_keypoint|pose|objects|emotions|age|gender|mask|eye_openness|liveness|quality]"
" [--sdk_path ..]"
" [--use_cuda]"
<< std::endl;
@@ -341,7 +377,7 @@ int main(int argc, char **argv)
}

else if(!unit_type.compare("emotions") || !unit_type.compare("gender") ||
!unit_type.compare("age") || !unit_type.compare("mask") ||
!unit_type.compare("age") || !unit_type.compare("mask") || !unit_type.compare("eye_openness") ||
!unit_type.compare("face_keypoint"))
{
auto faceCtx = service->createContext();
@@ -396,6 +432,8 @@ int main(int argc, char **argv)
drawEmotions(ioData, image);
else if(!unit_type.compare("age") || !unit_type.compare("gender") || !unit_type.compare("mask") || !unit_type.compare("quality"))
drawAgeGenderMaskQuality(ioData, image, unit_type);
else if (!unit_type.compare("eye_openness"))
drawEyeOpenness(ioData, image);
else if(unit_type.find("liveness") != std::string::npos)
drawLiveness(ioData, image);

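
Taken together, the additions above wire a new eye_openness unit type into the C++ demo: the unitTypes map gains EYE_OPENNESS_ESTIMATOR, main() routes the option to drawEyeOpenness(), and the per-face results are read from is_left_eye_open / is_right_eye_open. A condensed sketch of that flow follows; it assumes `service`, the image decoding and the detector/fitter blocks are set up exactly as for the other estimators in processing_block.cpp, so it illustrates the call sequence rather than being a complete program.

--- illustrative sketch (not part of this commit) ---
// Assumes `service` (pbio::FacerecService::Ptr) and `ioData` (a pbio::Context that
// already holds the decoded image plus detected and fitted "objects") exist,
// as in the demo's other unit types.
auto configCtx = service->createContext();
configCtx["unit_type"] = unitTypes.at("eye_openness"); // "EYE_OPENNESS_ESTIMATOR"

pbio::ProcessingBlock eyeOpenness = service->createProcessingBlock(configCtx);
eyeOpenness(ioData); // adds is_left_eye_open / is_right_eye_open to each face object

for (const pbio::Context& object : ioData.at("objects"))
{
    if (object.at("class").getString().compare("face"))
        continue; // skip non-face objects

    const bool leftOpen  = object.at("is_left_eye_open").at("value").getBool();
    const bool rightOpen = object.at("is_right_eye_open").at("value").getBool();
    std::cout << "left eye open: " << leftOpen << ", right eye open: " << rightOpen << "\n";
}
--- end of sketch ---
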
44 changes: 39 additions & 5 deletions examples/csharp/processing_block_demo/vs/processing_block_demo.cs
@@ -45,6 +45,7 @@ class Program
{ "age", "AGE_ESTIMATOR" },
{ "gender","GENDER_ESTIMATOR" },
{ "mask", "MASK_ESTIMATOR" },
{ "eye_openness", "EYE_OPENNESS_ESTIMATOR" },
{ "liveness", "LIVENESS_ESTIMATOR" },
{ "quality", "QUALITY_ASSESSMENT_ESTIMATOR" },
{ "pose", "HUMAN_POSE_ESTIMATOR" },
@@ -55,10 +56,10 @@ static int Main(string[] args)
Console.WriteLine
(
$@"Usage: dotnet csharp_csharp_processing_block_demo.dll {System.Reflection.Assembly.GetExecutingAssembly().Location}
[--input_image <path to image>]
[--unit_type body|face|face_keypoint|pose|objects|emotions|age|gender|mask|liveness|quality]
[--sdk_path ../../../]
[--use_cuda]"
[--input_image <path to image>]
[--unit_type body|face|face_keypoint|pose|objects|emotions|age|gender|mask|eye_openness|liveness|quality]
[--sdk_path ../../../]
[--use_cuda]"
);

bool error = false;
@@ -152,7 +153,7 @@ static int Main(string[] args)
ioData["objects"].PushBack(sample.ToContext());
}
}
else if (new List<string> { "emotions", "gender", "age", "mask", "face_keypoint" }.Contains(unitType))
else if (new List<string> { "emotions", "gender", "age", "mask", "eye_openness", "face_keypoint" }.Contains(unitType))
{
ProcessingBlock faceBlock = service.CreateProcessingBlock
(
@@ -247,6 +248,11 @@ static int Main(string[] args)

break;

case "eye_openness":
DrawEyeOpenness(ioData, image);

break;

case "liveness":
DrawLiveness(ioData, image);

@@ -425,6 +431,7 @@ private static void DrawEmotions(Context ioData, Mat image)
}
}
}

private static void DrawAgeGenderMaskQuality(Context ioData, Mat image, string classFilter = "")
{
int width = image.Cols;
@@ -497,6 +504,33 @@ private static void DrawAgeGenderMaskQuality(Context ioData, Mat image, string classFilter = "")
}
}

private static void DrawEyeOpenness(Context ioData, Mat image)
{
int width = image.Cols;
int heigth = image.Rows;
Context objects = ioData["objects"];

DrawObjects(ioData, image, "face");

for (int i = 0; i < (int)objects.Length(); i++)
{
Context obj = objects[i];

if (obj["class"].GetString() != "face")
{
continue;
}

OpenCvSharp.Point textPoint = new(obj["bbox"][2].GetDouble() * width, obj["bbox"][1].GetDouble() * heigth);

PutTextWithRightExpansion(image, $"Is left eye open: {obj["is_left_eye_open"]["value"].GetBool()}", textPoint, HersheyFonts.HersheyDuplex, 0.5, new(0, 0, 255), 1);

textPoint.Y += 15;

PutTextWithRightExpansion(image, $"Is right eye open: {obj["is_right_eye_open"]["value"].GetBool()}", textPoint, HersheyFonts.HersheyDuplex, 0.5, new(0, 0, 255), 1);
}
}

private static void DrawLiveness(Context ioData, Mat image)
{
int width = image.Cols;
28 changes: 4 additions & 24 deletions examples/flutter/demo/lib/home.dart
@@ -10,42 +10,22 @@ typedef void setServiceCallback(FacerecService templ);

class HomePage extends StatefulWidget {
final List<CameraDescription> cameras;
final String _dataDir;
final setServiceCallback _setService;
final String nextRoute;

HomePage(this.cameras, this._dataDir, this.nextRoute, this._setService);
HomePage(this.cameras, this.nextRoute, this._setService);

@override
_HomePageState createState() => new _HomePageState();
}

class _HomePageState extends State<HomePage> {
String _libDir = "";
static const platform = const MethodChannel('samples.flutter.dev/facesdk');
late FacerecService _facerecService;
bool _loading = true;

Future<void> getLibDir() async {
String libDir = "None";
try {
final String res = await platform.invokeMethod('getNativeLibDir');
libDir = res;
} on PlatformException catch (e) {}
setState(() {
_libDir = libDir;
});
}
Future<void> createService() async {
_facerecService = await FaceSdkPlugin.createFacerecService();

void createService(){
if(widget._dataDir == '' || _libDir == ''){
return;
}
_facerecService = FaceSdkPlugin.createFacerecService(
widget._dataDir + "/conf/facerec",
widget._dataDir + "/license",
libPath: _libDir + "/" + FaceSdkPlugin.nativeLibName
);
setState(() {
widget._setService(_facerecService);
_loading = false;
@@ -55,7 +35,7 @@ class _HomePageState extends State<HomePage> {
@override
void initState() {
super.initState();
getLibDir().whenComplete(() {createService();});
createService();
}

@override