I want to know how these glasses are worn on the face and how it works.
I use the Unity3D engine + MediaPipe to drive a face in real time, but I don't know how to achieve the effect of wearing glasses. Please give me some advice.
https://user-images.githubusercontent.com/60952586/187204941-ebde46a9-4ab7-4212-acda-65ee5747bfb4.mp4
Hi @wizVR-zhangjun, at present we don't have any examples, but you can go through this for more understanding: https://github.com/google/mediapipe/issues/1155. You can also build one from here: https://google.github.io/mediapipe/solutions/face_mesh#face-effect-example. Thank you!
Hello @wizVR-zhangjun, I am an international student in the computer science department at Keimyung University in Korea. We are working on a face-swapping project using MediaPipe in Unity, but we don't know how to get the coordinates. Could we ask you for advice? If possible, please leave a way to contact you. Many thanks!
@ma6667 https://github.com/homuler/MediaPipeUnityPlugin I used this plug-in, I hope it was helpful
@wizVR-zhangjun, we used this MediaPipeUnityPlugin. Could you share your code? We are working on the same kind of project, but we don't know how to add an image or mesh to the face in it.
@ma6667 Here is the code I used, in case it helps get your project off the ground:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using Stopwatch = System.Diagnostics.Stopwatch;
namespace Mediapipe.Unity.Hqft
{
public class FaceMesh : MonoBehaviour
{
/// <summary>
/// Graph config file used on the Android (GPU) side
/// </summary>
[SerializeField] private TextAsset _configAssetGpu;
/// <summary>
/// Graph config file used in the Editor (CPU) side
/// </summary>
[SerializeField] private TextAsset _configAssetCpu;
/// <summary>
/// RawImage that displays the camera feed
/// </summary>
[SerializeField] private RawImage _screen;
/// <summary>
/// Camera capture settings
/// </summary>
[SerializeField] private int _width;
[SerializeField] private int _height;
[SerializeField] private int _fps;
/// <summary>
/// Face landmark annotation controller
/// </summary>
[SerializeField] private MultiFaceLandmarkListAnnotationController _multiFaceLandmarksAnnotationController;
private CalculatorGraph _graph;
private WebCamTexture _webCamTexture;
private Texture2D _inputTexture;
private Color32[] _inputPixelData;
private IEnumerator Start()
{
// Configure the annotation controller; these settings are used by the drawing later on
#if UNITY_EDITOR
_multiFaceLandmarksAnnotationController.rotationAngle = RotationAngle.Rotation0;
_multiFaceLandmarksAnnotationController.isMirrored = false;
#elif UNITY_ANDROID
_multiFaceLandmarksAnnotationController.rotationAngle = RotationAngle.Rotation270;
_multiFaceLandmarksAnnotationController.isMirrored = true;
#endif
if (WebCamTexture.devices.Length == 0)
{
throw new System.Exception("Web Camera devices are not found");
}
var webCamDevice = WebCamTexture.devices[0];
_webCamTexture = new WebCamTexture(webCamDevice.name, _width, _height, _fps);
_webCamTexture.Play();
yield return new WaitUntil(() => _webCamTexture.width > 16);
// Platform check: outside the Editor (on Android) the GPU resources must be initialized
#if !UNITY_EDITOR
yield return GpuManager.Initialize();
if (!GpuManager.IsInitialized)
{
throw new System.Exception("Failed to initialize GPU resources");
}
#endif
_screen.rectTransform.sizeDelta = new Vector2(_width, _height);
_inputTexture = new Texture2D(_width, _height, TextureFormat.RGBA32, false);
_inputPixelData = new Color32[_width*_height];
// The camera image is oriented differently on phone and PC, so the screen needs to be rotated
#if UNITY_EDITOR_WIN
_screen.rectTransform.localEulerAngles = new Vector3(0, 0, 0);
#else
_screen.rectTransform.localEulerAngles = new Vector3(0, 0, 270);
#endif
_screen.texture = _webCamTexture;
// Load the face models; these files live in the StreamingAssets directory
var resourceManager = new StreamingAssetsResourceManager();
yield return resourceManager.PrepareAssetAsync("face_detection_short_range.bytes");
yield return resourceManager.PrepareAssetAsync("face_landmark_with_attention.bytes");
var stopwatch = new Stopwatch(); // used to generate timestamps for the input packets
var sidePacket = new SidePacket();
// Configure the side packets and build the graph for the current platform
#if UNITY_EDITOR
_screen.uvRect = new UnityEngine.Rect(1, 0, -1, 1);
sidePacket.Emplace("num_faces", new IntPacket(1));
sidePacket.Emplace("with_attention", new BoolPacket(true));
sidePacket.Emplace("input_rotation", new IntPacket(180));
sidePacket.Emplace("input_horizontally_flipped", new BoolPacket(false));
sidePacket.Emplace("input_vertically_flipped", new BoolPacket(false));
_graph = new CalculatorGraph(_configAssetCpu.text);
#elif UNITY_ANDROID
_screen.uvRect = new UnityEngine.Rect(0, 0, 1, 1);
sidePacket.Emplace("num_faces", new IntPacket(1));
sidePacket.Emplace("with_attention", new BoolPacket(true));
sidePacket.Emplace("input_rotation", new IntPacket(270));
sidePacket.Emplace("input_horizontally_flipped", new BoolPacket(false));
sidePacket.Emplace("input_vertically_flipped", new BoolPacket(false));
_graph = new CalculatorGraph(_configAssetGpu.text);
_graph.SetGpuResources(GpuManager.GpuResources).AssertOk();
#endif
// Create the output stream for the landmarks, start polling it, then start the graph
var multiFaceLandmarksStream = new OutputStream<NormalizedLandmarkListVectorPacket, List<NormalizedLandmarkList>>(_graph, "multi_face_landmarks"); // output_video / multi_face_landmarks
multiFaceLandmarksStream.StartPolling().AssertOk();
_graph.StartRun(sidePacket).AssertOk();
stopwatch.Start(); //启动时间戳
while (true)
{
// Feed the current camera frame into the graph's input stream
_inputTexture.SetPixels32(_webCamTexture.GetPixels32(_inputPixelData));
var imageFrame = new ImageFrame(ImageFormat.Format.SRGBA, _width, _height, _width * 4, _inputTexture.GetRawTextureData<byte>());
var currentTimestamp = stopwatch.ElapsedTicks / (System.TimeSpan.TicksPerMillisecond / 1000);
_graph.AddPacketToInputStream("input_video", new ImageFramePacket(imageFrame, new Timestamp(currentTimestamp))).AssertOk();
yield return new WaitForEndOfFrame();
if (multiFaceLandmarksStream.TryGetNext(out var multiFaceLandmarks))
{
_multiFaceLandmarksAnnotationController.DrawNow(multiFaceLandmarks);
}
else
{
_multiFaceLandmarksAnnotationController.DrawNow(null);
}
#region Raw landmark data, read directly from MediaPipe
//if (multiFaceLandmarksStream.TryGetNext(out var multiFaceLandmarks))
//{
// if (multiFaceLandmarks != null)
// {
// for(int i = 0; i < multiFaceLandmarks.Count; i++)
// {
// if (multiFaceLandmarks[i].Landmark != null)
// {
// int count = multiFaceLandmarks[i].Landmark.Count;
// var landmark = multiFaceLandmarks[i].Landmark;
// Debug.Log(landmark.ToString());
// }
// }
// }
//}
#endregion
}
}
private void OnDestroy()
{
if (_webCamTexture != null)
{
_webCamTexture.Stop();
}
if (_graph != null)
{
try
{
_graph.CloseInputStream("input_video").AssertOk();
_graph.WaitUntilDone().AssertOk();
}
finally
{
_graph.Dispose();
}
}
#if !UNITY_EDITOR
GpuManager.Shutdown();
#endif
}
#region Compute the face rotation from the landmark points (left empty; see the sketch below)
#endregion
}
}
I hope it was helpful
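Editorial note, not from the original post: the region for computing the face rotation above was left empty, and the follow-up question was how to attach an image or mesh (e.g. glasses) to the face. Below is a minimal sketch of one way to do that from the polled landmark list. The GlassesAttacher class, the _glasses and _camera fields, and the landmark indices 33 / 263 (outer eye corners) and 1 (nose tip) are assumptions based on the canonical MediaPipe face mesh numbering, not something the plugin provides.
using System.Collections.Generic;
using UnityEngine;
namespace Mediapipe.Unity.Hqft
{
// Hypothetical helper: positions and orients a glasses model using three face-mesh landmarks.
public class GlassesAttacher : MonoBehaviour
{
[SerializeField] private Transform _glasses; // the glasses model to move (assumed to exist in the scene)
[SerializeField] private Camera _camera;     // camera that renders the screen
[SerializeField] private float _depth = 1f;  // world-space distance from the camera
public void UpdateGlasses(IList<NormalizedLandmarkList> multiFaceLandmarks)
{
if (multiFaceLandmarks == null || multiFaceLandmarks.Count == 0) { return; }
var landmarks = multiFaceLandmarks[0].Landmark;
if (landmarks == null || landmarks.Count < 264) { return; }
var rightEye = ToWorld(landmarks[33]);
var leftEye = ToWorld(landmarks[263]);
var noseTip = ToWorld(landmarks[1]);
// Place the glasses between the eyes and orient them along the eye line.
var center = (rightEye + leftEye) * 0.5f;
var right = (leftEye - rightEye).normalized;
var up = (center - noseTip).normalized;
_glasses.position = center;
_glasses.rotation = Quaternion.LookRotation(Vector3.Cross(right, up), up);
_glasses.localScale = Vector3.one * Vector3.Distance(rightEye, leftEye);
}
// Converts a normalized landmark (x, y in [0, 1], y pointing down) to a
// world-space point at a fixed depth in front of the camera.
private Vector3 ToWorld(NormalizedLandmark landmark)
{
return _camera.ViewportToWorldPoint(new Vector3(landmark.X, 1f - landmark.Y, _depth));
}
}
}
In the loop above, after multiFaceLandmarksStream.TryGetNext(out var multiFaceLandmarks) succeeds, you would call something like _glassesAttacher.UpdateGlasses(multiFaceLandmarks) in addition to (or instead of) drawing the annotation. This places the glasses on a plane at a fixed depth; for true 3D fitting you would instead use the face geometry / pose transform matrix from the face-effect example linked earlier.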
@wizVR-zhangjun, thank you very much for the code. If needed, we can discuss it with each other later. Thanks!
Hi @wizVR-zhangjun, could you confirm whether the above comment gives you a better understanding of rendering different effects on faces? Thank you!
@kuaashish I'm sorry. I'm still a little confused
Getting the following error:
MediaPipeException: Failed to parse config text. See error logs for more details
  at Mediapipe.CalculatorGraphConfigExtension.ParseFromTextFormat (Google.Protobuf.MessageParser`1[T] _, System.String configText) [0x00027] in /Users/neeraj/Projects/Abbvie Unity Project/AbbvieImageCapturePoC/Packages/package/Runtime/Scripts/Framework/CalculatorGraphConfigExtension.cs:21
@NeerajMehta I'm not sure whether your build succeeded, and I don't know enough about that error to be able to help with it.