Anim: Motion Paths in camera space

Animators (especially for film and TV) often need
to track the movement of things in screen space.
At the end of the day, the pixel motion is what counts.
But motion paths were always in world space,
which made them hard to use when the camera
is also animated (e.g. during action scenes).

This PR introduces the feature of projecting a motion path into the screen space of the active scene camera.

Limitations
This makes the motion path only useful when looking through the active scene camera.
Switching the scene camera using markers is not yet supported.

Technical Implementation
This is achieved by baking the motion path points into the
camera space on creation. For every point calculated,
the camera is evaluated through the depsgraph and
the resulting world matrix is used.
Then the camera's world matrix for the current frame
is passed into the shader to make sure the points follow it.
As can be seen in the video, it looks quite odd when
viewed from another angle, but this is expected.
I mentioned that in the tooltip, so it shouldn't be an issue.

Pull Request: https://projects.blender.org/blender/blender/pulls/117593
This commit is contained in:
Christoph Lendenfeld 2024-02-06 23:14:17 +01:00 committed by Christoph Lendenfeld
parent 28208cfddb
commit 79f84775f2
9 changed files with 87 additions and 6 deletions

View File

@ -42,6 +42,9 @@ class MotionPathButtonsPanel:
start_end_group.prop(mps, "frame_end", text="End")
col.prop(mps, "frame_step", text="Step")
row = col.row()
row.prop(mps, "bake_in_camera_space", text="Bake to Active Camera")
if bones:
op_category = "pose"
icon = 'BONE_DATA'

View File

@ -149,6 +149,13 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
if (*dst != nullptr) {
mpath = *dst;
if (avs->path_bakeflag & MOTIONPATH_BAKE_CAMERA_SPACE) {
mpath->flag |= MOTIONPATH_FLAG_BAKE_CAMERA;
}
else {
mpath->flag &= ~MOTIONPATH_FLAG_BAKE_CAMERA;
}
/* Only reuse a path if it was already a valid path, and of the expected length. */
if (mpath->start_frame != mpath->end_frame && mpath->length == expected_length) {
mpath->start_frame = avs->path_sf;
@ -176,6 +183,13 @@ bMotionPath *animviz_verify_motionpaths(ReportList *reports,
mpath->flag &= ~MOTIONPATH_FLAG_BHEAD;
}
if (avs->path_bakeflag & MOTIONPATH_BAKE_CAMERA_SPACE) {
mpath->flag |= MOTIONPATH_FLAG_BAKE_CAMERA;
}
else {
mpath->flag &= ~MOTIONPATH_FLAG_BAKE_CAMERA;
}
/* Set default custom values (RGB). */
mpath->color[0] = 1.0;
mpath->color[1] = 0.0;

View File

@ -9,6 +9,7 @@
#include "DRW_render.hh"
#include "BLI_listbase.h"
#include "BLI_math_matrix.hh"
#include "BLI_string.h"
#include "DNA_armature_types.h"
@ -107,12 +108,22 @@ static void motion_path_get_frame_range_to_draw(bAnimVizSettings *avs,
*r_step = max_ii(avs->path_step, 1);
}
/* Return the camera the motion path should be baked relative to, or nullptr
 * when camera-space baking is not enabled in the given bake flags.
 * NOTE(review): assumes draw_context->v3d is valid — confirm callers only
 * invoke this from a 3D viewport draw pass. */
static Object *get_camera_for_motion_path(const DRWContextState *draw_context,
                                          const eMotionPath_BakeFlag bake_flag)
{
  const bool bake_to_camera = (bake_flag & MOTIONPATH_BAKE_CAMERA_SPACE) != 0;
  return bake_to_camera ? draw_context->v3d->camera : nullptr;
}
static void motion_path_cache(OVERLAY_Data *vedata,
Object *ob,
bPoseChannel *pchan,
bAnimVizSettings *avs,
bMotionPath *mpath)
{
using namespace blender;
OVERLAY_PrivateData *pd = vedata->stl->pd;
const DRWContextState *draw_ctx = DRW_context_state_get();
DRWTextStore *dt = DRW_text_cache_ensure();
@ -135,6 +146,16 @@ static void motion_path_cache(OVERLAY_Data *vedata,
}
int start_index = sfra - mpath->start_frame;
float camera_matrix[4][4];
Object *motion_path_camera = get_camera_for_motion_path(
draw_ctx, eMotionPath_BakeFlag(avs->path_bakeflag));
if (motion_path_camera) {
copy_m4_m4(camera_matrix, motion_path_camera->object_to_world);
}
else {
unit_m4(camera_matrix);
}
/* Draw curve-line of path. */
if (show_lines) {
const int motion_path_settings[4] = {cfra, sfra, efra, mpath->start_frame};
@ -143,6 +164,7 @@ static void motion_path_cache(OVERLAY_Data *vedata,
DRW_shgroup_uniform_int_copy(grp, "lineThickness", mpath->line_thickness);
DRW_shgroup_uniform_bool_copy(grp, "selected", selected);
DRW_shgroup_uniform_vec3_copy(grp, "customColor", color);
DRW_shgroup_uniform_mat4_copy(grp, "camera_space_matrix", camera_matrix);
/* Only draw the required range. */
DRW_shgroup_call_range(grp, nullptr, mpath_batch_line_get(mpath), start_index, len);
}
@ -155,6 +177,7 @@ static void motion_path_cache(OVERLAY_Data *vedata,
DRW_shgroup_uniform_ivec4_copy(grp, "mpathPointSettings", motion_path_settings);
DRW_shgroup_uniform_bool_copy(grp, "showKeyFrames", show_keyframes);
DRW_shgroup_uniform_vec3_copy(grp, "customColor", color);
DRW_shgroup_uniform_mat4_copy(grp, "camera_space_matrix", camera_matrix);
/* Only draw the required range. */
DRW_shgroup_call_range(grp, nullptr, mpath_batch_points_get(mpath), start_index, len);
}
@ -168,17 +191,29 @@ static void motion_path_cache(OVERLAY_Data *vedata,
UI_GetThemeColor3ubv(TH_VERTEX_SELECT, col_kf);
col[3] = col_kf[3] = 255;
Object *cam_eval = nullptr;
if (motion_path_camera) {
cam_eval = DEG_get_evaluated_object(draw_ctx->depsgraph, motion_path_camera);
}
bMotionPathVert *mpv = mpath->points + start_index;
for (i = 0; i < len; i += stepsize, mpv += stepsize) {
int frame = sfra + i;
char numstr[32];
size_t numstr_len;
bool is_keyframe = (mpv->flag & MOTIONPATH_VERT_KEY) != 0;
float3 vert_coordinate;
copy_v3_v3(vert_coordinate, mpv->co);
if (cam_eval) {
/* Projecting the point into world space from the cameras pov. */
vert_coordinate = math::transform_point(float4x4(cam_eval->object_to_world),
vert_coordinate);
}
if ((show_keyframes && show_keyframes_no && is_keyframe) || (show_frame_no && (i == 0))) {
numstr_len = SNPRINTF_RLEN(numstr, " %d", frame);
DRW_text_cache_add(
dt, mpv->co, numstr, numstr_len, 0, 0, txt_flag, (is_keyframe) ? col_kf : col);
dt, vert_coordinate, numstr, numstr_len, 0, 0, txt_flag, (is_keyframe) ? col_kf : col);
}
else if (show_frame_no) {
bMotionPathVert *mpvP = (mpv - stepsize);
@ -187,7 +222,7 @@ static void motion_path_cache(OVERLAY_Data *vedata,
* don't occur on same point. */
if ((equals_v3v3(mpv->co, mpvP->co) == 0) || (equals_v3v3(mpv->co, mpvN->co) == 0)) {
numstr_len = SNPRINTF_RLEN(numstr, " %d", frame);
DRW_text_cache_add(dt, mpv->co, numstr, numstr_len, 0, 0, txt_flag, col);
DRW_text_cache_add(dt, vert_coordinate, numstr, numstr_len, 0, 0, txt_flag, col);
}
}
}

View File

@ -193,6 +193,7 @@ GPU_SHADER_CREATE_INFO(overlay_motion_path_line)
.push_constant(Type::BOOL, "selected")
.push_constant(Type::VEC3, "customColor")
.push_constant(Type::INT, "lineThickness") /* In pixels. */
.push_constant(Type::MAT4, "camera_space_matrix")
.vertex_out(overlay_motion_path_line_iface)
.vertex_out(overlay_motion_path_line_flat_iface)
.geometry_out(overlay_motion_path_line_iface)
@ -236,6 +237,7 @@ GPU_SHADER_CREATE_INFO(overlay_motion_path_point)
.push_constant(Type::IVEC4, "mpathPointSettings")
.push_constant(Type::BOOL, "showKeyFrames")
.push_constant(Type::VEC3, "customColor")
.push_constant(Type::MAT4, "camera_space_matrix")
.vertex_out(overlay_motion_path_point_iface)
.fragment_out(0, Type::VEC4, "fragColor")
.vertex_source("overlay_motion_path_point_vert.glsl")

View File

@ -25,7 +25,7 @@ float calc_intensity(int segment_start, int segment_current, int segment_end, fl
void main()
{
gl_Position = drw_view.winmat * (drw_view.viewmat * vec4(pos, 1.0));
gl_Position = drw_view.winmat * (drw_view.viewmat * (camera_space_matrix * vec4(pos, 1.0)));
interp_flat.ss_pos = proj(gl_Position);

View File

@ -12,7 +12,7 @@
void main()
{
gl_Position = drw_view.winmat * (drw_view.viewmat * vec4(pos, 1.0));
gl_Position = drw_view.winmat * (drw_view.viewmat * (camera_space_matrix * vec4(pos, 1.0)));
gl_PointSize = float(pointSize + 2);
int frame = gl_VertexID + cacheStart;

View File

@ -13,6 +13,7 @@
#include "BLI_dlrbTree.h"
#include "BLI_listbase.h"
#include "BLI_math_matrix.h"
#include "BLI_math_matrix.hh"
#include "DNA_anim_types.h"
#include "DNA_armature_types.h"
@ -127,8 +128,12 @@ void animviz_get_object_motionpaths(Object *ob, ListBase *targets)
/* ........ */
/* perform baking for the targets on the current frame */
static void motionpaths_calc_bake_targets(ListBase *targets, int cframe)
static void motionpaths_calc_bake_targets(ListBase *targets,
int cframe,
Depsgraph *depsgraph,
Object *camera)
{
using namespace blender;
/* for each target, check if it can be baked on the current frame */
LISTBASE_FOREACH (MPathTarget *, mpt, targets) {
bMotionPath *mpath = mpt->mpath;
@ -170,6 +175,14 @@ static void motionpaths_calc_bake_targets(ListBase *targets, int cframe)
copy_v3_v3(mpv->co, ob_eval->object_to_world[3]);
}
if (mpath->flag & MOTIONPATH_FLAG_BAKE_CAMERA && camera) {
Object *cam_eval = DEG_get_evaluated_object(depsgraph, camera);
/* Convert point to camera space. */
float3 co_camera_space = math::transform_point(float4x4(cam_eval->world_to_object),
float3(mpv->co));
copy_v3_v3(mpv->co, co_camera_space);
}
float mframe = float(cframe);
/* Tag if it's a keyframe */
@ -503,7 +516,7 @@ void animviz_calc_motionpaths(Depsgraph *depsgraph,
}
/* perform baking for targets */
motionpaths_calc_bake_targets(targets, scene->r.cfra);
motionpaths_calc_bake_targets(targets, scene->r.cfra, depsgraph, scene->camera);
}
/* reset original environment */

View File

@ -86,6 +86,8 @@ typedef enum eMotionPath_Flag {
MOTIONPATH_FLAG_CUSTOM = (1 << 2),
/* Draw lines or only points */
MOTIONPATH_FLAG_LINES = (1 << 3),
/* Bake to scene camera. */
MOTIONPATH_FLAG_BAKE_CAMERA = (1 << 4),
} eMotionPath_Flag;
/* Visualization General --------------------------- */
@ -163,6 +165,8 @@ typedef enum eMotionPaths_BakeFlag {
/** motion paths exist for AnimVizSettings instance - set when calc for first time,
* and unset when clearing */
MOTIONPATH_BAKE_HAS_PATHS = (1 << 2),
/* Bake the path in camera space. */
MOTIONPATH_BAKE_CAMERA_SPACE = (1 << 3),
} eMotionPath_BakeFlag;
/* runtime */

View File

@ -313,6 +313,16 @@ static void rna_def_animviz_paths(BlenderRNA *brna)
RNA_def_property_ui_text(
prop, "Has Motion Paths", "Are there any bone paths that will need updating (read-only)");
/* If enabled, bakes the motion paths into camera space. */
prop = RNA_def_property(srna, "bake_in_camera_space", PROP_BOOLEAN, PROP_NONE);
RNA_def_property_boolean_sdna(prop, nullptr, "path_bakeflag", MOTIONPATH_BAKE_CAMERA_SPACE);
RNA_def_property_ui_text(
prop,
"Bake to active Camera",
"Motion path points will be baked into the camera space of the active camera. This means "
"they will only look right when looking through that camera. Switching cameras using "
"markers is not supported");
RNA_define_lib_overridable(false);
}