Merge branch 'bf-blender' into mb-0009-addon-menu-references

This commit is contained in:
Jaume Bellet 2024-02-25 23:32:30 +01:00
commit 5ca0cb05b7
1994 changed files with 49682 additions and 33840 deletions

View File

@ -279,5 +279,6 @@ StatementMacros:
MacroBlockBegin: "^OSL_CLOSURE_STRUCT_BEGIN$"
MacroBlockEnd: "^OSL_CLOSURE_STRUCT_END$"
# Ensure lew line at the end of source files.
# Ensure single new line at the end of source files.
InsertNewlineAtEOF: True
KeepEmptyLinesAtEOF: False

.gitignore
View File

@ -56,6 +56,7 @@ waveletNoiseTile.bin
# External repositories.
/scripts/addons/
/scripts/addons_contrib/
/tests/benchmarks/
# Ignore old submodules directories.
# Eventually need to get rid of those, but for the first time of transition

.gitmodules (new file)
View File

@ -0,0 +1,29 @@
[submodule "lib/linux_x64"]
update = none
path = lib/linux_x64
url = https://projects.blender.org/blender/lib-linux_x64.git
branch = main
[submodule "lib/macos_arm64"]
update = none
path = lib/macos_arm64
url = https://projects.blender.org/blender/lib-macos_arm64.git
branch = main
[submodule "lib/macos_x64"]
update = none
path = lib/macos_x64
url = https://projects.blender.org/blender/lib-macos_x64.git
branch = main
[submodule "lib/windows_x64"]
update = none
path = lib/windows_x64
url = https://projects.blender.org/blender/lib-windows_x64.git
branch = main
[submodule "release/datafiles/assets"]
path = release/datafiles/assets
url = https://projects.blender.org/blender/blender-assets.git
branch = main
[submodule "tests/data"]
update = none
path = tests/data
url = https://projects.blender.org/blender/blender-test-data.git
branch = main
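
All four lib/<platform> submodules and tests/data are registered with update = none, so a plain `git submodule update --init` leaves them untouched; `make update` enables only the one matching the host platform (and tests/data when --use-tests is passed) before fetching it. A rough illustration of that per-submodule step using plain Git commands — the helper name and the choice of "checkout" as the update mode are assumptions here; the real logic lives in make_utils.git_enable_submodule() and git_update_submodule():

    import subprocess

    def enable_and_update_submodule(path: str) -> None:
        # Drop the "update = none" override so this submodule takes part in updates.
        subprocess.check_call(["git", "config", f"submodule.{path}.update", "checkout"])
        # Initialize/update the working tree; large files are fetched via git-lfs.
        subprocess.check_call(["git", "submodule", "update", "--init", "--progress", path])

    enable_and_update_submodule("lib/linux_x64")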

View File

@ -259,6 +259,28 @@ else()
set(WITH_UNITY_BUILD OFF)
endif()
if(COMMAND target_precompile_headers)
# Disabling is needed for `./tools/utils_maintenance/code_clean.py` to function.
option(WITH_COMPILER_PRECOMPILED_HEADERS "\
Use pre-compiled headers to speed up compilation."
ON
)
mark_as_advanced(WITH_COMPILER_PRECOMPILED_HEADERS)
if(WITH_CLANG_TIDY AND CMAKE_COMPILER_IS_GNUCC)
if(WITH_COMPILER_PRECOMPILED_HEADERS)
message(STATUS
"Clang-Tidy and GCC precompiled headers are incompatible, disabling precompiled headers"
)
set(WITH_COMPILER_PRECOMPILED_HEADERS OFF)
endif()
endif()
if(NOT WITH_COMPILER_PRECOMPILED_HEADERS)
set(CMAKE_DISABLE_PRECOMPILE_HEADERS ON)
endif()
endif()
option(WITH_IK_ITASC "\
Enable ITASC IK solver (only disable for development & for incompatible C++ compilers)"
ON
@ -387,6 +409,18 @@ else()
set(WITH_SYSTEM_EIGEN3 OFF)
endif()
if((NOT WITH_PYTHON_MODULE) AND (
(WIN32 AND (CMAKE_SYSTEM_PROCESSOR STREQUAL "AMD64")) OR
((UNIX AND NOT APPLE) AND (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64"))))
option(WITH_CPU_CHECK "\
Report when a CPU is not compatible on startup \
instead of failing to start with an inscrutable error."
ON
)
mark_as_advanced(WITH_CPU_CHECK)
else()
set(WITH_CPU_CHECK OFF)
endif()
# Modifiers
option(WITH_MOD_FLUID "Enable Mantaflow Fluid Simulation Framework" ON)
@ -400,7 +434,6 @@ option(WITH_IMAGE_CINEON "Enable CINEON and DPX Image Support" ON)
option(WITH_IMAGE_WEBP "Enable WebP Image Support" ON)
# Audio/Video format support
option(WITH_CODEC_AVI "Enable Blenders own AVI file support (raw/jpeg)" ON)
option(WITH_CODEC_FFMPEG "Enable FFMPeg Support (http://ffmpeg.org)" ON)
option(WITH_CODEC_SNDFILE "Enable libsndfile Support (http://www.mega-nerd.com/libsndfile)" ON)
@ -703,12 +736,7 @@ mark_as_advanced(WITH_DRAW_DEBUG)
# LLVM
option(WITH_LLVM "Use LLVM" OFF)
if(APPLE)
# We prefer static llvm build on Apple, dyn build possible though.
option(LLVM_STATIC "Link with LLVM static libraries" ON)
else()
option(LLVM_STATIC "Link with LLVM static libraries" OFF)
endif()
option(LLVM_STATIC "Link with LLVM static libraries" OFF)
mark_as_advanced(LLVM_STATIC)
option(WITH_CLANG "Use Clang" OFF)
@ -753,8 +781,10 @@ if(WIN32)
option(WITH_TBB_MALLOC_PROXY "Enable the TBB malloc replacement" ON)
endif()
option(WITH_EXPERIMENTAL_FEATURES "Enable experimental features" ON)
# This should be turned off when Blender enter beta/rc/release
if("${BLENDER_VERSION_CYCLE}" STREQUAL "alpha")
if("${BLENDER_VERSION_CYCLE}" STREQUAL "alpha" AND WITH_EXPERIMENTAL_FEATURES)
set(WITH_EXPERIMENTAL_FEATURES ON)
else()
set(WITH_EXPERIMENTAL_FEATURES OFF)
@ -1156,14 +1186,7 @@ set_and_warn_dependency(WITH_IMAGE_OPENEXR WITH_CYCLES_OSL OFF)
# Hydra requires USD.
set_and_warn_dependency(WITH_USD WITH_HYDRA OFF)
# auto enable openimageio for cycles
if(WITH_CYCLES)
# auto enable llvm for cycles_osl
if(WITH_CYCLES_OSL)
set(WITH_LLVM ON CACHE BOOL "" FORCE)
set(WITH_CLANG ON CACHE BOOL "" FORCE)
endif()
else()
if(NOT WITH_CYCLES)
set(WITH_CYCLES_OSL OFF)
endif()
@ -1382,18 +1405,6 @@ if(NOT WITH_FFTW3 AND WITH_MOD_OCEANSIM)
message(FATAL_ERROR "WITH_MOD_OCEANSIM requires WITH_FFTW3 to be ON")
endif()
if(WITH_CYCLES)
if(WITH_CYCLES_OSL)
if(NOT WITH_LLVM)
message(
FATAL_ERROR
"Cycles OSL requires WITH_LLVM, the library may not have been found. "
"Configure LLVM or disable WITH_CYCLES_OSL"
)
endif()
endif()
endif()
if(WITH_INTERNATIONAL)
if(NOT WITH_BOOST)
message(
@ -2402,7 +2413,6 @@ if(FIRST_RUN)
info_cfg_option(WITH_IMAGE_OPENJPEG)
info_cfg_text("Audio:")
info_cfg_option(WITH_CODEC_AVI)
info_cfg_option(WITH_CODEC_FFMPEG)
info_cfg_option(WITH_CODEC_SNDFILE)
info_cfg_option(WITH_COREAUDIO)

View File

@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later
# This Makefile does an out-of-source CMake build in ../build_`OS`_`CPU`
# This Makefile does an out-of-source CMake build in ../build_`OS`
# eg:
# ../build_linux_i386
# This is for users who like to configure & build blender with a single command.
@ -35,7 +35,7 @@ Other Convenience Targets
* deps: Build library dependencies (intended only for platform maintainers).
The existence of locally build dependencies overrides the pre-built dependencies from subversion.
These must be manually removed from '../lib/' to go back to using the pre-compiled libraries.
These must be manually removed from 'lib/' to go back to using the pre-compiled libraries.
Project Files
Generate project files for development environments.
@ -165,6 +165,16 @@ OS:=$(shell uname -s)
OS_NCASE:=$(shell uname -s | tr '[A-Z]' '[a-z]')
CPU:=$(shell uname -m)
# Use our OS and CPU architecture naming conventions.
ifeq ($(CPU),x86_64)
CPU:=x64
endif
ifeq ($(OS_NCASE),darwin)
OS_LIBDIR:=macos
else
OS_LIBDIR:=$(OS_NCASE)
endif
# Source and Build DIR's
BLENDER_DIR:=$(shell pwd -P)
@ -186,26 +196,13 @@ ifndef DEPS_BUILD_DIR
endif
ifndef DEPS_INSTALL_DIR
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_NCASE)
# Add processor type to directory name, except for darwin x86_64
# which by convention does not have it.
ifeq ($(OS_NCASE),darwin)
ifneq ($(CPU),x86_64)
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
else
DEPS_INSTALL_DIR:=$(DEPS_INSTALL_DIR)_$(CPU)
endif
DEPS_INSTALL_DIR:=$(shell dirname "$(BLENDER_DIR)")/lib/$(OS_LIBDIR)_$(CPU)
endif
# Set the LIBDIR, an empty string when not found.
LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU})
LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR}_${CPU})
ifeq (, $(LIBDIR))
LIBDIR:=$(wildcard ../lib/${OS_NCASE}_${CPU}_glibc_228)
endif
ifeq (, $(LIBDIR))
LIBDIR:=$(wildcard ../lib/${OS_NCASE})
LIBDIR:=$(wildcard $(BLENDER_DIR)/lib/${OS_LIBDIR})
endif
# Find the newest Python version bundled in `LIBDIR`.
@ -535,7 +532,7 @@ check_spelling_shaders: .FORCE
"$(BLENDER_DIR)/source/"
check_descriptions: .FORCE
@$(BLENDER_BIN) --background -noaudio --factory-startup --python \
@$(BLENDER_BIN) --background --factory-startup --python \
"$(BLENDER_DIR)/tools/check_source/check_descriptions.py"
check_deprecated: .FORCE
@ -598,7 +595,7 @@ format: .FORCE
doc_py: .FORCE
@ASAN_OPTIONS=halt_on_error=0:${ASAN_OPTIONS} \
$(BLENDER_BIN) \
--background -noaudio --factory-startup \
--background --factory-startup \
--python doc/python_api/sphinx_doc_gen.py
@sphinx-build -b html -j $(NPROCS) doc/python_api/sphinx-in doc/python_api/sphinx-out
@echo "docs written into: '$(BLENDER_DIR)/doc/python_api/sphinx-out/index.html'"
@ -609,7 +606,7 @@ doc_doxy: .FORCE
doc_dna: .FORCE
@$(BLENDER_BIN) \
--background -noaudio --factory-startup \
--background --factory-startup \
--python doc/blender_file_format/BlendFileDnaExporter_25.py
@echo "docs written into: '$(BLENDER_DIR)/doc/blender_file_format/dna.html'"

View File

@ -43,6 +43,10 @@ set(OSL_EXTRA_ARGS
-DPython_EXECUTABLE=${PYTHON_BINARY}
)
if(NOT APPLE)
list(APPEND OSL_EXTRA_ARGS -DOSL_USE_OPTIX=ON)
endif()
ExternalProject_Add(external_osl
URL file://${PACKAGE_DIR}/${OSL_FILE}
DOWNLOAD_DIR ${DOWNLOAD_DIR}

View File

@ -639,10 +639,10 @@ set(BROTLI_HASH_TYPE SHA256)
set(BROTLI_FILE brotli-v${BROTLI_VERSION}.tar.gz)
set(BROTLI_CPE "cpe:2.3:a:google:brotli:${BROTLI_VERSION}:*:*:*:*:*:*:*")
set(OPENPGL_VERSION v0.5.0)
set(OPENPGL_SHORT_VERSION 0.5.0)
set(OPENPGL_VERSION v0.6.0)
set(OPENPGL_SHORT_VERSION 0.6.0)
set(OPENPGL_URI https://github.com/OpenPathGuidingLibrary/openpgl/archive/refs/tags/${OPENPGL_VERSION}.tar.gz)
set(OPENPGL_HASH 1ec806d434d45e43e098f82ee9be0cb74928343898c57490b34ff80584e9805a)
set(OPENPGL_HASH 4192a4096ee3e3d31878cd013f8de23418c8037c576537551f946c4811931c5e)
set(OPENPGL_HASH_TYPE SHA256)
set(OPENPGL_FILE openpgl-${OPENPGL_VERSION}.tar.gz)

View File

@ -133,10 +133,19 @@ BUILD_MANDATORY_SUBPACKAGES = (
DISTRO_ID_ARCH: "base-devel",
},
),
Package(name="Git",
Package(name="Git", is_group=True,
sub_packages=(
Package(name="Git LFS",
distro_package_names={DISTRO_ID_DEBIAN: "git-lfs",
DISTRO_ID_FEDORA: "git-lfs",
DISTRO_ID_SUSE: "git-lfs",
DISTRO_ID_ARCH: "git-lfs",
},
),
),
distro_package_names={DISTRO_ID_DEBIAN: "git",
DISTRO_ID_FEDORA: "git",
DISTRO_ID_SUSE: None,
DISTRO_ID_SUSE: "git",
DISTRO_ID_ARCH: "git",
},
),
@ -899,7 +908,7 @@ PACKAGES_ALL = (
DISTRO_ID_ARCH: "level-zero-headers", # ???
},
),
Package(name="OpenPGL Library", is_mandatory=False, version="0.5.0", version_short="0.5", version_min="0.5.0", version_mex="0.6",
Package(name="OpenPGL Library", is_mandatory=False, version="0.6.0", version_short="0.6", version_min="0.5.0", version_mex="0.7",
sub_packages=(),
distro_package_names={DISTRO_ID_DEBIAN: None,
DISTRO_ID_FEDORA: "openpgl-devel",

View File

@ -12,7 +12,6 @@ set(WITH_ALEMBIC ON CACHE BOOL "" FORCE)
set(WITH_AUDASPACE ON CACHE BOOL "" FORCE)
set(WITH_BUILDINFO ON CACHE BOOL "" FORCE)
set(WITH_BULLET ON CACHE BOOL "" FORCE)
set(WITH_CODEC_AVI ON CACHE BOOL "" FORCE)
set(WITH_CODEC_FFMPEG ON CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE ON CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR_CPU ON CACHE BOOL "" FORCE)

View File

@ -15,7 +15,6 @@ set(WITH_BLENDER_THUMBNAILER OFF CACHE BOOL "" FORCE)
set(WITH_BOOST OFF CACHE BOOL "" FORCE)
set(WITH_BUILDINFO OFF CACHE BOOL "" FORCE)
set(WITH_BULLET OFF CACHE BOOL "" FORCE)
set(WITH_CODEC_AVI OFF CACHE BOOL "" FORCE)
set(WITH_CODEC_FFMPEG OFF CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE OFF CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR_CPU OFF CACHE BOOL "" FORCE)

View File

@ -16,7 +16,6 @@ set(WITH_ASSERT_ABORT OFF CACHE BOOL "" FORCE)
set(WITH_AUDASPACE ON CACHE BOOL "" FORCE)
set(WITH_BUILDINFO ON CACHE BOOL "" FORCE)
set(WITH_BULLET ON CACHE BOOL "" FORCE)
set(WITH_CODEC_AVI ON CACHE BOOL "" FORCE)
set(WITH_CODEC_FFMPEG ON CACHE BOOL "" FORCE)
set(WITH_CODEC_SNDFILE ON CACHE BOOL "" FORCE)
set(WITH_COMPOSITOR_CPU ON CACHE BOOL "" FORCE)

View File

@ -49,18 +49,17 @@ endif()
if(NOT DEFINED LIBDIR)
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "x86_64")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_x64)
else()
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/darwin_${CMAKE_OSX_ARCHITECTURES})
endif()
else()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/macos_${CMAKE_OSX_ARCHITECTURES})
endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
message(FATAL_ERROR "Mac OSX requires pre-compiled libs at: '${LIBDIR}'")
endif()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
# Avoid searching for headers since this would otherwise override our lib
# directory as well as PYTHON_ROOT_DIR.

View File

@ -152,13 +152,9 @@ endif()
unset(OSX_SDKROOT)
if("${CMAKE_OSX_ARCHITECTURES}" STREQUAL "arm64")
# M1 chips run Big Sur onwards.
set(OSX_MIN_DEPLOYMENT_TARGET 11.00)
else()
# 10.15 is our min. target, if you use higher sdk, weak linking happens
set(OSX_MIN_DEPLOYMENT_TARGET 10.15)
endif()
# This is our minimum target, if you use higher sdk, weak linking happens
# Mainly required because of Metal drivers.
set(OSX_MIN_DEPLOYMENT_TARGET 11.2)
set(CMAKE_OSX_DEPLOYMENT_TARGET "${OSX_MIN_DEPLOYMENT_TARGET}" CACHE STRING "" FORCE)

View File

@ -16,13 +16,13 @@ else()
set(LIBDIR_NATIVE_ABI ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_NAME})
# Path to precompiled libraries with known glibc 2.28 ABI.
set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/../lib/linux_x86_64_glibc_228)
set(LIBDIR_GLIBC228_ABI ${CMAKE_SOURCE_DIR}/lib/linux_x64)
# Choose the best suitable libraries.
if(EXISTS ${LIBDIR_NATIVE_ABI})
set(LIBDIR ${LIBDIR_NATIVE_ABI})
set(WITH_LIBC_MALLOC_HOOK_WORKAROUND TRUE)
elseif(EXISTS ${LIBDIR_GLIBC228_ABI})
elseif(EXISTS "${LIBDIR_GLIBC228_ABI}/.git")
set(LIBDIR ${LIBDIR_GLIBC228_ABI})
if(WITH_MEM_JEMALLOC)
# jemalloc provides malloc hooks.

View File

@ -266,23 +266,23 @@ if(NOT DEFINED LIBDIR)
# Setup 64bit and 64bit windows systems
if(CMAKE_CL_64)
message(STATUS "64 bit compiler detected.")
set(LIBDIR_BASE "win64")
set(LIBDIR_BASE "windows_x64")
else()
message(FATAL_ERROR "32 bit compiler detected, blender no longer provides pre-build libraries for 32 bit windows, please set the LIBDIR cmake variable to your own library folder")
endif()
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 19.30.30423)
message(STATUS "Visual Studio 2022 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
elseif(MSVC_VERSION GREATER 1919)
message(STATUS "Visual Studio 2019 detected.")
set(LIBDIR ${CMAKE_SOURCE_DIR}/../lib/${LIBDIR_BASE}_vc15)
set(LIBDIR ${CMAKE_SOURCE_DIR}/lib/${LIBDIR_BASE})
endif()
else()
if(FIRST_RUN)
message(STATUS "Using pre-compiled LIBDIR: ${LIBDIR}")
endif()
endif()
if(NOT EXISTS "${LIBDIR}/")
if(NOT EXISTS "${LIBDIR}/.git")
message(FATAL_ERROR "\n\nWindows requires pre-compiled libs at: '${LIBDIR}'. Please run `make update` in the blender source folder to obtain them.")
endif()

View File

@ -163,14 +163,14 @@ function(blender_add_ctests)
TEST_PREFIX ${ARGS_SUITE_NAME}
WORKING_DIRECTORY "${TEST_INSTALL_DIR}"
EXTRA_ARGS
--test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
--test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
--test-release-dir "${_test_release_dir}"
)
else()
add_test(
NAME ${ARGS_SUITE_NAME}
COMMAND ${ARGS_TARGET}
--test-assets-dir "${CMAKE_SOURCE_DIR}/../lib/tests"
--test-assets-dir "${CMAKE_SOURCE_DIR}/tests/data"
--test-release-dir "${_test_release_dir}"
WORKING_DIRECTORY ${TEST_INSTALL_DIR}
)

View File

@ -13,6 +13,7 @@ import sys
import make_utils
from make_utils import call
from pathlib import Path
# Parse arguments.
@ -21,7 +22,6 @@ def parse_arguments() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument("--ctest-command", default="ctest")
parser.add_argument("--cmake-command", default="cmake")
parser.add_argument("--svn-command", default="svn")
parser.add_argument("--git-command", default="git")
parser.add_argument("--config", default="")
parser.add_argument("build_directory")
@ -30,7 +30,6 @@ def parse_arguments() -> argparse.Namespace:
args = parse_arguments()
git_command = args.git_command
svn_command = args.svn_command
ctest_command = args.ctest_command
cmake_command = args.cmake_command
config = args.config
@ -45,24 +44,18 @@ if make_utils.command_missing(git_command):
sys.exit(1)
# Test if we are building a specific release version.
branch = make_utils.git_branch(git_command)
tag = make_utils.git_tag(git_command)
release_version = make_utils.git_branch_release_version(branch, tag)
lib_tests_dirpath = os.path.join('..', 'lib', "tests")
lib_tests_dirpath = Path("tests") / "data"
if not os.path.exists(lib_tests_dirpath):
if not (lib_tests_dirpath / ".git").exists():
print("Tests files not found, downloading...")
if make_utils.command_missing(svn_command):
sys.stderr.write("svn not found, can't checkout test files\n")
sys.exit(1)
if make_utils.command_missing(cmake_command):
sys.stderr.write("cmake not found, can't checkout test files\n")
sys.exit(1)
svn_url = make_utils.svn_libraries_base_url(release_version) + "/tests"
call([svn_command, "checkout", svn_url, lib_tests_dirpath])
# Ensure the test data files sub-module is configured and present.
make_utils.git_enable_submodule(git_command, "tests/data")
make_utils.git_update_submodule(args.git_command, lib_tests_dirpath)
# Run cmake again to detect tests files.
os.chdir(build_dir)
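
The presence check above (and the matching `if(NOT EXISTS "${LIBDIR}/.git")` tests in the CMake platform files) relies on how Git materializes submodules: the submodule directory typically exists in the checkout as an empty placeholder, but it only gains a ".git" entry once the submodule has actually been initialized. A small sketch of that distinction (paths illustrative):

    from pathlib import Path

    def submodule_is_checked_out(path: Path) -> bool:
        # For an initialized submodule, ".git" is a small file pointing into
        # <repo>/.git/modules/<path>; an uninitialized placeholder has no such entry.
        return (path / ".git").exists()

    print(submodule_is_checked_out(Path("tests") / "data"))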

View File

@ -4,11 +4,11 @@
# SPDX-License-Identifier: GPL-2.0-or-later
"""
"make update" for all platforms, updating svn libraries and tests and Blender
git repository and sub-modules.
"make update" for all platforms, updating Git LFS submodules for libraries and
tests, and Blender git repository.
For release branches, this will check out the appropriate branches of
sub-modules and libraries.
submodules and libraries.
"""
import argparse
@ -20,168 +20,176 @@ import sys
import make_utils
from pathlib import Path
from make_utils import call, check_output
from urllib.parse import urljoin
from urllib.parse import urljoin, urlsplit
from typing import (
Optional,
)
class Submodule:
path: str
branch: str
branch_fallback: str
def __init__(self, path: str, branch: str, branch_fallback: str) -> None:
self.path = path
self.branch = branch
self.branch_fallback = branch_fallback
from typing import Optional
def print_stage(text: str) -> None:
print("")
print(text)
print("=" * len(text))
print("")
# Parse arguments
def parse_arguments() -> argparse.Namespace:
"""
Parse command line line arguments.
Returns parsed object from which the command line arguments can be accessed
as properties. The name of the properties matches the command line argument,
but with the leading dashed omitted and all remaining dashes replaced with
underscore.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--no-libraries", action="store_true")
parser.add_argument("--no-blender", action="store_true")
parser.add_argument("--no-submodules", action="store_true")
parser.add_argument("--use-tests", action="store_true")
parser.add_argument("--svn-command", default="svn")
parser.add_argument("--svn-branch", default=None)
parser.add_argument("--git-command", default="git")
parser.add_argument("--use-linux-libraries", action="store_true")
parser.add_argument("--architecture", type=str, choices=("x86_64", "amd64", "arm64",))
parser.add_argument("--architecture", type=str,
choices=("x86_64", "amd64", "arm64",))
return parser.parse_args()
def get_blender_git_root() -> str:
return check_output([args.git_command, "rev-parse", "--show-toplevel"])
def get_blender_git_root() -> Path:
"""
Get root directory of the current Git directory.
"""
return Path(
check_output([args.git_command, "rev-parse", "--show-toplevel"]))
# Setup for precompiled libraries and tests from svn.
def get_effective_platform(args: argparse.Namespace) -> str:
"""
Get platform of the host.
The result string is normalized to the name used by Blender releases and
library repository name prefixes: linux, macos, windows.
"""
if sys.platform == "darwin":
platform = "macos"
elif sys.platform == "win32":
platform = "windows"
else:
platform = sys.platform
assert (platform in ("linux", "macos", "windows"))
return platform
def get_effective_architecture(args: argparse.Namespace) -> str:
"""
Get architecture of the host.
The result string is normalized to the architecture name used by the Blender
releases and library repository name suffixes: x64, arm64.
NOTE: When cross-compiling the architecture is coming from the command line
argument.
"""
architecture = args.architecture
if architecture:
assert isinstance(architecture, str)
return architecture
# Check platform.version to detect arm64 with x86_64 python binary.
if "ARM64" in platform.version():
return "arm64"
return platform.machine().lower()
def svn_update(args: argparse.Namespace, release_version: Optional[str]) -> None:
svn_non_interactive = [args.svn_command, '--non-interactive']
lib_dirpath = os.path.join(get_blender_git_root(), '..', 'lib')
svn_url = make_utils.svn_libraries_base_url(release_version, args.svn_branch)
# Checkout precompiled libraries
architecture = get_effective_architecture(args)
if sys.platform == 'darwin':
if architecture == 'arm64':
lib_platform = "darwin_arm64"
elif architecture == 'x86_64':
lib_platform = "darwin"
else:
lib_platform = None
elif sys.platform == 'win32':
# Windows checkout is usually handled by bat scripts since python3 to run
# this script is bundled as part of the precompiled libraries. However it
# is used by the buildbot.
lib_platform = "win64_vc15"
elif args.use_linux_libraries:
lib_platform = "linux_x86_64_glibc_228"
elif "ARM64" in platform.version():
# Check platform.version to detect arm64 with x86_64 python binary.
architecture = "arm64"
else:
# No precompiled libraries for Linux.
lib_platform = None
architecture = platform.machine().lower()
if lib_platform:
lib_platform_dirpath = os.path.join(lib_dirpath, lib_platform)
# Normalize the architecture name.
if architecture in ("x86_64", "amd64"):
architecture = "x64"
if not os.path.exists(lib_platform_dirpath):
print_stage("Checking out Precompiled Libraries")
assert (architecture in ("x64", "arm64"))
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout libraries\n")
sys.exit(1)
svn_url_platform = svn_url + lib_platform
call(svn_non_interactive + ["checkout", svn_url_platform, lib_platform_dirpath])
if args.use_tests:
lib_tests = "tests"
lib_tests_dirpath = os.path.join(lib_dirpath, lib_tests)
if not os.path.exists(lib_tests_dirpath):
print_stage("Checking out Tests")
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout tests\n")
sys.exit(1)
svn_url_tests = svn_url + lib_tests
call(svn_non_interactive + ["checkout", svn_url_tests, lib_tests_dirpath])
lib_assets = "assets"
lib_assets_dirpath = os.path.join(lib_dirpath, lib_assets)
if not os.path.exists(lib_assets_dirpath):
print_stage("Checking out Assets")
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't checkout assets\n")
sys.exit(1)
svn_url_assets = svn_url + lib_assets
call(svn_non_interactive + ["checkout", svn_url_assets, lib_assets_dirpath])
# Update precompiled libraries, assets and tests
if not os.path.isdir(lib_dirpath):
print("Library path: %r, not found, skipping" % lib_dirpath)
else:
paths_local_and_remote = []
if os.path.exists(os.path.join(lib_dirpath, ".svn")):
print_stage("Updating Precompiled Libraries, Assets and Tests (one repository)")
paths_local_and_remote.append((lib_dirpath, svn_url))
else:
print_stage("Updating Precompiled Libraries, Assets and Tests (multiple repositories)")
# Separate paths checked out.
for dirname in os.listdir(lib_dirpath):
if dirname.startswith("."):
# Temporary paths such as ".mypy_cache" will report a warning, skip hidden directories.
continue
dirpath = os.path.join(lib_dirpath, dirname)
if not (os.path.isdir(dirpath) and os.path.exists(os.path.join(dirpath, ".svn"))):
continue
paths_local_and_remote.append((dirpath, svn_url + dirname))
if paths_local_and_remote:
if make_utils.command_missing(args.svn_command):
sys.stderr.write("svn not found, can't update libraries\n")
sys.exit(1)
for dirpath, svn_url_full in paths_local_and_remote:
call(svn_non_interactive + ["cleanup", dirpath])
# Switch to appropriate branch and update.
call(svn_non_interactive + ["switch", svn_url_full, dirpath], exit_on_error=False)
call(svn_non_interactive + ["update", dirpath])
return architecture
def get_submodule_directories(args: argparse.Namespace):
"""
Get list of all configured submodule directories.
"""
blender_git_root = get_blender_git_root()
dot_modules = blender_git_root / ".gitmodules"
if not dot_modules.exists():
return ()
submodule_directories_output = check_output(
[args.git_command, "config", "--file", dot_modules, "--get-regexp", "path"])
return (Path(line.split(' ', 1)[1]) for line in submodule_directories_output.strip().splitlines())
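
For reference, `git config --file .gitmodules --get-regexp path` prints one "key value" pair per submodule, which is why a single split on the first space is enough here (values taken from the .gitmodules file added above):

    # submodule.lib/linux_x64.path lib/linux_x64
    # submodule.tests/data.path tests/data
    line = "submodule.lib/linux_x64.path lib/linux_x64"
    print(line.split(' ', 1)[1])  # -> lib/linux_x64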
def ensure_git_lfs(args: argparse.Namespace) -> None:
# Use `--skip-repo` to avoid creating git hooks.
# This is called from the `blender.git` checkout, so we don't need to install hooks there.
call((args.git_command, "lfs", "install", "--skip-repo"), exit_on_error=True)
def update_precompiled_libraries(args: argparse.Namespace) -> str:
"""
Configure and update submodule for precompiled libraries
This function detects the current host architecture and enables
corresponding submodule, and updates the submodule.
NOTE: When cross-compiling the architecture is coming from the command line
argument.
"""
print_stage("Configuring Precompiled Libraries")
platform = get_effective_platform(args)
arch = get_effective_architecture(args)
print(f"Detected platform : {platform}")
print(f"Detected architecture : {arch}")
print()
if sys.platform == "linux" and not args.use_linux_libraries:
print("Skipping Linux libraries configuration")
return ""
submodule_dir = f"lib/{platform}_{arch}"
submodule_directories = get_submodule_directories(args)
if Path(submodule_dir) not in submodule_directories:
return "Skipping libraries update: no configured submodule\n"
make_utils.git_enable_submodule(args.git_command, submodule_dir)
if not make_utils.git_update_submodule(args.git_command, submodule_dir):
return "Error updating precompiled libraries\n"
return ""
def update_tests_data_files(args: argparse.Namespace) -> str:
"""
Configure and update submodule with files used by regression tests
"""
print_stage("Configuring Tests Data Files")
submodule_dir = "tests/data"
make_utils.git_enable_submodule(args.git_command, submodule_dir)
if not make_utils.git_update_submodule(args.git_command, submodule_dir):
return "Error updating test data\n"
return ""
# Test if git repo can be updated.
def git_update_skip(args: argparse.Namespace, check_remote_exists: bool = True) -> str:
"""Test if git repo can be updated."""
if make_utils.command_missing(args.git_command):
sys.stderr.write("git not found, can't update code\n")
sys.exit(1)
@ -274,23 +282,22 @@ def resolve_external_url(blender_url: str, repo_name: str) -> str:
return urljoin(blender_url + "/", "../" + repo_name)
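
The trailing slash matters here: urljoin() only treats the last path component of the base URL as a directory when it ends with "/". A worked example with illustrative repository names:

    from urllib.parse import urljoin

    blender_url = "https://projects.blender.org/blender/blender.git"
    # With the slash, "blender.git" is kept as a directory level:
    print(urljoin(blender_url + "/", "../blender-addons.git"))
    # -> https://projects.blender.org/blender/blender-addons.git
    # Without it, "blender.git" is treated as a file and stripped first:
    print(urljoin(blender_url, "../blender-addons.git"))
    # -> https://projects.blender.org/blender-addons.git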
def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_name: str) -> None:
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
def external_script_copy_old_submodule_over(
args: argparse.Namespace,
directory: Path,
old_submodules_dir: Path) -> None:
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
old_submodule_relative_dir = Path("release") / "scripts" / directory_name
print(f"Moving {old_submodule_relative_dir} to scripts/{directory_name} ...")
old_submodule_dir = blender_git_root / old_submodule_relative_dir
shutil.move(old_submodule_dir, external_dir)
print(f"Moving {old_submodules_dir} to {directory} ...")
shutil.move(blender_git_root / old_submodules_dir, external_dir)
# Remove old ".git" which is a file with path to a submodule bare repo inside of main
# repo .git/modules directory.
(external_dir / ".git").unlink()
bare_repo_relative_dir = Path(".git") / "modules" / "release" / "scripts" / directory_name
print(f"Copying {bare_repo_relative_dir} to scripts/{directory_name}/.git ...")
bare_repo_relative_dir = Path(".git") / "modules" / old_submodules_dir
print(f"Copying {bare_repo_relative_dir} to {directory}/.git ...")
bare_repo_dir = blender_git_root / bare_repo_relative_dir
shutil.copytree(bare_repo_dir, external_dir / ".git")
@ -298,25 +305,26 @@ def external_script_copy_old_submodule_over(args: argparse.Namespace, directory_
call((args.git_command, "config", "--file", str(git_config), "--unset", "core.worktree"))
def external_script_initialize_if_needed(args: argparse.Namespace,
repo_name: str,
directory_name: str) -> None:
"""Initialize checkout of an external repository scripts directory"""
def floating_checkout_initialize_if_needed(args: argparse.Namespace,
repo_name: str,
directory: Path,
old_submodules_dir: Path = None) -> None:
"""Initialize checkout of an external repository"""
blender_git_root = Path(get_blender_git_root())
blender_git_root = get_blender_git_root()
blender_dot_git = blender_git_root / ".git"
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
external_dir = blender_git_root / directory
if external_dir.exists():
return
print(f"Initializing scripts/{directory_name} ...")
print(f"Initializing {directory} ...")
old_submodule_dot_git = blender_git_root / "release" / "scripts" / directory_name / ".git"
if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
external_script_copy_old_submodule_over(args, directory_name)
return
if old_submodules_dir is not None:
old_submodule_dot_git = blender_git_root / old_submodules_dir / ".git"
if old_submodule_dot_git.exists() and blender_dot_git.is_dir():
external_script_copy_old_submodule_over(args, directory, old_submodules_dir)
return
origin_name = "upstream" if use_upstream_workflow(args) else "origin"
blender_url = make_utils.git_get_remote_url(args.git_command, origin_name)
@ -330,9 +338,9 @@ def external_script_initialize_if_needed(args: argparse.Namespace,
call((args.git_command, "clone", "--origin", origin_name, external_url, str(external_dir)))
def external_script_add_origin_if_needed(args: argparse.Namespace,
repo_name: str,
directory_name: str) -> None:
def floating_checkout_add_origin_if_needed(args: argparse.Namespace,
repo_name: str,
directory: Path) -> None:
"""
Add remote called 'origin' if there is a fork of the external repository available
@ -344,9 +352,8 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
cwd = os.getcwd()
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
origin_blender_url = make_utils.git_get_remote_url(args.git_command, "origin")
origin_external_url = resolve_external_url(origin_blender_url, repo_name)
@ -361,7 +368,7 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
if not make_utils.git_is_remote_repository(args.git_command, origin_external_url):
return
print(f"Adding origin remote to {directory_name} pointing to fork ...")
print(f"Adding origin remote to {directory} pointing to fork ...")
# Non-obvious tricks to introduce the new remote called "origin" to the existing
# submodule configuration.
@ -390,23 +397,30 @@ def external_script_add_origin_if_needed(args: argparse.Namespace,
return
def external_scripts_update(args: argparse.Namespace,
repo_name: str,
directory_name: str,
branch: Optional[str]) -> str:
def floating_checkout_update(args: argparse.Namespace,
repo_name: str,
directory: Path,
branch: Optional[str],
old_submodules_dir: Path = None,
only_update=False) -> str:
"""Update a single external checkout with the given name in the scripts folder"""
external_script_initialize_if_needed(args, repo_name, directory_name)
external_script_add_origin_if_needed(args, repo_name, directory_name)
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
print(f"Updating scripts/{directory_name} ...")
if only_update and not external_dir.exists():
return ""
floating_checkout_initialize_if_needed(args, repo_name, directory, old_submodules_dir)
floating_checkout_add_origin_if_needed(args, repo_name, directory)
blender_git_root = get_blender_git_root()
external_dir = blender_git_root / directory
print(f"* Updating {directory} ...")
cwd = os.getcwd()
blender_git_root = Path(get_blender_git_root())
scripts_dir = blender_git_root / "scripts"
external_dir = scripts_dir / directory_name
# Update externals to appropriate given branch, falling back to main if none is given and/or
# found in a sub-repository.
branch_fallback = "main"
@ -419,7 +433,7 @@ def external_scripts_update(args: argparse.Namespace,
os.chdir(external_dir)
msg = git_update_skip(args, check_remote_exists=False)
if msg:
skip_msg += directory_name + " skipped: " + msg + "\n"
skip_msg += str(directory) + " skipped: " + msg + "\n"
else:
# Find a matching branch that exists.
for remote in ("origin", "upstream"):
@ -465,6 +479,17 @@ def external_scripts_update(args: argparse.Namespace,
return skip_msg
def external_scripts_update(args: argparse.Namespace,
repo_name: str,
directory_name: str,
branch: Optional[str]) -> str:
return floating_checkout_update(args,
repo_name,
Path("scripts") / directory_name,
branch,
old_submodules_dir=Path("release") / "scripts" / directory_name)
def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update working trees of addons and addons_contrib within the scripts/ directory"""
msg = ""
@ -475,18 +500,91 @@ def scripts_submodules_update(args: argparse.Namespace, branch: Optional[str]) -
return msg
def floating_libraries_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update libraries checkouts which are floating (not attached as Git submodules)"""
msg = ""
msg += floating_checkout_update(args,
"benchmarks",
Path("tests") / "benchmarks",
branch,
only_update=True)
return msg
def add_submodule_push_url(args: argparse.Namespace):
"""
Add pushURL configuration for all locally activated submodules, pointing to SSH protocol.
"""
blender_git_root = get_blender_git_root()
modules = blender_git_root / ".git" / "modules"
submodule_directories = get_submodule_directories(args)
for submodule_path in submodule_directories:
module_path = modules / submodule_path
config = module_path / "config"
if not config.exists():
# Ignore modules which are not initialized
continue
push_url = check_output((args.git_command, "config", "--file", str(config),
"--get", "remote.origin.pushURL"), exit_on_error=False)
if push_url and push_url != "git@projects.blender.org:blender/lib-darwin_arm64.git":
# Ignore modules which have pushURL configured.
# Keep special exception, as some debug code sneaked into the production for a short
# while.
continue
url = make_utils.git_get_config(args.git_command, "remote.origin.url", str(config))
if not url.startswith("https:"):
# Ignore non-URL URLs.
continue
url_parts = urlsplit(url)
push_url = f"git@{url_parts.netloc}:{url_parts.path[1:]}"
print(f"Setting pushURL to {push_url} for {submodule_path}")
make_utils.git_set_config(args.git_command, "remote.origin.pushURL", push_url, str(config))
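
For one of the library submodules configured above, the rewrite from an HTTPS clone URL to an SSH pushURL works out like this:

    from urllib.parse import urlsplit

    url = "https://projects.blender.org/blender/lib-linux_x64.git"
    parts = urlsplit(url)  # netloc="projects.blender.org", path="/blender/lib-linux_x64.git"
    print(f"git@{parts.netloc}:{parts.path[1:]}")
    # -> git@projects.blender.org:blender/lib-linux_x64.git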
def submodules_update(args: argparse.Namespace, branch: Optional[str]) -> str:
"""Update submodules or other externally tracked source trees"""
print_stage("Updating Submodules")
msg = ""
msg += scripts_submodules_update(args, branch)
msg += floating_libraries_update(args, branch)
print("* Updating Git submodules")
submodule_directories = get_submodule_directories(args)
for submodule_path in submodule_directories:
if submodule_path.parts[0] == "lib" and args.no_libraries:
print(f"Skipping library submodule {submodule_path}")
continue
if submodule_path.parts[0] == "tests" and not args.use_tests:
print(f"Skipping tests submodule {submodule_path}")
continue
if not make_utils.git_update_submodule(args.git_command, submodule_path):
msg += f"Error updating Git submodule {submodule_path}\n"
add_submodule_push_url(args)
return msg
if __name__ == "__main__":
args = parse_arguments()
blender_skip_msg = ""
libraries_skip_msg = ""
submodules_skip_msg = ""
blender_version = make_utils.parse_blender_version()
@ -494,26 +592,32 @@ if __name__ == "__main__":
major = blender_version.version // 100
minor = blender_version.version % 100
branch = f"blender-v{major}.{minor}-release"
release_version: Optional[str] = f"{major}.{minor}"
else:
branch = 'main'
release_version = None
if not args.no_libraries:
svn_update(args, release_version)
# Submodules and precompiled libraries require Git LFS.
ensure_git_lfs(args)
if not args.no_blender:
blender_skip_msg = git_update_skip(args)
if not blender_skip_msg:
blender_skip_msg = blender_update(args)
if blender_skip_msg:
blender_skip_msg = "Blender repository skipped: " + blender_skip_msg + "\n"
if not args.no_libraries:
libraries_skip_msg += update_precompiled_libraries(args)
if args.use_tests:
libraries_skip_msg += update_tests_data_files(args)
if not args.no_submodules:
submodules_skip_msg = submodules_update(args, branch)
# Report any skipped repositories at the end, so it's not as easy to miss.
skip_msg = blender_skip_msg + submodules_skip_msg
skip_msg = blender_skip_msg + libraries_skip_msg + submodules_skip_msg
if skip_msg:
print_stage(skip_msg.strip())
print_stage("Update finished with the following messages")
print(skip_msg.strip())
# For failed submodule update we throw an error, since not having correct
# submodules can make Blender throw errors.

View File

@ -8,12 +8,11 @@ Utility functions for make update and make tests.
"""
import re
import os
import shutil
import subprocess
import sys
import os
from pathlib import Path
from urllib.parse import urljoin
from typing import (
Sequence,
@ -21,18 +20,30 @@ from typing import (
)
def call(cmd: Sequence[str], exit_on_error: bool = True, silent: bool = False) -> int:
def call(cmd: Sequence[str], exit_on_error: bool = True, silent: bool = False, env=None) -> int:
if not silent:
print(" ".join([str(x) for x in cmd]))
cmd_str = ""
if env:
cmd_str += " ".join([f"{item[0]}={item[1]}" for item in env.items()])
cmd_str += " "
cmd_str += " ".join([str(x) for x in cmd])
print(cmd_str)
env_full = None
if env:
env_full = os.environ.copy()
for key, value in env.items():
env_full[key] = value
# Flush to ensure correct order output on Windows.
sys.stdout.flush()
sys.stderr.flush()
if silent:
retcode = subprocess.call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
retcode = subprocess.call(
cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, env=env_full)
else:
retcode = subprocess.call(cmd)
retcode = subprocess.call(cmd, env=env_full)
if exit_on_error and retcode != 0:
sys.exit(retcode)
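
A hypothetical use of the new env parameter (the variable shown is only an example): the extra variables are echoed together with the command and merged over os.environ for the child process.

    from make_utils import call

    # Prints: GIT_LFS_SKIP_SMUDGE=1 git submodule update --init tests/data
    call(["git", "submodule", "update", "--init", "tests/data"],
         env={"GIT_LFS_SKIP_SMUDGE": "1"})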
@ -48,7 +59,7 @@ def check_output(cmd: Sequence[str], exit_on_error: bool = True) -> str:
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, universal_newlines=True)
except subprocess.CalledProcessError as e:
if exit_on_error:
sys.stderr.write(" ".join(cmd))
sys.stderr.write(" ".join(cmd) + "\n")
sys.stderr.write(e.output + "\n")
sys.exit(e.returncode)
output = ""
@ -87,25 +98,6 @@ def git_remote_exist(git_command: str, remote_name: str) -> bool:
return remote_url != remote_name
def git_get_resolved_submodule_url(git_command: str, blender_url: str, submodule_path: str) -> str:
git_root = check_output([git_command, "rev-parse", "--show-toplevel"])
dot_gitmodules = os.path.join(git_root, ".gitmodules")
submodule_key_prefix = f"submodule.{submodule_path}"
submodule_key_url = f"{submodule_key_prefix}.url"
gitmodule_url = git_get_config(
git_command, submodule_key_url, file=dot_gitmodules)
# A bit of a trickery to construct final URL.
# Only works for the relative submodule URLs.
#
# Note that unless the LHS URL ends up with a slash urljoin treats the last component as a
# file.
assert gitmodule_url.startswith('..')
return urljoin(blender_url + "/", gitmodule_url)