diff --git a/.github/workflows/TagIt.yml b/.github/workflows/TagIt.yml
new file mode 100644
index 000000000..2c4b889d6
--- /dev/null
+++ b/.github/workflows/TagIt.yml
@@ -0,0 +1,68 @@
+on:
+ push:
+ tags:
+ # Only match TagIt tags, which always start with this prefix
+ - 'v20*'
+
+name: TagIt
+
+jobs:
+ build:
+ name: Release
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ - name: Archive project
+ id: archive_project
+ run: |
+ FILE_NAME=${GITHUB_REPOSITORY#*/}-${GITHUB_REF##*/}
+ git archive ${{ github.ref }} -o ${FILE_NAME}.zip
+ git archive ${{ github.ref }} -o ${FILE_NAME}.tar.gz
+ echo "::set-output name=file_name::${FILE_NAME}"
+ - name: Compute digests
+ id: compute_digests
+ run: |
+ echo "::set-output name=tgz_256::$(openssl dgst -sha256 ${{ steps.archive_project.outputs.file_name }}.tar.gz)"
+ echo "::set-output name=tgz_512::$(openssl dgst -sha512 ${{ steps.archive_project.outputs.file_name }}.tar.gz)"
+ echo "::set-output name=zip_256::$(openssl dgst -sha256 ${{ steps.archive_project.outputs.file_name }}.zip)"
+ echo "::set-output name=zip_512::$(openssl dgst -sha512 ${{ steps.archive_project.outputs.file_name }}.zip)"
+ - name: Create Release
+ id: create_release
+ uses: actions/create-release@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ tag_name: ${{ github.ref }}
+ release_name: ${{ github.ref }}
+ body: |
+ Automated release from TagIt
+
+ File Hashes
+
+ - ${{ steps.compute_digests.outputs.zip_256 }}
+ - ${{ steps.compute_digests.outputs.zip_512 }}
+ - ${{ steps.compute_digests.outputs.tgz_256 }}
+ - ${{ steps.compute_digests.outputs.tgz_512 }}
+
+
+ draft: false
+ prerelease: false
+ - name: Upload zip
+ uses: actions/upload-release-asset@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ steps.create_release.outputs.upload_url }}
+ asset_path: ./${{ steps.archive_project.outputs.file_name }}.zip
+ asset_name: ${{ steps.archive_project.outputs.file_name }}.zip
+ asset_content_type: application/zip
+ - name: Upload tar.gz
+ uses: actions/upload-release-asset@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ with:
+ upload_url: ${{ steps.create_release.outputs.upload_url }}
+ asset_path: ./${{ steps.archive_project.outputs.file_name }}.tar.gz
+ asset_name: ${{ steps.archive_project.outputs.file_name }}.tar.gz
+ asset_content_type: application/gzip
diff --git a/.gitignore b/.gitignore
index bf568c4c9..e00c3821f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -46,7 +46,6 @@ install_manifest.txt
### Project ###
-/build
/reactivesocket-cpp/CTestTestfile.cmake
/reactivesocket-cpp/ReactiveSocketTest
/reactivesocket-cpp/compile_commands.json
diff --git a/.travis.yml b/.travis.yml
index 8e72dfa19..1c88d192b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -14,9 +14,6 @@ addons:
packages: &common_deps
- lcov
# Folly dependencies
- - autoconf
- - autoconf-archive
- - automake
- binutils-dev
- g++
- libboost-all-dev
@@ -25,42 +22,27 @@ addons:
- libgflags-dev
- libgoogle-glog-dev
- libiberty-dev
- - libjemalloc-dev
- liblz4-dev
- liblzma-dev
- libsnappy-dev
- libssl-dev
- - libtool
- make
- - pkg-config
- zlib1g-dev
matrix:
include:
- # Set COMPILER environment variable instead of CC or CXX because the latter
- # are overriden by Travis. Setting the compiler in Travis doesn't work
- # either because it strips version.
- - env: COMPILER=clang-4.0
+ - env: COMPILER_EVAL="CC=clang-6.0 CXX=clang++-6.0"
addons:
apt:
sources:
- *common_srcs
- - llvm-toolchain-trusty-4.0
+ - llvm-toolchain-trusty-6.0
packages:
- *common_deps
- - clang-4.0
+ - clang-6.0
- libstdc++-4.9-dev
- - env: COMPILER=gcc-4.9
- addons:
- apt:
- sources:
- - *common_srcs
- packages:
- - *common_deps
- - g++-4.9
-
- - env: COMPILER=gcc-5
+ - env: COMPILER_EVAL="CC=gcc-5 CXX=g++-5"
addons:
apt:
sources:
@@ -68,8 +50,9 @@ matrix:
packages:
- *common_deps
- g++-5
+ - libjemalloc-dev
- - env: COMPILER=gcc-6
+ - env: COMPILER_EVAL="CC=gcc-6 CXX=g++-6"
addons:
apt:
sources:
@@ -77,6 +60,7 @@ matrix:
packages:
- *common_deps
- g++-6
+ - libjemalloc-dev
env:
global:
@@ -93,30 +77,38 @@ env:
eHz/lHAoLXWg/BhtgQbPmMYYKRrQaH7EKzBbqEHv6PhOk7vLMtdx5X7KmhVuFjpAMbaYoj
zwxxH0u+VAnVB5iazzyjhySjvzkvx6pGzZtTnjLJHxKcp9633z4OU=
-cache:
- directories:
- - $HOME/folly
-
before_script:
+ - eval "$COMPILER_EVAL"
+ - export DEP_INSTALL_DIR=$PWD/build/dep-install
+ # Ubuntu trusty only comes with OpenSSL 1.0.1f, but we require
+ # at least OpenSSL 1.0.2 for ALPN support.
+ - curl -L https://github.com/openssl/openssl/archive/OpenSSL_1_1_1.tar.gz -o OpenSSL_1_1_1.tar.gz
+ - tar -xzf OpenSSL_1_1_1.tar.gz
+ - cd openssl-OpenSSL_1_1_1
+ - ./config --prefix=$DEP_INSTALL_DIR no-shared
+ - make -j4
+ - make install_sw install_ssldirs
+ - cd ..
# Install lcov to coveralls conversion + upload tool.
- gem install coveralls-lcov
- lcov --version
+ # Build folly
+ - ./scripts/build_folly.sh build/folly-src $DEP_INSTALL_DIR
script:
- - mkdir build
- cd build
- - cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DRSOCKET_CC=$COMPILER
- -DRSOCKET_ASAN=$ASAN -DRSOCKET_INSTALL_DEPS=True
+ - cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DRSOCKET_ASAN=$ASAN
+ -DCMAKE_PREFIX_PATH=$DEP_INSTALL_DIR
-DRSOCKET_BUILD_WITH_COVERAGE=ON ..
- make -j4
- lcov --directory . --zerocounters
- - make test
+ # - make test
# - make coverage
- - cd ..
- - ./scripts/tck_test.sh -c cpp -s cpp
- - ./scripts/tck_test.sh -c java -s java
- - ./scripts/tck_test.sh -c java -s cpp
- - ./scripts/tck_test.sh -c cpp -s java
+ # - cd ..
+ # - ./scripts/tck_test.sh -c cpp -s cpp
+ # - ./scripts/tck_test.sh -c java -s java
+ # - ./scripts/tck_test.sh -c java -s cpp
+ # - ./scripts/tck_test.sh -c cpp -s java
after_success:
# Upload to coveralls.
diff --git a/.ycm_extra_conf.py b/.ycm_extra_conf.py
deleted file mode 100644
index 4f893b15e..000000000
--- a/.ycm_extra_conf.py
+++ /dev/null
@@ -1,158 +0,0 @@
-import os
-import os.path
-import logging
-import ycm_core
-
-BASE_FLAGS = [
- '-xc++',
- '-Wall',
- '-Wextra',
- '-Werror',
- '-std=c++11',
- '-I.',
- '-isystem/usr/lib/',
- '-isystem/usr/include/',
-]
-
-SOURCE_EXTENSIONS = [
- '.cpp',
- '.cxx',
- '.cc',
- '.c',
- '.m',
- '.mm'
-]
-
-HEADER_EXTENSIONS = [
- '.h',
- '.hxx',
- '.hpp',
- '.hh',
- '.icc',
- '.tcc',
-]
-
-
-def IsHeaderFile(filename):
- extension = os.path.splitext(filename)[1]
- return extension in HEADER_EXTENSIONS
-
-
-def GetCompilationInfoForFile(database, filename):
- if IsHeaderFile(filename):
- basename = os.path.splitext(filename)[0]
- for extension in SOURCE_EXTENSIONS:
- replacement_file = basename + extension
- if os.path.exists(replacement_file):
- compilation_info = database.GetCompilationInfoForFile(
- replacement_file)
- if compilation_info.compiler_flags_:
- return compilation_info
- return None
- return database.GetCompilationInfoForFile(filename)
-
-
-def FindNearest(path, target):
- candidate = os.path.join(path, target)
- if(os.path.isfile(candidate) or os.path.isdir(candidate)):
- logging.info("Found nearest " + target + " at " + candidate)
- return candidate
- else:
- parent = os.path.dirname(os.path.abspath(path))
- if(parent == path):
- raise RuntimeError("Could not find " + target)
- return FindNearest(parent, target)
-
-
-def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
- if not working_directory:
- return list(flags)
- new_flags = []
- make_next_absolute = False
- path_flags = ['-isystem', '-I', '-iquote', '--sysroot=']
- for flag in flags:
- new_flag = flag
-
- if make_next_absolute:
- make_next_absolute = False
- if not flag.startswith('/'):
- new_flag = os.path.join(working_directory, flag)
-
- for path_flag in path_flags:
- if flag == path_flag:
- make_next_absolute = True
- break
-
- if flag.startswith(path_flag):
- path = flag[len(path_flag):]
- new_flag = path_flag + os.path.join(working_directory, path)
- break
-
- if new_flag:
- new_flags.append(new_flag)
- return new_flags
-
-
-def FlagsForClangComplete(root):
- try:
- clang_complete_path = FindNearest(root, '.clang_complete')
- clang_complete_flags = open(
- clang_complete_path, 'r').read().splitlines()
- return clang_complete_flags
- except:
- return None
-
-
-def FlagsForInclude(root):
- try:
- include_path = FindNearest(root, 'include')
- flags = []
- for dirroot, dirnames, filenames in os.walk(include_path):
- for dir_path in dirnames:
- real_path = os.path.join(dirroot, dir_path)
- flags = flags + ["-I" + real_path]
- return flags
- except:
- return None
-
-
-def FlagsForCompilationDatabase(root, filename):
- try:
- compilation_db_path = FindNearest(
- os.path.join(root, 'build'), 'compile_commands.json')
- compilation_db_dir = os.path.dirname(compilation_db_path)
- logging.info(
- "Set compilation database directory to " + compilation_db_dir)
- compilation_db = ycm_core.CompilationDatabase(compilation_db_dir)
- if not compilation_db:
- logging.info("Compilation database file found but unable to load")
- return None
- compilation_info = GetCompilationInfoForFile(compilation_db, filename)
- if not compilation_info:
- logging.info(
- "No compilation info for " + filename + " in compilation database")
- return None
- return MakeRelativePathsInFlagsAbsolute(
- compilation_info.compiler_flags_,
- compilation_info.compiler_working_dir_)
- except:
- return None
-
-
-def FlagsForFile(filename):
- root = os.path.realpath(filename)
- compilation_db_flags = FlagsForCompilationDatabase(root, filename)
- if compilation_db_flags:
- final_flags = compilation_db_flags
- else:
- final_flags = BASE_FLAGS
- clang_flags = FlagsForClangComplete(root)
- if clang_flags:
- final_flags = final_flags + clang_flags
- include_flags = FlagsForInclude(root)
- if include_flags:
- final_flags = final_flags + include_flags
- return {
- 'flags': final_flags,
- 'do_cache': True
- }
diff --git a/CMakeLists.txt b/CMakeLists.txt
index f115a12fe..c736ccbf0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,23 +1,21 @@
cmake_minimum_required(VERSION 3.2)
-# The RSOCKET_CC CMake variable specifies the C compiler, e.g. gcc-4.9.
-# The C++ compiler name is obtained by replacing "gcc" with "g++" and "clang"
-# with "clang++"". If RSOCKET_CC is not given, the compiler is detected
-# automatically.
-if (RSOCKET_CC)
- set(ENV{CC} ${RSOCKET_CC})
- if (${RSOCKET_CC} MATCHES clang)
- string(REPLACE clang clang++ CXX ${RSOCKET_CC})
- else ()
- string(REPLACE gcc g++ CXX ${RSOCKET_CC})
- endif ()
- set(ENV{CXX} ${CXX})
-endif ()
-
project(ReactiveSocket)
+if (NOT DEFINED CPACK_GENERATOR)
+ set(CPACK_GENERATOR "RPM")
+endif()
+include(CPack)
+
# CMake modules.
-set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/")
+set(CMAKE_MODULE_PATH
+ "${CMAKE_SOURCE_DIR}/cmake/"
+ # For in-fbsource builds
+ "${CMAKE_CURRENT_SOURCE_DIR}/../opensource/fbcode_builder/CMake"
+ # For shipit-transformed builds
+ "${CMAKE_CURRENT_SOURCE_DIR}/build/fbcode_builder/CMake"
+ ${CMAKE_MODULE_PATH}
+)
# Joins arguments and stores the result in ${var}.
function(join var)
@@ -128,6 +126,8 @@ if (DEFINED ASAN_FLAGS)
endif ()
option(BUILD_BENCHMARKS "Build benchmarks" ON)
+option(BUILD_EXAMPLES "Build examples" ON)
+option(BUILD_TESTS "Build tests" ON)
enable_testing()
@@ -136,31 +136,44 @@ include(CTest)
include(${CMAKE_SOURCE_DIR}/cmake/InstallFolly.cmake)
-# gmock
-ExternalProject_Add(
- gmock
- URL ${CMAKE_CURRENT_SOURCE_DIR}/googletest-release-1.8.0.zip
- INSTALL_COMMAND ""
-)
+if(BUILD_TESTS)
+ # gmock
+ ExternalProject_Add(
+ gmock
+ URL ${CMAKE_CURRENT_SOURCE_DIR}/googletest-release-1.8.0.zip
+ INSTALL_COMMAND ""
+ )
-ExternalProject_Get_Property(gmock source_dir)
-set(GMOCK_SOURCE_DIR ${source_dir})
-ExternalProject_Get_Property(gmock binary_dir)
-set(GMOCK_BINARY_DIR ${binary_dir})
+ ExternalProject_Get_Property(gmock source_dir)
+ set(GMOCK_SOURCE_DIR ${source_dir})
+ ExternalProject_Get_Property(gmock binary_dir)
+ set(GMOCK_BINARY_DIR ${binary_dir})
-set(GMOCK_LIBS
- ${GMOCK_BINARY_DIR}/${CMAKE_CFG_INTDIR}/googlemock/${CMAKE_STATIC_LIBRARY_PREFIX}gmock${CMAKE_STATIC_LIBRARY_SUFFIX}
- ${GMOCK_BINARY_DIR}/${CMAKE_CFG_INTDIR}/googlemock/${CMAKE_STATIC_LIBRARY_PREFIX}gmock_main${CMAKE_STATIC_LIBRARY_SUFFIX}
+ set(GMOCK_LIBS
+ ${GMOCK_BINARY_DIR}/${CMAKE_CFG_INTDIR}/googlemock/${CMAKE_STATIC_LIBRARY_PREFIX}gmock${CMAKE_STATIC_LIBRARY_SUFFIX}
+ ${GMOCK_BINARY_DIR}/${CMAKE_CFG_INTDIR}/googlemock/${CMAKE_STATIC_LIBRARY_PREFIX}gmock_main${CMAKE_STATIC_LIBRARY_SUFFIX}
)
+ include_directories(${GMOCK_SOURCE_DIR}/googlemock/include)
+ include_directories(${GMOCK_SOURCE_DIR}/googletest/include)
+
+endif()
+
set(CMAKE_CXX_STANDARD 14)
+include(CheckCXXCompilerFlag)
+
# Common configuration for all build modes.
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -pedantic")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Woverloaded-virtual")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g")
+if (NOT MSVC)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Woverloaded-virtual")
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g")
+endif()
-set(EXTRA_CXX_FLAGS ${EXTRA_CXX_FLAGS} -Werror)
+CHECK_CXX_COMPILER_FLAG(-Wnoexcept-type COMPILER_HAS_W_NOEXCEPT_TYPE)
+if (COMPILER_HAS_W_NOEXCEPT_TYPE)
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-noexcept-type")
+endif()
if("${BUILD_TYPE_LOWER}" MATCHES "debug")
message("debug mode was set")
@@ -177,26 +190,16 @@ find_library(DOUBLE-CONVERSION double-conversion)
find_package(OpenSSL REQUIRED)
-# Find glog and gflags libraries specifically
-find_path(GLOG_INCLUDE_DIR glog/logging.h)
-find_path(GFLAGS_INCLUDE_DIR gflags/gflags.h)
+find_package(Gflags REQUIRED)
-find_library(GLOG_LIBRARY glog)
-find_library(GFLAGS_LIBRARY gflags)
+# find glog::glog to satisfy the folly dep.
+find_package(Glog REQUIRED)
-message("gflags include_dir <${GFLAGS_INCLUDE_DIR}> lib <${GFLAGS_LIBRARY}>")
-message("glog include_dir <${GLOG_INCLUDE_DIR}> lib <${GLOG_LIBRARY}>")
+find_package(fmt CONFIG REQUIRED)
include_directories(SYSTEM ${OPENSSL_INCLUDE_DIR})
include_directories(SYSTEM ${GFLAGS_INCLUDE_DIR})
-include_directories(SYSTEM ${GLOG_INCLUDE_DIR})
-
-include_directories(${CMAKE_SOURCE_DIR})
-
-include_directories(${CMAKE_CURRENT_BINARY_DIR}/reactivestreams/include)
-include_directories(${GMOCK_SOURCE_DIR}/googlemock/include)
-include_directories(${GMOCK_SOURCE_DIR}/googletest/include)
add_subdirectory(yarpl)
@@ -240,10 +243,6 @@ add_library(
rsocket/framing/FrameProcessor.h
rsocket/framing/FrameSerializer.cpp
rsocket/framing/FrameSerializer.h
- rsocket/framing/FrameSerializer_v0.cpp
- rsocket/framing/FrameSerializer_v0.h
- rsocket/framing/FrameSerializer_v0_1.cpp
- rsocket/framing/FrameSerializer_v0_1.h
rsocket/framing/FrameSerializer_v1_0.cpp
rsocket/framing/FrameSerializer_v1_0.h
rsocket/framing/FrameTransport.h
@@ -255,8 +254,10 @@ add_library(
rsocket/framing/FramedDuplexConnection.h
rsocket/framing/FramedReader.cpp
rsocket/framing/FramedReader.h
- rsocket/framing/FramedWriter.cpp
- rsocket/framing/FramedWriter.h
+ rsocket/framing/ProtocolVersion.cpp
+ rsocket/framing/ProtocolVersion.h
+ rsocket/framing/ResumeIdentificationToken.cpp
+ rsocket/framing/ResumeIdentificationToken.h
rsocket/framing/ScheduledFrameProcessor.cpp
rsocket/framing/ScheduledFrameProcessor.h
rsocket/framing/ScheduledFrameTransport.cpp
@@ -288,6 +289,8 @@ add_library(
rsocket/statemachine/ChannelResponder.h
rsocket/statemachine/ConsumerBase.cpp
rsocket/statemachine/ConsumerBase.h
+ rsocket/statemachine/FireAndForgetResponder.cpp
+ rsocket/statemachine/FireAndForgetResponder.h
rsocket/statemachine/PublisherBase.cpp
rsocket/statemachine/PublisherBase.h
rsocket/statemachine/RSocketStateMachine.cpp
@@ -300,13 +303,12 @@ add_library(
rsocket/statemachine/StreamRequester.h
rsocket/statemachine/StreamResponder.cpp
rsocket/statemachine/StreamResponder.h
- rsocket/statemachine/StreamState.cpp
- rsocket/statemachine/StreamState.h
rsocket/statemachine/StreamStateMachineBase.cpp
rsocket/statemachine/StreamStateMachineBase.h
- rsocket/statemachine/StreamsFactory.cpp
- rsocket/statemachine/StreamsFactory.h
+ rsocket/statemachine/StreamFragmentAccumulator.cpp
+ rsocket/statemachine/StreamFragmentAccumulator.h
rsocket/statemachine/StreamsWriter.h
+ rsocket/statemachine/StreamsWriter.cpp
rsocket/transports/tcp/TcpConnectionAcceptor.cpp
rsocket/transports/tcp/TcpConnectionAcceptor.h
rsocket/transports/tcp/TcpConnectionFactory.cpp
@@ -314,10 +316,17 @@ add_library(
rsocket/transports/tcp/TcpDuplexConnection.cpp
rsocket/transports/tcp/TcpDuplexConnection.h)
-target_include_directories(ReactiveSocket PUBLIC "${PROJECT_SOURCE_DIR}/yarpl/include")
-target_include_directories(ReactiveSocket PUBLIC "${PROJECT_SOURCE_DIR}/yarpl/src")
+target_include_directories(
+ ReactiveSocket
+ PUBLIC
+ $
+ $
+)
-target_link_libraries(ReactiveSocket yarpl ${GFLAGS_LIBRARY} ${GLOG_LIBRARY})
+
+target_link_libraries(ReactiveSocket
+ PUBLIC yarpl glog::glog gflags
+ INTERFACE ${EXTRA_LINK_FLAGS})
target_compile_options(
ReactiveSocket
@@ -325,62 +334,72 @@ target_compile_options(
enable_testing()
-install(TARGETS ReactiveSocket DESTINATION lib)
+install(TARGETS ReactiveSocket EXPORT rsocket-exports DESTINATION lib)
install(DIRECTORY rsocket DESTINATION include FILES_MATCHING PATTERN "*.h")
+install(EXPORT rsocket-exports NAMESPACE rsocket:: DESTINATION lib/cmake/rsocket)
+include(CMakePackageConfigHelpers)
+configure_package_config_file(
+ cmake/rsocket-config.cmake.in
+ rsocket-config.cmake
+ INSTALL_DESTINATION lib/cmake/rsocket
+)
+install(
+ FILES ${CMAKE_CURRENT_BINARY_DIR}/rsocket-config.cmake
+ DESTINATION lib/cmake/rsocket
+)
-# CMake doesn't seem to support "transitive" installing, and I can't access the
-# "yarpl" target from this file, so just grab the library file directly.
-install(FILES "${CMAKE_CURRENT_BINARY_DIR}/yarpl/libyarpl.a" DESTINATION lib)
-install(DIRECTORY yarpl/include/yarpl DESTINATION include FILES_MATCHING PATTERN "*.h")
-
+if(BUILD_TESTS)
add_executable(
tests
- test/ColdResumptionTest.cpp
- test/ConnectionEventsTest.cpp
- test/PayloadTest.cpp
- test/RSocketClientServerTest.cpp
- test/RSocketClientTest.cpp
- test/RSocketTests.cpp
- test/RSocketTests.h
- test/RequestChannelTest.cpp
- test/RequestResponseTest.cpp
- test/RequestStreamTest.cpp
- test/RequestStreamTest_concurrency.cpp
- test/Test.cpp
- test/WarmResumeManagerTest.cpp
- test/WarmResumptionTest.cpp
- test/framing/FrameTest.cpp
- test/framing/FrameTransportTest.cpp
- test/framing/FramedReaderTest.cpp
- test/handlers/HelloServiceHandler.cpp
- test/handlers/HelloServiceHandler.h
- test/handlers/HelloStreamRequestHandler.cpp
- test/handlers/HelloStreamRequestHandler.h
- test/internal/AllowanceTest.cpp
- test/internal/ConnectionSetTest.cpp
- test/internal/KeepaliveTimerTest.cpp
- test/internal/ResumeIdentificationToken.cpp
- test/internal/SetupResumeAcceptorTest.cpp
- test/internal/SwappableEventBaseTest.cpp
- test/test_utils/ColdResumeManager.cpp
- test/test_utils/ColdResumeManager.h
- test/test_utils/GenericRequestResponseHandler.h
- test/test_utils/MockDuplexConnection.h
- test/test_utils/MockKeepaliveTimer.h
- test/test_utils/MockRequestHandler.h
- test/test_utils/MockStats.h
- test/transport/DuplexConnectionTest.cpp
- test/transport/DuplexConnectionTest.h
- test/transport/TcpDuplexConnectionTest.cpp)
-
+ rsocket/test/ColdResumptionTest.cpp
+ rsocket/test/ConnectionEventsTest.cpp
+ rsocket/test/PayloadTest.cpp
+ rsocket/test/RSocketClientServerTest.cpp
+ rsocket/test/RSocketClientTest.cpp
+ rsocket/test/RSocketTests.cpp
+ rsocket/test/RSocketTests.h
+ rsocket/test/RequestChannelTest.cpp
+ rsocket/test/RequestResponseTest.cpp
+ rsocket/test/RequestStreamTest.cpp
+ rsocket/test/RequestStreamTest_concurrency.cpp
+ rsocket/test/Test.cpp
+ rsocket/test/WarmResumeManagerTest.cpp
+ rsocket/test/WarmResumptionTest.cpp
+ rsocket/test/framing/FrameTest.cpp
+ rsocket/test/framing/FrameTransportTest.cpp
+ rsocket/test/framing/FramedReaderTest.cpp
+ rsocket/test/handlers/HelloServiceHandler.cpp
+ rsocket/test/handlers/HelloServiceHandler.h
+ rsocket/test/handlers/HelloStreamRequestHandler.cpp
+ rsocket/test/handlers/HelloStreamRequestHandler.h
+ rsocket/test/internal/AllowanceTest.cpp
+ rsocket/test/internal/ConnectionSetTest.cpp
+ rsocket/test/internal/KeepaliveTimerTest.cpp
+ rsocket/test/internal/ResumeIdentificationToken.cpp
+ rsocket/test/internal/SetupResumeAcceptorTest.cpp
+ rsocket/test/internal/SwappableEventBaseTest.cpp
+ rsocket/test/statemachine/RSocketStateMachineTest.cpp
+ rsocket/test/statemachine/StreamStateTest.cpp
+ rsocket/test/statemachine/StreamsWriterTest.cpp
+ rsocket/test/test_utils/ColdResumeManager.cpp
+ rsocket/test/test_utils/ColdResumeManager.h
+ rsocket/test/test_utils/GenericRequestResponseHandler.h
+ rsocket/test/test_utils/MockDuplexConnection.h
+ rsocket/test/test_utils/MockStreamsWriter.h
+ rsocket/test/test_utils/MockStats.h
+ rsocket/test/transport/DuplexConnectionTest.cpp
+ rsocket/test/transport/DuplexConnectionTest.h
+ rsocket/test/transport/TcpDuplexConnectionTest.cpp)
+
+add_dependencies(tests gmock)
target_link_libraries(
tests
ReactiveSocket
yarpl
yarpl-test-utils
- ${GMOCK_LIBS}
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ ${GMOCK_LIBS} # This also needs the preceding `add_dependencies`
+ glog::glog
+ gflags)
target_include_directories(tests PUBLIC "${PROJECT_SOURCE_DIR}/yarpl/test/")
target_compile_options(
@@ -390,19 +409,18 @@ target_compile_options(
add_dependencies(tests gmock yarpl-test-utils ReactiveSocket)
add_test(NAME RSocketTests COMMAND tests)
-add_test(NAME RSocketTests-0.1 COMMAND tests --rs_use_protocol_version=0.1)
### Fuzzer harnesses
add_executable(
frame_fuzzer
- test/fuzzers/frame_fuzzer.cpp)
+ rsocket/test/fuzzers/frame_fuzzer.cpp)
target_link_libraries(
frame_fuzzer
ReactiveSocket
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_dependencies(frame_fuzzer gmock ReactiveSocket)
@@ -410,84 +428,91 @@ add_test(
NAME FrameFuzzerTests
COMMAND ./scripts/frame_fuzzer_test.sh
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR})
+endif()
########################################
# TCK Drivers
########################################
-add_executable(
- tckclient
- tck-test/client.cpp
- tck-test/TestFileParser.cpp
- tck-test/TestFileParser.h
- tck-test/FlowableSubscriber.cpp
- tck-test/FlowableSubscriber.h
- tck-test/SingleSubscriber.cpp
- tck-test/SingleSubscriber.h
- tck-test/TestSuite.cpp
- tck-test/TestSuite.h
- tck-test/TestInterpreter.cpp
- tck-test/TestInterpreter.h
- tck-test/TypedCommands.h
- tck-test/BaseSubscriber.cpp
- tck-test/BaseSubscriber.h)
-
-target_link_libraries(
- tckclient
- ReactiveSocket
- yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
-
-add_executable(
- tckserver
- tck-test/server.cpp
- tck-test/MarbleProcessor.cpp
- tck-test/MarbleProcessor.h
- test/test_utils/StatsPrinter.cpp
- test/test_utils/StatsPrinter.h)
-
-target_link_libraries(
- tckserver
- ReactiveSocket
- yarpl
- ${GFLAGS_LIBRARY}
- ${GMOCK_LIBS}
- ${GLOG_LIBRARY}
- ${DOUBLE-CONVERSION})
+if(BUILD_TESTS)
+ add_executable(
+ tckclient
+ rsocket/tck-test/client.cpp
+ rsocket/tck-test/TestFileParser.cpp
+ rsocket/tck-test/TestFileParser.h
+ rsocket/tck-test/FlowableSubscriber.cpp
+ rsocket/tck-test/FlowableSubscriber.h
+ rsocket/tck-test/SingleSubscriber.cpp
+ rsocket/tck-test/SingleSubscriber.h
+ rsocket/tck-test/TestSuite.cpp
+ rsocket/tck-test/TestSuite.h
+ rsocket/tck-test/TestInterpreter.cpp
+ rsocket/tck-test/TestInterpreter.h
+ rsocket/tck-test/TypedCommands.h
+ rsocket/tck-test/BaseSubscriber.cpp
+ rsocket/tck-test/BaseSubscriber.h)
+
+ target_link_libraries(
+ tckclient
+ ReactiveSocket
+ yarpl
+ glog::glog
+ gflags)
+
+ add_executable(
+ tckserver
+ rsocket/tck-test/server.cpp
+ rsocket/tck-test/MarbleProcessor.cpp
+ rsocket/tck-test/MarbleProcessor.h
+ rsocket/test/test_utils/StatsPrinter.cpp
+ rsocket/test/test_utils/StatsPrinter.h)
+
+ add_dependencies(tckserver gmock)
+ target_link_libraries(
+ tckserver
+ ReactiveSocket
+ yarpl
+ ${GMOCK_LIBS} # This also needs the preceding `add_dependencies`
+ glog::glog
+ gflags
+ ${DOUBLE-CONVERSION})
# Download the latest TCK drivers JAR.
-set(TCK_DRIVERS_JAR rsocket-tck-drivers-0.9.10.jar)
-join(TCK_DRIVERS_URL
- "https://oss.jfrog.org/libs-release/io/rsocket/"
- "rsocket-tck-drivers/0.9.10/${TCK_DRIVERS_JAR}")
-message(STATUS "Downloading ${TCK_DRIVERS_URL}")
-file(DOWNLOAD ${TCK_DRIVERS_URL} ${CMAKE_SOURCE_DIR}/${TCK_DRIVERS_JAR})
+ set(TCK_DRIVERS_JAR rsocket-tck-drivers-0.9.10.jar)
+ if (NOT EXISTS ${CMAKE_SOURCE_DIR}/${TCK_DRIVERS_JAR})
+ join(TCK_DRIVERS_URL
+ "https://oss.jfrog.org/libs-release/io/rsocket/"
+ "rsocket-tck-drivers/0.9.10/${TCK_DRIVERS_JAR}")
+ message(STATUS "Downloading ${TCK_DRIVERS_URL}")
+ file(DOWNLOAD ${TCK_DRIVERS_URL} ${CMAKE_SOURCE_DIR}/${TCK_DRIVERS_JAR})
+ endif ()
+endif()
########################################
# Examples
########################################
+if (BUILD_EXAMPLES)
add_library(
reactivesocket_examples_util
- examples/util/ExampleSubscriber.cpp
- examples/util/ExampleSubscriber.h
- test/test_utils/ColdResumeManager.h
- test/test_utils/ColdResumeManager.cpp
+ rsocket/examples/util/ExampleSubscriber.cpp
+ rsocket/examples/util/ExampleSubscriber.h
+ rsocket/test/test_utils/ColdResumeManager.h
+ rsocket/test/test_utils/ColdResumeManager.cpp
)
target_link_libraries(
reactivesocket_examples_util
yarpl
ReactiveSocket
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
# request-response-hello-world
add_executable(
example_request-response-hello-world-server
- examples/request-response-hello-world/RequestResponseHelloWorld_Server.cpp
+ rsocket/examples/request-response-hello-world/RequestResponseHelloWorld_Server.cpp
)
target_link_libraries(
@@ -495,12 +520,12 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_executable(
example_request-response-hello-world-client
- examples/request-response-hello-world/RequestResponseHelloWorld_Client.cpp
+ rsocket/examples/request-response-hello-world/RequestResponseHelloWorld_Client.cpp
)
target_link_libraries(
@@ -508,14 +533,14 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
# fire-and-forget-hello-world
add_executable(
example_fire-and-forget-hello-world-server
- examples/fire-and-forget-hello-world/FireAndForgetHelloWorld_Server.cpp
+ rsocket/examples/fire-and-forget-hello-world/FireAndForgetHelloWorld_Server.cpp
)
target_link_libraries(
@@ -523,12 +548,12 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_executable(
example_fire-and-forget-hello-world-client
- examples/fire-and-forget-hello-world/FireAndForgetHelloWorld_Client.cpp
+ rsocket/examples/fire-and-forget-hello-world/FireAndForgetHelloWorld_Client.cpp
)
target_link_libraries(
@@ -536,15 +561,15 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
# stream-hello-world
add_executable(
example_stream-hello-world-server
- examples/stream-hello-world/StreamHelloWorld_Server.cpp
+ rsocket/examples/stream-hello-world/StreamHelloWorld_Server.cpp
)
target_link_libraries(
@@ -552,12 +577,12 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_executable(
example_stream-hello-world-client
- examples/stream-hello-world/StreamHelloWorld_Client.cpp
+ rsocket/examples/stream-hello-world/StreamHelloWorld_Client.cpp
)
target_link_libraries(
@@ -565,14 +590,14 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
# channel-hello-world
add_executable(
example_channel-hello-world-server
- examples/channel-hello-world/ChannelHelloWorld_Server.cpp
+ rsocket/examples/channel-hello-world/ChannelHelloWorld_Server.cpp
)
target_link_libraries(
@@ -580,12 +605,14 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
+
+
add_executable(
example_channel-hello-world-client
- examples/channel-hello-world/ChannelHelloWorld_Client.cpp
+ rsocket/examples/channel-hello-world/ChannelHelloWorld_Client.cpp
)
target_link_libraries(
@@ -593,14 +620,14 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
# stream-observable-to-flowable
add_executable(
example_observable-to-flowable-server
- examples/stream-observable-to-flowable/StreamObservableToFlowable_Server.cpp
+ rsocket/examples/stream-observable-to-flowable/StreamObservableToFlowable_Server.cpp
)
target_link_libraries(
@@ -608,12 +635,12 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_executable(
example_observable-to-flowable-client
- examples/stream-observable-to-flowable/StreamObservableToFlowable_Client.cpp
+ rsocket/examples/stream-observable-to-flowable/StreamObservableToFlowable_Client.cpp
)
target_link_libraries(
@@ -621,18 +648,18 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
# conditional-request-handling
add_executable(
example_conditional-request-handling-server
- examples/conditional-request-handling/ConditionalRequestHandling_Server.cpp
- examples/conditional-request-handling/TextRequestHandler.h
- examples/conditional-request-handling/TextRequestHandler.cpp
- examples/conditional-request-handling/JsonRequestHandler.cpp
- examples/conditional-request-handling/JsonRequestHandler.h
+ rsocket/examples/conditional-request-handling/ConditionalRequestHandling_Server.cpp
+ rsocket/examples/conditional-request-handling/TextRequestHandler.h
+ rsocket/examples/conditional-request-handling/TextRequestHandler.cpp
+ rsocket/examples/conditional-request-handling/JsonRequestHandler.cpp
+ rsocket/examples/conditional-request-handling/JsonRequestHandler.h
)
target_link_libraries(
@@ -640,12 +667,12 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_executable(
example_conditional-request-handling-client
- examples/conditional-request-handling/ConditionalRequestHandling_Client.cpp
+ rsocket/examples/conditional-request-handling/ConditionalRequestHandling_Client.cpp
)
target_link_libraries(
@@ -653,14 +680,14 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
# warm-resumption
add_executable(
example_resumption-server
- examples/resumption/Resumption_Server.cpp
+ rsocket/examples/resumption/Resumption_Server.cpp
)
target_link_libraries(
@@ -668,12 +695,12 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_executable(
example_warm-resumption-client
- examples/resumption/WarmResumption_Client.cpp
+ rsocket/examples/resumption/WarmResumption_Client.cpp
)
target_link_libraries(
@@ -681,12 +708,12 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
add_executable(
example_cold-resumption-client
- examples/resumption/ColdResumption_Client.cpp
+ rsocket/examples/resumption/ColdResumption_Client.cpp
)
target_link_libraries(
@@ -694,13 +721,15 @@ target_link_libraries(
ReactiveSocket
reactivesocket_examples_util
yarpl
- ${GFLAGS_LIBRARY}
- ${GLOG_LIBRARY})
+ glog::glog
+ gflags)
+
+endif () # BUILD_EXAMPLES
########################################
# End Examples
########################################
if (BUILD_BENCHMARKS)
- add_subdirectory(benchmarks)
+ add_subdirectory(rsocket/benchmarks)
endif ()
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..d1abc700d
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,77 @@
+# Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to make participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, sex characteristics, gender identity and expression,
+level of experience, education, socio-economic status, nationality, personal
+appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies within all project spaces, and it also applies when
+an individual is representing the project or its community in public spaces.
+Examples of representing a project or community include using an official
+project e-mail address, posting via an official social media account, or acting
+as an appointed representative at an online or offline event. Representation of
+a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at . All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq
+
diff --git a/LICENSE b/LICENSE
index 4d4a15fb0..989e2c59e 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,30 +1,201 @@
-BSD License
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
-For reactivesocket-cpp software
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-Copyright (c) 2016-present, Facebook, Inc. All rights reserved.
+ 1. Definitions.
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
- * Redistributions of source code must retain the above copyright notice, this
- list of conditions and the following disclaimer.
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
- * Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
- * Neither the name Facebook nor the names of its contributors may be used to
- endorse or promote products derived from this software without specific
- prior written permission.
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
\ No newline at end of file
diff --git a/PATENTS b/PATENTS
deleted file mode 100644
index 3d7f19408..000000000
--- a/PATENTS
+++ /dev/null
@@ -1,33 +0,0 @@
-Additional Grant of Patent Rights Version 2
-
-"Software" means the reactivesocket-cpp software distributed by Facebook, Inc.
-
-Facebook, Inc. ("Facebook") hereby grants to each recipient of the Software
-("you") a perpetual, worldwide, royalty-free, non-exclusive, irrevocable
-(subject to the termination provision below) license under any Necessary
-Claims, to make, have made, use, sell, offer to sell, import, and otherwise
-transfer the Software. For avoidance of doubt, no license is granted under
-Facebook’s rights in any patent claims that are infringed by (i) modifications
-to the Software made by you or any third party or (ii) the Software in
-combination with any software or other technology.
-
-The license granted hereunder will terminate, automatically and without notice,
-if you (or any of your subsidiaries, corporate affiliates or agents) initiate
-directly or indirectly, or take a direct financial interest in, any Patent
-Assertion: (i) against Facebook or any of its subsidiaries or corporate
-affiliates, (ii) against any party if such Patent Assertion arises in whole or
-in part from any software, technology, product or service of Facebook or any of
-its subsidiaries or corporate affiliates, or (iii) against any party relating
-to the Software. Notwithstanding the foregoing, if Facebook or any of its
-subsidiaries or corporate affiliates files a lawsuit alleging patent
-infringement against you in the first instance, and you respond by filing a
-patent infringement counterclaim in that lawsuit against that party that is
-unrelated to the Software, the license granted hereunder will not terminate
-under section (i) of this paragraph due to such counterclaim.
-
-A "Necessary Claim" is a claim of a patent owned by Facebook that is
-necessarily infringed by the Software standing alone.
-
-A "Patent Assertion" is any lawsuit or other action alleging direct, indirect,
-or contributory infringement or inducement to infringe any patent, including a
-cross-claim or counterclaim.
diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000..230862230
--- /dev/null
+++ b/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,28 @@
+## Motivation and Context
+
+
+
+
+
+## How Has This Been Tested
+
+
+
+## Types of changes
+
+
+- [ ] Docs change / refactoring / dependency upgrade
+- [ ] Bug fix (non-breaking change which fixes an issue)
+- [ ] New feature (non-breaking change which adds functionality)
+- [ ] Breaking change (fix or feature that would cause existing functionality to change)
+
+## Checklist
+
+
+
+- [ ] My code follows the code style of this project.
+- [ ] My change requires a change to the documentation.
+- [ ] I have updated the documentation accordingly.
+- [ ] I have read the **CONTRIBUTING** document.
+- [ ] I have added tests to cover my changes.
+- [ ] All new and existing tests passed.
diff --git a/README.md b/README.md
index 3b811aca9..1a5339e1c 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ C++ implementation of [RSocket](https://rsocket.io)
Install `folly`:
```
-brew install folly
+brew install --HEAD folly
```
# Building and running tests
@@ -25,3 +25,8 @@ cmake -DCMAKE_BUILD_TYPE=DEBUG ../
make -j
./tests
```
+
+# License
+
+By contributing to rsocket-cpp, you agree that your contributions will be licensed
+under the LICENSE file in the root directory of this source tree.
diff --git a/benchmarks/Benchmarks.cpp b/benchmarks/Benchmarks.cpp
deleted file mode 100644
index 8c357fbed..000000000
--- a/benchmarks/Benchmarks.cpp
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
-
-#include
-#include
-
-int main(int argc, char** argv) {
- folly::init(&argc, &argv);
-
- FLAGS_logtostderr = true;
-
- LOG(INFO) << "Running benchmarks... (takes minutes)";
- folly::runBenchmarks();
-
- return 0;
-}
diff --git a/benchmarks/Latch.h b/benchmarks/Latch.h
deleted file mode 100644
index b8dcc3520..000000000
--- a/benchmarks/Latch.h
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
-
-#pragma once
-
-#include
-
-/// Simple implementation of a latch synchronization primitive, for testing.
-class Latch {
- public:
- explicit Latch(size_t limit) : limit_{limit} {}
-
- void wait() {
- baton_.wait();
- }
-
- bool timed_wait(std::chrono::milliseconds timeout) {
- return baton_.timed_wait(timeout);
- }
-
- void post() {
- auto const old = count_.fetch_add(1);
- if (old == limit_ - 1) {
- baton_.post();
- }
- }
-
- private:
- folly::Baton<> baton_;
- std::atomic count_{0};
- const size_t limit_{0};
-};
diff --git a/build/README.md b/build/README.md
new file mode 100644
index 000000000..fdcb9fdcb
--- /dev/null
+++ b/build/README.md
@@ -0,0 +1,10 @@
+# Building using `fbcode_builder`
+
+Continuous integration builds are powered by `fbcode_builder`, a tiny tool
+shared by several Facebook projects. Its files are in `./fbcode_builder`
+(on Github) or in `fbcode/opensource/fbcode_builder` (inside Facebook's
+repo).
+
+Start with the READMEs in the `fbcode_builder` directory.
+
+`./fbcode_builder_config.py` contains the project-specific configuration.
diff --git a/build/deps/github_hashes/facebook/folly-rev.txt b/build/deps/github_hashes/facebook/folly-rev.txt
new file mode 100644
index 000000000..cd836348c
--- /dev/null
+++ b/build/deps/github_hashes/facebook/folly-rev.txt
@@ -0,0 +1 @@
+Subproject commit 2a20a79adf8480dffc165aebc02a93937e15ca94
diff --git a/build/fbcode_builder/.gitignore b/build/fbcode_builder/.gitignore
new file mode 100644
index 000000000..b98f3edfa
--- /dev/null
+++ b/build/fbcode_builder/.gitignore
@@ -0,0 +1,5 @@
+# Facebook-internal CI builds don't have write permission outside of the
+# source tree, so we install all projects into this directory.
+/facebook_ci
+__pycache__/
+*.pyc
diff --git a/build/fbcode_builder/CMake/FBBuildOptions.cmake b/build/fbcode_builder/CMake/FBBuildOptions.cmake
new file mode 100644
index 000000000..dbaa29933
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBBuildOptions.cmake
@@ -0,0 +1,15 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+function (fb_activate_static_library_option)
+ option(USE_STATIC_DEPS_ON_UNIX
+ "If enabled, use static dependencies on unix systems. This is generally discouraged."
+ OFF
+ )
+ # Mark USE_STATIC_DEPS_ON_UNIX as an "advanced" option, since enabling it
+ # is generally discouraged.
+ mark_as_advanced(USE_STATIC_DEPS_ON_UNIX)
+
+ if(UNIX AND USE_STATIC_DEPS_ON_UNIX)
+ SET(CMAKE_FIND_LIBRARY_SUFFIXES ".a" PARENT_SCOPE)
+ endif()
+endfunction()
diff --git a/build/fbcode_builder/CMake/FBCMakeParseArgs.cmake b/build/fbcode_builder/CMake/FBCMakeParseArgs.cmake
new file mode 100644
index 000000000..933180189
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBCMakeParseArgs.cmake
@@ -0,0 +1,141 @@
+#
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# Helper function for parsing arguments to a CMake function.
+#
+# This function is very similar to CMake's built-in cmake_parse_arguments()
+# function, with some improvements:
+# - This function correctly handles empty arguments. (cmake_parse_arguments()
+# ignores empty arguments.)
+# - If a multi-value argument is specified more than once, the subsequent
+# arguments are appended to the original list rather than replacing it. e.g.
+# if "SOURCES" is a multi-value argument, and the argument list contains
+# "SOURCES a b c SOURCES x y z" then the resulting value for SOURCES will be
+# "a;b;c;x;y;z" rather than "x;y;z"
+# - This function errors out by default on unrecognized arguments. You can
+# pass in an extra "ALLOW_UNPARSED_ARGS" argument to make it behave like
+# cmake_parse_arguments(), and return the unparsed arguments in a
+# _UNPARSED_ARGUMENTS variable instead.
+#
+# It does look like cmake_parse_arguments() handled empty arguments correctly
+# from CMake 3.0 through 3.3, but it seems like this was probably broken when
+# it was turned into a built-in function in CMake 3.4. Here is discussion and
+# patches that fixed this behavior prior to CMake 3.0:
+# https://cmake.org/pipermail/cmake-developers/2013-November/020607.html
+#
+# The one downside to this function over the built-in cmake_parse_arguments()
+# is that I don't think we can achieve the PARSE_ARGV behavior in a non-builtin
+# function, so we can't properly handle arguments that contain ";". CMake will
+# treat the ";" characters as list element separators, and treat it as multiple
+# separate arguments.
+#
+function(fb_cmake_parse_args PREFIX OPTIONS ONE_VALUE_ARGS MULTI_VALUE_ARGS ARGS)
+ foreach(option IN LISTS ARGN)
+ if ("${option}" STREQUAL "ALLOW_UNPARSED_ARGS")
+ set(ALLOW_UNPARSED_ARGS TRUE)
+ else()
+ message(
+ FATAL_ERROR
+ "unknown optional argument for fb_cmake_parse_args(): ${option}"
+ )
+ endif()
+ endforeach()
+
+ # Define all options as FALSE in the parent scope to start with
+ foreach(var_name IN LISTS OPTIONS)
+ set("${PREFIX}_${var_name}" "FALSE" PARENT_SCOPE)
+ endforeach()
+
+ # TODO: We aren't extremely strict about error checking for one-value
+ # arguments here. e.g., we don't complain if a one-value argument is
+ # followed by another option/one-value/multi-value name rather than an
+ # argument. We also don't complain if a one-value argument is the last
+ # argument and isn't followed by a value.
+
+ list(APPEND all_args ${ONE_VALUE_ARGS})
+ list(APPEND all_args ${MULTI_VALUE_ARGS})
+ set(current_variable)
+ set(unparsed_args)
+ foreach(arg IN LISTS ARGS)
+ list(FIND OPTIONS "${arg}" opt_index)
+ if("${opt_index}" EQUAL -1)
+ list(FIND all_args "${arg}" arg_index)
+ if("${arg_index}" EQUAL -1)
+ # This argument does not match an argument name,
+ # must be an argument value
+ if("${current_variable}" STREQUAL "")
+ list(APPEND unparsed_args "${arg}")
+ else()
+ # Ugh, CMake lists have a pretty fundamental flaw: they cannot
+ # distinguish between an empty list and a list with a single empty
+ # element. We track our own SEEN_VALUES_arg setting to help
+ # distinguish this and behave properly here.
+ if ("${SEEN_${current_variable}}" AND "${${current_variable}}" STREQUAL "")
+ set("${current_variable}" ";${arg}")
+ else()
+ list(APPEND "${current_variable}" "${arg}")
+ endif()
+ set("SEEN_${current_variable}" TRUE)
+ endif()
+ else()
+ # We found a single- or multi-value argument name
+ set(current_variable "VALUES_${arg}")
+ set("SEEN_${arg}" TRUE)
+ endif()
+ else()
+ # We found an option variable
+ set("${PREFIX}_${arg}" "TRUE" PARENT_SCOPE)
+ set(current_variable)
+ endif()
+ endforeach()
+
+ foreach(arg_name IN LISTS ONE_VALUE_ARGS)
+ if(NOT "${SEEN_${arg_name}}")
+ unset("${PREFIX}_${arg_name}" PARENT_SCOPE)
+ elseif(NOT "${SEEN_VALUES_${arg_name}}")
+ # If the argument was seen but a value wasn't specified, error out.
+ # We require exactly one value to be specified.
+ message(
+ FATAL_ERROR "argument ${arg_name} was specified without a value"
+ )
+ else()
+ list(LENGTH "VALUES_${arg_name}" num_args)
+ if("${num_args}" EQUAL 0)
+ # We know an argument was specified and that we called list(APPEND).
+ # If CMake thinks the list is empty that means there is really a single
+ # empty element in the list.
+ set("${PREFIX}_${arg_name}" "" PARENT_SCOPE)
+ elseif("${num_args}" EQUAL 1)
+ list(GET "VALUES_${arg_name}" 0 arg_value)
+ set("${PREFIX}_${arg_name}" "${arg_value}" PARENT_SCOPE)
+ else()
+ message(
+ FATAL_ERROR "too many arguments specified for ${arg_name}: "
+ "${VALUES_${arg_name}}"
+ )
+ endif()
+ endif()
+ endforeach()
+
+ foreach(arg_name IN LISTS MULTI_VALUE_ARGS)
+ # If this argument name was never seen, then unset the parent scope
+ if (NOT "${SEEN_${arg_name}}")
+ unset("${PREFIX}_${arg_name}" PARENT_SCOPE)
+ else()
+ # TODO: Our caller still won't be able to distinguish between an empty
+ # list and a list with a single empty element. We can tell which is
+ # which, but CMake lists don't make it easy to show this to our caller.
+ set("${PREFIX}_${arg_name}" "${VALUES_${arg_name}}" PARENT_SCOPE)
+ endif()
+ endforeach()
+
+ # By default we fatal out on unparsed arguments, but return them to the
+ # caller if ALLOW_UNPARSED_ARGS was specified.
+ if (DEFINED unparsed_args)
+ if ("${ALLOW_UNPARSED_ARGS}")
+ set("${PREFIX}_UNPARSED_ARGUMENTS" "${unparsed_args}" PARENT_SCOPE)
+ else()
+ message(FATAL_ERROR "unrecognized arguments: ${unparsed_args}")
+ endif()
+ endif()
+endfunction()
diff --git a/build/fbcode_builder/CMake/FBCompilerSettings.cmake b/build/fbcode_builder/CMake/FBCompilerSettings.cmake
new file mode 100644
index 000000000..585c95320
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBCompilerSettings.cmake
@@ -0,0 +1,13 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+# This file applies common compiler settings that are shared across
+# a number of Facebook opensource projects.
+# Please use caution and your best judgement before making changes
+# to these shared compiler settings in order to avoid accidentally
+# breaking a build in another project!
+
+if (WIN32)
+ include(FBCompilerSettingsMSVC)
+else()
+ include(FBCompilerSettingsUnix)
+endif()
diff --git a/build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake b/build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake
new file mode 100644
index 000000000..4efd7e966
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBCompilerSettingsMSVC.cmake
@@ -0,0 +1,11 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+# This file applies common compiler settings that are shared across
+# a number of Facebook opensource projects.
+# Please use caution and your best judgement before making changes
+# to these shared compiler settings in order to avoid accidentally
+# breaking a build in another project!
+
+add_compile_options(
+ /wd4250 # 'class1' : inherits 'class2::member' via dominance
+)
diff --git a/build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake b/build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake
new file mode 100644
index 000000000..c26ce78b1
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBCompilerSettingsUnix.cmake
@@ -0,0 +1,9 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+# This file applies common compiler settings that are shared across
+# a number of Facebook opensource projects.
+# Please use caution and your best judgement before making changes
+# to these shared compiler settings in order to avoid accidentally
+# breaking a build in another project!
+
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -Wall -Wextra -Wno-deprecated -Wno-deprecated-declarations")
diff --git a/build/fbcode_builder/CMake/FBPythonBinary.cmake b/build/fbcode_builder/CMake/FBPythonBinary.cmake
new file mode 100644
index 000000000..99c33fb8c
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBPythonBinary.cmake
@@ -0,0 +1,697 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+include(FBCMakeParseArgs)
+
+#
+# This file contains helper functions for building self-executing Python
+# binaries.
+#
+# This is somewhat different than typical python installation with
+# distutils/pip/virtualenv/etc. We primarily want to build a standalone
+# executable, isolated from other Python packages on the system. We don't want
+# to install files into the standard library python paths. This is more
+# similar to PEX (https://github.com/pantsbuild/pex) and XAR
+# (https://github.com/facebookincubator/xar). (In the future it would be nice
+# to update this code to also support directly generating XAR files if XAR is
+# available.)
+#
+# We also want to be able to easily define "libraries" of python files that can
+# be shared and re-used between these standalone python executables, and can be
+# shared across projects in different repositories. This means that we do need
+# a way to "install" libraries so that they are visible to CMake builds in
+# other repositories, without actually installing them in the standard python
+# library paths.
+#
+
+# If the caller has not already found Python, do so now.
+# If we fail to find python now we won't fail immediately, but
+# add_fb_python_executable() or add_fb_python_library() will fatal out if they
+# are used.
+if(NOT TARGET Python3::Interpreter)
+ # CMake 3.12+ ships with a FindPython3.cmake module. Try using it first.
+ # We find with QUIET here, since otherwise this generates some noisy warnings
+ # on versions of CMake before 3.12
+ if (WIN32)
+ # On Windows we need both the Intepreter as well as the Development
+ # libraries.
+ find_package(Python3 COMPONENTS Interpreter Development QUIET)
+ else()
+ find_package(Python3 COMPONENTS Interpreter QUIET)
+ endif()
+ if(Python3_Interpreter_FOUND)
+ message(STATUS "Found Python 3: ${Python3_EXECUTABLE}")
+ else()
+ # Try with the FindPythonInterp.cmake module available in older CMake
+ # versions. Check to see if the caller has already searched for this
+ # themselves first.
+ if(NOT PYTHONINTERP_FOUND)
+ set(Python_ADDITIONAL_VERSIONS 3 3.6 3.5 3.4 3.3 3.2 3.1)
+ find_package(PythonInterp)
+ # TODO: On Windows we require the Python libraries as well.
+ # We currently do not search for them on this code path.
+ # For now we require building with CMake 3.12+ on Windows, so that the
+ # FindPython3 code path above is available.
+ endif()
+ if(PYTHONINTERP_FOUND)
+ if("${PYTHON_VERSION_MAJOR}" GREATER_EQUAL 3)
+ set(Python3_EXECUTABLE "${PYTHON_EXECUTABLE}")
+ add_custom_target(Python3::Interpreter)
+ else()
+ string(
+ CONCAT FBPY_FIND_PYTHON_ERR
+ "found Python ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}, "
+ "but need Python 3"
+ )
+ endif()
+ endif()
+ endif()
+endif()
+
+# Find our helper program.
+# We typically install this in the same directory as this .cmake file.
+find_program(
+ FB_MAKE_PYTHON_ARCHIVE "make_fbpy_archive.py"
+ PATHS ${CMAKE_MODULE_PATH}
+)
+set(FB_PY_TEST_MAIN "${CMAKE_CURRENT_LIST_DIR}/fb_py_test_main.py")
+set(
+ FB_PY_TEST_DISCOVER_SCRIPT
+ "${CMAKE_CURRENT_LIST_DIR}/FBPythonTestAddTests.cmake"
+)
+set(
+ FB_PY_WIN_MAIN_C
+ "${CMAKE_CURRENT_LIST_DIR}/fb_py_win_main.c"
+)
+
+# An option to control the default installation location for
+# install_fb_python_library(). This is relative to ${CMAKE_INSTALL_PREFIX}
+set(
+ FBPY_LIB_INSTALL_DIR "lib/fb-py-libs" CACHE STRING
+ "The subdirectory where FB python libraries should be installed"
+)
+
+#
+# Build a self-executing python binary.
+#
+# This accepts the same arguments as add_fb_python_library().
+#
+# In addition, a MAIN_MODULE argument is accepted. This argument specifies
+# which module should be started as the __main__ module when the executable is
+# run. If left unspecified, a __main__.py script must be present in the
+# manifest.
+#
+function(add_fb_python_executable TARGET)
+ fb_py_check_available()
+
+ # Parse the arguments
+ set(one_value_args BASE_DIR NAMESPACE MAIN_MODULE TYPE)
+ set(multi_value_args SOURCES DEPENDS)
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+ fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR)
+
+ # Use add_fb_python_library() to perform most of our source handling
+ add_fb_python_library(
+ "${TARGET}.main_lib"
+ BASE_DIR "${ARG_BASE_DIR}"
+ NAMESPACE "${ARG_NAMESPACE}"
+ SOURCES ${ARG_SOURCES}
+ DEPENDS ${ARG_DEPENDS}
+ )
+
+ set(
+ manifest_files
+ "$"
+ )
+ set(
+ source_files
+ "$"
+ )
+
+ # The command to build the executable archive.
+ #
+ # If we are using CMake 3.8+ we can use COMMAND_EXPAND_LISTS.
+ # CMP0067 isn't really the policy we care about, but seems like the best way
+ # to check if we are running 3.8+.
+ if (POLICY CMP0067)
+ set(extra_cmd_params COMMAND_EXPAND_LISTS)
+ set(make_py_args "${manifest_files}")
+ else()
+ set(extra_cmd_params)
+ set(make_py_args --manifest-separator "::" "$")
+ endif()
+
+ set(output_file "${TARGET}${CMAKE_EXECUTABLE_SUFFIX}")
+ if(WIN32)
+ set(zipapp_output "${TARGET}.py_zipapp")
+ else()
+ set(zipapp_output "${output_file}")
+ endif()
+ set(zipapp_output_file "${zipapp_output}")
+
+ set(is_dir_output FALSE)
+ if(DEFINED ARG_TYPE)
+ list(APPEND make_py_args "--type" "${ARG_TYPE}")
+ if ("${ARG_TYPE}" STREQUAL "dir")
+ set(is_dir_output TRUE)
+ # CMake doesn't really seem to like having a directory specified as an
+ # output; specify the __main__.py file as the output instead.
+ set(zipapp_output_file "${zipapp_output}/__main__.py")
+ list(APPEND
+ extra_cmd_params
+ COMMAND "${CMAKE_COMMAND}" -E remove_directory "${zipapp_output}"
+ )
+ endif()
+ endif()
+
+ if(DEFINED ARG_MAIN_MODULE)
+ list(APPEND make_py_args "--main" "${ARG_MAIN_MODULE}")
+ endif()
+
+ add_custom_command(
+ OUTPUT "${zipapp_output_file}"
+ ${extra_cmd_params}
+ COMMAND
+ "${Python3_EXECUTABLE}" "${FB_MAKE_PYTHON_ARCHIVE}"
+ -o "${zipapp_output}"
+ ${make_py_args}
+ DEPENDS
+ ${source_files}
+ "${TARGET}.main_lib.py_sources_built"
+ "${FB_MAKE_PYTHON_ARCHIVE}"
+ )
+
+ if(WIN32)
+ if(is_dir_output)
+ # TODO: generate a main executable that will invoke Python3
+ # with the correct main module inside the output directory
+ else()
+ add_executable("${TARGET}.winmain" "${FB_PY_WIN_MAIN_C}")
+ target_link_libraries("${TARGET}.winmain" Python3::Python)
+ # The Python3::Python target doesn't seem to be set up completely
+ # correctly on Windows for some reason, and we have to explicitly add
+ # ${Python3_LIBRARY_DIRS} to the target link directories.
+ target_link_directories(
+ "${TARGET}.winmain"
+ PUBLIC ${Python3_LIBRARY_DIRS}
+ )
+ add_custom_command(
+ OUTPUT "${output_file}"
+ DEPENDS "${TARGET}.winmain" "${zipapp_output_file}"
+ COMMAND
+ "cmd.exe" "/c" "copy" "/b"
+ "${TARGET}.winmain${CMAKE_EXECUTABLE_SUFFIX}+${zipapp_output}"
+ "${output_file}"
+ )
+ endif()
+ endif()
+
+ # Add an "ALL" target that depends on force ${TARGET},
+ # so that ${TARGET} will be included in the default list of build targets.
+ add_custom_target("${TARGET}.GEN_PY_EXE" ALL DEPENDS "${output_file}")
+
+ # Allow resolving the executable path for the target that we generate
+ # via a generator expression like:
+ # "WATCHMAN_WAIT_PATH=$"
+ set_property(TARGET "${TARGET}.GEN_PY_EXE"
+ PROPERTY EXECUTABLE "${CMAKE_CURRENT_BINARY_DIR}/${output_file}")
+endfunction()
+
+# Define a python unittest executable.
+# The executable is built using add_fb_python_executable and has the
+# following differences:
+#
+# Each of the source files specified in SOURCES will be imported
+# and have unittest discovery performed upon them.
+# Those sources will be imported in the top level namespace.
+#
+# The ENV argument allows specifying a list of "KEY=VALUE"
+# pairs that will be used by the test runner to set up the environment
+# in the child process prior to running the test. This is useful for
+# passing additional configuration to the test.
+function(add_fb_python_unittest TARGET)
+ # Parse the arguments
+ set(multi_value_args SOURCES DEPENDS ENV PROPERTIES)
+ set(
+ one_value_args
+ WORKING_DIRECTORY BASE_DIR NAMESPACE TEST_LIST DISCOVERY_TIMEOUT
+ )
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+ fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR)
+ if(NOT ARG_WORKING_DIRECTORY)
+ # Default the working directory to the current binary directory.
+ # This matches the default behavior of add_test() and other standard
+ # test functions like gtest_discover_tests()
+ set(ARG_WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
+ endif()
+ if(NOT ARG_TEST_LIST)
+ set(ARG_TEST_LIST "${TARGET}_TESTS")
+ endif()
+ if(NOT ARG_DISCOVERY_TIMEOUT)
+ set(ARG_DISCOVERY_TIMEOUT 5)
+ endif()
+
+ # Tell our test program the list of modules to scan for tests.
+ # We scan all modules directly listed in our SOURCES argument, and skip
+ # modules that came from dependencies in the DEPENDS list.
+ #
+ # This is written into a __test_modules__.py module that the test runner
+ # will look at.
+ set(
+ test_modules_path
+ "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}_test_modules.py"
+ )
+ file(WRITE "${test_modules_path}" "TEST_MODULES = [\n")
+ string(REPLACE "." "/" namespace_dir "${ARG_NAMESPACE}")
+ if (NOT "${namespace_dir}" STREQUAL "")
+ set(namespace_dir "${namespace_dir}/")
+ endif()
+ set(test_modules)
+ foreach(src_path IN LISTS ARG_SOURCES)
+ fb_py_compute_dest_path(
+ abs_source dest_path
+ "${src_path}" "${namespace_dir}" "${ARG_BASE_DIR}"
+ )
+ string(REPLACE "/" "." module_name "${dest_path}")
+ string(REGEX REPLACE "\\.py$" "" module_name "${module_name}")
+ list(APPEND test_modules "${module_name}")
+ file(APPEND "${test_modules_path}" " '${module_name}',\n")
+ endforeach()
+ file(APPEND "${test_modules_path}" "]\n")
+
+ # The __main__ is provided by our runner wrapper/bootstrap
+ list(APPEND ARG_SOURCES "${FB_PY_TEST_MAIN}=__main__.py")
+ list(APPEND ARG_SOURCES "${test_modules_path}=__test_modules__.py")
+
+ add_fb_python_executable(
+ "${TARGET}"
+ NAMESPACE "${ARG_NAMESPACE}"
+ BASE_DIR "${ARG_BASE_DIR}"
+ SOURCES ${ARG_SOURCES}
+ DEPENDS ${ARG_DEPENDS}
+ )
+
+ # Run test discovery after the test executable is built.
+ # This logic is based on the code for gtest_discover_tests()
+ set(ctest_file_base "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}")
+ set(ctest_include_file "${ctest_file_base}_include.cmake")
+ set(ctest_tests_file "${ctest_file_base}_tests.cmake")
+ add_custom_command(
+ TARGET "${TARGET}.GEN_PY_EXE" POST_BUILD
+ BYPRODUCTS "${ctest_tests_file}"
+ COMMAND
+ "${CMAKE_COMMAND}"
+ -D "TEST_TARGET=${TARGET}"
+ -D "TEST_INTERPRETER=${Python3_EXECUTABLE}"
+ -D "TEST_ENV=${ARG_ENV}"
+ -D "TEST_EXECUTABLE=$"
+ -D "TEST_WORKING_DIR=${ARG_WORKING_DIRECTORY}"
+ -D "TEST_LIST=${ARG_TEST_LIST}"
+ -D "TEST_PREFIX=${TARGET}::"
+ -D "TEST_PROPERTIES=${ARG_PROPERTIES}"
+ -D "CTEST_FILE=${ctest_tests_file}"
+ -P "${FB_PY_TEST_DISCOVER_SCRIPT}"
+ VERBATIM
+ )
+
+ file(
+ WRITE "${ctest_include_file}"
+ "if(EXISTS \"${ctest_tests_file}\")\n"
+ " include(\"${ctest_tests_file}\")\n"
+ "else()\n"
+ " add_test(\"${TARGET}_NOT_BUILT\" \"${TARGET}_NOT_BUILT\")\n"
+ "endif()\n"
+ )
+ set_property(
+ DIRECTORY APPEND PROPERTY TEST_INCLUDE_FILES
+ "${ctest_include_file}"
+ )
+endfunction()
+
+#
+# Define a python library.
+#
+# If you want to install a python library generated from this rule note that
+# you need to use install_fb_python_library() rather than CMake's built-in
+# install() function. This will make it available for other downstream
+# projects to use in their add_fb_python_executable() and
+# add_fb_python_library() calls. (You do still need to use `install(EXPORT)`
+# later to install the CMake exports.)
+#
+# Parameters:
+# - BASE_DIR :
+# The base directory path to strip off from each source path. All source
+# files must be inside this directory. If not specified it defaults to
+# ${CMAKE_CURRENT_SOURCE_DIR}.
+# - NAMESPACE :
+# The destination namespace where these files should be installed in python
+# binaries. If not specified, this defaults to the current relative path of
+# ${CMAKE_CURRENT_SOURCE_DIR} inside ${CMAKE_SOURCE_DIR}. e.g., a python
+# library defined in the directory repo_root/foo/bar will use a default
+# namespace of "foo.bar"
+# - SOURCES <...>:
+# The python source files.
+# You may optionally specify as source using the form: PATH=ALIAS where
+# PATH is a relative path in the source tree and ALIAS is the relative
+# path into which PATH should be rewritten. This is useful for mapping
+# an executable script to the main module in a python executable.
+# e.g.: `python/bin/watchman-wait=__main__.py`
+# - DEPENDS <...>:
+# Other python libraries that this one depends on.
+# - INSTALL_DIR :
+# The directory where this library should be installed.
+# install_fb_python_library() must still be called later to perform the
+# installation. If a relative path is given it will be treated relative to
+# ${CMAKE_INSTALL_PREFIX}
+#
+# CMake is unfortunately pretty crappy at being able to define custom build
+# rules & behaviors. It doesn't support transitive property propagation
+# between custom targets; only the built-in add_executable() and add_library()
+# targets support transitive properties.
+#
+# We hack around this janky CMake behavior by (ab)using interface libraries to
+# propagate some of the data we want between targets, without actually
+# generating a C library.
+#
+# add_fb_python_library(SOMELIB) generates the following things:
+# - An INTERFACE library rule named SOMELIB.py_lib which tracks some
+# information about transitive dependencies:
+# - the transitive set of source files in the INTERFACE_SOURCES property
+# - the transitive set of manifest files that this library depends on in
+# the INTERFACE_INCLUDE_DIRECTORIES property.
+# - A custom command that generates a SOMELIB.manifest file.
+# This file contains the mapping of source files to desired destination
+# locations in executables that depend on this library. This manifest file
+# will then be read at build-time in order to build executables.
+#
+function(add_fb_python_library LIB_NAME)
+ fb_py_check_available()
+
+ # Parse the arguments
+ # We use fb_cmake_parse_args() rather than cmake_parse_arguments() since
+ # cmake_parse_arguments() does not handle empty arguments, and it is common
+ # for callers to want to specify an empty NAMESPACE parameter.
+ set(one_value_args BASE_DIR NAMESPACE INSTALL_DIR)
+ set(multi_value_args SOURCES DEPENDS)
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+ fb_py_process_default_args(ARG_NAMESPACE ARG_BASE_DIR)
+
+ string(REPLACE "." "/" namespace_dir "${ARG_NAMESPACE}")
+ if (NOT "${namespace_dir}" STREQUAL "")
+ set(namespace_dir "${namespace_dir}/")
+ endif()
+
+ if(NOT DEFINED ARG_INSTALL_DIR)
+ set(install_dir "${FBPY_LIB_INSTALL_DIR}/")
+ elseif("${ARG_INSTALL_DIR}" STREQUAL "")
+ set(install_dir "")
+ else()
+ set(install_dir "${ARG_INSTALL_DIR}/")
+ endif()
+
+ # message(STATUS "fb py library ${LIB_NAME}: "
+ # "NS=${namespace_dir} BASE=${ARG_BASE_DIR}")
+
+ # TODO: In the future it would be nice to support pre-compiling the source
+ # files. We could emit a rule to compile each source file and emit a
+ # .pyc/.pyo file here, and then have the manifest reference the pyc/pyo
+ # files.
+
+ # Define a library target to help pass around information about the library,
+ # and propagate dependency information.
+ #
+ # CMake make a lot of assumptions that libraries are C++ libraries. To help
+ # avoid confusion we name our target "${LIB_NAME}.py_lib" rather than just
+ # "${LIB_NAME}". This helps avoid confusion if callers try to use
+ # "${LIB_NAME}" on their own as a target name. (e.g., attempting to install
+ # it directly with install(TARGETS) won't work. Callers must use
+ # install_fb_python_library() instead.)
+ add_library("${LIB_NAME}.py_lib" INTERFACE)
+
+ # Emit the manifest file.
+ #
+ # We write the manifest file to a temporary path first, then copy it with
+ # configure_file(COPYONLY). This is necessary to get CMake to understand
+ # that "${manifest_path}" is generated by the CMake configure phase,
+ # and allow using it as a dependency for add_custom_command().
+ # (https://gitlab.kitware.com/cmake/cmake/issues/16367)
+ set(manifest_path "${CMAKE_CURRENT_BINARY_DIR}/${LIB_NAME}.manifest")
+ set(tmp_manifest "${manifest_path}.tmp")
+ file(WRITE "${tmp_manifest}" "FBPY_MANIFEST 1\n")
+ set(abs_sources)
+ foreach(src_path IN LISTS ARG_SOURCES)
+ fb_py_compute_dest_path(
+ abs_source dest_path
+ "${src_path}" "${namespace_dir}" "${ARG_BASE_DIR}"
+ )
+ list(APPEND abs_sources "${abs_source}")
+ target_sources(
+ "${LIB_NAME}.py_lib" INTERFACE
+ "$"
+ "$"
+ )
+ file(
+ APPEND "${tmp_manifest}"
+ "${abs_source} :: ${dest_path}\n"
+ )
+ endforeach()
+ configure_file("${tmp_manifest}" "${manifest_path}" COPYONLY)
+
+ target_include_directories(
+ "${LIB_NAME}.py_lib" INTERFACE
+ "$"
+ "$"
+ )
+
+ # Add a target that depends on all of the source files.
+ # This is needed in case some of the source files are generated. This will
+ # ensure that these source files are brought up-to-date before we build
+ # any python binaries that depend on this library.
+ add_custom_target("${LIB_NAME}.py_sources_built" DEPENDS ${abs_sources})
+ add_dependencies("${LIB_NAME}.py_lib" "${LIB_NAME}.py_sources_built")
+
+ # Hook up library dependencies, and also make the *.py_sources_built target
+ # depend on the sources for all of our dependencies also being up-to-date.
+ foreach(dep IN LISTS ARG_DEPENDS)
+ target_link_libraries("${LIB_NAME}.py_lib" INTERFACE "${dep}.py_lib")
+
+ # Mark that our .py_sources_built target depends on each our our dependent
+ # libraries. This serves two functions:
+ # - This causes CMake to generate an error message if one of the
+ # dependencies is never defined. The target_link_libraries() call above
+ # won't complain if one of the dependencies doesn't exist (since it is
+ # intended to allow passing in file names for plain library files rather
+ # than just targets).
+ # - It ensures that sources for our depencencies are built before any
+ # executable that depends on us. Note that we depend on "${dep}.py_lib"
+ # rather than "${dep}.py_sources_built" for this purpose because the
+ # ".py_sources_built" target won't be available for imported targets.
+ add_dependencies("${LIB_NAME}.py_sources_built" "${dep}.py_lib")
+ endforeach()
+
+ # Add a custom command to help with library installation, in case
+ # install_fb_python_library() is called later for this library.
+ # add_custom_command() only works with file dependencies defined in the same
+ # CMakeLists.txt file, so we want to make sure this is defined here, rather
+ # then where install_fb_python_library() is called.
+ # This command won't be run by default, but will only be run if it is needed
+ # by a subsequent install_fb_python_library() call.
+ #
+ # This command copies the library contents into the build directory.
+ # It would be nicer if we could skip this intermediate copy, and just run
+ # make_fbpy_archive.py at install time to copy them directly to the desired
+ # installation directory. Unfortunately this is difficult to do, and seems
+ # to interfere with some of the CMake code that wants to generate a manifest
+ # of installed files.
+ set(build_install_dir "${CMAKE_CURRENT_BINARY_DIR}/${LIB_NAME}.lib_install")
+ add_custom_command(
+ OUTPUT
+ "${build_install_dir}/${LIB_NAME}.manifest"
+ COMMAND "${CMAKE_COMMAND}" -E remove_directory "${build_install_dir}"
+ COMMAND
+ "${Python3_EXECUTABLE}" "${FB_MAKE_PYTHON_ARCHIVE}" --type lib-install
+ --install-dir "${LIB_NAME}"
+ -o "${build_install_dir}/${LIB_NAME}" "${manifest_path}"
+ DEPENDS
+ "${abs_sources}"
+ "${manifest_path}"
+ "${FB_MAKE_PYTHON_ARCHIVE}"
+ )
+ add_custom_target(
+ "${LIB_NAME}.py_lib_install"
+ DEPENDS "${build_install_dir}/${LIB_NAME}.manifest"
+ )
+
+ # Set some properties to pass through the install paths to
+ # install_fb_python_library()
+ #
+ # Passing through ${build_install_dir} allows install_fb_python_library()
+ # to work even if used from a different CMakeLists.txt file than where
+ # add_fb_python_library() was called (i.e. such that
+ # ${CMAKE_CURRENT_BINARY_DIR} is different between the two calls).
+ set(abs_install_dir "${install_dir}")
+ if(NOT IS_ABSOLUTE "${abs_install_dir}")
+ set(abs_install_dir "${CMAKE_INSTALL_PREFIX}/${abs_install_dir}")
+ endif()
+ string(REGEX REPLACE "/$" "" abs_install_dir "${abs_install_dir}")
+ set_target_properties(
+ "${LIB_NAME}.py_lib_install"
+ PROPERTIES
+ INSTALL_DIR "${abs_install_dir}"
+ BUILD_INSTALL_DIR "${build_install_dir}"
+ )
+endfunction()
+
+#
+# Install an FB-style packaged python binary.
+#
+# - DESTINATION :
+# Associate the installed target files with the given export-name.
+#
+function(install_fb_python_executable TARGET)
+ # Parse the arguments
+ set(one_value_args DESTINATION)
+ set(multi_value_args)
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+
+ if(NOT DEFINED ARG_DESTINATION)
+ set(ARG_DESTINATION bin)
+ endif()
+
+ install(
+ PROGRAMS "$"
+ DESTINATION "${ARG_DESTINATION}"
+ )
+endfunction()
+
+#
+# Install a python library.
+#
+# - EXPORT :
+# Associate the installed target files with the given export-name.
+#
+# Note that unlike the built-in CMake install() function we do not accept a
+# DESTINATION parameter. Instead, use the INSTALL_DIR parameter to
+# add_fb_python_library() to set the installation location.
+#
+function(install_fb_python_library LIB_NAME)
+ set(one_value_args EXPORT)
+ fb_cmake_parse_args(ARG "" "${one_value_args}" "" "${ARGN}")
+
+ # Export our "${LIB_NAME}.py_lib" target so that it will be available to
+ # downstream projects in our installed CMake config files.
+ if(DEFINED ARG_EXPORT)
+ install(TARGETS "${LIB_NAME}.py_lib" EXPORT "${ARG_EXPORT}")
+ endif()
+
+ # add_fb_python_library() emits a .py_lib_install target that will prepare
+ # the installation directory. However, it isn't part of the "ALL" target and
+ # therefore isn't built by default.
+ #
+ # Make sure the ALL target depends on it now. We have to do this by
+ # introducing yet another custom target.
+ # Add it as a dependency to the ALL target now.
+ add_custom_target("${LIB_NAME}.py_lib_install_all" ALL)
+ add_dependencies(
+ "${LIB_NAME}.py_lib_install_all" "${LIB_NAME}.py_lib_install"
+ )
+
+ # Copy the intermediate install directory generated at build time into
+ # the desired install location.
+ get_target_property(dest_dir "${LIB_NAME}.py_lib_install" "INSTALL_DIR")
+ get_target_property(
+ build_install_dir "${LIB_NAME}.py_lib_install" "BUILD_INSTALL_DIR"
+ )
+ install(
+ DIRECTORY "${build_install_dir}/${LIB_NAME}"
+ DESTINATION "${dest_dir}"
+ )
+ install(
+ FILES "${build_install_dir}/${LIB_NAME}.manifest"
+ DESTINATION "${dest_dir}"
+ )
+endfunction()
+
+# Helper macro to process the BASE_DIR and NAMESPACE arguments for
+# add_fb_python_executable() and add_fb_python_executable()
+macro(fb_py_process_default_args NAMESPACE_VAR BASE_DIR_VAR)
+ # If the namespace was not specified, default to the relative path to the
+ # current directory (starting from the repository root).
+ if(NOT DEFINED "${NAMESPACE_VAR}")
+ file(
+ RELATIVE_PATH "${NAMESPACE_VAR}"
+ "${CMAKE_SOURCE_DIR}"
+ "${CMAKE_CURRENT_SOURCE_DIR}"
+ )
+ endif()
+
+ if(NOT DEFINED "${BASE_DIR_VAR}")
+ # If the base directory was not specified, default to the current directory
+ set("${BASE_DIR_VAR}" "${CMAKE_CURRENT_SOURCE_DIR}")
+ else()
+ # If the base directory was specified, always convert it to an
+ # absolute path.
+ get_filename_component("${BASE_DIR_VAR}" "${${BASE_DIR_VAR}}" ABSOLUTE)
+ endif()
+endmacro()
+
+function(fb_py_check_available)
+ # Make sure that Python 3 and our make_fbpy_archive.py helper script are
+ # available.
+ if(NOT Python3_EXECUTABLE)
+ if(FBPY_FIND_PYTHON_ERR)
+ message(FATAL_ERROR "Unable to find Python 3: ${FBPY_FIND_PYTHON_ERR}")
+ else()
+ message(FATAL_ERROR "Unable to find Python 3")
+ endif()
+ endif()
+
+ if (NOT FB_MAKE_PYTHON_ARCHIVE)
+ message(
+ FATAL_ERROR "unable to find make_fbpy_archive.py helper program (it "
+ "should be located in the same directory as FBPythonBinary.cmake)"
+ )
+ endif()
+endfunction()
+
+function(
+ fb_py_compute_dest_path
+ src_path_output dest_path_output src_path namespace_dir base_dir
+)
+ if("${src_path}" MATCHES "=")
+ # We want to split the string on the `=` sign, but cmake doesn't
+ # provide much in the way of helpers for this, so we rewrite the
+ # `=` sign to `;` so that we can treat it as a cmake list and
+ # then index into the components
+ string(REPLACE "=" ";" src_path_list "${src_path}")
+ list(GET src_path_list 0 src_path)
+ # Note that we ignore the `namespace_dir` in the alias case
+ # in order to allow aliasing a source to the top level `__main__.py`
+ # filename.
+ list(GET src_path_list 1 dest_path)
+ else()
+ unset(dest_path)
+ endif()
+
+ get_filename_component(abs_source "${src_path}" ABSOLUTE)
+ if(NOT DEFINED dest_path)
+ file(RELATIVE_PATH rel_src "${ARG_BASE_DIR}" "${abs_source}")
+ if("${rel_src}" MATCHES "^../")
+ message(
+ FATAL_ERROR "${LIB_NAME}: source file \"${abs_source}\" is not inside "
+ "the base directory ${ARG_BASE_DIR}"
+ )
+ endif()
+ set(dest_path "${namespace_dir}${rel_src}")
+ endif()
+
+ set("${src_path_output}" "${abs_source}" PARENT_SCOPE)
+ set("${dest_path_output}" "${dest_path}" PARENT_SCOPE)
+endfunction()
diff --git a/build/fbcode_builder/CMake/FBPythonTestAddTests.cmake b/build/fbcode_builder/CMake/FBPythonTestAddTests.cmake
new file mode 100644
index 000000000..d73c055d8
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBPythonTestAddTests.cmake
@@ -0,0 +1,59 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+# Add a command to be emitted to the CTest file
+set(ctest_script)
+function(add_command CMD)
+ set(escaped_args "")
+ foreach(arg ${ARGN})
+ # Escape all arguments using "Bracket Argument" syntax
+ # We could skip this for argument that don't contain any special
+ # characters if we wanted to make the output slightly more human-friendly.
+ set(escaped_args "${escaped_args} [==[${arg}]==]")
+ endforeach()
+ set(ctest_script "${ctest_script}${CMD}(${escaped_args})\n" PARENT_SCOPE)
+endfunction()
+
+if(NOT EXISTS "${TEST_EXECUTABLE}")
+ message(FATAL_ERROR "Test executable does not exist: ${TEST_EXECUTABLE}")
+endif()
+execute_process(
+ COMMAND ${CMAKE_COMMAND} -E env ${TEST_ENV} "${TEST_INTERPRETER}" "${TEST_EXECUTABLE}" --list-tests
+ WORKING_DIRECTORY "${TEST_WORKING_DIR}"
+ OUTPUT_VARIABLE output
+ RESULT_VARIABLE result
+)
+if(NOT "${result}" EQUAL 0)
+ string(REPLACE "\n" "\n " output "${output}")
+ message(
+ FATAL_ERROR
+ "Error running test executable: ${TEST_EXECUTABLE}\n"
+ "Output:\n"
+ " ${output}\n"
+ )
+endif()
+
+# Parse output
+string(REPLACE "\n" ";" tests_list "${output}")
+foreach(test_name ${tests_list})
+ add_command(
+ add_test
+ "${TEST_PREFIX}${test_name}"
+ ${CMAKE_COMMAND} -E env ${TEST_ENV}
+ "${TEST_INTERPRETER}" "${TEST_EXECUTABLE}" "${test_name}"
+ )
+ add_command(
+ set_tests_properties
+ "${TEST_PREFIX}${test_name}"
+ PROPERTIES
+ WORKING_DIRECTORY "${TEST_WORKING_DIR}"
+ ${TEST_PROPERTIES}
+ )
+endforeach()
+
+# Set a list of discovered tests in the parent scope, in case users
+# want access to this list as a CMake variable
+if(TEST_LIST)
+ add_command(set ${TEST_LIST} ${tests_list})
+endif()
+
+file(WRITE "${CTEST_FILE}" "${ctest_script}")
diff --git a/build/fbcode_builder/CMake/FBThriftCppLibrary.cmake b/build/fbcode_builder/CMake/FBThriftCppLibrary.cmake
new file mode 100644
index 000000000..670771a46
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBThriftCppLibrary.cmake
@@ -0,0 +1,194 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+include(FBCMakeParseArgs)
+
+# Generate a C++ library from a thrift file
+#
+# Parameters:
+# - SERVICES [ ...]
+# The names of the services defined in the thrift file.
+# - DEPENDS [ ...]
+# A list of other thrift C++ libraries that this library depends on.
+# - OPTIONS [ ...]
+# A list of options to pass to the thrift compiler.
+# - INCLUDE_DIR
+# The sub-directory where generated headers will be installed.
+# Defaults to "include" if not specified. The caller must still call
+# install() to install the thrift library if desired.
+# - THRIFT_INCLUDE_DIR
+# The sub-directory where generated headers will be installed.
+# Defaults to "${INCLUDE_DIR}/thrift-files" if not specified.
+# The caller must still call install() to install the thrift library if
+# desired.
+function(add_fbthrift_cpp_library LIB_NAME THRIFT_FILE)
+ # Parse the arguments
+ set(one_value_args INCLUDE_DIR THRIFT_INCLUDE_DIR)
+ set(multi_value_args SERVICES DEPENDS OPTIONS)
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+ if(NOT DEFINED ARG_INCLUDE_DIR)
+ set(ARG_INCLUDE_DIR "include")
+ endif()
+ if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR)
+ set(ARG_THRIFT_INCLUDE_DIR "${ARG_INCLUDE_DIR}/thrift-files")
+ endif()
+
+ get_filename_component(base ${THRIFT_FILE} NAME_WE)
+ get_filename_component(
+ output_dir
+ ${CMAKE_CURRENT_BINARY_DIR}/${THRIFT_FILE}
+ DIRECTORY
+ )
+
+ # Generate relative paths in #includes
+ file(
+ RELATIVE_PATH include_prefix
+ "${CMAKE_SOURCE_DIR}"
+ "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
+ )
+ get_filename_component(include_prefix ${include_prefix} DIRECTORY)
+
+ if (NOT "${include_prefix}" STREQUAL "")
+ list(APPEND ARG_OPTIONS "include_prefix=${include_prefix}")
+ endif()
+ # CMake 3.12 is finally getting a list(JOIN) function, but until then
+ # treating the list as a string and replacing the semicolons is good enough.
+ string(REPLACE ";" "," GEN_ARG_STR "${ARG_OPTIONS}")
+
+ # Compute the list of generated files
+ list(APPEND generated_headers
+ "${output_dir}/gen-cpp2/${base}_constants.h"
+ "${output_dir}/gen-cpp2/${base}_types.h"
+ "${output_dir}/gen-cpp2/${base}_types.tcc"
+ "${output_dir}/gen-cpp2/${base}_types_custom_protocol.h"
+ "${output_dir}/gen-cpp2/${base}_metadata.h"
+ )
+ list(APPEND generated_sources
+ "${output_dir}/gen-cpp2/${base}_constants.cpp"
+ "${output_dir}/gen-cpp2/${base}_data.h"
+ "${output_dir}/gen-cpp2/${base}_data.cpp"
+ "${output_dir}/gen-cpp2/${base}_types.cpp"
+ "${output_dir}/gen-cpp2/${base}_metadata.cpp"
+ )
+ foreach(service IN LISTS ARG_SERVICES)
+ list(APPEND generated_headers
+ "${output_dir}/gen-cpp2/${service}.h"
+ "${output_dir}/gen-cpp2/${service}.tcc"
+ "${output_dir}/gen-cpp2/${service}AsyncClient.h"
+ "${output_dir}/gen-cpp2/${service}_custom_protocol.h"
+ )
+ list(APPEND generated_sources
+ "${output_dir}/gen-cpp2/${service}.cpp"
+ "${output_dir}/gen-cpp2/${service}AsyncClient.cpp"
+ "${output_dir}/gen-cpp2/${service}_processmap_binary.cpp"
+ "${output_dir}/gen-cpp2/${service}_processmap_compact.cpp"
+ )
+ endforeach()
+
+ # This generator expression gets the list of include directories required
+ # for all of our dependencies.
+ # It requires using COMMAND_EXPAND_LISTS in the add_custom_command() call
+ # below. COMMAND_EXPAND_LISTS is only available in CMake 3.8+
+ # If we really had to support older versions of CMake we would probably need
+ # to use a wrapper script around the thrift compiler that could take the
+ # include list as a single argument and split it up before invoking the
+ # thrift compiler.
+ if (NOT POLICY CMP0067)
+ message(FATAL_ERROR "add_fbthrift_cpp_library() requires CMake 3.8+")
+ endif()
+ set(
+ thrift_include_options
+ "-I;$,;-I;>"
+ )
+
+ # Emit the rule to run the thrift compiler
+ add_custom_command(
+ OUTPUT
+ ${generated_headers}
+ ${generated_sources}
+ COMMAND_EXPAND_LISTS
+ COMMAND
+ "${CMAKE_COMMAND}" -E make_directory "${output_dir}"
+ COMMAND
+ "${FBTHRIFT_COMPILER}"
+ --strict
+ --gen "mstch_cpp2:${GEN_ARG_STR}"
+ "${thrift_include_options}"
+ -o "${output_dir}"
+ "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
+ WORKING_DIRECTORY
+ "${CMAKE_BINARY_DIR}"
+ MAIN_DEPENDENCY
+ "${THRIFT_FILE}"
+ DEPENDS
+ ${ARG_DEPENDS}
+ "${FBTHRIFT_COMPILER}"
+ )
+
+ # Now emit the library rule to compile the sources
+ if (BUILD_SHARED_LIBS)
+ set(LIB_TYPE SHARED)
+ else ()
+ set(LIB_TYPE STATIC)
+ endif ()
+
+ add_library(
+ "${LIB_NAME}" ${LIB_TYPE}
+ ${generated_sources}
+ )
+
+ target_include_directories(
+ "${LIB_NAME}"
+ PUBLIC
+ "$"
+ "$"
+ )
+ target_link_libraries(
+ "${LIB_NAME}"
+ PUBLIC
+ ${ARG_DEPENDS}
+ FBThrift::thriftcpp2
+ Folly::folly
+ )
+
+ # Add ${generated_headers} to the PUBLIC_HEADER property for ${LIB_NAME}
+ #
+ # This allows callers to install it using
+ # "install(TARGETS ${LIB_NAME} PUBLIC_HEADER)"
+ # However, note that CMake's PUBLIC_HEADER behavior is rather inflexible,
+ # and does have any way to preserve header directory structure. Callers
+ # must be careful to use the correct PUBLIC_HEADER DESTINATION parameter
+ # when doing this, to put the files the correct directory themselves.
+ # We define a HEADER_INSTALL_DIR property with the include directory prefix,
+ # so typically callers should specify the PUBLIC_HEADER DESTINATION as
+ # "$"
+ set_property(
+ TARGET "${LIB_NAME}"
+ PROPERTY PUBLIC_HEADER ${generated_headers}
+ )
+
+ # Define a dummy interface library to help propagate the thrift include
+ # directories between dependencies.
+ add_library("${LIB_NAME}.thrift_includes" INTERFACE)
+ target_include_directories(
+ "${LIB_NAME}.thrift_includes"
+ INTERFACE
+ "$"
+ "$"
+ )
+ foreach(dep IN LISTS ARG_DEPENDS)
+ target_link_libraries(
+ "${LIB_NAME}.thrift_includes"
+ INTERFACE "${dep}.thrift_includes"
+ )
+ endforeach()
+
+ set_target_properties(
+ "${LIB_NAME}"
+ PROPERTIES
+ EXPORT_PROPERTIES "THRIFT_INSTALL_DIR"
+ THRIFT_INSTALL_DIR "${ARG_THRIFT_INCLUDE_DIR}/${include_prefix}"
+ HEADER_INSTALL_DIR "${ARG_INCLUDE_DIR}/${include_prefix}/gen-cpp2"
+ )
+endfunction()
diff --git a/build/fbcode_builder/CMake/FBThriftLibrary.cmake b/build/fbcode_builder/CMake/FBThriftLibrary.cmake
new file mode 100644
index 000000000..e4280e2a4
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBThriftLibrary.cmake
@@ -0,0 +1,77 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+include(FBCMakeParseArgs)
+include(FBThriftPyLibrary)
+include(FBThriftCppLibrary)
+
+#
+# add_fbthrift_library()
+#
+# This is a convenience function that generates thrift libraries for multiple
+# languages.
+#
+# For example:
+# add_fbthrift_library(
+# foo foo.thrift
+# LANGUAGES cpp py
+# SERVICES Foo
+# DEPENDS bar)
+#
+# will be expanded into two separate calls:
+#
+# add_fbthrift_cpp_library(foo_cpp foo.thrift SERVICES Foo DEPENDS bar_cpp)
+# add_fbthrift_py_library(foo_py foo.thrift SERVICES Foo DEPENDS bar_py)
+#
+function(add_fbthrift_library LIB_NAME THRIFT_FILE)
+ # Parse the arguments
+ set(one_value_args PY_NAMESPACE INCLUDE_DIR THRIFT_INCLUDE_DIR)
+ set(multi_value_args SERVICES DEPENDS LANGUAGES CPP_OPTIONS PY_OPTIONS)
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+
+ if(NOT DEFINED ARG_INCLUDE_DIR)
+ set(ARG_INCLUDE_DIR "include")
+ endif()
+ if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR)
+ set(ARG_THRIFT_INCLUDE_DIR "${ARG_INCLUDE_DIR}/thrift-files")
+ endif()
+
+ # CMake 3.12+ adds list(TRANSFORM) which would be nice to use here, but for
+ # now we still want to support older versions of CMake.
+ set(CPP_DEPENDS)
+ set(PY_DEPENDS)
+ foreach(dep IN LISTS ARG_DEPENDS)
+ list(APPEND CPP_DEPENDS "${dep}_cpp")
+ list(APPEND PY_DEPENDS "${dep}_py")
+ endforeach()
+
+ foreach(lang IN LISTS ARG_LANGUAGES)
+ if ("${lang}" STREQUAL "cpp")
+ add_fbthrift_cpp_library(
+ "${LIB_NAME}_cpp" "${THRIFT_FILE}"
+ SERVICES ${ARG_SERVICES}
+ DEPENDS ${CPP_DEPENDS}
+ OPTIONS ${ARG_CPP_OPTIONS}
+ INCLUDE_DIR "${ARG_INCLUDE_DIR}"
+ THRIFT_INCLUDE_DIR "${ARG_THRIFT_INCLUDE_DIR}"
+ )
+ elseif ("${lang}" STREQUAL "py" OR "${lang}" STREQUAL "python")
+ if (DEFINED ARG_PY_NAMESPACE)
+ set(namespace_args NAMESPACE "${ARG_PY_NAMESPACE}")
+ endif()
+ add_fbthrift_py_library(
+ "${LIB_NAME}_py" "${THRIFT_FILE}"
+ SERVICES ${ARG_SERVICES}
+ ${namespace_args}
+ DEPENDS ${PY_DEPENDS}
+ OPTIONS ${ARG_PY_OPTIONS}
+ THRIFT_INCLUDE_DIR "${ARG_THRIFT_INCLUDE_DIR}"
+ )
+ else()
+ message(
+ FATAL_ERROR "unknown language for thrift library ${LIB_NAME}: ${lang}"
+ )
+ endif()
+ endforeach()
+endfunction()
diff --git a/build/fbcode_builder/CMake/FBThriftPyLibrary.cmake b/build/fbcode_builder/CMake/FBThriftPyLibrary.cmake
new file mode 100644
index 000000000..7bd8879ee
--- /dev/null
+++ b/build/fbcode_builder/CMake/FBThriftPyLibrary.cmake
@@ -0,0 +1,111 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+include(FBCMakeParseArgs)
+include(FBPythonBinary)
+
+# Generate a Python library from a thrift file
+function(add_fbthrift_py_library LIB_NAME THRIFT_FILE)
+ # Parse the arguments
+ set(one_value_args NAMESPACE THRIFT_INCLUDE_DIR)
+ set(multi_value_args SERVICES DEPENDS OPTIONS)
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+
+ if(NOT DEFINED ARG_THRIFT_INCLUDE_DIR)
+ set(ARG_THRIFT_INCLUDE_DIR "include/thrift-files")
+ endif()
+
+ get_filename_component(base ${THRIFT_FILE} NAME_WE)
+ set(output_dir "${CMAKE_CURRENT_BINARY_DIR}/${THRIFT_FILE}-py")
+
+ # Parse the namespace value
+ if (NOT DEFINED ARG_NAMESPACE)
+ set(ARG_NAMESPACE "${base}")
+ endif()
+
+ string(REPLACE "." "/" namespace_dir "${ARG_NAMESPACE}")
+ set(py_output_dir "${output_dir}/gen-py/${namespace_dir}")
+ list(APPEND generated_sources
+ "${py_output_dir}/__init__.py"
+ "${py_output_dir}/ttypes.py"
+ "${py_output_dir}/constants.py"
+ )
+ foreach(service IN LISTS ARG_SERVICES)
+ list(APPEND generated_sources
+ ${py_output_dir}/${service}.py
+ )
+ endforeach()
+
+ # Define a dummy interface library to help propagate the thrift include
+ # directories between dependencies.
+ add_library("${LIB_NAME}.thrift_includes" INTERFACE)
+ target_include_directories(
+ "${LIB_NAME}.thrift_includes"
+ INTERFACE
+ "$"
+ "$"
+ )
+ foreach(dep IN LISTS ARG_DEPENDS)
+ target_link_libraries(
+ "${LIB_NAME}.thrift_includes"
+ INTERFACE "${dep}.thrift_includes"
+ )
+ endforeach()
+
+ # This generator expression gets the list of include directories required
+ # for all of our dependencies.
+ # It requires using COMMAND_EXPAND_LISTS in the add_custom_command() call
+ # below. COMMAND_EXPAND_LISTS is only available in CMake 3.8+
+ # If we really had to support older versions of CMake we would probably need
+ # to use a wrapper script around the thrift compiler that could take the
+ # include list as a single argument and split it up before invoking the
+ # thrift compiler.
+ if (NOT POLICY CMP0067)
+ message(FATAL_ERROR "add_fbthrift_py_library() requires CMake 3.8+")
+ endif()
+ set(
+ thrift_include_options
+ "-I;$,;-I;>"
+ )
+
+ # Always force generation of "new-style" python classes for Python 2
+ list(APPEND ARG_OPTIONS "new_style")
+ # CMake 3.12 is finally getting a list(JOIN) function, but until then
+ # treating the list as a string and replacing the semicolons is good enough.
+ string(REPLACE ";" "," GEN_ARG_STR "${ARG_OPTIONS}")
+
+ # Emit the rule to run the thrift compiler
+ add_custom_command(
+ OUTPUT
+ ${generated_sources}
+ COMMAND_EXPAND_LISTS
+ COMMAND
+ "${CMAKE_COMMAND}" -E make_directory "${output_dir}"
+ COMMAND
+ "${FBTHRIFT_COMPILER}"
+ --strict
+ --gen "py:${GEN_ARG_STR}"
+ "${thrift_include_options}"
+ -o "${output_dir}"
+ "${CMAKE_CURRENT_SOURCE_DIR}/${THRIFT_FILE}"
+ WORKING_DIRECTORY
+ "${CMAKE_BINARY_DIR}"
+ MAIN_DEPENDENCY
+ "${THRIFT_FILE}"
+ DEPENDS
+ "${FBTHRIFT_COMPILER}"
+ )
+
+ # We always want to pass the namespace as "" to this call:
+ # thrift will already emit the files with the desired namespace prefix under
+ # gen-py. We don't want add_fb_python_library() to prepend the namespace a
+ # second time.
+ add_fb_python_library(
+ "${LIB_NAME}"
+ BASE_DIR "${output_dir}/gen-py"
+ NAMESPACE ""
+ SOURCES ${generated_sources}
+ DEPENDS ${ARG_DEPENDS} FBThrift::thrift_py
+ )
+endfunction()
diff --git a/build/fbcode_builder/CMake/FindGMock.cmake b/build/fbcode_builder/CMake/FindGMock.cmake
new file mode 100644
index 000000000..cd042dd9c
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindGMock.cmake
@@ -0,0 +1,80 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+# Find libgmock
+#
+# LIBGMOCK_DEFINES - List of defines when using libgmock.
+# LIBGMOCK_INCLUDE_DIR - where to find gmock/gmock.h, etc.
+# LIBGMOCK_LIBRARIES - List of libraries when using libgmock.
+# LIBGMOCK_FOUND - True if libgmock found.
+
+IF (LIBGMOCK_INCLUDE_DIR)
+ # Already in cache, be silent
+ SET(LIBGMOCK_FIND_QUIETLY TRUE)
+ENDIF ()
+
+find_package(GTest CONFIG QUIET)
+if (TARGET GTest::gmock)
+ get_target_property(LIBGMOCK_DEFINES GTest::gtest INTERFACE_COMPILE_DEFINITIONS)
+ if (NOT ${LIBGMOCK_DEFINES})
+ # Explicitly set to empty string if not found to avoid it being
+ # set to NOTFOUND and breaking compilation
+ set(LIBGMOCK_DEFINES "")
+ endif()
+ get_target_property(LIBGMOCK_INCLUDE_DIR GTest::gtest INTERFACE_INCLUDE_DIRECTORIES)
+ set(LIBGMOCK_LIBRARIES GTest::gmock_main GTest::gmock GTest::gtest)
+ set(LIBGMOCK_FOUND ON)
+ message(STATUS "Found gmock via config, defines=${LIBGMOCK_DEFINES}, include=${LIBGMOCK_INCLUDE_DIR}, libs=${LIBGMOCK_LIBRARIES}")
+else()
+
+ FIND_PATH(LIBGMOCK_INCLUDE_DIR gmock/gmock.h)
+
+ FIND_LIBRARY(LIBGMOCK_MAIN_LIBRARY_DEBUG NAMES gmock_maind)
+ FIND_LIBRARY(LIBGMOCK_MAIN_LIBRARY_RELEASE NAMES gmock_main)
+ FIND_LIBRARY(LIBGMOCK_LIBRARY_DEBUG NAMES gmockd)
+ FIND_LIBRARY(LIBGMOCK_LIBRARY_RELEASE NAMES gmock)
+ FIND_LIBRARY(LIBGTEST_LIBRARY_DEBUG NAMES gtestd)
+ FIND_LIBRARY(LIBGTEST_LIBRARY_RELEASE NAMES gtest)
+
+ find_package(Threads REQUIRED)
+ INCLUDE(SelectLibraryConfigurations)
+ SELECT_LIBRARY_CONFIGURATIONS(LIBGMOCK_MAIN)
+ SELECT_LIBRARY_CONFIGURATIONS(LIBGMOCK)
+ SELECT_LIBRARY_CONFIGURATIONS(LIBGTEST)
+
+ set(LIBGMOCK_LIBRARIES
+ ${LIBGMOCK_MAIN_LIBRARY}
+ ${LIBGMOCK_LIBRARY}
+ ${LIBGTEST_LIBRARY}
+ Threads::Threads
+ )
+
+ if(CMAKE_SYSTEM_NAME STREQUAL "Windows")
+ # The GTEST_LINKED_AS_SHARED_LIBRARY macro must be set properly on Windows.
+ #
+ # There isn't currently an easy way to determine if a library was compiled as
+ # a shared library on Windows, so just assume we've been built against a
+ # shared build of gmock for now.
+ SET(LIBGMOCK_DEFINES "GTEST_LINKED_AS_SHARED_LIBRARY=1" CACHE STRING "")
+ endif()
+
+ # handle the QUIETLY and REQUIRED arguments and set LIBGMOCK_FOUND to TRUE if
+ # all listed variables are TRUE
+ INCLUDE(FindPackageHandleStandardArgs)
+ FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ GMock
+ DEFAULT_MSG
+ LIBGMOCK_MAIN_LIBRARY
+ LIBGMOCK_LIBRARY
+ LIBGTEST_LIBRARY
+ LIBGMOCK_LIBRARIES
+ LIBGMOCK_INCLUDE_DIR
+ )
+
+ MARK_AS_ADVANCED(
+ LIBGMOCK_DEFINES
+ LIBGMOCK_MAIN_LIBRARY
+ LIBGMOCK_LIBRARY
+ LIBGTEST_LIBRARY
+ LIBGMOCK_LIBRARIES
+ LIBGMOCK_INCLUDE_DIR
+ )
+endif()
diff --git a/build/fbcode_builder/CMake/FindGflags.cmake b/build/fbcode_builder/CMake/FindGflags.cmake
new file mode 100644
index 000000000..c00896a34
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindGflags.cmake
@@ -0,0 +1,105 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+# Find libgflags.
+# There's a lot of compatibility cruft going on in here, both
+# to deal with changes across the FB consumers of this and also
+# to deal with variances in behavior of cmake itself.
+#
+# Since this file is named FindGflags.cmake the cmake convention
+# is for the module to export both GFLAGS_FOUND and Gflags_FOUND.
+# The convention expected by consumers is that we export the
+# following variables, even though these do not match the cmake
+# conventions:
+#
+# LIBGFLAGS_INCLUDE_DIR - where to find gflags/gflags.h, etc.
+# LIBGFLAGS_LIBRARY - List of libraries when using libgflags.
+# LIBGFLAGS_FOUND - True if libgflags found.
+#
+# We need to be able to locate gflags both from an installed
+# cmake config file and just from the raw headers and libs, so
+# test for the former and then the latter, and then stick
+# the results together and export them into the variables
+# listed above.
+#
+# For forwards compatibility, we export the following variables:
+#
+# gflags_INCLUDE_DIR - where to find gflags/gflags.h, etc.
+# gflags_TARGET / GFLAGS_TARGET / gflags_LIBRARIES
+# - List of libraries when using libgflags.
+# gflags_FOUND - True if libgflags found.
+#
+
+IF (LIBGFLAGS_INCLUDE_DIR)
+ # Already in cache, be silent
+ SET(Gflags_FIND_QUIETLY TRUE)
+ENDIF ()
+
+find_package(gflags CONFIG QUIET)
+if (gflags_FOUND)
+ if (NOT Gflags_FIND_QUIETLY)
+ message(STATUS "Found gflags from package config ${gflags_CONFIG}")
+ endif()
+ # Re-export the config-specified libs with our local names
+ set(LIBGFLAGS_LIBRARY ${gflags_LIBRARIES})
+ set(LIBGFLAGS_INCLUDE_DIR ${gflags_INCLUDE_DIR})
+ if(NOT EXISTS "${gflags_INCLUDE_DIR}")
+ # The gflags-devel RPM on recent RedHat-based systems is somewhat broken.
+ # RedHat symlinks /lib64 to /usr/lib64, and this breaks some of the
+ # relative path computation performed in gflags-config.cmake. The package
+ # config file ends up being found via /lib64, but the relative path
+ # computation it does only works if it was found in /usr/lib64.
+ # If gflags_INCLUDE_DIR does not actually exist, simply default it to
+ # /usr/include on these systems.
+ set(LIBGFLAGS_INCLUDE_DIR "/usr/include")
+ endif()
+ set(LIBGFLAGS_FOUND ${gflags_FOUND})
+ # cmake module compat
+ set(GFLAGS_FOUND ${gflags_FOUND})
+ set(Gflags_FOUND ${gflags_FOUND})
+else()
+ FIND_PATH(LIBGFLAGS_INCLUDE_DIR gflags/gflags.h)
+
+ FIND_LIBRARY(LIBGFLAGS_LIBRARY_DEBUG NAMES gflagsd gflags_staticd)
+ FIND_LIBRARY(LIBGFLAGS_LIBRARY_RELEASE NAMES gflags gflags_static)
+
+ INCLUDE(SelectLibraryConfigurations)
+ SELECT_LIBRARY_CONFIGURATIONS(LIBGFLAGS)
+
+ # handle the QUIETLY and REQUIRED arguments and set LIBGFLAGS_FOUND to TRUE if
+ # all listed variables are TRUE
+ INCLUDE(FindPackageHandleStandardArgs)
+ FIND_PACKAGE_HANDLE_STANDARD_ARGS(gflags DEFAULT_MSG LIBGFLAGS_LIBRARY LIBGFLAGS_INCLUDE_DIR)
+ # cmake module compat
+ set(Gflags_FOUND ${GFLAGS_FOUND})
+ # compat with some existing FindGflags consumers
+ set(LIBGFLAGS_FOUND ${GFLAGS_FOUND})
+
+ # Compat with the gflags CONFIG based detection
+ set(gflags_FOUND ${GFLAGS_FOUND})
+ set(gflags_INCLUDE_DIR ${LIBGFLAGS_INCLUDE_DIR})
+ set(gflags_LIBRARIES ${LIBGFLAGS_LIBRARY})
+ set(GFLAGS_TARGET ${LIBGFLAGS_LIBRARY})
+ set(gflags_TARGET ${LIBGFLAGS_LIBRARY})
+
+ MARK_AS_ADVANCED(LIBGFLAGS_LIBRARY LIBGFLAGS_INCLUDE_DIR)
+endif()
+
+# Compat with the gflags CONFIG based detection
+if (LIBGFLAGS_FOUND AND NOT TARGET gflags)
+ add_library(gflags UNKNOWN IMPORTED)
+ if(TARGET gflags-shared)
+ # If the installed gflags CMake package config defines a gflags-shared
+ # target but not gflags, just make the gflags target that we define
+ # depend on the gflags-shared target.
+ target_link_libraries(gflags INTERFACE gflags-shared)
+ # Export LIBGFLAGS_LIBRARY as the gflags-shared target in this case.
+ set(LIBGFLAGS_LIBRARY gflags-shared)
+ else()
+ set_target_properties(
+ gflags
+ PROPERTIES
+ IMPORTED_LINK_INTERFACE_LANGUAGES "C"
+ IMPORTED_LOCATION "${LIBGFLAGS_LIBRARY}"
+ INTERFACE_INCLUDE_DIRECTORIES "${LIBGFLAGS_INCLUDE_DIR}"
+ )
+ endif()
+endif()
diff --git a/build/fbcode_builder/CMake/FindGlog.cmake b/build/fbcode_builder/CMake/FindGlog.cmake
new file mode 100644
index 000000000..752647cb3
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindGlog.cmake
@@ -0,0 +1,37 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+# - Try to find Glog
+# Once done, this will define
+#
+# GLOG_FOUND - system has Glog
+# GLOG_INCLUDE_DIRS - the Glog include directories
+# GLOG_LIBRARIES - link these to use Glog
+
+include(FindPackageHandleStandardArgs)
+include(SelectLibraryConfigurations)
+
+find_library(GLOG_LIBRARY_RELEASE glog
+ PATHS ${GLOG_LIBRARYDIR})
+find_library(GLOG_LIBRARY_DEBUG glogd
+ PATHS ${GLOG_LIBRARYDIR})
+
+find_path(GLOG_INCLUDE_DIR glog/logging.h
+ PATHS ${GLOG_INCLUDEDIR})
+
+select_library_configurations(GLOG)
+
+find_package_handle_standard_args(glog DEFAULT_MSG
+ GLOG_LIBRARY
+ GLOG_INCLUDE_DIR)
+
+mark_as_advanced(
+ GLOG_LIBRARY
+ GLOG_INCLUDE_DIR)
+
+set(GLOG_LIBRARIES ${GLOG_LIBRARY})
+set(GLOG_INCLUDE_DIRS ${GLOG_INCLUDE_DIR})
+
+if (NOT TARGET glog::glog)
+ add_library(glog::glog UNKNOWN IMPORTED)
+ set_target_properties(glog::glog PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${GLOG_INCLUDE_DIRS}")
+ set_target_properties(glog::glog PROPERTIES IMPORTED_LINK_INTERFACE_LANGUAGES "C" IMPORTED_LOCATION "${GLOG_LIBRARIES}")
+endif()
diff --git a/build/fbcode_builder/CMake/FindLibEvent.cmake b/build/fbcode_builder/CMake/FindLibEvent.cmake
new file mode 100644
index 000000000..dd11ebd84
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindLibEvent.cmake
@@ -0,0 +1,77 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+# - Find LibEvent (a cross event library)
+# This module defines
+# LIBEVENT_INCLUDE_DIR, where to find LibEvent headers
+# LIBEVENT_LIB, LibEvent libraries
+# LibEvent_FOUND, If false, do not try to use libevent
+
+set(LibEvent_EXTRA_PREFIXES /usr/local /opt/local "$ENV{HOME}")
+foreach(prefix ${LibEvent_EXTRA_PREFIXES})
+ list(APPEND LibEvent_INCLUDE_PATHS "${prefix}/include")
+ list(APPEND LibEvent_LIB_PATHS "${prefix}/lib")
+endforeach()
+
+find_package(Libevent CONFIG QUIET)
+if (TARGET event)
+ # Re-export the config under our own names
+
+ # Somewhat gross, but some vcpkg installed libevents have a relative
+ # `include` path exported into LIBEVENT_INCLUDE_DIRS, which triggers
+ # a cmake error because it resolves to the `include` dir within the
+ # folly repo, which is not something cmake allows to be in the
+ # INTERFACE_INCLUDE_DIRECTORIES. Thankfully on such a system the
+ # actual include directory is already part of the global include
+ # directories, so we can just skip it.
+ if (NOT "${LIBEVENT_INCLUDE_DIRS}" STREQUAL "include")
+ set(LIBEVENT_INCLUDE_DIR ${LIBEVENT_INCLUDE_DIRS})
+ else()
+ set(LIBEVENT_INCLUDE_DIR)
+ endif()
+
+ # Unfortunately, with a bare target name `event`, downstream consumers
+ # of the package that depends on `Libevent` located via CONFIG end
+ # up exporting just a bare `event` in their libraries. This is problematic
+ # because this in interpreted as just `-levent` with no library path.
+ # When libevent is not installed in the default installation prefix
+ # this results in linker errors.
+ # To resolve this, we ask cmake to lookup the full path to the library
+ # and use that instead.
+ cmake_policy(PUSH)
+ if(POLICY CMP0026)
+ # Allow reading the LOCATION property
+ cmake_policy(SET CMP0026 OLD)
+ endif()
+ get_target_property(LIBEVENT_LIB event LOCATION)
+ cmake_policy(POP)
+
+ set(LibEvent_FOUND ${Libevent_FOUND})
+ if (NOT LibEvent_FIND_QUIETLY)
+ message(STATUS "Found libevent from package config include=${LIBEVENT_INCLUDE_DIRS} lib=${LIBEVENT_LIB}")
+ endif()
+else()
+ find_path(LIBEVENT_INCLUDE_DIR event.h PATHS ${LibEvent_INCLUDE_PATHS})
+ find_library(LIBEVENT_LIB NAMES event PATHS ${LibEvent_LIB_PATHS})
+
+ if (LIBEVENT_LIB AND LIBEVENT_INCLUDE_DIR)
+ set(LibEvent_FOUND TRUE)
+ set(LIBEVENT_LIB ${LIBEVENT_LIB})
+ else ()
+ set(LibEvent_FOUND FALSE)
+ endif ()
+
+ if (LibEvent_FOUND)
+ if (NOT LibEvent_FIND_QUIETLY)
+ message(STATUS "Found libevent: ${LIBEVENT_LIB}")
+ endif ()
+ else ()
+ if (LibEvent_FIND_REQUIRED)
+ message(FATAL_ERROR "Could NOT find libevent.")
+ endif ()
+ message(STATUS "libevent NOT found.")
+ endif ()
+
+ mark_as_advanced(
+ LIBEVENT_LIB
+ LIBEVENT_INCLUDE_DIR
+ )
+endif()
diff --git a/build/fbcode_builder/CMake/FindLibUnwind.cmake b/build/fbcode_builder/CMake/FindLibUnwind.cmake
new file mode 100644
index 000000000..b01a674a5
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindLibUnwind.cmake
@@ -0,0 +1,29 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+find_path(LIBUNWIND_INCLUDE_DIR NAMES libunwind.h)
+mark_as_advanced(LIBUNWIND_INCLUDE_DIR)
+
+find_library(LIBUNWIND_LIBRARY NAMES unwind)
+mark_as_advanced(LIBUNWIND_LIBRARY)
+
+include(FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ LIBUNWIND
+ REQUIRED_VARS LIBUNWIND_LIBRARY LIBUNWIND_INCLUDE_DIR)
+
+if(LIBUNWIND_FOUND)
+ set(LIBUNWIND_LIBRARIES ${LIBUNWIND_LIBRARY})
+ set(LIBUNWIND_INCLUDE_DIRS ${LIBUNWIND_INCLUDE_DIR})
+endif()
diff --git a/build/fbcode_builder/CMake/FindPCRE.cmake b/build/fbcode_builder/CMake/FindPCRE.cmake
new file mode 100644
index 000000000..32ccb3725
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindPCRE.cmake
@@ -0,0 +1,11 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+include(FindPackageHandleStandardArgs)
+find_path(PCRE_INCLUDE_DIR NAMES pcre.h)
+find_library(PCRE_LIBRARY NAMES pcre)
+find_package_handle_standard_args(
+ PCRE
+ DEFAULT_MSG
+ PCRE_LIBRARY
+ PCRE_INCLUDE_DIR
+)
+mark_as_advanced(PCRE_INCLUDE_DIR PCRE_LIBRARY)
diff --git a/build/fbcode_builder/CMake/FindRe2.cmake b/build/fbcode_builder/CMake/FindRe2.cmake
new file mode 100644
index 000000000..013ae7761
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindRe2.cmake
@@ -0,0 +1,20 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2.
+
+find_library(RE2_LIBRARY re2)
+mark_as_advanced(RE2_LIBRARY)
+
+find_path(RE2_INCLUDE_DIR NAMES re2/re2.h)
+mark_as_advanced(RE2_INCLUDE_DIR)
+
+include(FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ RE2
+ REQUIRED_VARS RE2_LIBRARY RE2_INCLUDE_DIR)
+
+if(RE2_FOUND)
+ set(RE2_LIBRARY ${RE2_LIBRARY})
+ set(RE2_INCLUDE_DIR, ${RE2_INCLUDE_DIR})
+endif()
diff --git a/build/fbcode_builder/CMake/FindSodium.cmake b/build/fbcode_builder/CMake/FindSodium.cmake
new file mode 100644
index 000000000..3c3f1245c
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindSodium.cmake
@@ -0,0 +1,297 @@
+# Written in 2016 by Henrik Steffen Gaßmann
+#
+# To the extent possible under law, the author(s) have dedicated all
+# copyright and related and neighboring rights to this software to the
+# public domain worldwide. This software is distributed without any warranty.
+#
+# You should have received a copy of the CC0 Public Domain Dedication
+# along with this software. If not, see
+#
+# http://creativecommons.org/publicdomain/zero/1.0/
+#
+########################################################################
+# Tries to find the local libsodium installation.
+#
+# On Windows the sodium_DIR environment variable is used as a default
+# hint which can be overridden by setting the corresponding cmake variable.
+#
+# Once done the following variables will be defined:
+#
+# sodium_FOUND
+# sodium_INCLUDE_DIR
+# sodium_LIBRARY_DEBUG
+# sodium_LIBRARY_RELEASE
+#
+#
+# Furthermore an imported "sodium" target is created.
+#
+
+if (CMAKE_C_COMPILER_ID STREQUAL "GNU"
+ OR CMAKE_C_COMPILER_ID STREQUAL "Clang")
+ set(_GCC_COMPATIBLE 1)
+endif()
+
+# static library option
+if (NOT DEFINED sodium_USE_STATIC_LIBS)
+ option(sodium_USE_STATIC_LIBS "enable to statically link against sodium" OFF)
+endif()
+if(NOT (sodium_USE_STATIC_LIBS EQUAL sodium_USE_STATIC_LIBS_LAST))
+ unset(sodium_LIBRARY CACHE)
+ unset(sodium_LIBRARY_DEBUG CACHE)
+ unset(sodium_LIBRARY_RELEASE CACHE)
+ unset(sodium_DLL_DEBUG CACHE)
+ unset(sodium_DLL_RELEASE CACHE)
+ set(sodium_USE_STATIC_LIBS_LAST ${sodium_USE_STATIC_LIBS} CACHE INTERNAL "internal change tracking variable")
+endif()
+
+
+########################################################################
+# UNIX
+if (UNIX)
+ # import pkg-config
+ find_package(PkgConfig QUIET)
+ if (PKG_CONFIG_FOUND)
+ pkg_check_modules(sodium_PKG QUIET libsodium)
+ endif()
+
+ if(sodium_USE_STATIC_LIBS)
+ foreach(_libname ${sodium_PKG_STATIC_LIBRARIES})
+ if (NOT _libname MATCHES "^lib.*\\.a$") # ignore strings already ending with .a
+ list(INSERT sodium_PKG_STATIC_LIBRARIES 0 "lib${_libname}.a")
+ endif()
+ endforeach()
+ list(REMOVE_DUPLICATES sodium_PKG_STATIC_LIBRARIES)
+
+ # if pkgconfig for libsodium doesn't provide
+ # static lib info, then override PKG_STATIC here..
+ if (NOT sodium_PKG_STATIC_FOUND)
+ set(sodium_PKG_STATIC_LIBRARIES libsodium.a)
+ endif()
+
+ set(XPREFIX sodium_PKG_STATIC)
+ else()
+ if (NOT sodium_PKG_FOUND)
+ set(sodium_PKG_LIBRARIES sodium)
+ endif()
+
+ set(XPREFIX sodium_PKG)
+ endif()
+
+ find_path(sodium_INCLUDE_DIR sodium.h
+ HINTS ${${XPREFIX}_INCLUDE_DIRS}
+ )
+ find_library(sodium_LIBRARY_DEBUG NAMES ${${XPREFIX}_LIBRARIES}
+ HINTS ${${XPREFIX}_LIBRARY_DIRS}
+ )
+ find_library(sodium_LIBRARY_RELEASE NAMES ${${XPREFIX}_LIBRARIES}
+ HINTS ${${XPREFIX}_LIBRARY_DIRS}
+ )
+
+
+########################################################################
+# Windows
+elseif (WIN32)
+ set(sodium_DIR "$ENV{sodium_DIR}" CACHE FILEPATH "sodium install directory")
+ mark_as_advanced(sodium_DIR)
+
+ find_path(sodium_INCLUDE_DIR sodium.h
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES include
+ )
+
+ if (MSVC)
+ # detect target architecture
+ file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/arch.cpp" [=[
+ #if defined _M_IX86
+ #error ARCH_VALUE x86_32
+ #elif defined _M_X64
+ #error ARCH_VALUE x86_64
+ #endif
+ #error ARCH_VALUE unknown
+ ]=])
+ try_compile(_UNUSED_VAR "${CMAKE_CURRENT_BINARY_DIR}" "${CMAKE_CURRENT_BINARY_DIR}/arch.cpp"
+ OUTPUT_VARIABLE _COMPILATION_LOG
+ )
+ string(REGEX REPLACE ".*ARCH_VALUE ([a-zA-Z0-9_]+).*" "\\1" _TARGET_ARCH "${_COMPILATION_LOG}")
+
+ # construct library path
+ if (_TARGET_ARCH STREQUAL "x86_32")
+ string(APPEND _PLATFORM_PATH "Win32")
+ elseif(_TARGET_ARCH STREQUAL "x86_64")
+ string(APPEND _PLATFORM_PATH "x64")
+ else()
+ message(FATAL_ERROR "the ${_TARGET_ARCH} architecture is not supported by Findsodium.cmake.")
+ endif()
+ string(APPEND _PLATFORM_PATH "/$$CONFIG$$")
+
+ if (MSVC_VERSION LESS 1900)
+ math(EXPR _VS_VERSION "${MSVC_VERSION} / 10 - 60")
+ else()
+ math(EXPR _VS_VERSION "${MSVC_VERSION} / 10 - 50")
+ endif()
+ string(APPEND _PLATFORM_PATH "/v${_VS_VERSION}")
+
+ if (sodium_USE_STATIC_LIBS)
+ string(APPEND _PLATFORM_PATH "/static")
+ else()
+ string(APPEND _PLATFORM_PATH "/dynamic")
+ endif()
+
+ string(REPLACE "$$CONFIG$$" "Debug" _DEBUG_PATH_SUFFIX "${_PLATFORM_PATH}")
+ string(REPLACE "$$CONFIG$$" "Release" _RELEASE_PATH_SUFFIX "${_PLATFORM_PATH}")
+
+ find_library(sodium_LIBRARY_DEBUG libsodium.lib
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES ${_DEBUG_PATH_SUFFIX}
+ )
+ find_library(sodium_LIBRARY_RELEASE libsodium.lib
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES ${_RELEASE_PATH_SUFFIX}
+ )
+ if (NOT sodium_USE_STATIC_LIBS)
+ set(CMAKE_FIND_LIBRARY_SUFFIXES_BCK ${CMAKE_FIND_LIBRARY_SUFFIXES})
+ set(CMAKE_FIND_LIBRARY_SUFFIXES ".dll")
+ find_library(sodium_DLL_DEBUG libsodium
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES ${_DEBUG_PATH_SUFFIX}
+ )
+ find_library(sodium_DLL_RELEASE libsodium
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES ${_RELEASE_PATH_SUFFIX}
+ )
+ set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_BCK})
+ endif()
+
+ elseif(_GCC_COMPATIBLE)
+ if (sodium_USE_STATIC_LIBS)
+ find_library(sodium_LIBRARY_DEBUG libsodium.a
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES lib
+ )
+ find_library(sodium_LIBRARY_RELEASE libsodium.a
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES lib
+ )
+ else()
+ find_library(sodium_LIBRARY_DEBUG libsodium.dll.a
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES lib
+ )
+ find_library(sodium_LIBRARY_RELEASE libsodium.dll.a
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES lib
+ )
+
+ file(GLOB _DLL
+ LIST_DIRECTORIES false
+ RELATIVE "${sodium_DIR}/bin"
+ "${sodium_DIR}/bin/libsodium*.dll"
+ )
+ find_library(sodium_DLL_DEBUG ${_DLL} libsodium
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES bin
+ )
+ find_library(sodium_DLL_RELEASE ${_DLL} libsodium
+ HINTS ${sodium_DIR}
+ PATH_SUFFIXES bin
+ )
+ endif()
+ else()
+ message(FATAL_ERROR "this platform is not supported by FindSodium.cmake")
+ endif()
+
+
+########################################################################
+# unsupported
+else()
+ message(FATAL_ERROR "this platform is not supported by FindSodium.cmake")
+endif()
+
+
+########################################################################
+# common stuff
+
+# extract sodium version
+if (sodium_INCLUDE_DIR)
+ set(_VERSION_HEADER "${_INCLUDE_DIR}/sodium/version.h")
+ if (EXISTS _VERSION_HEADER)
+ file(READ "${_VERSION_HEADER}" _VERSION_HEADER_CONTENT)
+ string(REGEX REPLACE ".*#[ \t]*define[ \t]*SODIUM_VERSION_STRING[ \t]*\"([^\n]*)\".*" "\\1"
+ sodium_VERSION "${_VERSION_HEADER_CONTENT}")
+ set(sodium_VERSION "${sodium_VERSION}" PARENT_SCOPE)
+ endif()
+endif()
+
+# communicate results
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(
+ Sodium # The name must be either uppercase or match the filename case.
+ REQUIRED_VARS
+ sodium_LIBRARY_RELEASE
+ sodium_LIBRARY_DEBUG
+ sodium_INCLUDE_DIR
+ VERSION_VAR
+ sodium_VERSION
+)
+
+if(Sodium_FOUND)
+ set(sodium_LIBRARIES
+ optimized ${sodium_LIBRARY_RELEASE} debug ${sodium_LIBRARY_DEBUG})
+endif()
+
+# mark file paths as advanced
+mark_as_advanced(sodium_INCLUDE_DIR)
+mark_as_advanced(sodium_LIBRARY_DEBUG)
+mark_as_advanced(sodium_LIBRARY_RELEASE)
+if (WIN32)
+ mark_as_advanced(sodium_DLL_DEBUG)
+ mark_as_advanced(sodium_DLL_RELEASE)
+endif()
+
+# create imported target
+if(sodium_USE_STATIC_LIBS)
+ set(_LIB_TYPE STATIC)
+else()
+ set(_LIB_TYPE SHARED)
+endif()
+
+if(NOT TARGET sodium)
+ add_library(sodium ${_LIB_TYPE} IMPORTED)
+endif()
+
+set_target_properties(sodium PROPERTIES
+ INTERFACE_INCLUDE_DIRECTORIES "${sodium_INCLUDE_DIR}"
+ IMPORTED_LINK_INTERFACE_LANGUAGES "C"
+)
+
+if (sodium_USE_STATIC_LIBS)
+ set_target_properties(sodium PROPERTIES
+ INTERFACE_COMPILE_DEFINITIONS "SODIUM_STATIC"
+ IMPORTED_LOCATION "${sodium_LIBRARY_RELEASE}"
+ IMPORTED_LOCATION_DEBUG "${sodium_LIBRARY_DEBUG}"
+ )
+else()
+ if (UNIX)
+ set_target_properties(sodium PROPERTIES
+ IMPORTED_LOCATION "${sodium_LIBRARY_RELEASE}"
+ IMPORTED_LOCATION_DEBUG "${sodium_LIBRARY_DEBUG}"
+ )
+ elseif (WIN32)
+ set_target_properties(sodium PROPERTIES
+ IMPORTED_IMPLIB "${sodium_LIBRARY_RELEASE}"
+ IMPORTED_IMPLIB_DEBUG "${sodium_LIBRARY_DEBUG}"
+ )
+ if (NOT (sodium_DLL_DEBUG MATCHES ".*-NOTFOUND"))
+ set_target_properties(sodium PROPERTIES
+ IMPORTED_LOCATION_DEBUG "${sodium_DLL_DEBUG}"
+ )
+ endif()
+ if (NOT (sodium_DLL_RELEASE MATCHES ".*-NOTFOUND"))
+ set_target_properties(sodium PROPERTIES
+ IMPORTED_LOCATION_RELWITHDEBINFO "${sodium_DLL_RELEASE}"
+ IMPORTED_LOCATION_MINSIZEREL "${sodium_DLL_RELEASE}"
+ IMPORTED_LOCATION_RELEASE "${sodium_DLL_RELEASE}"
+ )
+ endif()
+ endif()
+endif()
diff --git a/build/fbcode_builder/CMake/FindZstd.cmake b/build/fbcode_builder/CMake/FindZstd.cmake
new file mode 100644
index 000000000..89300ddfd
--- /dev/null
+++ b/build/fbcode_builder/CMake/FindZstd.cmake
@@ -0,0 +1,41 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# - Try to find Facebook zstd library
+# This will define
+# ZSTD_FOUND
+# ZSTD_INCLUDE_DIR
+# ZSTD_LIBRARY
+#
+
+find_path(ZSTD_INCLUDE_DIR NAMES zstd.h)
+
+find_library(ZSTD_LIBRARY_DEBUG NAMES zstdd zstd_staticd)
+find_library(ZSTD_LIBRARY_RELEASE NAMES zstd zstd_static)
+
+include(SelectLibraryConfigurations)
+SELECT_LIBRARY_CONFIGURATIONS(ZSTD)
+
+include(FindPackageHandleStandardArgs)
+FIND_PACKAGE_HANDLE_STANDARD_ARGS(
+ ZSTD DEFAULT_MSG
+ ZSTD_LIBRARY ZSTD_INCLUDE_DIR
+)
+
+if (ZSTD_FOUND)
+ message(STATUS "Found Zstd: ${ZSTD_LIBRARY}")
+endif()
+
+mark_as_advanced(ZSTD_INCLUDE_DIR ZSTD_LIBRARY)
diff --git a/build/fbcode_builder/CMake/RustStaticLibrary.cmake b/build/fbcode_builder/CMake/RustStaticLibrary.cmake
new file mode 100644
index 000000000..8546fe2fb
--- /dev/null
+++ b/build/fbcode_builder/CMake/RustStaticLibrary.cmake
@@ -0,0 +1,291 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+include(FBCMakeParseArgs)
+
+set(
+ USE_CARGO_VENDOR AUTO CACHE STRING
+ "Download Rust Crates from an internally vendored location"
+)
+set_property(CACHE USE_CARGO_VENDOR PROPERTY STRINGS AUTO ON OFF)
+
+set(RUST_VENDORED_CRATES_DIR "$ENV{RUST_VENDORED_CRATES_DIR}")
+if("${USE_CARGO_VENDOR}" STREQUAL "AUTO")
+ if(EXISTS "${RUST_VENDORED_CRATES_DIR}")
+ set(USE_CARGO_VENDOR ON)
+ else()
+ set(USE_CARGO_VENDOR OFF)
+ endif()
+endif()
+
+if(USE_CARGO_VENDOR)
+ if(NOT EXISTS "${RUST_VENDORED_CRATES_DIR}")
+ message(
+ FATAL "vendored rust crates not present: "
+ "${RUST_VENDORED_CRATES_DIR}"
+ )
+ endif()
+
+ set(RUST_CARGO_HOME "${CMAKE_BINARY_DIR}/_cargo_home")
+ file(MAKE_DIRECTORY "${RUST_CARGO_HOME}")
+
+ file(
+ TO_NATIVE_PATH "${RUST_VENDORED_CRATES_DIR}"
+ ESCAPED_RUST_VENDORED_CRATES_DIR
+ )
+ string(
+ REPLACE "\\" "\\\\"
+ ESCAPED_RUST_VENDORED_CRATES_DIR
+ "${ESCAPED_RUST_VENDORED_CRATES_DIR}"
+ )
+ file(
+ WRITE "${RUST_CARGO_HOME}/config"
+ "[source.crates-io]\n"
+ "replace-with = \"vendored-sources\"\n"
+ "\n"
+ "[source.vendored-sources]\n"
+ "directory = \"${ESCAPED_RUST_VENDORED_CRATES_DIR}\"\n"
+ )
+endif()
+
+# Cargo is a build system in itself, and thus will try to take advantage of all
+# the cores on the system. Unfortunately, this conflicts with Ninja, since it
+# also tries to utilize all the cores. This can lead to a system that is
+# completely overloaded with compile jobs to the point where nothing else can
+# be achieved on the system.
+#
+# Let's inform Ninja of this fact so it won't try to spawn other jobs while
+# Rust being compiled.
+set_property(GLOBAL APPEND PROPERTY JOB_POOLS rust_job_pool=1)
+
+# This function creates an interface library target based on the static library
+# built by Cargo. It will call Cargo to build a staticlib and generate a CMake
+# interface library with it.
+#
+# This function requires `find_package(Python COMPONENTS Interpreter)`.
+#
+# You need to set `lib:crate-type = ["staticlib"]` in your Cargo.toml to make
+# Cargo build static library.
+#
+# ```cmake
+# rust_static_library( [CRATE ])
+# ```
+#
+# Parameters:
+# - TARGET:
+# Name of the target name. This function will create an interface library
+# target with this name.
+# - CRATE_NAME:
+# Name of the crate. This parameter is optional. If unspecified, it will
+# fallback to `${TARGET}`.
+#
+# This function creates two targets:
+# - "${TARGET}": an interface library target contains the static library built
+# from Cargo.
+# - "${TARGET}.cargo": an internal custom target that invokes Cargo.
+#
+# If you are going to use this static library from C/C++, you will need to
+# write header files for the library (or generate with cbindgen) and bind these
+# headers with the interface library.
+#
+function(rust_static_library TARGET)
+ fb_cmake_parse_args(ARG "" "CRATE" "" "${ARGN}")
+
+ if(DEFINED ARG_CRATE)
+ set(crate_name "${ARG_CRATE}")
+ else()
+ set(crate_name "${TARGET}")
+ endif()
+
+ set(cargo_target "${TARGET}.cargo")
+ set(target_dir $,debug,release>)
+ set(staticlib_name "${CMAKE_STATIC_LIBRARY_PREFIX}${crate_name}${CMAKE_STATIC_LIBRARY_SUFFIX}")
+ set(rust_staticlib "${CMAKE_CURRENT_BINARY_DIR}/${target_dir}/${staticlib_name}")
+
+ set(cargo_cmd cargo)
+ if(WIN32)
+ set(cargo_cmd cargo.exe)
+ endif()
+
+ set(cargo_flags build $,,--release> -p ${crate_name})
+ if(USE_CARGO_VENDOR)
+ set(extra_cargo_env "CARGO_HOME=${RUST_CARGO_HOME}")
+ set(cargo_flags ${cargo_flags})
+ endif()
+
+ add_custom_target(
+ ${cargo_target}
+ COMMAND
+ "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock"
+ COMMAND
+ "${CMAKE_COMMAND}" -E env
+ "CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR}"
+ ${extra_cargo_env}
+ ${cargo_cmd}
+ ${cargo_flags}
+ COMMENT "Building Rust crate '${crate_name}'..."
+ JOB_POOL rust_job_pool
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+ BYPRODUCTS
+ "${CMAKE_CURRENT_BINARY_DIR}/debug/${staticlib_name}"
+ "${CMAKE_CURRENT_BINARY_DIR}/release/${staticlib_name}"
+ )
+
+ add_library(${TARGET} INTERFACE)
+ add_dependencies(${TARGET} ${cargo_target})
+ set_target_properties(
+ ${TARGET}
+ PROPERTIES
+ INTERFACE_STATICLIB_OUTPUT_PATH "${rust_staticlib}"
+ INTERFACE_INSTALL_LIBNAME
+ "${CMAKE_STATIC_LIBRARY_PREFIX}${crate_name}_rs${CMAKE_STATIC_LIBRARY_SUFFIX}"
+ )
+ target_link_libraries(
+ ${TARGET}
+ INTERFACE "$"
+ )
+endfunction()
+
+# This function instructs cmake to define a target that will use `cargo build`
+# to build a bin crate referenced by the Cargo.toml file in the current source
+# directory.
+# It accepts a single `TARGET` parameter which will be passed as the package
+# name to `cargo build -p TARGET`. If binary has different name as package,
+# use optional flag BINARY_NAME to override it.
+# The cmake target will be registered to build by default as part of the
+# ALL target.
+function(rust_executable TARGET)
+ fb_cmake_parse_args(ARG "" "BINARY_NAME" "" "${ARGN}")
+
+ set(crate_name "${TARGET}")
+ set(cargo_target "${TARGET}.cargo")
+ set(target_dir $,debug,release>)
+
+ if(DEFINED ARG_BINARY_NAME)
+ set(executable_name "${ARG_BINARY_NAME}${CMAKE_EXECUTABLE_SUFFIX}")
+ else()
+ set(executable_name "${crate_name}${CMAKE_EXECUTABLE_SUFFIX}")
+ endif()
+
+ set(cargo_cmd cargo)
+ if(WIN32)
+ set(cargo_cmd cargo.exe)
+ endif()
+
+ set(cargo_flags build $,,--release> -p ${crate_name})
+ if(USE_CARGO_VENDOR)
+ set(extra_cargo_env "CARGO_HOME=${RUST_CARGO_HOME}")
+ set(cargo_flags ${cargo_flags})
+ endif()
+
+ add_custom_target(
+ ${cargo_target}
+ ALL
+ COMMAND
+ "${CMAKE_COMMAND}" -E remove -f "${CMAKE_CURRENT_SOURCE_DIR}/Cargo.lock"
+ COMMAND
+ "${CMAKE_COMMAND}" -E env
+ "CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR}"
+ ${extra_cargo_env}
+ ${cargo_cmd}
+ ${cargo_flags}
+ COMMENT "Building Rust executable '${crate_name}'..."
+ JOB_POOL rust_job_pool
+ WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+ BYPRODUCTS
+ "${CMAKE_CURRENT_BINARY_DIR}/debug/${executable_name}"
+ "${CMAKE_CURRENT_BINARY_DIR}/release/${executable_name}"
+ )
+
+ set_property(TARGET "${cargo_target}"
+ PROPERTY EXECUTABLE "${CMAKE_CURRENT_BINARY_DIR}/${target_dir}/${executable_name}")
+endfunction()
+
+# This function can be used to install the executable generated by a prior
+# call to the `rust_executable` function.
+# It requires a `TARGET` parameter to identify the target to be installed,
+# and an optional `DESTINATION` parameter to specify the installation
+# directory. If DESTINATION is not specified then the `bin` directory
+# will be assumed.
+function(install_rust_executable TARGET)
+ # Parse the arguments
+ set(one_value_args DESTINATION)
+ set(multi_value_args)
+ fb_cmake_parse_args(
+ ARG "" "${one_value_args}" "${multi_value_args}" "${ARGN}"
+ )
+
+ if(NOT DEFINED ARG_DESTINATION)
+ set(ARG_DESTINATION bin)
+ endif()
+
+ get_target_property(foo "${TARGET}.cargo" EXECUTABLE)
+
+ install(
+ PROGRAMS "${foo}"
+ DESTINATION "${ARG_DESTINATION}"
+ )
+endfunction()
+
+# This function installs the interface target generated from the function
+# `rust_static_library`. Use this function if you want to export your Rust
+# target to external CMake targets.
+#
+# ```cmake
+# install_rust_static_library(
+#
+# INSTALL_DIR
+# [EXPORT ]
+# )
+# ```
+#
+# Parameters:
+# - TARGET: Name of the Rust static library target.
+# - EXPORT_NAME: Name of the exported target.
+# - INSTALL_DIR: Path to the directory where this library will be installed.
+#
+function(install_rust_static_library TARGET)
+ fb_cmake_parse_args(ARG "" "EXPORT;INSTALL_DIR" "" "${ARGN}")
+
+ get_property(
+ staticlib_output_path
+ TARGET "${TARGET}"
+ PROPERTY INTERFACE_STATICLIB_OUTPUT_PATH
+ )
+ get_property(
+ staticlib_output_name
+ TARGET "${TARGET}"
+ PROPERTY INTERFACE_INSTALL_LIBNAME
+ )
+
+ if(NOT DEFINED staticlib_output_path)
+ message(FATAL_ERROR "Not a rust_static_library target.")
+ endif()
+
+ if(NOT DEFINED ARG_INSTALL_DIR)
+ message(FATAL_ERROR "Missing required argument.")
+ endif()
+
+ if(DEFINED ARG_EXPORT)
+ set(install_export_args EXPORT "${ARG_EXPORT}")
+ endif()
+
+ set(install_interface_dir "${ARG_INSTALL_DIR}")
+ if(NOT IS_ABSOLUTE "${install_interface_dir}")
+ set(install_interface_dir "\${_IMPORT_PREFIX}/${install_interface_dir}")
+ endif()
+
+ target_link_libraries(
+ ${TARGET} INTERFACE
+ "$"
+ )
+ install(
+ TARGETS ${TARGET}
+ ${install_export_args}
+ LIBRARY DESTINATION ${ARG_INSTALL_DIR}
+ )
+ install(
+ FILES ${staticlib_output_path}
+ RENAME ${staticlib_output_name}
+ DESTINATION ${ARG_INSTALL_DIR}
+ )
+endfunction()
diff --git a/build/fbcode_builder/CMake/fb_py_test_main.py b/build/fbcode_builder/CMake/fb_py_test_main.py
new file mode 100644
index 000000000..1f3563aff
--- /dev/null
+++ b/build/fbcode_builder/CMake/fb_py_test_main.py
@@ -0,0 +1,820 @@
+#!/usr/bin/env python
+#
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+"""
+This file contains the main module code for Python test programs.
+"""
+
+from __future__ import print_function
+
+import contextlib
+import ctypes
+import fnmatch
+import json
+import logging
+import optparse
+import os
+import platform
+import re
+import sys
+import tempfile
+import time
+import traceback
+import unittest
+import warnings
+
+# Hide warning about importing "imp"; remove once python2 is gone.
+with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ import imp
+
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+try:
+ import coverage
+except ImportError:
+ coverage = None # type: ignore
+try:
+ from importlib.machinery import SourceFileLoader
+except ImportError:
+ SourceFileLoader = None # type: ignore
+
+
+class get_cpu_instr_counter(object):
+ def read(self):
+ # TODO
+ return 0
+
+
+EXIT_CODE_SUCCESS = 0
+EXIT_CODE_TEST_FAILURE = 70
+
+
+class TestStatus(object):
+
+ ABORTED = "FAILURE"
+ PASSED = "SUCCESS"
+ FAILED = "FAILURE"
+ EXPECTED_FAILURE = "SUCCESS"
+ UNEXPECTED_SUCCESS = "FAILURE"
+ SKIPPED = "ASSUMPTION_VIOLATION"
+
+
+class PathMatcher(object):
+ def __init__(self, include_patterns, omit_patterns):
+ self.include_patterns = include_patterns
+ self.omit_patterns = omit_patterns
+
+ def omit(self, path):
+ """
+ Omit iff matches any of the omit_patterns or the include patterns are
+ not empty and none is matched
+ """
+ path = os.path.realpath(path)
+ return any(fnmatch.fnmatch(path, p) for p in self.omit_patterns) or (
+ self.include_patterns
+ and not any(fnmatch.fnmatch(path, p) for p in self.include_patterns)
+ )
+
+ def include(self, path):
+ return not self.omit(path)
+
+
+class DebugWipeFinder(object):
+ """
+ PEP 302 finder that uses a DebugWipeLoader for all files which do not need
+ coverage
+ """
+
+ def __init__(self, matcher):
+ self.matcher = matcher
+
+ def find_module(self, fullname, path=None):
+ _, _, basename = fullname.rpartition(".")
+ try:
+ fd, pypath, (_, _, kind) = imp.find_module(basename, path)
+ except Exception:
+ # Finding without hooks using the imp module failed. One reason
+ # could be that there is a zip file on sys.path. The imp module
+ # does not support loading from there. Leave finding this module to
+ # the others finders in sys.meta_path.
+ return None
+
+ if hasattr(fd, "close"):
+ fd.close()
+ if kind != imp.PY_SOURCE:
+ return None
+ if self.matcher.include(pypath):
+ return None
+
+ """
+ This is defined to match CPython's PyVarObject struct
+ """
+
+ class PyVarObject(ctypes.Structure):
+ _fields_ = [
+ ("ob_refcnt", ctypes.c_long),
+ ("ob_type", ctypes.c_void_p),
+ ("ob_size", ctypes.c_ulong),
+ ]
+
+ class DebugWipeLoader(SourceFileLoader):
+ """
+ PEP302 loader that zeros out debug information before execution
+ """
+
+ def get_code(self, fullname):
+ code = super(DebugWipeLoader, self).get_code(fullname)
+ if code:
+ # Ideally we'd do
+ # code.co_lnotab = b''
+ # But code objects are READONLY. Not to worry though; we'll
+ # directly modify CPython's object
+ code_impl = PyVarObject.from_address(id(code.co_lnotab))
+ code_impl.ob_size = 0
+ return code
+
+ return DebugWipeLoader(fullname, pypath)
+
+
+def optimize_for_coverage(cov, include_patterns, omit_patterns):
+ """
+ We get better performance if we zero out debug information for files which
+ we're not interested in. Only available in CPython 3.3+
+ """
+ matcher = PathMatcher(include_patterns, omit_patterns)
+ if SourceFileLoader and platform.python_implementation() == "CPython":
+ sys.meta_path.insert(0, DebugWipeFinder(matcher))
+
+
+class TeeStream(object):
+ def __init__(self, *streams):
+ self._streams = streams
+
+ def write(self, data):
+ for stream in self._streams:
+ stream.write(data)
+
+ def flush(self):
+ for stream in self._streams:
+ stream.flush()
+
+ def isatty(self):
+ return False
+
+
+class CallbackStream(object):
+ def __init__(self, callback, bytes_callback=None, orig=None):
+ self._callback = callback
+ self._fileno = orig.fileno() if orig else None
+
+ # Python 3 APIs:
+ # - `encoding` is a string holding the encoding name
+ # - `errors` is a string holding the error-handling mode for encoding
+ # - `buffer` should look like an io.BufferedIOBase object
+
+ self.errors = orig.errors if orig else None
+ if bytes_callback:
+ # those members are only on the io.TextIOWrapper
+ self.encoding = orig.encoding if orig else "UTF-8"
+ self.buffer = CallbackStream(bytes_callback, orig=orig)
+
+ def write(self, data):
+ self._callback(data)
+
+ def flush(self):
+ pass
+
+ def isatty(self):
+ return False
+
+ def fileno(self):
+ return self._fileno
+
+
+class BuckTestResult(unittest._TextTestResult):
+ """
+ Our own TestResult class that outputs data in a format that can be easily
+ parsed by buck's test runner.
+ """
+
+ _instr_counter = get_cpu_instr_counter()
+
+ def __init__(
+ self, stream, descriptions, verbosity, show_output, main_program, suite
+ ):
+ super(BuckTestResult, self).__init__(stream, descriptions, verbosity)
+ self._main_program = main_program
+ self._suite = suite
+ self._results = []
+ self._current_test = None
+ self._saved_stdout = sys.stdout
+ self._saved_stderr = sys.stderr
+ self._show_output = show_output
+
+ def getResults(self):
+ return self._results
+
+ def startTest(self, test):
+ super(BuckTestResult, self).startTest(test)
+
+ # Pass in the real stdout and stderr filenos. We can't really do much
+ # here to intercept callers who directly operate on these fileno
+ # objects.
+ sys.stdout = CallbackStream(
+ self.addStdout, self.addStdoutBytes, orig=sys.stdout
+ )
+ sys.stderr = CallbackStream(
+ self.addStderr, self.addStderrBytes, orig=sys.stderr
+ )
+ self._current_test = test
+ self._test_start_time = time.time()
+ self._current_status = TestStatus.ABORTED
+ self._messages = []
+ self._stacktrace = None
+ self._stdout = ""
+ self._stderr = ""
+ self._start_instr_count = self._instr_counter.read()
+
+ def _find_next_test(self, suite):
+ """
+ Find the next test that has not been run.
+ """
+
+ for test in suite:
+
+ # We identify test suites by test that are iterable (as is done in
+ # the builtin python test harness). If we see one, recurse on it.
+ if hasattr(test, "__iter__"):
+ test = self._find_next_test(test)
+
+ # The builtin python test harness sets test references to `None`
+ # after they have run, so we know we've found the next test up
+ # if it's not `None`.
+ if test is not None:
+ return test
+
+ def stopTest(self, test):
+ sys.stdout = self._saved_stdout
+ sys.stderr = self._saved_stderr
+
+ super(BuckTestResult, self).stopTest(test)
+
+ # If a failure occured during module/class setup, then this "test" may
+ # actually be a `_ErrorHolder`, which doesn't contain explicit info
+ # about the upcoming test. Since we really only care about the test
+ # name field (i.e. `_testMethodName`), we use that to detect an actual
+ # test cases, and fall back to looking the test up from the suite
+ # otherwise.
+ if not hasattr(test, "_testMethodName"):
+ test = self._find_next_test(self._suite)
+
+ result = {
+ "testCaseName": "{0}.{1}".format(
+ test.__class__.__module__, test.__class__.__name__
+ ),
+ "testCase": test._testMethodName,
+ "type": self._current_status,
+ "time": int((time.time() - self._test_start_time) * 1000),
+ "message": os.linesep.join(self._messages),
+ "stacktrace": self._stacktrace,
+ "stdOut": self._stdout,
+ "stdErr": self._stderr,
+ }
+
+ # TestPilot supports an instruction count field.
+ if "TEST_PILOT" in os.environ:
+ result["instrCount"] = (
+ int(self._instr_counter.read() - self._start_instr_count),
+ )
+
+ self._results.append(result)
+ self._current_test = None
+
+ def stopTestRun(self):
+ cov = self._main_program.get_coverage()
+ if cov is not None:
+ self._results.append({"coverage": cov})
+
+ @contextlib.contextmanager
+ def _withTest(self, test):
+ self.startTest(test)
+ yield
+ self.stopTest(test)
+
+ def _setStatus(self, test, status, message=None, stacktrace=None):
+ assert test == self._current_test
+ self._current_status = status
+ self._stacktrace = stacktrace
+ if message is not None:
+ if message.endswith(os.linesep):
+ message = message[:-1]
+ self._messages.append(message)
+
+ def setStatus(self, test, status, message=None, stacktrace=None):
+ # addError() may be called outside of a test if one of the shared
+ # fixtures (setUpClass/tearDownClass/setUpModule/tearDownModule)
+ # throws an error.
+ #
+ # In this case, create a fake test result to record the error.
+ if self._current_test is None:
+ with self._withTest(test):
+ self._setStatus(test, status, message, stacktrace)
+ else:
+ self._setStatus(test, status, message, stacktrace)
+
+ def setException(self, test, status, excinfo):
+ exctype, value, tb = excinfo
+ self.setStatus(
+ test,
+ status,
+ "{0}: {1}".format(exctype.__name__, value),
+ "".join(traceback.format_tb(tb)),
+ )
+
+ def addSuccess(self, test):
+ super(BuckTestResult, self).addSuccess(test)
+ self.setStatus(test, TestStatus.PASSED)
+
+ def addError(self, test, err):
+ super(BuckTestResult, self).addError(test, err)
+ self.setException(test, TestStatus.ABORTED, err)
+
+ def addFailure(self, test, err):
+ super(BuckTestResult, self).addFailure(test, err)
+ self.setException(test, TestStatus.FAILED, err)
+
+ def addSkip(self, test, reason):
+ super(BuckTestResult, self).addSkip(test, reason)
+ self.setStatus(test, TestStatus.SKIPPED, "Skipped: %s" % (reason,))
+
+ def addExpectedFailure(self, test, err):
+ super(BuckTestResult, self).addExpectedFailure(test, err)
+ self.setException(test, TestStatus.EXPECTED_FAILURE, err)
+
+ def addUnexpectedSuccess(self, test):
+ super(BuckTestResult, self).addUnexpectedSuccess(test)
+ self.setStatus(test, TestStatus.UNEXPECTED_SUCCESS, "Unexpected success")
+
+ def addStdout(self, val):
+ self._stdout += val
+ if self._show_output:
+ self._saved_stdout.write(val)
+ self._saved_stdout.flush()
+
+ def addStdoutBytes(self, val):
+ string = val.decode("utf-8", errors="backslashreplace")
+ self.addStdout(string)
+
+ def addStderr(self, val):
+ self._stderr += val
+ if self._show_output:
+ self._saved_stderr.write(val)
+ self._saved_stderr.flush()
+
+ def addStderrBytes(self, val):
+ string = val.decode("utf-8", errors="backslashreplace")
+ self.addStderr(string)
+
+
+class BuckTestRunner(unittest.TextTestRunner):
+ def __init__(self, main_program, suite, show_output=True, **kwargs):
+ super(BuckTestRunner, self).__init__(**kwargs)
+ self.show_output = show_output
+ self._main_program = main_program
+ self._suite = suite
+
+ def _makeResult(self):
+ return BuckTestResult(
+ self.stream,
+ self.descriptions,
+ self.verbosity,
+ self.show_output,
+ self._main_program,
+ self._suite,
+ )
+
+
+def _format_test_name(test_class, attrname):
+ return "{0}.{1}.{2}".format(test_class.__module__, test_class.__name__, attrname)
+
+
+class StderrLogHandler(logging.StreamHandler):
+ """
+ This class is very similar to logging.StreamHandler, except that it
+ always uses the current sys.stderr object.
+
+ StreamHandler caches the current sys.stderr object when it is constructed.
+ This makes it behave poorly in unit tests, which may replace sys.stderr
+ with a StringIO buffer during tests. The StreamHandler will continue using
+ the old sys.stderr object instead of the desired StringIO buffer.
+ """
+
+ def __init__(self):
+ logging.Handler.__init__(self)
+
+ @property
+ def stream(self):
+ return sys.stderr
+
+
+class RegexTestLoader(unittest.TestLoader):
+ def __init__(self, regex=None):
+ self.regex = regex
+ super(RegexTestLoader, self).__init__()
+
+ def getTestCaseNames(self, testCaseClass):
+ """
+ Return a sorted sequence of method names found within testCaseClass
+ """
+
+ testFnNames = super(RegexTestLoader, self).getTestCaseNames(testCaseClass)
+ if self.regex is None:
+ return testFnNames
+ robj = re.compile(self.regex)
+ matched = []
+ for attrname in testFnNames:
+ fullname = _format_test_name(testCaseClass, attrname)
+ if robj.search(fullname):
+ matched.append(attrname)
+ return matched
+
+
+class Loader(object):
+
+ suiteClass = unittest.TestSuite
+
+ def __init__(self, modules, regex=None):
+ self.modules = modules
+ self.regex = regex
+
+ def load_all(self):
+ loader = RegexTestLoader(self.regex)
+ test_suite = self.suiteClass()
+ for module_name in self.modules:
+ __import__(module_name, level=0)
+ module = sys.modules[module_name]
+ module_suite = loader.loadTestsFromModule(module)
+ test_suite.addTest(module_suite)
+ return test_suite
+
+ def load_args(self, args):
+ loader = RegexTestLoader(self.regex)
+
+ suites = []
+ for arg in args:
+ suite = loader.loadTestsFromName(arg)
+ # loadTestsFromName() can only process names that refer to
+ # individual test functions or modules. It can't process package
+ # names. If there were no module/function matches, check to see if
+ # this looks like a package name.
+ if suite.countTestCases() != 0:
+ suites.append(suite)
+ continue
+
+ # Load all modules whose name is .
+ prefix = arg + "."
+ for module in self.modules:
+ if module.startswith(prefix):
+ suite = loader.loadTestsFromName(module)
+ suites.append(suite)
+
+ return loader.suiteClass(suites)
+
+
+_COVERAGE_INI = """\
+[report]
+exclude_lines =
+ pragma: no cover
+ pragma: nocover
+ pragma:.*no${PLATFORM}
+ pragma:.*no${PY_IMPL}${PY_MAJOR}${PY_MINOR}
+ pragma:.*no${PY_IMPL}${PY_MAJOR}
+ pragma:.*nopy${PY_MAJOR}
+ pragma:.*nopy${PY_MAJOR}${PY_MINOR}
+"""
+
+
+class MainProgram(object):
+ """
+ This class implements the main program. It can be subclassed by
+ users who wish to customize some parts of the main program.
+ (Adding additional command line options, customizing test loading, etc.)
+ """
+
+ DEFAULT_VERBOSITY = 2
+
+ def __init__(self, argv):
+ self.init_option_parser()
+ self.parse_options(argv)
+ self.setup_logging()
+
+ def init_option_parser(self):
+ usage = "%prog [options] [TEST] ..."
+ op = optparse.OptionParser(usage=usage, add_help_option=False)
+ self.option_parser = op
+
+ op.add_option(
+ "--hide-output",
+ dest="show_output",
+ action="store_false",
+ default=True,
+ help="Suppress data that tests print to stdout/stderr, and only "
+ "show it if the test fails.",
+ )
+ op.add_option(
+ "-o",
+ "--output",
+ help="Write results to a file in a JSON format to be read by Buck",
+ )
+ op.add_option(
+ "-f",
+ "--failfast",
+ action="store_true",
+ default=False,
+ help="Stop after the first failure",
+ )
+ op.add_option(
+ "-l",
+ "--list-tests",
+ action="store_true",
+ dest="list",
+ default=False,
+ help="List tests and exit",
+ )
+ op.add_option(
+ "-r",
+ "--regex",
+ default=None,
+ help="Regex to apply to tests, to only run those tests",
+ )
+ op.add_option(
+ "--collect-coverage",
+ action="store_true",
+ default=False,
+ help="Collect test coverage information",
+ )
+ op.add_option(
+ "--coverage-include",
+ default="*",
+ help='File globs to include in converage (split by ",")',
+ )
+ op.add_option(
+ "--coverage-omit",
+ default="",
+ help='File globs to omit from converage (split by ",")',
+ )
+ op.add_option(
+ "--logger",
+ action="append",
+ metavar="=",
+ default=[],
+ help="Configure log levels for specific logger categories",
+ )
+ op.add_option(
+ "-q",
+ "--quiet",
+ action="count",
+ default=0,
+ help="Decrease the verbosity (may be specified multiple times)",
+ )
+ op.add_option(
+ "-v",
+ "--verbosity",
+ action="count",
+ default=self.DEFAULT_VERBOSITY,
+ help="Increase the verbosity (may be specified multiple times)",
+ )
+ op.add_option(
+ "-?", "--help", action="help", help="Show this help message and exit"
+ )
+
+ def parse_options(self, argv):
+ self.options, self.test_args = self.option_parser.parse_args(argv[1:])
+ self.options.verbosity -= self.options.quiet
+
+ if self.options.collect_coverage and coverage is None:
+ self.option_parser.error("coverage module is not available")
+ self.options.coverage_include = self.options.coverage_include.split(",")
+ if self.options.coverage_omit == "":
+ self.options.coverage_omit = []
+ else:
+ self.options.coverage_omit = self.options.coverage_omit.split(",")
+
+ def setup_logging(self):
+ # Configure the root logger to log at INFO level.
+ # This is similar to logging.basicConfig(), but uses our
+ # StderrLogHandler instead of a StreamHandler.
+ fmt = logging.Formatter("%(pathname)s:%(lineno)s: %(message)s")
+ log_handler = StderrLogHandler()
+ log_handler.setFormatter(fmt)
+ root_logger = logging.getLogger()
+ root_logger.addHandler(log_handler)
+ root_logger.setLevel(logging.INFO)
+
+ level_names = {
+ "debug": logging.DEBUG,
+ "info": logging.INFO,
+ "warn": logging.WARNING,
+ "warning": logging.WARNING,
+ "error": logging.ERROR,
+ "critical": logging.CRITICAL,
+ "fatal": logging.FATAL,
+ }
+
+ for value in self.options.logger:
+ parts = value.rsplit("=", 1)
+ if len(parts) != 2:
+ self.option_parser.error(
+ "--logger argument must be of the "
+ "form =: %s" % value
+ )
+ name = parts[0]
+ level_name = parts[1].lower()
+ level = level_names.get(level_name)
+ if level is None:
+ self.option_parser.error(
+ "invalid log level %r for log " "category %s" % (parts[1], name)
+ )
+ logging.getLogger(name).setLevel(level)
+
+ def create_loader(self):
+ import __test_modules__
+
+ return Loader(__test_modules__.TEST_MODULES, self.options.regex)
+
+ def load_tests(self):
+ loader = self.create_loader()
+ if self.options.collect_coverage:
+ self.start_coverage()
+ include = self.options.coverage_include
+ omit = self.options.coverage_omit
+ if include and "*" not in include:
+ optimize_for_coverage(self.cov, include, omit)
+
+ if self.test_args:
+ suite = loader.load_args(self.test_args)
+ else:
+ suite = loader.load_all()
+ if self.options.collect_coverage:
+ self.cov.start()
+ return suite
+
+ def get_tests(self, test_suite):
+ tests = []
+
+ for test in test_suite:
+ if isinstance(test, unittest.TestSuite):
+ tests.extend(self.get_tests(test))
+ else:
+ tests.append(test)
+
+ return tests
+
+ def run(self):
+ test_suite = self.load_tests()
+
+ if self.options.list:
+ for test in self.get_tests(test_suite):
+ method_name = getattr(test, "_testMethodName", "")
+ name = _format_test_name(test.__class__, method_name)
+ print(name)
+ return EXIT_CODE_SUCCESS
+ else:
+ result = self.run_tests(test_suite)
+ if self.options.output is not None:
+ with open(self.options.output, "w") as f:
+ json.dump(result.getResults(), f, indent=4, sort_keys=True)
+ if not result.wasSuccessful():
+ return EXIT_CODE_TEST_FAILURE
+ return EXIT_CODE_SUCCESS
+
+ def run_tests(self, test_suite):
+ # Install a signal handler to catch Ctrl-C and display the results
+ # (but only if running >2.6).
+ if sys.version_info[0] > 2 or sys.version_info[1] > 6:
+ unittest.installHandler()
+
+ # Run the tests
+ runner = BuckTestRunner(
+ self,
+ test_suite,
+ verbosity=self.options.verbosity,
+ show_output=self.options.show_output,
+ )
+ result = runner.run(test_suite)
+
+ if self.options.collect_coverage and self.options.show_output:
+ self.cov.stop()
+ try:
+ self.cov.report(file=sys.stdout)
+ except coverage.misc.CoverageException:
+ print("No lines were covered, potentially restricted by file filters")
+
+ return result
+
+ def get_abbr_impl(self):
+ """Return abbreviated implementation name."""
+ impl = platform.python_implementation()
+ if impl == "PyPy":
+ return "pp"
+ elif impl == "Jython":
+ return "jy"
+ elif impl == "IronPython":
+ return "ip"
+ elif impl == "CPython":
+ return "cp"
+ else:
+ raise RuntimeError("unknown python runtime")
+
+ def start_coverage(self):
+ if not self.options.collect_coverage:
+ return
+
+ with tempfile.NamedTemporaryFile("w", delete=False) as coverage_ini:
+ coverage_ini.write(_COVERAGE_INI)
+ self._coverage_ini_path = coverage_ini.name
+
+ # Keep the original working dir in case tests use os.chdir
+ self._original_working_dir = os.getcwd()
+
+ # for coverage config ignores by platform/python version
+ os.environ["PLATFORM"] = sys.platform
+ os.environ["PY_IMPL"] = self.get_abbr_impl()
+ os.environ["PY_MAJOR"] = str(sys.version_info.major)
+ os.environ["PY_MINOR"] = str(sys.version_info.minor)
+
+ self.cov = coverage.Coverage(
+ include=self.options.coverage_include,
+ omit=self.options.coverage_omit,
+ config_file=coverage_ini.name,
+ )
+ self.cov.erase()
+ self.cov.start()
+
+ def get_coverage(self):
+ if not self.options.collect_coverage:
+ return None
+
+ try:
+ os.remove(self._coverage_ini_path)
+ except OSError:
+ pass # Better to litter than to fail the test
+
+ # Switch back to the original working directory.
+ os.chdir(self._original_working_dir)
+
+ result = {}
+
+ self.cov.stop()
+
+ try:
+ f = StringIO()
+ self.cov.report(file=f)
+ lines = f.getvalue().split("\n")
+ except coverage.misc.CoverageException:
+ # Nothing was covered. That's fine by us
+ return result
+
+ # N.B.: the format of the coverage library's output differs
+ # depending on whether one or more files are in the results
+ for line in lines[2:]:
+ if line.strip("-") == "":
+ break
+ r = line.split()[0]
+ analysis = self.cov.analysis2(r)
+ covString = self.convert_to_diff_cov_str(analysis)
+ if covString:
+ result[r] = covString
+
+ return result
+
+ def convert_to_diff_cov_str(self, analysis):
+ # Info on the format of analysis:
+ # http://nedbatchelder.com/code/coverage/api.html
+ if not analysis:
+ return None
+ numLines = max(
+ analysis[1][-1] if len(analysis[1]) else 0,
+ analysis[2][-1] if len(analysis[2]) else 0,
+ analysis[3][-1] if len(analysis[3]) else 0,
+ )
+ lines = ["N"] * numLines
+ for l in analysis[1]:
+ lines[l - 1] = "C"
+ for l in analysis[2]:
+ lines[l - 1] = "X"
+ for l in analysis[3]:
+ lines[l - 1] = "U"
+ return "".join(lines)
+
+
+def main(argv):
+ return MainProgram(sys.argv).run()
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
diff --git a/build/fbcode_builder/CMake/fb_py_win_main.c b/build/fbcode_builder/CMake/fb_py_win_main.c
new file mode 100644
index 000000000..8905c3602
--- /dev/null
+++ b/build/fbcode_builder/CMake/fb_py_win_main.c
@@ -0,0 +1,126 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+
+#define WIN32_LEAN_AND_MEAN
+
+#include
+#include
+#include
+
+#define PATH_SIZE 32768
+
+typedef int (*Py_Main)(int, wchar_t**);
+
+// Add the given path to Windows's DLL search path.
+// For Windows DLL search path resolution, see:
+// https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-search-order
+void add_search_path(const wchar_t* path) {
+ wchar_t buffer[PATH_SIZE];
+ wchar_t** lppPart = NULL;
+
+ if (!GetFullPathNameW(path, PATH_SIZE, buffer, lppPart)) {
+ fwprintf(stderr, L"warning: %d unable to expand path %s\n", GetLastError(), path);
+ return;
+ }
+
+ if (!AddDllDirectory(buffer)) {
+ DWORD error = GetLastError();
+ if (error != ERROR_FILE_NOT_FOUND) {
+ fwprintf(stderr, L"warning: %d unable to set DLL search path for %s\n", GetLastError(), path);
+ }
+ }
+}
+
+int locate_py_main(int argc, wchar_t **argv) {
+ /*
+ * We have to dynamically locate Python3.dll because we may be loading a
+ * Python native module while running. If that module is built with a
+ * different Python version, we will end up a DLL import error. To resolve
+ * this, we can either ship an embedded version of Python with us or
+ * dynamically look up existing Python distribution installed on user's
+ * machine. This way, we should be able to get a consistent version of
+ * Python3.dll and .pyd modules.
+ */
+ HINSTANCE python_dll;
+ Py_Main pymain;
+
+ // last added directory has highest priority
+ add_search_path(L"C:\\Python36\\");
+ add_search_path(L"C:\\Python37\\");
+ add_search_path(L"C:\\Python38\\");
+
+ python_dll = LoadLibraryExW(L"python3.dll", NULL, LOAD_LIBRARY_SEARCH_DEFAULT_DIRS);
+
+ int returncode = 0;
+ if (python_dll != NULL) {
+ pymain = (Py_Main) GetProcAddress(python_dll, "Py_Main");
+
+ if (pymain != NULL) {
+ returncode = (pymain)(argc, argv);
+ } else {
+ fprintf(stderr, "error: %d unable to load Py_Main\n", GetLastError());
+ }
+
+ FreeLibrary(python_dll);
+ } else {
+ fprintf(stderr, "error: %d unable to locate python3.dll\n", GetLastError());
+ return 1;
+ }
+ return returncode;
+}
+
+int wmain() {
+ /*
+ * This executable will be prepended to the start of a Python ZIP archive.
+ * Python will be able to directly execute the ZIP archive, so we simply
+ * need to tell Py_Main() to run our own file. Duplicate the argument list
+ * and add our file name to the beginning to tell Python what file to invoke.
+ */
+ wchar_t** pyargv = malloc(sizeof(wchar_t*) * (__argc + 1));
+ if (!pyargv) {
+ fprintf(stderr, "error: failed to allocate argument vector\n");
+ return 1;
+ }
+
+ /* Py_Main wants the wide character version of the argv so we pull those
+ * values from the global __wargv array that has been prepared by MSVCRT.
+ *
+ * In order for the zipapp to run we need to insert an extra argument in
+ * the front of the argument vector that points to ourselves.
+ *
+ * An additional complication is that, depending on who prepared the argument
+ * string used to start our process, the computed __wargv[0] can be a simple
+ * shell word like `watchman-wait` which is normally resolved together with
+ * the PATH by the shell.
+ * That unresolved path isn't sufficient to start the zipapp on windows;
+ * we need the fully qualified path.
+ *
+ * Given:
+ * __wargv == {"watchman-wait", "-h"}
+ *
+ * we want to pass the following to Py_Main:
+ *
+ * {
+ * "z:\build\watchman\python\watchman-wait.exe",
+ * "z:\build\watchman\python\watchman-wait.exe",
+ * "-h"
+ * }
+ */
+ wchar_t full_path_to_argv0[PATH_SIZE];
+ DWORD len = GetModuleFileNameW(NULL, full_path_to_argv0, PATH_SIZE);
+ if (len == 0 ||
+ len == PATH_SIZE && GetLastError() == ERROR_INSUFFICIENT_BUFFER) {
+ fprintf(
+ stderr,
+ "error: %d while retrieving full path to this executable\n",
+ GetLastError());
+ return 1;
+ }
+
+ for (int n = 1; n < __argc; ++n) {
+ pyargv[n + 1] = __wargv[n];
+ }
+ pyargv[0] = full_path_to_argv0;
+ pyargv[1] = full_path_to_argv0;
+
+ return locate_py_main(__argc + 1, pyargv);
+}
diff --git a/build/fbcode_builder/CMake/make_fbpy_archive.py b/build/fbcode_builder/CMake/make_fbpy_archive.py
new file mode 100755
index 000000000..3724feb21
--- /dev/null
+++ b/build/fbcode_builder/CMake/make_fbpy_archive.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+import argparse
+import collections
+import errno
+import os
+import shutil
+import sys
+import tempfile
+import zipapp
+
+MANIFEST_SEPARATOR = " :: "
+MANIFEST_HEADER_V1 = "FBPY_MANIFEST 1\n"
+
+
+class UsageError(Exception):
+ def __init__(self, message):
+ self.message = message
+
+ def __str__(self):
+ return self.message
+
+
+class BadManifestError(UsageError):
+ def __init__(self, path, line_num, message):
+ full_msg = "%s:%s: %s" % (path, line_num, message)
+ super().__init__(full_msg)
+ self.path = path
+ self.line_num = line_num
+ self.raw_message = message
+
+
+PathInfo = collections.namedtuple(
+ "PathInfo", ("src", "dest", "manifest_path", "manifest_line")
+)
+
+
+def parse_manifest(manifest, path_map):
+ bad_prefix = ".." + os.path.sep
+ manifest_dir = os.path.dirname(manifest)
+ with open(manifest, "r") as f:
+ line_num = 1
+ line = f.readline()
+ if line != MANIFEST_HEADER_V1:
+ raise BadManifestError(
+ manifest, line_num, "Unexpected manifest file header"
+ )
+
+ for line in f:
+ line_num += 1
+ if line.startswith("#"):
+ continue
+ line = line.rstrip("\n")
+ parts = line.split(MANIFEST_SEPARATOR)
+ if len(parts) != 2:
+ msg = "line must be of the form SRC %s DEST" % MANIFEST_SEPARATOR
+ raise BadManifestError(manifest, line_num, msg)
+ src, dest = parts
+ dest = os.path.normpath(dest)
+ if dest.startswith(bad_prefix):
+ msg = "destination path starts with %s: %s" % (bad_prefix, dest)
+ raise BadManifestError(manifest, line_num, msg)
+
+ if not os.path.isabs(src):
+ src = os.path.normpath(os.path.join(manifest_dir, src))
+
+ if dest in path_map:
+ prev_info = path_map[dest]
+ msg = (
+ "multiple source paths specified for destination "
+ "path %s. Previous source was %s from %s:%s"
+ % (
+ dest,
+ prev_info.src,
+ prev_info.manifest_path,
+ prev_info.manifest_line,
+ )
+ )
+ raise BadManifestError(manifest, line_num, msg)
+
+ info = PathInfo(
+ src=src,
+ dest=dest,
+ manifest_path=manifest,
+ manifest_line=line_num,
+ )
+ path_map[dest] = info
+
+
+def populate_install_tree(inst_dir, path_map):
+ os.mkdir(inst_dir)
+ dest_dirs = {"": False}
+
+ def make_dest_dir(path):
+ if path in dest_dirs:
+ return
+ parent = os.path.dirname(path)
+ make_dest_dir(parent)
+ abs_path = os.path.join(inst_dir, path)
+ os.mkdir(abs_path)
+ dest_dirs[path] = False
+
+ def install_file(info):
+ dir_name, base_name = os.path.split(info.dest)
+ make_dest_dir(dir_name)
+ if base_name == "__init__.py":
+ dest_dirs[dir_name] = True
+ abs_dest = os.path.join(inst_dir, info.dest)
+ shutil.copy2(info.src, abs_dest)
+
+ # Copy all of the destination files
+ for info in path_map.values():
+ install_file(info)
+
+ # Create __init__ files in any directories that don't have them.
+ for dir_path, has_init in dest_dirs.items():
+ if has_init:
+ continue
+ init_path = os.path.join(inst_dir, dir_path, "__init__.py")
+ with open(init_path, "w"):
+ pass
+
+
+def build_zipapp(args, path_map):
+ """Create a self executing python binary using Python 3's built-in
+ zipapp module.
+
+ This type of Python binary is relatively simple, as zipapp is part of the
+ standard library, but it does not support native language extensions
+ (.so/.dll files).
+ """
+ dest_dir = os.path.dirname(args.output)
+ with tempfile.TemporaryDirectory(prefix="make_fbpy.", dir=dest_dir) as tmpdir:
+ inst_dir = os.path.join(tmpdir, "tree")
+ populate_install_tree(inst_dir, path_map)
+
+ tmp_output = os.path.join(tmpdir, "output.exe")
+ zipapp.create_archive(
+ inst_dir, target=tmp_output, interpreter=args.python, main=args.main
+ )
+ os.replace(tmp_output, args.output)
+
+
+def create_main_module(args, inst_dir, path_map):
+ if not args.main:
+ assert "__main__.py" in path_map
+ return
+
+ dest_path = os.path.join(inst_dir, "__main__.py")
+ main_module, main_fn = args.main.split(":")
+ main_contents = """\
+#!{python}
+
+if __name__ == "__main__":
+ import {main_module}
+ {main_module}.{main_fn}()
+""".format(
+ python=args.python, main_module=main_module, main_fn=main_fn
+ )
+ with open(dest_path, "w") as f:
+ f.write(main_contents)
+ os.chmod(dest_path, 0o755)
+
+
+def build_install_dir(args, path_map):
+ """Create a directory that contains all of the sources, with a __main__
+ module to run the program.
+ """
+ # Populate a temporary directory first, then rename to the destination
+ # location. This ensures that we don't ever leave a halfway-built
+ # directory behind at the output path if something goes wrong.
+ dest_dir = os.path.dirname(args.output)
+ with tempfile.TemporaryDirectory(prefix="make_fbpy.", dir=dest_dir) as tmpdir:
+ inst_dir = os.path.join(tmpdir, "tree")
+ populate_install_tree(inst_dir, path_map)
+ create_main_module(args, inst_dir, path_map)
+ os.rename(inst_dir, args.output)
+
+
+def ensure_directory(path):
+ try:
+ os.makedirs(path)
+ except OSError as ex:
+ if ex.errno != errno.EEXIST:
+ raise
+
+
+def install_library(args, path_map):
+ """Create an installation directory a python library."""
+ out_dir = args.output
+ out_manifest = args.output + ".manifest"
+
+ install_dir = args.install_dir
+ if not install_dir:
+ install_dir = out_dir
+
+ os.makedirs(out_dir)
+ with open(out_manifest, "w") as manifest:
+ manifest.write(MANIFEST_HEADER_V1)
+ for info in path_map.values():
+ abs_dest = os.path.join(out_dir, info.dest)
+ ensure_directory(os.path.dirname(abs_dest))
+ print("copy %r --> %r" % (info.src, abs_dest))
+ shutil.copy2(info.src, abs_dest)
+ installed_dest = os.path.join(install_dir, info.dest)
+ manifest.write("%s%s%s\n" % (installed_dest, MANIFEST_SEPARATOR, info.dest))
+
+
+def parse_manifests(args):
+ # Process args.manifest_separator to help support older versions of CMake
+ if args.manifest_separator:
+ manifests = []
+ for manifest_arg in args.manifests:
+ split_arg = manifest_arg.split(args.manifest_separator)
+ manifests.extend(split_arg)
+ args.manifests = manifests
+
+ path_map = {}
+ for manifest in args.manifests:
+ parse_manifest(manifest, path_map)
+
+ return path_map
+
+
+def check_main_module(args, path_map):
+ # Translate an empty string in the --main argument to None,
+ # just to allow the CMake logic to be slightly simpler and pass in an
+ # empty string when it really wants the default __main__.py module to be
+ # used.
+ if args.main == "":
+ args.main = None
+
+ if args.type == "lib-install":
+ if args.main is not None:
+ raise UsageError("cannot specify a --main argument with --type=lib-install")
+ return
+
+ main_info = path_map.get("__main__.py")
+ if args.main:
+ if main_info is not None:
+ msg = (
+ "specified an explicit main module with --main, "
+ "but the file listing already includes __main__.py"
+ )
+ raise BadManifestError(
+ main_info.manifest_path, main_info.manifest_line, msg
+ )
+ parts = args.main.split(":")
+ if len(parts) != 2:
+ raise UsageError(
+ "argument to --main must be of the form MODULE:CALLABLE "
+ "(received %s)" % (args.main,)
+ )
+ else:
+ if main_info is None:
+ raise UsageError(
+ "no main module specified with --main, "
+ "and no __main__.py module present"
+ )
+
+
+BUILD_TYPES = {
+ "zipapp": build_zipapp,
+ "dir": build_install_dir,
+ "lib-install": install_library,
+}
+
+
+def main():
+ ap = argparse.ArgumentParser()
+ ap.add_argument("-o", "--output", required=True, help="The output file path")
+ ap.add_argument(
+ "--install-dir",
+ help="When used with --type=lib-install, this parameter specifies the "
+ "final location where the library where be installed. This can be "
+ "used to generate the library in one directory first, when you plan "
+ "to move or copy it to another final location later.",
+ )
+ ap.add_argument(
+ "--manifest-separator",
+ help="Split manifest arguments around this separator. This is used "
+ "to support older versions of CMake that cannot supply the manifests "
+ "as separate arguments.",
+ )
+ ap.add_argument(
+ "--main",
+ help="The main module to run, specified as :. "
+ "This must be specified if and only if the archive does not contain "
+ "a __main__.py file.",
+ )
+ ap.add_argument(
+ "--python",
+ help="Explicitly specify the python interpreter to use for the " "executable.",
+ )
+ ap.add_argument(
+ "--type", choices=BUILD_TYPES.keys(), help="The type of output to build."
+ )
+ ap.add_argument(
+ "manifests",
+ nargs="+",
+ help="The manifest files specifying how to construct the archive",
+ )
+ args = ap.parse_args()
+
+ if args.python is None:
+ args.python = sys.executable
+
+ if args.type is None:
+ # In the future we might want different default output types
+ # for different platforms.
+ args.type = "zipapp"
+ build_fn = BUILD_TYPES[args.type]
+
+ try:
+ path_map = parse_manifests(args)
+ check_main_module(args, path_map)
+ except UsageError as ex:
+ print("error: %s" % (ex,), file=sys.stderr)
+ sys.exit(1)
+
+ build_fn(args, path_map)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/build/fbcode_builder/LICENSE b/build/fbcode_builder/LICENSE
new file mode 100644
index 000000000..b96dcb048
--- /dev/null
+++ b/build/fbcode_builder/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Facebook, Inc. and its affiliates.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/build/fbcode_builder/README.docker b/build/fbcode_builder/README.docker
new file mode 100644
index 000000000..4e9fa8a29
--- /dev/null
+++ b/build/fbcode_builder/README.docker
@@ -0,0 +1,44 @@
+## Debugging Docker builds
+
+To debug a a build failure, start up a shell inside the just-failed image as
+follows:
+
+```
+docker ps -a | head # Grab the container ID
+docker commit CONTAINER_ID # Grab the SHA string
+docker run -it SHA_STRING /bin/bash
+# Debug as usual, e.g. `./run-cmake.sh Debug`, `make`, `apt-get install gdb`
+```
+
+## A note on Docker security
+
+While the Dockerfile generated above is quite simple, you must be aware that
+using Docker to run arbitrary code can present significant security risks:
+
+ - Code signature validation is off by default (as of 2016), exposing you to
+ man-in-the-middle malicious code injection.
+
+ - You implicitly trust the world -- a Dockerfile cannot annotate that
+ you trust the image `debian:8.6` because you trust a particular
+ certificate -- rather, you trust the name, and that it will never be
+ hijacked.
+
+ - Sandboxing in the Linux kernel is not perfect, and the builds run code as
+ root. Any compromised code can likely escalate to the host system.
+
+Specifically, you must be very careful only to add trusted OS images to the
+build flow.
+
+Consider setting this variable before running any Docker container -- this
+will validate a signature on the base image before running code from it:
+
+```
+export DOCKER_CONTENT_TRUST=1
+```
+
+Note that unless you go through the extra steps of notarizing the resulting
+images, you will have to disable trust to enter intermediate images, e.g.
+
+```
+DOCKER_CONTENT_TRUST= docker run -it YOUR_IMAGE_ID /bin/bash
+```
diff --git a/build/fbcode_builder/README.md b/build/fbcode_builder/README.md
new file mode 100644
index 000000000..d47dd41c0
--- /dev/null
+++ b/build/fbcode_builder/README.md
@@ -0,0 +1,43 @@
+# Easy builds for Facebook projects
+
+This directory contains tools designed to simplify continuous-integration
+(and other builds) of Facebook open source projects. In particular, this helps
+manage builds for cross-project dependencies.
+
+The main entry point is the `getdeps.py` script. This script has several
+subcommands, but the most notable is the `build` command. This will download
+and build all dependencies for a project, and then build the project itself.
+
+## Deployment
+
+This directory is copied literally into a number of different Facebook open
+source repositories. Any change made to code in this directory will be
+automatically be replicated by our open source tooling into all GitHub hosted
+repositories that use `fbcode_builder`. Typically this directory is copied
+into the open source repositories as `build/fbcode_builder/`.
+
+
+# Project Configuration Files
+
+The `manifests` subdirectory contains configuration files for many different
+projects, describing how to build each project. These files also list
+dependencies between projects, enabling `getdeps.py` to build all dependencies
+for a project before building the project itself.
+
+
+# Shared CMake utilities
+
+Since this directory is copied into many Facebook open source repositories,
+it is also used to help share some CMake utility files across projects. The
+`CMake/` subdirectory contains a number of `.cmake` files that are shared by
+the CMake-based build systems across several different projects.
+
+
+# Older Build Scripts
+
+This directory also still contains a handful of older build scripts that
+pre-date the current `getdeps.py` build system. Most of the other `.py` files
+in this top directory, apart from `getdeps.py` itself, are from this older
+build system. This older system is only used by a few remaining projects, and
+new projects should generally use the newer `getdeps.py` script, by adding a
+new configuration file in the `manifests/` subdirectory.
diff --git a/build/fbcode_builder/docker_build_with_ccache.sh b/build/fbcode_builder/docker_build_with_ccache.sh
new file mode 100755
index 000000000..e922810d5
--- /dev/null
+++ b/build/fbcode_builder/docker_build_with_ccache.sh
@@ -0,0 +1,219 @@
+#!/bin/bash -uex
+# Copyright (c) Facebook, Inc. and its affiliates.
+set -o pipefail # Be sure to `|| :` commands that are allowed to fail.
+
+#
+# Future: port this to Python if you are making significant changes.
+#
+
+# Parse command-line arguments
+build_timeout="" # Default to no time-out
+print_usage() {
+ echo "Usage: $0 [--build-timeout TIMEOUT_VAL] SAVE-CCACHE-TO-DIR"
+ echo "SAVE-CCACHE-TO-DIR is required. An empty string discards the ccache."
+}
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --build-timeout)
+ shift
+ build_timeout="$1"
+ if [[ "$build_timeout" != "" ]] ; then
+ timeout "$build_timeout" true # fail early on invalid timeouts
+ fi
+ ;;
+ -h|--help)
+ print_usage
+ exit
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+done
+# There is one required argument, but an empty string is allowed.
+if [[ "$#" != 1 ]] ; then
+ print_usage
+ exit 1
+fi
+save_ccache_to_dir="$1"
+if [[ "$save_ccache_to_dir" != "" ]] ; then
+ mkdir -p "$save_ccache_to_dir" # fail early if there's nowhere to save
+else
+ echo "WARNING: Will not save /ccache from inside the Docker container"
+fi
+
+rand_guid() {
+ echo "$(date +%s)_${RANDOM}_${RANDOM}_${RANDOM}_${RANDOM}"
+}
+
+id=fbcode_builder_image_id=$(rand_guid)
+logfile=$(mktemp)
+
+echo "
+
+
+Running build with timeout '$build_timeout', label $id, and log in $logfile
+
+
+"
+
+if [[ "$build_timeout" != "" ]] ; then
+ # Kill the container after $build_timeout. Using `/bin/timeout` would cause
+ # Docker to destroy the most recent container and lose its cache.
+ (
+ sleep "$build_timeout"
+ echo "Build timed out after $build_timeout" 1>&2
+ while true; do
+ maybe_container=$(
+ grep -E '^( ---> Running in [0-9a-f]+|FBCODE_BUILDER_EXIT)$' "$logfile" |
+ tail -n 1 | awk '{print $NF}'
+ )
+ if [[ "$maybe_container" == "FBCODE_BUILDER_EXIT" ]] ; then
+ echo "Time-out successfully terminated build" 1>&2
+ break
+ fi
+ echo "Time-out: trying to kill $maybe_container" 1>&2
+ # This kill fail if we get unlucky, try again soon.
+ docker kill "$maybe_container" || sleep 5
+ done
+ ) &
+fi
+
+build_exit_code=0
+# `docker build` is allowed to fail, and `pipefail` means we must check the
+# failure explicitly.
+if ! docker build --label="$id" . 2>&1 | tee "$logfile" ; then
+ build_exit_code="${PIPESTATUS[0]}"
+ # NB: We are going to deliberately forge ahead even if `tee` failed.
+ # If it did, we have a problem with tempfile creation, and all is sad.
+ echo "Build failed with code $build_exit_code, trying to save ccache" 1>&2
+fi
+# Stop trying to kill the container.
+echo $'\nFBCODE_BUILDER_EXIT' >> "$logfile"
+
+if [[ "$save_ccache_to_dir" == "" ]] ; then
+ echo "Not inspecting Docker build, since saving the ccache wasn't requested."
+ exit "$build_exit_code"
+fi
+
+img=$(docker images --filter "label=$id" -a -q)
+if [[ "$img" == "" ]] ; then
+ docker images -a
+ echo "In the above list, failed to find most recent image with $id" 1>&2
+ # Usually, the above `docker kill` will leave us with an up-to-the-second
+ # container, from which we can extract the cache. However, if that fails
+ # for any reason, this loop will instead grab the latest available image.
+ #
+ # It's possible for this log search to get confused due to the output of
+ # the build command itself, but since our builds aren't **trying** to
+ # break cache, we probably won't randomly hit an ID from another build.
+ img=$(
+ grep -E '^ ---> (Running in [0-9a-f]+|[0-9a-f]+)$' "$logfile" | tac |
+ sed 's/Running in /container_/;s/ ---> //;' | (
+ while read -r x ; do
+ # Both docker commands below print an image ID to stdout on
+ # success, so we just need to know when to stop.
+ if [[ "$x" =~ container_.* ]] ; then
+ if docker commit "${x#container_}" ; then
+ break
+ fi
+ elif docker inspect --type image -f '{{.Id}}' "$x" ; then
+ break
+ fi
+ done
+ )
+ )
+ if [[ "$img" == "" ]] ; then
+ echo "Failed to find valid container or image ID in log $logfile" 1>&2
+ exit 1
+ fi
+elif [[ "$(echo "$img" | wc -l)" != 1 ]] ; then
+ # Shouldn't really happen, but be explicit if it does.
+ echo "Multiple images with label $id, taking the latest of:"
+ echo "$img"
+ img=$(echo "$img" | head -n 1)
+fi
+
+container_name="fbcode_builder_container_$(rand_guid)"
+echo "Starting $container_name from latest image of the build with $id --"
+echo "$img"
+
+# ccache collection must be done outside of the Docker build steps because
+# we need to be able to kill it on timeout.
+#
+# This step grows the max cache size to slightly exceed than the working set
+# of a successful build. This simple design persists the max size in the
+# cache directory itself (the env var CCACHE_MAXSIZE does not even work with
+# older ccaches like the one on 14.04).
+#
+# Future: copy this script into the Docker image via Dockerfile.
+(
+ # By default, fbcode_builder creates an unsigned image, so the `docker
+ # run` below would fail if DOCKER_CONTENT_TRUST were set. So we unset it
+ # just for this one run.
+ export DOCKER_CONTENT_TRUST=
+ # CAUTION: The inner bash runs without -uex, so code accordingly.
+ docker run --user root --name "$container_name" "$img" /bin/bash -c '
+ build_exit_code='"$build_exit_code"'
+
+ # Might be useful if debugging whether max cache size is too small?
+ grep " Cleaning up cache directory " /tmp/ccache.log
+
+ export CCACHE_DIR=/ccache
+ ccache -s
+
+ echo "Total bytes in /ccache:";
+ total_bytes=$(du -sb /ccache | awk "{print \$1}")
+ echo "$total_bytes"
+
+ echo "Used bytes in /ccache:";
+ used_bytes=$(
+ du -sb $(find /ccache -type f -newermt @$(
+ cat /FBCODE_BUILDER_CCACHE_START_TIME
+ )) | awk "{t += \$1} END {print t}"
+ )
+ echo "$used_bytes"
+
+ # Goal: set the max cache to 750MB over 125% of the usage of a
+ # successful build. If this is too small, it takes too long to get a
+ # cache fully warmed up. Plus, ccache cleans 100-200MB before reaching
+ # the max cache size, so a large margin is essential to prevent misses.
+ desired_mb=$(( 750 + used_bytes / 800000 )) # 125% in decimal MB: 1e6/1.25
+ if [[ "$build_exit_code" != "0" ]] ; then
+ # For a bad build, disallow shrinking the max cache size. Instead of
+ # the max cache size, we use on-disk size, which ccache keeps at least
+ # 150MB under the actual max size, hence the 400MB safety margin.
+ cur_max_mb=$(( 400 + total_bytes / 1000000 )) # ccache uses decimal MB
+ if [[ "$desired_mb" -le "$cur_max_mb" ]] ; then
+ desired_mb=""
+ fi
+ fi
+
+ if [[ "$desired_mb" != "" ]] ; then
+ echo "Updating cache size to $desired_mb MB"
+ ccache -M "${desired_mb}M"
+ ccache -s
+ fi
+
+ # Subshell because `time` the binary may not be installed.
+ if (time tar czf /ccache.tgz /ccache) ; then
+ ls -l /ccache.tgz
+ else
+ # This `else` ensures we never overwrite the current cache with
+ # partial data in case of error, even if somebody adds code below.
+ rm /ccache.tgz
+ exit 1
+ fi
+ '
+)
+
+echo "Updating $save_ccache_to_dir/ccache.tgz"
+# This will not delete the existing cache if `docker run` didn't make one
+docker cp "$container_name:/ccache.tgz" "$save_ccache_to_dir/"
+
+# Future: it'd be nice if Travis allowed us to retry if the build timed out,
+# since we'll make more progress thanks to the cache. As-is, we have to
+# wait for the next commit to land.
+echo "Build exited with code $build_exit_code"
+exit "$build_exit_code"
diff --git a/build/fbcode_builder/docker_builder.py b/build/fbcode_builder/docker_builder.py
new file mode 100644
index 000000000..83df7137c
--- /dev/null
+++ b/build/fbcode_builder/docker_builder.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"""
+
+Extends FBCodeBuilder to produce Docker context directories.
+
+In order to get the largest iteration-time savings from Docker's build
+caching, you will want to:
+ - Use fine-grained steps as appropriate (e.g. separate make & make install),
+ - Start your action sequence with the lowest-risk steps, and with the steps
+ that change the least often, and
+ - Put the steps that you are debugging towards the very end.
+
+"""
+import logging
+import os
+import shutil
+import tempfile
+
+from fbcode_builder import FBCodeBuilder
+from shell_quoting import raw_shell, shell_comment, shell_join, ShellQuoted, path_join
+from utils import recursively_flatten_list, run_command
+
+
+class DockerFBCodeBuilder(FBCodeBuilder):
+ def _user(self):
+ return self.option("user", "root")
+
+ def _change_user(self):
+ return ShellQuoted("USER {u}").format(u=self._user())
+
+ def setup(self):
+ # Please add RPM-based OSes here as appropriate.
+ #
+ # To allow exercising non-root installs -- we change users after the
+ # system packages are installed. TODO: For users not defined in the
+ # image, we should probably `useradd`.
+ return self.step(
+ "Setup",
+ [
+ # Docker's FROM does not understand shell quoting.
+ ShellQuoted("FROM {}".format(self.option("os_image"))),
+ # /bin/sh syntax is a pain
+ ShellQuoted('SHELL ["/bin/bash", "-c"]'),
+ ]
+ + self.install_debian_deps()
+ + [self._change_user()]
+ + [self.workdir(self.option("prefix"))]
+ + self.create_python_venv()
+ + self.python_venv()
+ + self.rust_toolchain(),
+ )
+
+ def python_venv(self):
+ # To both avoid calling venv activate on each RUN command AND to ensure
+ # it is present when the resulting container is run add to PATH
+ actions = []
+ if self.option("PYTHON_VENV", "OFF") == "ON":
+ actions = ShellQuoted("ENV PATH={p}:$PATH").format(
+ p=path_join(self.option("prefix"), "venv", "bin")
+ )
+ return actions
+
+ def step(self, name, actions):
+ assert "\n" not in name, "Name {0} would span > 1 line".format(name)
+ b = ShellQuoted("")
+ return [ShellQuoted("### {0} ###".format(name)), b] + actions + [b]
+
+ def run(self, shell_cmd):
+ return ShellQuoted("RUN {cmd}").format(cmd=shell_cmd)
+
+ def set_env(self, key, value):
+ return ShellQuoted("ENV {key}={val}").format(key=key, val=value)
+
+ def workdir(self, dir):
+ return [
+ # As late as Docker 1.12.5, this results in `build` being owned
+ # by root:root -- the explicit `mkdir` works around the bug:
+ # USER nobody
+ # WORKDIR build
+ ShellQuoted("USER root"),
+ ShellQuoted("RUN mkdir -p {d} && chown {u} {d}").format(
+ d=dir, u=self._user()
+ ),
+ self._change_user(),
+ ShellQuoted("WORKDIR {dir}").format(dir=dir),
+ ]
+
+ def comment(self, comment):
+ # This should not be a command since we don't want comment changes
+ # to invalidate the Docker build cache.
+ return shell_comment(comment)
+
+ def copy_local_repo(self, repo_dir, dest_name):
+ fd, archive_path = tempfile.mkstemp(
+ prefix="local_repo_{0}_".format(dest_name),
+ suffix=".tgz",
+ dir=os.path.abspath(self.option("docker_context_dir")),
+ )
+ os.close(fd)
+ run_command("tar", "czf", archive_path, ".", cwd=repo_dir)
+ return [
+ ShellQuoted("ADD {archive} {dest_name}").format(
+ archive=os.path.basename(archive_path), dest_name=dest_name
+ ),
+ # Docker permissions make very little sense... see also workdir()
+ ShellQuoted("USER root"),
+ ShellQuoted("RUN chown -R {u} {d}").format(d=dest_name, u=self._user()),
+ self._change_user(),
+ ]
+
+ def _render_impl(self, steps):
+ return raw_shell(shell_join("\n", recursively_flatten_list(steps)))
+
+ def debian_ccache_setup_steps(self):
+ source_ccache_tgz = self.option("ccache_tgz", "")
+ if not source_ccache_tgz:
+ logging.info("Docker ccache not enabled")
+ return []
+
+ dest_ccache_tgz = os.path.join(self.option("docker_context_dir"), "ccache.tgz")
+
+ try:
+ try:
+ os.link(source_ccache_tgz, dest_ccache_tgz)
+ except OSError:
+ logging.exception(
+ "Hard-linking {s} to {d} failed, falling back to copy".format(
+ s=source_ccache_tgz, d=dest_ccache_tgz
+ )
+ )
+ shutil.copyfile(source_ccache_tgz, dest_ccache_tgz)
+ except Exception:
+ logging.exception(
+ "Failed to copy or link {s} to {d}, aborting".format(
+ s=source_ccache_tgz, d=dest_ccache_tgz
+ )
+ )
+ raise
+
+ return [
+ # Separate layer so that in development we avoid re-downloads.
+ self.run(ShellQuoted("apt-get install -yq ccache")),
+ ShellQuoted("ADD ccache.tgz /"),
+ ShellQuoted(
+ # Set CCACHE_DIR before the `ccache` invocations below.
+ "ENV CCACHE_DIR=/ccache "
+ # No clang support for now, so it's easiest to hardcode gcc.
+ 'CC="ccache gcc" CXX="ccache g++" '
+ # Always log for ease of debugging. For real FB projects,
+ # this log is several megabytes, so dumping it to stdout
+ # would likely exceed the Travis log limit of 4MB.
+ #
+ # On a local machine, `docker cp` will get you the data. To
+ # get the data out from Travis, I would compress and dump
+ # uuencoded bytes to the log -- for Bistro this was about
+ # 600kb or 8000 lines:
+ #
+ # apt-get install sharutils
+ # bzip2 -9 < /tmp/ccache.log | uuencode -m ccache.log.bz2
+ "CCACHE_LOGFILE=/tmp/ccache.log"
+ ),
+ self.run(
+ ShellQuoted(
+ # Future: Skipping this part made this Docker step instant,
+ # saving ~1min of build time. It's unclear if it is the
+ # chown or the du, but probably the chown -- since a large
+ # part of the cost is incurred at image save time.
+ #
+ # ccache.tgz may be empty, or may have the wrong
+ # permissions.
+ "mkdir -p /ccache && time chown -R nobody /ccache && "
+ "time du -sh /ccache && "
+ # Reset stats so `docker_build_with_ccache.sh` can print
+ # useful values at the end of the run.
+ "echo === Prev run stats === && ccache -s && ccache -z && "
+ # Record the current time to let travis_build.sh figure out
+ # the number of bytes in the cache that are actually used --
+ # this is crucial for tuning the maximum cache size.
+ "date +%s > /FBCODE_BUILDER_CCACHE_START_TIME && "
+ # The build running as `nobody` should be able to write here
+ "chown nobody /tmp/ccache.log"
+ )
+ ),
+ ]
diff --git a/build/fbcode_builder/docker_enable_ipv6.sh b/build/fbcode_builder/docker_enable_ipv6.sh
new file mode 100755
index 000000000..3752f6f5e
--- /dev/null
+++ b/build/fbcode_builder/docker_enable_ipv6.sh
@@ -0,0 +1,13 @@
+#!/bin/sh
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+
+# `daemon.json` is normally missing, but let's log it in case that changes.
+touch /etc/docker/daemon.json
+service docker stop
+echo '{"ipv6": true, "fixed-cidr-v6": "2001:db8:1::/64"}' > /etc/docker/daemon.json
+service docker start
+# Fail early if docker failed on start -- add `- sudo dockerd` to debug.
+docker info
+# Paranoia log: what if our config got overwritten?
+cat /etc/docker/daemon.json
diff --git a/build/fbcode_builder/fbcode_builder.py b/build/fbcode_builder/fbcode_builder.py
new file mode 100644
index 000000000..742099321
--- /dev/null
+++ b/build/fbcode_builder/fbcode_builder.py
@@ -0,0 +1,536 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"""
+
+This is a small DSL to describe builds of Facebook's open-source projects
+that are published to Github from a single internal repo, including projects
+that depend on folly, wangle, proxygen, fbthrift, etc.
+
+This file defines the interface of the DSL, and common utilieis, but you
+will have to instantiate a specific builder, with specific options, in
+order to get work done -- see e.g. make_docker_context.py.
+
+== Design notes ==
+
+Goals:
+
+ - A simple declarative language for what needs to be checked out & built,
+ how, in what order.
+
+ - The same specification should work for external continuous integration
+ builds (e.g. Travis + Docker) and for internal VM-based continuous
+ integration builds.
+
+ - One should be able to build without root, and to install to a prefix.
+
+Non-goals:
+
+ - General usefulness. The only point of this is to make it easier to build
+ and test Facebook's open-source services.
+
+Ideas for the future -- these may not be very good :)
+
+ - Especially on Ubuntu 14.04 the current initial setup is inefficient:
+ we add PPAs after having installed a bunch of packages -- this prompts
+ reinstalls of large amounts of code. We also `apt-get update` a few
+ times.
+
+ - A "shell script" builder. Like DockerFBCodeBuilder, but outputs a
+ shell script that runs outside of a container. Or maybe even
+ synchronously executes the shell commands, `make`-style.
+
+ - A "Makefile" generator. That might make iterating on builds even quicker
+ than what you can currently get with Docker build caching.
+
+ - Generate a rebuild script that can be run e.g. inside the built Docker
+ container by tagging certain steps with list-inheriting Python objects:
+ * do change directories
+ * do NOT `git clone` -- if we want to update code this should be a
+ separate script that e.g. runs rebase on top of specific targets
+ across all the repos.
+ * do NOT install software (most / all setup can be skipped)
+ * do NOT `autoreconf` or `configure`
+ * do `make` and `cmake`
+
+ - If we get non-Debian OSes, part of ccache setup should be factored out.
+"""
+
+import os
+import re
+
+from shell_quoting import path_join, shell_join, ShellQuoted
+
+
+def _read_project_github_hashes():
+ base_dir = "deps/github_hashes/" # trailing slash used in regex below
+ for dirname, _, files in os.walk(base_dir):
+ for filename in files:
+ path = os.path.join(dirname, filename)
+ with open(path) as f:
+ m_proj = re.match("^" + base_dir + "(.*)-rev\.txt$", path)
+ if m_proj is None:
+ raise RuntimeError("Not a hash file? {0}".format(path))
+ m_hash = re.match("^Subproject commit ([0-9a-f]+)\n$", f.read())
+ if m_hash is None:
+ raise RuntimeError("No hash in {0}".format(path))
+ yield m_proj.group(1), m_hash.group(1)
+
+
+class FBCodeBuilder(object):
+ def __init__(self, **kwargs):
+ self._options_do_not_access = kwargs # Use .option() instead.
+ # This raises upon detecting options that are specified but unused,
+ # because otherwise it is very easy to make a typo in option names.
+ self.options_used = set()
+ # Mark 'projects_dir' used even if the build installs no github
+ # projects. This is needed because driver programs like
+ # `shell_builder.py` unconditionally set this for all builds.
+ self._github_dir = self.option("projects_dir")
+ self._github_hashes = dict(_read_project_github_hashes())
+
+ def __repr__(self):
+ return "{0}({1})".format(
+ self.__class__.__name__,
+ ", ".join(
+ "{0}={1}".format(k, repr(v))
+ for k, v in self._options_do_not_access.items()
+ ),
+ )
+
+ def option(self, name, default=None):
+ value = self._options_do_not_access.get(name, default)
+ if value is None:
+ raise RuntimeError("Option {0} is required".format(name))
+ self.options_used.add(name)
+ return value
+
+ def has_option(self, name):
+ return name in self._options_do_not_access
+
+ def add_option(self, name, value):
+ if name in self._options_do_not_access:
+ raise RuntimeError("Option {0} already set".format(name))
+ self._options_do_not_access[name] = value
+
+ #
+ # Abstract parts common to every installation flow
+ #
+
+ def render(self, steps):
+ """
+
+ Converts nested actions to your builder's expected output format.
+ Typically takes the output of build().
+
+ """
+ res = self._render_impl(steps) # Implementation-dependent
+ # Now that the output is rendered, we expect all options to have
+ # been used.
+ unused_options = set(self._options_do_not_access)
+ unused_options -= self.options_used
+ if unused_options:
+ raise RuntimeError(
+ "Unused options: {0} -- please check if you made a typo "
+ "in any of them. Those that are truly not useful should "
+ "be not be set so that this typo detection can be useful.".format(
+ unused_options
+ )
+ )
+ return res
+
+ def build(self, steps):
+ if not steps:
+ raise RuntimeError(
+ "Please ensure that the config you are passing " "contains steps"
+ )
+ return [self.setup(), self.diagnostics()] + steps
+
+ def setup(self):
+ "Your builder may want to install packages here."
+ raise NotImplementedError
+
+ def diagnostics(self):
+ "Log some system diagnostics before/after setup for ease of debugging"
+ # The builder's repr is not used in a command to avoid pointlessly
+ # invalidating Docker's build cache.
+ return self.step(
+ "Diagnostics",
+ [
+ self.comment("Builder {0}".format(repr(self))),
+ self.run(ShellQuoted("hostname")),
+ self.run(ShellQuoted("cat /etc/issue || echo no /etc/issue")),
+ self.run(ShellQuoted("g++ --version || echo g++ not installed")),
+ self.run(ShellQuoted("cmake --version || echo cmake not installed")),
+ ],
+ )
+
+ def step(self, name, actions):
+ "A labeled collection of actions or other steps"
+ raise NotImplementedError
+
+ def run(self, shell_cmd):
+ "Run this bash command"
+ raise NotImplementedError
+
+ def set_env(self, key, value):
+ 'Set the environment "key" to value "value"'
+ raise NotImplementedError
+
+ def workdir(self, dir):
+ "Create this directory if it does not exist, and change into it"
+ raise NotImplementedError
+
+ def copy_local_repo(self, dir, dest_name):
+ """
+ Copy the local repo at `dir` into this step's `workdir()`, analog of:
+ cp -r /path/to/folly folly
+ """
+ raise NotImplementedError
+
+ def python_deps(self):
+ return [
+ "wheel",
+ "cython==0.28.6",
+ ]
+
+ def debian_deps(self):
+ return [
+ "autoconf-archive",
+ "bison",
+ "build-essential",
+ "cmake",
+ "curl",
+ "flex",
+ "git",
+ "gperf",
+ "joe",
+ "libboost-all-dev",
+ "libcap-dev",
+ "libdouble-conversion-dev",
+ "libevent-dev",
+ "libgflags-dev",
+ "libgoogle-glog-dev",
+ "libkrb5-dev",
+ "libpcre3-dev",
+ "libpthread-stubs0-dev",
+ "libnuma-dev",
+ "libsasl2-dev",
+ "libsnappy-dev",
+ "libsqlite3-dev",
+ "libssl-dev",
+ "libtool",
+ "netcat-openbsd",
+ "pkg-config",
+ "sudo",
+ "unzip",
+ "wget",
+ "python3-venv",
+ ]
+
+ #
+ # Specific build helpers
+ #
+
+ def install_debian_deps(self):
+ actions = [
+ self.run(
+ ShellQuoted("apt-get update && apt-get install -yq {deps}").format(
+ deps=shell_join(
+ " ", (ShellQuoted(dep) for dep in self.debian_deps())
+ )
+ )
+ ),
+ ]
+ gcc_version = self.option("gcc_version")
+
+ # Make the selected GCC the default before building anything
+ actions.extend(
+ [
+ self.run(
+ ShellQuoted("apt-get install -yq {c} {cpp}").format(
+ c=ShellQuoted("gcc-{v}").format(v=gcc_version),
+ cpp=ShellQuoted("g++-{v}").format(v=gcc_version),
+ )
+ ),
+ self.run(
+ ShellQuoted(
+ "update-alternatives --install /usr/bin/gcc gcc {c} 40 "
+ "--slave /usr/bin/g++ g++ {cpp}"
+ ).format(
+ c=ShellQuoted("/usr/bin/gcc-{v}").format(v=gcc_version),
+ cpp=ShellQuoted("/usr/bin/g++-{v}").format(v=gcc_version),
+ )
+ ),
+ self.run(ShellQuoted("update-alternatives --config gcc")),
+ ]
+ )
+
+ actions.extend(self.debian_ccache_setup_steps())
+
+ return self.step("Install packages for Debian-based OS", actions)
+
+ def create_python_venv(self):
+ actions = []
+ if self.option("PYTHON_VENV", "OFF") == "ON":
+ actions.append(
+ self.run(
+ ShellQuoted("python3 -m venv {p}").format(
+ p=path_join(self.option("prefix"), "venv")
+ )
+ )
+ )
+ return actions
+
+ def python_venv(self):
+ actions = []
+ if self.option("PYTHON_VENV", "OFF") == "ON":
+ actions.append(
+ ShellQuoted("source {p}").format(
+ p=path_join(self.option("prefix"), "venv", "bin", "activate")
+ )
+ )
+
+ actions.append(
+ self.run(
+ ShellQuoted("python3 -m pip install {deps}").format(
+ deps=shell_join(
+ " ", (ShellQuoted(dep) for dep in self.python_deps())
+ )
+ )
+ )
+ )
+ return actions
+
+ def enable_rust_toolchain(self, toolchain="stable", is_bootstrap=True):
+ choices = set(["stable", "beta", "nightly"])
+
+ assert toolchain in choices, (
+ "while enabling rust toolchain: {} is not in {}"
+ ).format(toolchain, choices)
+
+ rust_toolchain_opt = (toolchain, is_bootstrap)
+ prev_opt = self.option("rust_toolchain", rust_toolchain_opt)
+ assert prev_opt == rust_toolchain_opt, (
+ "while enabling rust toolchain: previous toolchain already set to"
+ " {}, but trying to set it to {} now"
+ ).format(prev_opt, rust_toolchain_opt)
+
+ self.add_option("rust_toolchain", rust_toolchain_opt)
+
+ def rust_toolchain(self):
+ actions = []
+ if self.option("rust_toolchain", False):
+ (toolchain, is_bootstrap) = self.option("rust_toolchain")
+ rust_dir = path_join(self.option("prefix"), "rust")
+ actions = [
+ self.set_env("CARGO_HOME", rust_dir),
+ self.set_env("RUSTUP_HOME", rust_dir),
+ self.set_env("RUSTC_BOOTSTRAP", "1" if is_bootstrap else "0"),
+ self.run(
+ ShellQuoted(
+ "curl -sSf https://build.travis-ci.com/files/rustup-init.sh"
+ " | sh -s --"
+ " --default-toolchain={r} "
+ " --profile=minimal"
+ " --no-modify-path"
+ " -y"
+ ).format(p=rust_dir, r=toolchain)
+ ),
+ self.set_env(
+ "PATH",
+ ShellQuoted("{p}:$PATH").format(p=path_join(rust_dir, "bin")),
+ ),
+ self.run(ShellQuoted("rustup update")),
+ self.run(ShellQuoted("rustc --version")),
+ self.run(ShellQuoted("rustup --version")),
+ self.run(ShellQuoted("cargo --version")),
+ ]
+ return actions
+
+ def debian_ccache_setup_steps(self):
+ return [] # It's ok to ship a renderer without ccache support.
+
+ def github_project_workdir(self, project, path):
+ # Only check out a non-default branch if requested. This especially
+ # makes sense when building from a local repo.
+ git_hash = self.option(
+ "{0}:git_hash".format(project),
+ # Any repo that has a hash in deps/github_hashes defaults to
+ # that, with the goal of making builds maximally consistent.
+ self._github_hashes.get(project, ""),
+ )
+ maybe_change_branch = (
+ [
+ self.run(ShellQuoted("git checkout {hash}").format(hash=git_hash)),
+ ]
+ if git_hash
+ else []
+ )
+
+ local_repo_dir = self.option("{0}:local_repo_dir".format(project), "")
+ return self.step(
+ "Check out {0}, workdir {1}".format(project, path),
+ [
+ self.workdir(self._github_dir),
+ self.run(
+ ShellQuoted("git clone {opts} https://github.com/{p}").format(
+ p=project,
+ opts=ShellQuoted(
+ self.option("{}:git_clone_opts".format(project), "")
+ ),
+ )
+ )
+ if not local_repo_dir
+ else self.copy_local_repo(local_repo_dir, os.path.basename(project)),
+ self.workdir(
+ path_join(self._github_dir, os.path.basename(project), path),
+ ),
+ ]
+ + maybe_change_branch,
+ )
+
+ def fb_github_project_workdir(self, project_and_path, github_org="facebook"):
+ "This helper lets Facebook-internal CI special-cases FB projects"
+ project, path = project_and_path.split("/", 1)
+ return self.github_project_workdir(github_org + "/" + project, path)
+
+ def _make_vars(self, make_vars):
+ return shell_join(
+ " ",
+ (
+ ShellQuoted("{k}={v}").format(k=k, v=v)
+ for k, v in ({} if make_vars is None else make_vars).items()
+ ),
+ )
+
+ def parallel_make(self, make_vars=None):
+ return self.run(
+ ShellQuoted("make -j {n} VERBOSE=1 {vars}").format(
+ n=self.option("make_parallelism"),
+ vars=self._make_vars(make_vars),
+ )
+ )
+
+ def make_and_install(self, make_vars=None):
+ return [
+ self.parallel_make(make_vars),
+ self.run(
+ ShellQuoted("make install VERBOSE=1 {vars}").format(
+ vars=self._make_vars(make_vars),
+ )
+ ),
+ ]
+
+ def configure(self, name=None):
+ autoconf_options = {}
+ if name is not None:
+ autoconf_options.update(
+ self.option("{0}:autoconf_options".format(name), {})
+ )
+ return [
+ self.run(
+ ShellQuoted(
+ 'LDFLAGS="$LDFLAGS -L"{p}"/lib -Wl,-rpath="{p}"/lib" '
+ 'CFLAGS="$CFLAGS -I"{p}"/include" '
+ 'CPPFLAGS="$CPPFLAGS -I"{p}"/include" '
+ "PY_PREFIX={p} "
+ "./configure --prefix={p} {args}"
+ ).format(
+ p=self.option("prefix"),
+ args=shell_join(
+ " ",
+ (
+ ShellQuoted("{k}={v}").format(k=k, v=v)
+ for k, v in autoconf_options.items()
+ ),
+ ),
+ )
+ ),
+ ]
+
+ def autoconf_install(self, name):
+ return self.step(
+ "Build and install {0}".format(name),
+ [
+ self.run(ShellQuoted("autoreconf -ivf")),
+ ]
+ + self.configure()
+ + self.make_and_install(),
+ )
+
+ def cmake_configure(self, name, cmake_path=".."):
+ cmake_defines = {
+ "BUILD_SHARED_LIBS": "ON",
+ "CMAKE_INSTALL_PREFIX": self.option("prefix"),
+ }
+
+ # Hacks to add thriftpy3 support
+ if "BUILD_THRIFT_PY3" in os.environ and "folly" in name:
+ cmake_defines["PYTHON_EXTENSIONS"] = "True"
+
+ if "BUILD_THRIFT_PY3" in os.environ and "fbthrift" in name:
+ cmake_defines["thriftpy3"] = "ON"
+
+ cmake_defines.update(self.option("{0}:cmake_defines".format(name), {}))
+ return [
+ self.run(
+ ShellQuoted(
+ 'CXXFLAGS="$CXXFLAGS -fPIC -isystem "{p}"/include" '
+ 'CFLAGS="$CFLAGS -fPIC -isystem "{p}"/include" '
+ "cmake {args} {cmake_path}"
+ ).format(
+ p=self.option("prefix"),
+ args=shell_join(
+ " ",
+ (
+ ShellQuoted("-D{k}={v}").format(k=k, v=v)
+ for k, v in cmake_defines.items()
+ ),
+ ),
+ cmake_path=cmake_path,
+ )
+ ),
+ ]
+
+ def cmake_install(self, name, cmake_path=".."):
+ return self.step(
+ "Build and install {0}".format(name),
+ self.cmake_configure(name, cmake_path) + self.make_and_install(),
+ )
+
+ def cargo_build(self, name):
+ return self.step(
+ "Build {0}".format(name),
+ [
+ self.run(
+ ShellQuoted("cargo build -j {n}").format(
+ n=self.option("make_parallelism")
+ )
+ )
+ ],
+ )
+
+ def fb_github_autoconf_install(self, project_and_path, github_org="facebook"):
+ return [
+ self.fb_github_project_workdir(project_and_path, github_org),
+ self.autoconf_install(project_and_path),
+ ]
+
+ def fb_github_cmake_install(
+ self, project_and_path, cmake_path="..", github_org="facebook"
+ ):
+ return [
+ self.fb_github_project_workdir(project_and_path, github_org),
+ self.cmake_install(project_and_path, cmake_path),
+ ]
+
+ def fb_github_cargo_build(self, project_and_path, github_org="facebook"):
+ return [
+ self.fb_github_project_workdir(project_and_path, github_org),
+ self.cargo_build(project_and_path),
+ ]
diff --git a/build/fbcode_builder/fbcode_builder_config.py b/build/fbcode_builder/fbcode_builder_config.py
new file mode 100644
index 000000000..5ba6e607a
--- /dev/null
+++ b/build/fbcode_builder/fbcode_builder_config.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"Demo config, so that `make_docker_context.py --help` works in this directory."
+
+config = {
+ "fbcode_builder_spec": lambda _builder: {
+ "depends_on": [],
+ "steps": [],
+ },
+ "github_project": "demo/project",
+}
diff --git a/build/fbcode_builder/getdeps.py b/build/fbcode_builder/getdeps.py
new file mode 100755
index 000000000..1b539735f
--- /dev/null
+++ b/build/fbcode_builder/getdeps.py
@@ -0,0 +1,1071 @@
+#!/usr/bin/env python3
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import argparse
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+# We don't import cache.create_cache directly as the facebook
+# specific import below may monkey patch it, and we want to
+# observe the patched version of this function!
+import getdeps.cache as cache_module
+from getdeps.buildopts import setup_build_options
+from getdeps.dyndeps import create_dyn_dep_munger
+from getdeps.errors import TransientFailure
+from getdeps.fetcher import (
+ SystemPackageFetcher,
+ file_name_is_cmake_file,
+ list_files_under_dir_newer_than_timestamp,
+)
+from getdeps.load import ManifestLoader
+from getdeps.manifest import ManifestParser
+from getdeps.platform import HostType
+from getdeps.runcmd import run_cmd
+from getdeps.subcmd import SubCmd, add_subcommands, cmd
+
+
+try:
+ import getdeps.facebook # noqa: F401
+except ImportError:
+ # we don't ship the facebook specific subdir,
+ # so allow that to fail silently
+ pass
+
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "getdeps"))
+
+
+class UsageError(Exception):
+ pass
+
+
+@cmd("validate-manifest", "parse a manifest and validate that it is correct")
+class ValidateManifest(SubCmd):
+ def run(self, args):
+ try:
+ ManifestParser(file_name=args.file_name)
+ print("OK", file=sys.stderr)
+ return 0
+ except Exception as exc:
+ print("ERROR: %s" % str(exc), file=sys.stderr)
+ return 1
+
+ def setup_parser(self, parser):
+ parser.add_argument("file_name", help="path to the manifest file")
+
+
+@cmd("show-host-type", "outputs the host type tuple for the host machine")
+class ShowHostType(SubCmd):
+ def run(self, args):
+ host = HostType()
+ print("%s" % host.as_tuple_string())
+ return 0
+
+
+class ProjectCmdBase(SubCmd):
+ def run(self, args):
+ opts = setup_build_options(args)
+
+ if args.current_project is not None:
+ opts.repo_project = args.current_project
+ if args.project is None:
+ if opts.repo_project is None:
+ raise UsageError(
+ "no project name specified, and no .projectid file found"
+ )
+ if opts.repo_project == "fbsource":
+ # The fbsource repository is a little special. There is no project
+ # manifest file for it. A specific project must always be explicitly
+ # specified when building from fbsource.
+ raise UsageError(
+ "no project name specified (required when building in fbsource)"
+ )
+ args.project = opts.repo_project
+
+ ctx_gen = opts.get_context_generator(facebook_internal=args.facebook_internal)
+ if args.test_dependencies:
+ ctx_gen.set_value_for_all_projects("test", "on")
+ if args.enable_tests:
+ ctx_gen.set_value_for_project(args.project, "test", "on")
+ else:
+ ctx_gen.set_value_for_project(args.project, "test", "off")
+
+ loader = ManifestLoader(opts, ctx_gen)
+ self.process_project_dir_arguments(args, loader)
+
+ manifest = loader.load_manifest(args.project)
+
+ self.run_project_cmd(args, loader, manifest)
+
+ def process_project_dir_arguments(self, args, loader):
+ def parse_project_arg(arg, arg_type):
+ parts = arg.split(":")
+ if len(parts) == 2:
+ project, path = parts
+ elif len(parts) == 1:
+ project = args.project
+ path = parts[0]
+ # On Windows path contains colon, e.g. C:\open
+ elif os.name == "nt" and len(parts) == 3:
+ project = parts[0]
+ path = parts[1] + ":" + parts[2]
+ else:
+ raise UsageError(
+ "invalid %s argument; too many ':' characters: %s" % (arg_type, arg)
+ )
+
+ return project, os.path.abspath(path)
+
+ # If we are currently running from a project repository,
+ # use the current repository for the project sources.
+ build_opts = loader.build_opts
+ if build_opts.repo_project is not None and build_opts.repo_root is not None:
+ loader.set_project_src_dir(build_opts.repo_project, build_opts.repo_root)
+
+ for arg in args.src_dir:
+ project, path = parse_project_arg(arg, "--src-dir")
+ loader.set_project_src_dir(project, path)
+
+ for arg in args.build_dir:
+ project, path = parse_project_arg(arg, "--build-dir")
+ loader.set_project_build_dir(project, path)
+
+ for arg in args.install_dir:
+ project, path = parse_project_arg(arg, "--install-dir")
+ loader.set_project_install_dir(project, path)
+
+ for arg in args.project_install_prefix:
+ project, path = parse_project_arg(arg, "--install-prefix")
+ loader.set_project_install_prefix(project, path)
+
+ def setup_parser(self, parser):
+ parser.add_argument(
+ "project",
+ nargs="?",
+ help=(
+ "name of the project or path to a manifest "
+ "file describing the project"
+ ),
+ )
+ parser.add_argument(
+ "--no-tests",
+ action="store_false",
+ dest="enable_tests",
+ default=True,
+ help="Disable building tests for this project.",
+ )
+ parser.add_argument(
+ "--test-dependencies",
+ action="store_true",
+ help="Enable building tests for dependencies as well.",
+ )
+ parser.add_argument(
+ "--current-project",
+ help="Specify the name of the fbcode_builder manifest file for the "
+ "current repository. If not specified, the code will attempt to find "
+ "this in a .projectid file in the repository root.",
+ )
+ parser.add_argument(
+ "--src-dir",
+ default=[],
+ action="append",
+ help="Specify a local directory to use for the project source, "
+ "rather than fetching it.",
+ )
+ parser.add_argument(
+ "--build-dir",
+ default=[],
+ action="append",
+ help="Explicitly specify the build directory to use for the "
+ "project, instead of the default location in the scratch path. "
+ "This only affects the project specified, and not its dependencies.",
+ )
+ parser.add_argument(
+ "--install-dir",
+ default=[],
+ action="append",
+ help="Explicitly specify the install directory to use for the "
+ "project, instead of the default location in the scratch path. "
+ "This only affects the project specified, and not its dependencies.",
+ )
+ parser.add_argument(
+ "--project-install-prefix",
+ default=[],
+ action="append",
+ help="Specify the final deployment installation path for a project",
+ )
+
+ self.setup_project_cmd_parser(parser)
+
+ def setup_project_cmd_parser(self, parser):
+ pass
+
+
+class CachedProject(object):
+ """A helper that allows calling the cache logic for a project
+ from both the build and the fetch code"""
+
+ def __init__(self, cache, loader, m):
+ self.m = m
+ self.inst_dir = loader.get_project_install_dir(m)
+ self.project_hash = loader.get_project_hash(m)
+ self.ctx = loader.ctx_gen.get_context(m.name)
+ self.loader = loader
+ self.cache = cache
+
+ self.cache_file_name = "-".join(
+ (
+ m.name,
+ self.ctx.get("os"),
+ self.ctx.get("distro") or "none",
+ self.ctx.get("distro_vers") or "none",
+ self.project_hash,
+ "buildcache.tgz",
+ )
+ )
+
+ def is_cacheable(self):
+ """We only cache third party projects"""
+ return self.cache and self.m.shipit_project is None
+
+ def was_cached(self):
+ cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build")
+ return os.path.exists(cached_marker)
+
+ def download(self):
+ if self.is_cacheable() and not os.path.exists(self.inst_dir):
+ print("check cache for %s" % self.cache_file_name)
+ dl_dir = os.path.join(self.loader.build_opts.scratch_dir, "downloads")
+ if not os.path.exists(dl_dir):
+ os.makedirs(dl_dir)
+ try:
+ target_file_name = os.path.join(dl_dir, self.cache_file_name)
+ if self.cache.download_to_file(self.cache_file_name, target_file_name):
+ tf = tarfile.open(target_file_name, "r")
+ print(
+ "Extracting %s -> %s..." % (self.cache_file_name, self.inst_dir)
+ )
+ tf.extractall(self.inst_dir)
+
+ cached_marker = os.path.join(self.inst_dir, ".getdeps-cached-build")
+ with open(cached_marker, "w") as f:
+ f.write("\n")
+
+ return True
+ except Exception as exc:
+ print("%s" % str(exc))
+
+ return False
+
+ def upload(self):
+ if self.is_cacheable():
+ # We can prepare an archive and stick it in LFS
+ tempdir = tempfile.mkdtemp()
+ tarfilename = os.path.join(tempdir, self.cache_file_name)
+ print("Archiving for cache: %s..." % tarfilename)
+ tf = tarfile.open(tarfilename, "w:gz")
+ tf.add(self.inst_dir, arcname=".")
+ tf.close()
+ try:
+ self.cache.upload_from_file(self.cache_file_name, tarfilename)
+ except Exception as exc:
+ print(
+ "Failed to upload to cache (%s), continue anyway" % str(exc),
+ file=sys.stderr,
+ )
+ shutil.rmtree(tempdir)
+
+
+@cmd("fetch", "fetch the code for a given project")
+class FetchCmd(ProjectCmdBase):
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--recursive",
+ help="fetch the transitive deps also",
+ action="store_true",
+ default=False,
+ )
+ parser.add_argument(
+ "--host-type",
+ help=(
+ "When recursively fetching, fetch deps for "
+ "this host type rather than the current system"
+ ),
+ )
+
+ def run_project_cmd(self, args, loader, manifest):
+ if args.recursive:
+ projects = loader.manifests_in_dependency_order()
+ else:
+ projects = [manifest]
+
+ cache = cache_module.create_cache()
+ for m in projects:
+ cached_project = CachedProject(cache, loader, m)
+ if cached_project.download():
+ continue
+
+ inst_dir = loader.get_project_install_dir(m)
+ built_marker = os.path.join(inst_dir, ".built-by-getdeps")
+ if os.path.exists(built_marker):
+ with open(built_marker, "r") as f:
+ built_hash = f.read().strip()
+
+ project_hash = loader.get_project_hash(m)
+ if built_hash == project_hash:
+ continue
+
+ # We need to fetch the sources
+ fetcher = loader.create_fetcher(m)
+ fetcher.update()
+
+
+@cmd("install-system-deps", "Install system packages to satisfy the deps for a project")
+class InstallSysDepsCmd(ProjectCmdBase):
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--recursive",
+ help="install the transitive deps also",
+ action="store_true",
+ default=False,
+ )
+
+ def run_project_cmd(self, args, loader, manifest):
+ if args.recursive:
+ projects = loader.manifests_in_dependency_order()
+ else:
+ projects = [manifest]
+
+ cache = cache_module.create_cache()
+ all_packages = {}
+ for m in projects:
+ ctx = loader.ctx_gen.get_context(m.name)
+ packages = m.get_required_system_packages(ctx)
+ for k, v in packages.items():
+ merged = all_packages.get(k, [])
+ merged += v
+ all_packages[k] = merged
+
+ manager = loader.build_opts.host_type.get_package_manager()
+ if manager == "rpm":
+ packages = sorted(list(set(all_packages["rpm"])))
+ if packages:
+ run_cmd(["dnf", "install", "-y"] + packages)
+ elif manager == "deb":
+ packages = sorted(list(set(all_packages["deb"])))
+ if packages:
+ run_cmd(["apt", "install", "-y"] + packages)
+ else:
+ print("I don't know how to install any packages on this system")
+
+
+@cmd("list-deps", "lists the transitive deps for a given project")
+class ListDepsCmd(ProjectCmdBase):
+ def run_project_cmd(self, args, loader, manifest):
+ for m in loader.manifests_in_dependency_order():
+ print(m.name)
+ return 0
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--host-type",
+ help=(
+ "Produce the list for the specified host type, "
+ "rather than that of the current system"
+ ),
+ )
+
+
+def clean_dirs(opts):
+ for d in ["build", "installed", "extracted", "shipit"]:
+ d = os.path.join(opts.scratch_dir, d)
+ print("Cleaning %s..." % d)
+ if os.path.exists(d):
+ shutil.rmtree(d)
+
+
+@cmd("clean", "clean up the scratch dir")
+class CleanCmd(SubCmd):
+ def run(self, args):
+ opts = setup_build_options(args)
+ clean_dirs(opts)
+
+
+@cmd("show-build-dir", "print the build dir for a given project")
+class ShowBuildDirCmd(ProjectCmdBase):
+ def run_project_cmd(self, args, loader, manifest):
+ if args.recursive:
+ manifests = loader.manifests_in_dependency_order()
+ else:
+ manifests = [manifest]
+
+ for m in manifests:
+ inst_dir = loader.get_project_build_dir(m)
+ print(inst_dir)
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--recursive",
+ help="print the transitive deps also",
+ action="store_true",
+ default=False,
+ )
+
+
+@cmd("show-inst-dir", "print the installation dir for a given project")
+class ShowInstDirCmd(ProjectCmdBase):
+ def run_project_cmd(self, args, loader, manifest):
+ if args.recursive:
+ manifests = loader.manifests_in_dependency_order()
+ else:
+ manifests = [manifest]
+
+ for m in manifests:
+ inst_dir = loader.get_project_install_dir_respecting_install_prefix(m)
+ print(inst_dir)
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--recursive",
+ help="print the transitive deps also",
+ action="store_true",
+ default=False,
+ )
+
+
+@cmd("show-source-dir", "print the source dir for a given project")
+class ShowSourceDirCmd(ProjectCmdBase):
+ def run_project_cmd(self, args, loader, manifest):
+ if args.recursive:
+ manifests = loader.manifests_in_dependency_order()
+ else:
+ manifests = [manifest]
+
+ for m in manifests:
+ fetcher = loader.create_fetcher(m)
+ print(fetcher.get_src_dir())
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--recursive",
+ help="print the transitive deps also",
+ action="store_true",
+ default=False,
+ )
+
+
+@cmd("build", "build a given project")
+class BuildCmd(ProjectCmdBase):
+ def run_project_cmd(self, args, loader, manifest):
+ if args.clean:
+ clean_dirs(loader.build_opts)
+
+ print("Building on %s" % loader.ctx_gen.get_context(args.project))
+ projects = loader.manifests_in_dependency_order()
+
+ cache = cache_module.create_cache() if args.use_build_cache else None
+
+ # Accumulate the install directories so that the build steps
+ # can find their dep installation
+ install_dirs = []
+
+ for m in projects:
+ fetcher = loader.create_fetcher(m)
+
+ if isinstance(fetcher, SystemPackageFetcher):
+ # We are guaranteed that if the fetcher is set to
+ # SystemPackageFetcher then this item is completely
+ # satisfied by the appropriate system packages
+ continue
+
+ if args.clean:
+ fetcher.clean()
+
+ build_dir = loader.get_project_build_dir(m)
+ inst_dir = loader.get_project_install_dir(m)
+
+ if (
+ m == manifest
+ and not args.only_deps
+ or m != manifest
+ and not args.no_deps
+ ):
+ print("Assessing %s..." % m.name)
+ project_hash = loader.get_project_hash(m)
+ ctx = loader.ctx_gen.get_context(m.name)
+ built_marker = os.path.join(inst_dir, ".built-by-getdeps")
+
+ cached_project = CachedProject(cache, loader, m)
+
+ reconfigure, sources_changed = self.compute_source_change_status(
+ cached_project, fetcher, m, built_marker, project_hash
+ )
+
+ if os.path.exists(built_marker) and not cached_project.was_cached():
+ # We've previously built this. We may need to reconfigure if
+ # our deps have changed, so let's check them.
+ dep_reconfigure, dep_build = self.compute_dep_change_status(
+ m, built_marker, loader
+ )
+ if dep_reconfigure:
+ reconfigure = True
+ if dep_build:
+ sources_changed = True
+
+ extra_cmake_defines = (
+ json.loads(args.extra_cmake_defines)
+ if args.extra_cmake_defines
+ else {}
+ )
+
+ if sources_changed or reconfigure or not os.path.exists(built_marker):
+ if os.path.exists(built_marker):
+ os.unlink(built_marker)
+ src_dir = fetcher.get_src_dir()
+ builder = m.create_builder(
+ loader.build_opts,
+ src_dir,
+ build_dir,
+ inst_dir,
+ ctx,
+ loader,
+ final_install_prefix=loader.get_project_install_prefix(m),
+ extra_cmake_defines=extra_cmake_defines,
+ )
+ builder.build(install_dirs, reconfigure=reconfigure)
+
+ with open(built_marker, "w") as f:
+ f.write(project_hash)
+
+ # Only populate the cache from continuous build runs
+ if args.schedule_type == "continuous":
+ cached_project.upload()
+
+ install_dirs.append(inst_dir)
+
+ def compute_dep_change_status(self, m, built_marker, loader):
+ reconfigure = False
+ sources_changed = False
+ st = os.lstat(built_marker)
+
+ ctx = loader.ctx_gen.get_context(m.name)
+ dep_list = sorted(m.get_section_as_dict("dependencies", ctx).keys())
+ for dep in dep_list:
+ if reconfigure and sources_changed:
+ break
+
+ dep_manifest = loader.load_manifest(dep)
+ dep_root = loader.get_project_install_dir(dep_manifest)
+ for dep_file in list_files_under_dir_newer_than_timestamp(
+ dep_root, st.st_mtime
+ ):
+ if os.path.basename(dep_file) == ".built-by-getdeps":
+ continue
+ if file_name_is_cmake_file(dep_file):
+ if not reconfigure:
+ reconfigure = True
+ print(
+ f"Will reconfigure cmake because {dep_file} is newer than {built_marker}"
+ )
+ else:
+ if not sources_changed:
+ sources_changed = True
+ print(
+ f"Will run build because {dep_file} is newer than {built_marker}"
+ )
+
+ if reconfigure and sources_changed:
+ break
+
+ return reconfigure, sources_changed
+
+ def compute_source_change_status(
+ self, cached_project, fetcher, m, built_marker, project_hash
+ ):
+ reconfigure = False
+ sources_changed = False
+ if not cached_project.download():
+ check_fetcher = True
+ if os.path.exists(built_marker):
+ check_fetcher = False
+ with open(built_marker, "r") as f:
+ built_hash = f.read().strip()
+ if built_hash == project_hash:
+ if cached_project.is_cacheable():
+ # We can blindly trust the build status
+ reconfigure = False
+ sources_changed = False
+ else:
+ # Otherwise, we may have changed the source, so let's
+ # check in with the fetcher layer
+ check_fetcher = True
+ else:
+ # Some kind of inconsistency with a prior build,
+ # let's run it again to be sure
+ os.unlink(built_marker)
+ reconfigure = True
+ sources_changed = True
+ # While we don't need to consult the fetcher for the
+ # status in this case, we may still need to have eg: shipit
+ # run in order to have a correct source tree.
+ fetcher.update()
+
+ if check_fetcher:
+ change_status = fetcher.update()
+ reconfigure = change_status.build_changed()
+ sources_changed = change_status.sources_changed()
+
+ return reconfigure, sources_changed
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--clean",
+ action="store_true",
+ default=False,
+ help=(
+ "Clean up the build and installation area prior to building, "
+ "causing the projects to be built from scratch"
+ ),
+ )
+ parser.add_argument(
+ "--no-deps",
+ action="store_true",
+ default=False,
+ help=(
+ "Only build the named project, not its deps. "
+ "This is most useful after you've built all of the deps, "
+ "and helps to avoid waiting for relatively "
+ "slow up-to-date-ness checks"
+ ),
+ )
+ parser.add_argument(
+ "--only-deps",
+ action="store_true",
+ default=False,
+ help=(
+ "Only build the named project's deps. "
+ "This is most useful when you want to separate out building "
+ "of all of the deps and your project"
+ ),
+ )
+ parser.add_argument(
+ "--no-build-cache",
+ action="store_false",
+ default=True,
+ dest="use_build_cache",
+ help="Do not attempt to use the build cache.",
+ )
+ parser.add_argument(
+ "--schedule-type", help="Indicates how the build was activated"
+ )
+ parser.add_argument(
+ "--extra-cmake-defines",
+ help=(
+ "Input json map that contains extra cmake defines to be used "
+ "when compiling the current project and all its deps. "
+ 'e.g: \'{"CMAKE_CXX_FLAGS": "--bla"}\''
+ ),
+ )
+
+
+@cmd("fixup-dyn-deps", "Adjusts dynamic dependencies for packaging purposes")
+class FixupDeps(ProjectCmdBase):
+ def run_project_cmd(self, args, loader, manifest):
+ projects = loader.manifests_in_dependency_order()
+
+ # Accumulate the install directories so that the build steps
+ # can find their dep installation
+ install_dirs = []
+
+ for m in projects:
+ inst_dir = loader.get_project_install_dir_respecting_install_prefix(m)
+ install_dirs.append(inst_dir)
+
+ if m == manifest:
+ dep_munger = create_dyn_dep_munger(
+ loader.build_opts, install_dirs, args.strip
+ )
+ dep_munger.process_deps(args.destdir, args.final_install_prefix)
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument("destdir", help="Where to copy the fixed up executables")
+ parser.add_argument(
+ "--final-install-prefix", help="specify the final installation prefix"
+ )
+ parser.add_argument(
+ "--strip",
+ action="store_true",
+ default=False,
+ help="Strip debug info while processing executables",
+ )
+
+
+@cmd("test", "test a given project")
+class TestCmd(ProjectCmdBase):
+ def run_project_cmd(self, args, loader, manifest):
+ projects = loader.manifests_in_dependency_order()
+
+ # Accumulate the install directories so that the test steps
+ # can find their dep installation
+ install_dirs = []
+
+ for m in projects:
+ inst_dir = loader.get_project_install_dir(m)
+
+ if m == manifest or args.test_dependencies:
+ built_marker = os.path.join(inst_dir, ".built-by-getdeps")
+ if not os.path.exists(built_marker):
+ print("project %s has not been built" % m.name)
+ # TODO: we could just go ahead and build it here, but I
+ # want to tackle that as part of adding build-for-test
+ # support.
+ return 1
+ fetcher = loader.create_fetcher(m)
+ src_dir = fetcher.get_src_dir()
+ ctx = loader.ctx_gen.get_context(m.name)
+ build_dir = loader.get_project_build_dir(m)
+ builder = m.create_builder(
+ loader.build_opts, src_dir, build_dir, inst_dir, ctx, loader
+ )
+
+ builder.run_tests(
+ install_dirs,
+ schedule_type=args.schedule_type,
+ owner=args.test_owner,
+ test_filter=args.filter,
+ retry=args.retry,
+ no_testpilot=args.no_testpilot,
+ )
+
+ install_dirs.append(inst_dir)
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--schedule-type", help="Indicates how the build was activated"
+ )
+ parser.add_argument("--test-owner", help="Owner for testpilot")
+ parser.add_argument("--filter", help="Only run the tests matching the regex")
+ parser.add_argument(
+ "--retry",
+ type=int,
+ default=3,
+ help="Number of immediate retries for failed tests "
+ "(noop in continuous and testwarden runs)",
+ )
+ parser.add_argument(
+ "--no-testpilot",
+ help="Do not use Test Pilot even when available",
+ action="store_true",
+ )
+
+
+@cmd("generate-github-actions", "generate a GitHub actions configuration")
+class GenerateGitHubActionsCmd(ProjectCmdBase):
+ RUN_ON_ALL = """ [push, pull_request]"""
+ RUN_ON_DEFAULT = """
+ push:
+ branches:
+ - master
+ pull_request:
+ branches:
+ - master"""
+
+ def run_project_cmd(self, args, loader, manifest):
+ platforms = [
+ HostType("linux", "ubuntu", "18"),
+ HostType("darwin", None, None),
+ HostType("windows", None, None),
+ ]
+
+ for p in platforms:
+ self.write_job_for_platform(p, args)
+
+ # TODO: Break up complex function
+ def write_job_for_platform(self, platform, args): # noqa: C901
+ build_opts = setup_build_options(args, platform)
+ ctx_gen = build_opts.get_context_generator(facebook_internal=False)
+ loader = ManifestLoader(build_opts, ctx_gen)
+ manifest = loader.load_manifest(args.project)
+ manifest_ctx = loader.ctx_gen.get_context(manifest.name)
+ run_on = self.RUN_ON_ALL if args.run_on_all_branches else self.RUN_ON_DEFAULT
+
+ # Some projects don't do anything "useful" as a leaf project, only
+ # as a dep for a leaf project. Check for those here; we don't want
+ # to waste the effort scheduling them on CI.
+ # We do this by looking at the builder type in the manifest file
+ # rather than creating a builder and checking its type because we
+ # don't know enough to create the full builder instance here.
+ if manifest.get("build", "builder", ctx=manifest_ctx) == "nop":
+ return None
+
+ # We want to be sure that we're running things with python 3
+ # but python versioning is honestly a bit of a frustrating mess.
+ # `python` may be version 2 or version 3 depending on the system.
+ # python3 may not be a thing at all!
+ # Assume an optimistic default
+ py3 = "python3"
+
+ if build_opts.is_linux():
+ job_name = "linux"
+ runs_on = f"ubuntu-{args.ubuntu_version}"
+ elif build_opts.is_windows():
+ # We're targeting the windows-2016 image because it has
+ # Visual Studio 2017 installed, and at the time of writing,
+ # the version of boost in the manifests (1.69) is not
+ # buildable with Visual Studio 2019
+ job_name = "windows"
+ runs_on = "windows-2016"
+ # The windows runners are python 3 by default; python2.exe
+ # is available if needed.
+ py3 = "python"
+ else:
+ job_name = "mac"
+ runs_on = "macOS-latest"
+
+ os.makedirs(args.output_dir, exist_ok=True)
+ output_file = os.path.join(args.output_dir, f"getdeps_{job_name}.yml")
+ with open(output_file, "w") as out:
+ # Deliberate line break here because the @ and the generated
+ # symbols are meaningful to our internal tooling when they
+ # appear in a single token
+ out.write("# This file was @")
+ out.write("generated by getdeps.py\n")
+ out.write(
+ f"""
+name: {job_name}
+
+on:{run_on}
+
+jobs:
+"""
+ )
+
+ getdeps = f"{py3} build/fbcode_builder/getdeps.py"
+
+ out.write(" build:\n")
+ out.write(" runs-on: %s\n" % runs_on)
+ out.write(" steps:\n")
+ out.write(" - uses: actions/checkout@v1\n")
+
+ if build_opts.is_windows():
+ # cmake relies on BOOST_ROOT but GH deliberately don't set it in order
+ # to avoid versioning issues:
+ # https://github.com/actions/virtual-environments/issues/319
+ # Instead, set the version we think we need; this is effectively
+ # coupled with the boost manifest
+ # This is the unusual syntax for setting an env var for the rest of
+ # the steps in a workflow:
+ # https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/
+ out.write(" - name: Export boost environment\n")
+ out.write(
+ ' run: "echo BOOST_ROOT=%BOOST_ROOT_1_69_0% >> %GITHUB_ENV%"\n'
+ )
+ out.write(" shell: cmd\n")
+
+ # The git installation may not like long filenames, so tell it
+ # that we want it to use them!
+ out.write(" - name: Fix Git config\n")
+ out.write(" run: git config --system core.longpaths true\n")
+
+ projects = loader.manifests_in_dependency_order()
+
+ for m in projects:
+ if m != manifest:
+ out.write(" - name: Fetch %s\n" % m.name)
+ out.write(f" run: {getdeps} fetch --no-tests {m.name}\n")
+
+ for m in projects:
+ if m != manifest:
+ out.write(" - name: Build %s\n" % m.name)
+ out.write(f" run: {getdeps} build --no-tests {m.name}\n")
+
+ out.write(" - name: Build %s\n" % manifest.name)
+
+ project_prefix = ""
+ if not build_opts.is_windows():
+ project_prefix = (
+ " --project-install-prefix %s:/usr/local" % manifest.name
+ )
+
+ out.write(
+ f" run: {getdeps} build --src-dir=. {manifest.name} {project_prefix}\n"
+ )
+
+ out.write(" - name: Copy artifacts\n")
+ if build_opts.is_linux():
+ # Strip debug info from the binaries, but only on linux.
+ # While the `strip` utility is also available on macOS,
+ # attempting to strip there results in an error.
+ # The `strip` utility is not available on Windows.
+ strip = " --strip"
+ else:
+ strip = ""
+
+ out.write(
+ f" run: {getdeps} fixup-dyn-deps{strip} "
+ f"--src-dir=. {manifest.name} _artifacts/{job_name} {project_prefix} "
+ f"--final-install-prefix /usr/local\n"
+ )
+
+ out.write(" - uses: actions/upload-artifact@master\n")
+ out.write(" with:\n")
+ out.write(" name: %s\n" % manifest.name)
+ out.write(" path: _artifacts\n")
+
+ out.write(" - name: Test %s\n" % manifest.name)
+ out.write(
+ f" run: {getdeps} test --src-dir=. {manifest.name} {project_prefix}\n"
+ )
+
+ def setup_project_cmd_parser(self, parser):
+ parser.add_argument(
+ "--disallow-system-packages",
+ help="Disallow satisfying third party deps from installed system packages",
+ action="store_true",
+ default=False,
+ )
+ parser.add_argument(
+ "--output-dir", help="The directory that will contain the yml files"
+ )
+ parser.add_argument(
+ "--run-on-all-branches",
+ action="store_true",
+ help="Allow CI to fire on all branches - Handy for testing",
+ )
+ parser.add_argument(
+ "--ubuntu-version", default="18.04", help="Version of Ubuntu to use"
+ )
+
+
+def get_arg_var_name(args):
+ for arg in args:
+ if arg.startswith("--"):
+ return arg[2:].replace("-", "_")
+
+ raise Exception("unable to determine argument variable name from %r" % (args,))
+
+
+def parse_args():
+ # We want to allow common arguments to be specified either before or after
+ # the subcommand name. In order to do this we add them to the main parser
+ # and to subcommand parsers. In order for this to work, we need to tell
+ # argparse that the default value is SUPPRESS, so that the default values
+ # from the subparser arguments won't override values set by the user from
+ # the main parser. We maintain our own list of desired defaults in the
+ # common_defaults dictionary, and manually set those if the argument wasn't
+ # present at all.
+ common_args = argparse.ArgumentParser(add_help=False)
+ common_defaults = {}
+
+ def add_common_arg(*args, **kwargs):
+ var_name = get_arg_var_name(args)
+ default_value = kwargs.pop("default", None)
+ common_defaults[var_name] = default_value
+ kwargs["default"] = argparse.SUPPRESS
+ common_args.add_argument(*args, **kwargs)
+
+ add_common_arg("--scratch-path", help="Where to maintain checkouts and build dirs")
+ add_common_arg(
+ "--vcvars-path", default=None, help="Path to the vcvarsall.bat on Windows."
+ )
+ add_common_arg(
+ "--install-prefix",
+ help=(
+ "Where the final build products will be installed "
+ "(default is [scratch-path]/installed)"
+ ),
+ )
+ add_common_arg(
+ "--num-jobs",
+ type=int,
+ help=(
+ "Number of concurrent jobs to use while building. "
+ "(default=number of cpu cores)"
+ ),
+ )
+ add_common_arg(
+ "--use-shipit",
+ help="use the real ShipIt instead of the simple shipit transformer",
+ action="store_true",
+ default=False,
+ )
+ add_common_arg(
+ "--facebook-internal",
+ help="Setup the build context as an FB internal build",
+ action="store_true",
+ default=None,
+ )
+ add_common_arg(
+ "--no-facebook-internal",
+ help="Perform a non-FB internal build, even when in an fbsource repository",
+ action="store_false",
+ dest="facebook_internal",
+ )
+ add_common_arg(
+ "--allow-system-packages",
+ help="Allow satisfying third party deps from installed system packages",
+ action="store_true",
+ default=False,
+ )
+ add_common_arg(
+ "--lfs-path",
+ help="Provide a parent directory for lfs when fbsource is unavailable",
+ default=None,
+ )
+
+ ap = argparse.ArgumentParser(
+ description="Get and build dependencies and projects", parents=[common_args]
+ )
+ sub = ap.add_subparsers(
+ # metavar suppresses the long and ugly default list of subcommands on a
+ # single line. We still render the nicer list below where we would
+ # have shown the nasty one.
+ metavar="",
+ title="Available commands",
+ help="",
+ )
+
+ add_subcommands(sub, common_args)
+
+ args = ap.parse_args()
+ for var_name, default_value in common_defaults.items():
+ if not hasattr(args, var_name):
+ setattr(args, var_name, default_value)
+
+ return ap, args
+
+
+def main():
+ ap, args = parse_args()
+ if getattr(args, "func", None) is None:
+ ap.print_help()
+ return 0
+ try:
+ return args.func(args)
+ except UsageError as exc:
+ ap.error(str(exc))
+ return 1
+ except TransientFailure as exc:
+ print("TransientFailure: %s" % str(exc))
+ # This return code is treated as a retryable transient infrastructure
+ # error by Facebook's internal CI, rather than eg: a build or code
+ # related error that needs to be fixed before progress can be made.
+ return 128
+ except subprocess.CalledProcessError as exc:
+ print("%s" % str(exc), file=sys.stderr)
+ print("!! Failed", file=sys.stderr)
+ return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/yarpl/include/yarpl/flowable/FlowableOperator_Merge.h b/build/fbcode_builder/getdeps/__init__.py
similarity index 100%
rename from yarpl/include/yarpl/flowable/FlowableOperator_Merge.h
rename to build/fbcode_builder/getdeps/__init__.py
diff --git a/build/fbcode_builder/getdeps/builder.py b/build/fbcode_builder/getdeps/builder.py
new file mode 100644
index 000000000..4e523c2dc
--- /dev/null
+++ b/build/fbcode_builder/getdeps/builder.py
@@ -0,0 +1,1400 @@
+#!/usr/bin/env python3
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import json
+import os
+import shutil
+import stat
+import subprocess
+import sys
+
+from .dyndeps import create_dyn_dep_munger
+from .envfuncs import Env, add_path_entry, path_search
+from .fetcher import copy_if_different
+from .runcmd import run_cmd
+
+
+class BuilderBase(object):
+ def __init__(
+ self,
+ build_opts,
+ ctx,
+ manifest,
+ src_dir,
+ build_dir,
+ inst_dir,
+ env=None,
+ final_install_prefix=None,
+ ):
+ self.env = Env()
+ if env:
+ self.env.update(env)
+
+ subdir = manifest.get("build", "subdir", ctx=ctx)
+ if subdir:
+ src_dir = os.path.join(src_dir, subdir)
+
+ self.ctx = ctx
+ self.src_dir = src_dir
+ self.build_dir = build_dir or src_dir
+ self.inst_dir = inst_dir
+ self.build_opts = build_opts
+ self.manifest = manifest
+ self.final_install_prefix = final_install_prefix
+
+ def _get_cmd_prefix(self):
+ if self.build_opts.is_windows():
+ vcvarsall = self.build_opts.get_vcvars_path()
+ if vcvarsall is not None:
+ # Since it sets rather a large number of variables we mildly abuse
+ # the cmd quoting rules to assemble a command that calls the script
+ # to prep the environment and then triggers the actual command that
+ # we wanted to run.
+ return [vcvarsall, "amd64", "&&"]
+ return []
+
+ def _run_cmd(self, cmd, cwd=None, env=None, use_cmd_prefix=True, allow_fail=False):
+ if env:
+ e = self.env.copy()
+ e.update(env)
+ env = e
+ else:
+ env = self.env
+
+ if use_cmd_prefix:
+ cmd_prefix = self._get_cmd_prefix()
+ if cmd_prefix:
+ cmd = cmd_prefix + cmd
+
+ log_file = os.path.join(self.build_dir, "getdeps_build.log")
+ return run_cmd(
+ cmd=cmd,
+ env=env,
+ cwd=cwd or self.build_dir,
+ log_file=log_file,
+ allow_fail=allow_fail,
+ )
+
+ def build(self, install_dirs, reconfigure):
+ print("Building %s..." % self.manifest.name)
+
+ if self.build_dir is not None:
+ if not os.path.isdir(self.build_dir):
+ os.makedirs(self.build_dir)
+ reconfigure = True
+
+ self._build(install_dirs=install_dirs, reconfigure=reconfigure)
+
+ # On Windows, emit a wrapper script that can be used to run build artifacts
+ # directly from the build directory, without installing them. On Windows $PATH
+ # needs to be updated to include all of the directories containing the runtime
+ # library dependencies in order to run the binaries.
+ if self.build_opts.is_windows():
+ script_path = self.get_dev_run_script_path()
+ dep_munger = create_dyn_dep_munger(self.build_opts, install_dirs)
+ dep_dirs = self.get_dev_run_extra_path_dirs(install_dirs, dep_munger)
+ dep_munger.emit_dev_run_script(script_path, dep_dirs)
+
+ def run_tests(
+ self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
+ ):
+ """Execute any tests that we know how to run. If they fail,
+ raise an exception."""
+ pass
+
+ def _build(self, install_dirs, reconfigure):
+ """Perform the build.
+ install_dirs contains the list of installation directories for
+ the dependencies of this project.
+ reconfigure will be set to true if the fetcher determined
+ that the sources have changed in such a way that the build
+ system needs to regenerate its rules."""
+ pass
+
+ def _compute_env(self, install_dirs):
+ # CMAKE_PREFIX_PATH is only respected when passed through the
+ # environment, so we construct an appropriate path to pass down
+ return self.build_opts.compute_env_for_install_dirs(
+ install_dirs, env=self.env, manifest=self.manifest
+ )
+
+ def get_dev_run_script_path(self):
+ assert self.build_opts.is_windows()
+ return os.path.join(self.build_dir, "run.ps1")
+
+ def get_dev_run_extra_path_dirs(self, install_dirs, dep_munger=None):
+ assert self.build_opts.is_windows()
+ if dep_munger is None:
+ dep_munger = create_dyn_dep_munger(self.build_opts, install_dirs)
+ return dep_munger.compute_dependency_paths(self.build_dir)
+
+
+class MakeBuilder(BuilderBase):
+ def __init__(
+ self,
+ build_opts,
+ ctx,
+ manifest,
+ src_dir,
+ build_dir,
+ inst_dir,
+ build_args,
+ install_args,
+ test_args,
+ ):
+ super(MakeBuilder, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+ self.build_args = build_args or []
+ self.install_args = install_args or []
+ self.test_args = test_args
+
+ def _get_prefix(self):
+ return ["PREFIX=" + self.inst_dir, "prefix=" + self.inst_dir]
+
+ def _build(self, install_dirs, reconfigure):
+ env = self._compute_env(install_dirs)
+
+ # Need to ensure that PREFIX is set prior to install because
+ # libbpf uses it when generating its pkg-config file.
+ # The lowercase prefix is used by some projects.
+ cmd = (
+ ["make", "-j%s" % self.build_opts.num_jobs]
+ + self.build_args
+ + self._get_prefix()
+ )
+ self._run_cmd(cmd, env=env)
+
+ install_cmd = ["make"] + self.install_args + self._get_prefix()
+ self._run_cmd(install_cmd, env=env)
+
+ def run_tests(
+ self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
+ ):
+ if not self.test_args:
+ return
+
+ env = self._compute_env(install_dirs)
+
+ cmd = ["make"] + self.test_args + self._get_prefix()
+ self._run_cmd(cmd, env=env)
+
+
+class CMakeBootStrapBuilder(MakeBuilder):
+ def _build(self, install_dirs, reconfigure):
+ self._run_cmd(["./bootstrap", "--prefix=" + self.inst_dir])
+ super(CMakeBootStrapBuilder, self)._build(install_dirs, reconfigure)
+
+
+class AutoconfBuilder(BuilderBase):
+ def __init__(self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir, args):
+ super(AutoconfBuilder, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+ self.args = args or []
+
+ def _build(self, install_dirs, reconfigure):
+ configure_path = os.path.join(self.src_dir, "configure")
+ autogen_path = os.path.join(self.src_dir, "autogen.sh")
+
+ env = self._compute_env(install_dirs)
+
+ if not os.path.exists(configure_path):
+ print("%s doesn't exist, so reconfiguring" % configure_path)
+ # This libtoolize call is a bit gross; the issue is that
+ # `autoreconf` as invoked by libsodium's `autogen.sh` doesn't
+ # seem to realize that it should invoke libtoolize and then
+ # error out when the configure script references a libtool
+ # related symbol.
+ self._run_cmd(["libtoolize"], cwd=self.src_dir, env=env)
+
+ # We generally prefer to call the `autogen.sh` script provided
+ # by the project on the basis that it may know more than plain
+ # autoreconf does.
+ if os.path.exists(autogen_path):
+ self._run_cmd(["bash", autogen_path], cwd=self.src_dir, env=env)
+ else:
+ self._run_cmd(["autoreconf", "-ivf"], cwd=self.src_dir, env=env)
+ configure_cmd = [configure_path, "--prefix=" + self.inst_dir] + self.args
+ self._run_cmd(configure_cmd, env=env)
+ self._run_cmd(["make", "-j%s" % self.build_opts.num_jobs], env=env)
+ self._run_cmd(["make", "install"], env=env)
+
+
+class Iproute2Builder(BuilderBase):
+ # ./configure --prefix does not work for iproute2.
+ # Thus, explicitly copy sources from src_dir to build_dir, bulid,
+ # and then install to inst_dir using DESTDIR
+ # lastly, also copy include from build_dir to inst_dir
+ def __init__(self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir):
+ super(Iproute2Builder, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+
+ def _patch(self):
+ # FBOSS build currently depends on an old version of iproute2 (commit
+ # 7ca63aef7d1b0c808da0040c6b366ef7a61f38c1). This is missing a commit
+ # (ae717baf15fb4d30749ada3948d9445892bac239) needed to build iproute2
+ # successfully. Apply it viz.: include stdint.h
+ # Reference: https://fburl.com/ilx9g5xm
+ with open(self.build_dir + "/tc/tc_core.c", "r") as f:
+ data = f.read()
+
+ with open(self.build_dir + "/tc/tc_core.c", "w") as f:
+ f.write("#include \n")
+ f.write(data)
+
+ def _build(self, install_dirs, reconfigure):
+ configure_path = os.path.join(self.src_dir, "configure")
+
+ env = self.env.copy()
+ self._run_cmd([configure_path], env=env)
+ shutil.rmtree(self.build_dir)
+ shutil.copytree(self.src_dir, self.build_dir)
+ self._patch()
+ self._run_cmd(["make", "-j%s" % self.build_opts.num_jobs], env=env)
+ install_cmd = ["make", "install", "DESTDIR=" + self.inst_dir]
+
+ for d in ["include", "lib"]:
+ if not os.path.isdir(os.path.join(self.inst_dir, d)):
+ shutil.copytree(
+ os.path.join(self.build_dir, d), os.path.join(self.inst_dir, d)
+ )
+
+ self._run_cmd(install_cmd, env=env)
+
+
+class BistroBuilder(BuilderBase):
+ def _build(self, install_dirs, reconfigure):
+ p = os.path.join(self.src_dir, "bistro", "bistro")
+ env = self._compute_env(install_dirs)
+ env["PATH"] = env["PATH"] + ":" + os.path.join(p, "bin")
+ env["TEMPLATES_PATH"] = os.path.join(p, "include", "thrift", "templates")
+ self._run_cmd(
+ [
+ os.path.join(".", "cmake", "run-cmake.sh"),
+ "Release",
+ "-DCMAKE_INSTALL_PREFIX=" + self.inst_dir,
+ ],
+ cwd=p,
+ env=env,
+ )
+ self._run_cmd(
+ [
+ "make",
+ "install",
+ "-j",
+ str(self.build_opts.num_jobs),
+ ],
+ cwd=os.path.join(p, "cmake", "Release"),
+ env=env,
+ )
+
+ def run_tests(
+ self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
+ ):
+ env = self._compute_env(install_dirs)
+ build_dir = os.path.join(self.src_dir, "bistro", "bistro", "cmake", "Release")
+ NUM_RETRIES = 5
+ for i in range(NUM_RETRIES):
+ cmd = ["ctest", "--output-on-failure"]
+ if i > 0:
+ cmd.append("--rerun-failed")
+ cmd.append(build_dir)
+ try:
+ self._run_cmd(
+ cmd,
+ cwd=build_dir,
+ env=env,
+ )
+ except Exception:
+ print(f"Tests failed... retrying ({i+1}/{NUM_RETRIES})")
+ else:
+ return
+ raise Exception(f"Tests failed even after {NUM_RETRIES} retries")
+
+
+class CMakeBuilder(BuilderBase):
+ MANUAL_BUILD_SCRIPT = """\
+#!{sys.executable}
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import argparse
+import subprocess
+import sys
+
+CMAKE = {cmake!r}
+CTEST = {ctest!r}
+SRC_DIR = {src_dir!r}
+BUILD_DIR = {build_dir!r}
+INSTALL_DIR = {install_dir!r}
+CMD_PREFIX = {cmd_prefix!r}
+CMAKE_ENV = {env_str}
+CMAKE_DEFINE_ARGS = {define_args_str}
+
+
+def get_jobs_argument(num_jobs_arg: int) -> str:
+ if num_jobs_arg > 0:
+ return "-j" + str(num_jobs_arg)
+
+ import multiprocessing
+ num_jobs = multiprocessing.cpu_count() // 2
+ return "-j" + str(num_jobs)
+
+
+def main():
+ ap = argparse.ArgumentParser()
+ ap.add_argument(
+ "cmake_args",
+ nargs=argparse.REMAINDER,
+ help='Any extra arguments after an "--" argument will be passed '
+ "directly to CMake."
+ )
+ ap.add_argument(
+ "--mode",
+ choices=["configure", "build", "install", "test"],
+ default="configure",
+ help="The mode to run: configure, build, or install. "
+ "Defaults to configure",
+ )
+ ap.add_argument(
+ "--build",
+ action="store_const",
+ const="build",
+ dest="mode",
+ help="An alias for --mode=build",
+ )
+ ap.add_argument(
+ "-j",
+ "--num-jobs",
+ action="store",
+ type=int,
+ default=0,
+ help="Run the build or tests with the specified number of parallel jobs",
+ )
+ ap.add_argument(
+ "--install",
+ action="store_const",
+ const="install",
+ dest="mode",
+ help="An alias for --mode=install",
+ )
+ ap.add_argument(
+ "--test",
+ action="store_const",
+ const="test",
+ dest="mode",
+ help="An alias for --mode=test",
+ )
+ args = ap.parse_args()
+
+ # Strip off a leading "--" from the additional CMake arguments
+ if args.cmake_args and args.cmake_args[0] == "--":
+ args.cmake_args = args.cmake_args[1:]
+
+ env = CMAKE_ENV
+
+ if args.mode == "configure":
+ full_cmd = CMD_PREFIX + [CMAKE, SRC_DIR] + CMAKE_DEFINE_ARGS + args.cmake_args
+ elif args.mode in ("build", "install"):
+ target = "all" if args.mode == "build" else "install"
+ full_cmd = CMD_PREFIX + [
+ CMAKE,
+ "--build",
+ BUILD_DIR,
+ "--target",
+ target,
+ "--config",
+ "Release",
+ get_jobs_argument(args.num_jobs),
+ ] + args.cmake_args
+ elif args.mode == "test":
+ full_cmd = CMD_PREFIX + [
+ {dev_run_script}CTEST,
+ "--output-on-failure",
+ get_jobs_argument(args.num_jobs),
+ ] + args.cmake_args
+ else:
+ ap.error("unknown invocation mode: %s" % (args.mode,))
+
+ cmd_str = " ".join(full_cmd)
+ print("Running: %r" % (cmd_str,))
+ proc = subprocess.run(full_cmd, env=env, cwd=BUILD_DIR)
+ sys.exit(proc.returncode)
+
+
+if __name__ == "__main__":
+ main()
+"""
+
+ def __init__(
+ self,
+ build_opts,
+ ctx,
+ manifest,
+ src_dir,
+ build_dir,
+ inst_dir,
+ defines,
+ final_install_prefix=None,
+ extra_cmake_defines=None,
+ ):
+ super(CMakeBuilder, self).__init__(
+ build_opts,
+ ctx,
+ manifest,
+ src_dir,
+ build_dir,
+ inst_dir,
+ final_install_prefix=final_install_prefix,
+ )
+ self.defines = defines or {}
+ if extra_cmake_defines:
+ self.defines.update(extra_cmake_defines)
+
+ def _invalidate_cache(self):
+ for name in [
+ "CMakeCache.txt",
+ "CMakeFiles/CMakeError.log",
+ "CMakeFiles/CMakeOutput.log",
+ ]:
+ name = os.path.join(self.build_dir, name)
+ if os.path.isdir(name):
+ shutil.rmtree(name)
+ elif os.path.exists(name):
+ os.unlink(name)
+
+ def _needs_reconfigure(self):
+ for name in ["CMakeCache.txt", "build.ninja"]:
+ name = os.path.join(self.build_dir, name)
+ if not os.path.exists(name):
+ return True
+ return False
+
+ def _write_build_script(self, **kwargs):
+ env_lines = [" {!r}: {!r},".format(k, v) for k, v in kwargs["env"].items()]
+ kwargs["env_str"] = "\n".join(["{"] + env_lines + ["}"])
+
+ if self.build_opts.is_windows():
+ kwargs["dev_run_script"] = '"powershell.exe", {!r}, '.format(
+ self.get_dev_run_script_path()
+ )
+ else:
+ kwargs["dev_run_script"] = ""
+
+ define_arg_lines = ["["]
+ for arg in kwargs["define_args"]:
+ # Replace the CMAKE_INSTALL_PREFIX argument to use the INSTALL_DIR
+ # variable that we define in the MANUAL_BUILD_SCRIPT code.
+ if arg.startswith("-DCMAKE_INSTALL_PREFIX="):
+ value = " {!r}.format(INSTALL_DIR),".format(
+ "-DCMAKE_INSTALL_PREFIX={}"
+ )
+ else:
+ value = " {!r},".format(arg)
+ define_arg_lines.append(value)
+ define_arg_lines.append("]")
+ kwargs["define_args_str"] = "\n".join(define_arg_lines)
+
+ # In order to make it easier for developers to manually run builds for
+ # CMake-based projects, write out some build scripts that can be used to invoke
+ # CMake manually.
+ build_script_path = os.path.join(self.build_dir, "run_cmake.py")
+ script_contents = self.MANUAL_BUILD_SCRIPT.format(**kwargs)
+ with open(build_script_path, "wb") as f:
+ f.write(script_contents.encode())
+ os.chmod(build_script_path, 0o755)
+
+ def _compute_cmake_define_args(self, env):
+ defines = {
+ "CMAKE_INSTALL_PREFIX": self.final_install_prefix or self.inst_dir,
+ "BUILD_SHARED_LIBS": "OFF",
+ # Some of the deps (rsocket) default to UBSAN enabled if left
+ # unspecified. Some of the deps fail to compile in release mode
+ # due to warning->error promotion. RelWithDebInfo is the happy
+ # medium.
+ "CMAKE_BUILD_TYPE": "RelWithDebInfo",
+ }
+ if "SANDCASTLE" not in os.environ:
+ # We sometimes see intermittent ccache related breakages on some
+ # of the FB internal CI hosts, so we prefer to disable ccache
+ # when running in that environment.
+ ccache = path_search(env, "ccache")
+ if ccache:
+ defines["CMAKE_CXX_COMPILER_LAUNCHER"] = ccache
+ else:
+ # rocksdb does its own probing for ccache.
+ # Ensure that it is disabled on sandcastle
+ env["CCACHE_DISABLE"] = "1"
+ # Some sandcastle hosts have broken ccache related dirs, and
+ # even though we've asked for it to be disabled ccache is
+ # still invoked by rocksdb's cmake.
+ # Redirect its config directory to somewhere that is guaranteed
+ # fresh to us, and that won't have any ccache data inside.
+ env["CCACHE_DIR"] = f"{self.build_opts.scratch_dir}/ccache"
+
+ if "GITHUB_ACTIONS" in os.environ and self.build_opts.is_windows():
+ # GitHub actions: the host has both gcc and msvc installed, and
+ # the default behavior of cmake is to prefer gcc.
+ # Instruct cmake that we want it to use cl.exe; this is important
+ # because Boost prefers cl.exe and the mismatch results in cmake
+ # with gcc not being able to find boost built with cl.exe.
+ defines["CMAKE_C_COMPILER"] = "cl.exe"
+ defines["CMAKE_CXX_COMPILER"] = "cl.exe"
+
+ if self.build_opts.is_darwin():
+ # Try to persuade cmake to set the rpath to match the lib
+ # dirs of the dependencies. This isn't automatic, and to
+ # make things more interesting, cmake uses `;` as the path
+ # separator, so translate the runtime path to something
+ # that cmake will parse
+ defines["CMAKE_INSTALL_RPATH"] = ";".join(
+ env.get("DYLD_LIBRARY_PATH", "").split(":")
+ )
+ # Tell cmake that we want to set the rpath in the tree
+ # at build time. Without this the rpath is only set
+ # at the moment that the binaries are installed. That
+ # default is problematic for example when using the
+ # gtest integration in cmake which runs the built test
+ # executables during the build to discover the set of
+ # tests.
+ defines["CMAKE_BUILD_WITH_INSTALL_RPATH"] = "ON"
+
+ defines.update(self.defines)
+ define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()]
+
+ # if self.build_opts.is_windows():
+ # define_args += ["-G", "Visual Studio 15 2017 Win64"]
+ define_args += ["-G", "Ninja"]
+
+ return define_args
+
+ def _build(self, install_dirs, reconfigure):
+ reconfigure = reconfigure or self._needs_reconfigure()
+
+ env = self._compute_env(install_dirs)
+ if not self.build_opts.is_windows() and self.final_install_prefix:
+ env["DESTDIR"] = self.inst_dir
+
+ # Resolve the cmake that we installed
+ cmake = path_search(env, "cmake")
+ if cmake is None:
+ raise Exception("Failed to find CMake")
+
+ if reconfigure:
+ define_args = self._compute_cmake_define_args(env)
+ self._write_build_script(
+ cmd_prefix=self._get_cmd_prefix(),
+ cmake=cmake,
+ ctest=path_search(env, "ctest"),
+ env=env,
+ define_args=define_args,
+ src_dir=self.src_dir,
+ build_dir=self.build_dir,
+ install_dir=self.inst_dir,
+ sys=sys,
+ )
+
+ self._invalidate_cache()
+ self._run_cmd([cmake, self.src_dir] + define_args, env=env)
+
+ self._run_cmd(
+ [
+ cmake,
+ "--build",
+ self.build_dir,
+ "--target",
+ "install",
+ "--config",
+ "Release",
+ "-j",
+ str(self.build_opts.num_jobs),
+ ],
+ env=env,
+ )
+
+ def run_tests(
+ self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
+ ):
+ env = self._compute_env(install_dirs)
+ ctest = path_search(env, "ctest")
+ cmake = path_search(env, "cmake")
+
+ # On Windows, we also need to update $PATH to include the directories that
+ # contain runtime library dependencies. This is not needed on other platforms
+ # since CMake will emit RPATH properly in the binary so they can find these
+ # dependencies.
+ if self.build_opts.is_windows():
+ path_entries = self.get_dev_run_extra_path_dirs(install_dirs)
+ path = env.get("PATH")
+ if path:
+ path_entries.insert(0, path)
+ env["PATH"] = ";".join(path_entries)
+
+ # Don't use the cmd_prefix when running tests. This is vcvarsall.bat on
+ # Windows. vcvarsall.bat is only needed for the build, not tests. It
+ # unfortunately fails if invoked with a long PATH environment variable when
+ # running the tests.
+ use_cmd_prefix = False
+
+ def get_property(test, propname, defval=None):
+ """extracts a named property from a cmake test info json blob.
+ The properties look like:
+ [{"name": "WORKING_DIRECTORY"},
+ {"value": "something"}]
+ We assume that it is invalid for the same named property to be
+ listed more than once.
+ """
+ props = test.get("properties", [])
+ for p in props:
+ if p.get("name", None) == propname:
+ return p.get("value", defval)
+ return defval
+
+ def list_tests():
+ output = subprocess.check_output(
+ [ctest, "--show-only=json-v1"], env=env, cwd=self.build_dir
+ )
+ try:
+ data = json.loads(output.decode("utf-8"))
+ except ValueError as exc:
+ raise Exception(
+ "Failed to decode cmake test info using %s: %s. Output was: %r"
+ % (ctest, str(exc), output)
+ )
+
+ tests = []
+ machine_suffix = self.build_opts.host_type.as_tuple_string()
+ for test in data["tests"]:
+ working_dir = get_property(test, "WORKING_DIRECTORY")
+ labels = []
+ machine_suffix = self.build_opts.host_type.as_tuple_string()
+ labels.append("tpx_test_config::buildsystem=getdeps")
+ labels.append("tpx_test_config::platform={}".format(machine_suffix))
+
+ if get_property(test, "DISABLED"):
+ labels.append("disabled")
+ command = test["command"]
+ if working_dir:
+ command = [cmake, "-E", "chdir", working_dir] + command
+
+ import os
+
+ tests.append(
+ {
+ "type": "custom",
+ "target": "%s-%s-getdeps-%s"
+ % (self.manifest.name, test["name"], machine_suffix),
+ "command": command,
+ "labels": labels,
+ "env": {},
+ "required_paths": [],
+ "contacts": [],
+ "cwd": os.getcwd(),
+ }
+ )
+ return tests
+
+ if schedule_type == "continuous" or schedule_type == "testwarden":
+ # for continuous and testwarden runs, disabling retry can give up
+ # better signals for flaky tests.
+ retry = 0
+
+ from sys import platform
+
+ testpilot = path_search(env, "testpilot")
+ tpx = path_search(env, "tpx")
+ if (tpx or testpilot) and not no_testpilot:
+ buck_test_info = list_tests()
+ import os
+
+ buck_test_info_name = os.path.join(self.build_dir, ".buck-test-info.json")
+ with open(buck_test_info_name, "w") as f:
+ json.dump(buck_test_info, f)
+
+ env.set("http_proxy", "")
+ env.set("https_proxy", "")
+ runs = []
+ from sys import platform
+
+ if platform == "win32":
+ machine_suffix = self.build_opts.host_type.as_tuple_string()
+ testpilot_args = [
+ "parexec-testinfra.exe",
+ "C:/tools/testpilot/sc_testpilot.par",
+ # Need to force the repo type otherwise testpilot on windows
+ # can be confused (presumably sparse profile related)
+ "--force-repo",
+ "fbcode",
+ "--force-repo-root",
+ self.build_opts.fbsource_dir,
+ "--buck-test-info",
+ buck_test_info_name,
+ "--retry=%d" % retry,
+ "-j=%s" % str(self.build_opts.num_jobs),
+ "--test-config",
+ "platform=%s" % machine_suffix,
+ "buildsystem=getdeps",
+ "--return-nonzero-on-failures",
+ ]
+ else:
+ testpilot_args = [
+ tpx,
+ "--buck-test-info",
+ buck_test_info_name,
+ "--retry=%d" % retry,
+ "-j=%s" % str(self.build_opts.num_jobs),
+ "--print-long-results",
+ ]
+
+ if owner:
+ testpilot_args += ["--contacts", owner]
+
+ if tpx and env:
+ testpilot_args.append("--env")
+ testpilot_args.extend(f"{key}={val}" for key, val in env.items())
+
+ if test_filter:
+ testpilot_args += ["--", test_filter]
+
+ if schedule_type == "continuous":
+ runs.append(
+ [
+ "--tag-new-tests",
+ "--collection",
+ "oss-continuous",
+ "--purpose",
+ "continuous",
+ ]
+ )
+ elif schedule_type == "testwarden":
+ # One run to assess new tests
+ runs.append(
+ [
+ "--tag-new-tests",
+ "--collection",
+ "oss-new-test-stress",
+ "--stress-runs",
+ "10",
+ "--purpose",
+ "stress-run-new-test",
+ ]
+ )
+ # And another for existing tests
+ runs.append(
+ [
+ "--tag-new-tests",
+ "--collection",
+ "oss-existing-test-stress",
+ "--stress-runs",
+ "10",
+ "--purpose",
+ "stress-run",
+ ]
+ )
+ else:
+ runs.append(["--collection", "oss-diff", "--purpose", "diff"])
+
+ for run in runs:
+ self._run_cmd(
+ testpilot_args + run,
+ cwd=self.build_opts.fbcode_builder_dir,
+ env=env,
+ use_cmd_prefix=use_cmd_prefix,
+ )
+ else:
+ args = [ctest, "--output-on-failure", "-j", str(self.build_opts.num_jobs)]
+ if test_filter:
+ args += ["-R", test_filter]
+
+ count = 0
+ while count <= retry:
+ retcode = self._run_cmd(
+ args, env=env, use_cmd_prefix=use_cmd_prefix, allow_fail=True
+ )
+
+ if retcode == 0:
+ break
+ if count == 0:
+ # Only add this option in the second run.
+ args += ["--rerun-failed"]
+ count += 1
+ if retcode != 0:
+ # Allow except clause in getdeps.main to catch and exit gracefully
+ # This allows non-testpilot runs to fail through the same logic as failed testpilot runs, which may become handy in case if post test processing is needed in the future
+ raise subprocess.CalledProcessError(retcode, args)
+
+
+class NinjaBootstrap(BuilderBase):
+ def __init__(self, build_opts, ctx, manifest, build_dir, src_dir, inst_dir):
+ super(NinjaBootstrap, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+
+ def _build(self, install_dirs, reconfigure):
+ self._run_cmd([sys.executable, "configure.py", "--bootstrap"], cwd=self.src_dir)
+ src_ninja = os.path.join(self.src_dir, "ninja")
+ dest_ninja = os.path.join(self.inst_dir, "bin/ninja")
+ bin_dir = os.path.dirname(dest_ninja)
+ if not os.path.exists(bin_dir):
+ os.makedirs(bin_dir)
+ shutil.copyfile(src_ninja, dest_ninja)
+ shutil.copymode(src_ninja, dest_ninja)
+
+
+class OpenSSLBuilder(BuilderBase):
+ def __init__(self, build_opts, ctx, manifest, build_dir, src_dir, inst_dir):
+ super(OpenSSLBuilder, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+
+ def _build(self, install_dirs, reconfigure):
+ configure = os.path.join(self.src_dir, "Configure")
+
+ # prefer to resolve the perl that we installed from
+ # our manifest on windows, but fall back to the system
+ # path on eg: darwin
+ env = self.env.copy()
+ for d in install_dirs:
+ bindir = os.path.join(d, "bin")
+ add_path_entry(env, "PATH", bindir, append=False)
+
+ perl = path_search(env, "perl", "perl")
+
+ if self.build_opts.is_windows():
+ make = "nmake.exe"
+ args = ["VC-WIN64A-masm", "-utf-8"]
+ elif self.build_opts.is_darwin():
+ make = "make"
+ args = ["darwin64-x86_64-cc"]
+ elif self.build_opts.is_linux():
+ make = "make"
+ args = (
+ ["linux-x86_64"] if not self.build_opts.is_arm() else ["linux-aarch64"]
+ )
+ else:
+ raise Exception("don't know how to build openssl for %r" % self.ctx)
+
+ self._run_cmd(
+ [
+ perl,
+ configure,
+ "--prefix=%s" % self.inst_dir,
+ "--openssldir=%s" % self.inst_dir,
+ ]
+ + args
+ + [
+ "enable-static-engine",
+ "enable-capieng",
+ "no-makedepend",
+ "no-unit-test",
+ "no-tests",
+ ]
+ )
+ self._run_cmd([make, "install_sw", "install_ssldirs"])
+
+
+class Boost(BuilderBase):
+ def __init__(
+ self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir, b2_args
+ ):
+ children = os.listdir(src_dir)
+ assert len(children) == 1, "expected a single directory entry: %r" % (children,)
+ boost_src = children[0]
+ assert boost_src.startswith("boost")
+ src_dir = os.path.join(src_dir, children[0])
+ super(Boost, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+ self.b2_args = b2_args
+
+ def _build(self, install_dirs, reconfigure):
+ env = self._compute_env(install_dirs)
+ linkage = ["static"]
+ if self.build_opts.is_windows():
+ linkage.append("shared")
+
+ args = []
+ if self.build_opts.is_darwin():
+ clang = subprocess.check_output(["xcrun", "--find", "clang"])
+ user_config = os.path.join(self.build_dir, "project-config.jam")
+ with open(user_config, "w") as jamfile:
+ jamfile.write("using clang : : %s ;\n" % clang.decode().strip())
+ args.append("--user-config=%s" % user_config)
+
+ for link in linkage:
+ if self.build_opts.is_windows():
+ bootstrap = os.path.join(self.src_dir, "bootstrap.bat")
+ self._run_cmd([bootstrap], cwd=self.src_dir, env=env)
+ args += ["address-model=64"]
+ else:
+ bootstrap = os.path.join(self.src_dir, "bootstrap.sh")
+ self._run_cmd(
+ [bootstrap, "--prefix=%s" % self.inst_dir],
+ cwd=self.src_dir,
+ env=env,
+ )
+
+ b2 = os.path.join(self.src_dir, "b2")
+ self._run_cmd(
+ [
+ b2,
+ "-j%s" % self.build_opts.num_jobs,
+ "--prefix=%s" % self.inst_dir,
+ "--builddir=%s" % self.build_dir,
+ ]
+ + args
+ + self.b2_args
+ + [
+ "link=%s" % link,
+ "runtime-link=shared",
+ "variant=release",
+ "threading=multi",
+ "debug-symbols=on",
+ "visibility=global",
+ "-d2",
+ "install",
+ ],
+ cwd=self.src_dir,
+ env=env,
+ )
+
+
+class NopBuilder(BuilderBase):
+ def __init__(self, build_opts, ctx, manifest, src_dir, inst_dir):
+ super(NopBuilder, self).__init__(
+ build_opts, ctx, manifest, src_dir, None, inst_dir
+ )
+
+ def build(self, install_dirs, reconfigure):
+ print("Installing %s -> %s" % (self.src_dir, self.inst_dir))
+ parent = os.path.dirname(self.inst_dir)
+ if not os.path.exists(parent):
+ os.makedirs(parent)
+
+ install_files = self.manifest.get_section_as_ordered_pairs(
+ "install.files", self.ctx
+ )
+ if install_files:
+ for src_name, dest_name in self.manifest.get_section_as_ordered_pairs(
+ "install.files", self.ctx
+ ):
+ full_dest = os.path.join(self.inst_dir, dest_name)
+ full_src = os.path.join(self.src_dir, src_name)
+
+ dest_parent = os.path.dirname(full_dest)
+ if not os.path.exists(dest_parent):
+ os.makedirs(dest_parent)
+ if os.path.isdir(full_src):
+ if not os.path.exists(full_dest):
+ shutil.copytree(full_src, full_dest)
+ else:
+ shutil.copyfile(full_src, full_dest)
+ shutil.copymode(full_src, full_dest)
+ # This is a bit gross, but the mac ninja.zip doesn't
+ # give ninja execute permissions, so force them on
+ # for things that look like they live in a bin dir
+ if os.path.dirname(dest_name) == "bin":
+ st = os.lstat(full_dest)
+ os.chmod(full_dest, st.st_mode | stat.S_IXUSR)
+ else:
+ if not os.path.exists(self.inst_dir):
+ shutil.copytree(self.src_dir, self.inst_dir)
+
+
+class OpenNSABuilder(NopBuilder):
+ # OpenNSA libraries are stored with git LFS. As a result, fetcher fetches
+ # LFS pointers and not the contents. Use git-lfs to pull the real contents
+ # before copying to install dir using NoopBuilder.
+ # In future, if more builders require git-lfs, we would consider installing
+ # git-lfs as part of the sandcastle infra as against repeating similar
+ # logic for each builder that requires git-lfs.
+ def __init__(self, build_opts, ctx, manifest, src_dir, inst_dir):
+ super(OpenNSABuilder, self).__init__(
+ build_opts, ctx, manifest, src_dir, inst_dir
+ )
+
+ def build(self, install_dirs, reconfigure):
+ env = self._compute_env(install_dirs)
+ self._run_cmd(["git", "lfs", "install", "--local"], cwd=self.src_dir, env=env)
+ self._run_cmd(["git", "lfs", "pull"], cwd=self.src_dir, env=env)
+
+ super(OpenNSABuilder, self).build(install_dirs, reconfigure)
+
+
+class SqliteBuilder(BuilderBase):
+ def __init__(self, build_opts, ctx, manifest, src_dir, build_dir, inst_dir):
+ super(SqliteBuilder, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+
+ def _build(self, install_dirs, reconfigure):
+ for f in ["sqlite3.c", "sqlite3.h", "sqlite3ext.h"]:
+ src = os.path.join(self.src_dir, f)
+ dest = os.path.join(self.build_dir, f)
+ copy_if_different(src, dest)
+
+ cmake_lists = """
+cmake_minimum_required(VERSION 3.1.3 FATAL_ERROR)
+project(sqlite3 C)
+add_library(sqlite3 STATIC sqlite3.c)
+# These options are taken from the defaults in Makefile.msc in
+# the sqlite distribution
+target_compile_definitions(sqlite3 PRIVATE
+ -DSQLITE_ENABLE_COLUMN_METADATA=1
+ -DSQLITE_ENABLE_FTS3=1
+ -DSQLITE_ENABLE_RTREE=1
+ -DSQLITE_ENABLE_GEOPOLY=1
+ -DSQLITE_ENABLE_JSON1=1
+ -DSQLITE_ENABLE_STMTVTAB=1
+ -DSQLITE_ENABLE_DBPAGE_VTAB=1
+ -DSQLITE_ENABLE_DBSTAT_VTAB=1
+ -DSQLITE_INTROSPECTION_PRAGMAS=1
+ -DSQLITE_ENABLE_DESERIALIZE=1
+)
+install(TARGETS sqlite3)
+install(FILES sqlite3.h sqlite3ext.h DESTINATION include)
+ """
+
+ with open(os.path.join(self.build_dir, "CMakeLists.txt"), "w") as f:
+ f.write(cmake_lists)
+
+ defines = {
+ "CMAKE_INSTALL_PREFIX": self.inst_dir,
+ "BUILD_SHARED_LIBS": "OFF",
+ "CMAKE_BUILD_TYPE": "RelWithDebInfo",
+ }
+ define_args = ["-D%s=%s" % (k, v) for (k, v) in defines.items()]
+ define_args += ["-G", "Ninja"]
+
+ env = self._compute_env(install_dirs)
+
+ # Resolve the cmake that we installed
+ cmake = path_search(env, "cmake")
+
+ self._run_cmd([cmake, self.build_dir] + define_args, env=env)
+ self._run_cmd(
+ [
+ cmake,
+ "--build",
+ self.build_dir,
+ "--target",
+ "install",
+ "--config",
+ "Release",
+ "-j",
+ str(self.build_opts.num_jobs),
+ ],
+ env=env,
+ )
+
+
+class CargoBuilder(BuilderBase):
+ def __init__(
+ self,
+ build_opts,
+ ctx,
+ manifest,
+ src_dir,
+ build_dir,
+ inst_dir,
+ build_doc,
+ workspace_dir,
+ manifests_to_build,
+ loader,
+ ):
+ super(CargoBuilder, self).__init__(
+ build_opts, ctx, manifest, src_dir, build_dir, inst_dir
+ )
+ self.build_doc = build_doc
+ self.ws_dir = workspace_dir
+ self.manifests_to_build = manifests_to_build and manifests_to_build.split(",")
+ self.loader = loader
+
+ def run_cargo(self, install_dirs, operation, args=None):
+ args = args or []
+ env = self._compute_env(install_dirs)
+ # Enable using nightly features with stable compiler
+ env["RUSTC_BOOTSTRAP"] = "1"
+ env["LIBZ_SYS_STATIC"] = "1"
+ cmd = [
+ "cargo",
+ operation,
+ "--workspace",
+ "-j%s" % self.build_opts.num_jobs,
+ ] + args
+ self._run_cmd(cmd, cwd=self.workspace_dir(), env=env)
+
+ def build_source_dir(self):
+ return os.path.join(self.build_dir, "source")
+
+ def workspace_dir(self):
+ return os.path.join(self.build_source_dir(), self.ws_dir or "")
+
+ def manifest_dir(self, manifest):
+ return os.path.join(self.build_source_dir(), manifest)
+
+ def recreate_dir(self, src, dst):
+ if os.path.isdir(dst):
+ shutil.rmtree(dst)
+ shutil.copytree(src, dst)
+
+ def _build(self, install_dirs, reconfigure):
+ build_source_dir = self.build_source_dir()
+ self.recreate_dir(self.src_dir, build_source_dir)
+
+ dot_cargo_dir = os.path.join(build_source_dir, ".cargo")
+ if not os.path.isdir(dot_cargo_dir):
+ os.mkdir(dot_cargo_dir)
+
+ with open(os.path.join(dot_cargo_dir, "config"), "w+") as f:
+ f.write(
+ """\
+[build]
+target-dir = '''{}'''
+
+[net]
+git-fetch-with-cli = true
+
+[profile.dev]
+debug = false
+incremental = false
+""".format(
+ self.build_dir.replace("\\", "\\\\")
+ )
+ )
+
+ if self.ws_dir is not None:
+ self._patchup_workspace()
+
+ try:
+ from getdeps.facebook.rust import vendored_crates
+
+ vendored_crates(self.build_opts, build_source_dir)
+ except ImportError:
+ # This FB internal module isn't shippped to github,
+ # so just rely on cargo downloading crates on it's own
+ pass
+
+ if self.manifests_to_build is None:
+ self.run_cargo(
+ install_dirs,
+ "build",
+ ["--out-dir", os.path.join(self.inst_dir, "bin"), "-Zunstable-options"],
+ )
+ else:
+ for manifest in self.manifests_to_build:
+ self.run_cargo(
+ install_dirs,
+ "build",
+ [
+ "--out-dir",
+ os.path.join(self.inst_dir, "bin"),
+ "-Zunstable-options",
+ "--manifest-path",
+ self.manifest_dir(manifest),
+ ],
+ )
+
+ self.recreate_dir(build_source_dir, os.path.join(self.inst_dir, "source"))
+
+ def run_tests(
+ self, install_dirs, schedule_type, owner, test_filter, retry, no_testpilot
+ ):
+ if test_filter:
+ args = ["--", test_filter]
+ else:
+ args = []
+
+ if self.manifests_to_build is None:
+ self.run_cargo(install_dirs, "test", args)
+ if self.build_doc:
+ self.run_cargo(install_dirs, "doc", ["--no-deps"])
+ else:
+ for manifest in self.manifests_to_build:
+ margs = ["--manifest-path", self.manifest_dir(manifest)]
+ self.run_cargo(install_dirs, "test", args + margs)
+ if self.build_doc:
+ self.run_cargo(install_dirs, "doc", ["--no-deps"] + margs)
+
+ def _patchup_workspace(self):
+ """
+ This method makes some assumptions about the state of the project and
+ its cargo dependendies:
+ 1. Crates from cargo dependencies can be extracted from Cargo.toml files
+ using _extract_crates function. It is using a heuristic so check its
+ code to understand how it is done.
+ 2. The extracted cargo dependencies crates can be found in the
+ dependency's install dir using _resolve_crate_to_path function
+ which again is using a heuristic.
+
+ Notice that many things might go wrong here. E.g. if someone depends
+ on another getdeps crate by writing in their Cargo.toml file:
+
+ my-rename-of-crate = { package = "crate", git = "..." }
+
+ they can count themselves lucky because the code will raise an
+ Exception. There migh be more cases where the code will silently pass
+ producing bad results.
+ """
+ workspace_dir = self.workspace_dir()
+ config = self._resolve_config()
+ if config:
+ with open(os.path.join(workspace_dir, "Cargo.toml"), "r+") as f:
+ manifest_content = f.read()
+ if "[package]" not in manifest_content:
+ # A fake manifest has to be crated to change the virtual
+ # manifest into a non-virtual. The virtual manifests are limited
+ # in many ways and the inability to define patches on them is
+ # one. Check https://github.com/rust-lang/cargo/issues/4934 to
+ # see if it is resolved.
+ f.write(
+ """
+ [package]
+ name = "fake_manifest_of_{}"
+ version = "0.0.0"
+ [lib]
+ path = "/dev/null"
+ """.format(
+ self.manifest.name
+ )
+ )
+ else:
+ f.write("\n")
+ f.write(config)
+
+ def _resolve_config(self):
+ """
+ Returns a configuration to be put inside root Cargo.toml file which
+ patches the dependencies git code with local getdeps versions.
+ See https://doc.rust-lang.org/cargo/reference/manifest.html#the-patch-section
+ """
+ dep_to_git = self._resolve_dep_to_git()
+ dep_to_crates = CargoBuilder._resolve_dep_to_crates(
+ self.build_source_dir(), dep_to_git
+ )
+
+ config = []
+ for name in sorted(dep_to_git.keys()):
+ git_conf = dep_to_git[name]
+ crates = sorted(dep_to_crates.get(name, []))
+ if not crates:
+ continue # nothing to patch, move along
+ crates_patches = [
+ '{} = {{ path = "{}" }}'.format(
+ crate,
+ CargoBuilder._resolve_crate_to_path(crate, git_conf).replace(
+ "\\", "\\\\"
+ ),
+ )
+ for crate in crates
+ ]
+
+ config.append(
+ '[patch."{0}"]\n'.format(git_conf["repo_url"])
+ + "\n".join(crates_patches)
+ )
+ return "\n".join(config)
+
+ def _resolve_dep_to_git(self):
+ """
+ For each direct dependency of the currently build manifest check if it
+ is also cargo-builded and if yes then extract it's git configs and
+ install dir
+ """
+ dependencies = self.manifest.get_section_as_dict("dependencies", ctx=self.ctx)
+ if not dependencies:
+ return []
+
+ dep_to_git = {}
+ for dep in dependencies.keys():
+ dep_manifest = self.loader.load_manifest(dep)
+ dep_builder = dep_manifest.get("build", "builder", ctx=self.ctx)
+ if dep_builder not in ["cargo", "nop"] or dep == "rust":
+ # This is a direct dependency, but it is not build with cargo
+ # and it is not simply copying files with nop, so ignore it.
+ # The "rust" dependency is an exception since it contains the
+ # toolchain.
+ continue
+
+ git_conf = dep_manifest.get_section_as_dict("git", ctx=self.ctx)
+ if "repo_url" not in git_conf:
+ raise Exception(
+ "A cargo dependency requires git.repo_url to be defined."
+ )
+ source_dir = self.loader.get_project_install_dir(dep_manifest)
+ if dep_builder == "cargo":
+ source_dir = os.path.join(source_dir, "source")
+ git_conf["source_dir"] = source_dir
+ dep_to_git[dep] = git_conf
+ return dep_to_git
+
+ @staticmethod
+ def _resolve_dep_to_crates(build_source_dir, dep_to_git):
+ """
+ This function traverse the build_source_dir in search of Cargo.toml
+ files, extracts the crate names from them using _extract_crates
+ function and returns a merged result containing crate names per
+ dependency name from all Cargo.toml files in the project.
+ """
+ if not dep_to_git:
+ return {} # no deps, so don't waste time traversing files
+
+ dep_to_crates = {}
+ for root, _, files in os.walk(build_source_dir):
+ for f in files:
+ if f == "Cargo.toml":
+ more_dep_to_crates = CargoBuilder._extract_crates(
+ os.path.join(root, f), dep_to_git
+ )
+ for name, crates in more_dep_to_crates.items():
+ dep_to_crates.setdefault(name, set()).update(crates)
+ return dep_to_crates
+
+ @staticmethod
+ def _extract_crates(cargo_toml_file, dep_to_git):
+ """
+ This functions reads content of provided cargo toml file and extracts
+ crate names per each dependency. The extraction is done by a heuristic
+ so it might be incorrect.
+ """
+ deps_to_crates = {}
+ with open(cargo_toml_file, "r") as f:
+ for line in f.readlines():
+ if line.startswith("#") or "git = " not in line:
+ continue # filter out commented lines and ones without git deps
+ for name, conf in dep_to_git.items():
+ if 'git = "{}"'.format(conf["repo_url"]) in line:
+ pkg_template = ' package = "'
+ if pkg_template in line:
+ crate_name, _, _ = line.partition(pkg_template)[
+ 2
+ ].partition('"')
+ else:
+ crate_name, _, _ = line.partition("=")
+ deps_to_crates.setdefault(name, set()).add(crate_name.strip())
+ return deps_to_crates
+
+ @staticmethod
+ def _resolve_crate_to_path(crate, git_conf):
+ """
+ Tries to find in git_conf["inst_dir"] by searching a [package]
+ keyword followed by name = "".
+ """
+ source_dir = git_conf["source_dir"]
+ search_pattern = '[package]\nname = "{}"'.format(crate)
+
+ for root, _, files in os.walk(source_dir):
+ for fname in files:
+ if fname == "Cargo.toml":
+ with open(os.path.join(root, fname), "r") as f:
+ if search_pattern in f.read():
+ return root
+
+ raise Exception("Failed to found crate {} in path {}".format(crate, source_dir))
diff --git a/build/fbcode_builder/getdeps/buildopts.py b/build/fbcode_builder/getdeps/buildopts.py
new file mode 100644
index 000000000..bc6d2da87
--- /dev/null
+++ b/build/fbcode_builder/getdeps/buildopts.py
@@ -0,0 +1,458 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import errno
+import glob
+import ntpath
+import os
+import subprocess
+import sys
+import tempfile
+
+from .copytree import containing_repo_type
+from .envfuncs import Env, add_path_entry
+from .fetcher import get_fbsource_repo_data
+from .manifest import ContextGenerator
+from .platform import HostType, is_windows
+
+
+try:
+ import typing # noqa: F401
+except ImportError:
+ pass
+
+
+def detect_project(path):
+ repo_type, repo_root = containing_repo_type(path)
+ if repo_type is None:
+ return None, None
+
+ # Look for a .projectid file. If it exists, read the project name from it.
+ project_id_path = os.path.join(repo_root, ".projectid")
+ try:
+ with open(project_id_path, "r") as f:
+ project_name = f.read().strip()
+ return repo_root, project_name
+ except EnvironmentError as ex:
+ if ex.errno != errno.ENOENT:
+ raise
+
+ return repo_root, None
+
+
+class BuildOptions(object):
+ def __init__(
+ self,
+ fbcode_builder_dir,
+ scratch_dir,
+ host_type,
+ install_dir=None,
+ num_jobs=0,
+ use_shipit=False,
+ vcvars_path=None,
+ allow_system_packages=False,
+ lfs_path=None,
+ ):
+ """fbcode_builder_dir - the path to either the in-fbsource fbcode_builder dir,
+ or for shipit-transformed repos, the build dir that
+ has been mapped into that dir.
+ scratch_dir - a place where we can store repos and build bits.
+ This path should be stable across runs and ideally
+ should not be in the repo of the project being built,
+ but that is ultimately where we generally fall back
+ for builds outside of FB
+ install_dir - where the project will ultimately be installed
+ num_jobs - the level of concurrency to use while building
+ use_shipit - use real shipit instead of the simple shipit transformer
+ vcvars_path - Path to external VS toolchain's vsvarsall.bat
+ """
+ if not num_jobs:
+ import multiprocessing
+
+ num_jobs = multiprocessing.cpu_count() // 2
+
+ if not install_dir:
+ install_dir = os.path.join(scratch_dir, "installed")
+
+ self.project_hashes = None
+ for p in ["../deps/github_hashes", "../project_hashes"]:
+ hashes = os.path.join(fbcode_builder_dir, p)
+ if os.path.exists(hashes):
+ self.project_hashes = hashes
+ break
+
+ # Detect what repository and project we are being run from.
+ self.repo_root, self.repo_project = detect_project(os.getcwd())
+
+ # If we are running from an fbsource repository, set self.fbsource_dir
+ # to allow the ShipIt-based fetchers to use it.
+ if self.repo_project == "fbsource":
+ self.fbsource_dir = self.repo_root
+ else:
+ self.fbsource_dir = None
+
+ self.num_jobs = num_jobs
+ self.scratch_dir = scratch_dir
+ self.install_dir = install_dir
+ self.fbcode_builder_dir = fbcode_builder_dir
+ self.host_type = host_type
+ self.use_shipit = use_shipit
+ self.allow_system_packages = allow_system_packages
+ self.lfs_path = lfs_path
+ if vcvars_path is None and is_windows():
+
+ # On Windows, the compiler is not available in the PATH by
+ # default so we need to run the vcvarsall script to populate the
+ # environment. We use a glob to find some version of this script
+ # as deployed with Visual Studio 2017. This logic can also
+ # locate Visual Studio 2019 but note that at the time of writing
+ # the version of boost in our manifest cannot be built with
+ # VS 2019, so we're effectively tied to VS 2017 until we upgrade
+ # the boost dependency.
+ vcvarsall = []
+ for year in ["2017", "2019"]:
+ vcvarsall += glob.glob(
+ os.path.join(
+ os.environ["ProgramFiles(x86)"],
+ "Microsoft Visual Studio",
+ year,
+ "*",
+ "VC",
+ "Auxiliary",
+ "Build",
+ "vcvarsall.bat",
+ )
+ )
+ vcvars_path = vcvarsall[0]
+
+ self.vcvars_path = vcvars_path
+
+ @property
+ def manifests_dir(self):
+ return os.path.join(self.fbcode_builder_dir, "manifests")
+
+ def is_darwin(self):
+ return self.host_type.is_darwin()
+
+ def is_windows(self):
+ return self.host_type.is_windows()
+
+ def is_arm(self):
+ return self.host_type.is_arm()
+
+ def get_vcvars_path(self):
+ return self.vcvars_path
+
+ def is_linux(self):
+ return self.host_type.is_linux()
+
+ def get_context_generator(self, host_tuple=None, facebook_internal=None):
+ """Create a manifest ContextGenerator for the specified target platform."""
+ if host_tuple is None:
+ host_type = self.host_type
+ elif isinstance(host_tuple, HostType):
+ host_type = host_tuple
+ else:
+ host_type = HostType.from_tuple_string(host_tuple)
+
+ # facebook_internal is an Optional[bool]
+ # If it is None, default to assuming this is a Facebook-internal build if
+ # we are running in an fbsource repository.
+ if facebook_internal is None:
+ facebook_internal = self.fbsource_dir is not None
+
+ return ContextGenerator(
+ {
+ "os": host_type.ostype,
+ "distro": host_type.distro,
+ "distro_vers": host_type.distrovers,
+ "fb": "on" if facebook_internal else "off",
+ "test": "off",
+ }
+ )
+
+ def compute_env_for_install_dirs(self, install_dirs, env=None, manifest=None):
+ if env is not None:
+ env = env.copy()
+ else:
+ env = Env()
+
+ env["GETDEPS_BUILD_DIR"] = os.path.join(self.scratch_dir, "build")
+ env["GETDEPS_INSTALL_DIR"] = self.install_dir
+
+ # On macOS we need to set `SDKROOT` when we use clang for system
+ # header files.
+ if self.is_darwin() and "SDKROOT" not in env:
+ sdkroot = subprocess.check_output(["xcrun", "--show-sdk-path"])
+ env["SDKROOT"] = sdkroot.decode().strip()
+
+ if self.fbsource_dir:
+ env["YARN_YARN_OFFLINE_MIRROR"] = os.path.join(
+ self.fbsource_dir, "xplat/third-party/yarn/offline-mirror"
+ )
+ yarn_exe = "yarn.bat" if self.is_windows() else "yarn"
+ env["YARN_PATH"] = os.path.join(
+ self.fbsource_dir, "xplat/third-party/yarn/", yarn_exe
+ )
+ node_exe = "node-win-x64.exe" if self.is_windows() else "node"
+ env["NODE_BIN"] = os.path.join(
+ self.fbsource_dir, "xplat/third-party/node/bin/", node_exe
+ )
+ env["RUST_VENDORED_CRATES_DIR"] = os.path.join(
+ self.fbsource_dir, "third-party/rust/vendor"
+ )
+ hash_data = get_fbsource_repo_data(self)
+ env["FBSOURCE_HASH"] = hash_data.hash
+ env["FBSOURCE_DATE"] = hash_data.date
+
+ lib_path = None
+ if self.is_darwin():
+ lib_path = "DYLD_LIBRARY_PATH"
+ elif self.is_linux():
+ lib_path = "LD_LIBRARY_PATH"
+ elif self.is_windows():
+ lib_path = "PATH"
+ else:
+ lib_path = None
+
+ for d in install_dirs:
+ bindir = os.path.join(d, "bin")
+
+ if not (
+ manifest and manifest.get("build", "disable_env_override_pkgconfig")
+ ):
+ pkgconfig = os.path.join(d, "lib/pkgconfig")
+ if os.path.exists(pkgconfig):
+ add_path_entry(env, "PKG_CONFIG_PATH", pkgconfig)
+
+ pkgconfig = os.path.join(d, "lib64/pkgconfig")
+ if os.path.exists(pkgconfig):
+ add_path_entry(env, "PKG_CONFIG_PATH", pkgconfig)
+
+ if not (manifest and manifest.get("build", "disable_env_override_path")):
+ add_path_entry(env, "CMAKE_PREFIX_PATH", d)
+
+ # Allow resolving shared objects built earlier (eg: zstd
+ # doesn't include the full path to the dylib in its linkage
+ # so we need to give it an assist)
+ if lib_path:
+ for lib in ["lib", "lib64"]:
+ libdir = os.path.join(d, lib)
+ if os.path.exists(libdir):
+ add_path_entry(env, lib_path, libdir)
+
+ # Allow resolving binaries (eg: cmake, ninja) and dlls
+ # built by earlier steps
+ if os.path.exists(bindir):
+ add_path_entry(env, "PATH", bindir, append=False)
+
+ # If rustc is present in the `bin` directory, set RUSTC to prevent
+ # cargo uses the rustc installed in the system.
+ if self.is_windows():
+ cargo_path = os.path.join(bindir, "cargo.exe")
+ rustc_path = os.path.join(bindir, "rustc.exe")
+ rustdoc_path = os.path.join(bindir, "rustdoc.exe")
+ else:
+ cargo_path = os.path.join(bindir, "cargo")
+ rustc_path = os.path.join(bindir, "rustc")
+ rustdoc_path = os.path.join(bindir, "rustdoc")
+
+ if os.path.isfile(rustc_path):
+ env["CARGO_BIN"] = cargo_path
+ env["RUSTC"] = rustc_path
+ env["RUSTDOC"] = rustdoc_path
+
+ openssl_include = os.path.join(d, "include/openssl")
+ if os.path.isdir(openssl_include) and any(
+ os.path.isfile(os.path.join(d, "lib", libcrypto))
+ for libcrypto in ("libcrypto.lib", "libcrypto.so", "libcrypto.a")
+ ):
+ # This must be the openssl library, let Rust know about it
+ env["OPENSSL_DIR"] = d
+
+ return env
+
+
+def list_win32_subst_letters():
+ output = subprocess.check_output(["subst"]).decode("utf-8")
+ # The output is a set of lines like: `F:\: => C:\open\some\where`
+ lines = output.strip().split("\r\n")
+ mapping = {}
+ for line in lines:
+ fields = line.split(": => ")
+ if len(fields) != 2:
+ continue
+ letter = fields[0]
+ path = fields[1]
+ mapping[letter] = path
+
+ return mapping
+
+
+def find_existing_win32_subst_for_path(
+ path, # type: str
+ subst_mapping, # type: typing.Mapping[str, str]
+):
+ # type: (...) -> typing.Optional[str]
+ path = ntpath.normcase(ntpath.normpath(path))
+ for letter, target in subst_mapping.items():
+ if ntpath.normcase(target) == path:
+ return letter
+ return None
+
+
+def find_unused_drive_letter():
+ import ctypes
+
+ buffer_len = 256
+ blen = ctypes.c_uint(buffer_len)
+ rv = ctypes.c_uint()
+ bufs = ctypes.create_string_buffer(buffer_len)
+ rv = ctypes.windll.kernel32.GetLogicalDriveStringsA(blen, bufs)
+ if rv > buffer_len:
+ raise Exception("GetLogicalDriveStringsA result too large for buffer")
+ nul = "\x00".encode("ascii")
+
+ used = [drive.decode("ascii")[0] for drive in bufs.raw.strip(nul).split(nul)]
+ possible = [c for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
+ available = sorted(list(set(possible) - set(used)))
+ if len(available) == 0:
+ return None
+ # Prefer to assign later letters rather than earlier letters
+ return available[-1]
+
+
+def create_subst_path(path):
+ for _attempt in range(0, 24):
+ drive = find_existing_win32_subst_for_path(
+ path, subst_mapping=list_win32_subst_letters()
+ )
+ if drive:
+ return drive
+ available = find_unused_drive_letter()
+ if available is None:
+ raise Exception(
+ (
+ "unable to make shorter subst mapping for %s; "
+ "no available drive letters"
+ )
+ % path
+ )
+
+ # Try to set up a subst mapping; note that we may be racing with
+ # other processes on the same host, so this may not succeed.
+ try:
+ subprocess.check_call(["subst", "%s:" % available, path])
+ return "%s:\\" % available
+ except Exception:
+ print("Failed to map %s -> %s" % (available, path))
+
+ raise Exception("failed to set up a subst path for %s" % path)
+
+
+def _check_host_type(args, host_type):
+ if host_type is None:
+ host_tuple_string = getattr(args, "host_type", None)
+ if host_tuple_string:
+ host_type = HostType.from_tuple_string(host_tuple_string)
+ else:
+ host_type = HostType()
+
+ assert isinstance(host_type, HostType)
+ return host_type
+
+
+def setup_build_options(args, host_type=None):
+ """Create a BuildOptions object based on the arguments"""
+
+ fbcode_builder_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ scratch_dir = args.scratch_path
+ if not scratch_dir:
+ # TODO: `mkscratch` doesn't currently know how best to place things on
+ # sandcastle, so whip up something reasonable-ish
+ if "SANDCASTLE" in os.environ:
+ if "DISK_TEMP" not in os.environ:
+ raise Exception(
+ (
+ "I need DISK_TEMP to be set in the sandcastle environment "
+ "so that I can store build products somewhere sane"
+ )
+ )
+ scratch_dir = os.path.join(
+ os.environ["DISK_TEMP"], "fbcode_builder_getdeps"
+ )
+ if not scratch_dir:
+ try:
+ scratch_dir = (
+ subprocess.check_output(
+ ["mkscratch", "path", "--subdir", "fbcode_builder_getdeps"]
+ )
+ .strip()
+ .decode("utf-8")
+ )
+ except OSError as exc:
+ if exc.errno != errno.ENOENT:
+ # A legit failure; don't fall back, surface the error
+ raise
+ # This system doesn't have mkscratch so we fall back to
+ # something local.
+ munged = fbcode_builder_dir.replace("Z", "zZ")
+ for s in ["/", "\\", ":"]:
+ munged = munged.replace(s, "Z")
+
+ if is_windows() and os.path.isdir("c:/open"):
+ temp = "c:/open/scratch"
+ else:
+ temp = tempfile.gettempdir()
+
+ scratch_dir = os.path.join(temp, "fbcode_builder_getdeps-%s" % munged)
+ if not is_windows() and os.geteuid() == 0:
+ # Running as root; in the case where someone runs
+ # sudo getdeps.py install-system-deps
+ # and then runs as build without privs, we want to avoid creating
+ # a scratch dir that the second stage cannot write to.
+ # So we generate a different path if we are root.
+ scratch_dir += "-root"
+
+ if not os.path.exists(scratch_dir):
+ os.makedirs(scratch_dir)
+
+ if is_windows():
+ subst = create_subst_path(scratch_dir)
+ print(
+ "Mapping scratch dir %s -> %s" % (scratch_dir, subst), file=sys.stderr
+ )
+ scratch_dir = subst
+ else:
+ if not os.path.exists(scratch_dir):
+ os.makedirs(scratch_dir)
+
+ # Make sure we normalize the scratch path. This path is used as part of the hash
+ # computation for detecting if projects have been updated, so we need to always
+ # use the exact same string to refer to a given directory.
+ # But! realpath in some combinations of Windows/Python3 versions can expand the
+ # drive substitutions on Windows, so avoid that!
+ if not is_windows():
+ scratch_dir = os.path.realpath(scratch_dir)
+
+ # Save any extra cmake defines passed by the user in an env variable, so it
+ # can be used while hashing this build.
+ os.environ["GETDEPS_CMAKE_DEFINES"] = getattr(args, "extra_cmake_defines", "") or ""
+
+ host_type = _check_host_type(args, host_type)
+
+ return BuildOptions(
+ fbcode_builder_dir,
+ scratch_dir,
+ host_type,
+ install_dir=args.install_prefix,
+ num_jobs=args.num_jobs,
+ use_shipit=args.use_shipit,
+ vcvars_path=args.vcvars_path,
+ allow_system_packages=args.allow_system_packages,
+ lfs_path=args.lfs_path,
+ )
diff --git a/build/fbcode_builder/getdeps/cache.py b/build/fbcode_builder/getdeps/cache.py
new file mode 100644
index 000000000..a261541c7
--- /dev/null
+++ b/build/fbcode_builder/getdeps/cache.py
@@ -0,0 +1,39 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+
+class ArtifactCache(object):
+ """The ArtifactCache is a small abstraction that allows caching
+ named things in some external storage mechanism.
+ The primary use case is for storing the build products on CI
+ systems to accelerate the build"""
+
+ def download_to_file(self, name, dest_file_name):
+ """If `name` exists in the cache, download it and place it
+ in the specified `dest_file_name` location on the filesystem.
+ If a transient issue was encountered a TransientFailure shall
+ be raised.
+ If `name` doesn't exist in the cache `False` shall be returned.
+ If `dest_file_name` was successfully updated `True` shall be
+ returned.
+ All other conditions shall raise an appropriate exception."""
+ return False
+
+ def upload_from_file(self, name, source_file_name):
+ """Causes `name` to be populated in the cache by uploading
+ the contents of `source_file_name` to the storage system.
+ If a transient issue was encountered a TransientFailure shall
+ be raised.
+ If the upload failed for some other reason, an appropriate
+ exception shall be raised."""
+ pass
+
+
+def create_cache():
+ """This function is monkey patchable to provide an actual
+ implementation"""
+ return None
diff --git a/build/fbcode_builder/getdeps/copytree.py b/build/fbcode_builder/getdeps/copytree.py
new file mode 100644
index 000000000..2790bc0d9
--- /dev/null
+++ b/build/fbcode_builder/getdeps/copytree.py
@@ -0,0 +1,78 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import os
+import shutil
+import subprocess
+
+from .platform import is_windows
+
+
+PREFETCHED_DIRS = set()
+
+
+def containing_repo_type(path):
+ while True:
+ if os.path.exists(os.path.join(path, ".git")):
+ return ("git", path)
+ if os.path.exists(os.path.join(path, ".hg")):
+ return ("hg", path)
+
+ parent = os.path.dirname(path)
+ if parent == path:
+ return None, None
+ path = parent
+
+
+def find_eden_root(dirpath):
+ """If the specified directory is inside an EdenFS checkout, returns
+ the canonical absolute path to the root of that checkout.
+
+ Returns None if the specified directory is not in an EdenFS checkout.
+ """
+ if is_windows():
+ repo_type, repo_root = containing_repo_type(dirpath)
+ if repo_root is not None:
+ if os.path.exists(os.path.join(repo_root, ".eden", "config")):
+ return os.path.realpath(repo_root)
+ return None
+
+ try:
+ return os.readlink(os.path.join(dirpath, ".eden", "root"))
+ except OSError:
+ return None
+
+
+def prefetch_dir_if_eden(dirpath):
+ """After an amend/rebase, Eden may need to fetch a large number
+ of trees from the servers. The simplistic single threaded walk
+ performed by copytree makes this more expensive than is desirable
+ so we help accelerate things by performing a prefetch on the
+ source directory"""
+ global PREFETCHED_DIRS
+ if dirpath in PREFETCHED_DIRS:
+ return
+ root = find_eden_root(dirpath)
+ if root is None:
+ return
+ glob = f"{os.path.relpath(dirpath, root).replace(os.sep, '/')}/**"
+ print(f"Prefetching {glob}")
+ subprocess.call(["edenfsctl", "prefetch", "--repo", root, "--silent", glob])
+ PREFETCHED_DIRS.add(dirpath)
+
+
+def copytree(src_dir, dest_dir, ignore=None):
+ """Recursively copy the src_dir to the dest_dir, filtering
+ out entries using the ignore lambda. The behavior of the
+ ignore lambda must match that described by `shutil.copytree`.
+ This `copytree` function knows how to prefetch data when
+ running in an eden repo.
+ TODO: I'd like to either extend this or add a variant that
+ uses watchman to mirror src_dir into dest_dir.
+ """
+ prefetch_dir_if_eden(src_dir)
+ return shutil.copytree(src_dir, dest_dir, ignore=ignore)
diff --git a/build/fbcode_builder/getdeps/dyndeps.py b/build/fbcode_builder/getdeps/dyndeps.py
new file mode 100644
index 000000000..216f26c46
--- /dev/null
+++ b/build/fbcode_builder/getdeps/dyndeps.py
@@ -0,0 +1,430 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import errno
+import glob
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+from struct import unpack
+
+from .envfuncs import path_search
+
+
+OBJECT_SUBDIRS = ("bin", "lib", "lib64")
+
+
+def copyfile(src, dest):
+ shutil.copyfile(src, dest)
+ shutil.copymode(src, dest)
+
+
+class DepBase(object):
+ def __init__(self, buildopts, install_dirs, strip):
+ self.buildopts = buildopts
+ self.env = buildopts.compute_env_for_install_dirs(install_dirs)
+ self.install_dirs = install_dirs
+ self.strip = strip
+ self.processed_deps = set()
+
+ def list_dynamic_deps(self, objfile):
+ raise RuntimeError("list_dynamic_deps not implemented")
+
+ def interesting_dep(self, d):
+ return True
+
+ # final_install_prefix must be the equivalent path to `destdir` on the
+ # installed system. For example, if destdir is `/tmp/RANDOM/usr/local' which
+ # is intended to map to `/usr/local` in the install image, then
+ # final_install_prefix='/usr/local'.
+ # If left unspecified, destdir will be used.
+ def process_deps(self, destdir, final_install_prefix=None):
+ if self.buildopts.is_windows():
+ lib_dir = "bin"
+ else:
+ lib_dir = "lib"
+ self.munged_lib_dir = os.path.join(destdir, lib_dir)
+
+ final_lib_dir = os.path.join(final_install_prefix or destdir, lib_dir)
+
+ if not os.path.isdir(self.munged_lib_dir):
+ os.makedirs(self.munged_lib_dir)
+
+ # Look only at the things that got installed in the leaf package,
+ # which will be the last entry in the install dirs list
+ inst_dir = self.install_dirs[-1]
+ print("Process deps under %s" % inst_dir, file=sys.stderr)
+
+ for dir in OBJECT_SUBDIRS:
+ src_dir = os.path.join(inst_dir, dir)
+ if not os.path.isdir(src_dir):
+ continue
+ dest_dir = os.path.join(destdir, dir)
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir)
+
+ for objfile in self.list_objs_in_dir(src_dir):
+ print("Consider %s/%s" % (dir, objfile))
+ dest_obj = os.path.join(dest_dir, objfile)
+ copyfile(os.path.join(src_dir, objfile), dest_obj)
+ self.munge_in_place(dest_obj, final_lib_dir)
+
+ def find_all_dependencies(self, build_dir):
+ all_deps = set()
+ for objfile in self.list_objs_in_dir(
+ build_dir, recurse=True, output_prefix=build_dir
+ ):
+ for d in self.list_dynamic_deps(objfile):
+ all_deps.add(d)
+
+ interesting_deps = {d for d in all_deps if self.interesting_dep(d)}
+ dep_paths = []
+ for dep in interesting_deps:
+ dep_path = self.resolve_loader_path(dep)
+ if dep_path:
+ dep_paths.append(dep_path)
+
+ return dep_paths
+
+ def munge_in_place(self, objfile, final_lib_dir):
+ print("Munging %s" % objfile)
+ for d in self.list_dynamic_deps(objfile):
+ if not self.interesting_dep(d):
+ continue
+
+ # Resolve this dep: does it exist in any of our installation
+ # directories? If so, then it is a candidate for processing
+ dep = self.resolve_loader_path(d)
+ print("dep: %s -> %s" % (d, dep))
+ if dep:
+ dest_dep = os.path.join(self.munged_lib_dir, os.path.basename(dep))
+ if dep not in self.processed_deps:
+ self.processed_deps.add(dep)
+ copyfile(dep, dest_dep)
+ self.munge_in_place(dest_dep, final_lib_dir)
+
+ self.rewrite_dep(objfile, d, dep, dest_dep, final_lib_dir)
+
+ if self.strip:
+ self.strip_debug_info(objfile)
+
+ def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir):
+ raise RuntimeError("rewrite_dep not implemented")
+
+ def resolve_loader_path(self, dep):
+ if os.path.isabs(dep):
+ return dep
+ d = os.path.basename(dep)
+ for inst_dir in self.install_dirs:
+ for libdir in OBJECT_SUBDIRS:
+ candidate = os.path.join(inst_dir, libdir, d)
+ if os.path.exists(candidate):
+ return candidate
+ return None
+
+ def list_objs_in_dir(self, dir, recurse=False, output_prefix=""):
+ for entry in os.listdir(dir):
+ entry_path = os.path.join(dir, entry)
+ st = os.lstat(entry_path)
+ if stat.S_ISREG(st.st_mode):
+ if self.is_objfile(entry_path):
+ relative_result = os.path.join(output_prefix, entry)
+ yield os.path.normcase(relative_result)
+ elif recurse and stat.S_ISDIR(st.st_mode):
+ child_prefix = os.path.join(output_prefix, entry)
+ for result in self.list_objs_in_dir(
+ entry_path, recurse=recurse, output_prefix=child_prefix
+ ):
+ yield result
+
+ def is_objfile(self, objfile):
+ return True
+
+ def strip_debug_info(self, objfile):
+ """override this to define how to remove debug information
+ from an object file"""
+ pass
+
+
+class WinDeps(DepBase):
+ def __init__(self, buildopts, install_dirs, strip):
+ super(WinDeps, self).__init__(buildopts, install_dirs, strip)
+ self.dumpbin = self.find_dumpbin()
+
+ def find_dumpbin(self):
+ # Looking for dumpbin in the following hardcoded paths.
+ # The registry option to find the install dir doesn't work anymore.
+ globs = [
+ (
+ "C:/Program Files (x86)/"
+ "Microsoft Visual Studio/"
+ "*/*/VC/Tools/"
+ "MSVC/*/bin/Hostx64/x64/dumpbin.exe"
+ ),
+ (
+ "C:/Program Files (x86)/"
+ "Common Files/"
+ "Microsoft/Visual C++ for Python/*/"
+ "VC/bin/dumpbin.exe"
+ ),
+ ("c:/Program Files (x86)/Microsoft Visual Studio */VC/bin/dumpbin.exe"),
+ ]
+ for pattern in globs:
+ for exe in glob.glob(pattern):
+ return exe
+
+ raise RuntimeError("could not find dumpbin.exe")
+
+ def list_dynamic_deps(self, exe):
+ deps = []
+ print("Resolve deps for %s" % exe)
+ output = subprocess.check_output(
+ [self.dumpbin, "/nologo", "/dependents", exe]
+ ).decode("utf-8")
+
+ lines = output.split("\n")
+ for line in lines:
+ m = re.match("\\s+(\\S+.dll)", line, re.IGNORECASE)
+ if m:
+ deps.append(m.group(1).lower())
+
+ return deps
+
+ def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir):
+ # We can't rewrite on windows, but we will
+ # place the deps alongside the exe so that
+ # they end up in the search path
+ pass
+
+ # These are the Windows system dll, which we don't want to copy while
+ # packaging.
+ SYSTEM_DLLS = set( # noqa: C405
+ [
+ "advapi32.dll",
+ "dbghelp.dll",
+ "kernel32.dll",
+ "msvcp140.dll",
+ "vcruntime140.dll",
+ "ws2_32.dll",
+ "ntdll.dll",
+ "shlwapi.dll",
+ ]
+ )
+
+ def interesting_dep(self, d):
+ if "api-ms-win-crt" in d:
+ return False
+ if d in self.SYSTEM_DLLS:
+ return False
+ return True
+
+ def is_objfile(self, objfile):
+ if not os.path.isfile(objfile):
+ return False
+ if objfile.lower().endswith(".exe"):
+ return True
+ return False
+
+ def emit_dev_run_script(self, script_path, dep_dirs):
+ """Emit a script that can be used to run build artifacts directly from the
+ build directory, without installing them.
+
+ The dep_dirs parameter should be a list of paths that need to be added to $PATH.
+ This can be computed by calling compute_dependency_paths() or
+ compute_dependency_paths_fast().
+
+ This is only necessary on Windows, which does not have RPATH, and instead
+ requires the $PATH environment variable be updated in order to find the proper
+ library dependencies.
+ """
+ contents = self._get_dev_run_script_contents(dep_dirs)
+ with open(script_path, "w") as f:
+ f.write(contents)
+
+ def compute_dependency_paths(self, build_dir):
+ """Return a list of all directories that need to be added to $PATH to ensure
+ that library dependencies can be found correctly. This is computed by scanning
+ binaries to determine exactly the right list of dependencies.
+
+ The compute_dependency_paths_fast() is a alternative function that runs faster
+ but may return additional extraneous paths.
+ """
+ dep_dirs = set()
+ # Find paths by scanning the binaries.
+ for dep in self.find_all_dependencies(build_dir):
+ dep_dirs.add(os.path.dirname(dep))
+
+ dep_dirs.update(self.read_custom_dep_dirs(build_dir))
+ return sorted(dep_dirs)
+
+ def compute_dependency_paths_fast(self, build_dir):
+ """Similar to compute_dependency_paths(), but rather than actually scanning
+ binaries, just add all library paths from the specified installation
+ directories. This is much faster than scanning the binaries, but may result in
+ more paths being returned than actually necessary.
+ """
+ dep_dirs = set()
+ for inst_dir in self.install_dirs:
+ for subdir in OBJECT_SUBDIRS:
+ path = os.path.join(inst_dir, subdir)
+ if os.path.exists(path):
+ dep_dirs.add(path)
+
+ dep_dirs.update(self.read_custom_dep_dirs(build_dir))
+ return sorted(dep_dirs)
+
+ def read_custom_dep_dirs(self, build_dir):
+ # The build system may also have included libraries from other locations that
+ # we might not be able to find normally in find_all_dependencies().
+ # To handle this situation we support reading additional library paths
+ # from a LIBRARY_DEP_DIRS.txt file that may have been generated in the build
+ # output directory.
+ dep_dirs = set()
+ try:
+ explicit_dep_dirs_path = os.path.join(build_dir, "LIBRARY_DEP_DIRS.txt")
+ with open(explicit_dep_dirs_path, "r") as f:
+ for line in f.read().splitlines():
+ dep_dirs.add(line)
+ except OSError as ex:
+ if ex.errno != errno.ENOENT:
+ raise
+
+ return dep_dirs
+
+ def _get_dev_run_script_contents(self, path_dirs):
+ path_entries = ["$env:PATH"] + path_dirs
+ path_str = ";".join(path_entries)
+ return """\
+$orig_env = $env:PATH
+$env:PATH = "{path_str}"
+
+try {{
+ $cmd_args = $args[1..$args.length]
+ & $args[0] @cmd_args
+}} finally {{
+ $env:PATH = $orig_env
+}}
+""".format(
+ path_str=path_str
+ )
+
+
+class ElfDeps(DepBase):
+ def __init__(self, buildopts, install_dirs, strip):
+ super(ElfDeps, self).__init__(buildopts, install_dirs, strip)
+
+ # We need patchelf to rewrite deps, so ensure that it is built...
+ subprocess.check_call([sys.executable, sys.argv[0], "build", "patchelf"])
+ # ... and that we know where it lives
+ self.patchelf = os.path.join(
+ os.fsdecode(
+ subprocess.check_output(
+ [sys.executable, sys.argv[0], "show-inst-dir", "patchelf"]
+ ).strip()
+ ),
+ "bin/patchelf",
+ )
+
+ def list_dynamic_deps(self, objfile):
+ out = (
+ subprocess.check_output(
+ [self.patchelf, "--print-needed", objfile], env=dict(self.env.items())
+ )
+ .decode("utf-8")
+ .strip()
+ )
+ lines = out.split("\n")
+ return lines
+
+ def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir):
+ final_dep = os.path.join(
+ final_lib_dir, os.path.relpath(new_dep, self.munged_lib_dir)
+ )
+ subprocess.check_call(
+ [self.patchelf, "--replace-needed", depname, final_dep, objfile]
+ )
+
+ def is_objfile(self, objfile):
+ if not os.path.isfile(objfile):
+ return False
+ with open(objfile, "rb") as f:
+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
+ magic = f.read(4)
+ return magic == b"\x7fELF"
+
+ def strip_debug_info(self, objfile):
+ subprocess.check_call(["strip", objfile])
+
+
+# MACH-O magic number
+MACH_MAGIC = 0xFEEDFACF
+
+
+class MachDeps(DepBase):
+ def interesting_dep(self, d):
+ if d.startswith("/usr/lib/") or d.startswith("/System/"):
+ return False
+ return True
+
+ def is_objfile(self, objfile):
+ if not os.path.isfile(objfile):
+ return False
+ with open(objfile, "rb") as f:
+ # mach stores the magic number in native endianness,
+ # so unpack as native here and compare
+ header = f.read(4)
+ if len(header) != 4:
+ return False
+ magic = unpack("I", header)[0]
+ return magic == MACH_MAGIC
+
+ def list_dynamic_deps(self, objfile):
+ if not self.interesting_dep(objfile):
+ return
+ out = (
+ subprocess.check_output(
+ ["otool", "-L", objfile], env=dict(self.env.items())
+ )
+ .decode("utf-8")
+ .strip()
+ )
+ lines = out.split("\n")
+ deps = []
+ for line in lines:
+ m = re.match("\t(\\S+)\\s", line)
+ if m:
+ if os.path.basename(m.group(1)) != os.path.basename(objfile):
+ deps.append(os.path.normcase(m.group(1)))
+ return deps
+
+ def rewrite_dep(self, objfile, depname, old_dep, new_dep, final_lib_dir):
+ if objfile.endswith(".dylib"):
+ # Erase the original location from the id of the shared
+ # object. It doesn't appear to hurt to retain it, but
+ # it does look weird, so let's rewrite it to be sure.
+ subprocess.check_call(
+ ["install_name_tool", "-id", os.path.basename(objfile), objfile]
+ )
+ final_dep = os.path.join(
+ final_lib_dir, os.path.relpath(new_dep, self.munged_lib_dir)
+ )
+
+ subprocess.check_call(
+ ["install_name_tool", "-change", depname, final_dep, objfile]
+ )
+
+
+def create_dyn_dep_munger(buildopts, install_dirs, strip=False):
+ if buildopts.is_linux():
+ return ElfDeps(buildopts, install_dirs, strip)
+ if buildopts.is_darwin():
+ return MachDeps(buildopts, install_dirs, strip)
+ if buildopts.is_windows():
+ return WinDeps(buildopts, install_dirs, strip)
diff --git a/build/fbcode_builder/getdeps/envfuncs.py b/build/fbcode_builder/getdeps/envfuncs.py
new file mode 100644
index 000000000..f2e13f16f
--- /dev/null
+++ b/build/fbcode_builder/getdeps/envfuncs.py
@@ -0,0 +1,195 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import os
+import shlex
+import sys
+
+
+class Env(object):
+ def __init__(self, src=None):
+ self._dict = {}
+ if src is None:
+ self.update(os.environ)
+ else:
+ self.update(src)
+
+ def update(self, src):
+ for k, v in src.items():
+ self.set(k, v)
+
+ def copy(self):
+ return Env(self._dict)
+
+ def _key(self, key):
+ # The `str` cast may not appear to be needed, but without it we run
+ # into issues when passing the environment to subprocess. The main
+ # issue is that in python2 `os.environ` (which is the initial source
+ # of data for the environment) uses byte based strings, but this
+ # project uses `unicode_literals`. `subprocess` will raise an error
+ # if the environment that it is passed has a mixture of byte and
+ # unicode strings.
+ # It is simplest to force everthing to be `str` for the sake of
+ # consistency.
+ key = str(key)
+ if sys.platform.startswith("win"):
+ # Windows env var names are case insensitive but case preserving.
+ # An implementation of PAR files on windows gets confused if
+ # the env block contains keys with conflicting case, so make a
+ # pass over the contents to remove any.
+ # While this O(n) scan is technically expensive and gross, it
+ # is practically not a problem because the volume of calls is
+ # relatively low and the cost of manipulating the env is dwarfed
+ # by the cost of spawning a process on windows. In addition,
+ # since the processes that we run are expensive anyway, this
+ # overhead is not the worst thing to worry about.
+ for k in list(self._dict.keys()):
+ if str(k).lower() == key.lower():
+ return k
+ elif key in self._dict:
+ return key
+ return None
+
+ def get(self, key, defval=None):
+ key = self._key(key)
+ if key is None:
+ return defval
+ return self._dict[key]
+
+ def __getitem__(self, key):
+ val = self.get(key)
+ if key is None:
+ raise KeyError(key)
+ return val
+
+ def unset(self, key):
+ if key is None:
+ raise KeyError("attempting to unset env[None]")
+
+ key = self._key(key)
+ if key:
+ del self._dict[key]
+
+ def __delitem__(self, key):
+ self.unset(key)
+
+ def __repr__(self):
+ return repr(self._dict)
+
+ def set(self, key, value):
+ if key is None:
+ raise KeyError("attempting to assign env[None] = %r" % value)
+
+ if value is None:
+ raise ValueError("attempting to assign env[%s] = None" % key)
+
+ # The `str` conversion is important to avoid triggering errors
+ # with subprocess if we pass in a unicode value; see commentary
+ # in the `_key` method.
+ key = str(key)
+ value = str(value)
+
+ # The `unset` call is necessary on windows where the keys are
+ # case insensitive. Since this dict is case sensitive, simply
+ # assigning the value to the new key is not sufficient to remove
+ # the old value. The `unset` call knows how to match keys and
+ # remove any potential duplicates.
+ self.unset(key)
+ self._dict[key] = value
+
+ def __setitem__(self, key, value):
+ self.set(key, value)
+
+ def __iter__(self):
+ return self._dict.__iter__()
+
+ def __len__(self):
+ return len(self._dict)
+
+ def keys(self):
+ return self._dict.keys()
+
+ def values(self):
+ return self._dict.values()
+
+ def items(self):
+ return self._dict.items()
+
+
+def add_path_entry(env, name, item, append=True, separator=os.pathsep):
+ """Cause `item` to be added to the path style env var named
+ `name` held in the `env` dict. `append` specifies whether
+ the item is added to the end (the default) or should be
+ prepended if `name` already exists."""
+ val = env.get(name, "")
+ if len(val) > 0:
+ val = val.split(separator)
+ else:
+ val = []
+ if append:
+ val.append(item)
+ else:
+ val.insert(0, item)
+ env.set(name, separator.join(val))
+
+
+def add_flag(env, name, flag, append=True):
+ """Cause `flag` to be added to the CXXFLAGS-style env var named
+ `name` held in the `env` dict. `append` specifies whether the
+ flag is added to the end (the default) or should be prepended if
+ `name` already exists."""
+ val = shlex.split(env.get(name, ""))
+ if append:
+ val.append(flag)
+ else:
+ val.insert(0, flag)
+ env.set(name, " ".join(val))
+
+
+_path_search_cache = {}
+_not_found = object()
+
+
+def tpx_path():
+ return "xplat/testinfra/tpx/ctp.tpx"
+
+
+def path_search(env, exename, defval=None):
+ """Search for exename in the PATH specified in env.
+ exename is eg: `ninja` and this function knows to append a .exe
+ to the end on windows.
+ Returns the path to the exe if found, or None if either no
+ PATH is set in env or no executable is found."""
+
+ path = env.get("PATH", None)
+ if path is None:
+ return defval
+
+ # The project hash computation code searches for C++ compilers (g++, clang, etc)
+ # repeatedly. Cache the result so we don't end up searching for these over and over
+ # again.
+ cache_key = (path, exename)
+ result = _path_search_cache.get(cache_key, _not_found)
+ if result is _not_found:
+ result = _perform_path_search(path, exename)
+ _path_search_cache[cache_key] = result
+ return result
+
+
+def _perform_path_search(path, exename):
+ is_win = sys.platform.startswith("win")
+ if is_win:
+ exename = "%s.exe" % exename
+
+ for bindir in path.split(os.pathsep):
+ full_name = os.path.join(bindir, exename)
+ if os.path.exists(full_name) and os.path.isfile(full_name):
+ if not is_win and not os.access(full_name, os.X_OK):
+ continue
+ return full_name
+
+ return None
diff --git a/build/fbcode_builder/getdeps/errors.py b/build/fbcode_builder/getdeps/errors.py
new file mode 100644
index 000000000..3fad1a1de
--- /dev/null
+++ b/build/fbcode_builder/getdeps/errors.py
@@ -0,0 +1,19 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+
+class TransientFailure(Exception):
+ """Raising this error causes getdeps to return with an error code
+ that Sandcastle will consider to be a retryable transient
+ infrastructure error"""
+
+ pass
+
+
+class ManifestNotFound(Exception):
+ def __init__(self, manifest_name):
+ super(Exception, self).__init__("Unable to find manifest '%s'" % manifest_name)
diff --git a/build/fbcode_builder/getdeps/expr.py b/build/fbcode_builder/getdeps/expr.py
new file mode 100644
index 000000000..6c0485d03
--- /dev/null
+++ b/build/fbcode_builder/getdeps/expr.py
@@ -0,0 +1,184 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import re
+import shlex
+
+
+def parse_expr(expr_text, valid_variables):
+ """parses the simple criteria expression syntax used in
+ dependency specifications.
+ Returns an ExprNode instance that can be evaluated like this:
+
+ ```
+ expr = parse_expr("os=windows")
+ ok = expr.eval({
+ "os": "windows"
+ })
+ ```
+
+ Whitespace is allowed between tokens. The following terms
+ are recognized:
+
+ KEY = VALUE # Evaluates to True if ctx[KEY] == VALUE
+ not(EXPR) # Evaluates to True if EXPR evaluates to False
+ # and vice versa
+ all(EXPR1, EXPR2, ...) # Evaluates True if all of the supplied
+ # EXPR's also evaluate True
+ any(EXPR1, EXPR2, ...) # Evaluates True if any of the supplied
+ # EXPR's also evaluate True, False if
+ # none of them evaluated true.
+ """
+
+ p = Parser(expr_text, valid_variables)
+ return p.parse()
+
+
+class ExprNode(object):
+ def eval(self, ctx):
+ return False
+
+
+class TrueExpr(ExprNode):
+ def eval(self, ctx):
+ return True
+
+ def __str__(self):
+ return "true"
+
+
+class NotExpr(ExprNode):
+ def __init__(self, node):
+ self._node = node
+
+ def eval(self, ctx):
+ return not self._node.eval(ctx)
+
+ def __str__(self):
+ return "not(%s)" % self._node
+
+
+class AllExpr(ExprNode):
+ def __init__(self, nodes):
+ self._nodes = nodes
+
+ def eval(self, ctx):
+ for node in self._nodes:
+ if not node.eval(ctx):
+ return False
+ return True
+
+ def __str__(self):
+ items = []
+ for node in self._nodes:
+ items.append(str(node))
+ return "all(%s)" % ",".join(items)
+
+
+class AnyExpr(ExprNode):
+ def __init__(self, nodes):
+ self._nodes = nodes
+
+ def eval(self, ctx):
+ for node in self._nodes:
+ if node.eval(ctx):
+ return True
+ return False
+
+ def __str__(self):
+ items = []
+ for node in self._nodes:
+ items.append(str(node))
+ return "any(%s)" % ",".join(items)
+
+
+class EqualExpr(ExprNode):
+ def __init__(self, key, value):
+ self._key = key
+ self._value = value
+
+ def eval(self, ctx):
+ return ctx.get(self._key) == self._value
+
+ def __str__(self):
+ return "%s=%s" % (self._key, self._value)
+
+
+class Parser(object):
+ def __init__(self, text, valid_variables):
+ self.text = text
+ self.lex = shlex.shlex(text)
+ self.valid_variables = valid_variables
+
+ def parse(self):
+ expr = self.top()
+ garbage = self.lex.get_token()
+ if garbage != "":
+ raise Exception(
+ "Unexpected token %s after EqualExpr in %s" % (garbage, self.text)
+ )
+ return expr
+
+ def top(self):
+ name = self.ident()
+ op = self.lex.get_token()
+
+ if op == "(":
+ parsers = {
+ "not": self.parse_not,
+ "any": self.parse_any,
+ "all": self.parse_all,
+ }
+ func = parsers.get(name)
+ if not func:
+ raise Exception("invalid term %s in %s" % (name, self.text))
+ return func()
+
+ if op == "=":
+ if name not in self.valid_variables:
+ raise Exception("unknown variable %r in expression" % (name,))
+ return EqualExpr(name, self.lex.get_token())
+
+ raise Exception(
+ "Unexpected token sequence '%s %s' in %s" % (name, op, self.text)
+ )
+
+ def ident(self):
+ ident = self.lex.get_token()
+ if not re.match("[a-zA-Z]+", ident):
+ raise Exception("expected identifier found %s" % ident)
+ return ident
+
+ def parse_not(self):
+ node = self.top()
+ expr = NotExpr(node)
+ tok = self.lex.get_token()
+ if tok != ")":
+ raise Exception("expected ')' found %s" % tok)
+ return expr
+
+ def parse_any(self):
+ nodes = []
+ while True:
+ nodes.append(self.top())
+ tok = self.lex.get_token()
+ if tok == ")":
+ break
+ if tok != ",":
+ raise Exception("expected ',' or ')' but found %s" % tok)
+ return AnyExpr(nodes)
+
+ def parse_all(self):
+ nodes = []
+ while True:
+ nodes.append(self.top())
+ tok = self.lex.get_token()
+ if tok == ")":
+ break
+ if tok != ",":
+ raise Exception("expected ',' or ')' but found %s" % tok)
+ return AllExpr(nodes)
diff --git a/build/fbcode_builder/getdeps/fetcher.py b/build/fbcode_builder/getdeps/fetcher.py
new file mode 100644
index 000000000..041549ad7
--- /dev/null
+++ b/build/fbcode_builder/getdeps/fetcher.py
@@ -0,0 +1,771 @@
+#!/usr/bin/env python3
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import errno
+import hashlib
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tarfile
+import time
+import zipfile
+from datetime import datetime
+from typing import Dict, NamedTuple
+
+from .copytree import prefetch_dir_if_eden
+from .envfuncs import Env
+from .errors import TransientFailure
+from .platform import is_windows
+from .runcmd import run_cmd
+
+
+try:
+ from urllib import urlretrieve
+ from urlparse import urlparse
+except ImportError:
+ from urllib.parse import urlparse
+ from urllib.request import urlretrieve
+
+
+def file_name_is_cmake_file(file_name):
+ file_name = file_name.lower()
+ base = os.path.basename(file_name)
+ return (
+ base.endswith(".cmake")
+ or base.endswith(".cmake.in")
+ or base == "cmakelists.txt"
+ )
+
+
+class ChangeStatus(object):
+ """Indicates the nature of changes that happened while updating
+ the source directory. There are two broad uses:
+ * When extracting archives for third party software we want to
+ know that we did something (eg: we either extracted code or
+ we didn't do anything)
+ * For 1st party code where we use shipit to transform the code,
+ we want to know if we changed anything so that we can perform
+ a build, but we generally want to be a little more nuanced
+ and be able to distinguish between just changing a source file
+ and whether we might need to reconfigure the build system.
+ """
+
+ def __init__(self, all_changed=False):
+ """Construct a ChangeStatus object. The default is to create
+ a status that indicates no changes, but passing all_changed=True
+ will create one that indicates that everything changed"""
+ if all_changed:
+ self.source_files = 1
+ self.make_files = 1
+ else:
+ self.source_files = 0
+ self.make_files = 0
+
+ def record_change(self, file_name):
+ """Used by the shipit fetcher to record changes as it updates
+ files in the destination. If the file name might be one used
+ in the cmake build system that we use for 1st party code, then
+ record that as a "make file" change. We could broaden this
+ to match any file used by various build systems, but it is
+ only really useful for our internal cmake stuff at this time.
+ If the file isn't a build file and is under the `fbcode_builder`
+ dir then we don't class that as an interesting change that we
+ might need to rebuild, so we ignore it.
+ Otherwise we record the file as a source file change."""
+
+ file_name = file_name.lower()
+ if file_name_is_cmake_file(file_name):
+ self.make_files += 1
+ elif "/fbcode_builder/cmake" in file_name:
+ self.source_files += 1
+ elif "/fbcode_builder/" not in file_name:
+ self.source_files += 1
+
+ def sources_changed(self):
+ """Returns true if any source files were changed during
+ an update operation. This will typically be used to decide
+ that the build system to be run on the source dir in an
+ incremental mode"""
+ return self.source_files > 0
+
+ def build_changed(self):
+ """Returns true if any build files were changed during
+ an update operation. This will typically be used to decidfe
+ that the build system should be reconfigured and re-run
+ as a full build"""
+ return self.make_files > 0
+
+
+class Fetcher(object):
+ """The Fetcher is responsible for fetching and extracting the
+ sources for project. The Fetcher instance defines where the
+ extracted data resides and reports this to the consumer via
+ its `get_src_dir` method."""
+
+ def update(self):
+ """Brings the src dir up to date, ideally minimizing
+ changes so that a subsequent build doesn't over-build.
+ Returns a ChangeStatus object that helps the caller to
+ understand the nature of the changes required during
+ the update."""
+ return ChangeStatus()
+
+ def clean(self):
+ """Reverts any changes that might have been made to
+ the src dir"""
+ pass
+
+ def hash(self):
+ """Returns a hash that identifies the version of the code in the
+ working copy. For a git repo this is commit hash for the working
+ copy. For other Fetchers this should relate to the version of
+ the code in the src dir. The intent is that if a manifest
+ changes the version/rev of a project that the hash be different.
+ Importantly, this should be computable without actually fetching
+ the code, as we want this to factor into a hash used to download
+ a pre-built version of the code, without having to first download
+ and extract its sources (eg: boost on windows is pretty painful).
+ """
+ pass
+
+ def get_src_dir(self):
+ """Returns the source directory that the project was
+ extracted into"""
+ pass
+
+
+class LocalDirFetcher(object):
+ """This class exists to override the normal fetching behavior, and
+ use an explicit user-specified directory for the project sources.
+
+ This fetcher cannot update or track changes. It always reports that the
+ project has changed, forcing it to always be built."""
+
+ def __init__(self, path):
+ self.path = os.path.realpath(path)
+
+ def update(self):
+ return ChangeStatus(all_changed=True)
+
+ def hash(self):
+ return "0" * 40
+
+ def get_src_dir(self):
+ return self.path
+
+
+class SystemPackageFetcher(object):
+ def __init__(self, build_options, packages):
+ self.manager = build_options.host_type.get_package_manager()
+ self.packages = packages.get(self.manager)
+ if self.packages:
+ self.installed = None
+ else:
+ self.installed = False
+
+ def packages_are_installed(self):
+ if self.installed is not None:
+ return self.installed
+
+ if self.manager == "rpm":
+ result = run_cmd(["rpm", "-q"] + self.packages, allow_fail=True)
+ self.installed = result == 0
+ elif self.manager == "deb":
+ result = run_cmd(["dpkg", "-s"] + self.packages, allow_fail=True)
+ self.installed = result == 0
+ else:
+ self.installed = False
+
+ return self.installed
+
+ def update(self):
+ assert self.installed
+ return ChangeStatus(all_changed=False)
+
+ def hash(self):
+ return "0" * 40
+
+ def get_src_dir(self):
+ return None
+
+
+class PreinstalledNopFetcher(SystemPackageFetcher):
+ def __init__(self):
+ self.installed = True
+
+
+class GitFetcher(Fetcher):
+ DEFAULT_DEPTH = 1
+
+ def __init__(self, build_options, manifest, repo_url, rev, depth):
+ # Extract the host/path portions of the URL and generate a flattened
+ # directory name. eg:
+ # github.com/facebook/folly.git -> github.com-facebook-folly.git
+ url = urlparse(repo_url)
+ directory = "%s%s" % (url.netloc, url.path)
+ for s in ["/", "\\", ":"]:
+ directory = directory.replace(s, "-")
+
+ # Place it in a repos dir in the scratch space
+ repos_dir = os.path.join(build_options.scratch_dir, "repos")
+ if not os.path.exists(repos_dir):
+ os.makedirs(repos_dir)
+ self.repo_dir = os.path.join(repos_dir, directory)
+
+ if not rev and build_options.project_hashes:
+ hash_file = os.path.join(
+ build_options.project_hashes,
+ re.sub("\\.git$", "-rev.txt", url.path[1:]),
+ )
+ if os.path.exists(hash_file):
+ with open(hash_file, "r") as f:
+ data = f.read()
+ m = re.match("Subproject commit ([a-fA-F0-9]{40})", data)
+ if not m:
+ raise Exception("Failed to parse rev from %s" % hash_file)
+ rev = m.group(1)
+ print("Using pinned rev %s for %s" % (rev, repo_url))
+
+ self.rev = rev or "master"
+ self.origin_repo = repo_url
+ self.manifest = manifest
+ self.depth = depth if depth else GitFetcher.DEFAULT_DEPTH
+
+ def _update(self):
+ current_hash = (
+ subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=self.repo_dir)
+ .strip()
+ .decode("utf-8")
+ )
+ target_hash = (
+ subprocess.check_output(["git", "rev-parse", self.rev], cwd=self.repo_dir)
+ .strip()
+ .decode("utf-8")
+ )
+ if target_hash == current_hash:
+ # It's up to date, so there are no changes. This doesn't detect eg:
+ # if origin/master moved and rev='master', but that's ok for our purposes;
+ # we should be using explicit hashes or eg: a stable branch for the cases
+ # that we care about, and it isn't unreasonable to require that the user
+ # explicitly perform a clean build if those have moved. For the most
+ # part we prefer that folks build using a release tarball from github
+ # rather than use the git protocol, as it is generally a bit quicker
+ # to fetch and easier to hash and verify tarball downloads.
+ return ChangeStatus()
+
+ print("Updating %s -> %s" % (self.repo_dir, self.rev))
+ run_cmd(["git", "fetch", "origin", self.rev], cwd=self.repo_dir)
+ run_cmd(["git", "checkout", self.rev], cwd=self.repo_dir)
+ run_cmd(["git", "submodule", "update", "--init"], cwd=self.repo_dir)
+
+ return ChangeStatus(True)
+
+ def update(self):
+ if os.path.exists(self.repo_dir):
+ return self._update()
+ self._clone()
+ return ChangeStatus(True)
+
+ def _clone(self):
+ print("Cloning %s..." % self.origin_repo)
+ # The basename/dirname stuff allows us to dance around issues where
+ # eg: this python process is native win32, but the git.exe is cygwin
+ # or msys and doesn't like the absolute windows path that we'd otherwise
+ # pass to it. Careful use of cwd helps avoid headaches with cygpath.
+ run_cmd(
+ [
+ "git",
+ "clone",
+ "--depth=" + str(self.depth),
+ "--",
+ self.origin_repo,
+ os.path.basename(self.repo_dir),
+ ],
+ cwd=os.path.dirname(self.repo_dir),
+ )
+ self._update()
+
+ def clean(self):
+ if os.path.exists(self.repo_dir):
+ run_cmd(["git", "clean", "-fxd"], cwd=self.repo_dir)
+
+ def hash(self):
+ return self.rev
+
+ def get_src_dir(self):
+ return self.repo_dir
+
+
+def does_file_need_update(src_name, src_st, dest_name):
+ try:
+ target_st = os.lstat(dest_name)
+ except OSError as exc:
+ if exc.errno != errno.ENOENT:
+ raise
+ return True
+
+ if src_st.st_size != target_st.st_size:
+ return True
+
+ if stat.S_IFMT(src_st.st_mode) != stat.S_IFMT(target_st.st_mode):
+ return True
+ if stat.S_ISLNK(src_st.st_mode):
+ return os.readlink(src_name) != os.readlink(dest_name)
+ if not stat.S_ISREG(src_st.st_mode):
+ return True
+
+ # They might have the same content; compare.
+ with open(src_name, "rb") as sf, open(dest_name, "rb") as df:
+ chunk_size = 8192
+ while True:
+ src_data = sf.read(chunk_size)
+ dest_data = df.read(chunk_size)
+ if src_data != dest_data:
+ return True
+ if len(src_data) < chunk_size:
+ # EOF
+ break
+ return False
+
+
+def copy_if_different(src_name, dest_name):
+ """Copy src_name -> dest_name, but only touch dest_name
+ if src_name is different from dest_name, making this a
+ more build system friendly way to copy."""
+ src_st = os.lstat(src_name)
+ if not does_file_need_update(src_name, src_st, dest_name):
+ return False
+
+ dest_parent = os.path.dirname(dest_name)
+ if not os.path.exists(dest_parent):
+ os.makedirs(dest_parent)
+ if stat.S_ISLNK(src_st.st_mode):
+ try:
+ os.unlink(dest_name)
+ except OSError as exc:
+ if exc.errno != errno.ENOENT:
+ raise
+ target = os.readlink(src_name)
+ print("Symlinking %s -> %s" % (dest_name, target))
+ os.symlink(target, dest_name)
+ else:
+ print("Copying %s -> %s" % (src_name, dest_name))
+ shutil.copy2(src_name, dest_name)
+
+ return True
+
+
+def list_files_under_dir_newer_than_timestamp(dir_to_scan, ts):
+ for root, _dirs, files in os.walk(dir_to_scan):
+ for src_file in files:
+ full_name = os.path.join(root, src_file)
+ st = os.lstat(full_name)
+ if st.st_mtime > ts:
+ yield full_name
+
+
+class ShipitPathMap(object):
+ def __init__(self):
+ self.roots = []
+ self.mapping = []
+ self.exclusion = []
+
+ def add_mapping(self, fbsource_dir, target_dir):
+ """Add a posix path or pattern. We cannot normpath the input
+ here because that would change the paths from posix to windows
+ form and break the logic throughout this class."""
+ self.roots.append(fbsource_dir)
+ self.mapping.append((fbsource_dir, target_dir))
+
+ def add_exclusion(self, pattern):
+ self.exclusion.append(re.compile(pattern))
+
+ def _minimize_roots(self):
+ """compute the de-duplicated set of roots within fbsource.
+ We take the shortest common directory prefix to make this
+ determination"""
+ self.roots.sort(key=len)
+ minimized = []
+
+ for r in self.roots:
+ add_this_entry = True
+ for existing in minimized:
+ if r.startswith(existing + "/"):
+ add_this_entry = False
+ break
+ if add_this_entry:
+ minimized.append(r)
+
+ self.roots = minimized
+
+ def _sort_mapping(self):
+ self.mapping.sort(reverse=True, key=lambda x: len(x[0]))
+
+ def _map_name(self, norm_name, dest_root):
+ if norm_name.endswith(".pyc") or norm_name.endswith(".swp"):
+ # Ignore some incidental garbage while iterating
+ return None
+
+ for excl in self.exclusion:
+ if excl.match(norm_name):
+ return None
+
+ for src_name, dest_name in self.mapping:
+ if norm_name == src_name or norm_name.startswith(src_name + "/"):
+ rel_name = os.path.relpath(norm_name, src_name)
+ # We can have "." as a component of some paths, depending
+ # on the contents of the shipit transformation section.
+ # normpath doesn't always remove `.` as the final component
+ # of the path, which be problematic when we later mkdir
+ # the dirname of the path that we return. Take care to avoid
+ # returning a path with a `.` in it.
+ rel_name = os.path.normpath(rel_name)
+ if dest_name == ".":
+ return os.path.normpath(os.path.join(dest_root, rel_name))
+ dest_name = os.path.normpath(dest_name)
+ return os.path.normpath(os.path.join(dest_root, dest_name, rel_name))
+
+ raise Exception("%s did not match any rules" % norm_name)
+
+ def mirror(self, fbsource_root, dest_root):
+ self._minimize_roots()
+ self._sort_mapping()
+
+ change_status = ChangeStatus()
+
+ # Record the full set of files that should be in the tree
+ full_file_list = set()
+
+ for fbsource_subdir in self.roots:
+ dir_to_mirror = os.path.join(fbsource_root, fbsource_subdir)
+ prefetch_dir_if_eden(dir_to_mirror)
+ if not os.path.exists(dir_to_mirror):
+ raise Exception(
+ "%s doesn't exist; check your sparse profile!" % dir_to_mirror
+ )
+ for root, _dirs, files in os.walk(dir_to_mirror):
+ for src_file in files:
+ full_name = os.path.join(root, src_file)
+ rel_name = os.path.relpath(full_name, fbsource_root)
+ norm_name = rel_name.replace("\\", "/")
+
+ target_name = self._map_name(norm_name, dest_root)
+ if target_name:
+ full_file_list.add(target_name)
+ if copy_if_different(full_name, target_name):
+ change_status.record_change(target_name)
+
+ # Compare the list of previously shipped files; if a file is
+ # in the old list but not the new list then it has been
+ # removed from the source and should be removed from the
+ # destination.
+ # Why don't we simply create this list by walking dest_root?
+ # Some builds currently have to be in-source builds and
+ # may legitimately need to keep some state in the source tree :-/
+ installed_name = os.path.join(dest_root, ".shipit_shipped")
+ if os.path.exists(installed_name):
+ with open(installed_name, "rb") as f:
+ for name in f.read().decode("utf-8").splitlines():
+ name = name.strip()
+ if name not in full_file_list:
+ print("Remove %s" % name)
+ os.unlink(name)
+ change_status.record_change(name)
+
+ with open(installed_name, "wb") as f:
+ for name in sorted(list(full_file_list)):
+ f.write(("%s\n" % name).encode("utf-8"))
+
+ return change_status
+
+
+class FbsourceRepoData(NamedTuple):
+ hash: str
+ date: str
+
+
+FBSOURCE_REPO_DATA: Dict[str, FbsourceRepoData] = {}
+
+
+def get_fbsource_repo_data(build_options):
+ """Returns the commit metadata for the fbsource repo.
+ Since we may have multiple first party projects to
+ hash, and because we don't mutate the repo, we cache
+ this hash in a global."""
+ cached_data = FBSOURCE_REPO_DATA.get(build_options.fbsource_dir)
+ if cached_data:
+ return cached_data
+
+ cmd = ["hg", "log", "-r.", "-T{node}\n{date|hgdate}"]
+ env = Env()
+ env.set("HGPLAIN", "1")
+ log_data = subprocess.check_output(
+ cmd, cwd=build_options.fbsource_dir, env=dict(env.items())
+ ).decode("ascii")
+
+ (hash, datestr) = log_data.split("\n")
+
+ # datestr is like "seconds fractionalseconds"
+ # We want "20200324.113140"
+ (unixtime, _fractional) = datestr.split(" ")
+ date = datetime.fromtimestamp(int(unixtime)).strftime("%Y%m%d.%H%M%S")
+ cached_data = FbsourceRepoData(hash=hash, date=date)
+
+ FBSOURCE_REPO_DATA[build_options.fbsource_dir] = cached_data
+
+ return cached_data
+
+
+class SimpleShipitTransformerFetcher(Fetcher):
+ def __init__(self, build_options, manifest):
+ self.build_options = build_options
+ self.manifest = manifest
+ self.repo_dir = os.path.join(build_options.scratch_dir, "shipit", manifest.name)
+
+ def clean(self):
+ if os.path.exists(self.repo_dir):
+ shutil.rmtree(self.repo_dir)
+
+ def update(self):
+ mapping = ShipitPathMap()
+ for src, dest in self.manifest.get_section_as_ordered_pairs("shipit.pathmap"):
+ mapping.add_mapping(src, dest)
+ if self.manifest.shipit_fbcode_builder:
+ mapping.add_mapping(
+ "fbcode/opensource/fbcode_builder", "build/fbcode_builder"
+ )
+ for pattern in self.manifest.get_section_as_args("shipit.strip"):
+ mapping.add_exclusion(pattern)
+
+ return mapping.mirror(self.build_options.fbsource_dir, self.repo_dir)
+
+ def hash(self):
+ # We return a fixed non-hash string for in-fbsource builds.
+ # We're relying on the `update` logic to correctly invalidate
+ # the build in the case that files have changed.
+ return "fbsource"
+
+ def get_src_dir(self):
+ return self.repo_dir
+
+
+class ShipitTransformerFetcher(Fetcher):
+ SHIPIT = "/var/www/scripts/opensource/shipit/run_shipit.php"
+
+ def __init__(self, build_options, project_name):
+ self.build_options = build_options
+ self.project_name = project_name
+ self.repo_dir = os.path.join(build_options.scratch_dir, "shipit", project_name)
+
+ def update(self):
+ if os.path.exists(self.repo_dir):
+ return ChangeStatus()
+ self.run_shipit()
+ return ChangeStatus(True)
+
+ def clean(self):
+ if os.path.exists(self.repo_dir):
+ shutil.rmtree(self.repo_dir)
+
+ @classmethod
+ def available(cls):
+ return os.path.exists(cls.SHIPIT)
+
+ def run_shipit(self):
+ tmp_path = self.repo_dir + ".new"
+ try:
+ if os.path.exists(tmp_path):
+ shutil.rmtree(tmp_path)
+
+ # Run shipit
+ run_cmd(
+ [
+ "php",
+ ShipitTransformerFetcher.SHIPIT,
+ "--project=" + self.project_name,
+ "--create-new-repo",
+ "--source-repo-dir=" + self.build_options.fbsource_dir,
+ "--source-branch=.",
+ "--skip-source-init",
+ "--skip-source-pull",
+ "--skip-source-clean",
+ "--skip-push",
+ "--skip-reset",
+ "--destination-use-anonymous-https",
+ "--create-new-repo-output-path=" + tmp_path,
+ ]
+ )
+
+ # Remove the .git directory from the repository it generated.
+ # There is no need to commit this.
+ repo_git_dir = os.path.join(tmp_path, ".git")
+ shutil.rmtree(repo_git_dir)
+ os.rename(tmp_path, self.repo_dir)
+ except Exception:
+ # Clean up after a failed extraction
+ if os.path.exists(tmp_path):
+ shutil.rmtree(tmp_path)
+ self.clean()
+ raise
+
+ def hash(self):
+ # We return a fixed non-hash string for in-fbsource builds.
+ return "fbsource"
+
+ def get_src_dir(self):
+ return self.repo_dir
+
+
+def download_url_to_file_with_progress(url, file_name):
+ print("Download %s -> %s ..." % (url, file_name))
+
+ class Progress(object):
+ last_report = 0
+
+ def progress(self, count, block, total):
+ if total == -1:
+ total = "(Unknown)"
+ amount = count * block
+
+ if sys.stdout.isatty():
+ sys.stdout.write("\r downloading %s of %s " % (amount, total))
+ else:
+ # When logging to CI logs, avoid spamming the logs and print
+ # status every few seconds
+ now = time.time()
+ if now - self.last_report > 5:
+ sys.stdout.write(".. %s of %s " % (amount, total))
+ self.last_report = now
+ sys.stdout.flush()
+
+ progress = Progress()
+ start = time.time()
+ try:
+ (_filename, headers) = urlretrieve(url, file_name, reporthook=progress.progress)
+ except (OSError, IOError) as exc: # noqa: B014
+ raise TransientFailure(
+ "Failed to download %s to %s: %s" % (url, file_name, str(exc))
+ )
+
+ end = time.time()
+ sys.stdout.write(" [Complete in %f seconds]\n" % (end - start))
+ sys.stdout.flush()
+ print(f"{headers}")
+
+
+class ArchiveFetcher(Fetcher):
+ def __init__(self, build_options, manifest, url, sha256):
+ self.manifest = manifest
+ self.url = url
+ self.sha256 = sha256
+ self.build_options = build_options
+
+ url = urlparse(self.url)
+ basename = "%s-%s" % (manifest.name, os.path.basename(url.path))
+ self.file_name = os.path.join(build_options.scratch_dir, "downloads", basename)
+ self.src_dir = os.path.join(build_options.scratch_dir, "extracted", basename)
+ self.hash_file = self.src_dir + ".hash"
+
+ def _verify_hash(self):
+ h = hashlib.sha256()
+ with open(self.file_name, "rb") as f:
+ while True:
+ block = f.read(8192)
+ if not block:
+ break
+ h.update(block)
+ digest = h.hexdigest()
+ if digest != self.sha256:
+ os.unlink(self.file_name)
+ raise Exception(
+ "%s: expected sha256 %s but got %s" % (self.url, self.sha256, digest)
+ )
+
+ def _download_dir(self):
+ """returns the download dir, creating it if it doesn't already exist"""
+ download_dir = os.path.dirname(self.file_name)
+ if not os.path.exists(download_dir):
+ os.makedirs(download_dir)
+ return download_dir
+
+ def _download(self):
+ self._download_dir()
+ download_url_to_file_with_progress(self.url, self.file_name)
+ self._verify_hash()
+
+ def clean(self):
+ if os.path.exists(self.src_dir):
+ shutil.rmtree(self.src_dir)
+
+ def update(self):
+ try:
+ with open(self.hash_file, "r") as f:
+ saved_hash = f.read().strip()
+ if saved_hash == self.sha256 and os.path.exists(self.src_dir):
+ # Everything is up to date
+ return ChangeStatus()
+ print(
+ "saved hash %s doesn't match expected hash %s, re-validating"
+ % (saved_hash, self.sha256)
+ )
+ os.unlink(self.hash_file)
+ except EnvironmentError:
+ pass
+
+ # If we got here we know the contents of src_dir are either missing
+ # or wrong, so blow away whatever happened to be there first.
+ if os.path.exists(self.src_dir):
+ shutil.rmtree(self.src_dir)
+
+ # If we already have a file here, make sure it looks legit before
+ # proceeding: any errors and we just remove it and re-download
+ if os.path.exists(self.file_name):
+ try:
+ self._verify_hash()
+ except Exception:
+ if os.path.exists(self.file_name):
+ os.unlink(self.file_name)
+
+ if not os.path.exists(self.file_name):
+ self._download()
+
+ if tarfile.is_tarfile(self.file_name):
+ opener = tarfile.open
+ elif zipfile.is_zipfile(self.file_name):
+ opener = zipfile.ZipFile
+ else:
+ raise Exception("don't know how to extract %s" % self.file_name)
+ os.makedirs(self.src_dir)
+ print("Extract %s -> %s" % (self.file_name, self.src_dir))
+ t = opener(self.file_name)
+ if is_windows():
+ # Ensure that we don't fall over when dealing with long paths
+ # on windows
+ src = r"\\?\%s" % os.path.normpath(self.src_dir)
+ else:
+ src = self.src_dir
+ # The `str` here is necessary to ensure that we don't pass a unicode
+ # object down to tarfile.extractall on python2. When extracting
+ # the boost tarball it makes some assumptions and tries to convert
+ # a non-ascii path to ascii and throws.
+ src = str(src)
+ t.extractall(src)
+
+ with open(self.hash_file, "w") as f:
+ f.write(self.sha256)
+
+ return ChangeStatus(True)
+
+ def hash(self):
+ return self.sha256
+
+ def get_src_dir(self):
+ return self.src_dir
diff --git a/build/fbcode_builder/getdeps/load.py b/build/fbcode_builder/getdeps/load.py
new file mode 100644
index 000000000..c5f40d2fa
--- /dev/null
+++ b/build/fbcode_builder/getdeps/load.py
@@ -0,0 +1,354 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import base64
+import hashlib
+import os
+
+from . import fetcher
+from .envfuncs import path_search
+from .errors import ManifestNotFound
+from .manifest import ManifestParser
+
+
+class Loader(object):
+ """The loader allows our tests to patch the load operation"""
+
+ def _list_manifests(self, build_opts):
+ """Returns a generator that iterates all the available manifests"""
+ for (path, _, files) in os.walk(build_opts.manifests_dir):
+ for name in files:
+ # skip hidden files
+ if name.startswith("."):
+ continue
+
+ yield os.path.join(path, name)
+
+ def _load_manifest(self, path):
+ return ManifestParser(path)
+
+ def load_project(self, build_opts, project_name):
+ if "/" in project_name or "\\" in project_name:
+ # Assume this is a path already
+ return ManifestParser(project_name)
+
+ for manifest in self._list_manifests(build_opts):
+ if os.path.basename(manifest) == project_name:
+ return ManifestParser(manifest)
+
+ raise ManifestNotFound(project_name)
+
+ def load_all(self, build_opts):
+ manifests_by_name = {}
+
+ for manifest in self._list_manifests(build_opts):
+ m = self._load_manifest(manifest)
+
+ if m.name in manifests_by_name:
+ raise Exception("found duplicate manifest '%s'" % m.name)
+
+ manifests_by_name[m.name] = m
+
+ return manifests_by_name
+
+
+class ResourceLoader(Loader):
+ def __init__(self, namespace, manifests_dir):
+ self.namespace = namespace
+ self.manifests_dir = manifests_dir
+
+ def _list_manifests(self, _build_opts):
+ import pkg_resources
+
+ dirs = [self.manifests_dir]
+
+ while dirs:
+ current = dirs.pop(0)
+ for name in pkg_resources.resource_listdir(self.namespace, current):
+ path = "%s/%s" % (current, name)
+
+ if pkg_resources.resource_isdir(self.namespace, path):
+ dirs.append(path)
+ else:
+ yield "%s/%s" % (current, name)
+
+ def _find_manifest(self, project_name):
+ for name in self._list_manifests():
+ if name.endswith("/%s" % project_name):
+ return name
+
+ raise ManifestNotFound(project_name)
+
+ def _load_manifest(self, path):
+ import pkg_resources
+
+ contents = pkg_resources.resource_string(self.namespace, path).decode("utf8")
+ return ManifestParser(file_name=path, fp=contents)
+
+ def load_project(self, build_opts, project_name):
+ project_name = self._find_manifest(project_name)
+ return self._load_resource_manifest(project_name)
+
+
+LOADER = Loader()
+
+
+def patch_loader(namespace, manifests_dir="manifests"):
+ global LOADER
+ LOADER = ResourceLoader(namespace, manifests_dir)
+
+
+def load_project(build_opts, project_name):
+ """given the name of a project or a path to a manifest file,
+ load up the ManifestParser instance for it and return it"""
+ return LOADER.load_project(build_opts, project_name)
+
+
+def load_all_manifests(build_opts):
+ return LOADER.load_all(build_opts)
+
+
+class ManifestLoader(object):
+ """ManifestLoader stores information about project manifest relationships for a
+ given set of (build options + platform) configuration.
+
+ The ManifestLoader class primarily serves as a location to cache project dependency
+ relationships and project hash values for this build configuration.
+ """
+
+ def __init__(self, build_opts, ctx_gen=None):
+ self._loader = LOADER
+ self.build_opts = build_opts
+ if ctx_gen is None:
+ self.ctx_gen = self.build_opts.get_context_generator()
+ else:
+ self.ctx_gen = ctx_gen
+
+ self.manifests_by_name = {}
+ self._loaded_all = False
+ self._project_hashes = {}
+ self._fetcher_overrides = {}
+ self._build_dir_overrides = {}
+ self._install_dir_overrides = {}
+ self._install_prefix_overrides = {}
+
+ def load_manifest(self, name):
+ manifest = self.manifests_by_name.get(name)
+ if manifest is None:
+ manifest = self._loader.load_project(self.build_opts, name)
+ self.manifests_by_name[name] = manifest
+ return manifest
+
+ def load_all_manifests(self):
+ if not self._loaded_all:
+ all_manifests_by_name = self._loader.load_all(self.build_opts)
+ if self.manifests_by_name:
+ # To help ensure that we only ever have a single manifest object for a
+ # given project, and that it can't change once we have loaded it,
+ # only update our mapping for projects that weren't already loaded.
+ for name, manifest in all_manifests_by_name.items():
+ self.manifests_by_name.setdefault(name, manifest)
+ else:
+ self.manifests_by_name = all_manifests_by_name
+ self._loaded_all = True
+
+ return self.manifests_by_name
+
+ def manifests_in_dependency_order(self, manifest=None):
+ """Compute all dependencies of the specified project. Returns a list of the
+ dependencies plus the project itself, in topologically sorted order.
+
+ Each entry in the returned list only depends on projects that appear before it
+ in the list.
+
+ If the input manifest is None, the dependencies for all currently loaded
+ projects will be computed. i.e., if you call load_all_manifests() followed by
+ manifests_in_dependency_order() this will return a global dependency ordering of
+ all projects."""
+ # The list of deps that have been fully processed
+ seen = set()
+ # The list of deps which have yet to be evaluated. This
+ # can potentially contain duplicates.
+ if manifest is None:
+ deps = list(self.manifests_by_name.values())
+ else:
+ assert manifest.name in self.manifests_by_name
+ deps = [manifest]
+ # The list of manifests in dependency order
+ dep_order = []
+
+ while len(deps) > 0:
+ m = deps.pop(0)
+ if m.name in seen:
+ continue
+
+ # Consider its deps, if any.
+ # We sort them for increased determinism; we'll produce
+ # a correct order even if they aren't sorted, but we prefer
+ # to produce the same order regardless of how they are listed
+ # in the project manifest files.
+ ctx = self.ctx_gen.get_context(m.name)
+ dep_list = sorted(m.get_section_as_dict("dependencies", ctx).keys())
+ builder = m.get("build", "builder", ctx=ctx)
+ if builder in ("cmake", "python-wheel"):
+ dep_list.append("cmake")
+ elif builder == "autoconf" and m.name not in (
+ "autoconf",
+ "libtool",
+ "automake",
+ ):
+ # they need libtool and its deps (automake, autoconf) so add
+ # those as deps (but obviously not if we're building those
+ # projects themselves)
+ dep_list.append("libtool")
+
+ dep_count = 0
+ for dep_name in dep_list:
+ # If we're not sure whether it is done, queue it up
+ if dep_name not in seen:
+ dep = self.manifests_by_name.get(dep_name)
+ if dep is None:
+ dep = self._loader.load_project(self.build_opts, dep_name)
+ self.manifests_by_name[dep.name] = dep
+
+ deps.append(dep)
+ dep_count += 1
+
+ if dep_count > 0:
+ # If we queued anything, re-queue this item, as it depends
+ # those new item(s) and their transitive deps.
+ deps.append(m)
+ continue
+
+ # Its deps are done, so we can emit it
+ seen.add(m.name)
+ dep_order.append(m)
+
+ return dep_order
+
+ def set_project_src_dir(self, project_name, path):
+ self._fetcher_overrides[project_name] = fetcher.LocalDirFetcher(path)
+
+ def set_project_build_dir(self, project_name, path):
+ self._build_dir_overrides[project_name] = path
+
+ def set_project_install_dir(self, project_name, path):
+ self._install_dir_overrides[project_name] = path
+
+ def set_project_install_prefix(self, project_name, path):
+ self._install_prefix_overrides[project_name] = path
+
+ def create_fetcher(self, manifest):
+ override = self._fetcher_overrides.get(manifest.name)
+ if override is not None:
+ return override
+
+ ctx = self.ctx_gen.get_context(manifest.name)
+ return manifest.create_fetcher(self.build_opts, ctx)
+
+ def get_project_hash(self, manifest):
+ h = self._project_hashes.get(manifest.name)
+ if h is None:
+ h = self._compute_project_hash(manifest)
+ self._project_hashes[manifest.name] = h
+ return h
+
+ def _compute_project_hash(self, manifest):
+ """This recursive function computes a hash for a given manifest.
+ The hash takes into account some environmental factors on the
+ host machine and includes the hashes of its dependencies.
+ No caching of the computation is performed, which is theoretically
+ wasteful but the computation is fast enough that it is not required
+ to cache across multiple invocations."""
+ ctx = self.ctx_gen.get_context(manifest.name)
+
+ hasher = hashlib.sha256()
+ # Some environmental and configuration things matter
+ env = {}
+ env["install_dir"] = self.build_opts.install_dir
+ env["scratch_dir"] = self.build_opts.scratch_dir
+ env["vcvars_path"] = self.build_opts.vcvars_path
+ env["os"] = self.build_opts.host_type.ostype
+ env["distro"] = self.build_opts.host_type.distro
+ env["distro_vers"] = self.build_opts.host_type.distrovers
+ for name in [
+ "CXXFLAGS",
+ "CPPFLAGS",
+ "LDFLAGS",
+ "CXX",
+ "CC",
+ "GETDEPS_CMAKE_DEFINES",
+ ]:
+ env[name] = os.environ.get(name)
+ for tool in ["cc", "c++", "gcc", "g++", "clang", "clang++"]:
+ env["tool-%s" % tool] = path_search(os.environ, tool)
+ for name in manifest.get_section_as_args("depends.environment", ctx):
+ env[name] = os.environ.get(name)
+
+ fetcher = self.create_fetcher(manifest)
+ env["fetcher.hash"] = fetcher.hash()
+
+ for name in sorted(env.keys()):
+ hasher.update(name.encode("utf-8"))
+ value = env.get(name)
+ if value is not None:
+ try:
+ hasher.update(value.encode("utf-8"))
+ except AttributeError as exc:
+ raise AttributeError("name=%r, value=%r: %s" % (name, value, exc))
+
+ manifest.update_hash(hasher, ctx)
+
+ dep_list = sorted(manifest.get_section_as_dict("dependencies", ctx).keys())
+ for dep in dep_list:
+ dep_manifest = self.load_manifest(dep)
+ dep_hash = self.get_project_hash(dep_manifest)
+ hasher.update(dep_hash.encode("utf-8"))
+
+ # Use base64 to represent the hash, rather than the simple hex digest,
+ # so that the string is shorter. Use the URL-safe encoding so that
+ # the hash can also be safely used as a filename component.
+ h = base64.urlsafe_b64encode(hasher.digest()).decode("ascii")
+ # ... and because cmd.exe is troublesome with `=` signs, nerf those.
+ # They tend to be padding characters at the end anyway, so we can
+ # safely discard them.
+ h = h.replace("=", "")
+
+ return h
+
+ def _get_project_dir_name(self, manifest):
+ if manifest.is_first_party_project():
+ return manifest.name
+ else:
+ project_hash = self.get_project_hash(manifest)
+ return "%s-%s" % (manifest.name, project_hash)
+
+ def get_project_install_dir(self, manifest):
+ override = self._install_dir_overrides.get(manifest.name)
+ if override:
+ return override
+
+ project_dir_name = self._get_project_dir_name(manifest)
+ return os.path.join(self.build_opts.install_dir, project_dir_name)
+
+ def get_project_build_dir(self, manifest):
+ override = self._build_dir_overrides.get(manifest.name)
+ if override:
+ return override
+
+ project_dir_name = self._get_project_dir_name(manifest)
+ return os.path.join(self.build_opts.scratch_dir, "build", project_dir_name)
+
+ def get_project_install_prefix(self, manifest):
+ return self._install_prefix_overrides.get(manifest.name)
+
+ def get_project_install_dir_respecting_install_prefix(self, manifest):
+ inst_dir = self.get_project_install_dir(manifest)
+ prefix = self.get_project_install_prefix(manifest)
+ if prefix:
+ return inst_dir + prefix
+ return inst_dir
diff --git a/build/fbcode_builder/getdeps/manifest.py b/build/fbcode_builder/getdeps/manifest.py
new file mode 100644
index 000000000..71566d659
--- /dev/null
+++ b/build/fbcode_builder/getdeps/manifest.py
@@ -0,0 +1,606 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import io
+import os
+
+from .builder import (
+ AutoconfBuilder,
+ Boost,
+ CargoBuilder,
+ CMakeBuilder,
+ BistroBuilder,
+ Iproute2Builder,
+ MakeBuilder,
+ NinjaBootstrap,
+ NopBuilder,
+ OpenNSABuilder,
+ OpenSSLBuilder,
+ SqliteBuilder,
+ CMakeBootStrapBuilder,
+)
+from .expr import parse_expr
+from .fetcher import (
+ ArchiveFetcher,
+ GitFetcher,
+ PreinstalledNopFetcher,
+ ShipitTransformerFetcher,
+ SimpleShipitTransformerFetcher,
+ SystemPackageFetcher,
+)
+from .py_wheel_builder import PythonWheelBuilder
+
+
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
+
+REQUIRED = "REQUIRED"
+OPTIONAL = "OPTIONAL"
+
+SCHEMA = {
+ "manifest": {
+ "optional_section": False,
+ "fields": {
+ "name": REQUIRED,
+ "fbsource_path": OPTIONAL,
+ "shipit_project": OPTIONAL,
+ "shipit_fbcode_builder": OPTIONAL,
+ },
+ },
+ "dependencies": {"optional_section": True, "allow_values": False},
+ "depends.environment": {"optional_section": True},
+ "git": {
+ "optional_section": True,
+ "fields": {"repo_url": REQUIRED, "rev": OPTIONAL, "depth": OPTIONAL},
+ },
+ "download": {
+ "optional_section": True,
+ "fields": {"url": REQUIRED, "sha256": REQUIRED},
+ },
+ "build": {
+ "optional_section": True,
+ "fields": {
+ "builder": REQUIRED,
+ "subdir": OPTIONAL,
+ "build_in_src_dir": OPTIONAL,
+ "disable_env_override_pkgconfig": OPTIONAL,
+ "disable_env_override_path": OPTIONAL,
+ },
+ },
+ "msbuild": {"optional_section": True, "fields": {"project": REQUIRED}},
+ "cargo": {
+ "optional_section": True,
+ "fields": {
+ "build_doc": OPTIONAL,
+ "workspace_dir": OPTIONAL,
+ "manifests_to_build": OPTIONAL,
+ },
+ },
+ "cmake.defines": {"optional_section": True},
+ "autoconf.args": {"optional_section": True},
+ "rpms": {"optional_section": True},
+ "debs": {"optional_section": True},
+ "preinstalled.env": {"optional_section": True},
+ "b2.args": {"optional_section": True},
+ "make.build_args": {"optional_section": True},
+ "make.install_args": {"optional_section": True},
+ "make.test_args": {"optional_section": True},
+ "header-only": {"optional_section": True, "fields": {"includedir": REQUIRED}},
+ "shipit.pathmap": {"optional_section": True},
+ "shipit.strip": {"optional_section": True},
+ "install.files": {"optional_section": True},
+}
+
+# These sections are allowed to vary for different platforms
+# using the expression syntax to enable/disable sections
+ALLOWED_EXPR_SECTIONS = [
+ "autoconf.args",
+ "build",
+ "cmake.defines",
+ "dependencies",
+ "make.build_args",
+ "make.install_args",
+ "b2.args",
+ "download",
+ "git",
+ "install.files",
+]
+
+
+def parse_conditional_section_name(name, section_def):
+ expr = name[len(section_def) + 1 :]
+ return parse_expr(expr, ManifestContext.ALLOWED_VARIABLES)
+
+
+def validate_allowed_fields(file_name, section, config, allowed_fields):
+ for field in config.options(section):
+ if not allowed_fields.get(field):
+ raise Exception(
+ ("manifest file %s section '%s' contains " "unknown field '%s'")
+ % (file_name, section, field)
+ )
+
+ for field in allowed_fields:
+ if allowed_fields[field] == REQUIRED and not config.has_option(section, field):
+ raise Exception(
+ ("manifest file %s section '%s' is missing " "required field '%s'")
+ % (file_name, section, field)
+ )
+
+
+def validate_allow_values(file_name, section, config):
+ for field in config.options(section):
+ value = config.get(section, field)
+ if value is not None:
+ raise Exception(
+ (
+ "manifest file %s section '%s' has '%s = %s' but "
+ "this section doesn't allow specifying values "
+ "for its entries"
+ )
+ % (file_name, section, field, value)
+ )
+
+
+def validate_section(file_name, section, config):
+ section_def = SCHEMA.get(section)
+ if not section_def:
+ for name in ALLOWED_EXPR_SECTIONS:
+ if section.startswith(name + "."):
+ # Verify that the conditional parses, but discard it
+ try:
+ parse_conditional_section_name(section, name)
+ except Exception as exc:
+ raise Exception(
+ ("manifest file %s section '%s' has invalid " "conditional: %s")
+ % (file_name, section, str(exc))
+ )
+ section_def = SCHEMA.get(name)
+ canonical_section_name = name
+ break
+ if not section_def:
+ raise Exception(
+ "manifest file %s contains unknown section '%s'" % (file_name, section)
+ )
+ else:
+ canonical_section_name = section
+
+ allowed_fields = section_def.get("fields")
+ if allowed_fields:
+ validate_allowed_fields(file_name, section, config, allowed_fields)
+ elif not section_def.get("allow_values", True):
+ validate_allow_values(file_name, section, config)
+ return canonical_section_name
+
+
+class ManifestParser(object):
+ def __init__(self, file_name, fp=None):
+ # allow_no_value enables listing parameters in the
+ # autoconf.args section one per line
+ config = configparser.RawConfigParser(allow_no_value=True)
+ config.optionxform = str # make it case sensitive
+
+ if fp is None:
+ with open(file_name, "r") as fp:
+ config.read_file(fp)
+ elif isinstance(fp, type("")):
+ # For testing purposes, parse from a string (str
+ # or unicode)
+ config.read_file(io.StringIO(fp))
+ else:
+ config.read_file(fp)
+
+ # validate against the schema
+ seen_sections = set()
+
+ for section in config.sections():
+ seen_sections.add(validate_section(file_name, section, config))
+
+ for section in SCHEMA.keys():
+ section_def = SCHEMA[section]
+ if (
+ not section_def.get("optional_section", False)
+ and section not in seen_sections
+ ):
+ raise Exception(
+ "manifest file %s is missing required section %s"
+ % (file_name, section)
+ )
+
+ self._config = config
+ self.name = config.get("manifest", "name")
+ self.fbsource_path = self.get("manifest", "fbsource_path")
+ self.shipit_project = self.get("manifest", "shipit_project")
+ self.shipit_fbcode_builder = self.get("manifest", "shipit_fbcode_builder")
+
+ if self.name != os.path.basename(file_name):
+ raise Exception(
+ "filename of the manifest '%s' does not match the manifest name '%s'"
+ % (file_name, self.name)
+ )
+
+ def get(self, section, key, defval=None, ctx=None):
+ ctx = ctx or {}
+
+ for s in self._config.sections():
+ if s == section:
+ if self._config.has_option(s, key):
+ return self._config.get(s, key)
+ return defval
+
+ if s.startswith(section + "."):
+ expr = parse_conditional_section_name(s, section)
+ if not expr.eval(ctx):
+ continue
+
+ if self._config.has_option(s, key):
+ return self._config.get(s, key)
+
+ return defval
+
+ def get_section_as_args(self, section, ctx=None):
+ """Intended for use with the make.[build_args/install_args] and
+ autoconf.args sections, this method collects the entries and returns an
+ array of strings.
+ If the manifest contains conditional sections, ctx is used to
+ evaluate the condition and merge in the values.
+ """
+ args = []
+ ctx = ctx or {}
+
+ for s in self._config.sections():
+ if s != section:
+ if not s.startswith(section + "."):
+ continue
+ expr = parse_conditional_section_name(s, section)
+ if not expr.eval(ctx):
+ continue
+ for field in self._config.options(s):
+ value = self._config.get(s, field)
+ if value is None:
+ args.append(field)
+ else:
+ args.append("%s=%s" % (field, value))
+ return args
+
+ def get_section_as_ordered_pairs(self, section, ctx=None):
+ """Used for eg: shipit.pathmap which has strong
+ ordering requirements"""
+ res = []
+ ctx = ctx or {}
+
+ for s in self._config.sections():
+ if s != section:
+ if not s.startswith(section + "."):
+ continue
+ expr = parse_conditional_section_name(s, section)
+ if not expr.eval(ctx):
+ continue
+
+ for key in self._config.options(s):
+ value = self._config.get(s, key)
+ res.append((key, value))
+ return res
+
+ def get_section_as_dict(self, section, ctx=None):
+ d = {}
+ ctx = ctx or {}
+
+ for s in self._config.sections():
+ if s != section:
+ if not s.startswith(section + "."):
+ continue
+ expr = parse_conditional_section_name(s, section)
+ if not expr.eval(ctx):
+ continue
+ for field in self._config.options(s):
+ value = self._config.get(s, field)
+ d[field] = value
+ return d
+
+ def update_hash(self, hasher, ctx):
+ """Compute a hash over the configuration for the given
+ context. The goal is for the hash to change if the config
+ for that context changes, but not if a change is made to
+ the config only for a different platform than that expressed
+ by ctx. The hash is intended to be used to help invalidate
+ a future cache for the third party build products.
+ The hasher argument is a hash object returned from hashlib."""
+ for section in sorted(SCHEMA.keys()):
+ hasher.update(section.encode("utf-8"))
+
+ # Note: at the time of writing, nothing in the implementation
+ # relies on keys in any config section being ordered.
+ # In theory we could have conflicting flags in different
+ # config sections and later flags override earlier flags.
+ # For the purposes of computing a hash we're not super
+ # concerned about this: manifest changes should be rare
+ # enough and we'd rather that this trigger an invalidation
+ # than strive for a cache hit at this time.
+ pairs = self.get_section_as_ordered_pairs(section, ctx)
+ pairs.sort(key=lambda pair: pair[0])
+ for key, value in pairs:
+ hasher.update(key.encode("utf-8"))
+ if value is not None:
+ hasher.update(value.encode("utf-8"))
+
+ def is_first_party_project(self):
+ """returns true if this is an FB first-party project"""
+ return self.shipit_project is not None
+
+ def get_required_system_packages(self, ctx):
+ """Returns dictionary of packager system -> list of packages"""
+ return {
+ "rpm": self.get_section_as_args("rpms", ctx),
+ "deb": self.get_section_as_args("debs", ctx),
+ }
+
+ def _is_satisfied_by_preinstalled_environment(self, ctx):
+ envs = self.get_section_as_args("preinstalled.env", ctx)
+ if not envs:
+ return False
+ for key in envs:
+ val = os.environ.get(key, None)
+ print(f"Testing ENV[{key}]: {repr(val)}")
+ if val is None:
+ return False
+ if len(val) == 0:
+ return False
+
+ return True
+
+ def create_fetcher(self, build_options, ctx):
+ use_real_shipit = (
+ ShipitTransformerFetcher.available() and build_options.use_shipit
+ )
+ if (
+ not use_real_shipit
+ and self.fbsource_path
+ and build_options.fbsource_dir
+ and self.shipit_project
+ ):
+ return SimpleShipitTransformerFetcher(build_options, self)
+
+ if (
+ self.fbsource_path
+ and build_options.fbsource_dir
+ and self.shipit_project
+ and ShipitTransformerFetcher.available()
+ ):
+ # We can use the code from fbsource
+ return ShipitTransformerFetcher(build_options, self.shipit_project)
+
+ # Can we satisfy this dep with system packages?
+ if build_options.allow_system_packages:
+ if self._is_satisfied_by_preinstalled_environment(ctx):
+ return PreinstalledNopFetcher()
+
+ packages = self.get_required_system_packages(ctx)
+ package_fetcher = SystemPackageFetcher(build_options, packages)
+ if package_fetcher.packages_are_installed():
+ return package_fetcher
+
+ repo_url = self.get("git", "repo_url", ctx=ctx)
+ if repo_url:
+ rev = self.get("git", "rev")
+ depth = self.get("git", "depth")
+ return GitFetcher(build_options, self, repo_url, rev, depth)
+
+ url = self.get("download", "url", ctx=ctx)
+ if url:
+ # We need to defer this import until now to avoid triggering
+ # a cycle when the facebook/__init__.py is loaded.
+ try:
+ from getdeps.facebook.lfs import LFSCachingArchiveFetcher
+
+ return LFSCachingArchiveFetcher(
+ build_options, self, url, self.get("download", "sha256", ctx=ctx)
+ )
+ except ImportError:
+ # This FB internal module isn't shippped to github,
+ # so just use its base class
+ return ArchiveFetcher(
+ build_options, self, url, self.get("download", "sha256", ctx=ctx)
+ )
+
+ raise KeyError(
+ "project %s has no fetcher configuration matching %s" % (self.name, ctx)
+ )
+
+ def create_builder( # noqa:C901
+ self,
+ build_options,
+ src_dir,
+ build_dir,
+ inst_dir,
+ ctx,
+ loader,
+ final_install_prefix=None,
+ extra_cmake_defines=None,
+ ):
+ builder = self.get("build", "builder", ctx=ctx)
+ if not builder:
+ raise Exception("project %s has no builder for %r" % (self.name, ctx))
+ build_in_src_dir = self.get("build", "build_in_src_dir", "false", ctx=ctx)
+ if build_in_src_dir == "true":
+ # Some scripts don't work when they are configured and build in
+ # a different directory than source (or when the build directory
+ # is not a subdir of source).
+ build_dir = src_dir
+ subdir = self.get("build", "subdir", None, ctx=ctx)
+ if subdir is not None:
+ build_dir = os.path.join(build_dir, subdir)
+ print("build_dir is %s" % build_dir) # just to quiet lint
+
+ if builder == "make" or builder == "cmakebootstrap":
+ build_args = self.get_section_as_args("make.build_args", ctx)
+ install_args = self.get_section_as_args("make.install_args", ctx)
+ test_args = self.get_section_as_args("make.test_args", ctx)
+ if builder == "cmakebootstrap":
+ return CMakeBootStrapBuilder(
+ build_options,
+ ctx,
+ self,
+ src_dir,
+ None,
+ inst_dir,
+ build_args,
+ install_args,
+ test_args,
+ )
+ else:
+ return MakeBuilder(
+ build_options,
+ ctx,
+ self,
+ src_dir,
+ None,
+ inst_dir,
+ build_args,
+ install_args,
+ test_args,
+ )
+
+ if builder == "autoconf":
+ args = self.get_section_as_args("autoconf.args", ctx)
+ return AutoconfBuilder(
+ build_options, ctx, self, src_dir, build_dir, inst_dir, args
+ )
+
+ if builder == "boost":
+ args = self.get_section_as_args("b2.args", ctx)
+ return Boost(build_options, ctx, self, src_dir, build_dir, inst_dir, args)
+
+ if builder == "bistro":
+ return BistroBuilder(
+ build_options,
+ ctx,
+ self,
+ src_dir,
+ build_dir,
+ inst_dir,
+ )
+
+ if builder == "cmake":
+ defines = self.get_section_as_dict("cmake.defines", ctx)
+ return CMakeBuilder(
+ build_options,
+ ctx,
+ self,
+ src_dir,
+ build_dir,
+ inst_dir,
+ defines,
+ final_install_prefix,
+ extra_cmake_defines,
+ )
+
+ if builder == "python-wheel":
+ return PythonWheelBuilder(
+ build_options, ctx, self, src_dir, build_dir, inst_dir
+ )
+
+ if builder == "sqlite":
+ return SqliteBuilder(build_options, ctx, self, src_dir, build_dir, inst_dir)
+
+ if builder == "ninja_bootstrap":
+ return NinjaBootstrap(
+ build_options, ctx, self, build_dir, src_dir, inst_dir
+ )
+
+ if builder == "nop":
+ return NopBuilder(build_options, ctx, self, src_dir, inst_dir)
+
+ if builder == "openssl":
+ return OpenSSLBuilder(
+ build_options, ctx, self, build_dir, src_dir, inst_dir
+ )
+
+ if builder == "iproute2":
+ return Iproute2Builder(
+ build_options, ctx, self, src_dir, build_dir, inst_dir
+ )
+
+ if builder == "cargo":
+ build_doc = self.get("cargo", "build_doc", False, ctx)
+ workspace_dir = self.get("cargo", "workspace_dir", None, ctx)
+ manifests_to_build = self.get("cargo", "manifests_to_build", None, ctx)
+ return CargoBuilder(
+ build_options,
+ ctx,
+ self,
+ src_dir,
+ build_dir,
+ inst_dir,
+ build_doc,
+ workspace_dir,
+ manifests_to_build,
+ loader,
+ )
+
+ if builder == "OpenNSA":
+ return OpenNSABuilder(build_options, ctx, self, src_dir, inst_dir)
+
+ raise KeyError("project %s has no known builder" % (self.name))
+
+
+class ManifestContext(object):
+ """ProjectContext contains a dictionary of values to use when evaluating boolean
+ expressions in a project manifest.
+
+ This object should be passed as the `ctx` parameter in ManifestParser.get() calls.
+ """
+
+ ALLOWED_VARIABLES = {"os", "distro", "distro_vers", "fb", "test"}
+
+ def __init__(self, ctx_dict):
+ assert set(ctx_dict.keys()) == self.ALLOWED_VARIABLES
+ self.ctx_dict = ctx_dict
+
+ def get(self, key):
+ return self.ctx_dict[key]
+
+ def set(self, key, value):
+ assert key in self.ALLOWED_VARIABLES
+ self.ctx_dict[key] = value
+
+ def copy(self):
+ return ManifestContext(dict(self.ctx_dict))
+
+ def __str__(self):
+ s = ", ".join(
+ "%s=%s" % (key, value) for key, value in sorted(self.ctx_dict.items())
+ )
+ return "{" + s + "}"
+
+
+class ContextGenerator(object):
+ """ContextGenerator allows creating ManifestContext objects on a per-project basis.
+ This allows us to evaluate different projects with slightly different contexts.
+
+ For instance, this can be used to only enable tests for some projects."""
+
+ def __init__(self, default_ctx):
+ self.default_ctx = ManifestContext(default_ctx)
+ self.ctx_by_project = {}
+
+ def set_value_for_project(self, project_name, key, value):
+ project_ctx = self.ctx_by_project.get(project_name)
+ if project_ctx is None:
+ project_ctx = self.default_ctx.copy()
+ self.ctx_by_project[project_name] = project_ctx
+ project_ctx.set(key, value)
+
+ def set_value_for_all_projects(self, key, value):
+ self.default_ctx.set(key, value)
+ for ctx in self.ctx_by_project.values():
+ ctx.set(key, value)
+
+ def get_context(self, project_name):
+ return self.ctx_by_project.get(project_name, self.default_ctx)
diff --git a/build/fbcode_builder/getdeps/platform.py b/build/fbcode_builder/getdeps/platform.py
new file mode 100644
index 000000000..fd8382e73
--- /dev/null
+++ b/build/fbcode_builder/getdeps/platform.py
@@ -0,0 +1,118 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import platform
+import re
+import shlex
+import sys
+
+
+def is_windows():
+ """Returns true if the system we are currently running on
+ is a Windows system"""
+ return sys.platform.startswith("win")
+
+
+def get_linux_type():
+ try:
+ with open("/etc/os-release") as f:
+ data = f.read()
+ except EnvironmentError:
+ return (None, None)
+
+ os_vars = {}
+ for line in data.splitlines():
+ parts = line.split("=", 1)
+ if len(parts) != 2:
+ continue
+ key = parts[0].strip()
+ value_parts = shlex.split(parts[1].strip())
+ if not value_parts:
+ value = ""
+ else:
+ value = value_parts[0]
+ os_vars[key] = value
+
+ name = os_vars.get("NAME")
+ if name:
+ name = name.lower()
+ name = re.sub("linux", "", name)
+ name = name.strip()
+
+ version_id = os_vars.get("VERSION_ID")
+ if version_id:
+ version_id = version_id.lower()
+
+ return "linux", name, version_id
+
+
+class HostType(object):
+ def __init__(self, ostype=None, distro=None, distrovers=None):
+ if ostype is None:
+ distro = None
+ distrovers = None
+ if sys.platform.startswith("linux"):
+ ostype, distro, distrovers = get_linux_type()
+ elif sys.platform.startswith("darwin"):
+ ostype = "darwin"
+ elif is_windows():
+ ostype = "windows"
+ distrovers = str(sys.getwindowsversion().major)
+ else:
+ ostype = sys.platform
+
+ # The operating system type
+ self.ostype = ostype
+ # The distribution, if applicable
+ self.distro = distro
+ # The OS/distro version if known
+ self.distrovers = distrovers
+ machine = platform.machine().lower()
+ if "arm" in machine or "aarch" in machine:
+ self.isarm = True
+ else:
+ self.isarm = False
+
+ def is_windows(self):
+ return self.ostype == "windows"
+
+ def is_arm(self):
+ return self.isarm
+
+ def is_darwin(self):
+ return self.ostype == "darwin"
+
+ def is_linux(self):
+ return self.ostype == "linux"
+
+ def as_tuple_string(self):
+ return "%s-%s-%s" % (
+ self.ostype,
+ self.distro or "none",
+ self.distrovers or "none",
+ )
+
+ def get_package_manager(self):
+ if not self.is_linux():
+ return None
+ if self.distro in ("fedora", "centos"):
+ return "rpm"
+ if self.distro in ("debian", "ubuntu"):
+ return "deb"
+ return None
+
+ @staticmethod
+ def from_tuple_string(s):
+ ostype, distro, distrovers = s.split("-")
+ return HostType(ostype=ostype, distro=distro, distrovers=distrovers)
+
+ def __eq__(self, b):
+ return (
+ self.ostype == b.ostype
+ and self.distro == b.distro
+ and self.distrovers == b.distrovers
+ )
diff --git a/build/fbcode_builder/getdeps/py_wheel_builder.py b/build/fbcode_builder/getdeps/py_wheel_builder.py
new file mode 100644
index 000000000..82ad8b807
--- /dev/null
+++ b/build/fbcode_builder/getdeps/py_wheel_builder.py
@@ -0,0 +1,289 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import codecs
+import collections
+import email
+import os
+import re
+import stat
+
+from .builder import BuilderBase, CMakeBuilder
+
+
+WheelNameInfo = collections.namedtuple(
+ "WheelNameInfo", ("distribution", "version", "build", "python", "abi", "platform")
+)
+
+CMAKE_HEADER = """
+cmake_minimum_required(VERSION 3.8)
+
+project("{manifest_name}" LANGUAGES C)
+
+set(CMAKE_MODULE_PATH
+ "{cmake_dir}"
+ ${{CMAKE_MODULE_PATH}}
+)
+include(FBPythonBinary)
+
+set(CMAKE_INSTALL_DIR lib/cmake/{manifest_name} CACHE STRING
+ "The subdirectory where CMake package config files should be installed")
+"""
+
+CMAKE_FOOTER = """
+install_fb_python_library({lib_name} EXPORT all)
+install(
+ EXPORT all
+ FILE {manifest_name}-targets.cmake
+ NAMESPACE {namespace}::
+ DESTINATION ${{CMAKE_INSTALL_DIR}}
+)
+
+include(CMakePackageConfigHelpers)
+configure_package_config_file(
+ ${{CMAKE_BINARY_DIR}}/{manifest_name}-config.cmake.in
+ {manifest_name}-config.cmake
+ INSTALL_DESTINATION ${{CMAKE_INSTALL_DIR}}
+ PATH_VARS
+ CMAKE_INSTALL_DIR
+)
+install(
+ FILES ${{CMAKE_CURRENT_BINARY_DIR}}/{manifest_name}-config.cmake
+ DESTINATION ${{CMAKE_INSTALL_DIR}}
+)
+"""
+
+CMAKE_CONFIG_FILE = """
+@PACKAGE_INIT@
+
+include(CMakeFindDependencyMacro)
+
+set_and_check({upper_name}_CMAKE_DIR "@PACKAGE_CMAKE_INSTALL_DIR@")
+
+if (NOT TARGET {namespace}::{lib_name})
+ include("${{{upper_name}_CMAKE_DIR}}/{manifest_name}-targets.cmake")
+endif()
+
+set({upper_name}_LIBRARIES {namespace}::{lib_name})
+
+{find_dependency_lines}
+
+if (NOT {manifest_name}_FIND_QUIETLY)
+ message(STATUS "Found {manifest_name}: ${{PACKAGE_PREFIX_DIR}}")
+endif()
+"""
+
+
+# Note: for now we are manually manipulating the wheel packet contents.
+# The wheel format is documented here:
+# https://www.python.org/dev/peps/pep-0491/#file-format
+#
+# We currently aren't particularly smart about correctly handling the full wheel
+# functionality, but this is good enough to handle simple pure-python wheels,
+# which is the main thing we care about right now.
+#
+# We could potentially use pip to install the wheel to a temporary location and
+# then copy its "installed" files, but this has its own set of complications.
+# This would require pip to already be installed and available, and we would
+# need to correctly find the right version of pip or pip3 to use.
+# If we did ever want to go down that path, we would probably want to use
+# something like the following pip3 command:
+# pip3 --isolated install --no-cache-dir --no-index --system \
+# --target
+class PythonWheelBuilder(BuilderBase):
+ """This Builder can take Python wheel archives and install them as python libraries
+ that can be used by add_fb_python_library()/add_fb_python_executable() CMake rules.
+ """
+
+ def _build(self, install_dirs, reconfigure):
+ # type: (List[str], bool) -> None
+
+ # When we are invoked, self.src_dir contains the unpacked wheel contents.
+ #
+ # Since a wheel file is just a zip file, the Fetcher code recognizes it as such
+ # and goes ahead and unpacks it. (We could disable that Fetcher behavior in the
+ # future if we ever wanted to, say if we wanted to call pip here.)
+ wheel_name = self._parse_wheel_name()
+ name_version_prefix = "-".join((wheel_name.distribution, wheel_name.version))
+ dist_info_name = name_version_prefix + ".dist-info"
+ data_dir_name = name_version_prefix + ".data"
+ self.dist_info_dir = os.path.join(self.src_dir, dist_info_name)
+ wheel_metadata = self._read_wheel_metadata(wheel_name)
+
+ # Check that we can understand the wheel version.
+ # We don't really care about wheel_metadata["Root-Is-Purelib"] since
+ # we are generating our own standalone python archives rather than installing
+ # into site-packages.
+ version = wheel_metadata["Wheel-Version"]
+ if not version.startswith("1."):
+ raise Exception("unsupported wheel version %s" % (version,))
+
+ # Add a find_dependency() call for each of our dependencies.
+ # The dependencies are also listed in the wheel METADATA file, but it is simpler
+ # to pull this directly from the getdeps manifest.
+ dep_list = sorted(
+ self.manifest.get_section_as_dict("dependencies", self.ctx).keys()
+ )
+ find_dependency_lines = ["find_dependency({})".format(dep) for dep in dep_list]
+
+ getdeps_cmake_dir = os.path.join(
+ os.path.dirname(os.path.dirname(__file__)), "CMake"
+ )
+ self.template_format_dict = {
+ # Note that CMake files always uses forward slash separators in path names,
+ # even on Windows. Therefore replace path separators here.
+ "cmake_dir": _to_cmake_path(getdeps_cmake_dir),
+ "lib_name": self.manifest.name,
+ "manifest_name": self.manifest.name,
+ "namespace": self.manifest.name,
+ "upper_name": self.manifest.name.upper().replace("-", "_"),
+ "find_dependency_lines": "\n".join(find_dependency_lines),
+ }
+
+ # Find sources from the root directory
+ path_mapping = {}
+ for entry in os.listdir(self.src_dir):
+ if entry in (dist_info_name, data_dir_name):
+ continue
+ self._add_sources(path_mapping, os.path.join(self.src_dir, entry), entry)
+
+ # Files under the .data directory also need to be installed in the correct
+ # locations
+ if os.path.exists(data_dir_name):
+ # TODO: process the subdirectories of data_dir_name
+ # This isn't implemented yet since for now we have only needed dependencies
+ # on some simple pure Python wheels, so I haven't tested against wheels with
+ # additional files in the .data directory.
+ raise Exception(
+ "handling of the subdirectories inside %s is not implemented yet"
+ % data_dir_name
+ )
+
+ # Emit CMake files
+ self._write_cmakelists(path_mapping, dep_list)
+ self._write_cmake_config_template()
+
+ # Run the build
+ self._run_cmake_build(install_dirs, reconfigure)
+
+ def _run_cmake_build(self, install_dirs, reconfigure):
+ # type: (List[str], bool) -> None
+
+ cmake_builder = CMakeBuilder(
+ build_opts=self.build_opts,
+ ctx=self.ctx,
+ manifest=self.manifest,
+ # Note that we intentionally supply src_dir=build_dir,
+ # since we wrote out our generated CMakeLists.txt in the build directory
+ src_dir=self.build_dir,
+ build_dir=self.build_dir,
+ inst_dir=self.inst_dir,
+ defines={},
+ final_install_prefix=None,
+ )
+ cmake_builder.build(install_dirs=install_dirs, reconfigure=reconfigure)
+
+ def _write_cmakelists(self, path_mapping, dependencies):
+ # type: (List[str]) -> None
+
+ cmake_path = os.path.join(self.build_dir, "CMakeLists.txt")
+ with open(cmake_path, "w") as f:
+ f.write(CMAKE_HEADER.format(**self.template_format_dict))
+ for dep in dependencies:
+ f.write("find_package({0} REQUIRED)\n".format(dep))
+
+ f.write(
+ "add_fb_python_library({lib_name}\n".format(**self.template_format_dict)
+ )
+ f.write(' BASE_DIR "%s"\n' % _to_cmake_path(self.src_dir))
+ f.write(" SOURCES\n")
+ for src_path, install_path in path_mapping.items():
+ f.write(
+ ' "%s=%s"\n'
+ % (_to_cmake_path(src_path), _to_cmake_path(install_path))
+ )
+ if dependencies:
+ f.write(" DEPENDS\n")
+ for dep in dependencies:
+ f.write(' "{0}::{0}"\n'.format(dep))
+ f.write(")\n")
+
+ f.write(CMAKE_FOOTER.format(**self.template_format_dict))
+
+ def _write_cmake_config_template(self):
+ config_path_name = self.manifest.name + "-config.cmake.in"
+ output_path = os.path.join(self.build_dir, config_path_name)
+
+ with open(output_path, "w") as f:
+ f.write(CMAKE_CONFIG_FILE.format(**self.template_format_dict))
+
+ def _add_sources(self, path_mapping, src_path, install_path):
+ # type: (List[str], str, str) -> None
+
+ s = os.lstat(src_path)
+ if not stat.S_ISDIR(s.st_mode):
+ path_mapping[src_path] = install_path
+ return
+
+ for entry in os.listdir(src_path):
+ self._add_sources(
+ path_mapping,
+ os.path.join(src_path, entry),
+ os.path.join(install_path, entry),
+ )
+
+ def _parse_wheel_name(self):
+ # type: () -> WheelNameInfo
+
+ # The ArchiveFetcher prepends "manifest_name-", so strip that off first.
+ wheel_name = os.path.basename(self.src_dir)
+ prefix = self.manifest.name + "-"
+ if not wheel_name.startswith(prefix):
+ raise Exception(
+ "expected wheel source directory to be of the form %s-NAME.whl"
+ % (prefix,)
+ )
+ wheel_name = wheel_name[len(prefix) :]
+
+ wheel_name_re = re.compile(
+ r"(?P[^-]+)"
+ r"-(?P\d+[^-]*)"
+ r"(-(?P\d+[^-]*))?"
+ r"-(?P\w+\d+(\.\w+\d+)*)"
+ r"-(?P\w+)"
+ r"-(?P\w+(\.\w+)*)"
+ r"\.whl"
+ )
+ match = wheel_name_re.match(wheel_name)
+ if not match:
+ raise Exception(
+ "bad python wheel name %s: expected to have the form "
+ "DISTRIBUTION-VERSION-[-BUILD]-PYTAG-ABI-PLATFORM"
+ )
+
+ return WheelNameInfo(
+ distribution=match.group("distribution"),
+ version=match.group("version"),
+ build=match.group("build"),
+ python=match.group("python"),
+ abi=match.group("abi"),
+ platform=match.group("platform"),
+ )
+
+ def _read_wheel_metadata(self, wheel_name):
+ metadata_path = os.path.join(self.dist_info_dir, "WHEEL")
+ with codecs.open(metadata_path, "r", encoding="utf-8") as f:
+ return email.message_from_file(f)
+
+
+def _to_cmake_path(path):
+ # CMake always uses forward slashes to separate paths in CMakeLists.txt files,
+ # even on Windows. It treats backslashes as character escapes, so using
+ # backslashes in the path will cause problems. Therefore replace all path
+ # separators with forward slashes to make sure the paths are correct on Windows.
+ # e.g. "C:\foo\bar.txt" becomes "C:/foo/bar.txt"
+ return path.replace(os.path.sep, "/")
diff --git a/build/fbcode_builder/getdeps/runcmd.py b/build/fbcode_builder/getdeps/runcmd.py
new file mode 100644
index 000000000..44e7994aa
--- /dev/null
+++ b/build/fbcode_builder/getdeps/runcmd.py
@@ -0,0 +1,169 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import os
+import select
+import subprocess
+import sys
+
+from .envfuncs import Env
+from .platform import is_windows
+
+
+try:
+ from shlex import quote as shellquote
+except ImportError:
+ from pipes import quote as shellquote
+
+
+class RunCommandError(Exception):
+ pass
+
+
+def _print_env_diff(env, log_fn):
+ current_keys = set(os.environ.keys())
+ wanted_env = set(env.keys())
+
+ unset_keys = current_keys.difference(wanted_env)
+ for k in sorted(unset_keys):
+ log_fn("+ unset %s\n" % k)
+
+ added_keys = wanted_env.difference(current_keys)
+ for k in wanted_env.intersection(current_keys):
+ if os.environ[k] != env[k]:
+ added_keys.add(k)
+
+ for k in sorted(added_keys):
+ if ("PATH" in k) and (os.pathsep in env[k]):
+ log_fn("+ %s=\\\n" % k)
+ for elem in env[k].split(os.pathsep):
+ log_fn("+ %s%s\\\n" % (shellquote(elem), os.pathsep))
+ else:
+ log_fn("+ %s=%s \\\n" % (k, shellquote(env[k])))
+
+
+def run_cmd(cmd, env=None, cwd=None, allow_fail=False, log_file=None):
+ def log_to_stdout(msg):
+ sys.stdout.buffer.write(msg.encode(errors="surrogateescape"))
+
+ if log_file is not None:
+ with open(log_file, "a", encoding="utf-8", errors="surrogateescape") as log:
+
+ def log_function(msg):
+ log.write(msg)
+ log_to_stdout(msg)
+
+ return _run_cmd(
+ cmd, env=env, cwd=cwd, allow_fail=allow_fail, log_fn=log_function
+ )
+ else:
+ return _run_cmd(
+ cmd, env=env, cwd=cwd, allow_fail=allow_fail, log_fn=log_to_stdout
+ )
+
+
+def _run_cmd(cmd, env, cwd, allow_fail, log_fn):
+ log_fn("---\n")
+ try:
+ cmd_str = " \\\n+ ".join(shellquote(arg) for arg in cmd)
+ except TypeError:
+ # eg: one of the elements is None
+ raise RunCommandError("problem quoting cmd: %r" % cmd)
+
+ if env:
+ assert isinstance(env, Env)
+ _print_env_diff(env, log_fn)
+
+ # Convert from our Env type to a regular dict.
+ # This is needed because python3 looks up b'PATH' and 'PATH'
+ # and emits an error if both are present. In our Env type
+ # we'll return the same value for both requests, but we don't
+ # have duplicate potentially conflicting values which is the
+ # spirit of the check.
+ env = dict(env.items())
+
+ if cwd:
+ log_fn("+ cd %s && \\\n" % shellquote(cwd))
+ # Our long path escape sequence may confuse cmd.exe, so if the cwd
+ # is short enough, strip that off.
+ if is_windows() and (len(cwd) < 250) and cwd.startswith("\\\\?\\"):
+ cwd = cwd[4:]
+
+ log_fn("+ %s\n" % cmd_str)
+
+ isinteractive = os.isatty(sys.stdout.fileno())
+ if isinteractive:
+ stdout = None
+ sys.stdout.buffer.flush()
+ else:
+ stdout = subprocess.PIPE
+
+ try:
+ p = subprocess.Popen(
+ cmd, env=env, cwd=cwd, stdout=stdout, stderr=subprocess.STDOUT
+ )
+ except (TypeError, ValueError, OSError) as exc:
+ log_fn("error running `%s`: %s" % (cmd_str, exc))
+ raise RunCommandError(
+ "%s while running `%s` with env=%r\nos.environ=%r"
+ % (str(exc), cmd_str, env, os.environ)
+ )
+
+ if not isinteractive:
+ _pipe_output(p, log_fn)
+
+ p.wait()
+ if p.returncode != 0 and not allow_fail:
+ raise subprocess.CalledProcessError(p.returncode, cmd)
+
+ return p.returncode
+
+
+if hasattr(select, "poll"):
+
+ def _pipe_output(p, log_fn):
+ """Read output from p.stdout and call log_fn() with each chunk of data as it
+ becomes available."""
+ # Perform non-blocking reads
+ import fcntl
+
+ fcntl.fcntl(p.stdout.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
+ poll = select.poll()
+ poll.register(p.stdout.fileno(), select.POLLIN)
+
+ buffer_size = 4096
+ while True:
+ poll.poll()
+ data = p.stdout.read(buffer_size)
+ if not data:
+ break
+ # log_fn() accepts arguments as str (binary in Python 2, unicode in
+ # Python 3). In Python 3 the subprocess output will be plain bytes,
+ # and need to be decoded.
+ if not isinstance(data, str):
+ data = data.decode("utf-8", errors="surrogateescape")
+ log_fn(data)
+
+
+else:
+
+ def _pipe_output(p, log_fn):
+ """Read output from p.stdout and call log_fn() with each chunk of data as it
+ becomes available."""
+ # Perform blocking reads. Use a smaller buffer size to avoid blocking
+ # for very long when data is available.
+ buffer_size = 64
+ while True:
+ data = p.stdout.read(buffer_size)
+ if not data:
+ break
+ # log_fn() accepts arguments as str (binary in Python 2, unicode in
+ # Python 3). In Python 3 the subprocess output will be plain bytes,
+ # and need to be decoded.
+ if not isinstance(data, str):
+ data = data.decode("utf-8", errors="surrogateescape")
+ log_fn(data)
diff --git a/build/fbcode_builder/getdeps/subcmd.py b/build/fbcode_builder/getdeps/subcmd.py
new file mode 100644
index 000000000..95f9a07ca
--- /dev/null
+++ b/build/fbcode_builder/getdeps/subcmd.py
@@ -0,0 +1,58 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+
+class SubCmd(object):
+ NAME = None
+ HELP = None
+
+ def run(self, args):
+ """perform the command"""
+ return 0
+
+ def setup_parser(self, parser):
+ # Subclasses should override setup_parser() if they have any
+ # command line options or arguments.
+ pass
+
+
+CmdTable = []
+
+
+def add_subcommands(parser, common_args, cmd_table=CmdTable):
+ """Register parsers for the defined commands with the provided parser"""
+ for cls in cmd_table:
+ command = cls()
+ command_parser = parser.add_parser(
+ command.NAME, help=command.HELP, parents=[common_args]
+ )
+ command.setup_parser(command_parser)
+ command_parser.set_defaults(func=command.run)
+
+
+def cmd(name, help=None, cmd_table=CmdTable):
+ """
+ @cmd() is a decorator that can be used to help define Subcmd instances
+
+ Example usage:
+
+ @subcmd('list', 'Show the result list')
+ class ListCmd(Subcmd):
+ def run(self, args):
+ # Perform the command actions here...
+ pass
+ """
+
+ def wrapper(cls):
+ class SubclassedCmd(cls):
+ NAME = name
+ HELP = help
+
+ cmd_table.append(SubclassedCmd)
+ return SubclassedCmd
+
+ return wrapper
diff --git a/build/fbcode_builder/getdeps/test/expr_test.py b/build/fbcode_builder/getdeps/test/expr_test.py
new file mode 100644
index 000000000..59d66a943
--- /dev/null
+++ b/build/fbcode_builder/getdeps/test/expr_test.py
@@ -0,0 +1,49 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import unittest
+
+from ..expr import parse_expr
+
+
+class ExprTest(unittest.TestCase):
+ def test_equal(self):
+ valid_variables = {"foo", "some_var", "another_var"}
+ e = parse_expr("foo=bar", valid_variables)
+ self.assertTrue(e.eval({"foo": "bar"}))
+ self.assertFalse(e.eval({"foo": "not-bar"}))
+ self.assertFalse(e.eval({"not-foo": "bar"}))
+
+ def test_not_equal(self):
+ valid_variables = {"foo"}
+ e = parse_expr("not(foo=bar)", valid_variables)
+ self.assertFalse(e.eval({"foo": "bar"}))
+ self.assertTrue(e.eval({"foo": "not-bar"}))
+
+ def test_bad_not(self):
+ valid_variables = {"foo"}
+ with self.assertRaises(Exception):
+ parse_expr("foo=not(bar)", valid_variables)
+
+ def test_bad_variable(self):
+ valid_variables = {"bar"}
+ with self.assertRaises(Exception):
+ parse_expr("foo=bar", valid_variables)
+
+ def test_all(self):
+ valid_variables = {"foo", "baz"}
+ e = parse_expr("all(foo = bar, baz = qux)", valid_variables)
+ self.assertTrue(e.eval({"foo": "bar", "baz": "qux"}))
+ self.assertFalse(e.eval({"foo": "bar", "baz": "nope"}))
+ self.assertFalse(e.eval({"foo": "nope", "baz": "nope"}))
+
+ def test_any(self):
+ valid_variables = {"foo", "baz"}
+ e = parse_expr("any(foo = bar, baz = qux)", valid_variables)
+ self.assertTrue(e.eval({"foo": "bar", "baz": "qux"}))
+ self.assertTrue(e.eval({"foo": "bar", "baz": "nope"}))
+ self.assertFalse(e.eval({"foo": "nope", "baz": "nope"}))
diff --git a/build/fbcode_builder/getdeps/test/fixtures/duplicate/foo b/build/fbcode_builder/getdeps/test/fixtures/duplicate/foo
new file mode 100644
index 000000000..a0384ee3b
--- /dev/null
+++ b/build/fbcode_builder/getdeps/test/fixtures/duplicate/foo
@@ -0,0 +1,2 @@
+[manifest]
+name = foo
diff --git a/build/fbcode_builder/getdeps/test/fixtures/duplicate/subdir/foo b/build/fbcode_builder/getdeps/test/fixtures/duplicate/subdir/foo
new file mode 100644
index 000000000..a0384ee3b
--- /dev/null
+++ b/build/fbcode_builder/getdeps/test/fixtures/duplicate/subdir/foo
@@ -0,0 +1,2 @@
+[manifest]
+name = foo
diff --git a/build/fbcode_builder/getdeps/test/manifest_test.py b/build/fbcode_builder/getdeps/test/manifest_test.py
new file mode 100644
index 000000000..8be9896d8
--- /dev/null
+++ b/build/fbcode_builder/getdeps/test/manifest_test.py
@@ -0,0 +1,233 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import sys
+import unittest
+
+from ..load import load_all_manifests, patch_loader
+from ..manifest import ManifestParser
+
+
+class ManifestTest(unittest.TestCase):
+ def test_missing_section(self):
+ with self.assertRaisesRegex(
+ Exception, "manifest file test is missing required section manifest"
+ ):
+ ManifestParser("test", "")
+
+ def test_missing_name(self):
+ with self.assertRaisesRegex(
+ Exception,
+ "manifest file test section 'manifest' is missing required field 'name'",
+ ):
+ ManifestParser(
+ "test",
+ """
+[manifest]
+""",
+ )
+
+ def test_minimal(self):
+ p = ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+""",
+ )
+ self.assertEqual(p.name, "test")
+ self.assertEqual(p.fbsource_path, None)
+
+ def test_minimal_with_fbsource_path(self):
+ p = ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+fbsource_path = fbcode/wat
+""",
+ )
+ self.assertEqual(p.name, "test")
+ self.assertEqual(p.fbsource_path, "fbcode/wat")
+
+ def test_unknown_field(self):
+ with self.assertRaisesRegex(
+ Exception,
+ (
+ "manifest file test section 'manifest' contains "
+ "unknown field 'invalid.field'"
+ ),
+ ):
+ ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+invalid.field = woot
+""",
+ )
+
+ def test_invalid_section_name(self):
+ with self.assertRaisesRegex(
+ Exception, "manifest file test contains unknown section 'invalid.section'"
+ ):
+ ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+
+[invalid.section]
+foo = bar
+""",
+ )
+
+ def test_value_in_dependencies_section(self):
+ with self.assertRaisesRegex(
+ Exception,
+ (
+ "manifest file test section 'dependencies' has "
+ "'foo = bar' but this section doesn't allow "
+ "specifying values for its entries"
+ ),
+ ):
+ ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+
+[dependencies]
+foo = bar
+""",
+ )
+
+ def test_invalid_conditional_section_name(self):
+ with self.assertRaisesRegex(
+ Exception,
+ (
+ "manifest file test section 'dependencies.=' "
+ "has invalid conditional: expected "
+ "identifier found ="
+ ),
+ ):
+ ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+
+[dependencies.=]
+""",
+ )
+
+ def test_section_as_args(self):
+ p = ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+
+[dependencies]
+a
+b
+c
+
+[dependencies.test=on]
+foo
+""",
+ )
+ self.assertEqual(p.get_section_as_args("dependencies"), ["a", "b", "c"])
+ self.assertEqual(
+ p.get_section_as_args("dependencies", {"test": "off"}), ["a", "b", "c"]
+ )
+ self.assertEqual(
+ p.get_section_as_args("dependencies", {"test": "on"}),
+ ["a", "b", "c", "foo"],
+ )
+
+ p2 = ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+
+[autoconf.args]
+--prefix=/foo
+--with-woot
+""",
+ )
+ self.assertEqual(
+ p2.get_section_as_args("autoconf.args"), ["--prefix=/foo", "--with-woot"]
+ )
+
+ def test_section_as_dict(self):
+ p = ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+
+[cmake.defines]
+foo = bar
+
+[cmake.defines.test=on]
+foo = baz
+""",
+ )
+ self.assertEqual(p.get_section_as_dict("cmake.defines"), {"foo": "bar"})
+ self.assertEqual(
+ p.get_section_as_dict("cmake.defines", {"test": "on"}), {"foo": "baz"}
+ )
+
+ p2 = ManifestParser(
+ "test",
+ """
+[manifest]
+name = test
+
+[cmake.defines.test=on]
+foo = baz
+
+[cmake.defines]
+foo = bar
+""",
+ )
+ self.assertEqual(
+ p2.get_section_as_dict("cmake.defines", {"test": "on"}),
+ {"foo": "bar"},
+ msg="sections cascade in the order they appear in the manifest",
+ )
+
+ def test_parse_common_manifests(self):
+ patch_loader(__name__)
+ manifests = load_all_manifests(None)
+ self.assertNotEqual(0, len(manifests), msg="parsed some number of manifests")
+
+ def test_mismatch_name(self):
+ with self.assertRaisesRegex(
+ Exception,
+ "filename of the manifest 'foo' does not match the manifest name 'bar'",
+ ):
+ ManifestParser(
+ "foo",
+ """
+[manifest]
+name = bar
+""",
+ )
+
+ def test_duplicate_manifest(self):
+ patch_loader(__name__, "fixtures/duplicate")
+
+ with self.assertRaisesRegex(Exception, "found duplicate manifest 'foo'"):
+ load_all_manifests(None)
+
+ if sys.version_info < (3, 2):
+
+ def assertRaisesRegex(self, *args, **kwargs):
+ return self.assertRaisesRegexp(*args, **kwargs)
diff --git a/build/fbcode_builder/getdeps/test/platform_test.py b/build/fbcode_builder/getdeps/test/platform_test.py
new file mode 100644
index 000000000..311e9c76c
--- /dev/null
+++ b/build/fbcode_builder/getdeps/test/platform_test.py
@@ -0,0 +1,40 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import unittest
+
+from ..platform import HostType
+
+
+class PlatformTest(unittest.TestCase):
+ def test_create(self):
+ p = HostType()
+ self.assertNotEqual(p.ostype, None, msg="probed and returned something")
+
+ tuple_string = p.as_tuple_string()
+ round_trip = HostType.from_tuple_string(tuple_string)
+ self.assertEqual(round_trip, p)
+
+ def test_rendering_of_none(self):
+ p = HostType(ostype="foo")
+ self.assertEqual(p.as_tuple_string(), "foo-none-none")
+
+ def test_is_methods(self):
+ p = HostType(ostype="windows")
+ self.assertTrue(p.is_windows())
+ self.assertFalse(p.is_darwin())
+ self.assertFalse(p.is_linux())
+
+ p = HostType(ostype="darwin")
+ self.assertFalse(p.is_windows())
+ self.assertTrue(p.is_darwin())
+ self.assertFalse(p.is_linux())
+
+ p = HostType(ostype="linux")
+ self.assertFalse(p.is_windows())
+ self.assertFalse(p.is_darwin())
+ self.assertTrue(p.is_linux())
diff --git a/build/fbcode_builder/getdeps/test/scratch_test.py b/build/fbcode_builder/getdeps/test/scratch_test.py
new file mode 100644
index 000000000..1f43c5951
--- /dev/null
+++ b/build/fbcode_builder/getdeps/test/scratch_test.py
@@ -0,0 +1,80 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+from __future__ import absolute_import, division, print_function
+
+import unittest
+
+from ..buildopts import find_existing_win32_subst_for_path
+
+
+class Win32SubstTest(unittest.TestCase):
+ def test_no_existing_subst(self):
+ self.assertIsNone(
+ find_existing_win32_subst_for_path(
+ r"C:\users\alice\appdata\local\temp\fbcode_builder_getdeps",
+ subst_mapping={},
+ )
+ )
+ self.assertIsNone(
+ find_existing_win32_subst_for_path(
+ r"C:\users\alice\appdata\local\temp\fbcode_builder_getdeps",
+ subst_mapping={"X:\\": r"C:\users\alice\appdata\local\temp\other"},
+ )
+ )
+
+ def test_exact_match_returns_drive_path(self):
+ self.assertEqual(
+ find_existing_win32_subst_for_path(
+ r"C:\temp\fbcode_builder_getdeps",
+ subst_mapping={"X:\\": r"C:\temp\fbcode_builder_getdeps"},
+ ),
+ "X:\\",
+ )
+ self.assertEqual(
+ find_existing_win32_subst_for_path(
+ r"C:/temp/fbcode_builder_getdeps",
+ subst_mapping={"X:\\": r"C:/temp/fbcode_builder_getdeps"},
+ ),
+ "X:\\",
+ )
+
+ def test_multiple_exact_matches_returns_arbitrary_drive_path(self):
+ self.assertIn(
+ find_existing_win32_subst_for_path(
+ r"C:\temp\fbcode_builder_getdeps",
+ subst_mapping={
+ "X:\\": r"C:\temp\fbcode_builder_getdeps",
+ "Y:\\": r"C:\temp\fbcode_builder_getdeps",
+ "Z:\\": r"C:\temp\fbcode_builder_getdeps",
+ },
+ ),
+ ("X:\\", "Y:\\", "Z:\\"),
+ )
+
+ def test_drive_letter_is_case_insensitive(self):
+ self.assertEqual(
+ find_existing_win32_subst_for_path(
+ r"C:\temp\fbcode_builder_getdeps",
+ subst_mapping={"X:\\": r"c:\temp\fbcode_builder_getdeps"},
+ ),
+ "X:\\",
+ )
+
+ def test_path_components_are_case_insensitive(self):
+ self.assertEqual(
+ find_existing_win32_subst_for_path(
+ r"C:\TEMP\FBCODE_builder_getdeps",
+ subst_mapping={"X:\\": r"C:\temp\fbcode_builder_getdeps"},
+ ),
+ "X:\\",
+ )
+ self.assertEqual(
+ find_existing_win32_subst_for_path(
+ r"C:\temp\fbcode_builder_getdeps",
+ subst_mapping={"X:\\": r"C:\TEMP\FBCODE_builder_getdeps"},
+ ),
+ "X:\\",
+ )
diff --git a/build/fbcode_builder/make_docker_context.py b/build/fbcode_builder/make_docker_context.py
new file mode 100755
index 000000000..d4b0f0a89
--- /dev/null
+++ b/build/fbcode_builder/make_docker_context.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"""
+Reads `fbcode_builder_config.py` from the current directory, and prepares a
+Docker context directory to build this project. Prints to stdout the path
+to the context directory.
+
+Try `.../make_docker_context.py --help` from a project's `build/` directory.
+
+By default, the Docker context directory will be in /tmp. It will always
+contain a Dockerfile, and might also contain copies of your local repos, and
+other data needed for the build container.
+"""
+
+import os
+import tempfile
+import textwrap
+
+from docker_builder import DockerFBCodeBuilder
+from parse_args import parse_args_to_fbcode_builder_opts
+
+
+def make_docker_context(
+ get_steps_fn, github_project, opts=None, default_context_dir=None
+):
+ """
+ Returns a path to the Docker context directory. See parse_args.py.
+
+ Helper for making a command-line utility that writes your project's
+ Dockerfile and associated data into a (temporary) directory. Your main
+ program might look something like this:
+
+ print(make_docker_context(
+ lambda builder: [builder.step(...), ...],
+ 'facebook/your_project',
+ ))
+ """
+
+ if opts is None:
+ opts = {}
+
+ valid_versions = (
+ ("ubuntu:16.04", "5"),
+ ("ubuntu:18.04", "7"),
+ )
+
+ def add_args(parser):
+ parser.add_argument(
+ "--docker-context-dir",
+ metavar="DIR",
+ default=default_context_dir,
+ help="Write the Dockerfile and its context into this directory. "
+ "If empty, make a temporary directory. Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--user",
+ metavar="NAME",
+ default=opts.get("user", "nobody"),
+ help="Build and install as this user. Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--prefix",
+ metavar="DIR",
+ default=opts.get("prefix", "/home/install"),
+ help="Install all libraries in this prefix. Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--projects-dir",
+ metavar="DIR",
+ default=opts.get("projects_dir", "/home"),
+ help="Place project code directories here. Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--os-image",
+ metavar="IMG",
+ choices=zip(*valid_versions)[0],
+ default=opts.get("os_image", valid_versions[0][0]),
+ help="Docker OS image -- be sure to use only ones you trust (See "
+ "README.docker). Choices: %(choices)s. Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--gcc-version",
+ metavar="VER",
+ choices=set(zip(*valid_versions)[1]),
+ default=opts.get("gcc_version", valid_versions[0][1]),
+ help="Choices: %(choices)s. Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--make-parallelism",
+ metavar="NUM",
+ type=int,
+ default=opts.get("make_parallelism", 1),
+ help="Use `make -j` on multi-CPU systems with lots of RAM. "
+ "Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--local-repo-dir",
+ metavar="DIR",
+ help="If set, build {0} from a local directory instead of Github.".format(
+ github_project
+ ),
+ )
+ parser.add_argument(
+ "--ccache-tgz",
+ metavar="PATH",
+ help="If set, enable ccache for the build. To initialize the "
+ "cache, first try to hardlink, then to copy --cache-tgz "
+ "as ccache.tgz into the --docker-context-dir.",
+ )
+
+ opts = parse_args_to_fbcode_builder_opts(
+ add_args,
+ # These have add_argument() calls, others are set via --option.
+ (
+ "docker_context_dir",
+ "user",
+ "prefix",
+ "projects_dir",
+ "os_image",
+ "gcc_version",
+ "make_parallelism",
+ "local_repo_dir",
+ "ccache_tgz",
+ ),
+ opts,
+ help=textwrap.dedent(
+ """
+
+ Reads `fbcode_builder_config.py` from the current directory, and
+ prepares a Docker context directory to build {github_project} and
+ its dependencies. Prints to stdout the path to the context
+ directory.
+
+ Pass --option {github_project}:git_hash SHA1 to build something
+ other than the master branch from Github.
+
+ Or, pass --option {github_project}:local_repo_dir LOCAL_PATH to
+ build from a local repo instead of cloning from Github.
+
+ Usage:
+ (cd $(./make_docker_context.py) && docker build . 2>&1 | tee log)
+
+ """.format(
+ github_project=github_project
+ )
+ ),
+ )
+
+ # This allows travis_docker_build.sh not to know the main Github project.
+ local_repo_dir = opts.pop("local_repo_dir", None)
+ if local_repo_dir is not None:
+ opts["{0}:local_repo_dir".format(github_project)] = local_repo_dir
+
+ if (opts.get("os_image"), opts.get("gcc_version")) not in valid_versions:
+ raise Exception(
+ "Due to 4/5 ABI changes (std::string), we can only use {0}".format(
+ " / ".join("GCC {1} on {0}".format(*p) for p in valid_versions)
+ )
+ )
+
+ if opts.get("docker_context_dir") is None:
+ opts["docker_context_dir"] = tempfile.mkdtemp(prefix="docker-context-")
+ elif not os.path.exists(opts.get("docker_context_dir")):
+ os.makedirs(opts.get("docker_context_dir"))
+
+ builder = DockerFBCodeBuilder(**opts)
+ context_dir = builder.option("docker_context_dir") # Mark option "in-use"
+ # The renderer may also populate some files into the context_dir.
+ dockerfile = builder.render(get_steps_fn(builder))
+
+ with os.fdopen(
+ os.open(
+ os.path.join(context_dir, "Dockerfile"),
+ os.O_RDWR | os.O_CREAT | os.O_EXCL, # Do not overwrite existing files
+ 0o644,
+ ),
+ "w",
+ ) as f:
+ f.write(dockerfile)
+
+ return context_dir
+
+
+if __name__ == "__main__":
+ from utils import read_fbcode_builder_config, build_fbcode_builder_config
+
+ # Load a spec from the current directory
+ config = read_fbcode_builder_config("fbcode_builder_config.py")
+ print(
+ make_docker_context(
+ build_fbcode_builder_config(config),
+ config["github_project"],
+ )
+ )
diff --git a/build/fbcode_builder/manifests/CLI11 b/build/fbcode_builder/manifests/CLI11
new file mode 100644
index 000000000..14cb2332a
--- /dev/null
+++ b/build/fbcode_builder/manifests/CLI11
@@ -0,0 +1,14 @@
+[manifest]
+name = CLI11
+
+[download]
+url = https://github.com/CLIUtils/CLI11/archive/v2.0.0.tar.gz
+sha256 = 2c672f17bf56e8e6223a3bfb74055a946fa7b1ff376510371902adb9cb0ab6a3
+
+[build]
+builder = cmake
+subdir = CLI11-2.0.0
+
+[cmake.defines]
+CLI11_BUILD_TESTS = OFF
+CLI11_BUILD_EXAMPLES = OFF
diff --git a/build/fbcode_builder/manifests/OpenNSA b/build/fbcode_builder/manifests/OpenNSA
new file mode 100644
index 000000000..62354c997
--- /dev/null
+++ b/build/fbcode_builder/manifests/OpenNSA
@@ -0,0 +1,17 @@
+[manifest]
+name = OpenNSA
+
+[download]
+url = https://docs.broadcom.com/docs-and-downloads/csg/opennsa-6.5.22.tgz
+sha256 = 74bfbdaebb6bfe9ebb0deac3aff624385cdcf5aa416ba63706c36538b3c3c46c
+
+[build]
+builder = nop
+subdir = opennsa-6.5.22
+
+[install.files]
+lib/x86-64 = lib
+include = include
+src/gpl-modules/systems/bde/linux/include = include/systems/bde/linux
+src/gpl-modules/include/ibde.h = include/ibde.h
+src/gpl-modules = src/gpl-modules
diff --git a/build/fbcode_builder/manifests/autoconf b/build/fbcode_builder/manifests/autoconf
new file mode 100644
index 000000000..35963096c
--- /dev/null
+++ b/build/fbcode_builder/manifests/autoconf
@@ -0,0 +1,16 @@
+[manifest]
+name = autoconf
+
+[rpms]
+autoconf
+
+[debs]
+autoconf
+
+[download]
+url = http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz
+sha256 = 954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969
+
+[build]
+builder = autoconf
+subdir = autoconf-2.69
diff --git a/build/fbcode_builder/manifests/automake b/build/fbcode_builder/manifests/automake
new file mode 100644
index 000000000..71115068a
--- /dev/null
+++ b/build/fbcode_builder/manifests/automake
@@ -0,0 +1,19 @@
+[manifest]
+name = automake
+
+[rpms]
+automake
+
+[debs]
+automake
+
+[download]
+url = http://ftp.gnu.org/gnu/automake/automake-1.16.1.tar.gz
+sha256 = 608a97523f97db32f1f5d5615c98ca69326ced2054c9f82e65bade7fc4c9dea8
+
+[build]
+builder = autoconf
+subdir = automake-1.16.1
+
+[dependencies]
+autoconf
diff --git a/build/fbcode_builder/manifests/bison b/build/fbcode_builder/manifests/bison
new file mode 100644
index 000000000..6e355d052
--- /dev/null
+++ b/build/fbcode_builder/manifests/bison
@@ -0,0 +1,27 @@
+[manifest]
+name = bison
+
+[rpms]
+bison
+
+[debs]
+bison
+
+[download.not(os=windows)]
+url = https://mirrors.kernel.org/gnu/bison/bison-3.3.tar.gz
+sha256 = fdeafb7fffade05604a61e66b8c040af4b2b5cbb1021dcfe498ed657ac970efd
+
+[download.os=windows]
+url = https://github.com/lexxmark/winflexbison/releases/download/v2.5.17/winflexbison-2.5.17.zip
+sha256 = 3dc27a16c21b717bcc5de8590b564d4392a0b8577170c058729d067d95ded825
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = bison-3.3
+
+[build.os=windows]
+builder = nop
+
+[install.files.os=windows]
+data = bin/data
+win_bison.exe = bin/bison.exe
diff --git a/build/fbcode_builder/manifests/bistro b/build/fbcode_builder/manifests/bistro
new file mode 100644
index 000000000..d93839275
--- /dev/null
+++ b/build/fbcode_builder/manifests/bistro
@@ -0,0 +1,28 @@
+[manifest]
+name = bistro
+fbsource_path = fbcode/bistro
+shipit_project = bistro
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/bistro.git
+
+[build.os=linux]
+builder = bistro
+
+# Bistro is Linux-specific
+[build.not(os=linux)]
+builder = nop
+
+[dependencies]
+fmt
+folly
+proxygen
+fbthrift
+libsodium
+googletest_1_8
+sqlite3
+
+[shipit.pathmap]
+fbcode/bistro/public_tld = .
+fbcode/bistro = bistro
diff --git a/build/fbcode_builder/manifests/boost b/build/fbcode_builder/manifests/boost
new file mode 100644
index 000000000..4b254e308
--- /dev/null
+++ b/build/fbcode_builder/manifests/boost
@@ -0,0 +1,86 @@
+[manifest]
+name = boost
+
+[download.not(os=windows)]
+url = https://versaweb.dl.sourceforge.net/project/boost/boost/1.69.0/boost_1_69_0.tar.bz2
+sha256 = 8f32d4617390d1c2d16f26a27ab60d97807b35440d45891fa340fc2648b04406
+
+[download.os=windows]
+url = https://versaweb.dl.sourceforge.net/project/boost/boost/1.69.0/boost_1_69_0.zip
+sha256 = d074bcbcc0501c4917b965fc890e303ee70d8b01ff5712bae4a6c54f2b6b4e52
+
+[preinstalled.env]
+BOOST_ROOT_1_69_0
+
+[debs]
+libboost-all-dev
+
+[rpms]
+boost
+boost-math
+boost-test
+boost-fiber
+boost-graph
+boost-log
+boost-openmpi
+boost-timer
+boost-chrono
+boost-locale
+boost-thread
+boost-atomic
+boost-random
+boost-static
+boost-contract
+boost-date-time
+boost-iostreams
+boost-container
+boost-coroutine
+boost-filesystem
+boost-system
+boost-stacktrace
+boost-regex
+boost-devel
+boost-context
+boost-python3-devel
+boost-type_erasure
+boost-wave
+boost-python3
+boost-serialization
+boost-program-options
+
+[build]
+builder = boost
+
+[b2.args]
+--with-atomic
+--with-chrono
+--with-container
+--with-context
+--with-contract
+--with-coroutine
+--with-date_time
+--with-exception
+--with-fiber
+--with-filesystem
+--with-graph
+--with-graph_parallel
+--with-iostreams
+--with-locale
+--with-log
+--with-math
+--with-mpi
+--with-program_options
+--with-python
+--with-random
+--with-regex
+--with-serialization
+--with-stacktrace
+--with-system
+--with-test
+--with-thread
+--with-timer
+--with-type_erasure
+--with-wave
+
+[b2.args.os=darwin]
+toolset=clang
diff --git a/build/fbcode_builder/manifests/cmake b/build/fbcode_builder/manifests/cmake
new file mode 100644
index 000000000..f756caed0
--- /dev/null
+++ b/build/fbcode_builder/manifests/cmake
@@ -0,0 +1,43 @@
+[manifest]
+name = cmake
+
+[rpms]
+cmake
+
+# All current deb based distros have a cmake that is too old
+#[debs]
+#cmake
+
+[dependencies]
+ninja
+
+[download.os=windows]
+url = https://github.com/Kitware/CMake/releases/download/v3.14.0/cmake-3.14.0-win64-x64.zip
+sha256 = 40e8140d68120378262322bbc8c261db8d184d7838423b2e5bf688a6209d3807
+
+[download.os=darwin]
+url = https://github.com/Kitware/CMake/releases/download/v3.14.0/cmake-3.14.0-Darwin-x86_64.tar.gz
+sha256 = a02ad0d5b955dfad54c095bd7e937eafbbbfe8a99860107025cc442290a3e903
+
+[download.os=linux]
+url = https://github.com/Kitware/CMake/releases/download/v3.14.0/cmake-3.14.0.tar.gz
+sha256 = aa76ba67b3c2af1946701f847073f4652af5cbd9f141f221c97af99127e75502
+
+[build.os=windows]
+builder = nop
+subdir = cmake-3.14.0-win64-x64
+
+[build.os=darwin]
+builder = nop
+subdir = cmake-3.14.0-Darwin-x86_64
+
+[install.files.os=darwin]
+CMake.app/Contents/bin = bin
+CMake.app/Contents/share = share
+
+[build.os=linux]
+builder = cmakebootstrap
+subdir = cmake-3.14.0
+
+[make.install_args.os=linux]
+install
diff --git a/build/fbcode_builder/manifests/cpptoml b/build/fbcode_builder/manifests/cpptoml
new file mode 100644
index 000000000..5a3c781dc
--- /dev/null
+++ b/build/fbcode_builder/manifests/cpptoml
@@ -0,0 +1,10 @@
+[manifest]
+name = cpptoml
+
+[download]
+url = https://github.com/skystrife/cpptoml/archive/v0.1.1.tar.gz
+sha256 = 23af72468cfd4040984d46a0dd2a609538579c78ddc429d6b8fd7a10a6e24403
+
+[build]
+builder = cmake
+subdir = cpptoml-0.1.1
diff --git a/build/fbcode_builder/manifests/delos_core b/build/fbcode_builder/manifests/delos_core
new file mode 100644
index 000000000..1de6c3342
--- /dev/null
+++ b/build/fbcode_builder/manifests/delos_core
@@ -0,0 +1,25 @@
+[manifest]
+name = delos_core
+fbsource_path = fbcode/delos_core
+shipit_project = delos_core
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookincubator/delos_core.git
+
+[build.os=linux]
+builder = cmake
+
+[build.not(os=linux)]
+builder = nop
+
+[dependencies]
+glog
+googletest
+folly
+fbthrift
+fb303
+re2
+
+[shipit.pathmap]
+fbcode/delos_core = .
diff --git a/build/fbcode_builder/manifests/double-conversion b/build/fbcode_builder/manifests/double-conversion
new file mode 100644
index 000000000..e27c7ae06
--- /dev/null
+++ b/build/fbcode_builder/manifests/double-conversion
@@ -0,0 +1,11 @@
+[manifest]
+name = double-conversion
+
+[download]
+url = https://github.com/google/double-conversion/archive/v3.1.4.tar.gz
+sha256 = 95004b65e43fefc6100f337a25da27bb99b9ef8d4071a36a33b5e83eb1f82021
+
+[build]
+builder = cmake
+subdir = double-conversion-3.1.4
+
diff --git a/build/fbcode_builder/manifests/eden b/build/fbcode_builder/manifests/eden
new file mode 100644
index 000000000..700cc82ec
--- /dev/null
+++ b/build/fbcode_builder/manifests/eden
@@ -0,0 +1,70 @@
+[manifest]
+name = eden
+fbsource_path = fbcode/eden
+shipit_project = eden
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookexperimental/eden.git
+
+[build]
+builder = cmake
+
+[dependencies]
+googletest
+folly
+fbthrift
+fb303
+cpptoml
+rocksdb
+re2
+libgit2
+lz4
+pexpect
+python-toml
+
+[dependencies.fb=on]
+rust
+
+# macOS ships with sqlite3, and some of the core system
+# frameworks require that that version be linked rather
+# than the one we might build for ourselves here, so we
+# skip building it on macos.
+[dependencies.not(os=darwin)]
+sqlite3
+
+[dependencies.os=darwin]
+osxfuse
+
+# TODO: teach getdeps to compile curl on Windows.
+# Enabling curl on Windows requires us to find a way to compile libcurl with
+# msvc.
+[dependencies.not(os=windows)]
+libcurl
+
+[shipit.pathmap]
+fbcode/common/rust/shed/hostcaps = common/rust/shed/hostcaps
+fbcode/eden/oss = .
+fbcode/eden = eden
+fbcode/tools/lfs = tools/lfs
+fbcode/thrift/lib/rust = thrift/lib/rust
+
+[shipit.strip]
+^fbcode/eden/fs/eden-config\.h$
+^fbcode/eden/fs/py/eden/config\.py$
+^fbcode/eden/hg/.*$
+^fbcode/eden/mononoke/(?!lfs_protocol)
+^fbcode/eden/scm/build/.*$
+^fbcode/eden/scm/lib/third-party/rust/.*/Cargo.toml$
+^fbcode/eden/.*/\.cargo/.*$
+/Cargo\.lock$
+\.pyc$
+
+[cmake.defines.all(fb=on,os=windows)]
+INSTALL_PYTHON_LIB=ON
+
+[cmake.defines.fb=on]
+USE_CARGO_VENDOR=ON
+
+[depends.environment]
+EDEN_VERSION_OVERRIDE
diff --git a/build/fbcode_builder/manifests/eden_scm b/build/fbcode_builder/manifests/eden_scm
new file mode 100644
index 000000000..cfe9c7096
--- /dev/null
+++ b/build/fbcode_builder/manifests/eden_scm
@@ -0,0 +1,57 @@
+[manifest]
+name = eden_scm
+fbsource_path = fbcode/eden
+shipit_project = eden
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookexperimental/eden.git
+
+[build.not(os=windows)]
+builder = make
+subdir = eden/scm
+disable_env_override_pkgconfig = 1
+disable_env_override_path = 1
+
+[build.os=windows]
+# For now the biggest blocker is missing "make" on windows, but there are bound
+# to be more
+builder = nop
+
+[make.build_args]
+getdepsbuild
+
+[make.install_args]
+install-getdeps
+
+[make.test_args]
+test-getdeps
+
+[shipit.pathmap]
+fbcode/common/rust = common/rust
+fbcode/eden/oss = .
+fbcode/eden = eden
+fbcode/tools/lfs = tools/lfs
+fbcode/fboss/common = common
+
+[shipit.strip]
+^fbcode/eden/fs/eden-config\.h$
+^fbcode/eden/fs/py/eden/config\.py$
+^fbcode/eden/hg/.*$
+^fbcode/eden/mononoke/(?!lfs_protocol)
+^fbcode/eden/scm/build/.*$
+^fbcode/eden/scm/lib/third-party/rust/.*/Cargo.toml$
+^fbcode/eden/.*/\.cargo/.*$
+^.*/fb/.*$
+/Cargo\.lock$
+\.pyc$
+
+[dependencies]
+fb303-source
+fbthrift
+fbthrift-source
+openssl
+rust-shed
+
+[dependencies.fb=on]
+rust
diff --git a/build/fbcode_builder/manifests/eden_scm_lib_edenapi_tools b/build/fbcode_builder/manifests/eden_scm_lib_edenapi_tools
new file mode 100644
index 000000000..be29d70f8
--- /dev/null
+++ b/build/fbcode_builder/manifests/eden_scm_lib_edenapi_tools
@@ -0,0 +1,36 @@
+[manifest]
+name = eden_scm_lib_edenapi_tools
+fbsource_path = fbcode/eden
+shipit_project = eden
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookexperimental/eden.git
+
+[build]
+builder = cargo
+
+[cargo]
+build_doc = true
+manifests_to_build = eden/scm/lib/edenapi/tools/make_req/Cargo.toml,eden/scm/lib/edenapi/tools/read_res/Cargo.toml
+
+[shipit.pathmap]
+fbcode/eden/oss = .
+fbcode/eden = eden
+fbcode/tools/lfs = tools/lfs
+fbcode/fboss/common = common
+
+[shipit.strip]
+^fbcode/eden/fs/eden-config\.h$
+^fbcode/eden/fs/py/eden/config\.py$
+^fbcode/eden/hg/.*$
+^fbcode/eden/mononoke/(?!lfs_protocol)
+^fbcode/eden/scm/build/.*$
+^fbcode/eden/scm/lib/third-party/rust/.*/Cargo.toml$
+^fbcode/eden/.*/\.cargo/.*$
+^.*/fb/.*$
+/Cargo\.lock$
+\.pyc$
+
+[dependencies.fb=on]
+rust
diff --git a/build/fbcode_builder/manifests/f4d b/build/fbcode_builder/manifests/f4d
new file mode 100644
index 000000000..db30894c7
--- /dev/null
+++ b/build/fbcode_builder/manifests/f4d
@@ -0,0 +1,29 @@
+[manifest]
+name = f4d
+fbsource_path = fbcode/f4d
+shipit_project = f4d
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookexternal/f4d.git
+
+[build.os=windows]
+builder = nop
+
+[build.not(os=windows)]
+builder = cmake
+
+[dependencies]
+double-conversion
+folly
+glog
+googletest
+boost
+protobuf
+lzo
+libicu
+re2
+
+[shipit.pathmap]
+fbcode/f4d/public_tld = .
+fbcode/f4d = f4d
diff --git a/build/fbcode_builder/manifests/fatal b/build/fbcode_builder/manifests/fatal
new file mode 100644
index 000000000..3c333561f
--- /dev/null
+++ b/build/fbcode_builder/manifests/fatal
@@ -0,0 +1,15 @@
+[manifest]
+name = fatal
+fbsource_path = fbcode/fatal
+shipit_project = fatal
+
+[git]
+repo_url = https://github.com/facebook/fatal.git
+
+[shipit.pathmap]
+fbcode/fatal = .
+fbcode/fatal/public_tld = .
+
+[build]
+builder = nop
+subdir = .
diff --git a/build/fbcode_builder/manifests/fb303 b/build/fbcode_builder/manifests/fb303
new file mode 100644
index 000000000..743aca01e
--- /dev/null
+++ b/build/fbcode_builder/manifests/fb303
@@ -0,0 +1,27 @@
+[manifest]
+name = fb303
+fbsource_path = fbcode/fb303
+shipit_project = fb303
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookincubator/fb303.git
+
+[build]
+builder = cmake
+
+[dependencies]
+folly
+gflags
+glog
+fbthrift
+
+[cmake.defines.test=on]
+BUILD_TESTS=ON
+
+[cmake.defines.test=off]
+BUILD_TESTS=OFF
+
+[shipit.pathmap]
+fbcode/fb303/github = .
+fbcode/fb303 = fb303
diff --git a/build/fbcode_builder/manifests/fb303-source b/build/fbcode_builder/manifests/fb303-source
new file mode 100644
index 000000000..ea160c500
--- /dev/null
+++ b/build/fbcode_builder/manifests/fb303-source
@@ -0,0 +1,15 @@
+[manifest]
+name = fb303-source
+fbsource_path = fbcode/fb303
+shipit_project = fb303
+shipit_fbcode_builder = false
+
+[git]
+repo_url = https://github.com/facebook/fb303.git
+
+[build]
+builder = nop
+
+[shipit.pathmap]
+fbcode/fb303/github = .
+fbcode/fb303 = fb303
diff --git a/build/fbcode_builder/manifests/fboss b/build/fbcode_builder/manifests/fboss
new file mode 100644
index 000000000..f29873e72
--- /dev/null
+++ b/build/fbcode_builder/manifests/fboss
@@ -0,0 +1,42 @@
+[manifest]
+name = fboss
+fbsource_path = fbcode/fboss
+shipit_project = fboss
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/fboss.git
+
+[build.os=linux]
+builder = cmake
+
+[build.not(os=linux)]
+builder = nop
+
+[dependencies]
+folly
+fb303
+wangle
+fizz
+fmt
+libsodium
+googletest
+zstd
+fbthrift
+iproute2
+libmnl
+libusb
+libcurl
+libnl
+libsai
+OpenNSA
+re2
+python
+yaml-cpp
+libyaml
+CLI11
+
+[shipit.pathmap]
+fbcode/fboss/github = .
+fbcode/fboss/common = common
+fbcode/fboss = fboss
diff --git a/build/fbcode_builder/manifests/fbthrift b/build/fbcode_builder/manifests/fbthrift
new file mode 100644
index 000000000..072dd4512
--- /dev/null
+++ b/build/fbcode_builder/manifests/fbthrift
@@ -0,0 +1,33 @@
+[manifest]
+name = fbthrift
+fbsource_path = fbcode/thrift
+shipit_project = fbthrift
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/fbthrift.git
+
+[build]
+builder = cmake
+
+[dependencies]
+bison
+flex
+folly
+wangle
+fizz
+fmt
+googletest
+libsodium
+python-six
+zstd
+
+[shipit.pathmap]
+fbcode/thrift/public_tld = .
+fbcode/thrift = thrift
+
+[shipit.strip]
+^fbcode/thrift/thrift-config\.h$
+^fbcode/thrift/perf/canary.py$
+^fbcode/thrift/perf/loadtest.py$
+^fbcode/thrift/.castle/.*
diff --git a/build/fbcode_builder/manifests/fbthrift-source b/build/fbcode_builder/manifests/fbthrift-source
new file mode 100644
index 000000000..7af0d6dda
--- /dev/null
+++ b/build/fbcode_builder/manifests/fbthrift-source
@@ -0,0 +1,21 @@
+[manifest]
+name = fbthrift-source
+fbsource_path = fbcode/thrift
+shipit_project = fbthrift
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/fbthrift.git
+
+[build]
+builder = nop
+
+[shipit.pathmap]
+fbcode/thrift/public_tld = .
+fbcode/thrift = thrift
+
+[shipit.strip]
+^fbcode/thrift/thrift-config\.h$
+^fbcode/thrift/perf/canary.py$
+^fbcode/thrift/perf/loadtest.py$
+^fbcode/thrift/.castle/.*
diff --git a/build/fbcode_builder/manifests/fbzmq b/build/fbcode_builder/manifests/fbzmq
new file mode 100644
index 000000000..5739016c8
--- /dev/null
+++ b/build/fbcode_builder/manifests/fbzmq
@@ -0,0 +1,29 @@
+[manifest]
+name = fbzmq
+fbsource_path = facebook/fbzmq
+shipit_project = fbzmq
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/fbzmq.git
+
+[build.os=linux]
+builder = cmake
+
+[build.not(os=linux)]
+# boost.fiber is required and that is not available on macos.
+# libzmq doesn't currently build on windows.
+builder = nop
+
+[dependencies]
+boost
+folly
+fbthrift
+googletest
+libzmq
+
+[shipit.pathmap]
+fbcode/fbzmq = fbzmq
+fbcode/fbzmq/public_tld = .
+
+[shipit.strip]
diff --git a/build/fbcode_builder/manifests/fizz b/build/fbcode_builder/manifests/fizz
new file mode 100644
index 000000000..72f29973f
--- /dev/null
+++ b/build/fbcode_builder/manifests/fizz
@@ -0,0 +1,36 @@
+[manifest]
+name = fizz
+fbsource_path = fbcode/fizz
+shipit_project = fizz
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookincubator/fizz.git
+
+[build]
+builder = cmake
+subdir = fizz
+
+[cmake.defines]
+BUILD_EXAMPLES = OFF
+
+[cmake.defines.test=on]
+BUILD_TESTS = ON
+
+[cmake.defines.all(os=windows, test=on)]
+BUILD_TESTS = OFF
+
+[cmake.defines.test=off]
+BUILD_TESTS = OFF
+
+[dependencies]
+folly
+libsodium
+zstd
+
+[dependencies.all(test=on, not(os=windows))]
+googletest_1_8
+
+[shipit.pathmap]
+fbcode/fizz/public_tld = .
+fbcode/fizz = fizz
diff --git a/build/fbcode_builder/manifests/flex b/build/fbcode_builder/manifests/flex
new file mode 100644
index 000000000..f266c4033
--- /dev/null
+++ b/build/fbcode_builder/manifests/flex
@@ -0,0 +1,32 @@
+[manifest]
+name = flex
+
+[rpms]
+flex
+
+[debs]
+flex
+
+[download.not(os=windows)]
+url = https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz
+sha256 = e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995
+
+[download.os=windows]
+url = https://github.com/lexxmark/winflexbison/releases/download/v2.5.17/winflexbison-2.5.17.zip
+sha256 = 3dc27a16c21b717bcc5de8590b564d4392a0b8577170c058729d067d95ded825
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = flex-2.6.4
+
+[build.os=windows]
+builder = nop
+
+[install.files.os=windows]
+data = bin/data
+win_flex.exe = bin/flex.exe
+
+# Moral equivalent to this PR that fixes a crash when bootstrapping flex
+# on linux: https://github.com/easybuilders/easybuild-easyconfigs/pull/5792
+[autoconf.args.os=linux]
+CFLAGS=-D_GNU_SOURCE
diff --git a/build/fbcode_builder/manifests/fmt b/build/fbcode_builder/manifests/fmt
new file mode 100644
index 000000000..21503d202
--- /dev/null
+++ b/build/fbcode_builder/manifests/fmt
@@ -0,0 +1,14 @@
+[manifest]
+name = fmt
+
+[download]
+url = https://github.com/fmtlib/fmt/archive/6.1.1.tar.gz
+sha256 = bf4e50955943c1773cc57821d6c00f7e2b9e10eb435fafdd66739d36056d504e
+
+[build]
+builder = cmake
+subdir = fmt-6.1.1
+
+[cmake.defines]
+FMT_TEST = OFF
+FMT_DOC = OFF
diff --git a/build/fbcode_builder/manifests/folly b/build/fbcode_builder/manifests/folly
new file mode 100644
index 000000000..9647b17f8
--- /dev/null
+++ b/build/fbcode_builder/manifests/folly
@@ -0,0 +1,58 @@
+[manifest]
+name = folly
+fbsource_path = fbcode/folly
+shipit_project = folly
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/folly.git
+
+[build]
+builder = cmake
+
+[dependencies]
+gflags
+glog
+googletest
+boost
+libevent
+double-conversion
+fmt
+lz4
+snappy
+zstd
+# no openssl or zlib in the linux case, why?
+# these are usually installed on the system
+# and are the easiest system deps to pull in.
+# In the future we want to be able to express
+# that a system dep is sufficient in the manifest
+# for eg: openssl and zlib, but for now we don't
+# have it.
+
+# macOS doesn't expose the openssl api so we need
+# to build our own.
+[dependencies.os=darwin]
+openssl
+
+# Windows has neither openssl nor zlib, so we get
+# to provide both
+[dependencies.os=windows]
+openssl
+zlib
+
+[shipit.pathmap]
+fbcode/folly/public_tld = .
+fbcode/folly = folly
+
+[shipit.strip]
+^fbcode/folly/folly-config\.h$
+^fbcode/folly/public_tld/build/facebook_.*
+
+[cmake.defines]
+BUILD_SHARED_LIBS=OFF
+
+[cmake.defines.test=on]
+BUILD_TESTS=ON
+
+[cmake.defines.test=off]
+BUILD_TESTS=OFF
diff --git a/build/fbcode_builder/manifests/gflags b/build/fbcode_builder/manifests/gflags
new file mode 100644
index 000000000..d7ec44eab
--- /dev/null
+++ b/build/fbcode_builder/manifests/gflags
@@ -0,0 +1,17 @@
+[manifest]
+name = gflags
+
+[download]
+url = https://github.com/gflags/gflags/archive/v2.2.2.tar.gz
+sha256 = 34af2f15cf7367513b352bdcd2493ab14ce43692d2dcd9dfc499492966c64dcf
+
+[build]
+builder = cmake
+subdir = gflags-2.2.2
+
+[cmake.defines]
+BUILD_SHARED_LIBS = ON
+BUILD_STATIC_LIBS = ON
+#BUILD_gflags_nothreads_LIB = OFF
+BUILD_gflags_LIB = ON
+
diff --git a/build/fbcode_builder/manifests/git-lfs b/build/fbcode_builder/manifests/git-lfs
new file mode 100644
index 000000000..38a5e6aeb
--- /dev/null
+++ b/build/fbcode_builder/manifests/git-lfs
@@ -0,0 +1,12 @@
+[manifest]
+name = git-lfs
+
+[download.os=linux]
+url = https://github.com/git-lfs/git-lfs/releases/download/v2.9.1/git-lfs-linux-amd64-v2.9.1.tar.gz
+sha256 = 2a8e60cf51ec45aa0f4332aa0521d60ec75c76e485d13ebaeea915b9d70ea466
+
+[build]
+builder = nop
+
+[install.files]
+git-lfs = bin/git-lfs
diff --git a/build/fbcode_builder/manifests/glog b/build/fbcode_builder/manifests/glog
new file mode 100644
index 000000000..d2354610a
--- /dev/null
+++ b/build/fbcode_builder/manifests/glog
@@ -0,0 +1,16 @@
+[manifest]
+name = glog
+
+[download]
+url = https://github.com/google/glog/archive/v0.4.0.tar.gz
+sha256 = f28359aeba12f30d73d9e4711ef356dc842886968112162bc73002645139c39c
+
+[build]
+builder = cmake
+subdir = glog-0.4.0
+
+[dependencies]
+gflags
+
+[cmake.defines]
+BUILD_SHARED_LIBS=ON
diff --git a/build/fbcode_builder/manifests/gnu-bash b/build/fbcode_builder/manifests/gnu-bash
new file mode 100644
index 000000000..89da77ca2
--- /dev/null
+++ b/build/fbcode_builder/manifests/gnu-bash
@@ -0,0 +1,20 @@
+[manifest]
+name = gnu-bash
+
+[download.os=darwin]
+url = https://ftp.gnu.org/gnu/bash/bash-5.1-rc1.tar.gz
+sha256 = 0b2684eb1990329d499c96decfe2459f3e150deb915b0a9d03cf1be692b1d6d3
+
+[build.os=darwin]
+# The buildin FreeBSD bash on OSX is both outdated and incompatible with the
+# modern GNU bash, so for the sake of being cross-platform friendly this
+# manifest provides GNU bash.
+# NOTE: This is the 5.1-rc1 version, which is almost the same as what Homebrew
+# uses (Homebrew installs 5.0 with the 18 patches that in fact make the 5.1-rc1
+# version).
+builder = autoconf
+subdir = bash-5.1-rc1
+build_in_src_dir = true
+
+[build.not(os=darwin)]
+builder = nop
diff --git a/build/fbcode_builder/manifests/gnu-coreutils b/build/fbcode_builder/manifests/gnu-coreutils
new file mode 100644
index 000000000..1ab4d9d4a
--- /dev/null
+++ b/build/fbcode_builder/manifests/gnu-coreutils
@@ -0,0 +1,15 @@
+[manifest]
+name = gnu-coreutils
+
+[download.os=darwin]
+url = https://ftp.gnu.org/gnu/coreutils/coreutils-8.32.tar.gz
+sha256 = d5ab07435a74058ab69a2007e838be4f6a90b5635d812c2e26671e3972fca1b8
+
+[build.os=darwin]
+# The buildin FreeBSD version incompatible with the GNU one, so for the sake of
+# being cross-platform friendly this manifest provides the GNU version.
+builder = autoconf
+subdir = coreutils-8.32
+
+[build.not(os=darwin)]
+builder = nop
diff --git a/build/fbcode_builder/manifests/gnu-grep b/build/fbcode_builder/manifests/gnu-grep
new file mode 100644
index 000000000..e6a163d37
--- /dev/null
+++ b/build/fbcode_builder/manifests/gnu-grep
@@ -0,0 +1,15 @@
+[manifest]
+name = gnu-grep
+
+[download.os=darwin]
+url = https://ftp.gnu.org/gnu/grep/grep-3.5.tar.gz
+sha256 = 9897220992a8fd38a80b70731462defa95f7ff2709b235fb54864ddd011141dd
+
+[build.os=darwin]
+# The buildin FreeBSD version incompatible with the GNU one, so for the sake of
+# being cross-platform friendly this manifest provides the GNU version.
+builder = autoconf
+subdir = grep-3.5
+
+[build.not(os=darwin)]
+builder = nop
diff --git a/build/fbcode_builder/manifests/gnu-sed b/build/fbcode_builder/manifests/gnu-sed
new file mode 100644
index 000000000..9b458df6e
--- /dev/null
+++ b/build/fbcode_builder/manifests/gnu-sed
@@ -0,0 +1,15 @@
+[manifest]
+name = gnu-sed
+
+[download.os=darwin]
+url = https://ftp.gnu.org/gnu/sed/sed-4.8.tar.gz
+sha256 = 53cf3e14c71f3a149f29d13a0da64120b3c1d3334fba39c4af3e520be053982a
+
+[build.os=darwin]
+# The buildin FreeBSD version incompatible with the GNU one, so for the sake of
+# being cross-platform friendly this manifest provides the GNU version.
+builder = autoconf
+subdir = sed-4.8
+
+[build.not(os=darwin)]
+builder = nop
diff --git a/build/fbcode_builder/manifests/googletest b/build/fbcode_builder/manifests/googletest
new file mode 100644
index 000000000..775aac34f
--- /dev/null
+++ b/build/fbcode_builder/manifests/googletest
@@ -0,0 +1,18 @@
+[manifest]
+name = googletest
+
+[download]
+url = https://github.com/google/googletest/archive/release-1.10.0.tar.gz
+sha256 = 9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb
+
+[build]
+builder = cmake
+subdir = googletest-release-1.10.0
+
+[cmake.defines]
+# Everything else defaults to the shared runtime, so tell gtest that
+# it should not use its choice of the static runtime
+gtest_force_shared_crt=ON
+
+[cmake.defines.os=windows]
+BUILD_SHARED_LIBS=ON
diff --git a/build/fbcode_builder/manifests/googletest_1_8 b/build/fbcode_builder/manifests/googletest_1_8
new file mode 100644
index 000000000..76c0ce51f
--- /dev/null
+++ b/build/fbcode_builder/manifests/googletest_1_8
@@ -0,0 +1,18 @@
+[manifest]
+name = googletest_1_8
+
+[download]
+url = https://github.com/google/googletest/archive/release-1.8.0.tar.gz
+sha256 = 58a6f4277ca2bc8565222b3bbd58a177609e9c488e8a72649359ba51450db7d8
+
+[build]
+builder = cmake
+subdir = googletest-release-1.8.0
+
+[cmake.defines]
+# Everything else defaults to the shared runtime, so tell gtest that
+# it should not use its choice of the static runtime
+gtest_force_shared_crt=ON
+
+[cmake.defines.os=windows]
+BUILD_SHARED_LIBS=ON
diff --git a/build/fbcode_builder/manifests/gperf b/build/fbcode_builder/manifests/gperf
new file mode 100644
index 000000000..13d7a890f
--- /dev/null
+++ b/build/fbcode_builder/manifests/gperf
@@ -0,0 +1,14 @@
+[manifest]
+name = gperf
+
+[download]
+url = http://ftp.gnu.org/pub/gnu/gperf/gperf-3.1.tar.gz
+sha256 = 588546b945bba4b70b6a3a616e80b4ab466e3f33024a352fc2198112cdbb3ae2
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = gperf-3.1
+
+[build.os=windows]
+builder = nop
+
diff --git a/build/fbcode_builder/manifests/iproute2 b/build/fbcode_builder/manifests/iproute2
new file mode 100644
index 000000000..6fb7f77ed
--- /dev/null
+++ b/build/fbcode_builder/manifests/iproute2
@@ -0,0 +1,13 @@
+[manifest]
+name = iproute2
+
+[download]
+url = https://mirrors.edge.kernel.org/pub/linux/utils/net/iproute2/iproute2-4.12.0.tar.gz
+sha256 = 46612a1e2d01bb31932557bccdb1b8618cae9a439dfffc08ef35ed8e197f14ce
+
+[build.os=linux]
+builder = iproute2
+subdir = iproute2-4.12.0
+
+[build.not(os=linux)]
+builder = nop
diff --git a/build/fbcode_builder/manifests/jq b/build/fbcode_builder/manifests/jq
new file mode 100644
index 000000000..231818f34
--- /dev/null
+++ b/build/fbcode_builder/manifests/jq
@@ -0,0 +1,24 @@
+[manifest]
+name = jq
+
+[rpms]
+jq
+
+[debs]
+jq
+
+[download.not(os=windows)]
+url = https://github.com/stedolan/jq/releases/download/jq-1.5/jq-1.5.tar.gz
+sha256 = c4d2bfec6436341113419debf479d833692cc5cdab7eb0326b5a4d4fbe9f493c
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = jq-1.5
+
+[build.os=windows]
+builder = nop
+
+[autoconf.args]
+# This argument turns off some developers tool and it is recommended in jq's
+# README
+--disable-maintainer-mode
diff --git a/build/fbcode_builder/manifests/katran b/build/fbcode_builder/manifests/katran
new file mode 100644
index 000000000..224ccbe21
--- /dev/null
+++ b/build/fbcode_builder/manifests/katran
@@ -0,0 +1,38 @@
+[manifest]
+name = katran
+fbsource_path = fbcode/katran
+shipit_project = katran
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookincubator/katran.git
+
+[build.not(os=linux)]
+builder = nop
+
+[build.os=linux]
+builder = cmake
+subdir = .
+
+[cmake.defines.test=on]
+BUILD_TESTS=ON
+
+[cmake.defines.test=off]
+BUILD_TESTS=OFF
+
+[dependencies]
+folly
+fizz
+libbpf
+libmnl
+zlib
+googletest
+
+
+[shipit.pathmap]
+fbcode/katran/public_root = .
+fbcode/katran = katran
+
+[shipit.strip]
+^fbcode/katran/facebook
+^fbcode/katran/OSS_SYNC
diff --git a/build/fbcode_builder/manifests/libbpf b/build/fbcode_builder/manifests/libbpf
new file mode 100644
index 000000000..0416822e4
--- /dev/null
+++ b/build/fbcode_builder/manifests/libbpf
@@ -0,0 +1,26 @@
+[manifest]
+name = libbpf
+
+[download]
+url = https://github.com/libbpf/libbpf/archive/v0.3.tar.gz
+sha256 = c168d84a75b541f753ceb49015d9eb886e3fb5cca87cdd9aabce7e10ad3a1efc
+
+# BPF only builds on linux, so make it a NOP on other platforms
+[build.not(os=linux)]
+builder = nop
+
+[build.os=linux]
+builder = make
+subdir = libbpf-0.3/src
+
+[make.build_args]
+BUILD_STATIC_ONLY=y
+
+# libbpf-0.3 requires uapi headers >= 5.8
+[make.install_args]
+install
+install_uapi_headers
+BUILD_STATIC_ONLY=y
+
+[dependencies]
+libelf
diff --git a/build/fbcode_builder/manifests/libbpf_0_2_0_beta b/build/fbcode_builder/manifests/libbpf_0_2_0_beta
new file mode 100644
index 000000000..072639817
--- /dev/null
+++ b/build/fbcode_builder/manifests/libbpf_0_2_0_beta
@@ -0,0 +1,26 @@
+[manifest]
+name = libbpf_0_2_0_beta
+
+[download]
+url = https://github.com/libbpf/libbpf/archive/b6dd2f2.tar.gz
+sha256 = 8db9dca90f5c445ef2362e3c6a00f3d6c4bf36e8782f8e27704109c78e541497
+
+# BPF only builds on linux, so make it a NOP on other platforms
+[build.not(os=linux)]
+builder = nop
+
+[build.os=linux]
+builder = make
+subdir = libbpf-b6dd2f2b7df4d3bd35d64aaf521d9ad18d766f53/src
+
+[make.build_args]
+BUILD_STATIC_ONLY=y
+
+# libbpf now requires uapi headers >= 5.8
+[make.install_args]
+install
+install_uapi_headers
+BUILD_STATIC_ONLY=y
+
+[dependencies]
+libelf
diff --git a/build/fbcode_builder/manifests/libcurl b/build/fbcode_builder/manifests/libcurl
new file mode 100644
index 000000000..466b4497c
--- /dev/null
+++ b/build/fbcode_builder/manifests/libcurl
@@ -0,0 +1,39 @@
+[manifest]
+name = libcurl
+
+[rpms]
+libcurl-devel
+libcurl
+
+[debs]
+libcurl4-openssl-dev
+
+[download]
+url = https://curl.haxx.se/download/curl-7.65.1.tar.gz
+sha256 = 821aeb78421375f70e55381c9ad2474bf279fc454b791b7e95fc83562951c690
+
+[dependencies]
+nghttp2
+
+# We use system OpenSSL on Linux (see folly's manifest for details)
+[dependencies.not(os=linux)]
+openssl
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = curl-7.65.1
+
+[autoconf.args]
+# fboss (which added the libcurl dep) doesn't need ldap so it is disabled here.
+# if someone in the future wants to add ldap for something else, it won't hurt
+# fboss. However, that would require adding an ldap manifest.
+#
+# For the same reason, we disable libssh2 and libidn2 which aren't really used
+# but would require adding manifests if we don't disable them.
+--disable-ldap
+--without-libssh2
+--without-libidn2
+
+[build.os=windows]
+builder = cmake
+subdir = curl-7.65.1
diff --git a/build/fbcode_builder/manifests/libelf b/build/fbcode_builder/manifests/libelf
new file mode 100644
index 000000000..a46aab879
--- /dev/null
+++ b/build/fbcode_builder/manifests/libelf
@@ -0,0 +1,20 @@
+[manifest]
+name = libelf
+
+[rpms]
+elfutils-libelf-devel-static
+
+[debs]
+libelf-dev
+
+[download]
+url = https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz
+sha256 = 591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d
+
+# libelf only makes sense on linux, so make it a NOP on other platforms
+[build.not(os=linux)]
+builder = nop
+
+[build.os=linux]
+builder = autoconf
+subdir = libelf-0.8.13
diff --git a/build/fbcode_builder/manifests/libevent b/build/fbcode_builder/manifests/libevent
new file mode 100644
index 000000000..eaa39a9e6
--- /dev/null
+++ b/build/fbcode_builder/manifests/libevent
@@ -0,0 +1,29 @@
+[manifest]
+name = libevent
+
+[rpms]
+libevent-devel
+
+[debs]
+libevent-dev
+
+# Note that the CMakeLists.txt file is present only in
+# git repo and not in the release tarball, so take care
+# to use the github generated source tarball rather than
+# the explicitly uploaded source tarball
+[download]
+url = https://github.com/libevent/libevent/archive/release-2.1.8-stable.tar.gz
+sha256 = 316ddb401745ac5d222d7c529ef1eada12f58f6376a66c1118eee803cb70f83d
+
+[build]
+builder = cmake
+subdir = libevent-release-2.1.8-stable
+
+[cmake.defines]
+EVENT__DISABLE_TESTS = ON
+EVENT__DISABLE_BENCHMARK = ON
+EVENT__DISABLE_SAMPLES = ON
+EVENT__DISABLE_REGRESS = ON
+
+[dependencies.not(os=linux)]
+openssl
diff --git a/build/fbcode_builder/manifests/libgit2 b/build/fbcode_builder/manifests/libgit2
new file mode 100644
index 000000000..1d6a53e5e
--- /dev/null
+++ b/build/fbcode_builder/manifests/libgit2
@@ -0,0 +1,24 @@
+[manifest]
+name = libgit2
+
+[rpms]
+libgit2-devel
+
+[debs]
+libgit2-dev
+
+[download]
+url = https://github.com/libgit2/libgit2/archive/v0.28.1.tar.gz
+sha256 = 0ca11048795b0d6338f2e57717370208c2c97ad66c6d5eac0c97a8827d13936b
+
+[build]
+builder = cmake
+subdir = libgit2-0.28.1
+
+[cmake.defines]
+# Could turn this on if we also wanted to add a manifest for libssh2
+USE_SSH = OFF
+BUILD_CLAR = OFF
+# Have to build shared to work around annoying problems with cmake
+# mis-parsing the frameworks required to link this on macos :-/
+BUILD_SHARED_LIBS = ON
diff --git a/build/fbcode_builder/manifests/libicu b/build/fbcode_builder/manifests/libicu
new file mode 100644
index 000000000..c1deda503
--- /dev/null
+++ b/build/fbcode_builder/manifests/libicu
@@ -0,0 +1,19 @@
+[manifest]
+name = libicu
+
+[rpms]
+libicu-devel
+
+[debs]
+libicu-dev
+
+[download]
+url = https://github.com/unicode-org/icu/releases/download/release-68-2/icu4c-68_2-src.tgz
+sha256 = c79193dee3907a2199b8296a93b52c5cb74332c26f3d167269487680d479d625
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = icu/source
+
+[build.os=windows]
+builder = nop
diff --git a/build/fbcode_builder/manifests/libmnl b/build/fbcode_builder/manifests/libmnl
new file mode 100644
index 000000000..9b28b87b9
--- /dev/null
+++ b/build/fbcode_builder/manifests/libmnl
@@ -0,0 +1,17 @@
+[manifest]
+name = libmnl
+
+[rpms]
+libmnl-devel
+libmnl-static
+
+[debs]
+libmnl-dev
+
+[download]
+url = http://www.netfilter.org/pub/libmnl/libmnl-1.0.4.tar.bz2
+sha256 = 171f89699f286a5854b72b91d06e8f8e3683064c5901fb09d954a9ab6f551f81
+
+[build.os=linux]
+builder = autoconf
+subdir = libmnl-1.0.4
diff --git a/build/fbcode_builder/manifests/libnl b/build/fbcode_builder/manifests/libnl
new file mode 100644
index 000000000..f864acb49
--- /dev/null
+++ b/build/fbcode_builder/manifests/libnl
@@ -0,0 +1,17 @@
+[manifest]
+name = libnl
+
+[rpms]
+libnl3-devel
+libnl3
+
+[debs]
+libnl-3-dev
+
+[download]
+url = https://www.infradead.org/~tgr/libnl/files/libnl-3.2.25.tar.gz
+sha256 = 8beb7590674957b931de6b7f81c530b85dc7c1ad8fbda015398bc1e8d1ce8ec5
+
+[build.os=linux]
+builder = autoconf
+subdir = libnl-3.2.25
diff --git a/build/fbcode_builder/manifests/libsai b/build/fbcode_builder/manifests/libsai
new file mode 100644
index 000000000..4f422d8e1
--- /dev/null
+++ b/build/fbcode_builder/manifests/libsai
@@ -0,0 +1,13 @@
+[manifest]
+name = libsai
+
+[download]
+url = https://github.com/opencomputeproject/SAI/archive/v1.7.1.tar.gz
+sha256 = e18eb1a2a6e5dd286d97e13569d8b78cc1f8229030beed0db4775b9a50ab6a83
+
+[build]
+builder = nop
+subdir = SAI-1.7.1
+
+[install.files]
+inc = include
diff --git a/build/fbcode_builder/manifests/libsodium b/build/fbcode_builder/manifests/libsodium
new file mode 100644
index 000000000..d69bfcc4b
--- /dev/null
+++ b/build/fbcode_builder/manifests/libsodium
@@ -0,0 +1,33 @@
+[manifest]
+name = libsodium
+
+[rpms]
+libsodium-devel
+libsodium-static
+
+[debs]
+libsodium-dev
+
+[download.not(os=windows)]
+url = https://github.com/jedisct1/libsodium/releases/download/1.0.17/libsodium-1.0.17.tar.gz
+sha256 = 0cc3dae33e642cc187b5ceb467e0ad0e1b51dcba577de1190e9ffa17766ac2b1
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = libsodium-1.0.17
+
+[download.os=windows]
+url = https://download.libsodium.org/libsodium/releases/libsodium-1.0.17-msvc.zip
+sha256 = f0f32ad8ebd76eee99bb039f843f583f2babca5288a8c26a7261db9694c11467
+
+[build.os=windows]
+builder = nop
+
+[install.files.os=windows]
+x64/Release/v141/dynamic/libsodium.dll = bin/libsodium.dll
+x64/Release/v141/dynamic/libsodium.lib = lib/libsodium.lib
+x64/Release/v141/dynamic/libsodium.exp = lib/libsodium.exp
+x64/Release/v141/dynamic/libsodium.pdb = lib/libsodium.pdb
+include = include
+
+[autoconf.args]
diff --git a/build/fbcode_builder/manifests/libtool b/build/fbcode_builder/manifests/libtool
new file mode 100644
index 000000000..1ec99b5f4
--- /dev/null
+++ b/build/fbcode_builder/manifests/libtool
@@ -0,0 +1,22 @@
+[manifest]
+name = libtool
+
+[rpms]
+libtool
+
+[debs]
+libtool
+
+[download]
+url = http://ftp.gnu.org/gnu/libtool/libtool-2.4.6.tar.gz
+sha256 = e3bd4d5d3d025a36c21dd6af7ea818a2afcd4dfc1ea5a17b39d7854bcd0c06e3
+
+[build]
+builder = autoconf
+subdir = libtool-2.4.6
+
+[dependencies]
+automake
+
+[autoconf.args]
+--enable-ltdl-install
diff --git a/build/fbcode_builder/manifests/libusb b/build/fbcode_builder/manifests/libusb
new file mode 100644
index 000000000..74702d3f0
--- /dev/null
+++ b/build/fbcode_builder/manifests/libusb
@@ -0,0 +1,23 @@
+[manifest]
+name = libusb
+
+[rpms]
+libusb-devel
+libusb
+
+[debs]
+libusb-1.0-0-dev
+
+[download]
+url = https://github.com/libusb/libusb/releases/download/v1.0.22/libusb-1.0.22.tar.bz2
+sha256 = 75aeb9d59a4fdb800d329a545c2e6799f732362193b465ea198f2aa275518157
+
+[build.os=linux]
+builder = autoconf
+subdir = libusb-1.0.22
+
+[autoconf.args]
+# fboss (which added the libusb dep) doesn't need udev so it is disabled here.
+# if someone in the future wants to add udev for something else, it won't hurt
+# fboss.
+--disable-udev
diff --git a/build/fbcode_builder/manifests/libyaml b/build/fbcode_builder/manifests/libyaml
new file mode 100644
index 000000000..a7ff57316
--- /dev/null
+++ b/build/fbcode_builder/manifests/libyaml
@@ -0,0 +1,13 @@
+[manifest]
+name = libyaml
+
+[download]
+url = http://pyyaml.org/download/libyaml/yaml-0.1.7.tar.gz
+sha256 = 8088e457264a98ba451a90b8661fcb4f9d6f478f7265d48322a196cec2480729
+
+[build.os=linux]
+builder = autoconf
+subdir = yaml-0.1.7
+
+[build.not(os=linux)]
+builder = nop
diff --git a/build/fbcode_builder/manifests/libzmq b/build/fbcode_builder/manifests/libzmq
new file mode 100644
index 000000000..4f555fa65
--- /dev/null
+++ b/build/fbcode_builder/manifests/libzmq
@@ -0,0 +1,24 @@
+[manifest]
+name = libzmq
+
+[rpms]
+zeromq-devel
+zeromq
+
+[debs]
+libzmq3-dev
+
+[download]
+url = https://github.com/zeromq/libzmq/releases/download/v4.3.1/zeromq-4.3.1.tar.gz
+sha256 = bcbabe1e2c7d0eec4ed612e10b94b112dd5f06fcefa994a0c79a45d835cd21eb
+
+
+[build]
+builder = autoconf
+subdir = zeromq-4.3.1
+
+[autoconf.args]
+
+[dependencies]
+autoconf
+libtool
diff --git a/build/fbcode_builder/manifests/lz4 b/build/fbcode_builder/manifests/lz4
new file mode 100644
index 000000000..03dbd9de4
--- /dev/null
+++ b/build/fbcode_builder/manifests/lz4
@@ -0,0 +1,17 @@
+[manifest]
+name = lz4
+
+[rpms]
+lz4-devel
+lz4-static
+
+[debs]
+liblz4-dev
+
+[download]
+url = https://github.com/lz4/lz4/archive/v1.8.3.tar.gz
+sha256 = 33af5936ac06536805f9745e0b6d61da606a1f8b4cc5c04dd3cbaca3b9b4fc43
+
+[build]
+builder = cmake
+subdir = lz4-1.8.3/contrib/cmake_unofficial
diff --git a/build/fbcode_builder/manifests/lzo b/build/fbcode_builder/manifests/lzo
new file mode 100644
index 000000000..342428ab5
--- /dev/null
+++ b/build/fbcode_builder/manifests/lzo
@@ -0,0 +1,19 @@
+[manifest]
+name = lzo
+
+[rpms]
+lzo-devel
+
+[debs]
+liblzo2-dev
+
+[download]
+url = http://www.oberhumer.com/opensource/lzo/download/lzo-2.10.tar.gz
+sha256 = c0f892943208266f9b6543b3ae308fab6284c5c90e627931446fb49b4221a072
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = lzo-2.10
+
+[build.os=windows]
+builder = nop
diff --git a/build/fbcode_builder/manifests/mononoke b/build/fbcode_builder/manifests/mononoke
new file mode 100644
index 000000000..7df92c77b
--- /dev/null
+++ b/build/fbcode_builder/manifests/mononoke
@@ -0,0 +1,44 @@
+[manifest]
+name = mononoke
+fbsource_path = fbcode/eden
+shipit_project = eden
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookexperimental/eden.git
+
+[build.not(os=windows)]
+builder = cargo
+
+[build.os=windows]
+# building Mononoke on windows is not supported
+builder = nop
+
+[cargo]
+build_doc = true
+workspace_dir = eden/mononoke
+
+[shipit.pathmap]
+fbcode/configerator/structs/scm/mononoke/public_autocargo = configerator/structs/scm/mononoke
+fbcode/configerator/structs/scm/mononoke = configerator/structs/scm/mononoke
+fbcode/eden/oss = .
+fbcode/eden = eden
+fbcode/eden/mononoke/public_autocargo = eden/mononoke
+fbcode/tools/lfs = tools/lfs
+tools/rust/ossconfigs = .
+
+[shipit.strip]
+# strip all code unrelated to mononoke to prevent triggering unnecessary checks
+^fbcode/eden/(?!mononoke|scm/lib/xdiff.*)/.*$
+^fbcode/eden/scm/lib/third-party/rust/.*/Cargo.toml$
+^fbcode/eden/mononoke/Cargo\.toml$
+^fbcode/eden/mononoke/(?!public_autocargo).+/Cargo\.toml$
+^fbcode/configerator/structs/scm/mononoke/(?!public_autocargo).+/Cargo\.toml$
+^.*/facebook/.*$
+
+[dependencies]
+fbthrift-source
+rust-shed
+
+[dependencies.fb=on]
+rust
diff --git a/build/fbcode_builder/manifests/mononoke_integration b/build/fbcode_builder/manifests/mononoke_integration
new file mode 100644
index 000000000..a796e967e
--- /dev/null
+++ b/build/fbcode_builder/manifests/mononoke_integration
@@ -0,0 +1,47 @@
+[manifest]
+name = mononoke_integration
+fbsource_path = fbcode/eden
+shipit_project = eden
+shipit_fbcode_builder = true
+
+[build.not(os=windows)]
+builder = make
+subdir = eden/mononoke/tests/integration
+
+[build.os=windows]
+# building Mononoke on windows is not supported
+builder = nop
+
+[make.build_args]
+build-getdeps
+
+[make.install_args]
+install-getdeps
+
+[make.test_args]
+test-getdeps
+
+[shipit.pathmap]
+fbcode/eden/mononoke/tests/integration = eden/mononoke/tests/integration
+
+[shipit.strip]
+^.*/facebook/.*$
+
+[dependencies]
+eden_scm
+eden_scm_lib_edenapi_tools
+jq
+mononoke
+nmap
+python-click
+python-dulwich
+tree
+
+[dependencies.os=linux]
+sqlite3-bin
+
+[dependencies.os=darwin]
+gnu-bash
+gnu-coreutils
+gnu-grep
+gnu-sed
diff --git a/build/fbcode_builder/manifests/mvfst b/build/fbcode_builder/manifests/mvfst
new file mode 100644
index 000000000..4f72a9192
--- /dev/null
+++ b/build/fbcode_builder/manifests/mvfst
@@ -0,0 +1,32 @@
+[manifest]
+name = mvfst
+fbsource_path = fbcode/quic
+shipit_project = mvfst
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookincubator/mvfst.git
+
+[build]
+builder = cmake
+subdir = .
+
+[cmake.defines.test=on]
+BUILD_TESTS = ON
+
+[cmake.defines.all(os=windows, test=on)]
+BUILD_TESTS = OFF
+
+[cmake.defines.test=off]
+BUILD_TESTS = OFF
+
+[dependencies]
+folly
+fizz
+
+[dependencies.all(test=on, not(os=windows))]
+googletest_1_8
+
+[shipit.pathmap]
+fbcode/quic/public_root = .
+fbcode/quic = quic
diff --git a/build/fbcode_builder/manifests/nghttp2 b/build/fbcode_builder/manifests/nghttp2
new file mode 100644
index 000000000..151daf8af
--- /dev/null
+++ b/build/fbcode_builder/manifests/nghttp2
@@ -0,0 +1,20 @@
+[manifest]
+name = nghttp2
+
+[rpms]
+libnghttp2-devel
+libnghttp2
+
+[debs]
+libnghttp2-dev
+
+[download]
+url = https://github.com/nghttp2/nghttp2/releases/download/v1.39.2/nghttp2-1.39.2.tar.gz
+sha256 = fc820a305e2f410fade1a3260f09229f15c0494fc089b0100312cd64a33a38c0
+
+[build]
+builder = autoconf
+subdir = nghttp2-1.39.2
+
+[autoconf.args]
+--enable-lib-only
diff --git a/build/fbcode_builder/manifests/ninja b/build/fbcode_builder/manifests/ninja
new file mode 100644
index 000000000..2b6c5dc8d
--- /dev/null
+++ b/build/fbcode_builder/manifests/ninja
@@ -0,0 +1,26 @@
+[manifest]
+name = ninja
+
+[rpms]
+ninja-build
+
+[debs]
+ninja-build
+
+[download.os=windows]
+url = https://github.com/ninja-build/ninja/releases/download/v1.10.2/ninja-win.zip
+sha256 = bbde850d247d2737c5764c927d1071cbb1f1957dcabda4a130fa8547c12c695f
+
+[build.os=windows]
+builder = nop
+
+[install.files.os=windows]
+ninja.exe = bin/ninja.exe
+
+[download.not(os=windows)]
+url = https://github.com/ninja-build/ninja/archive/v1.10.2.tar.gz
+sha256 = ce35865411f0490368a8fc383f29071de6690cbadc27704734978221f25e2bed
+
+[build.not(os=windows)]
+builder = ninja_bootstrap
+subdir = ninja-1.10.2
diff --git a/build/fbcode_builder/manifests/nmap b/build/fbcode_builder/manifests/nmap
new file mode 100644
index 000000000..c245e1241
--- /dev/null
+++ b/build/fbcode_builder/manifests/nmap
@@ -0,0 +1,25 @@
+[manifest]
+name = nmap
+
+[rpms]
+nmap
+
+[debs]
+nmap
+
+[download.not(os=windows)]
+url = https://api.github.com/repos/nmap/nmap/tarball/ef8213a36c2e89233c806753a57b5cd473605408
+sha256 = eda39e5a8ef4964fac7db16abf91cc11ff568eac0fa2d680b0bfa33b0ed71f4a
+
+[build.not(os=windows)]
+builder = autoconf
+subdir = nmap-nmap-ef8213a
+build_in_src_dir = true
+
+[build.os=windows]
+builder = nop
+
+[autoconf.args]
+# Without this option the build was filing to find some third party libraries
+# that we don't need
+enable_rdma=no
diff --git a/build/fbcode_builder/manifests/openr b/build/fbcode_builder/manifests/openr
new file mode 100644
index 000000000..754ba8cd5
--- /dev/null
+++ b/build/fbcode_builder/manifests/openr
@@ -0,0 +1,37 @@
+[manifest]
+name = openr
+fbsource_path = facebook/openr
+shipit_project = openr
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/openr.git
+
+[build.os=linux]
+builder = cmake
+
+[build.not(os=linux)]
+# boost.fiber is required and that is not available on macos.
+# libzmq doesn't currently build on windows.
+builder = nop
+
+[dependencies]
+boost
+fb303
+fbthrift
+fbzmq
+folly
+googletest
+re2
+
+[cmake.defines.test=on]
+BUILD_TESTS=ON
+ADD_ROOT_TESTS=OFF
+
+[cmake.defines.test=off]
+BUILD_TESTS=OFF
+
+
+[shipit.pathmap]
+fbcode/openr = openr
+fbcode/openr/public_tld = .
diff --git a/build/fbcode_builder/manifests/openssl b/build/fbcode_builder/manifests/openssl
new file mode 100644
index 000000000..991196c9a
--- /dev/null
+++ b/build/fbcode_builder/manifests/openssl
@@ -0,0 +1,20 @@
+[manifest]
+name = openssl
+
+[rpms]
+openssl-devel
+openssl
+
+[debs]
+libssl-dev
+
+[download]
+url = https://www.openssl.org/source/openssl-1.1.1i.tar.gz
+sha256 = e8be6a35fe41d10603c3cc635e93289ed00bf34b79671a3a4de64fcee00d5242
+
+[build]
+builder = openssl
+subdir = openssl-1.1.1i
+
+[dependencies.os=windows]
+perl
diff --git a/build/fbcode_builder/manifests/osxfuse b/build/fbcode_builder/manifests/osxfuse
new file mode 100644
index 000000000..b6c6c551f
--- /dev/null
+++ b/build/fbcode_builder/manifests/osxfuse
@@ -0,0 +1,12 @@
+[manifest]
+name = osxfuse
+
+[download]
+url = https://github.com/osxfuse/osxfuse/archive/osxfuse-3.8.3.tar.gz
+sha256 = 93bab6731bdfe8dc1ef069483437270ce7fe5a370f933d40d8d0ef09ba846c0c
+
+[build]
+builder = nop
+
+[install.files]
+osxfuse-osxfuse-3.8.3/common = include
diff --git a/build/fbcode_builder/manifests/patchelf b/build/fbcode_builder/manifests/patchelf
new file mode 100644
index 000000000..f9d050424
--- /dev/null
+++ b/build/fbcode_builder/manifests/patchelf
@@ -0,0 +1,17 @@
+[manifest]
+name = patchelf
+
+[rpms]
+patchelf
+
+[debs]
+patchelf
+
+[download]
+url = https://github.com/NixOS/patchelf/archive/0.10.tar.gz
+sha256 = b3cb6bdedcef5607ce34a350cf0b182eb979f8f7bc31eae55a93a70a3f020d13
+
+[build]
+builder = autoconf
+subdir = patchelf-0.10
+
diff --git a/build/fbcode_builder/manifests/pcre b/build/fbcode_builder/manifests/pcre
new file mode 100644
index 000000000..5353d8c27
--- /dev/null
+++ b/build/fbcode_builder/manifests/pcre
@@ -0,0 +1,18 @@
+[manifest]
+name = pcre
+
+[rpms]
+pcre-devel
+pcre-static
+
+[debs]
+libpcre3-dev
+
+[download]
+url = https://ftp.pcre.org/pub/pcre/pcre-8.43.tar.gz
+sha256 = 0b8e7465dc5e98c757cc3650a20a7843ee4c3edf50aaf60bb33fd879690d2c73
+
+[build]
+builder = cmake
+subdir = pcre-8.43
+
diff --git a/build/fbcode_builder/manifests/perl b/build/fbcode_builder/manifests/perl
new file mode 100644
index 000000000..32bddc51c
--- /dev/null
+++ b/build/fbcode_builder/manifests/perl
@@ -0,0 +1,11 @@
+[manifest]
+name = perl
+
+[download.os=windows]
+url = http://strawberryperl.com/download/5.28.1.1/strawberry-perl-5.28.1.1-64bit-portable.zip
+sha256 = 935c95ba096fa11c4e1b5188732e3832d330a2a79e9882ab7ba8460ddbca810d
+
+[build.os=windows]
+builder = nop
+subdir = perl
+
diff --git a/build/fbcode_builder/manifests/pexpect b/build/fbcode_builder/manifests/pexpect
new file mode 100644
index 000000000..682e66a54
--- /dev/null
+++ b/build/fbcode_builder/manifests/pexpect
@@ -0,0 +1,12 @@
+[manifest]
+name = pexpect
+
+[download]
+url = https://files.pythonhosted.org/packages/0e/3e/377007e3f36ec42f1b84ec322ee12141a9e10d808312e5738f52f80a232c/pexpect-4.7.0-py2.py3-none-any.whl
+sha256 = 2094eefdfcf37a1fdbfb9aa090862c1a4878e5c7e0e7e7088bdb511c558e5cd1
+
+[build]
+builder = python-wheel
+
+[dependencies]
+python-ptyprocess
diff --git a/build/fbcode_builder/manifests/protobuf b/build/fbcode_builder/manifests/protobuf
new file mode 100644
index 000000000..7f21e4821
--- /dev/null
+++ b/build/fbcode_builder/manifests/protobuf
@@ -0,0 +1,17 @@
+[manifest]
+name = protobuf
+
+[rpms]
+protobuf-devel
+
+[debs]
+libprotobuf-dev
+
+[git]
+repo_url = https://github.com/protocolbuffers/protobuf.git
+
+[build.not(os=windows)]
+builder = autoconf
+
+[build.os=windows]
+builder = nop
diff --git a/build/fbcode_builder/manifests/proxygen b/build/fbcode_builder/manifests/proxygen
new file mode 100644
index 000000000..5452a2454
--- /dev/null
+++ b/build/fbcode_builder/manifests/proxygen
@@ -0,0 +1,39 @@
+[manifest]
+name = proxygen
+fbsource_path = fbcode/proxygen
+shipit_project = proxygen
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/proxygen.git
+
+[build.os=windows]
+builder = nop
+
+[build]
+builder = cmake
+subdir = .
+
+[cmake.defines]
+BUILD_QUIC = ON
+
+[cmake.defines.test=on]
+BUILD_TESTS = ON
+
+[cmake.defines.test=off]
+BUILD_TESTS = OFF
+
+[dependencies]
+zlib
+gperf
+folly
+fizz
+wangle
+mvfst
+
+[dependencies.test=on]
+googletest_1_8
+
+[shipit.pathmap]
+fbcode/proxygen/public_tld = .
+fbcode/proxygen = proxygen
diff --git a/build/fbcode_builder/manifests/python b/build/fbcode_builder/manifests/python
new file mode 100644
index 000000000..e51c0ab51
--- /dev/null
+++ b/build/fbcode_builder/manifests/python
@@ -0,0 +1,17 @@
+[manifest]
+name = python
+
+[rpms]
+python3
+python3-devel
+
+[debs]
+python3-all-dev
+
+[download.os=linux]
+url = https://www.python.org/ftp/python/3.7.6/Python-3.7.6.tgz
+sha256 = aeee681c235ad336af116f08ab6563361a0c81c537072c1b309d6e4050aa2114
+
+[build.os=linux]
+builder = autoconf
+subdir = Python-3.7.6
diff --git a/build/fbcode_builder/manifests/python-click b/build/fbcode_builder/manifests/python-click
new file mode 100644
index 000000000..ea9a9d2d3
--- /dev/null
+++ b/build/fbcode_builder/manifests/python-click
@@ -0,0 +1,9 @@
+[manifest]
+name = python-click
+
+[download]
+url = https://files.pythonhosted.org/packages/d2/3d/fa76db83bf75c4f8d338c2fd15c8d33fdd7ad23a9b5e57eb6c5de26b430e/click-7.1.2-py2.py3-none-any.whl
+sha256 = dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc
+
+[build]
+builder = python-wheel
diff --git a/build/fbcode_builder/manifests/python-dulwich b/build/fbcode_builder/manifests/python-dulwich
new file mode 100644
index 000000000..0d995e12f
--- /dev/null
+++ b/build/fbcode_builder/manifests/python-dulwich
@@ -0,0 +1,19 @@
+[manifest]
+name = python-dulwich
+
+# The below links point to custom github forks of project dulwich, because the
+# 0.18.6 version didn't have an official rollout of wheel packages.
+
+[download.os=linux]
+url = https://github.com/lukaspiatkowski/dulwich/releases/download/dulwich-0.18.6-wheel/dulwich-0.18.6-cp36-cp36m-linux_x86_64.whl
+sha256 = e96f545f3d003e67236785473caaba2c368e531ea85fd508a3bd016ebac3a6d8
+
+[download.os=darwin]
+url = https://github.com/lukaspiatkowski/dulwich/releases/download/dulwich-0.18.6-wheel/dulwich-0.18.6-cp37-cp37m-macosx_10_14_x86_64.whl
+sha256 = 8373652056284ad40ea5220b659b3489b0a91f25536322345a3e4b5d29069308
+
+[build.not(os=windows)]
+builder = python-wheel
+
+[build.os=windows]
+builder = nop
diff --git a/build/fbcode_builder/manifests/python-ptyprocess b/build/fbcode_builder/manifests/python-ptyprocess
new file mode 100644
index 000000000..adc60e048
--- /dev/null
+++ b/build/fbcode_builder/manifests/python-ptyprocess
@@ -0,0 +1,9 @@
+[manifest]
+name = python-ptyprocess
+
+[download]
+url = https://files.pythonhosted.org/packages/d1/29/605c2cc68a9992d18dada28206eeada56ea4bd07a239669da41674648b6f/ptyprocess-0.6.0-py2.py3-none-any.whl
+sha256 = d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f
+
+[build]
+builder = python-wheel
diff --git a/build/fbcode_builder/manifests/python-six b/build/fbcode_builder/manifests/python-six
new file mode 100644
index 000000000..a712188dc
--- /dev/null
+++ b/build/fbcode_builder/manifests/python-six
@@ -0,0 +1,9 @@
+[manifest]
+name = python-six
+
+[download]
+url = https://files.pythonhosted.org/packages/73/fb/00a976f728d0d1fecfe898238ce23f502a721c0ac0ecfedb80e0d88c64e9/six-1.12.0-py2.py3-none-any.whl
+sha256 = 3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c
+
+[build]
+builder = python-wheel
diff --git a/build/fbcode_builder/manifests/python-toml b/build/fbcode_builder/manifests/python-toml
new file mode 100644
index 000000000..b49a3b8fb
--- /dev/null
+++ b/build/fbcode_builder/manifests/python-toml
@@ -0,0 +1,9 @@
+[manifest]
+name = python-toml
+
+[download]
+url = https://files.pythonhosted.org/packages/a2/12/ced7105d2de62fa7c8fb5fce92cc4ce66b57c95fb875e9318dba7f8c5db0/toml-0.10.0-py2.py3-none-any.whl
+sha256 = 235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e
+
+[build]
+builder = python-wheel
diff --git a/build/fbcode_builder/manifests/re2 b/build/fbcode_builder/manifests/re2
new file mode 100644
index 000000000..eb4d6a92c
--- /dev/null
+++ b/build/fbcode_builder/manifests/re2
@@ -0,0 +1,17 @@
+[manifest]
+name = re2
+
+[rpms]
+re2
+re2-devel
+
+[debs]
+libre2-dev
+
+[download]
+url = https://github.com/google/re2/archive/2019-06-01.tar.gz
+sha256 = 02b7d73126bd18e9fbfe5d6375a8bb13fadaf8e99e48cbb062e4500fc18e8e2e
+
+[build]
+builder = cmake
+subdir = re2-2019-06-01
diff --git a/build/fbcode_builder/manifests/rocksdb b/build/fbcode_builder/manifests/rocksdb
new file mode 100644
index 000000000..323e6dc6d
--- /dev/null
+++ b/build/fbcode_builder/manifests/rocksdb
@@ -0,0 +1,41 @@
+[manifest]
+name = rocksdb
+
+[download]
+url = https://github.com/facebook/rocksdb/archive/v6.8.1.tar.gz
+sha256 = ca192a06ed3bcb9f09060add7e9d0daee1ae7a8705a3d5ecbe41867c5e2796a2
+
+[dependencies]
+lz4
+snappy
+
+[build]
+builder = cmake
+subdir = rocksdb-6.8.1
+
+[cmake.defines]
+WITH_SNAPPY=ON
+WITH_LZ4=ON
+WITH_TESTS=OFF
+WITH_BENCHMARK_TOOLS=OFF
+# We get relocation errors with the static gflags lib,
+# and there's no clear way to make it pick the shared gflags
+# so just turn it off.
+WITH_GFLAGS=OFF
+# mac pro machines don't have some of the newer features that
+# rocksdb enables by default; ask it to disable their use even
+# when building on new hardware
+PORTABLE = ON
+# Disable the use of -Werror
+FAIL_ON_WARNINGS = OFF
+
+[cmake.defines.os=windows]
+ROCKSDB_INSTALL_ON_WINDOWS=ON
+# RocksDB hard codes the paths to the snappy libs to something
+# that doesn't exist; ignoring the usual cmake rules. As a result,
+# we can't build it with snappy without either patching rocksdb or
+# without introducing more complex logic to the build system to
+# connect the snappy build outputs to rocksdb's custom logic here.
+# Let's just turn it off on windows.
+WITH_SNAPPY=OFF
+WITH_LZ4=OFF
diff --git a/build/fbcode_builder/manifests/rust-shed b/build/fbcode_builder/manifests/rust-shed
new file mode 100644
index 000000000..c94b3fdd6
--- /dev/null
+++ b/build/fbcode_builder/manifests/rust-shed
@@ -0,0 +1,34 @@
+[manifest]
+name = rust-shed
+fbsource_path = fbcode/common/rust/shed
+shipit_project = rust-shed
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebookexperimental/rust-shed.git
+
+[build]
+builder = cargo
+
+[cargo]
+build_doc = true
+workspace_dir =
+
+[shipit.pathmap]
+fbcode/common/rust/shed = shed
+fbcode/common/rust/shed/public_autocargo = shed
+fbcode/common/rust/shed/public_tld = .
+tools/rust/ossconfigs = .
+
+[shipit.strip]
+^fbcode/common/rust/shed/(?!public_autocargo|public_tld).+/Cargo\.toml$
+
+[dependencies]
+fbthrift
+# macOS doesn't expose the openssl api so we need to build our own.
+# Windows doesn't have openssl and Linux might contain an old version,
+# so we get to provide it
+openssl
+
+[dependencies.fb=on]
+rust
diff --git a/build/fbcode_builder/manifests/snappy b/build/fbcode_builder/manifests/snappy
new file mode 100644
index 000000000..2f46a7734
--- /dev/null
+++ b/build/fbcode_builder/manifests/snappy
@@ -0,0 +1,25 @@
+[manifest]
+name = snappy
+
+[rpms]
+snappy
+snappy-devel
+
+[debs]
+libsnappy-dev
+
+[download]
+url = https://github.com/google/snappy/archive/1.1.7.tar.gz
+sha256 = 3dfa02e873ff51a11ee02b9ca391807f0c8ea0529a4924afa645fbf97163f9d4
+
+[build]
+builder = cmake
+subdir = snappy-1.1.7
+
+[cmake.defines]
+SNAPPY_BUILD_TESTS = OFF
+
+# Avoid problems like `relocation R_X86_64_PC32 against symbol` on ELF systems
+# when linking rocksdb, which builds PIC even when building a static lib
+[cmake.defines.os=linux]
+BUILD_SHARED_LIBS = ON
diff --git a/build/fbcode_builder/manifests/sqlite3 b/build/fbcode_builder/manifests/sqlite3
new file mode 100644
index 000000000..2463f5761
--- /dev/null
+++ b/build/fbcode_builder/manifests/sqlite3
@@ -0,0 +1,21 @@
+[manifest]
+name = sqlite3
+
+[rpms]
+sqlite-devel
+sqlite-libs
+
+[debs]
+libsqlite3-dev
+
+[download]
+url = https://sqlite.org/2019/sqlite-amalgamation-3280000.zip
+sha256 = d02fc4e95cfef672b45052e221617a050b7f2e20103661cda88387349a9b1327
+
+[dependencies]
+cmake
+ninja
+
+[build]
+builder = sqlite
+subdir = sqlite-amalgamation-3280000
diff --git a/build/fbcode_builder/manifests/sqlite3-bin b/build/fbcode_builder/manifests/sqlite3-bin
new file mode 100644
index 000000000..aa138d499
--- /dev/null
+++ b/build/fbcode_builder/manifests/sqlite3-bin
@@ -0,0 +1,28 @@
+[manifest]
+name = sqlite3-bin
+
+[rpms]
+sqlite
+
+[debs]
+sqlite3
+
+[download.os=linux]
+url = https://github.com/sqlite/sqlite/archive/version-3.33.0.tar.gz
+sha256 = 48e5f989eefe9af0ac758096f82ead0f3c7b58118ac17cc5810495bd5084a331
+
+[build.os=linux]
+builder = autoconf
+subdir = sqlite-version-3.33.0
+
+[build.not(os=linux)]
+# MacOS comes with sqlite3 preinstalled and don't need Windows here
+builder = nop
+
+[dependencies.os=linux]
+tcl
+
+[autoconf.args]
+# This flag disabled tcl as a runtime library used for some functionality,
+# but tcl is still a required dependency as it is used by the build files
+--disable-tcl
diff --git a/build/fbcode_builder/manifests/tcl b/build/fbcode_builder/manifests/tcl
new file mode 100644
index 000000000..5e9892f37
--- /dev/null
+++ b/build/fbcode_builder/manifests/tcl
@@ -0,0 +1,20 @@
+[manifest]
+name = tcl
+
+[rpms]
+tcl
+
+[debs]
+tcl
+
+[download]
+url = https://github.com/tcltk/tcl/archive/core-8-7a3.tar.gz
+sha256 = 22d748f0c9652f3ecc195fed3f24a1b6eea8d449003085e6651197951528982e
+
+[build.os=linux]
+builder = autoconf
+subdir = tcl-core-8-7a3/unix
+
+[build.not(os=linux)]
+# This is for sqlite3 on Linux for now
+builder = nop
diff --git a/build/fbcode_builder/manifests/tree b/build/fbcode_builder/manifests/tree
new file mode 100644
index 000000000..0c982f35a
--- /dev/null
+++ b/build/fbcode_builder/manifests/tree
@@ -0,0 +1,34 @@
+[manifest]
+name = tree
+
+[rpms]
+tree
+
+[debs]
+tree
+
+[download.os=linux]
+url = https://salsa.debian.org/debian/tree-packaging/-/archive/debian/1.8.0-1/tree-packaging-debian-1.8.0-1.tar.gz
+sha256 = a841eee1d52bfd64a48f54caab9937b9bd92935055c48885c4ab1ae4dab7fae5
+
+[download.os=darwin]
+# The official package of tree source requires users of non-Linux platform to
+# comment/uncomment certain lines in the Makefile to build for their platform.
+# Besauce getdeps.py doesn't have that functionality we just use this custom
+# fork of tree which has proper lines uncommented for a OSX build
+url = https://github.com/lukaspiatkowski/tree-command/archive/debian/1.8.0-1-macos.tar.gz
+sha256 = 9cbe889553d95cf5a2791dd0743795d46a3c092c5bba691769c0e5c52e11229e
+
+[build.os=linux]
+builder = make
+subdir = tree-packaging-debian-1.8.0-1
+
+[build.os=darwin]
+builder = make
+subdir = tree-command-debian-1.8.0-1-macos
+
+[build.os=windows]
+builder = nop
+
+[make.install_args]
+install
diff --git a/build/fbcode_builder/manifests/wangle b/build/fbcode_builder/manifests/wangle
new file mode 100644
index 000000000..6b330d620
--- /dev/null
+++ b/build/fbcode_builder/manifests/wangle
@@ -0,0 +1,27 @@
+[manifest]
+name = wangle
+fbsource_path = fbcode/wangle
+shipit_project = wangle
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/wangle.git
+
+[build]
+builder = cmake
+subdir = wangle
+
+[cmake.defines.test=on]
+BUILD_TESTS=ON
+
+[cmake.defines.test=off]
+BUILD_TESTS=OFF
+
+[dependencies]
+folly
+googletest
+fizz
+
+[shipit.pathmap]
+fbcode/wangle/public_tld = .
+fbcode/wangle = wangle
diff --git a/build/fbcode_builder/manifests/watchman b/build/fbcode_builder/manifests/watchman
new file mode 100644
index 000000000..0fcd6bb9f
--- /dev/null
+++ b/build/fbcode_builder/manifests/watchman
@@ -0,0 +1,45 @@
+[manifest]
+name = watchman
+fbsource_path = fbcode/watchman
+shipit_project = watchman
+shipit_fbcode_builder = true
+
+[git]
+repo_url = https://github.com/facebook/watchman.git
+
+[build]
+builder = cmake
+
+[dependencies]
+boost
+cpptoml
+fb303
+fbthrift
+folly
+pcre
+googletest
+
+[dependencies.fb=on]
+rust
+
+[shipit.pathmap]
+fbcode/watchman = watchman
+fbcode/watchman/oss = .
+fbcode/eden/fs = eden/fs
+
+[shipit.strip]
+^fbcode/eden/fs/(?!.*\.thrift|service/shipit_test_file\.txt)
+
+[cmake.defines.fb=on]
+ENABLE_EDEN_SUPPORT=ON
+
+# FB macos specific settings
+[cmake.defines.all(fb=on,os=darwin)]
+# this path is coupled with the FB internal watchman-osx.spec
+WATCHMAN_STATE_DIR=/opt/facebook/watchman/var/run/watchman
+# tell cmake not to try to create /opt/facebook/...
+INSTALL_WATCHMAN_STATE_DIR=OFF
+USE_SYS_PYTHON=OFF
+
+[depends.environment]
+WATCHMAN_VERSION_OVERRIDE
diff --git a/build/fbcode_builder/manifests/yaml-cpp b/build/fbcode_builder/manifests/yaml-cpp
new file mode 100644
index 000000000..bffa540fe
--- /dev/null
+++ b/build/fbcode_builder/manifests/yaml-cpp
@@ -0,0 +1,20 @@
+[manifest]
+name = yaml-cpp
+
+[download]
+url = https://github.com/jbeder/yaml-cpp/archive/yaml-cpp-0.6.2.tar.gz
+sha256 = e4d8560e163c3d875fd5d9e5542b5fd5bec810febdcba61481fe5fc4e6b1fd05
+
+[build.os=linux]
+builder = cmake
+subdir = yaml-cpp-yaml-cpp-0.6.2
+
+[build.not(os=linux)]
+builder = nop
+
+[dependencies]
+boost
+googletest
+
+[cmake.defines]
+YAML_CPP_BUILD_TESTS=OFF
diff --git a/build/fbcode_builder/manifests/zlib b/build/fbcode_builder/manifests/zlib
new file mode 100644
index 000000000..8df0e3e48
--- /dev/null
+++ b/build/fbcode_builder/manifests/zlib
@@ -0,0 +1,22 @@
+[manifest]
+name = zlib
+
+[rpms]
+zlib-devel
+zlib-static
+
+[debs]
+zlib1g-dev
+
+[download]
+url = http://www.zlib.net/zlib-1.2.11.tar.gz
+sha256 = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
+
+[build.os=windows]
+builder = cmake
+subdir = zlib-1.2.11
+
+# Every platform but windows ships with zlib, so just skip
+# building on not(windows)
+[build.not(os=windows)]
+builder = nop
diff --git a/build/fbcode_builder/manifests/zstd b/build/fbcode_builder/manifests/zstd
new file mode 100644
index 000000000..71db9d5c6
--- /dev/null
+++ b/build/fbcode_builder/manifests/zstd
@@ -0,0 +1,28 @@
+[manifest]
+name = zstd
+
+[rpms]
+libzstd-devel
+libzstd
+
+[debs]
+libzstd-dev
+
+[download]
+url = https://github.com/facebook/zstd/releases/download/v1.4.5/zstd-1.4.5.tar.gz
+sha256 = 98e91c7c6bf162bf90e4e70fdbc41a8188b9fa8de5ad840c401198014406ce9e
+
+[build]
+builder = cmake
+subdir = zstd-1.4.5/build/cmake
+
+# The zstd cmake build explicitly sets the install name
+# for the shared library in such a way that cmake discards
+# the path to the library from the install_name, rendering
+# the library non-resolvable during the build. The short
+# term solution for this is just to link static on macos.
+[cmake.defines.os=darwin]
+ZSTD_BUILD_SHARED = OFF
+
+[cmake.defines.os=windows]
+ZSTD_BUILD_SHARED = OFF
diff --git a/build/fbcode_builder/parse_args.py b/build/fbcode_builder/parse_args.py
new file mode 100644
index 000000000..8d5e35330
--- /dev/null
+++ b/build/fbcode_builder/parse_args.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"Argument parsing logic shared by all fbcode_builder CLI tools."
+
+import argparse
+import logging
+
+from shell_quoting import raw_shell, ShellQuoted
+
+
+def parse_args_to_fbcode_builder_opts(add_args_fn, top_level_opts, opts, help):
+ """
+
+ Provides some standard arguments: --debug, --option, --shell-quoted-option
+
+ Then, calls `add_args_fn(parser)` to add application-specific arguments.
+
+ `opts` are first used as defaults for the various command-line
+ arguments. Then, the parsed arguments are mapped back into `opts`,
+ which then become the values for `FBCodeBuilder.option()`, to be used
+ both by the builder and by `get_steps_fn()`.
+
+ `help` is printed in response to the `--help` argument.
+
+ """
+ top_level_opts = set(top_level_opts)
+
+ parser = argparse.ArgumentParser(
+ description=help, formatter_class=argparse.RawDescriptionHelpFormatter
+ )
+
+ add_args_fn(parser)
+
+ parser.add_argument(
+ "--option",
+ nargs=2,
+ metavar=("KEY", "VALUE"),
+ action="append",
+ default=[
+ (k, v)
+ for k, v in opts.items()
+ if k not in top_level_opts and not isinstance(v, ShellQuoted)
+ ],
+ help="Set project-specific options. These are assumed to be raw "
+ "strings, to be shell-escaped as needed. Default: %(default)s.",
+ )
+ parser.add_argument(
+ "--shell-quoted-option",
+ nargs=2,
+ metavar=("KEY", "VALUE"),
+ action="append",
+ default=[
+ (k, raw_shell(v))
+ for k, v in opts.items()
+ if k not in top_level_opts and isinstance(v, ShellQuoted)
+ ],
+ help="Set project-specific options. These are assumed to be shell-"
+ "quoted, and may be used in commands as-is. Default: %(default)s.",
+ )
+
+ parser.add_argument("--debug", action="store_true", help="Log more")
+ args = parser.parse_args()
+
+ logging.basicConfig(
+ level=logging.DEBUG if args.debug else logging.INFO,
+ format="%(levelname)s: %(message)s",
+ )
+
+ # Map command-line args back into opts.
+ logging.debug("opts before command-line arguments: {0}".format(opts))
+
+ new_opts = {}
+ for key in top_level_opts:
+ val = getattr(args, key)
+ # Allow clients to unset a default by passing a value of None in opts
+ if val is not None:
+ new_opts[key] = val
+ for key, val in args.option:
+ new_opts[key] = val
+ for key, val in args.shell_quoted_option:
+ new_opts[key] = ShellQuoted(val)
+
+ logging.debug("opts after command-line arguments: {0}".format(new_opts))
+
+ return new_opts
diff --git a/build/fbcode_builder/shell_builder.py b/build/fbcode_builder/shell_builder.py
new file mode 100644
index 000000000..e0d5429ad
--- /dev/null
+++ b/build/fbcode_builder/shell_builder.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"""
+shell_builder.py allows running the fbcode_builder logic
+on the host rather than in a container.
+
+It emits a bash script with set -exo pipefail configured such that
+any failing step will cause the script to exit with failure.
+
+== How to run it? ==
+
+cd build
+python fbcode_builder/shell_builder.py > ~/run.sh
+bash ~/run.sh
+"""
+
+import distutils.spawn
+import os
+
+from fbcode_builder import FBCodeBuilder
+from shell_quoting import raw_shell, shell_comment, shell_join, ShellQuoted
+from utils import recursively_flatten_list
+
+
+class ShellFBCodeBuilder(FBCodeBuilder):
+ def _render_impl(self, steps):
+ return raw_shell(shell_join("\n", recursively_flatten_list(steps)))
+
+ def set_env(self, key, value):
+ return ShellQuoted("export {key}={val}").format(key=key, val=value)
+
+ def workdir(self, dir):
+ return [
+ ShellQuoted("mkdir -p {d} && cd {d}").format(d=dir),
+ ]
+
+ def run(self, shell_cmd):
+ return ShellQuoted("{cmd}").format(cmd=shell_cmd)
+
+ def step(self, name, actions):
+ assert "\n" not in name, "Name {0} would span > 1 line".format(name)
+ b = ShellQuoted("")
+ return [ShellQuoted("### {0} ###".format(name)), b] + actions + [b]
+
+ def setup(self):
+ steps = (
+ [
+ ShellQuoted("set -exo pipefail"),
+ ]
+ + self.create_python_venv()
+ + self.python_venv()
+ )
+ if self.has_option("ccache_dir"):
+ ccache_dir = self.option("ccache_dir")
+ steps += [
+ ShellQuoted(
+ # Set CCACHE_DIR before the `ccache` invocations below.
+ "export CCACHE_DIR={ccache_dir} "
+ 'CC="ccache ${{CC:-gcc}}" CXX="ccache ${{CXX:-g++}}"'
+ ).format(ccache_dir=ccache_dir)
+ ]
+ return steps
+
+ def comment(self, comment):
+ return shell_comment(comment)
+
+ def copy_local_repo(self, dir, dest_name):
+ return [
+ ShellQuoted("cp -r {dir} {dest_name}").format(dir=dir, dest_name=dest_name),
+ ]
+
+
+def find_project_root():
+ here = os.path.dirname(os.path.realpath(__file__))
+ maybe_root = os.path.dirname(os.path.dirname(here))
+ if os.path.isdir(os.path.join(maybe_root, ".git")):
+ return maybe_root
+ raise RuntimeError(
+ "I expected shell_builder.py to be in the "
+ "build/fbcode_builder subdir of a git repo"
+ )
+
+
+def persistent_temp_dir(repo_root):
+ escaped = repo_root.replace("/", "sZs").replace("\\", "sZs").replace(":", "")
+ return os.path.join(os.path.expandvars("$HOME"), ".fbcode_builder-" + escaped)
+
+
+if __name__ == "__main__":
+ from utils import read_fbcode_builder_config, build_fbcode_builder_config
+
+ repo_root = find_project_root()
+ temp = persistent_temp_dir(repo_root)
+
+ config = read_fbcode_builder_config("fbcode_builder_config.py")
+ builder = ShellFBCodeBuilder(projects_dir=temp)
+
+ if distutils.spawn.find_executable("ccache"):
+ builder.add_option(
+ "ccache_dir", os.environ.get("CCACHE_DIR", os.path.join(temp, ".ccache"))
+ )
+ builder.add_option("prefix", os.path.join(temp, "installed"))
+ builder.add_option("make_parallelism", 4)
+ builder.add_option(
+ "{project}:local_repo_dir".format(project=config["github_project"]), repo_root
+ )
+ make_steps = build_fbcode_builder_config(config)
+ steps = make_steps(builder)
+ print(builder.render(steps))
diff --git a/build/fbcode_builder/shell_quoting.py b/build/fbcode_builder/shell_quoting.py
new file mode 100644
index 000000000..7429226bd
--- /dev/null
+++ b/build/fbcode_builder/shell_quoting.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"""
+
+Almost every FBCodeBuilder string is ultimately passed to a shell. Escaping
+too little or too much tends to be the most common error. The utilities in
+this file give a systematic way of avoiding such bugs:
+ - When you write literal strings destined for the shell, use `ShellQuoted`.
+ - When these literal strings are parameterized, use `ShellQuoted.format`.
+ - Any parameters that are raw strings get `shell_quote`d automatically,
+ while any ShellQuoted parameters will be left intact.
+ - Use `path_join` to join path components.
+ - Use `shell_join` to join already-quoted command arguments or shell lines.
+
+"""
+
+import os
+from collections import namedtuple
+
+
+class ShellQuoted(namedtuple("ShellQuoted", ("do_not_use_raw_str",))):
+ """
+
+ Wrap a string with this to make it transparent to shell_quote(). It
+ will almost always suffice to use ShellQuoted.format(), path_join(),
+ or shell_join().
+
+ If you really must, use raw_shell() to access the raw string.
+
+ """
+
+ def __new__(cls, s):
+ "No need to nest ShellQuoted."
+ return super(ShellQuoted, cls).__new__(
+ cls, s.do_not_use_raw_str if isinstance(s, ShellQuoted) else s
+ )
+
+ def __str__(self):
+ raise RuntimeError(
+ "One does not simply convert {0} to a string -- use path_join() "
+ "or ShellQuoted.format() instead".format(repr(self))
+ )
+
+ def __repr__(self):
+ return "{0}({1})".format(self.__class__.__name__, repr(self.do_not_use_raw_str))
+
+ def format(self, **kwargs):
+ """
+
+ Use instead of str.format() when the arguments are either
+ `ShellQuoted()` or raw strings needing to be `shell_quote()`d.
+
+ Positional args are deliberately not supported since they are more
+ error-prone.
+
+ """
+ return ShellQuoted(
+ self.do_not_use_raw_str.format(
+ **dict(
+ (k, shell_quote(v).do_not_use_raw_str) for k, v in kwargs.items()
+ )
+ )
+ )
+
+
+def shell_quote(s):
+ "Quotes a string if it is not already quoted"
+ return (
+ s
+ if isinstance(s, ShellQuoted)
+ else ShellQuoted("'" + str(s).replace("'", "'\\''") + "'")
+ )
+
+
+def raw_shell(s):
+ "Not a member of ShellQuoted so we get a useful error for raw strings"
+ if isinstance(s, ShellQuoted):
+ return s.do_not_use_raw_str
+ raise RuntimeError("{0} should have been ShellQuoted".format(s))
+
+
+def shell_join(delim, it):
+ "Joins an iterable of ShellQuoted with a delimiter between each two"
+ return ShellQuoted(delim.join(raw_shell(s) for s in it))
+
+
+def path_join(*args):
+ "Joins ShellQuoted and raw pieces of paths to make a shell-quoted path"
+ return ShellQuoted(os.path.join(*[raw_shell(shell_quote(s)) for s in args]))
+
+
+def shell_comment(c):
+ "Do not shell-escape raw strings in comments, but do handle line breaks."
+ return ShellQuoted("# {c}").format(
+ c=ShellQuoted(
+ (raw_shell(c) if isinstance(c, ShellQuoted) else c).replace("\n", "\n# ")
+ )
+ )
diff --git a/build/fbcode_builder/specs/__init__.py b/build/fbcode_builder/specs/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/build/fbcode_builder/specs/fbthrift.py b/build/fbcode_builder/specs/fbthrift.py
new file mode 100644
index 000000000..f0c7e7ac7
--- /dev/null
+++ b/build/fbcode_builder/specs/fbthrift.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fizz as fizz
+import specs.fmt as fmt
+import specs.folly as folly
+import specs.sodium as sodium
+import specs.wangle as wangle
+import specs.zstd as zstd
+
+
+def fbcode_builder_spec(builder):
+ return {
+ "depends_on": [fmt, folly, fizz, sodium, wangle, zstd],
+ "steps": [
+ builder.fb_github_cmake_install("fbthrift/thrift"),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/fbzmq.py b/build/fbcode_builder/specs/fbzmq.py
new file mode 100644
index 000000000..78c8bc9dd
--- /dev/null
+++ b/build/fbcode_builder/specs/fbzmq.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fbthrift as fbthrift
+import specs.fmt as fmt
+import specs.folly as folly
+import specs.gmock as gmock
+import specs.sodium as sodium
+from shell_quoting import ShellQuoted
+
+
+def fbcode_builder_spec(builder):
+ builder.add_option("zeromq/libzmq:git_hash", "v4.2.2")
+ return {
+ "depends_on": [fmt, folly, fbthrift, gmock, sodium],
+ "steps": [
+ builder.github_project_workdir("zeromq/libzmq", "."),
+ builder.step(
+ "Build and install zeromq/libzmq",
+ [
+ builder.run(ShellQuoted("./autogen.sh")),
+ builder.configure(),
+ builder.make_and_install(),
+ ],
+ ),
+ builder.fb_github_project_workdir("fbzmq/_build", "facebook"),
+ builder.step(
+ "Build and install fbzmq/",
+ [
+ builder.cmake_configure("fbzmq/_build"),
+ # we need the pythonpath to find the thrift compiler
+ builder.run(
+ ShellQuoted(
+ 'PYTHONPATH="$PYTHONPATH:"{p}/lib/python2.7/site-packages '
+ "make -j {n}"
+ ).format(
+ p=builder.option("prefix"),
+ n=builder.option("make_parallelism"),
+ )
+ ),
+ builder.run(ShellQuoted("make install")),
+ ],
+ ),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/fizz.py b/build/fbcode_builder/specs/fizz.py
new file mode 100644
index 000000000..82f26e67c
--- /dev/null
+++ b/build/fbcode_builder/specs/fizz.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fmt as fmt
+import specs.folly as folly
+import specs.gmock as gmock
+import specs.sodium as sodium
+import specs.zstd as zstd
+
+
+def fbcode_builder_spec(builder):
+ builder.add_option(
+ "fizz/fizz/build:cmake_defines",
+ {
+ # Fizz's build is kind of broken, in the sense that both `mvfst`
+ # and `proxygen` depend on files that are only installed with
+ # `BUILD_TESTS` enabled, e.g. `fizz/crypto/test/TestUtil.h`.
+ "BUILD_TESTS": "ON"
+ },
+ )
+ return {
+ "depends_on": [gmock, fmt, folly, sodium, zstd],
+ "steps": [
+ builder.fb_github_cmake_install(
+ "fizz/fizz/build", github_org="facebookincubator"
+ )
+ ],
+ }
diff --git a/build/fbcode_builder/specs/fmt.py b/build/fbcode_builder/specs/fmt.py
new file mode 100644
index 000000000..395316799
--- /dev/null
+++ b/build/fbcode_builder/specs/fmt.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+
+def fbcode_builder_spec(builder):
+ builder.add_option("fmtlib/fmt:git_hash", "6.2.1")
+ builder.add_option(
+ "fmtlib/fmt:cmake_defines",
+ {
+ # Avoids a bizarred failure to run tests in Bistro:
+ # test_crontab_selector: error while loading shared libraries:
+ # libfmt.so.6: cannot open shared object file:
+ # No such file or directory
+ "BUILD_SHARED_LIBS": "OFF",
+ },
+ )
+ return {
+ "steps": [
+ builder.github_project_workdir("fmtlib/fmt", "build"),
+ builder.cmake_install("fmtlib/fmt"),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/folly.py b/build/fbcode_builder/specs/folly.py
new file mode 100644
index 000000000..e89d5e955
--- /dev/null
+++ b/build/fbcode_builder/specs/folly.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fmt as fmt
+
+
+def fbcode_builder_spec(builder):
+ return {
+ "depends_on": [fmt],
+ "steps": [
+ # on macOS the filesystem is typically case insensitive.
+ # We need to ensure that the CWD is not the folly source
+ # dir when we build, otherwise the system will decide
+ # that `folly/String.h` is the file it wants when including
+ # `string.h` and the build will fail.
+ builder.fb_github_project_workdir("folly/_build"),
+ builder.cmake_install("facebook/folly"),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/gmock.py b/build/fbcode_builder/specs/gmock.py
new file mode 100644
index 000000000..774137301
--- /dev/null
+++ b/build/fbcode_builder/specs/gmock.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+
+def fbcode_builder_spec(builder):
+ builder.add_option("google/googletest:git_hash", "release-1.8.1")
+ builder.add_option(
+ "google/googletest:cmake_defines",
+ {
+ "BUILD_GTEST": "ON",
+ # Avoid problems with MACOSX_RPATH
+ "BUILD_SHARED_LIBS": "OFF",
+ },
+ )
+ return {
+ "steps": [
+ builder.github_project_workdir("google/googletest", "build"),
+ builder.cmake_install("google/googletest"),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/mvfst.py b/build/fbcode_builder/specs/mvfst.py
new file mode 100644
index 000000000..ce8b003d9
--- /dev/null
+++ b/build/fbcode_builder/specs/mvfst.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fizz as fizz
+import specs.folly as folly
+import specs.gmock as gmock
+
+
+def fbcode_builder_spec(builder):
+ # Projects that **depend** on mvfst should don't need to build tests.
+ builder.add_option(
+ "mvfst/build:cmake_defines",
+ {
+ # This is set to ON in the mvfst `fbcode_builder_config.py`
+ "BUILD_TESTS": "OFF"
+ },
+ )
+ return {
+ "depends_on": [gmock, folly, fizz],
+ "steps": [
+ builder.fb_github_cmake_install(
+ "mvfst/build", github_org="facebookincubator"
+ )
+ ],
+ }
diff --git a/build/fbcode_builder/specs/proxygen.py b/build/fbcode_builder/specs/proxygen.py
new file mode 100644
index 000000000..6a584d710
--- /dev/null
+++ b/build/fbcode_builder/specs/proxygen.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fizz as fizz
+import specs.fmt as fmt
+import specs.folly as folly
+import specs.gmock as gmock
+import specs.mvfst as mvfst
+import specs.sodium as sodium
+import specs.wangle as wangle
+import specs.zstd as zstd
+
+
+def fbcode_builder_spec(builder):
+ # Projects that **depend** on proxygen should don't need to build tests
+ # or QUIC support.
+ builder.add_option(
+ "proxygen/proxygen:cmake_defines",
+ {
+ # These 2 are set to ON in `proxygen_quic.py`
+ "BUILD_QUIC": "OFF",
+ "BUILD_TESTS": "OFF",
+ # For bistro
+ "BUILD_SHARED_LIBS": "OFF",
+ },
+ )
+
+ return {
+ "depends_on": [gmock, fmt, folly, wangle, fizz, sodium, zstd, mvfst],
+ "steps": [builder.fb_github_cmake_install("proxygen/proxygen", "..")],
+ }
diff --git a/build/fbcode_builder/specs/proxygen_quic.py b/build/fbcode_builder/specs/proxygen_quic.py
new file mode 100644
index 000000000..b4959fb89
--- /dev/null
+++ b/build/fbcode_builder/specs/proxygen_quic.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fizz as fizz
+import specs.fmt as fmt
+import specs.folly as folly
+import specs.gmock as gmock
+import specs.mvfst as mvfst
+import specs.sodium as sodium
+import specs.wangle as wangle
+import specs.zstd as zstd
+
+# DO NOT USE THIS AS A LIBRARY -- this is currently effectively just part
+# ofthe implementation of proxygen's `fbcode_builder_config.py`. This is
+# why this builds tests and sets `BUILD_QUIC`.
+def fbcode_builder_spec(builder):
+ builder.add_option(
+ "proxygen/proxygen:cmake_defines",
+ {"BUILD_QUIC": "ON", "BUILD_SHARED_LIBS": "OFF", "BUILD_TESTS": "ON"},
+ )
+ return {
+ "depends_on": [gmock, fmt, folly, wangle, fizz, sodium, zstd, mvfst],
+ "steps": [builder.fb_github_cmake_install("proxygen/proxygen", "..")],
+ }
diff --git a/build/fbcode_builder/specs/re2.py b/build/fbcode_builder/specs/re2.py
new file mode 100644
index 000000000..cf4e08a0b
--- /dev/null
+++ b/build/fbcode_builder/specs/re2.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+
+def fbcode_builder_spec(builder):
+ return {
+ "steps": [
+ builder.github_project_workdir("google/re2", "build"),
+ builder.cmake_install("google/re2"),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/rocksdb.py b/build/fbcode_builder/specs/rocksdb.py
new file mode 100644
index 000000000..9ebfe4739
--- /dev/null
+++ b/build/fbcode_builder/specs/rocksdb.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+
+def fbcode_builder_spec(builder):
+ builder.add_option(
+ "rocksdb/_build:cmake_defines",
+ {
+ "USE_RTTI": "1",
+ "PORTABLE": "ON",
+ },
+ )
+ return {
+ "steps": [
+ builder.fb_github_cmake_install("rocksdb/_build"),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/sodium.py b/build/fbcode_builder/specs/sodium.py
new file mode 100644
index 000000000..8be9833cf
--- /dev/null
+++ b/build/fbcode_builder/specs/sodium.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from shell_quoting import ShellQuoted
+
+
+def fbcode_builder_spec(builder):
+ builder.add_option("jedisct1/libsodium:git_hash", "stable")
+ return {
+ "steps": [
+ builder.github_project_workdir("jedisct1/libsodium", "."),
+ builder.step(
+ "Build and install jedisct1/libsodium",
+ [
+ builder.run(ShellQuoted("./autogen.sh")),
+ builder.configure(),
+ builder.make_and_install(),
+ ],
+ ),
+ ],
+ }
diff --git a/build/fbcode_builder/specs/wangle.py b/build/fbcode_builder/specs/wangle.py
new file mode 100644
index 000000000..62b5b3c86
--- /dev/null
+++ b/build/fbcode_builder/specs/wangle.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import specs.fizz as fizz
+import specs.fmt as fmt
+import specs.folly as folly
+import specs.gmock as gmock
+import specs.sodium as sodium
+
+
+def fbcode_builder_spec(builder):
+ # Projects that **depend** on wangle need not spend time on tests.
+ builder.add_option(
+ "wangle/wangle/build:cmake_defines",
+ {
+ # This is set to ON in the wangle `fbcode_builder_config.py`
+ "BUILD_TESTS": "OFF"
+ },
+ )
+ return {
+ "depends_on": [gmock, fmt, folly, fizz, sodium],
+ "steps": [builder.fb_github_cmake_install("wangle/wangle/build")],
+ }
diff --git a/build/fbcode_builder/specs/zstd.py b/build/fbcode_builder/specs/zstd.py
new file mode 100644
index 000000000..14d9a1249
--- /dev/null
+++ b/build/fbcode_builder/specs/zstd.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from shell_quoting import ShellQuoted
+
+
+def fbcode_builder_spec(builder):
+ # This API should change rarely, so build the latest tag instead of master.
+ builder.add_option(
+ "facebook/zstd:git_hash",
+ ShellQuoted("$(git describe --abbrev=0 --tags origin/master)"),
+ )
+ return {
+ "steps": [
+ builder.github_project_workdir("facebook/zstd", "."),
+ builder.step(
+ "Build and install zstd",
+ [
+ builder.make_and_install(
+ make_vars={
+ "PREFIX": builder.option("prefix"),
+ }
+ )
+ ],
+ ),
+ ],
+ }
diff --git a/build/fbcode_builder/travis.yml b/build/fbcode_builder/travis.yml
new file mode 100644
index 000000000..d2bb60778
--- /dev/null
+++ b/build/fbcode_builder/travis.yml
@@ -0,0 +1,51 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Facebook projects that use `fbcode_builder` for continuous integration
+# share this Travis configuration to run builds via Docker.
+
+# Docker disables IPv6 in containers by default. Enable it for unit tests that need [::1].
+before_script:
+ - if [[ "$TRAVIS_OS_NAME" != "osx" ]];
+ then
+ sudo build/fbcode_builder/docker_enable_ipv6.sh;
+ fi
+
+env:
+ global:
+ - travis_cache_dir=$HOME/travis_ccache
+ # Travis times out after 50 minutes. Very generously leave 10 minutes
+ # for setup (e.g. cache download, compression, and upload), so we never
+ # fail to cache the progress we made.
+ - docker_build_timeout=40m
+
+cache:
+ # Our build caches can be 200-300MB, so increase the timeout to 7 minutes
+ # to make sure we never fail to cache the progress we made.
+ timeout: 420
+ directories:
+ - $HOME/travis_ccache # see docker_build_with_ccache.sh
+
+# Ugh, `services:` must be in the matrix, or we get `docker: command not found`
+# https://github.com/travis-ci/travis-ci/issues/5142
+matrix:
+ include:
+ - env: ['os_image=ubuntu:18.04', gcc_version=7]
+ services: [docker]
+
+addons:
+ apt:
+ packages: python2.7
+
+script:
+ # We don't want to write the script inline because of Travis kludginess --
+ # it looks like it escapes " and \ in scripts when using `matrix:`.
+ - ./build/fbcode_builder/travis_docker_build.sh
diff --git a/build/fbcode_builder/travis_docker_build.sh b/build/fbcode_builder/travis_docker_build.sh
new file mode 100755
index 000000000..d4cba10ef
--- /dev/null
+++ b/build/fbcode_builder/travis_docker_build.sh
@@ -0,0 +1,42 @@
+#!/bin/bash -uex
+# Copyright (c) Facebook, Inc. and its affiliates.
+# .travis.yml in the top-level dir explains why this is a separate script.
+# Read the docs: ./make_docker_context.py --help
+
+os_image=${os_image?Must be set by Travis}
+gcc_version=${gcc_version?Must be set by Travis}
+make_parallelism=${make_parallelism:-4}
+# ccache is off unless requested
+travis_cache_dir=${travis_cache_dir:-}
+# The docker build never times out, unless specified
+docker_build_timeout=${docker_build_timeout:-}
+
+cur_dir="$(realpath "$(dirname "$0")")"
+
+if [[ "$travis_cache_dir" == "" ]]; then
+ echo "ccache disabled, enable by setting env. var. travis_cache_dir"
+ ccache_tgz=""
+elif [[ -e "$travis_cache_dir/ccache.tgz" ]]; then
+ ccache_tgz="$travis_cache_dir/ccache.tgz"
+else
+ echo "$travis_cache_dir/ccache.tgz does not exist, starting with empty cache"
+ ccache_tgz=$(mktemp)
+ tar -T /dev/null -czf "$ccache_tgz"
+fi
+
+docker_context_dir=$(
+ cd "$cur_dir/.." # Let the script find our fbcode_builder_config.py
+ "$cur_dir/make_docker_context.py" \
+ --os-image "$os_image" \
+ --gcc-version "$gcc_version" \
+ --make-parallelism "$make_parallelism" \
+ --local-repo-dir "$cur_dir/../.." \
+ --ccache-tgz "$ccache_tgz"
+)
+cd "${docker_context_dir?Failed to make Docker context directory}"
+
+# Make it safe to iterate on the .sh in the tree while the script runs.
+cp "$cur_dir/docker_build_with_ccache.sh" .
+exec ./docker_build_with_ccache.sh \
+ --build-timeout "$docker_build_timeout" \
+ "$travis_cache_dir"
diff --git a/build/fbcode_builder/utils.py b/build/fbcode_builder/utils.py
new file mode 100644
index 000000000..02459a200
--- /dev/null
+++ b/build/fbcode_builder/utils.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+"Miscellaneous utility functions."
+
+import itertools
+import logging
+import os
+import shutil
+import subprocess
+import sys
+from contextlib import contextmanager
+
+
+def recursively_flatten_list(l):
+ return itertools.chain.from_iterable(
+ (recursively_flatten_list(i) if type(i) is list else (i,)) for i in l
+ )
+
+
+def run_command(*cmd, **kwargs):
+ "The stdout of most fbcode_builder utilities is meant to be parsed."
+ logging.debug("Running: {0} with {1}".format(cmd, kwargs))
+ kwargs["stdout"] = sys.stderr
+ subprocess.check_call(cmd, **kwargs)
+
+
+@contextmanager
+def make_temp_dir(d):
+ os.mkdir(d)
+ try:
+ yield d
+ finally:
+ shutil.rmtree(d, ignore_errors=True)
+
+
+def _inner_read_config(path):
+ """
+ Helper to read a named config file.
+ The grossness with the global is a workaround for this python bug:
+ https://bugs.python.org/issue21591
+ The bug prevents us from defining either a local function or a lambda
+ in the scope of read_fbcode_builder_config below.
+ """
+ global _project_dir
+ full_path = os.path.join(_project_dir, path)
+ return read_fbcode_builder_config(full_path)
+
+
+def read_fbcode_builder_config(filename):
+ # Allow one spec to read another
+ # When doing so, treat paths as relative to the config's project directory.
+ # _project_dir is a "local" for _inner_read_config; see the comments
+ # in that function for an explanation of the use of global.
+ global _project_dir
+ _project_dir = os.path.dirname(filename)
+
+ scope = {"read_fbcode_builder_config": _inner_read_config}
+ with open(filename) as config_file:
+ code = compile(config_file.read(), filename, mode="exec")
+ exec(code, scope)
+ return scope["config"]
+
+
+def steps_for_spec(builder, spec, processed_modules=None):
+ """
+ Sets `builder` configuration, and returns all the builder steps
+ necessary to build `spec` and its dependencies.
+
+ Traverses the dependencies in depth-first order, honoring the sequencing
+ in each 'depends_on' list.
+ """
+ if processed_modules is None:
+ processed_modules = set()
+ steps = []
+ for module in spec.get("depends_on", []):
+ if module not in processed_modules:
+ processed_modules.add(module)
+ steps.extend(
+ steps_for_spec(
+ builder, module.fbcode_builder_spec(builder), processed_modules
+ )
+ )
+ steps.extend(spec.get("steps", []))
+ return steps
+
+
+def build_fbcode_builder_config(config):
+ return lambda builder: builder.build(
+ steps_for_spec(builder, config["fbcode_builder_spec"](builder))
+ )
diff --git a/build/fbcode_builder_config.py b/build/fbcode_builder_config.py
new file mode 100644
index 000000000..85018bf05
--- /dev/null
+++ b/build/fbcode_builder_config.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+'fbcode_builder steps to build rsocket'
+
+import specs.rsocket as rsocket
+
+
+def fbcode_builder_spec(builder):
+ return {
+ 'depends_on': [rsocket],
+ }
+
+
+config = {
+ 'github_project': 'rsocket/rsocket-cpp',
+ 'fbcode_builder_spec': fbcode_builder_spec,
+}
diff --git a/cmake/FindFolly.cmake b/cmake/FindFolly.cmake
deleted file mode 100644
index 30736a77f..000000000
--- a/cmake/FindFolly.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-cmake_minimum_required(VERSION 3.2)
-
-include(FindPackageHandleStandardArgs)
-
-if (FOLLY_INSTALL_DIR)
- set(lib_paths ${FOLLY_INSTALL_DIR}/lib)
- set(include_paths ${FOLLY_INSTALL_DIR}/include)
-endif ()
-
-find_library(FOLLY_LIBRARY folly PATHS ${lib_paths})
-find_library(FOLLY_BENCHMARK_LIBRARY follybenchmark PATHS ${lib_paths})
-find_path(FOLLY_INCLUDE_DIR "folly/String.h" PATHS ${include_paths})
-
-find_package_handle_standard_args(Folly
- DEFAULT_MSG FOLLY_LIBRARY FOLLY_BENCHMARK_LIBRARY FOLLY_INCLUDE_DIR)
diff --git a/cmake/InstallFolly.cmake b/cmake/InstallFolly.cmake
index 432c8c7fd..2bd17460c 100644
--- a/cmake/InstallFolly.cmake
+++ b/cmake/InstallFolly.cmake
@@ -1,73 +1,22 @@
+# Copyright (c) 2018, Facebook, Inc.
+# All rights reserved.
+#
if (NOT FOLLY_INSTALL_DIR)
- set(FOLLY_INSTALL_DIR $ENV{HOME}/folly)
+ set(FOLLY_INSTALL_DIR ${CMAKE_BINARY_DIR}/folly-install)
endif ()
-# Check if the correct version of folly is already installed.
-set(FOLLY_VERSION v2017.12.11.00)
-set(FOLLY_VERSION_FILE ${FOLLY_INSTALL_DIR}/${FOLLY_VERSION})
if (RSOCKET_INSTALL_DEPS)
- if (NOT EXISTS ${FOLLY_VERSION_FILE})
- # Remove the old version of folly.
- file(REMOVE_RECURSE ${FOLLY_INSTALL_DIR})
- set(INSTALL_FOLLY True)
- endif ()
-endif ()
-
-if (INSTALL_FOLLY)
- # Build and install folly.
- ExternalProject_Add(
- folly-ext
- GIT_REPOSITORY https://github.com/facebook/folly
- GIT_TAG ${FOLLY_VERSION}
- BINARY_DIR folly-ext-prefix/src/folly-ext/folly
- CONFIGURE_COMMAND autoreconf -ivf
- COMMAND ./configure CXX=${CMAKE_CXX_COMPILER}
- --prefix=${FOLLY_INSTALL_DIR}
- BUILD_COMMAND make -j4
- INSTALL_COMMAND make install
- COMMAND cmake -E touch ${FOLLY_VERSION_FILE})
-
- set(FOLLY_INCLUDE_DIR ${FOLLY_INSTALL_DIR}/include)
- set(lib ${CMAKE_SHARED_LIBRARY_PREFIX}folly${CMAKE_SHARED_LIBRARY_SUFFIX})
- set(benchlib ${CMAKE_SHARED_LIBRARY_PREFIX}follybenchmark${CMAKE_SHARED_LIBRARY_SUFFIX})
- set(FOLLY_LIBRARY ${FOLLY_INSTALL_DIR}/lib/${lib})
- set(FOLLY_BENCHMARK_LIBRARY ${FOLLY_INSTALL_DIR}/lib/${benchlib})
-
- # CMake requires directories listed in INTERFACE_INCLUDE_DIRECTORIES to exist.
- file(MAKE_DIRECTORY ${FOLLY_INCLUDE_DIR})
-else ()
- # Use installed folly.
- find_package(Folly REQUIRED)
+ execute_process(
+ COMMAND
+ ${CMAKE_SOURCE_DIR}/scripts/build_folly.sh
+ ${CMAKE_BINARY_DIR}/folly-src
+ ${FOLLY_INSTALL_DIR}
+ RESULT_VARIABLE folly_result
+ )
+ if (NOT "${folly_result}" STREQUAL "0")
+ message(FATAL_ERROR "failed to build folly")
+ endif()
endif ()
find_package(Threads)
-find_library(EVENT_LIBRARY event)
-
-add_library(folly SHARED IMPORTED)
-set_property(TARGET folly PROPERTY IMPORTED_LOCATION ${FOLLY_LIBRARY})
-set_property(TARGET folly
- APPEND PROPERTY INTERFACE_LINK_LIBRARIES
- ${EXTRA_LINK_FLAGS} ${EVENT_LIBRARY} ${CMAKE_THREAD_LIBS_INIT})
-if (TARGET folly-ext)
- add_dependencies(folly folly-ext)
-endif ()
-
-add_library(folly-benchmark SHARED IMPORTED)
-set_property(TARGET folly-benchmark PROPERTY IMPORTED_LOCATION ${FOLLY_BENCHMARK_LIBRARY})
-set_property(TARGET folly-benchmark
- APPEND PROPERTY INTERFACE_LINK_LIBRARIES
- ${EXTRA_LINK_FLAGS} ${EVENT_LIBRARY} ${CMAKE_THREAD_LIBS_INIT})
-if (TARGET folly-ext)
- add_dependencies(folly-benchmark folly-ext)
-endif ()
-
-# Folly includes are marked as system to prevent errors on non-standard
-# extensions when compiling with -pedantic and -Werror.
-set_property(TARGET folly
- APPEND PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES ${FOLLY_INCLUDE_DIR})
-set_property(TARGET folly
- APPEND PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${FOLLY_INCLUDE_DIR})
-set_property(TARGET folly-benchmark
- APPEND PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES ${FOLLY_INCLUDE_DIR})
-set_property(TARGET folly-benchmark
- APPEND PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${FOLLY_INCLUDE_DIR})
+find_package(folly CONFIG REQUIRED PATHS ${FOLLY_INSTALL_DIR})
diff --git a/cmake/rsocket-config.cmake.in b/cmake/rsocket-config.cmake.in
new file mode 100644
index 000000000..d5579a856
--- /dev/null
+++ b/cmake/rsocket-config.cmake.in
@@ -0,0 +1,12 @@
+# Copyright (c) 2018, Facebook, Inc.
+# All rights reserved.
+
+@PACKAGE_INIT@
+
+if(NOT TARGET rsocket::ReactiveSocket)
+ include("${PACKAGE_PREFIX_DIR}/lib/cmake/rsocket/rsocket-exports.cmake")
+endif()
+
+if (NOT rsocket_FIND_QUIETLY)
+ message(STATUS "Found rsocket: ${PACKAGE_PREFIX_DIR}")
+endif()
diff --git a/devtools/format_all.sh b/devtools/format_all.sh
index aed32b572..235b985e2 100755
--- a/devtools/format_all.sh
+++ b/devtools/format_all.sh
@@ -1,4 +1,7 @@
#!/usr/bin/env bash
+#
+# Copyright 2004-present Facebook. All Rights Reserved.
+#
set -xue
cd "$(dirname "$0")/.."
diff --git a/examples/conditional-request-handling/JsonRequestHandler.cpp b/examples/conditional-request-handling/JsonRequestHandler.cpp
deleted file mode 100644
index c19d40032..000000000
--- a/examples/conditional-request-handling/JsonRequestHandler.cpp
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
-
-#include "JsonRequestHandler.h"
-#include
-#include "yarpl/Flowable.h"
-
-using namespace rsocket;
-using namespace yarpl::flowable;
-
-/// Handles a new inbound Stream requested by the other end.
-yarpl::Reference>
-JsonRequestResponder::handleRequestStream(Payload request, StreamId) {
- LOG(INFO) << "JsonRequestResponder.handleRequestStream " << request;
-
- // string from payload data
- auto requestString = request.moveDataToString();
-
- return Flowables::range(1, 100)->map([name = std::move(requestString)](
- int64_t v) {
- std::stringstream ss;
- ss << "Hello (should be JSON) " << name << " " << v << "!";
- std::string s = ss.str();
- return Payload(s, "metadata");
- });
-}
diff --git a/examples/conditional-request-handling/JsonRequestHandler.h b/examples/conditional-request-handling/JsonRequestHandler.h
deleted file mode 100644
index f24f06ccf..000000000
--- a/examples/conditional-request-handling/JsonRequestHandler.h
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
-
-#pragma once
-
-#include "rsocket/Payload.h"
-#include "rsocket/RSocket.h"
-
-class JsonRequestResponder : public rsocket::RSocketResponder {
- public:
- /// Handles a new inbound Stream requested by the other end.
- yarpl::Reference>
- handleRequestStream(rsocket::Payload request, rsocket::StreamId streamId)
- override;
-};
diff --git a/examples/conditional-request-handling/TextRequestHandler.cpp b/examples/conditional-request-handling/TextRequestHandler.cpp
deleted file mode 100644
index a6f0717a1..000000000
--- a/examples/conditional-request-handling/TextRequestHandler.cpp
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
-
-#include "TextRequestHandler.h"
-#include
-#include "yarpl/Flowable.h"
-
-using namespace rsocket;
-using namespace yarpl::flowable;
-
-/// Handles a new inbound Stream requested by the other end.
-yarpl::Reference>
-TextRequestResponder::handleRequestStream(Payload request, StreamId) {
- LOG(INFO) << "TextRequestResponder.handleRequestStream " << request;
-
- // string from payload data
- auto requestString = request.moveDataToString();
-
- return Flowables::range(1, 100)->map([name = std::move(requestString)](
- int64_t v) {
- std::stringstream ss;
- ss << "Hello " << name << " " << v << "!";
- std::string s = ss.str();
- return Payload(s, "metadata");
- });
-}
diff --git a/examples/conditional-request-handling/TextRequestHandler.h b/examples/conditional-request-handling/TextRequestHandler.h
deleted file mode 100644
index 604fdbeea..000000000
--- a/examples/conditional-request-handling/TextRequestHandler.h
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
-
-#pragma once
-
-#include "rsocket/Payload.h"
-#include "rsocket/RSocket.h"
-
-class TextRequestResponder : public rsocket::RSocketResponder {
- public:
- /// Handles a new inbound Stream requested by the other end.
- yarpl::Reference>
- handleRequestStream(rsocket::Payload request, rsocket::StreamId streamId)
- override;
-};
diff --git a/rsocket/ColdResumeHandler.cpp b/rsocket/ColdResumeHandler.cpp
index db3a2dce8..870faef48 100644
--- a/rsocket/ColdResumeHandler.cpp
+++ b/rsocket/ColdResumeHandler.cpp
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include "rsocket/ColdResumeHandler.h"
@@ -6,7 +18,6 @@
#include
-using namespace yarpl;
using namespace yarpl::flowable;
namespace rsocket {
@@ -14,20 +25,22 @@ namespace rsocket {
std::string ColdResumeHandler::generateStreamToken(
const Payload&,
StreamId streamId,
- StreamType) {
+ StreamType) const {
return folly::to(streamId);
}
-Reference> ColdResumeHandler::handleResponderResumeStream(
+std::shared_ptr>
+ColdResumeHandler::handleResponderResumeStream(
std::string /* streamToken */,
size_t /* publisherAllowance */) {
- return Flowables::error(
+ return Flowable::error(
std::logic_error("ResumeHandler method not implemented"));
}
-Reference> ColdResumeHandler::handleRequesterResumeStream(
+std::shared_ptr>
+ColdResumeHandler::handleRequesterResumeStream(
std::string /* streamToken */,
size_t /* consumerAllowance */) {
- return yarpl::make_ref>();
-}
+ return std::make_shared>();
}
+} // namespace rsocket
diff --git a/rsocket/ColdResumeHandler.h b/rsocket/ColdResumeHandler.h
index dadedc3ec..f4190e16f 100644
--- a/rsocket/ColdResumeHandler.h
+++ b/rsocket/ColdResumeHandler.h
@@ -1,10 +1,23 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
#include "yarpl/Flowable.h"
#include "rsocket/Payload.h"
+#include "rsocket/framing/FrameHeader.h"
#include "rsocket/internal/Common.h"
namespace rsocket {
@@ -16,14 +29,15 @@ class ColdResumeHandler {
virtual ~ColdResumeHandler() = default;
// Generate an application-aware streamToken for the given stream parameters.
- virtual std::string generateStreamToken(const Payload&, StreamId, StreamType);
+ virtual std::string
+ generateStreamToken(const Payload&, StreamId streamId, StreamType) const;
// This method will be called for each REQUEST_STREAM for which the
// application acted as a responder. The default action would be to return a
// Flowable which errors out immediately.
// The second parameter is the allowance which the application received
// before cold-start and hasn't been fulfilled yet.
- virtual yarpl::Reference>
+ virtual std::shared_ptr>
handleResponderResumeStream(
std::string streamToken,
size_t publisherAllowance);
@@ -33,9 +47,10 @@ class ColdResumeHandler {
// Subscriber which cancels the stream immediately after getting subscribed.
// The second parameter is the allowance which the application requested
// before cold-start and hasn't been fulfilled yet.
- virtual yarpl::Reference>
+ virtual std::shared_ptr>
handleRequesterResumeStream(
std::string streamToken,
size_t consumerAllowance);
};
-}
+
+} // namespace rsocket
diff --git a/rsocket/ConnectionAcceptor.h b/rsocket/ConnectionAcceptor.h
index 92aa5f535..3e94a4416 100644
--- a/rsocket/ConnectionAcceptor.h
+++ b/rsocket/ConnectionAcceptor.h
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
@@ -12,9 +24,8 @@ class EventBase;
namespace rsocket {
-using OnDuplexConnectionAccept = std::function,
- folly::EventBase&)>;
+using OnDuplexConnectionAccept = std::function<
+ void(std::unique_ptr, folly::EventBase&)>;
/**
* Common interface for a server that accepts connections and turns them into
@@ -24,8 +35,6 @@ using OnDuplexConnectionAccept = std::function
#include
#include "rsocket/DuplexConnection.h"
+#include "rsocket/framing/ProtocolVersion.h"
namespace folly {
class EventBase;
@@ -12,6 +25,8 @@ class EventBase;
namespace rsocket {
+enum class ResumeStatus { NEW_SESSION, RESUMING };
+
/**
* Common interface for a client to create connections and turn them into
* DuplexConnections.
@@ -44,6 +59,8 @@ class ConnectionFactory {
*
* Resource creation depends on the particular implementation.
*/
- virtual folly::Future connect() = 0;
+ virtual folly::Future connect(
+ ProtocolVersion,
+ ResumeStatus resume) = 0;
};
} // namespace rsocket
diff --git a/rsocket/DuplexConnection.h b/rsocket/DuplexConnection.h
index c093c8488..7aaff2156 100644
--- a/rsocket/DuplexConnection.h
+++ b/rsocket/DuplexConnection.h
@@ -1,42 +1,27 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
#include
-#include "yarpl/flowable/Subscriber.h"
+#include
-namespace folly {
-class IOBuf;
-}
+#include "yarpl/flowable/Subscriber.h"
namespace rsocket {
-using yarpl::Reference;
-
-class DuplexSubscriber :
- public yarpl::flowable::Subscriber>
-{
-public:
- void onSubscribe(Reference sub) override {
- subscription_ = sub;
- }
- void onComplete() override {
- subscription_.reset();
- }
- void onError(folly::exception_wrapper) override {
- subscription_.reset();
- }
-
-protected:
- Reference subscription() {
- return subscription_;
- }
-
-private:
- Reference subscription_;
-};
-
/// Represents a connection of the underlying protocol, on top of which the
/// RSocket protocol is layered. The underlying protocol MUST provide an
/// ordered, guaranteed, bidirectional transport of frames. Moreover, frame
@@ -55,7 +40,6 @@ class DuplexSubscriber :
class DuplexConnection {
public:
using Subscriber = yarpl::flowable::Subscriber>;
- using DuplexSubscriber = rsocket::DuplexSubscriber;
virtual ~DuplexConnection() = default;
@@ -63,7 +47,7 @@ class DuplexConnection {
///
/// If setInput() has already been called, then calling setInput() again will
/// complete the previous subscriber.
- virtual void setInput(yarpl::Reference) = 0;
+ virtual void setInput(std::shared_ptr) = 0;
/// Write a serialized frame to the connection.
///
@@ -75,4 +59,5 @@ class DuplexConnection {
return false;
}
};
-}
+
+} // namespace rsocket
diff --git a/rsocket/Payload.cpp b/rsocket/Payload.cpp
index 55810c784..b4037d888 100644
--- a/rsocket/Payload.cpp
+++ b/rsocket/Payload.cpp
@@ -1,65 +1,61 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include "rsocket/Payload.h"
+
#include
#include
-#include "rsocket/framing/Frame.h"
+
+#include "rsocket/internal/Common.h"
namespace rsocket {
-Payload::Payload(
- std::unique_ptr _data,
- std::unique_ptr _metadata)
- : data(std::move(_data)), metadata(std::move(_metadata)) {}
-
-Payload::Payload(const std::string& _data, const std::string& _metadata)
- : data(folly::IOBuf::copyBuffer(_data)) {
- if (!_metadata.empty()) {
- metadata = folly::IOBuf::copyBuffer(_metadata);
- }
-}
+namespace {
-void Payload::checkFlags(FrameFlags flags) const {
- DCHECK(!!(flags & FrameFlags::METADATA) == bool(metadata));
+std::string moveIOBufToString(std::unique_ptr buf) {
+ return buf ? buf->moveToFbString().toStdString() : "";
}
-std::ostream& operator<<(std::ostream& os, const Payload& payload) {
- return os << "Metadata("
- << (payload.metadata
- ? folly::to(
- payload.metadata->computeChainDataLength())
- : "0")
- << (payload.metadata
- ? "): '" +
- folly::humanify(
- payload.metadata->cloneAsValue().moveToFbString().substr(0, 80)) +
- "'"
- : "): ")
- << ", Data("
- << (payload.data ? folly::to(
- payload.data->computeChainDataLength())
- : "0")
- << (payload.data
- ? "): '" +
- folly::humanify(
- payload.data->cloneAsValue().moveToFbString().substr(0, 80)) +
- "'"
- : "): ");
+std::string cloneIOBufToString(std::unique_ptr const& buf) {
+ return buf ? buf->cloneAsValue().moveToFbString().toStdString() : "";
}
-static std::string moveIOBufToString(std::unique_ptr iobuf) {
- if (!iobuf) {
- return "";
+} // namespace
+
+Payload::Payload(
+ std::unique_ptr d,
+ std::unique_ptr m)
+ : data{std::move(d)}, metadata{std::move(m)} {}
+
+Payload::Payload(folly::StringPiece d, folly::StringPiece m)
+ : data{folly::IOBuf::copyBuffer(d.data(), d.size())} {
+ if (!m.empty()) {
+ metadata = folly::IOBuf::copyBuffer(m.data(), m.size());
}
- return iobuf->moveToFbString().toStdString();
}
-static std::string cloneIOBufToString(
- std::unique_ptr const& iobuf) {
- if (!iobuf) {
- return "";
- }
- return iobuf->cloneAsValue().moveToFbString().toStdString();
+std::ostream& operator<<(std::ostream& os, const Payload& payload) {
+ return os << "Metadata("
+ << (payload.metadata ? payload.metadata->computeChainDataLength()
+ : 0)
+ << "): "
+ << (payload.metadata ? "'" + humanify(payload.metadata) + "'"
+ : "")
+ << ", Data("
+ << (payload.data ? payload.data->computeChainDataLength() : 0)
+ << "): "
+ << (payload.data ? "'" + humanify(payload.data) + "'" : "");
}
std::string Payload::moveDataToString() {
@@ -88,15 +84,28 @@ Payload Payload::clone() const {
if (data) {
out.data = data->clone();
}
-
if (metadata) {
out.metadata = metadata->clone();
}
return out;
}
-FrameFlags Payload::getFlags() const {
- return (metadata != nullptr ? FrameFlags::METADATA : FrameFlags::EMPTY);
+ErrorWithPayload::ErrorWithPayload(Payload&& payload)
+ : payload(std::move(payload)) {}
+
+ErrorWithPayload::ErrorWithPayload(const ErrorWithPayload& oth) {
+ payload = oth.payload.clone();
+}
+
+ErrorWithPayload& ErrorWithPayload::operator=(const ErrorWithPayload& oth) {
+ payload = oth.payload.clone();
+ return *this;
+}
+
+std::ostream& operator<<(
+ std::ostream& os,
+ const ErrorWithPayload& errorWithPayload) {
+ return os << "rsocket::ErrorWithPayload: " << errorWithPayload.payload;
}
} // namespace rsocket
diff --git a/rsocket/Payload.h b/rsocket/Payload.h
index b43f745f0..c21587014 100644
--- a/rsocket/Payload.h
+++ b/rsocket/Payload.h
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
@@ -6,8 +18,6 @@
#include
#include
-#include "rsocket/framing/FrameFlags.h"
-
namespace rsocket {
/// The type of a read-only view on a binary buffer.
@@ -20,16 +30,13 @@ struct Payload {
std::unique_ptr metadata = std::unique_ptr());
explicit Payload(
- const std::string& data,
- const std::string& metadata = std::string());
+ folly::StringPiece data,
+ folly::StringPiece metadata = folly::StringPiece{});
explicit operator bool() const {
return data != nullptr || metadata != nullptr;
}
- FrameFlags getFlags() const;
- void checkFlags(FrameFlags flags) const;
-
std::string moveDataToString();
std::string cloneDataToString() const;
@@ -44,5 +51,23 @@ struct Payload {
std::unique_ptr metadata;
};
-std::ostream& operator<<(std::ostream& os, const Payload& payload);
-}
+struct ErrorWithPayload : public std::exception {
+ explicit ErrorWithPayload(Payload&& payload);
+
+ // folly::ExceptionWrapper requires exceptions to have copy constructors
+ ErrorWithPayload(const ErrorWithPayload& oth);
+ ErrorWithPayload& operator=(const ErrorWithPayload&);
+ ErrorWithPayload(ErrorWithPayload&&) = default;
+ ErrorWithPayload& operator=(ErrorWithPayload&&) = default;
+
+ const char* what() const noexcept override {
+ return "ErrorWithPayload";
+ }
+
+ Payload payload;
+};
+
+std::ostream& operator<<(std::ostream& os, const Payload&);
+std::ostream& operator<<(std::ostream& os, const ErrorWithPayload&);
+
+} // namespace rsocket
diff --git a/rsocket/README.md b/rsocket/README.md
new file mode 100644
index 000000000..3b811aca9
--- /dev/null
+++ b/rsocket/README.md
@@ -0,0 +1,27 @@
+# rsocket-cpp
+
+C++ implementation of [RSocket](https://rsocket.io)
+
+
+[](https://coveralls.io/github/rsocket/rsocket-cpp?branch=master)
+
+# Dependencies
+
+Install `folly`:
+
+```
+brew install folly
+```
+
+# Building and running tests
+
+After installing dependencies as above, you can build and run tests with:
+
+```
+# inside root ./rsocket-cpp
+mkdir -p build
+cd build
+cmake -DCMAKE_BUILD_TYPE=DEBUG ../
+make -j
+./tests
+```
diff --git a/rsocket/RSocket.cpp b/rsocket/RSocket.cpp
index f990df0cd..e83c5ca71 100644
--- a/rsocket/RSocket.cpp
+++ b/rsocket/RSocket.cpp
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include "rsocket/RSocket.h"
@@ -14,44 +26,51 @@ folly::Future> RSocket::createConnectedClient(
std::shared_ptr resumeManager,
std::shared_ptr coldResumeHandler,
folly::EventBase* stateMachineEvb) {
- auto createRSC = [
- connectionFactory,
- setupParameters = std::move(setupParameters),
- responder = std::move(responder),
- keepaliveInterval,
- stats = std::move(stats),
- connectionEvents = std::move(connectionEvents),
- resumeManager = std::move(resumeManager),
- coldResumeHandler = std::move(coldResumeHandler),
- stateMachineEvb
- ](ConnectionFactory::ConnectedDuplexConnection connection) mutable {
- VLOG(3) << "createConnectedClient received DuplexConnection";
- return RSocket::createClientFromConnection(
- std::move(connection.connection),
- connection.eventBase,
- std::move(setupParameters),
- std::move(connectionFactory),
- std::move(responder),
- keepaliveInterval,
- std::move(stats),
- std::move(connectionEvents),
- std::move(resumeManager),
- std::move(coldResumeHandler),
- stateMachineEvb);
- };
+ CHECK(resumeManager)
+ << "provide ResumeManager::makeEmpty() instead of nullptr";
+ auto protocolVersion = setupParameters.protocolVersion;
+ auto createRSC =
+ [connectionFactory,
+ setupParameters = std::move(setupParameters),
+ responder = std::move(responder),
+ keepaliveInterval,
+ stats = std::move(stats),
+ connectionEvents = std::move(connectionEvents),
+ resumeManager = std::move(resumeManager),
+ coldResumeHandler = std::move(coldResumeHandler),
+ stateMachineEvb](
+ ConnectionFactory::ConnectedDuplexConnection connection) mutable {
+ VLOG(3) << "createConnectedClient received DuplexConnection";
+ return RSocket::createClientFromConnection(
+ std::move(connection.connection),
+ connection.eventBase,
+ std::move(setupParameters),
+ std::move(connectionFactory),
+ std::move(responder),
+ keepaliveInterval,
+ std::move(stats),
+ std::move(connectionEvents),
+ std::move(resumeManager),
+ std::move(coldResumeHandler),
+ stateMachineEvb);
+ };
- return connectionFactory->connect().then([createRSC = std::move(createRSC)](
- ConnectionFactory::ConnectedDuplexConnection connection) mutable {
- // fromConnection method must be called from the transport eventBase
- // and since there is no guarantee that the Future returned from the
- // connectionFactory::connect method is executed on the event base, we
- // have to ensure it by using folly::via
- auto* transportEvb = &connection.eventBase;
- return via(transportEvb, [
- connection = std::move(connection),
- createRSC = std::move(createRSC)
- ]() mutable { return createRSC(std::move(connection)); });
- });
+ return connectionFactory->connect(protocolVersion, ResumeStatus::NEW_SESSION)
+ .thenValue(
+ [createRSC = std::move(createRSC)](
+ ConnectionFactory::ConnectedDuplexConnection connection) mutable {
+ // fromConnection method must be called from the transport eventBase
+ // and since there is no guarantee that the Future returned from the
+ // connectionFactory::connect method is executed on the event base,
+ // we have to ensure it by using folly::via
+ auto transportEvb = &connection.eventBase;
+ return folly::via(
+ transportEvb,
+ [connection = std::move(connection),
+ createRSC = std::move(createRSC)]() mutable {
+ return createRSC(std::move(connection));
+ });
+ });
}
folly::Future> RSocket::createResumedClient(
@@ -77,8 +96,8 @@ folly::Future> RSocket::createResumedClient(
std::move(coldResumeHandler),
stateMachineEvb);
- return c->resume()
- .then([client = std::unique_ptr(c)]() mutable {
+ return c->resume().thenValue(
+ [client = std::unique_ptr(c)](auto&&) mutable {
return std::move(client);
});
}
@@ -86,7 +105,7 @@ folly::Future> RSocket::createResumedClient(
std::unique_ptr RSocket::createClientFromConnection(
std::unique_ptr connection,
folly::EventBase& transportEvb,
- SetupParameters setupParameters,
+ SetupParameters params,
std::shared_ptr connectionFactory,
std::shared_ptr responder,
std::chrono::milliseconds keepaliveInterval,
@@ -95,10 +114,10 @@ std::unique_ptr RSocket::createClientFromConnection(
std::shared_ptr resumeManager,
std::shared_ptr coldResumeHandler,
folly::EventBase* stateMachineEvb) {
- auto c = std::unique_ptr(new RSocketClient(
+ auto client = std::unique_ptr(new RSocketClient(
std::move(connectionFactory),
- setupParameters.protocolVersion,
- setupParameters.token,
+ params.protocolVersion,
+ params.token,
std::move(responder),
keepaliveInterval,
std::move(stats),
@@ -106,11 +125,9 @@ std::unique_ptr RSocket::createClientFromConnection(
std::move(resumeManager),
std::move(coldResumeHandler),
stateMachineEvb));
- c->fromConnection(
- std::move(connection),
- transportEvb,
- std::move(setupParameters));
- return c;
+ client->fromConnection(
+ std::move(connection), transportEvb, std::move(params));
+ return client;
}
std::unique_ptr RSocket::createServer(
@@ -119,4 +136,5 @@ std::unique_ptr RSocket::createServer(
return std::make_unique(
std::move(connectionAcceptor), std::move(stats));
}
+
} // namespace rsocket
diff --git a/rsocket/RSocket.h b/rsocket/RSocket.h
index f68f26497..13f642830 100644
--- a/rsocket/RSocket.h
+++ b/rsocket/RSocket.h
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
@@ -23,7 +35,7 @@ class RSocket {
std::shared_ptr stats = RSocketStats::noop(),
std::shared_ptr connectionEvents =
std::shared_ptr(),
- std::shared_ptr resumeManager = nullptr,
+ std::shared_ptr resumeManager = ResumeManager::makeEmpty(),
std::shared_ptr coldResumeHandler =
std::shared_ptr(),
folly::EventBase* stateMachineEvb = nullptr);
@@ -41,11 +53,11 @@ class RSocket {
std::shared_ptr stats = RSocketStats::noop(),
std::shared_ptr connectionEvents =
std::shared_ptr(),
- ProtocolVersion protocolVersion = ProtocolVersion::Current(),
+ ProtocolVersion protocolVersion = ProtocolVersion::Latest,
folly::EventBase* stateMachineEvb = nullptr);
- // Creates a RSocketClient from an existing DuplexConnection
- // keepaliveInterval of 0 will result in no keepAlives
+ // Creates a RSocketClient from an existing DuplexConnection. A keepalive
+ // interval of 0 will result in no keepalives.
static std::unique_ptr createClientFromConnection(
std::unique_ptr connection,
folly::EventBase& transportEvb,
@@ -55,11 +67,9 @@ class RSocket {
std::make_shared(),
std::chrono::milliseconds keepaliveInterval = kDefaultKeepaliveInterval,
std::shared_ptr stats = RSocketStats::noop(),
- std::shared_ptr connectionEvents =
- std::shared_ptr(),
- std::shared_ptr resumeManager = nullptr,
- std::shared_ptr coldResumeHandler =
- std::shared_ptr(),
+ std::shared_ptr connectionEvents = nullptr,
+ std::shared_ptr resumeManager = ResumeManager::makeEmpty(),
+ std::shared_ptr coldResumeHandler = nullptr,
folly::EventBase* stateMachineEvb = nullptr);
// A convenience function to create RSocketServer
@@ -68,13 +78,9 @@ class RSocket {
std::shared_ptr stats = RSocketStats::noop());
RSocket() = delete;
-
RSocket(const RSocket&) = delete;
-
RSocket(RSocket&&) = delete;
-
RSocket& operator=(const RSocket&) = delete;
-
RSocket& operator=(RSocket&&) = delete;
};
-}
+} // namespace rsocket
diff --git a/rsocket/RSocketClient.cpp b/rsocket/RSocketClient.cpp
index 45177ed1b..7f10b3ead 100644
--- a/rsocket/RSocketClient.cpp
+++ b/rsocket/RSocketClient.cpp
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include "rsocket/RSocketClient.h"
#include "rsocket/RSocketRequester.h"
@@ -10,8 +22,6 @@
#include "rsocket/internal/ClientResumeStatusCallback.h"
#include "rsocket/internal/KeepaliveTimer.h"
-using namespace folly;
-
namespace rsocket {
RSocketClient::RSocketClient(
@@ -34,13 +44,17 @@ RSocketClient::RSocketClient(
coldResumeHandler_(coldResumeHandler),
protocolVersion_(protocolVersion),
token_(std::move(token)),
- evb_(stateMachineEvb) {}
+ evb_(stateMachineEvb) {
+ CHECK(resumeManager_)
+ << "provide ResumeManager::makeEmpty() instead of nullptr";
+}
RSocketClient::~RSocketClient() {
VLOG(3) << "~RSocketClient ..";
evb_->runImmediatelyOrRunInEventBaseThreadAndWait([sm = stateMachine_] {
- std::runtime_error exn{"RSocketClient is closing"};
+ auto exn = folly::make_exception_wrapper(
+ "RSocketClient is closing");
sm->close(std::move(exn), StreamCompletionSignal::CONNECTION_END);
});
}
@@ -49,88 +63,94 @@ const std::shared_ptr& RSocketClient::getRequester() const {
return requester_;
}
-folly::Future RSocketClient::resume() {
- VLOG(2) << "Resuming connection";
+// Returns if this client is currently disconnected
+bool RSocketClient::isDisconnected() const {
+ return stateMachine_->isDisconnected();
+}
+folly::Future RSocketClient::resume() {
CHECK(connectionFactory_)
<< "The client was likely created without ConnectionFactory. Can't "
<< "resume";
- return connectionFactory_->connect().then(
- [this](ConnectionFactory::ConnectedDuplexConnection connection) mutable {
-
- if (!evb_) {
- // cold-resumption. EventBase hasn't been explicitly set for SM by
- // the application. Use the transports eventBase.
- evb_ = &connection.eventBase;
- }
-
- class ResumeCallback : public ClientResumeStatusCallback {
- public:
- explicit ResumeCallback(folly::Promise promise)
- : promise_(std::move(promise)) {}
-
- void onResumeOk() noexcept override {
- promise_.setValue();
- }
-
- void onResumeError(folly::exception_wrapper ex) noexcept override {
- promise_.setException(ex);
- }
-
- private:
- folly::Promise promise_;
- };
-
- folly::Promise promise;
- auto future = promise.getFuture();
-
- auto resumeCallback =
- std::make_unique(std::move(promise));
- std::unique_ptr framedConnection;
- if (connection.connection->isFramed()) {
- framedConnection = std::move(connection.connection);
- } else {
- framedConnection = std::make_unique(
- std::move(connection.connection), protocolVersion_);
- }
- auto transport =
- yarpl::make_ref(std::move(framedConnection));
-
- yarpl::Reference ft;
- if (evb_ != &connection.eventBase) {
- // If the StateMachine EventBase is different from the transport
- // EventBase, then use ScheduledFrameTransport and
- // ScheduledFrameProcessor to ensure the RSocketStateMachine and
- // Transport live on the desired EventBases
- ft = yarpl::make_ref(
- std::move(transport),
- &connection.eventBase, /* Transport EventBase */
- evb_); /* StateMachine EventBase */
- } else {
- ft = std::move(transport);
- }
-
- evb_->runInEventBaseThread([
- this,
- frameTransport = std::move(ft),
- resumeCallback = std::move(resumeCallback),
- connection = std::move(connection)
- ]() mutable {
- if (!stateMachine_) {
- createState();
- }
-
- stateMachine_->resumeClient(
- token_,
- std::move(frameTransport),
- std::move(resumeCallback),
- protocolVersion_);
- });
-
- return future;
-
- });
+ return connectionFactory_->connect(protocolVersion_, ResumeStatus::RESUMING)
+ .thenValue(
+ [this](
+ ConnectionFactory::ConnectedDuplexConnection connection) mutable {
+ return resumeFromConnection(std::move(connection));
+ });
+}
+
+folly::Future RSocketClient::resumeFromConnection(
+ ConnectionFactory::ConnectedDuplexConnection connection) {
+ VLOG(2) << "Resuming connection";
+
+ if (!evb_) {
+ // Cold-resumption. EventBase hasn't been explicitly set for SM by the
+ // application. Use the transport's eventBase.
+ evb_ = &connection.eventBase;
+ }
+
+ class ResumeCallback : public ClientResumeStatusCallback {
+ public:
+ explicit ResumeCallback(folly::Promise promise)
+ : promise_(std::move(promise)) {}
+
+ void onResumeOk() noexcept override {
+ promise_.setValue();
+ }
+
+ void onResumeError(folly::exception_wrapper ex) noexcept override {
+ promise_.setException(ex);
+ }
+
+ private:
+ folly::Promise promise_;
+ };
+
+ folly::Promise promise;
+ auto future = promise.getFuture();
+
+ auto resumeCallback = std::make_unique(std::move(promise));
+ std::unique_ptr framedConnection;
+ if (connection.connection->isFramed()) {
+ framedConnection = std::move(connection.connection);
+ } else {
+ framedConnection = std::make_unique(
+ std::move(connection.connection), protocolVersion_);
+ }
+ auto transport =
+ std::make_shared(std::move(framedConnection));
+
+ std::shared_ptr ft;
+ if (evb_ != &connection.eventBase) {
+ // If the StateMachine EventBase is different from the transport
+ // EventBase, then use ScheduledFrameTransport and
+ // ScheduledFrameProcessor to ensure the RSocketStateMachine and
+ // Transport live on the desired EventBases
+ ft = std::make_shared(
+ std::move(transport),
+ &connection.eventBase, /* Transport EventBase */
+ evb_); /* StateMachine EventBase */
+ } else {
+ ft = std::move(transport);
+ }
+
+ evb_->runInEventBaseThread([this,
+ frameTransport = std::move(ft),
+ callback = std::move(resumeCallback)]() mutable {
+ if (!stateMachine_) {
+ createState();
+ }
+
+ stateMachine_->resumeClient(
+ token_,
+ std::move(frameTransport),
+ std::move(callback),
+ protocolVersion_);
+ });
+
+ return future;
}
folly::Future RSocketClient::disconnect(
@@ -140,7 +160,7 @@ folly::Future RSocketClient::disconnect(
std::runtime_error{"RSocketClient must always have a state machine"});
}
- auto work = [ sm = stateMachine_, e = std::move(ew) ]() mutable {
+ auto work = [sm = stateMachine_, e = std::move(ew)]() mutable {
sm->disconnect(std::move(e));
};
@@ -157,43 +177,38 @@ folly::Future RSocketClient::disconnect(
void RSocketClient::fromConnection(
std::unique_ptr connection,
folly::EventBase& transportEvb,
- SetupParameters setupParameters) {
+ SetupParameters params) {
if (!evb_) {
// If no EventBase is given for the stateMachine, then use the transport's
// EventBase to drive the stateMachine.
evb_ = &transportEvb;
}
createState();
- std::unique_ptr framedConnection;
+
+ std::unique_ptr framed;
if (connection->isFramed()) {
- framedConnection = std::move(connection);
+ framed = std::move(connection);
} else {
- framedConnection = std::make_unique(
- std::move(connection), setupParameters.protocolVersion);
+ framed = std::make_unique(
+ std::move(connection), params.protocolVersion);
}
- auto transport =
- yarpl::make_ref(std::move(framedConnection));
- if (evb_ != &transportEvb) {
- // If the StateMachine EventBase is different from the transport
- // EventBase, then use ScheduledFrameTransport and ScheduledFrameProcessor
- // to ensure the RSocketStateMachine and Transport live on the desired
- // EventBases
- auto scheduledFT = yarpl::make_ref(
- std::move(transport),
- &transportEvb, /* Transport EventBase */
- evb_); /* StateMachine EventBase */
- evb_->runInEventBaseThread([
- stateMachine = stateMachine_,
- scheduledFT = std::move(scheduledFT),
- setupParameters = std::move(setupParameters)
- ]() mutable {
- stateMachine->connectClient(
- std::move(scheduledFT), std::move(setupParameters));
- });
- } else {
- stateMachine_->connectClient(
- std::move(transport), std::move(setupParameters));
+ auto transport = std::make_shared(std::move(framed));
+
+ if (evb_ == &transportEvb) {
+ stateMachine_->connectClient(std::move(transport), std::move(params));
+ return;
}
+
+ // If the StateMachine EventBase is different from the transport EventBase,
+ // then use ScheduledFrameTransport and ScheduledFrameProcessor to ensure the
+ // RSocketStateMachine and Transport live on the desired EventBases.
+ auto scheduledFT = std::make_shared(
+ std::move(transport), &transportEvb, evb_);
+ evb_->runInEventBaseThread([stateMachine = stateMachine_,
+ scheduledFT = std::move(scheduledFT),
+ params = std::move(params)]() mutable {
+ stateMachine->connectClient(std::move(scheduledFT), std::move(params));
+ });
}
void RSocketClient::createState() {
diff --git a/rsocket/RSocketClient.h b/rsocket/RSocketClient.h
index 077fb8dbc..070a3f6be 100644
--- a/rsocket/RSocketClient.h
+++ b/rsocket/RSocketClient.h
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
@@ -27,22 +39,32 @@ class RSocketClient {
~RSocketClient();
RSocketClient(const RSocketClient&) = delete;
- RSocketClient(RSocketClient&&) = default;
+ RSocketClient(RSocketClient&&) = delete;
RSocketClient& operator=(const RSocketClient&) = delete;
- RSocketClient& operator=(RSocketClient&&) = default;
+ RSocketClient& operator=(RSocketClient&&) = delete;
friend class RSocket;
// Returns the RSocketRequester associated with the RSocketClient.
const std::shared_ptr& getRequester() const;
- // Resumes the connection. If a stateMachine already exists,
- // it provides a warm-resumption. If a stateMachine does not exist,
- // it does a cold-resumption. The returned future resolves on successful
- // resumption. Else either a ConnectionException or a ResumptionException
- // is raised.
+ // Returns if this client is currently disconnected
+ bool isDisconnected() const;
+
+ // Resumes the client's connection. If the client was previously connected
+ // this will attempt a warm-resumption. Otherwise this will attempt a
+ // cold-resumption.
+ //
+ // Uses the internal ConnectionFactory instance to re-connect.
folly::Future resume();
+ // Like resume(), but this doesn't use a ConnectionFactory and instead takes
+ // the connection and transport EventBase by argument.
+ //
+ // Prefer using resume() if possible.
+ folly::Future resumeFromConnection(
+ ConnectionFactory::ConnectedDuplexConnection);
+
// Disconnect the underlying transport.
folly::Future disconnect(folly::exception_wrapper = {});
@@ -70,9 +92,9 @@ class RSocketClient {
// Creates RSocketStateMachine and RSocketRequester
void createState();
- std::shared_ptr connectionFactory_;
+ const std::shared_ptr connectionFactory_;
std::shared_ptr responder_;
- std::chrono::milliseconds keepaliveInterval_;
+ const std::chrono::milliseconds keepaliveInterval_;
std::shared_ptr stats_;
std::shared_ptr connectionEvents_;
std::shared_ptr resumeManager_;
@@ -81,8 +103,8 @@ class RSocketClient {
std::shared_ptr stateMachine_;
std::shared_ptr requester_;
- ProtocolVersion protocolVersion_;
- ResumeIdentificationToken token_;
+ const ProtocolVersion protocolVersion_;
+ const ResumeIdentificationToken token_;
// Remember the StateMachine's evb (supplied through constructor). If no
// EventBase is provided, the underlying transport's EventBase will be used
@@ -94,6 +116,5 @@ class RSocketClient {
// EventBase, but the transport ends up being in different EventBase after
// resumption, and vice versa.
folly::EventBase* evb_{nullptr};
-
};
-}
+} // namespace rsocket
diff --git a/rsocket/RSocketConnectionEvents.h b/rsocket/RSocketConnectionEvents.h
index 8df3ee6a5..177a819d2 100644
--- a/rsocket/RSocketConnectionEvents.h
+++ b/rsocket/RSocketConnectionEvents.h
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
@@ -39,4 +51,4 @@ class RSocketConnectionEvents {
// typically gets called after onConnected()
virtual void onStreamsResumed() {}
};
-}
+} // namespace rsocket
diff --git a/rsocket/RSocketErrors.h b/rsocket/RSocketErrors.h
index 8add6294f..e570e7532 100644
--- a/rsocket/RSocketErrors.h
+++ b/rsocket/RSocketErrors.h
@@ -1,7 +1,20 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
+#include
#include
namespace rsocket {
@@ -19,7 +32,7 @@ class RSocketError : public std::runtime_error {
* https://github.com/ReactiveSocket/reactivesocket/blob/master/Protocol.md#error-codes
* @return
*/
- virtual int getErrorCode() = 0;
+ virtual int getErrorCode() const = 0;
};
/**
@@ -29,7 +42,7 @@ class InvalidSetupError : public RSocketError {
public:
using RSocketError::RSocketError;
- int getErrorCode() override {
+ int getErrorCode() const override {
return 0x00000001;
}
@@ -45,7 +58,7 @@ class UnsupportedSetupError : public RSocketError {
public:
using RSocketError::RSocketError;
- int getErrorCode() override {
+ int getErrorCode() const override {
return 0x00000002;
}
@@ -61,7 +74,7 @@ class RejectedSetupError : public RSocketError {
public:
using RSocketError::RSocketError;
- int getErrorCode() override {
+ int getErrorCode() const override {
return 0x00000003;
}
@@ -77,7 +90,7 @@ class RejectedResumeError : public RSocketError {
public:
using RSocketError::RSocketError;
- int getErrorCode() override {
+ int getErrorCode() const override {
return 0x00000004;
}
@@ -93,7 +106,7 @@ class ConnectionError : public RSocketError {
public:
using RSocketError::RSocketError;
- int getErrorCode() override {
+ int getErrorCode() const override {
return 0x00000101;
}
@@ -103,13 +116,13 @@ class ConnectionError : public RSocketError {
};
/**
-* Error Code: CONNECTION_CLOSE 0x00000102
-*/
+ * Error Code: CONNECTION_CLOSE 0x00000102
+ */
class ConnectionCloseError : public RSocketError {
public:
using RSocketError::RSocketError;
- int getErrorCode() override {
+ int getErrorCode() const override {
return 0x00000102;
}
@@ -117,4 +130,4 @@ class ConnectionCloseError : public RSocketError {
return "CONNECTION_CLOSE";
}
};
-}
+} // namespace rsocket
diff --git a/rsocket/RSocketException.h b/rsocket/RSocketException.h
index ae99ab70d..9dc9d61e7 100644
--- a/rsocket/RSocketException.h
+++ b/rsocket/RSocketException.h
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
@@ -21,4 +33,4 @@ class ResumptionException : public RSocketException {
class ConnectionException : public RSocketException {
using RSocketException::RSocketException;
};
-}
+} // namespace rsocket
diff --git a/rsocket/RSocketParameters.cpp b/rsocket/RSocketParameters.cpp
index b59b5ba62..08f221e44 100644
--- a/rsocket/RSocketParameters.cpp
+++ b/rsocket/RSocketParameters.cpp
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include "rsocket/RSocketParameters.h"
@@ -14,4 +26,4 @@ std::ostream& operator<<(
<< " token: " << setupPayload.token
<< " resumable: " << setupPayload.resumable;
}
-}
+} // namespace rsocket
diff --git a/rsocket/RSocketParameters.h b/rsocket/RSocketParameters.h
index 8d1181d0c..0605bcfcf 100644
--- a/rsocket/RSocketParameters.h
+++ b/rsocket/RSocketParameters.h
@@ -1,12 +1,26 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#pragma once
-#include
+#include
+#include
#include
+#include
+
#include "rsocket/Payload.h"
-#include "rsocket/framing/FrameSerializer.h"
-#include "rsocket/internal/Common.h"
+#include "rsocket/framing/Frame.h"
namespace rsocket {
@@ -15,8 +29,8 @@ using OnRSocketResume =
class RSocketParameters {
public:
- RSocketParameters(bool _resumable, ProtocolVersion _protocolVersion)
- : resumable(_resumable), protocolVersion(std::move(_protocolVersion)) {}
+ RSocketParameters(bool resume, ProtocolVersion version)
+ : resumable{resume}, protocolVersion{std::move(version)} {}
bool resumable;
ProtocolVersion protocolVersion;
@@ -25,18 +39,18 @@ class RSocketParameters {
class SetupParameters : public RSocketParameters {
public:
explicit SetupParameters(
- std::string _metadataMimeType = "text/plain",
- std::string _dataMimeType = "text/plain",
- Payload _payload = Payload(),
- bool _resumable = false,
- const ResumeIdentificationToken& _token =
+ std::string metadataMime = "text/plain",
+ std::string dataMime = "text/plain",
+ Payload buf = Payload(),
+ bool resume = false,
+ ResumeIdentificationToken resumeToken =
ResumeIdentificationToken::generateNew(),
- ProtocolVersion _protocolVersion = ProtocolVersion::Current())
- : RSocketParameters(_resumable, _protocolVersion),
- metadataMimeType(std::move(_metadataMimeType)),
- dataMimeType(std::move(_dataMimeType)),
- payload(std::move(_payload)),
- token(_token) {}
+ ProtocolVersion version = ProtocolVersion::Latest)
+ : RSocketParameters(resume, version),
+ metadataMimeType(std::move(metadataMime)),
+ dataMimeType(std::move(dataMime)),
+ payload(std::move(buf)),
+ token(resumeToken) {}
std::string metadataMimeType;
std::string dataMimeType;
@@ -49,18 +63,18 @@ std::ostream& operator<<(std::ostream&, const SetupParameters&);
class ResumeParameters : public RSocketParameters {
public:
ResumeParameters(
- ResumeIdentificationToken _token,
- ResumePosition _serverPosition,
- ResumePosition _clientPosition,
- ProtocolVersion _protocolVersion)
- : RSocketParameters(true, _protocolVersion),
- token(std::move(_token)),
- serverPosition(_serverPosition),
- clientPosition(_clientPosition) {}
+ ResumeIdentificationToken resumeToken,
+ ResumePosition serverPos,
+ ResumePosition clientPos,
+ ProtocolVersion version)
+ : RSocketParameters(true, version),
+ token(std::move(resumeToken)),
+ serverPosition(serverPos),
+ clientPosition(clientPos) {}
ResumeIdentificationToken token;
ResumePosition serverPosition;
ResumePosition clientPosition;
};
-} // reactivesocket
+} // namespace rsocket
diff --git a/rsocket/RSocketRequester.cpp b/rsocket/RSocketRequester.cpp
index c216711e6..cf1799506 100644
--- a/rsocket/RSocketRequester.cpp
+++ b/rsocket/RSocketRequester.cpp
@@ -1,4 +1,16 @@
-// Copyright 2004-present Facebook. All Rights Reserved.
+// Copyright (c) Facebook, Inc. and its affiliates.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include "rsocket/RSocketRequester.h"
@@ -10,161 +22,160 @@
#include "yarpl/single/SingleSubscriptions.h"
using namespace folly;
-using namespace yarpl;
namespace rsocket {
+namespace {
+
+template
+void runOnCorrectThread(folly::EventBase& evb, Fn fn) {
+ if (evb.isInEventBaseThread()) {
+ fn();
+ } else {
+ evb.runInEventBaseThread(std::move(fn));
+ }
+}
+
+} // namespace
+
RSocketRequester::RSocketRequester(
std::shared_ptr srs,
EventBase& eventBase)
- : stateMachine_(std::move(srs)), eventBase_(&eventBase) {}
+ : stateMachine_{std::move(srs)}, eventBase_{&eventBase} {}
RSocketRequester::~RSocketRequester() {
VLOG(1) << "Destroying RSocketRequester";
}
void RSocketRequester::closeSocket() {
- eventBase_->add([stateMachine = std::move(stateMachine_)] {
+ eventBase_->runInEventBaseThread([stateMachine = std::move(stateMachine_)] {
VLOG(2) << "Closing RSocketStateMachine on EventBase";
- stateMachine->close(
- folly::exception_wrapper(), StreamCompletionSignal::SOCKET_CLOSED);
+ stateMachine->close({}, StreamCompletionSignal::SOCKET_CLOSED);
});
}
-yarpl::Reference>
+std::shared_ptr>
RSocketRequester::requestChannel(
- yarpl::Reference>
+ std::shared_ptr>
requestStream) {
- CHECK(stateMachine_); // verify the socket was not closed
-
- return yarpl::flowable::Flowables::fromPublisher([
- eb = eventBase_,
- requestStream = std::move(requestStream),
- srs = stateMachine_
- ](yarpl::Reference> subscriber) mutable {
- auto lambda = [
- requestStream = std::move(requestStream),
- subscriber = std::move(subscriber),
- srs = std::move(srs),
- eb
- ]() mutable {
- auto responseSink = srs->streamsFactory().createChannelRequester(
- yarpl::make_ref>(
- std::move(subscriber), *eb));
- // responseSink is wrapped with thread scheduling
- // so all emissions happen on the right thread
-
- // if we don't get a responseSink back, that means that
- // the requesting peer wasn't connected (or similar error)
- // and the Flowable it gets back will immediately call onError
- if (responseSink) {
- requestStream->subscribe(yarpl::make_ref>(
- std::move(responseSink), *eb));
- }
- };
- if (eb->isInEventBaseThread()) {
- lambda();
- } else {
- eb->runInEventBaseThread(std::move(lambda));
- }
- });
+ return requestChannel({}, false, std::move(requestStream));
+}
+
+std::shared_ptr>
+RSocketRequester::requestChannel(
+ Payload request,
+ std::shared_ptr>
+ requestStream) {
+ return requestChannel(std::move(request), true, std::move(requestStream));
+}
+
+std::shared_ptr>
+RSocketRequester::requestChannel(
+ Payload request,
+ bool hasInitialRequest,
+ std::shared_ptr>
+ requestStreamFlowable) {
+ CHECK(stateMachine_);
+
+ return yarpl::flowable::internal::flowableFromSubscriber(
+ [eb = eventBase_,
+ req = std::move(request),
+ hasInitialRequest,
+ requestStream = std::move(requestStreamFlowable),
+ srs = stateMachine_](
+ std::shared_ptr> subscriber) {
+ auto lambda = [eb,
+ r = req.clone(),
+ hasInitialRequest,
+ requestStream,
+ srs,
+ subs = std::move(subscriber)]() mutable {
+ auto scheduled =
+ std::make_shared>(
+ std::move(subs), *eb);
+ auto responseSink = srs->requestChannel(
+ std::move(r), hasInitialRequest, std::move(scheduled));
+ // responseSink is wrapped with thread scheduling
+ // so all emissions happen on the right thread.
+
+ // If we don't get a responseSink back, that means that
+ // the requesting peer wasn't connected (or similar error)
+ // and the Flowable it gets back will immediately call onError.
+ if (responseSink) {
+ auto scheduledResponse =
+ std::make_shared>(
+ std::move(responseSink), *eb);
+ requestStream->subscribe(std::move(scheduledResponse));
+ }
+ };
+ runOnCorrectThread(*eb, std::move(lambda));
+ });
}
-yarpl::Reference>
+std::shared_ptr>
RSocketRequester::requestStream(Payload request) {
- CHECK(stateMachine_); // verify the socket was not closed
-
- return yarpl::flowable::Flowables::fromPublisher([
- eb = eventBase_,
- request = std::move(request),
- srs = stateMachine_
- ](yarpl::Reference> subscriber) mutable {
- auto lambda = [
- request = std::move(request),
- subscriber = std::move(subscriber),
- srs = std::move(srs),
- eb
- ]() mutable {
- srs->streamsFactory().createStreamRequester(
- std::move(request),
- yarpl::make_ref>(
- std::move(subscriber), *eb));
- };
- if (eb->isInEventBaseThread()) {
- lambda();
- } else {
- eb->runInEventBaseThread(std::move(lambda));
- }
- });
+ CHECK(stateMachine_);
+
+ return yarpl::flowable::internal::flowableFromSubscriber(
+ [eb = eventBase_, req = std::move(request), srs = stateMachine_](
+ std::shared_ptr