Export of internal Abseil changes.
-- 3d20ce6cd6541579abecaba169d4b8716d511272 by Jon Cohen <cohenjon@google.com>:

Only use LSAN for clang version >= 3.5. This should fix
https://github.com/abseil/abseil-cpp/issues/244

PiperOrigin-RevId: 234675129

-- e15bd4ec7a81aa95cc3d09fa1e0e81d58ae478fb by Conrad Parker <conradparker@google.com>:

Fix errors in apply() sample code. The following changes are made:

 * Make the example method public.
 * Give the two user functions different names to avoid confusion about
   whether apply() can select the correct overload of a function based on
   its tuple argument (it can't).
 * Pass tuple2 to the second example apply() invocation, instead of passing
   its contents individually.
 * Fix a s/tuple/tuple3/ typo in the third example apply() invocation.

PiperOrigin-RevId: 234223407

-- de0ed71e21bc76ddf9fe715fdbaef74cd0df95c7 by Abseil Team <absl-team@google.com>:

First test whether a macro is defined, to avoid -Wundef.

ABSL clients may need to compile their code with the -Wundef warning flag.
It will be helpful if ABSL header files can be compiled without the -Wundef
warning.

How to avoid the -Wundef warning: if a macro may be undefined, first test
whether the macro is defined before testing its value. We can't rely on the
C preprocessor rule that an undefined macro evaluates to 0.

PiperOrigin-RevId: 234201123

-- fa484ad7dae0cac21140a96662809ecb0ec8eb5d by Abseil Team <absl-team@google.com>:

Internal change.

PiperOrigin-RevId: 234185697

-- d69b1baef681e27954b065375ecf9c2320463b2b by Samuel Benzaquen <sbenza@google.com>:

Mix pointers more thoroughly. Some pointer alignments interact badly with
the mixing constant; by mixing twice we reduce this problem.

PiperOrigin-RevId: 234178401

-- 1041d0e474610f3a8fea0db90958857327d6da1c by Samuel Benzaquen <sbenza@google.com>:

Record rehashes in the hashtablez struct.

Only recording the probe length on insertion causes a huge overestimation of
the total probe length at any given time. With natural growth, elements are
inserted when the load factor is between (max load/2, max load). However,
after a rehash the majority of elements are actually inserted when the load
factor is less than max/2 and have a much lower average probe length.

Also reset some values when the table is cleared.

PiperOrigin-RevId: 234013580

-- 299205caf3c89c47339f7409bc831746602cea84 by Mark Barolak <mbar@google.com>:

Fix a sample code snippet that assumes `absl::string_view::const_iterator`
is `const char*`. This is generally true; however, in C++17 builds
absl::string_view is an alias for std::string_view, and on MSVC
std::string_view::const_iterator is an object instead of just a pointer.

PiperOrigin-RevId: 233844595

-- af6c6370cf51a1e6c1469c79dfb2a486a4009136 by Abseil Team <absl-team@google.com>:

Internal change.

PiperOrigin-RevId: 233773470

-- 6e59e4b8e2bb6101b448f0f32b0bea81fe399ccf by Abseil Team <absl-team@google.com>:

Fix typo in {Starts|Ends}WithIgnoreCase comment in match.h.

PiperOrigin-RevId: 233662951

GitOrigin-RevId: 3d20ce6cd6541579abecaba169d4b8716d511272
Change-Id: Ib9a29b1c38c6aedf5d9f3f7f00596e8d30e864dd
This commit is contained in:
parent
426eaa4aa4
commit
93d155bc44
13 changed files with 170 additions and 54 deletions
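
The string_view entry above (PiperOrigin-RevId 233844595) describes a portability pitfall worth a concrete illustration. For illustration only, a minimal sketch of the portable pattern; c_api_write is a hypothetical C-style sink taking a pointer and a length, not an Abseil API:

#include <cstddef>
#include <cstdio>

#include "absl/strings/string_view.h"

// Hypothetical C-style sink taking a pointer and a length.
void c_api_write(const char* data, std::size_t size) {
  std::fwrite(data, 1, size, stdout);
}

void Write(absl::string_view s) {
  // Not portable: under C++17, absl::string_view aliases std::string_view,
  // and on MSVC std::string_view::const_iterator is a class type rather
  // than const char*, so this line would fail to compile there.
  //   c_api_write(s.begin(), s.size());

  // Portable: data() is specified to return const char*.
  c_api_write(s.data(), s.size());
}

int main() { Write("hello\n"); }
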
@@ -527,25 +527,26 @@ namespace container_internal {
 template <class K, class V>
 struct FlatHashMapPolicy {
-  using slot_type = container_internal::slot_type<K, V>;
+  using slot_policy = container_internal::map_slot_policy<K, V>;
+  using slot_type = typename slot_policy::slot_type;
   using key_type = K;
   using mapped_type = V;
   using init_type = std::pair</*non const*/ key_type, mapped_type>;

   template <class Allocator, class... Args>
   static void construct(Allocator* alloc, slot_type* slot, Args&&... args) {
-    slot_type::construct(alloc, slot, std::forward<Args>(args)...);
+    slot_policy::construct(alloc, slot, std::forward<Args>(args)...);
   }

   template <class Allocator>
   static void destroy(Allocator* alloc, slot_type* slot) {
-    slot_type::destroy(alloc, slot);
+    slot_policy::destroy(alloc, slot);
   }

   template <class Allocator>
   static void transfer(Allocator* alloc, slot_type* new_slot,
                        slot_type* old_slot) {
-    slot_type::transfer(alloc, new_slot, old_slot);
+    slot_policy::transfer(alloc, new_slot, old_slot);
   }

   template <class F, class... Args>
@@ -311,7 +311,23 @@ struct IsLayoutCompatible {
 // kMutableKeys. For C++11, the relevant section of the standard is
 // https://timsong-cpp.github.io/cppwp/n3337/class.mem#19 (9.2.19)
 template <class K, class V>
-union slot_type {
+union map_slot_type {
+  map_slot_type() {}
+  ~map_slot_type() = delete;
+  using value_type = std::pair<const K, V>;
+  using mutable_value_type = std::pair<K, V>;
+
+  value_type value;
+  mutable_value_type mutable_value;
+  K key;
+};
+
+template <class K, class V>
+struct map_slot_policy {
+  using slot_type = map_slot_type<K, V>;
+  using value_type = std::pair<const K, V>;
+  using mutable_value_type = std::pair<K, V>;
+
  private:
   static void emplace(slot_type* slot) {
     // The construction of union doesn't do anything at runtime but it allows us
@@ -321,19 +337,17 @@ union slot_type
   // If pair<const K, V> and pair<K, V> are layout-compatible, we can accept one
   // or the other via slot_type. We are also free to access the key via
   // slot_type::key in this case.
-  using kMutableKeys =
-      std::integral_constant<bool,
-                             memory_internal::IsLayoutCompatible<K, V>::value>;
+  using kMutableKeys = memory_internal::IsLayoutCompatible<K, V>;

  public:
-  slot_type() {}
-  ~slot_type() = delete;
-  using value_type = std::pair<const K, V>;
-  using mutable_value_type = std::pair<K, V>;
+  static value_type& element(slot_type* slot) { return slot->value; }
+  static const value_type& element(const slot_type* slot) {
+    return slot->value;
+  }

-  value_type value;
-  mutable_value_type mutable_value;
-  K key;
+  static const K& key(const slot_type* slot) {
+    return kMutableKeys::value ? slot->key : slot->value.first;
+  }

   template <class Allocator, class... Args>
   static void construct(Allocator* alloc, slot_type* slot, Args&&... args) {
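
The comments above cite the common-initial-sequence rule (class.mem#19 in N3337) that makes the slot union work. For illustration only, a toy sketch of that idea outside of Abseil; real code must verify layout compatibility the way memory_internal::IsLayoutCompatible does, because the standard does not guarantee that std::pair is standard-layout:

// Illustrative toy version of the union trick; not Abseil's implementation.
#include <cstdio>
#include <new>
#include <type_traits>
#include <utility>

static_assert(std::is_standard_layout<std::pair<const int, int>>::value &&
                  std::is_standard_layout<std::pair<int, int>>::value,
              "the union trick relies on standard-layout pairs");

union ToySlot {
  ToySlot() {}   // members have non-trivial constructors, so provide our own
  ~ToySlot() {}  // and destroy members manually
  std::pair<const int, int> value;    // what the container hands out
  std::pair<int, int> mutable_value;  // what insertion/rehash writes to
};

int main() {
  ToySlot slot;
  // Construct the mutable member so the key can be written...
  new (&slot.mutable_value) std::pair<int, int>(1, 2);
  // ...and read it back through the const-keyed member via the
  // common-initial-sequence guarantee for standard-layout union members.
  std::printf("%d -> %d\n", slot.value.first, slot.value.second);
  slot.mutable_value.~pair();  // trivial here, but mirrors real cleanup
  return 0;
}
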
@@ -33,6 +33,7 @@
 #include "absl/base/internal/per_thread_tls.h"
 #include "absl/base/optimization.h"
+#include "absl/container/internal/have_sse.h"
 #include "absl/synchronization/mutex.h"
 #include "absl/utility/utility.h"

@@ -82,10 +83,24 @@ struct HashtablezInfo {
   void* stack[kMaxStackDepth];
 };

+inline void RecordRehashSlow(HashtablezInfo* info, size_t total_probe_length) {
+#if SWISSTABLE_HAVE_SSE2
+  total_probe_length /= 16;
+#else
+  total_probe_length /= 8;
+#endif
+  info->total_probe_length.store(total_probe_length, std::memory_order_relaxed);
+  info->num_erases.store(0, std::memory_order_relaxed);
+}
+
 inline void RecordStorageChangedSlow(HashtablezInfo* info, size_t size,
                                      size_t capacity) {
   info->size.store(size, std::memory_order_relaxed);
   info->capacity.store(capacity, std::memory_order_relaxed);
+  if (size == 0) {
+    // This is a clear, reset the total/num_erases too.
+    RecordRehashSlow(info, 0);
+  }
 }

 void RecordInsertSlow(HashtablezInfo* info, size_t hash,
@@ -126,6 +141,11 @@ class HashtablezInfoHandle {
     RecordStorageChangedSlow(info_, size, capacity);
   }

+  inline void RecordRehash(size_t total_probe_length) {
+    if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
+    RecordRehashSlow(info_, total_probe_length);
+  }
+
   inline void RecordInsert(size_t hash, size_t distance_from_desired) {
     if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
     RecordInsertSlow(info_, hash, distance_from_desired);
@@ -145,6 +145,29 @@ TEST(HashtablezInfoTest, RecordErase) {
   EXPECT_EQ(info.num_erases.load(), 1);
 }

+TEST(HashtablezInfoTest, RecordRehash) {
+  HashtablezInfo info;
+  absl::MutexLock l(&info.init_mu);
+  info.PrepareForSampling();
+  RecordInsertSlow(&info, 0x1, 0);
+  RecordInsertSlow(&info, 0x2, kProbeLength);
+  RecordInsertSlow(&info, 0x4, kProbeLength);
+  RecordInsertSlow(&info, 0x8, 2 * kProbeLength);
+  EXPECT_EQ(info.size.load(), 4);
+  EXPECT_EQ(info.total_probe_length.load(), 4);
+
+  RecordEraseSlow(&info);
+  RecordEraseSlow(&info);
+  EXPECT_EQ(info.size.load(), 2);
+  EXPECT_EQ(info.total_probe_length.load(), 4);
+  EXPECT_EQ(info.num_erases.load(), 2);
+
+  RecordRehashSlow(&info, 3 * kProbeLength);
+  EXPECT_EQ(info.size.load(), 2);
+  EXPECT_EQ(info.total_probe_length.load(), 3);
+  EXPECT_EQ(info.num_erases.load(), 0);
+}
+
 TEST(HashtablezSamplerTest, SmallSampleParameter) {
   SetHashtablezEnabled(true);
   SetHashtablezSampleParameter(100);
@@ -936,7 +936,7 @@ class raw_hash_set {
       reset_growth_left();
     }
     assert(empty());
-    infoz_.RecordStorageChanged(size_, capacity_);
+    infoz_.RecordStorageChanged(0, capacity_);
   }

   // This overload kicks in when the argument is an rvalue of insertable and
@@ -1226,7 +1226,7 @@ class raw_hash_set {
     if (n == 0 && capacity_ == 0) return;
     if (n == 0 && size_ == 0) {
       destroy_slots();
-      infoz_.RecordStorageChanged(size_, capacity_);
+      infoz_.RecordStorageChanged(0, 0);
       return;
     }
     // bitor is a faster way of doing `max` here. We will round up to the next
@@ -1483,11 +1483,14 @@ class raw_hash_set {
     capacity_ = new_capacity;
     initialize_slots();

+    size_t total_probe_length = 0;
     for (size_t i = 0; i != old_capacity; ++i) {
       if (IsFull(old_ctrl[i])) {
         size_t hash = PolicyTraits::apply(HashElement{hash_ref()},
                                           PolicyTraits::element(old_slots + i));
-        size_t new_i = find_first_non_full(hash).offset;
+        auto target = find_first_non_full(hash);
+        size_t new_i = target.offset;
+        total_probe_length += target.probe_length;
         set_ctrl(new_i, H2(hash));
         PolicyTraits::transfer(&alloc_ref(), slots_ + new_i, old_slots + i);
       }
@@ -1499,6 +1502,7 @@ class raw_hash_set {
       Deallocate<Layout::Alignment()>(&alloc_ref(), old_ctrl,
                                       layout.AllocSize());
     }
+    infoz_.RecordRehash(total_probe_length);
   }

   void drop_deletes_without_resize() ABSL_ATTRIBUTE_NOINLINE {
@@ -1522,12 +1526,15 @@ class raw_hash_set {
     ConvertDeletedToEmptyAndFullToDeleted(ctrl_, capacity_);
     typename std::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type
         raw;
+    size_t total_probe_length = 0;
     slot_type* slot = reinterpret_cast<slot_type*>(&raw);
     for (size_t i = 0; i != capacity_; ++i) {
       if (!IsDeleted(ctrl_[i])) continue;
       size_t hash = PolicyTraits::apply(HashElement{hash_ref()},
                                         PolicyTraits::element(slots_ + i));
-      size_t new_i = find_first_non_full(hash).offset;
+      auto target = find_first_non_full(hash);
+      size_t new_i = target.offset;
+      total_probe_length += target.probe_length;

       // Verify if the old and new i fall within the same group wrt the hash.
       // If they do, we don't need to move the object as it falls already in the
@@ -1560,6 +1567,7 @@ class raw_hash_set {
       }
     }
     reset_growth_left();
+    infoz_.RecordRehash(total_probe_length);
   }

   void rehash_and_grow_if_necessary() {
@@ -1,6 +1,9 @@
 # See absl/copts/copts.py and absl/copts/generate_copts.py
 include(GENERATED_AbseilCopts)

+set(ABSL_LSAN_LINKOPTS "")
+set(ABSL_HAVE_LSAN OFF)
+
 if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
   set(ABSL_DEFAULT_COPTS "${GCC_FLAGS}")
   set(ABSL_TEST_COPTS "${GCC_FLAGS};${GCC_TEST_FLAGS}")
@@ -10,6 +13,14 @@ elseif("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang")
   set(ABSL_DEFAULT_COPTS "${LLVM_FLAGS}")
   set(ABSL_TEST_COPTS "${LLVM_FLAGS};${LLVM_TEST_FLAGS}")
   set(ABSL_EXCEPTIONS_FLAG "${LLVM_EXCEPTIONS_FLAGS}")
+  if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
+    # AppleClang doesn't have lsan
+    # https://developer.apple.com/documentation/code_diagnostics
+    if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 3.5)
+      set(ABSL_LSAN_LINKOPTS "-fsanitize=leak")
+      set(ABSL_HAVE_LSAN ON)
+    endif()
+  endif()
 elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC")
   set(ABSL_DEFAULT_COPTS "${MSVC_FLAGS}")
   set(ABSL_TEST_COPTS "${MSVC_FLAGS};${MSVC_TEST_FLAGS}")
|
@ -199,11 +199,6 @@ absl_cc_library(
|
|||
PUBLIC
|
||||
)
|
||||
|
||||
# TODO(cohenjon) Move into the copts code.
|
||||
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
|
||||
set(ABSL_LSAN_LINKOPTS "-fsanitize=leak")
|
||||
endif()
|
||||
|
||||
absl_cc_library(
|
||||
NAME
|
||||
leak_check_api_enabled_for_testing
|
||||
|
@@ -212,7 +207,7 @@ absl_cc_library(
   SRCS
     "leak_check.cc"
   COPTS
-    $<$<BOOL:${ABSL_USING_CLANG}>:-DLEAK_SANITIZER>
+    $<$<BOOL:${ABSL_HAVE_LSAN}>:-DLEAK_SANITIZER>
   TESTONLY
 )

@@ -234,7 +229,7 @@ absl_cc_test(
   SRCS
     "leak_check_test.cc"
   COPTS
-    "$<$<CXX_COMPILER_ID:Clang>:-DABSL_EXPECT_LEAK_SANITIZER>"
+    "$<$<BOOL:${ABSL_HAVE_LSAN}>:-DABSL_EXPECT_LEAK_SANITIZER>"
   LINKOPTS
     "${ABSL_LSAN_LINKOPTS}"
   DEPS
@@ -135,6 +135,36 @@ TEST(HashValueTest, Pointer) {
       std::make_tuple(&i, ptr, nullptr, ptr + 1, n)));
 }

+TEST(HashValueTest, PointerAlignment) {
+  // We want to make sure that pointer alignment will not cause bits to be
+  // stuck.
+
+  constexpr size_t kTotalSize = 1 << 20;
+  std::unique_ptr<char[]> data(new char[kTotalSize]);
+  constexpr size_t kLog2NumValues = 5;
+  constexpr size_t kNumValues = 1 << kLog2NumValues;
+
+  for (size_t align = 1; align < kTotalSize / kNumValues;
+       align < 8 ? align += 1 : align < 1024 ? align += 8 : align += 32) {
+    SCOPED_TRACE(align);
+    ASSERT_LE(align * kNumValues, kTotalSize);
+
+    size_t bits_or = 0;
+    size_t bits_and = ~size_t{};
+
+    for (size_t i = 0; i < kNumValues; ++i) {
+      size_t hash = absl::Hash<void*>()(data.get() + i * align);
+      bits_or |= hash;
+      bits_and &= hash;
+    }
+
+    // Limit the scope to the bits we would be using for Swisstable.
+    constexpr size_t kMask = (1 << (kLog2NumValues + 7)) - 1;
+    size_t stuck_bits = (~bits_or | bits_and) & kMask;
+    EXPECT_EQ(stuck_bits, 0) << "0x" << std::hex << stuck_bits;
+  }
+}
+
 // TODO(EricWF): MSVC 15 has a bug that causes it to incorrectly evaluate the
 // SFINAE in internal/hash.h, causing this test to fail.
 #if !defined(_MSC_VER)
|
@ -264,7 +264,12 @@ H AbslHashValue(H hash_state, long double value) {
|
|||
// AbslHashValue() for hashing pointers
|
||||
template <typename H, typename T>
|
||||
H AbslHashValue(H hash_state, T* ptr) {
|
||||
return hash_internal::hash_bytes(std::move(hash_state), ptr);
|
||||
auto v = reinterpret_cast<uintptr_t>(ptr);
|
||||
// Due to alignment, pointers tend to have low bits as zero, and the next few
|
||||
// bits follow a pattern since they are also multiples of some base value.
|
||||
// Mixing the pointer twice helps prevent stuck low bits for certain alignment
|
||||
// values.
|
||||
return H::combine(std::move(hash_state), v, v);
|
||||
}
|
||||
|
||||
// AbslHashValue() for hashing nullptr_t
|
||||
|
|
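
For context on the "mix pointers more thoroughly" change above, here is a standalone sketch, not part of this commit, that applies the same stuck-bit check as the PointerAlignment test to two illustrative mixers. Neither mixer is Abseil's; the point is only to show how power-of-two alignment leaves low bits stuck under a bare multiplicative mix, while folding high bits back down avoids that:

// Illustrative only.
#include <cstdint>
#include <cstdio>

// splitmix64-style finalizer: the xor-shifts fold high bits back into the
// low bits, so alignment-induced zero low bits do not stay stuck.
uint64_t MixWell(uint64_t v) {
  v ^= v >> 30;
  v *= 0xbf58476d1ce4e5b9u;
  v ^= v >> 27;
  v *= 0x94d049bb133111ebu;
  v ^= v >> 31;
  return v;
}

// A bare multiply: the low k bits of the result depend only on the low k
// bits of the input, so aligned inputs keep their low bits stuck.
uint64_t MixPoorly(uint64_t v) { return v * 0x9e3779b97f4a7c15u; }

// Reports which of the low `mask_bits` hash bits are identical for every
// input when hashing n addresses spaced `align` bytes apart; same
// bits_or/bits_and detection as the PointerAlignment test above.
template <typename Mixer>
uint64_t StuckLowBits(Mixer mix, uint64_t align, int n, int mask_bits) {
  uint64_t bits_or = 0, bits_and = ~uint64_t{0};
  for (int i = 0; i < n; ++i) {
    uint64_t h = mix(uint64_t{0x1000} + uint64_t(i) * align);
    bits_or |= h;
    bits_and &= h;
  }
  const uint64_t mask = (uint64_t{1} << mask_bits) - 1;
  // A bit is stuck if it is 0 in every hash or 1 in every hash.
  return (~bits_or | bits_and) & mask;
}

int main() {
  for (uint64_t align : {8, 64, 1024}) {
    std::printf("align=%-5llu stuck(poor)=%#010llx stuck(good)=%#010llx\n",
                (unsigned long long)align,
                (unsigned long long)StuckLowBits(MixPoorly, align, 64, 16),
                (unsigned long long)StuckLowBits(MixWell, align, 64, 16));
  }
}
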
|
@ -646,7 +646,7 @@ struct allocator_is_nothrow
|
|||
: memory_internal::ExtractOrT<memory_internal::GetIsNothrow, Alloc,
|
||||
std::false_type> {};
|
||||
|
||||
#if ABSL_ALLOCATOR_NOTHROW
|
||||
#if defined(ABSL_ALLOCATOR_NOTHROW) && ABSL_ALLOCATOR_NOTHROW
|
||||
template <typename T>
|
||||
struct allocator_is_nothrow<std::allocator<T>> : std::true_type {};
|
||||
struct default_allocator_is_nothrow : std::true_type {};
|
||||
|
|
|
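
The memory.h hunk above is an instance of the -Wundef-safe pattern described in the commit message. A minimal sketch of the two forms, where FEATURE_X is a hypothetical configuration macro:

// Illustrative only; FEATURE_X is a hypothetical configuration macro.

// Under -Wundef this form warns whenever FEATURE_X was never defined, even
// though the preprocessor would silently treat it as 0:
//   #if FEATURE_X
//   ...
//   #endif

// -Wundef-clean form: test for definition first, then test the value.
#if defined(FEATURE_X) && FEATURE_X
// code enabled only when FEATURE_X is defined and nonzero
#endif
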
@@ -74,13 +74,13 @@ bool EqualsIgnoreCase(absl::string_view piece1, absl::string_view piece2);

 // StartsWithIgnoreCase()
 //
-// Returns whether a given ASCII string `text` starts with `starts_with`,
+// Returns whether a given ASCII string `text` starts with `prefix`,
 // ignoring case in the comparison.
 bool StartsWithIgnoreCase(absl::string_view text, absl::string_view prefix);

 // EndsWithIgnoreCase()
 //
-// Returns whether a given ASCII string `text` ends with `ends_with`, ignoring
+// Returns whether a given ASCII string `text` ends with `suffix`, ignoring
 // case in the comparison.
 bool EndsWithIgnoreCase(absl::string_view text, absl::string_view suffix);

|
@ -72,22 +72,23 @@ namespace absl {
|
|||
// - `MaxSplits`
|
||||
//
|
||||
//
|
||||
// A Delimiter's Find() member function will be passed the input text that is to
|
||||
// be split and the position to begin searching for the next delimiter in the
|
||||
// input text. The returned absl::string_view should refer to the next
|
||||
// occurrence (after pos) of the represented delimiter; this returned
|
||||
// absl::string_view represents the next location where the input string should
|
||||
// be broken. The returned absl::string_view may be zero-length if the Delimiter
|
||||
// does not represent a part of the string (e.g., a fixed-length delimiter). If
|
||||
// no delimiter is found in the given text, a zero-length absl::string_view
|
||||
// referring to text.end() should be returned (e.g.,
|
||||
// absl::string_view(text.end(), 0)). It is important that the returned
|
||||
// absl::string_view always be within the bounds of input text given as an
|
||||
// A Delimiter's `Find()` member function will be passed an input `text` that is
|
||||
// to be split and a position (`pos`) to begin searching for the next delimiter
|
||||
// in `text`. The returned absl::string_view should refer to the next occurrence
|
||||
// (after `pos`) of the represented delimiter; this returned absl::string_view
|
||||
// represents the next location where the input `text` should be broken.
|
||||
//
|
||||
// The returned absl::string_view may be zero-length if the Delimiter does not
|
||||
// represent a part of the string (e.g., a fixed-length delimiter). If no
|
||||
// delimiter is found in the input `text`, a zero-length absl::string_view
|
||||
// referring to `text.end()` should be returned (e.g.,
|
||||
// `text.substr(text.size())`). It is important that the returned
|
||||
// absl::string_view always be within the bounds of the input `text` given as an
|
||||
// argument--it must not refer to a string that is physically located outside of
|
||||
// the given string.
|
||||
//
|
||||
// The following example is a simple Delimiter object that is created with a
|
||||
// single char and will look for that char in the text passed to the Find()
|
||||
// single char and will look for that char in the text passed to the `Find()`
|
||||
// function:
|
||||
//
|
||||
// struct SimpleDelimiter {
|
||||
|
|
|
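
The hunk above ends just as the SimpleDelimiter example begins. For illustration only, a sketch of a single-char delimiter written against the Find() contract described in the new comment text; the example in str_split.h itself may differ in detail:

// Illustrative sketch of a char delimiter satisfying the documented contract.
#include <cstddef>

#include "absl/strings/string_view.h"

struct SimpleDelimiter {
  char c;
  explicit SimpleDelimiter(char ch) : c(ch) {}

  absl::string_view Find(absl::string_view text, size_t pos) const {
    size_t found = text.find(c, pos);
    if (found == absl::string_view::npos) {
      // No delimiter after `pos`: return a zero-length view at the end of
      // `text`, staying within the bounds of the input.
      return text.substr(text.size());
    }
    // Return the one-character occurrence of the delimiter.
    return text.substr(found, 1);
  }
};

An object like this can then be passed as the delimiter argument of absl::StrSplit, e.g. absl::StrSplit("a,b,c", SimpleDelimiter(',')).
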
@@ -234,25 +234,33 @@ auto apply_helper(Functor&& functor, Tuple&& t, index_sequence<Indexes...>)
 //
 // Example:
 //
-//   class Foo{void Bar(int);};
-//   void user_function(int, string);
-//   void user_function(std::unique_ptr<Foo>);
+//   class Foo {
+//    public:
+//     void Bar(int);
+//   };
+//   void user_function1(int, string);
+//   void user_function2(std::unique_ptr<Foo>);
+//   auto user_lambda = [](int, int) {};
 //
 //   int main()
 //   {
 //       std::tuple<int, string> tuple1(42, "bar");
-//       // Invokes the user function overload on int, string.
-//       absl::apply(&user_function, tuple1);
+//       // Invokes the first user function on int, string.
+//       absl::apply(&user_function1, tuple1);
 //
-//       auto foo = absl::make_unique<Foo>();
-//       std::tuple<Foo*, int> tuple2(foo.get(), 42);
-//       // Invokes the method Bar on foo with one argument 42.
-//       absl::apply(&Foo::Bar, foo.get(), 42);
-//
-//       std::tuple<std::unique_ptr<Foo>> tuple3(absl::make_unique<Foo>());
+//       std::tuple<std::unique_ptr<Foo>> tuple2(absl::make_unique<Foo>());
 //       // Invokes the user function that takes ownership of the unique
 //       // pointer.
-//       absl::apply(&user_function, std::move(tuple));
+//       absl::apply(&user_function2, std::move(tuple2));
+//
+//       auto foo = absl::make_unique<Foo>();
+//       std::tuple<Foo*, int> tuple3(foo.get(), 42);
+//       // Invokes the method Bar on foo with one argument, 42.
+//       absl::apply(&Foo::Bar, tuple3);
+//
+//       std::tuple<int, int> tuple4(8, 9);
+//       // Invokes a lambda.
+//       absl::apply(user_lambda, tuple4);
 //   }
 template <typename Functor, typename Tuple>
 auto apply(Functor&& functor, Tuple&& t)
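
The documentation fix above exists because apply() cannot pick a member of an overload set from the tuple type alone. For illustration only, a minimal sketch of the failure and two common workarounds; the names are made up, and absl::apply is the function documented above:

// Illustrative only; demonstrates why the example uses distinct names
// (user_function1/user_function2) instead of a single overload set.
#include <string>
#include <tuple>

#include "absl/utility/utility.h"

void user_function(int, std::string) {}
void user_function(double) {}

int main() {
  std::tuple<int, std::string> t(42, "bar");

  // Does not compile: `&user_function` names an overload set, and the
  // compiler cannot choose an overload from the tuple argument alone.
  //   absl::apply(&user_function, t);

  // Workaround 1: select the overload explicitly with a cast.
  absl::apply(static_cast<void (*)(int, std::string)>(&user_function), t);

  // Workaround 2: wrap the call in a lambda so overload resolution happens
  // at the call site inside the lambda.
  absl::apply([](int i, const std::string& s) { user_function(i, s); }, t);
  return 0;
}
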