2 * Copyright 2015 Facebook, Inc.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
24 #include <folly/Optional.h>
25 #include <folly/SmallLocks.h>
27 #include <folly/futures/Try.h>
28 #include <folly/futures/Promise.h>
29 #include <folly/futures/Future.h>
30 #include <folly/Executor.h>
31 #include <folly/futures/detail/FSM.h>
33 #include <folly/io/async/Request.h>
35 namespace folly { namespace detail {
44 This state machine is fairly self-explanatory. The most important bit is
45 that the callback is only executed on the transition from Armed to Done,
46 and that transition can happen immediately after transitioning from Only*
47 to Armed, if it is active (the usual case).
// States referenced elsewhere in this file: Start (the initial state, see
// fsm_'s initializer), OnlyResult, OnlyCallback, Armed, and Done.
// (The enumerator list itself is elided in this excerpt.)
49 enum class State : uint8_t {
57 /// The shared state object for Future and Promise.
58 /// Some methods must only be called by either the Future thread or the
59 /// Promise thread. The Future thread is the thread that currently "owns" the
60 /// Future and its callback-related operations, and the Promise thread is
61 /// likewise the thread that currently "owns" the Promise and its
62 /// result-related operations. Also, Futures own interruption, Promises own
63 /// interrupt handlers. Unfortunately, there are things that users can do to
64 /// break this, and we can't detect that. However if they follow move
65 /// semantics religiously wrt threading, they should be ok.
67 /// It's worth pointing out that Futures and/or Promises can and usually will
68 /// migrate between threads, though this usually happens within the API code.
69 /// For example, an async operation will probably make a Promise, grab its
70 /// Future, then move the Promise into another thread that will eventually
71 /// fulfill it. With executors and via, this gets slightly more complicated at
72 /// first blush, but it's the same principle. In general, as long as the user
73 /// doesn't access a Future or Promise object from more than one thread at a
74 /// time there won't be any problems.
78 /// This must be heap-constructed. There's probably a way to enforce that in
79 /// code but since this is just internal detail code and I don't know how
80 /// off-hand, I'm punting.
// Destructor sanity check: both owners (the Future and the Promise) must
// already have detached — attached_ counted down to zero — before the Core
// is destroyed.
83 assert(attached_ == 0);
// The Core is pinned in memory: a Future and a Promise each hold a raw
// pointer to it, so it is neither copyable nor movable.
87 Core(Core const&) = delete;
88 Core& operator=(Core const&) = delete;
90 // not movable (see comment in the implementation of Future::then)
91 Core(Core&&) noexcept = delete;
92 Core& operator=(Core&&) = delete;
94 /// May call from any thread
// True once setResult() has run: the FSM is in a state where result_ is
// populated (OnlyResult here; the remaining case labels are elided in this
// excerpt — presumably Armed and Done as well, TODO confirm against the
// full source).
95 bool hasResult() const {
96 switch (fsm_.getState()) {
97 case State::OnlyResult:
108 /// May call from any thread
113 /// May call from any thread
// Result accessor (body partially elided): asking for the value before the
// Promise has been fulfilled throws FutureNotReady.
118 throw FutureNotReady();
122 template <typename F>
// Wrapper that lets a small callback be placement-constructed inside the
// in-situ lambdaBuf_ storage (avoiding a heap allocation in std::function).
// It destroys itself right after its single invocation via SCOPE_EXIT,
// since no one else will ever run its destructor.
123 class LambdaBufHelper {
125 explicit LambdaBufHelper(F&& func) : func_(std::forward<F>(func)) {}
126 void operator()(Try<T>&& t) {
127 SCOPE_EXIT { this->~LambdaBufHelper(); };
134 /// Call only from Future thread.
// Installs the continuation. Small-buffer optimization: if the wrapped
// callback fits in lambdaBuf_ it is placement-constructed there; otherwise
// it is moved into callback_ directly. If a result already exists
// (OnlyResult) the FSM transitions to Armed and the callback is then fired
// (via the maybeCallback path guarded by transitionToArmed below).
135 template <typename F>
136 void setCallback(F func) {
137 bool transitionToArmed = false;
138 auto setCallback_ = [&]{
// Capture the caller's RequestContext so the callback later runs under it.
139 context_ = RequestContext::saveContext();
141 // Move the lambda into the Core if it fits
142 if (sizeof(LambdaBufHelper<F>) <= lambdaBufSize) {
143 auto funcLoc = static_cast<LambdaBufHelper<F>*>((void*)lambdaBuf_);
144 new (funcLoc) LambdaBufHelper<F>(std::forward<F>(func));
145 callback_ = std::ref(*funcLoc);
147 callback_ = std::move(func);
// FSM dispatch (switch structure partially elided in this excerpt): from
// Start the state becomes OnlyCallback; from OnlyResult it becomes Armed.
153 FSM_UPDATE(fsm_, State::OnlyCallback, setCallback_);
156 case State::OnlyResult:
157 FSM_UPDATE(fsm_, State::Armed, setCallback_);
158 transitionToArmed = true;
// A second setCallback is a caller bug, not a recoverable condition.
161 case State::OnlyCallback:
164 throw std::logic_error("setCallback called twice");
167 // we could always call this, it is an optimization to only call it when
168 // it might be needed.
169 if (transitionToArmed) {
174 /// Call only from Promise thread
// Stores the result. Mirrors setCallback: from Start the state becomes
// OnlyResult; if a callback is already installed (OnlyCallback) the FSM is
// armed and the callback is then fired (guarded by transitionToArmed).
175 void setResult(Try<T>&& t) {
176 bool transitionToArmed = false;
177 auto setResult_ = [&]{ result_ = std::move(t); };
180 FSM_UPDATE(fsm_, State::OnlyResult, setResult_);
183 case State::OnlyCallback:
184 FSM_UPDATE(fsm_, State::Armed, setResult_);
185 transitionToArmed = true;
// Fulfilling the same Promise twice is a caller bug.
188 case State::OnlyResult:
191 throw std::logic_error("setResult called twice");
194 if (transitionToArmed) {
199 /// Called by a destructing Future (in the Future thread, by definition)
// Re-activates first so that a deactivated-but-armed callback still gets a
// chance to run even though the Future object itself is going away.
200 void detachFuture() {
201 activateNoDeprecatedWarning();
205 /// Called by a destructing Promise (in the Promise thread, by definition)
206 void detachPromise() {
207 // detachPromise() and setResult() should never be called in parallel
208 // so we don't need to protect this.
// NOTE(review): presumably guarded by an "if (!result_)" check (elided in
// this excerpt): a Promise destroyed without being fulfilled completes the
// Future with a BrokenPromise exception.
210 setResult(Try<T>(exception_wrapper(BrokenPromise())));
215 /// May call from any thread
// Deprecated activation API: deactivate() defers callback execution,
// activate() re-enables it (bodies partially elided in this excerpt).
216 void deactivate() DEPRECATED {
220 /// May call from any thread
221 void activate() DEPRECATED {
222 activateNoDeprecatedWarning();
225 /// May call from any thread
226 bool isActive() { return active_; }
228 /// Call only from Future thread
// Records the executor the callback should be dispatched onto (consumed in
// doCallback); read back via getExecutor().
229 void setExecutor(Executor* x) {
233 Executor* getExecutor() {
237 /// Call only from Future thread
// Raises an interrupt toward the Promise side. Only the first interrupt is
// recorded, and only while no result exists yet; the handler (if already
// installed) is invoked immediately, under interruptLock_.
238 void raise(exception_wrapper e) {
239 std::lock_guard<decltype(interruptLock_)> guard(interruptLock_);
240 if (!interrupt_ && !hasResult()) {
241 interrupt_ = folly::make_unique<exception_wrapper>(std::move(e));
242 if (interruptHandler_) {
243 interruptHandler_(*interrupt_);
248 /// Call only from Promise thread
// Installs the Promise-side interrupt handler. NOTE(review): the lines
// elided here presumably invoke fn right away when an interrupt is already
// pending — TODO confirm against the full source.
249 void setInterruptHandler(std::function<void(exception_wrapper const&)> fn) {
250 std::lock_guard<decltype(interruptLock_)> guard(interruptLock_);
255 interruptHandler_ = std::move(fn);
261 void activateNoDeprecatedWarning() {
// If both sides are ready (Armed) and the Core is active, transition to
// Done and run the callback exactly once: FSM_UPDATE2 runs doCallback only
// for the thread that wins the Armed -> Done transition.
266 void maybeCallback() {
270 FSM_UPDATE2(fsm_, State::Done, []{},
271 std::bind(&Core::doCallback, this));
// doCallback (parts elided): restore the RequestContext captured in
// setCallback, then run the callback — via the executor when one was set,
// inline otherwise.
281 RequestContext::setContext(context_);
283 // TODO(6115514) semantic race on reading executor_ and setExecutor()
284 Executor* x = executor_;
286 ++attached_; // keep Core alive until executor did its thing
288 x->add([this]() mutable {
289 SCOPE_EXIT { detachOne(); };
290 callback_(std::move(*result_));
// If the executor rejects the task, surface that exception as the result
// and run the callback inline (surrounding try/catch elided here).
293 result_ = Try<T>(exception_wrapper(std::current_exception()));
294 callback_(std::move(*result_));
297 callback_(std::move(*result_));
// detachOne (body partially elided): drop one reference; presumably the
// Core deletes itself when the count reaches zero.
302 auto a = --attached_;
// Finite state machine driving the callback/result handshake; starts in
// State::Start.
310 FSM<State> fsm_ {State::Start};
// Reference count: starts at 2 because exactly one Future and one Promise
// own this Core; each detach decrements it (see detachOne / ~Core assert).
311 std::atomic<unsigned char> attached_ {2};
312 std::atomic<bool> active_ {true};
// Protects interrupt_ and interruptHandler_ (see raise /
// setInterruptHandler).
313 folly::MicroSpinLock interruptLock_ {0};
314 folly::Optional<Try<T>> result_ {};
315 std::function<void(Try<T>&&)> callback_ {nullptr};
// In-situ storage for small callbacks (see LambdaBufHelper): 8 pointers.
316 static constexpr size_t lambdaBufSize = 8 * sizeof(void*);
317 char lambdaBuf_[lambdaBufSize];
// RequestContext captured in setCallback and restored around the callback.
318 std::shared_ptr<RequestContext> context_ {nullptr};
319 std::atomic<Executor*> executor_ {nullptr};
320 std::unique_ptr<exception_wrapper> interrupt_ {};
321 std::function<void(exception_wrapper const&)> interruptHandler_ {nullptr};
324 template <typename... Ts>
// Shared state for the variadic whenAll: collects one Try per input future
// into a tuple and fulfills the promise once all of them have landed.
325 struct VariadicContext {
326 VariadicContext() : total(0), count(0) {}
327 Promise<std::tuple<Try<Ts>... > > p;
328 std::tuple<Try<Ts>... > results;
// `total` is initialized in the ctor above and compared against `count` in
// whenAllVariadicHelper; its declaration is elided in this excerpt.
330 std::atomic<size_t> count;
331 typedef Future<std::tuple<Try<Ts>...>> type;
334 template <typename... Ts, typename THead, typename... Fs>
// Base case of the template tail-recursion (no futures left after head):
// stash head's result in its tuple slot and, if this was the last result
// to arrive, fulfill the promise with the whole tuple.
335 typename std::enable_if<sizeof...(Fs) == 0, void>::type
336 whenAllVariadicHelper(VariadicContext<Ts...> *ctx, THead&& head, Fs&&... tail) {
337 head.setCallback_([ctx](Try<typename THead::value_type>&& t) {
// sizeof...(Ts) - sizeof...(Fs) - 1 is this future's position in the pack.
338 std::get<sizeof...(Ts) - sizeof...(Fs) - 1>(ctx->results) = std::move(t);
339 if (++ctx->count == ctx->total) {
340 ctx->p.setValue(std::move(ctx->results));
346 template <typename... Ts, typename THead, typename... Fs>
// Recursive case (more futures remain after head): install the same
// per-future callback as the base case, then recurse on the tail.
347 typename std::enable_if<sizeof...(Fs) != 0, void>::type
348 whenAllVariadicHelper(VariadicContext<Ts...> *ctx, THead&& head, Fs&&... tail) {
349 head.setCallback_([ctx](Try<typename THead::value_type>&& t) {
350 std::get<sizeof...(Ts) - sizeof...(Fs) - 1>(ctx->results) = std::move(t);
351 if (++ctx->count == ctx->total) {
352 ctx->p.setValue(std::move(ctx->results));
356 // template tail-recursion
357 whenAllVariadicHelper(ctx, std::forward<Fs>(tail)...);
360 template <typename T>
// Shared state for the homogeneous whenAll: one vector slot per input
// future; count tracks how many have completed.
361 struct WhenAllContext {
362 WhenAllContext() : count(0) {}
363 Promise<std::vector<Try<T> > > p;
364 std::vector<Try<T> > results;
365 std::atomic<size_t> count;
368 template <typename T>
// Shared state for whenAny: the first future to finish wins (done flips
// once); ref_count starts at n, the number of input futures.
369 struct WhenAnyContext {
370 explicit WhenAnyContext(size_t n) : done(false), ref_count(n) {};
371 Promise<std::pair<size_t, Try<T>>> p;
372 std::atomic<bool> done;
373 std::atomic<size_t> ref_count;
// Release one reference (enclosing function signature elided); presumably
// the last reference deletes the context — TODO confirm.
375 if (--ref_count == 0) {