
useState

We know we can use the useState hook to store state and update it inside a component. Here we discuss how it behaves in the mount stage and the update stage.
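
As a quick reminder of the API, a typical call looks like the following. This is a minimal usage sketch, not taken from the React source; the returned pair maps onto the [hook.memoizedState, dispatch] tuple discussed below.

function Counter() {
  // count is the stored state, setCount is the dispatch (set) function
  const [count, setCount] = React.useState(0);
  return <button onClick={() => setCount(count + 1)}>{count}</button>;
}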

Create State

In the mount stage, useState is handled by mountState:

function mountState<S>(
  initialState: (() => S) | S,
): [S, Dispatch<BasicStateAction<S>>] {
  // return a new hook
  const hook = mountWorkInProgressHook();
  if (typeof initialState === 'function') {
    // $FlowFixMe: Flow doesn't like mixed types
    initialState = initialState();
  }
  // assign the initialState
  hook.memoizedState = hook.baseState = initialState;
  const queue = (hook.queue = {
    pending: null,
    dispatch: null,
    lastRenderedReducer: basicStateReducer,
    lastRenderedState: (initialState: any),
  });
  const dispatch: Dispatch<
    BasicStateAction<S>,
  > = (queue.dispatch = (dispatchAction.bind(
    null,
    currentlyRenderingFiber,
    queue,
  ): any));
  // return the new state and the set function: dispatch, which is dispatchAction
  // bound to this fiber and this hook's queue
  return [hook.memoizedState, dispatch];
}
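
Note the typeof initialState === 'function' branch: the initial value may be a lazy initializer, which mountState invokes only once during mount. A minimal usage sketch, where buildInitialItems is a hypothetical (possibly expensive) helper:

// Lazy initial state: the initializer runs only on mount, matching the
// typeof initialState === 'function' branch above.
const [items, setItems] = React.useState(() => buildInitialItems());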

Update State

In the update stage, useState is handled by updateState:

function updateState<S>(
  initialState: (() => S) | S,
): [S, Dispatch<BasicStateAction<S>>] {
  return updateReducer(basicStateReducer, (initialState: any));
}

We can see that updateState calls updateReducer directly, passing basicStateReducer as the reducer; useState is therefore just a pre-configured useReducer.
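
For reference, basicStateReducer is roughly the following: a functional update is applied to the previous state, and any other action is used as the next state directly.

function basicStateReducer<S>(state: S, action: BasicStateAction<S>): S {
  // setState(prev => next) applies the function to the previous state;
  // setState(value) simply uses the value as the next state.
  return typeof action === 'function' ? action(state) : action;
}

With that in mind, here is updateReducer: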

function updateReducer<S, I, A>(
  reducer: (S, A) => S,
  initialArg: I,
  init?: I => S,
): [S, Dispatch<A>] {
  const hook = updateWorkInProgressHook();
  const queue = hook.queue;

  queue.lastRenderedReducer = reducer;

  const current: Hook = (currentHook: any);

  // The last rebase update that is NOT part of the base state.
  let baseQueue = current.baseQueue;

  // The last pending update that hasn't been processed yet.
  const pendingQueue = queue.pending;
  if (pendingQueue !== null) {
    // We have new updates that haven't been processed yet.
    // We'll add them to the base queue.
    if (baseQueue !== null) {
      // Merge the pending queue and the base queue; the pending queue is
      // appended after the current base queue.
      const baseFirst = baseQueue.next;
      const pendingFirst = pendingQueue.next;
      baseQueue.next = pendingFirst;
      pendingQueue.next = baseFirst;
    }
    current.baseQueue = baseQueue = pendingQueue;
    queue.pending = null;
  }

  // handle the queue of state updates
  if (baseQueue !== null) {
    // We have a queue to process.
    const first = baseQueue.next;
    let newState = current.baseState;

    let newBaseState = null;
    let newBaseQueueFirst = null;
    let newBaseQueueLast = null;
    let update = first;
    do {
      const updateLane = update.lane;
      if (!isSubsetOfLanes(renderLanes, updateLane)) {
        // Priority is insufficient. Skip this update. If this is the first
        // skipped update, the previous update/state is the new base
        // update/state.
        const clone: Update<S, A> = {
          lane: updateLane,
          action: update.action,
          eagerReducer: update.eagerReducer,
          eagerState: update.eagerState,
          next: (null: any),
        };
        if (newBaseQueueLast === null) {
          newBaseQueueFirst = newBaseQueueLast = clone;
          newBaseState = newState;
        } else {
          newBaseQueueLast = newBaseQueueLast.next = clone;
        }
        // Update the remaining priority in the queue.
        // TODO: Don't need to accumulate this. Instead, we can remove
        // renderLanes from the original lanes.
        currentlyRenderingFiber.lanes = mergeLanes(
          currentlyRenderingFiber.lanes,
          updateLane,
        );
        markSkippedUpdateLanes(updateLane);
      } else {
        // This update does have sufficient priority.

        if (newBaseQueueLast !== null) {
          const clone: Update<S, A> = {
            // This update is going to be committed so we never want to
            // uncommit it. Using NoLane works because 0 is a subset of all
            // bitmasks, so this will never be skipped by the check above.
            lane: NoLane,
            action: update.action,
            eagerReducer: update.eagerReducer,
            eagerState: update.eagerState,
            next: (null: any),
          };
          newBaseQueueLast = newBaseQueueLast.next = clone;
        }

        // Process this update.
        if (update.eagerReducer === reducer) {
          // If this update was processed eagerly, and its reducer matches the
          // current reducer, we can use the eagerly computed state.
          newState = ((update.eagerState: any): S);
        } else {
          const action = update.action;
          newState = reducer(newState, action);
        }
      }
      update = update.next;
    } while (update !== null && update !== first);

    if (newBaseQueueLast === null) {
      newBaseState = newState;
    } else {
      newBaseQueueLast.next = (newBaseQueueFirst: any);
    }

    // Mark that the fiber performed work, but only if the new state is
    // different from the current state.
    if (!is(newState, hook.memoizedState)) {
      markWorkInProgressReceivedUpdate();
    }

    hook.memoizedState = newState;
    hook.baseState = newBaseState;
    hook.baseQueue = newBaseQueueLast;

    queue.lastRenderedState = newState;
  }

  const dispatch: Dispatch<A> = (queue.dispatch: any);
  // return the new state and the set function: dispatch, which is dispatchAction
  // bound to this fiber and this hook's queue
  return [hook.memoizedState, dispatch];
}
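
The trickiest part above is the circular linked-list bookkeeping. Below is a minimal, framework-free sketch of how the pending queue is merged into the base queue, assuming plain objects with a next pointer; it mirrors the merge step above but is not React's actual code.

// Both lists are circular and each pointer refers to the LAST node,
// so list.next is the first node of that list.
function mergePendingIntoBase(baseQueue, pendingQueue) {
  if (baseQueue !== null) {
    const baseFirst = baseQueue.next;
    const pendingFirst = pendingQueue.next;
    // Splice: the pending list follows the base list, and the pending
    // list wraps back around to the start of the base list.
    baseQueue.next = pendingFirst;
    pendingQueue.next = baseFirst;
  }
  // The merged list still points at its last node, which is the
  // last pending update.
  return pendingQueue;
}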

Fire an update from a set function

Every set function returned by useState calls a function named dispatchAction.

Some dev-only and unrelated code is omitted below.

function dispatchAction<S, A>(
  fiber: Fiber,
  queue: UpdateQueue<S, A>,
  action: A,
) {
  // time of firing this update
  const eventTime = requestEventTime();
  // priority of this update
  const lane = requestUpdateLane(fiber);

  // create a new update
  const update: Update<S, A> = {
    lane,
    action,
    eagerReducer: null,
    eagerState: null,
    next: (null: any),
  };

  // Append the update to the end of the list.
  const pending = queue.pending;
  if (pending === null) {
    // No pending update in this hook; this is the first update. Create a circular list.
    update.next = update;
  } else {
    // There are pending updates; add this update after the last pending update.
    update.next = pending.next;
    pending.next = update;
  }
  // Make this update the newest update.
  queue.pending = update;

  const alternate = fiber.alternate;
  if (
    fiber === currentlyRenderingFiber ||
    (alternate !== null && alternate === currentlyRenderingFiber)
  ) {
    // This is a render phase update. Stash it in a lazily-created map of
    // queue -> linked list of updates. After this render pass, we'll restart
    // and apply the stashed updates on top of the work-in-progress hook.
    didScheduleRenderPhaseUpdateDuringThisPass = didScheduleRenderPhaseUpdate = true;
  } else {
    if (
      fiber.lanes === NoLanes &&
      (alternate === null || alternate.lanes === NoLanes)
    ) {
      // The queue is currently empty, which means we can eagerly compute the
      // next state before entering the render phase. If the new state is the
      // same as the current state, we may be able to bail out entirely.
      const lastRenderedReducer = queue.lastRenderedReducer;
      if (lastRenderedReducer !== null) {
        let prevDispatcher;
        try {
          const currentState: S = (queue.lastRenderedState: any);
          const eagerState = lastRenderedReducer(currentState, action);
          // Stash the eagerly computed state, and the reducer used to compute
          // it, on the update object. If the reducer hasn't changed by the
          // time we enter the render phase, then the eager state can be used
          // without calling the reducer again.
          update.eagerReducer = lastRenderedReducer;
          update.eagerState = eagerState;
          if (is(eagerState, currentState)) {
            // Fast path. We can bail out without scheduling React to re-render.
            // It's still possible that we'll need to rebase this update later,
            // if the component re-renders for a different reason and by that
            // time the reducer has changed.
            return;
          }
        } catch (error) {
          // Suppress the error. It will throw again in the render phase.
        } finally {
        }
      }
    }
    // Enter the reconciler process.
    scheduleUpdateOnFiber(fiber, lane, eventTime);
  }
}
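
One practical consequence of the eager-state fast path: when the hook's queue is empty and the eagerly computed state is identical (by Object.is) to the current state, the setter returns without scheduling a re-render. A minimal sketch, not from the React source:

function Toggle() {
  const [on, setOn] = React.useState(false);
  // While on is already false, setOn(false) hits the eager-state bailout:
  // lastRenderedReducer computes the same value, so no re-render is scheduled.
  return <button onClick={() => setOn(false)}>{String(on)}</button>;
}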