Mirror of https://github.com/facebook/lexical.git, synced 2025-08-06 16:39:33 +08:00
Use for...of for performance optimizations (#1509)
* Use for...of for performance optimizations
* Fix ignores
* Remove console times
Committed by acywatson
Parent: 7dbba00306
Commit: db26e25d7a
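The recurring change in the hunks below is to stop copying Maps and Sets into arrays with Array.from before looping, and to iterate them directly with for...of instead. A minimal sketch of the before/after shape, using a hypothetical listeners Set rather than anything from the Lexical source:

  const listeners = new Set([() => console.log('a'), () => console.log('b')]);

  // Before: copy the Set into an array and loop by index.
  const arr = Array.from(listeners);
  for (let i = 0; i < arr.length; i++) {
    arr[i](); // invoke each listener
  }

  // After: iterate the Set directly; no intermediate array is allocated.
  for (const listener of listeners) {
    listener();
  }

The no-for-of-loops/no-for-of-loops lint rule previously blocked this pattern because, as its comment notes, for...of requires a Symbol polyfill; the diff therefore also removes the rule, its eslint-disable comments, and the eslint-plugin-no-for-of-loops dependency.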
@@ -58,7 +58,6 @@ module.exports = {
     // import helps to configure simple-import-sort
     'import',
     'jest',
-    'no-for-of-loops',
     'no-function-declare-after-return',
     'react',
     'no-only-tests',
@@ -109,10 +108,6 @@ module.exports = {

     'no-debugger': ERROR,

-    // Prevent for...of loops because they require a Symbol polyfill.
-    // You can disable this rule for code that isn't shipped (e.g. build scripts and tests).
-    'no-for-of-loops/no-for-of-loops': ERROR,
-
     // Prevent function declarations after return statements
     'no-function-declare-after-return/no-function-declare-after-return': ERROR,

@@ -5,8 +5,6 @@
  * LICENSE file in the root directory of this source tree.
  */

-/* eslint-disable no-for-of-loops/no-for-of-loops */
-
 'use strict';

 const {markdown} = require('danger');
@@ -108,7 +108,6 @@
     "eslint-plugin-flowtype": "^8.0.3",
     "eslint-plugin-jest": "^24.4.0",
     "eslint-plugin-jsx-a11y": "^6.4.1",
-    "eslint-plugin-no-for-of-loops": "^1.0.1",
     "eslint-plugin-no-function-declare-after-return": "^1.1.0",
     "eslint-plugin-no-only-tests": "^2.6.0",
     "eslint-plugin-react": "^7.24.0",
@@ -174,9 +174,8 @@ function $createNodesFromDOM(
     currentLexicalNode = transformOutput.node;
     if (currentLexicalNode !== null) {
       lexicalNodes.push(currentLexicalNode);
-      const forChildFunctions = Array.from(forChildMap.values());
-      for (let i = 0; i < forChildFunctions.length; i++) {
-        forChildFunctions[i](currentLexicalNode);
+      for (const [, forChildFunction] of forChildMap) {
+        forChildFunction(currentLexicalNode);
       }
     }

@@ -53,7 +53,6 @@ function isIndentPermitted(maxDepth: number): boolean {

   let totalDepth = 0;

-  // eslint-disable-next-line no-for-of-loops/no-for-of-loops
   for (const elementNode of elementNodesInSelection) {
     if ($isListNode(elementNode)) {
       totalDepth = Math.max($getListDepth(elementNode) + 1, totalDepth);
@@ -11,7 +11,6 @@ import {DEFAULT_SETTINGS} from './appSettings';

 // override default options with query parameters if any
 const urlSearchParams = new URLSearchParams(window.location.search);
-// eslint-disable-next-line no-for-of-loops/no-for-of-loops
 for (const param of Object.keys(DEFAULT_SETTINGS)) {
   if (urlSearchParams.has(param)) {
     try {
@@ -88,7 +88,6 @@ export default function TablePlugin(): React$Node {
     const tableSelections = new Map<NodeKey, TableSelection>();

     return editor.addListener('mutation', TableNode, (nodeMutations) => {
-      // eslint-disable-next-line no-for-of-loops/no-for-of-loops
       for (const [nodeKey, mutation] of nodeMutations) {
         if (mutation === 'created') {
           editor.update(() => {
@@ -37,7 +37,6 @@ describe('LexicalNodeHelpers tests', () => {
     jest.restoreAllMocks();
   });

-  // eslint-disable-next-line no-for-of-loops/no-for-of-loops
   for (const plugin of ['PlainTextPlugin', 'RichTextPlugin']) {
     it(`${plugin} custom initialEditorState`, async () => {
       let editor;
@@ -100,7 +100,6 @@ function findOffset(
   if (typeof Segmenter === 'function') {
     const segmenter = new Segmenter();
     const graphemes = segmenter.segment(text);
-    // eslint-disable-next-line no-for-of-loops/no-for-of-loops
     for (const {segment: grapheme} of graphemes) {
       const nextOffset = offset + strlen(grapheme);
       if (nextOffset > maxCharacters) {
@@ -56,28 +56,23 @@ export type HistoryState = {

 function getDirtyNodes(
   editorState: EditorState,
-  dirtyLeavesSet: Set<NodeKey>,
-  dirtyElementsSet: Map<NodeKey, IntentionallyMarkedAsDirtyElement>,
+  dirtyLeaves: Set<NodeKey>,
+  dirtyElements: Map<NodeKey, IntentionallyMarkedAsDirtyElement>,
 ): Array<LexicalNode> {
-  const dirtyLeaves = Array.from(dirtyLeavesSet);
-  const dirtyElements = Array.from(dirtyElementsSet);
   const nodeMap = editorState._nodeMap;
   const nodes = [];

-  for (let i = 0; i < dirtyLeaves.length; i++) {
-    const dirtyLeafKey = dirtyLeaves[i];
+  for (const dirtyLeafKey of dirtyLeaves) {
     const dirtyLeaf = nodeMap.get(dirtyLeafKey);
     if (dirtyLeaf !== undefined) {
       nodes.push(dirtyLeaf);
     }
   }

-  for (let i = 0; i < dirtyElements.length; i++) {
-    const intentionallyMarkedAsDirty = dirtyElements[i][1];
+  for (const [dirtyElementKey, intentionallyMarkedAsDirty] of dirtyElements) {
     if (!intentionallyMarkedAsDirty) {
       continue;
     }
-    const dirtyElementKey = dirtyElements[i][0];
     const dirtyElement = nodeMap.get(dirtyElementKey);
     if (dirtyElement !== undefined && !$isRootNode(dirtyElement)) {
       nodes.push(dirtyElement);
@@ -241,10 +241,7 @@ export function syncLexicalDecoratorMapToYjs(
   yjsMap: YMap,
 ): void {
   const internalMap = decoratorMap._map;
-  const keys = Array.from(internalMap.keys());
-
-  for (let i = 0; i < keys.length; i++) {
-    const key = keys[i];
+  for (const [key] of internalMap) {
     syncLexicalDecoratorMapKeyToYjs(
       binding,
       collabNode,
@@ -75,15 +75,10 @@ export function $garbageCollectDetachedNodes(
   dirtyLeaves: Set<NodeKey>,
   dirtyElements: Map<NodeKey, IntentionallyMarkedAsDirtyElement>,
 ): void {
-  const dirtyLeavesArr = Array.from(dirtyLeaves);
-  const dirtyLeavesLength = dirtyLeavesArr.length;
-  const dirtyElementsArr = Array.from(dirtyElements);
-  const dirtyElementsLength = dirtyElementsArr.length;
   const prevNodeMap = prevEditorState._nodeMap;
   const nodeMap = editorState._nodeMap;

-  for (let i = 0; i < dirtyLeavesLength; i++) {
-    const nodeKey = dirtyLeavesArr[i];
+  for (const nodeKey of dirtyLeaves) {
     const node = nodeMap.get(nodeKey);

     if (node !== undefined && !node.isAttached()) {
@@ -94,10 +89,8 @@ export function $garbageCollectDetachedNodes(
     }
   }

-  for (let i = 0; i < dirtyElementsLength; i++) {
-    const nodeKey = dirtyElementsArr[i][0];
+  for (const [nodeKey] of dirtyElements) {
     const node = nodeMap.get(nodeKey);
-
     if (node !== undefined) {
       // Garbage collect node and its children if they exist
       if (!node.isAttached()) {
@@ -196,10 +196,7 @@ export function $flushMutations(
     // is Lexical's "current" editor state. This is basically like
     // an internal revert on the DOM.
     if (badDOMTargets.size > 0) {
-      const entries = Array.from(badDOMTargets.entries());
-      for (let i = 0; i < entries.length; i++) {
-        const [targetDOM, targetNode] = entries[i];
-
+      for (const [targetDOM, targetNode] of badDOMTargets) {
         if ($isElementNode(targetNode)) {
           const childKeys = targetNode.__children;
           let currentDOM = targetDOM.firstChild;
@@ -285,10 +285,10 @@ export class NodeSelection implements BaseSelection {
   }

   getNodes(): Array<LexicalNode> {
-    const objects = Array.from(this._nodes);
+    const objects = this._nodes;
     const nodes = [];
-    for (let i = 0; i < objects.length; i++) {
-      const node = $getNodeByKey(objects[i]);
+    for (const object of objects) {
+      const node = $getNodeByKey(object);
       if (node !== null) {
         nodes.push(node);
       }
@@ -141,10 +141,7 @@ function $normalizeAllDirtyTextNodes(
 ): void {
   const dirtyLeaves = editor._dirtyLeaves;
   const nodeMap = editorState._nodeMap;
-  const dirtyLeavesLength = dirtyLeaves.size;
-  const dDirtyLeavesArr = Array.from(dirtyLeaves);
-  for (let i = 0; i < dirtyLeavesLength; i++) {
-    const nodeKey = dDirtyLeavesArr[i];
+  for (const nodeKey of dirtyLeaves) {
     const node = nodeMap.get(nodeKey);
     if ($isTextNode(node) && node.isSimpleText() && !node.isUnmergeable()) {
       $normalizeTextNode(node);
@@ -183,9 +180,7 @@ function $applyAllTransforms(
   if (untransformedDirtyLeavesLength > 0) {
     // We leverage editor._dirtyLeaves to track the new dirty leaves after the transforms
     editor._dirtyLeaves = new Set();
-    const untransformedDirtyLeavesArr = Array.from(untransformedDirtyLeaves);
-    for (let i = 0; i < untransformedDirtyLeavesLength; i++) {
-      const nodeKey = untransformedDirtyLeavesArr[i];
+    for (const nodeKey of untransformedDirtyLeaves) {
       const node = nodeMap.get(nodeKey);
       if ($isTextNode(node) && node.isSimpleText() && !node.isUnmergeable()) {
         $normalizeTextNode(node);
@@ -211,18 +206,12 @@ function $applyAllTransforms(
     // new ones caused by element transforms
     editor._dirtyLeaves = new Set();
     editor._dirtyElements = new Map();
-    const untransformedDirtyElementsArr = Array.from(
-      untransformedDirtyElements,
-    );
-    for (let i = 0; i < untransformedDirtyElementsLength; i++) {
-      const currentUntransformedDirtyElement = untransformedDirtyElementsArr[i];
+    for (const currentUntransformedDirtyElement of untransformedDirtyElements) {
       const nodeKey = currentUntransformedDirtyElement[0];
       const intentionallyMarkedAsDirty = currentUntransformedDirtyElement[1];
       if (nodeKey === 'root' || !intentionallyMarkedAsDirty) {
         continue;
       }
-      const nodeIntentionallyMarkedAsDirty =
-        untransformedDirtyElementsArr[i][1];
       const node = nodeMap.get(nodeKey);
       if (
         node !== undefined &&
@@ -230,7 +219,7 @@ function $applyAllTransforms(
       ) {
         $applyTransforms(editor, node, transformsCache);
       }
-      dirtyElements.set(nodeKey, nodeIntentionallyMarkedAsDirty);
+      dirtyElements.set(nodeKey, intentionallyMarkedAsDirty);
     }
     untransformedDirtyLeaves = editor._dirtyLeaves;
     untransformedDirtyLeavesLength = untransformedDirtyLeaves.size;
@@ -460,9 +449,9 @@ export function triggerListeners(
   const previouslyUpdating = editor._updating;
   editor._updating = isCurrentlyEnqueuingUpdates;
   try {
-    const listeners = Array.from(editor._listeners[type]);
-    for (let i = 0; i < listeners.length; i++) {
-      listeners[i](...payload);
+    const listeners = editor._listeners[type];
+    for (const listener of listeners) {
+      listener(...payload);
     }
   } finally {
     editor._updating = previouslyUpdating;
@@ -486,9 +475,9 @@ export function triggerCommandListeners(
   for (let e = 0; e < editors.length; e++) {
     const currentEditor = editors[e];
     const commandListeners = currentEditor._listeners.command;
-    const listeners = Array.from(commandListeners[i]);
-    for (let s = 0; s < listeners.length; s++) {
-      if (listeners[s](type, payload, editor) === true) {
+    const listeners = commandListeners[i];
+    for (const listener of listeners) {
+      if (listener(type, payload, editor) === true) {
         return true;
       }
     }
@@ -359,11 +359,7 @@ export function markAllNodesAsDirty(editor: LexicalEditor, type: string): void {
       return;
     }
     const nodeMap = editorState._nodeMap;
-    const nodeMapEntries = Array.from(nodeMap);
-    // For...of would be faster here, but this will get
-    // compiled away to a slow-path with Babel.
-    for (let i = 0; i < nodeMapEntries.length; i++) {
-      const node = nodeMapEntries[i][1];
+    for (const [, node] of nodeMap) {
       node.markDirty();
     }
   },
@@ -884,11 +880,9 @@ export function $nodesOfType<T: LexicalNode>(klass: Class<T>): Array<T> {
   const editorState = getActiveEditorState();
   const readOnly = editorState._readOnly;
   const klassType = klass.getType();
-  const nodes = Array.from(editorState._nodeMap.values());
-  const nodesLength = nodes.length;
+  const nodes = editorState._nodeMap;
   const nodesOfType = [];
-  for (let i = 0; i < nodesLength; i++) {
-    const node = nodes[i];
+  for (const [, node] of nodes) {
     if (
       node instanceof klass &&
       node.__type === klassType &&
@@ -126,9 +126,9 @@ export class DecoratorMap {

   set(key: string, value: DecoratorStateValue): void {
     this._map.set(key, value);
-    const observers = Array.from(this._observers);
-    for (let i = 0; i < observers.length; i++) {
-      observers[i](key, value);
+    const observers = this._observers;
+    for (const observer of observers) {
+      observer(key, value);
     }
   }

@@ -20,7 +20,6 @@ function invertObject(targetObj /* : ErrorMap */) /* : ErrorMap */ {
   const result = {};
   const mapKeys = Object.keys(targetObj);

-  // eslint-disable-next-line no-for-of-loops/no-for-of-loops
   for (const originalKey of mapKeys) {
     const originalVal = targetObj[originalKey];

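Several of the hunks above iterate a Map directly and destructure its [key, value] entries in the loop head, sometimes using an empty slot to ignore the key (for example, for (const [, node] of nodeMap)). A small standalone illustration of that destructuring pattern, with hypothetical names not taken from the Lexical source:

  const nodeMap = new Map([
    ['a', {type: 'text'}],
    ['b', {type: 'paragraph'}],
  ]);

  // Map iteration yields [key, value] pairs; destructuring picks out both.
  for (const [key, node] of nodeMap) {
    console.log(key, node.type);
  }

  // An empty slot in the pattern skips the key when only the value is needed.
  for (const [, node] of nodeMap) {
    console.log(node.type);
  }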