remove filter transactions and redundant inserts (#1030)
* remove filter transactions and redundant inserts

* fix sync-store tests

* fix:nits

---------

Co-authored-by: typedarray <[email protected]>
kyscott18 and 0xOlias authored Aug 15, 2024
1 parent f7aafb6 commit 5a3e473
Showing 2 changed files with 87 additions and 91 deletions.
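
The updated tests below now call syncStore.getIntervals() before syncStore.insertInterval(), which suggests that after this change the filter rows are registered by getIntervals() rather than re-inserted (and wrapped in a transaction) by insertInterval(). A minimal sketch of that assumed call order, where syncStore and filter come from the existing test helpers (setupDatabaseServices(context) and context.sources[n].filter), not defined here:

// Sketch only, mirroring the call order used by the updated tests.
// `syncStore` and `filter` are assumed to come from the test setup.

// Register the filter first; after this change, insertInterval() no longer
// upserts the filter row itself.
await syncStore.getIntervals({ filter });

// Record a completed block range for that filter. This now writes only the
// interval row (no transaction, no redundant insert into the filter tables).
await syncStore.insertInterval({ filter, interval: [0, 4] });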
32 changes: 32 additions & 0 deletions packages/core/src/sync-store/index.test.ts
@@ -210,6 +210,10 @@ test("getIntervals() handles factory trace filter logic", async (context) => {
test("getIntervals() handles block filter logic", async (context) => {
const { cleanup, syncStore } = await setupDatabaseServices(context);

await syncStore.getIntervals({
filter: context.sources[4].filter,
});

await syncStore.insertInterval({
filter: context.sources[4].filter,
interval: [0, 4],
@@ -980,6 +984,11 @@ test("pruneByChain deletes filters", async (context) => {
const { sources } = context;
const { syncStore, database, cleanup } = await setupDatabaseServices(context);

await syncStore.getIntervals({ filter: sources[0].filter });
await syncStore.getIntervals({ filter: sources[1].filter });
await syncStore.getIntervals({ filter: sources[2].filter });
await syncStore.getIntervals({ filter: sources[3].filter });

await syncStore.insertInterval({
filter: sources[0].filter,
interval: [1, 4],
@@ -1002,6 +1011,11 @@ test("pruneByChain deletes filters", async (context) => {
sources[2].filter.chainId = 2;
sources[3].filter.chainId = 2;

await syncStore.getIntervals({ filter: sources[0].filter });
await syncStore.getIntervals({ filter: sources[1].filter });
await syncStore.getIntervals({ filter: sources[2].filter });
await syncStore.getIntervals({ filter: sources[3].filter });

await syncStore.insertInterval({
filter: sources[0].filter,
interval: [1, 4],
@@ -1052,6 +1066,11 @@ test("pruneByChain updates filters", async (context) => {
const { sources } = context;
const { syncStore, database, cleanup } = await setupDatabaseServices(context);

await syncStore.getIntervals({ filter: sources[0].filter });
await syncStore.getIntervals({ filter: sources[1].filter });
await syncStore.getIntervals({ filter: sources[2].filter });
await syncStore.getIntervals({ filter: sources[3].filter });

await syncStore.insertInterval({
filter: sources[0].filter,
interval: [0, 4],
@@ -1074,6 +1093,11 @@ test("pruneByChain updates filters", async (context) => {
sources[2].filter.chainId = 2;
sources[3].filter.chainId = 2;

await syncStore.getIntervals({ filter: sources[0].filter });
await syncStore.getIntervals({ filter: sources[1].filter });
await syncStore.getIntervals({ filter: sources[2].filter });
await syncStore.getIntervals({ filter: sources[3].filter });

await syncStore.insertInterval({
filter: sources[0].filter,
interval: [0, 4],
@@ -1132,13 +1156,17 @@ test("pruneByChain deletes block filters", async (context) => {
const { sources } = context;
const { syncStore, database, cleanup } = await setupDatabaseServices(context);

await syncStore.getIntervals({ filter: sources[4].filter });

await syncStore.insertInterval({
filter: sources[4].filter,
interval: [2, 4],
});

sources[4].filter.chainId = 2;

await syncStore.getIntervals({ filter: sources[4].filter });

await syncStore.insertInterval({
filter: sources[4].filter,
interval: [2, 4],
@@ -1159,13 +1187,17 @@ test("pruneByChain updates block filters", async (context) => {
const { sources } = context;
const { syncStore, database, cleanup } = await setupDatabaseServices(context);

await syncStore.getIntervals({ filter: sources[4].filter });

await syncStore.insertInterval({
filter: sources[4].filter,
interval: [0, 4],
});

sources[4].filter.chainId = 2;

await syncStore.getIntervals({ filter: sources[4].filter });

await syncStore.insertInterval({
filter: sources[4].filter,
interval: [0, 4],
146 changes: 55 additions & 91 deletions packages/core/src/sync-store/index.ts
@@ -171,108 +171,72 @@ export const createSyncStore = ({
}): SyncStore => ({
insertInterval: async ({ filter, interval }) =>
db.wrap({ method: "insertInterval" }, async () => {
const intervalToBlock = (interval: Interval) => ({
startBlock: formatBig(sql, interval[0]),
endBlock: formatBig(sql, interval[1]),
});
const startBlock = formatBig(sql, interval[0]);
const endBlock = formatBig(sql, interval[1]);

switch (filter.type) {
case "log":
{
await db.transaction().execute(async (tx) => {
for (const fragment of buildLogFilterFragments(filter)) {
if (isAddressFactory(filter.address)) {
await tx
.insertInto("factoryLogFilters")
.values(fragment as LogFilterFragment<Factory>)
.onConflict((oc) => oc.column("id").doNothing())
.execute();

await tx
.insertInto("factoryLogFilterIntervals")
.values({
factoryId: fragment.id,
...intervalToBlock(interval),
})
.execute();
} else {
await tx
.insertInto("logFilters")
.values(fragment)
.onConflict((oc) => oc.column("id").doNothing())
.execute();

await tx
.insertInto("logFilterIntervals")
.values({
logFilterId: fragment.id,
...intervalToBlock(interval),
})
.execute();
}
}
});
}
break;

case "block":
{
const fragment = buildBlockFilterFragment(filter);
await db.transaction().execute(async (tx) => {
await tx
.insertInto("blockFilters")
.values(fragment)
.onConflict((oc) => oc.column("id").doNothing())
case "log": {
for (const fragment of buildLogFilterFragments(filter)) {
if (isAddressFactory(filter.address)) {
await db
.insertInto("factoryLogFilterIntervals")
.values({
factoryId: fragment.id,
startBlock,
endBlock,
})
.execute();

await tx
.insertInto("blockFilterIntervals")
} else {
await db
.insertInto("logFilterIntervals")
.values({
blockFilterId: fragment.id,
...intervalToBlock(interval),
logFilterId: fragment.id,
startBlock,
endBlock,
})
.execute();
});
}
}
break;
}

case "callTrace":
{
await db.transaction().execute(async (tx) => {
for (const fragment of buildTraceFilterFragments(filter)) {
if (isAddressFactory(filter.toAddress)) {
await tx
.insertInto("factoryTraceFilters")
.values(fragment as TraceFilterFragment<Factory>)
.onConflict((oc) => oc.column("id").doNothing())
.execute();

await tx
.insertInto("factoryTraceFilterIntervals")
.values({
factoryId: fragment.id,
...intervalToBlock(interval),
})
.execute();
} else {
await tx
.insertInto("traceFilters")
.values(fragment)
.onConflict((oc) => oc.column("id").doNothing())
.execute();

await tx
.insertInto("traceFilterIntervals")
.values({
traceFilterId: fragment.id,
...intervalToBlock(interval),
})
.execute();
}
}
            });
          }
          break;
case "block": {
const fragment = buildBlockFilterFragment(filter);
await db
.insertInto("blockFilterIntervals")
.values({
blockFilterId: fragment.id,
startBlock,
endBlock,
})
.execute();
break;
}

case "callTrace": {
for (const fragment of buildTraceFilterFragments(filter)) {
if (isAddressFactory(filter.toAddress)) {
await db
.insertInto("factoryTraceFilterIntervals")
.values({
factoryId: fragment.id,
startBlock,
endBlock,
})
.execute();
} else {
await db
.insertInto("traceFilterIntervals")
.values({
traceFilterId: fragment.id,
startBlock,
endBlock,
})
.execute();
}
}
break;
}

default:
never(filter);
