Merge branch 'master' into hive_testing
ch4r10t33r committed Sep 28, 2023
2 parents fa6d30b + f9440d0 commit b83b2b8
Showing 26 changed files with 3,608 additions and 3,161 deletions.
38 changes: 34 additions & 4 deletions README.md
@@ -13,7 +13,19 @@
</p>
</div>

### Warning! This repo/software is under active development
<div align="center">
<p>
<b>
Warning! This repo/software is under active development
</b>
</p>
</div>

## Important links

**[Install Skandha](https://etherspot.fyi/skandha/installation)**
| [Chains supported](https://etherspot.fyi/skandha/chains)
| [UserOp Fee history](https://etherspot.fyi/skandha/feehistory)

## ⚙️ How to run (from Source code)

@@ -22,7 +34,7 @@ Run with one-liner:
```sh
curl -fsSL https://skandha.run | bash
```
or follow steps below:
Or follow the steps below:

1. install all dependencies by running `yarn`
2. build `yarn build && yarn bootstrap`
@@ -83,8 +95,26 @@ or follow steps below:
}
}
```
## 💬 Contact

#### Mempool_ID of the canonical mempool on various networks
If you have any questions or feedback about the ERC-4337 Bundler project, please feel free to reach out to us.

- [Follow on Twitter](https://twitter.com/etherspot)
- [Join our discord](https://discord.etherspot.io/)

## 📄 License

Licensed under the [MIT License](https://github.com/etherspot/skandha/blob/master/LICENSE).

## 🤝 Shared Mempool (P2P)

- Sepolia | QmdDwVFoEEcgv5qnaTB8ncnXGMnqrhnA5nYpRr4ouWe4AT | https://ipfs.io/ipfs/QmdDwVFoEEcgv5qnaTB8ncnXGMnqrhnA5nYpRr4ouWe4AT?filename=sepolia_canonical_mempool.yaml
- Mumbai | QmQfRyE9iVTBqZ17hPSP4tuMzaez83Y5wD874ymyRtj9VE | https://ipfs.io/ipfs/QmQfRyE9iVTBqZ17hPSP4tuMzaez83Y5wD874ymyRtj9VE?filename=mumbai_canonical_mempool.yaml
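
Each manifest above is a plain YAML file pinned to IPFS. As a minimal, hypothetical sketch (not part of this commit), it can be fetched through the public gateway with Node's built-in `fetch` (the root `package.json` requires Node >= 18); the URL below is the Sepolia entry from the list above:

```ts
// Hypothetical helper, not part of this commit: download the canonical
// Sepolia mempool manifest from the public IPFS gateway listed above.
const SEPOLIA_MEMPOOL_URL =
  "https://ipfs.io/ipfs/QmdDwVFoEEcgv5qnaTB8ncnXGMnqrhnA5nYpRr4ouWe4AT" +
  "?filename=sepolia_canonical_mempool.yaml";

async function fetchMempoolManifest(url: string): Promise<string> {
  // Node >= 18 ships a global fetch, matching the engines field in package.json.
  const res = await fetch(url);
  if (!res.ok) throw new Error(`IPFS gateway returned HTTP ${res.status}`);
  return res.text(); // raw YAML text; parse with js-yaml or similar if needed
}

fetchMempoolManifest(SEPOLIA_MEMPOOL_URL)
  .then((yaml) => console.log(yaml))
  .catch(console.error);
```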

## 🔢 Statistics
![Alt](https://repobeats.axiom.co/api/embed/4d7ec3ece88b2461c5b1757574321f4f6540cdd5.svg "Skandha analytics image")

## 🙏 Acknowledgements

- [eth-infinitsm](https://github.com/eth-infinitism)
- [lodestar](https://github.com/ChainSafe/lodestar)
2 changes: 1 addition & 1 deletion lerna.json
@@ -4,7 +4,7 @@
],
"npmClient": "yarn",
"useWorkspaces": true,
"version": "1.0.1-alpha",
"version": "1.0.2-alpha",
"stream": "true",
"command": {
"version": {
12 changes: 10 additions & 2 deletions package.json
@@ -1,7 +1,7 @@
{
"name": "root",
"private": true,
"version": "1.0.1-alpha",
"version": "1.0.2-alpha",
"engines": {
"node": ">=18.0.0"
},
@@ -38,5 +38,13 @@
"license": "MIT",
"workspaces": [
"packages/*"
]
],
"resolutions": {
"@libp2p/interface-connection-manager": "1.3.8"
},
"overrides": {
"libp2p": {
"@libp2p/interface-connection-manager": "1.3.8"
}
}
}
6 changes: 3 additions & 3 deletions packages/api/package.json
@@ -1,6 +1,6 @@
{
"name": "api",
"version": "1.0.1-alpha",
"version": "1.0.2-alpha",
"description": "The API module of Etherspot bundler client",
"author": "Etherspot",
"homepage": "https://https://github.com/etherspot/skandha#readme",
@@ -35,12 +35,12 @@
"class-transformer": "0.5.1",
"class-validator": "0.14.0",
"ethers": "5.7.2",
"executor": "^1.0.1-alpha",
"executor": "1.0.2-alpha",
"fastify": "4.14.1",
"pino": "8.11.0",
"pino-pretty": "10.0.0",
"reflect-metadata": "0.1.13",
"types": "^1.0.1-alpha"
"types": "1.0.2-alpha"
},
"devDependencies": {
"@types/connect": "3.4.35"
5 changes: 1 addition & 4 deletions packages/api/src/app.ts
@@ -125,10 +125,7 @@ export class ApiApp {
break;

case BundlerRPCMethods.debug_bundler_setMempool:
console.log("Calling setMempool");
result = await debugApi.setMempool(
params[0],
);
result = await debugApi.setMempool(params[0]);
break;
}
}
19 changes: 13 additions & 6 deletions packages/cli/package.json
@@ -1,6 +1,6 @@
{
"name": "cli",
"version": "1.0.1-alpha",
"version": "1.0.2-alpha",
"description": "> TODO: description",
"author": "zincoshine <[email protected]>",
"homepage": "https://https://github.com/etherspot/skandha#readme",
@@ -31,14 +31,21 @@
"url": "https://https://github.com/etherspot/skandha/issues"
},
"dependencies": {
"api": "^1.0.1-alpha",
"db": "^1.0.1-alpha",
"executor": "^1.0.1-alpha",
"@chainsafe/discv5": "3.0.0",
"@libp2p/interface-connection": "3.0.2",
"@libp2p/interface-connection-manager": "1.3.8",
"@libp2p/interface-peer-id": "2.0.1",
"@libp2p/peer-id-factory": "2.0.1",
"@libp2p/prometheus-metrics": "1.1.3",
"@multiformats/multiaddr": "12.1.3",
"api": "1.0.2-alpha",
"db": "1.0.2-alpha",
"executor": "1.0.2-alpha",
"find-up": "5.0.0",
"got": "12.5.3",
"js-yaml": "4.1.0",
"node": "^1.0.1-alpha",
"types": "^1.0.1-alpha",
"node": "1.0.2-alpha",
"types": "1.0.2-alpha",
"yargs": "17.6.2"
},
"devDependencies": {
15 changes: 10 additions & 5 deletions packages/cli/src/cmds/node/handler.ts
@@ -3,11 +3,12 @@ import { Config } from "executor/lib/config";
import { Namespace, getNamespaceByValue, RocksDbController } from "db/lib";
import { ConfigOptions } from "executor/lib/interfaces";
import { BundlerNode, IBundlerNodeOptions, defaultOptions } from "node/lib";
import { buildDefaultNetworkOptions } from "node/lib";
import { initNetworkOptions } from "node/lib";
import logger from "api/lib/logger";
import { ExecutorOptions, ApiOptions, P2POptions } from "types/lib/options";
import { IGlobalArgs } from "../../options";
import { mkdir, readFile } from "../../util";
import { initPeerIdAndEnr } from "./initPeerIdAndEnr";

export async function nodeHandler(args: IGlobalArgs): Promise<void> {
const params = await getNodeConfigFromArgs(args);
@@ -41,8 +42,8 @@ export async function nodeHandler(args: IGlobalArgs): Promise<void> {
logger.info("Config file not found. Proceeding with env vars...");
config = await Config.init({
networks: {},
testingMode: false,
unsafeMode: false,
testingMode: params.testingMode,
unsafeMode: params.unsafeMode,
redirectRpc: params.redirectRpc,
});
}
@@ -51,6 +52,9 @@
params.dataDir,
getNamespaceByValue(Namespace.userOps)
);
await db.start();

const { enr, peerId } = await initPeerIdAndEnr(args, logger);

const options: IBundlerNodeOptions = {
...defaultOptions,
@@ -60,7 +64,7 @@
cors: params.api["cors"],
enableRequestLogging: params.api["enableRequestLogging"],
},
network: buildDefaultNetworkOptions(params.p2p, params.dataDir),
network: initNetworkOptions(enr, params.p2p, params.dataDir),
};

const node = await BundlerNode.init({
@@ -70,6 +74,7 @@
testingMode: params.testingMode,
redirectRpc: params.redirectRpc,
bundlingMode: params.executor.bundlingMode,
peerId,
});

await node.start();
@@ -99,7 +104,7 @@ export async function getNodeConfigFromArgs(args: IGlobalArgs): Promise<{
enrHost: entries.get("p2p.enrHost"),
enrPort: entries.get("p2p.enrPort"),
bootEnrs: entries.get("p2p.bootEnrs"),
dataDir: entries.get("p2p.dataDir"),
retainPeerId: entries.get("p2p.retainPeerId"),
},
api: {
address: entries.get("api.address"),
147 changes: 147 additions & 0 deletions packages/cli/src/cmds/node/initPeerIdAndEnr.ts
@@ -0,0 +1,147 @@
import fs from "node:fs";
import path from "node:path";
import { SignableENR, createKeypairFromPeerId } from "@chainsafe/discv5";
import { createSecp256k1PeerId } from "@libp2p/peer-id-factory";
import { PeerId } from "@libp2p/interface-peer-id";
import { defaultP2POptions } from "types/lib/options";
import { Logger } from "api/lib/logger";
import { fromString as uint8ArrayFromString } from "uint8arrays/from-string";
import { toString as uint8ArrayToString } from "uint8arrays/to-string";
import { peerIdFromBytes } from "@libp2p/peer-id";
import { createFromPrivKey, createFromPubKey } from "@libp2p/peer-id-factory";
import { unmarshalPrivateKey, unmarshalPublicKey } from "@libp2p/crypto/keys";
import { readFile, writeFile600Perm } from "../../util";
import { IGlobalArgs } from "../../options";

export type PeerIdJSON = { id: string; pubKey?: string; privKey?: string };

export function overwriteEnrWithCliArgs(
enr: SignableENR,
args: IGlobalArgs
): void {
const [host, enrPort] = [args["p2p.enrHost"], args["p2p.enrPort"]];
enr.ip = host ?? defaultP2POptions.enrHost;
enr.tcp = enrPort ?? defaultP2POptions.enrPort;
enr.udp = enrPort ?? defaultP2POptions.enrPort;
}

export async function initPeerIdAndEnr(
args: IGlobalArgs,
logger: Logger
): Promise<{ peerId: PeerId; enr: SignableENR }> {
const { dataDir } = args;
const retainPeerId = args["p2p.retainPeerId"];

const newPeerIdAndENR = async (): Promise<{
peerId: PeerId;
enr: SignableENR;
}> => {
const peerId = await createSecp256k1PeerId();
const enr = SignableENR.createV4(createKeypairFromPeerId(peerId));
return { peerId, enr };
};

const readPersistedPeerIdAndENR = async (
peerIdFile: string,
enrFile: string
): Promise<{ peerId: PeerId; enr: SignableENR }> => {
let peerId: PeerId;
let enr: SignableENR;

// attempt to read stored peer id
try {
peerId = await readPeerId(peerIdFile);
} catch (e) {
logger.warn("Unable to read peerIdFile, creating a new peer id");
return newPeerIdAndENR();
}
// attempt to read stored enr
try {
enr = SignableENR.decodeTxt(
fs.readFileSync(enrFile, "utf-8"),
createKeypairFromPeerId(peerId)
);
} catch (e) {
logger.warn("Unable to decode stored local ENR, creating a new ENR");
enr = SignableENR.createV4(createKeypairFromPeerId(peerId));
return { peerId, enr };
}
// check stored peer id against stored enr
if (!peerId.equals(await enr.peerId())) {
logger.warn(
"Stored local ENR doesn't match peerIdFile, creating a new ENR"
);
enr = SignableENR.createV4(createKeypairFromPeerId(peerId));
return { peerId, enr };
}
return { peerId, enr };
};

if (retainPeerId) {
const enrFile = path.join(dataDir, "enr");
const peerIdFile = path.join(dataDir, "peer-id.json");
const { peerId, enr } = await readPersistedPeerIdAndENR(
peerIdFile,
enrFile
);
overwriteEnrWithCliArgs(enr, args);
writeFile600Perm(peerIdFile, exportToJSON(peerId));
writeFile600Perm(enrFile, enr.encodeTxt());
return { peerId, enr };
} else {
const { peerId, enr } = await newPeerIdAndENR();
overwriteEnrWithCliArgs(enr, args);
return { peerId, enr };
}
}

async function createFromParts(
multihash: Uint8Array,
privKey?: Uint8Array,
pubKey?: Uint8Array
): Promise<PeerId> {
if (privKey != null) {
const key = await unmarshalPrivateKey(privKey);

return createFromPrivKey(key);
} else if (pubKey != null) {
const key = unmarshalPublicKey(pubKey);

return createFromPubKey(key);
}

return peerIdFromBytes(multihash);
}

export function exportToJSON(
peerId: PeerId,
excludePrivateKey?: boolean
): PeerIdJSON {
return {
id: uint8ArrayToString(peerId.toBytes(), "base58btc"),
pubKey:
peerId.publicKey != null
? uint8ArrayToString(peerId.publicKey, "base64pad")
: undefined,
privKey:
excludePrivateKey === true || peerId.privateKey == null
? undefined
: uint8ArrayToString(peerId.privateKey, "base64pad"),
};
}

async function createFromJSON(obj: PeerIdJSON): Promise<PeerId> {
return createFromParts(
uint8ArrayFromString(obj.id, "base58btc"),
obj.privKey != null
? uint8ArrayFromString(obj.privKey, "base64pad")
: undefined,
obj.pubKey != null
? uint8ArrayFromString(obj.pubKey, "base64pad")
: undefined
);
}

async function readPeerId(filepath: string): Promise<PeerId> {
return createFromJSON(readFile(filepath));
}
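
For orientation, here is a minimal, hypothetical usage sketch of the new helper (not part of this commit). It assumes an `IGlobalArgs`-shaped object containing only the fields `initPeerIdAndEnr` actually reads (`dataDir`, `p2p.retainPeerId`, `p2p.enrHost`, `p2p.enrPort`), so the cast below is for illustration only:

```ts
// Hypothetical usage sketch (not part of this commit) for initPeerIdAndEnr.
// Only the fields read by the helper are provided; the values are examples.
import logger from "api/lib/logger";
import { initPeerIdAndEnr, exportToJSON } from "./initPeerIdAndEnr";
import { IGlobalArgs } from "../../options";

async function main(): Promise<void> {
  const args = {
    dataDir: "./skandha-data",   // only used when retainPeerId is true
    "p2p.retainPeerId": false,   // true persists peer-id.json and enr under dataDir
    "p2p.enrHost": "127.0.0.1",
    "p2p.enrPort": 4337,
  } as unknown as IGlobalArgs;   // partial object, cast for illustration only

  const { peerId, enr } = await initPeerIdAndEnr(args, logger);
  console.log("peer id:", exportToJSON(peerId, true).id); // true = exclude private key
  console.log("enr:", enr.encodeTxt());
}

main().catch(console.error);
```

With `p2p.retainPeerId` enabled, the helper rewrites both files with 0600 permissions on every start, so the node keeps a stable identity across restarts.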