
Commit 987ee36

Merge branch 'main' into improv

2 parents: 63a9f51 + 8305b2b

5 files changed: +162 -88 lines

README.md (+1)

@@ -44,6 +44,7 @@ npm run dev -- --from-block=12345 # start indexing from the 12345th block
 npm run dev -- --run-once # index and exit without watching for events
 npm run dev -- --no-cache # disable cache
 npm run dev -- --log-level=trace # set log level
+npm run dev -- --port=8081 # start web service on a given port
 ```

 ## Running in production

fly.toml (+81 -42)

@@ -1,66 +1,105 @@
-app = 'indexer-v2'
-primary_region = 'den'
-kill_signal = 'SIGINT'
-kill_timeout = '5s'
+
+app = "indexer-v2"
+primary_region = "den"
+kill_signal = "SIGINT"
+kill_timeout = "5s"

 [experimental]
   auto_rollback = true

 [build]

 [deploy]
-  wait_timeout = '6h0m0s'
+  wait_timeout = "6h0m0s"

 [env]
-  PINO_PRETTY = 'true'
-  DEPLOYMENT_ENVIRONMENT = 'production'
-  ENABLE_RESOURCE_MONITOR = 'false'
-  ESTIMATES_LINEARQF_WORKER_POOL_SIZE = '10'
-  INDEXED_CHAINS = 'mainnet,optimism,fantom,pgn-testnet,pgn-mainnet,arbitrum,polygon,sepolia,avalanche,avalanche-fuji,scroll,scroll-sepolia,base,zksync-era-mainnet,lukso-mainnet,lukso-testnet,celo-mainnet,celo-testnet,sei-mainnet,metisAndromeda'
-  LOG_LEVEL = 'debug'
-  NODE_OPTIONS = '--max-old-space-size=4096'
-  PORT = '8080'
-  STORAGE_DIR = '/mnt/indexer'
-  PASSPORT_SCORER_ID=335
+  PINO_PRETTY = "true"
+  DEPLOYMENT_ENVIRONMENT = "production"
+  ENABLE_RESOURCE_MONITOR = "false"
+  ESTIMATES_LINEARQF_WORKER_POOL_SIZE = "10"
+  INDEXED_CHAINS = "mainnet,optimism,fantom,pgn-testnet,pgn-mainnet,arbitrum,polygon,sepolia,avalanche,avalanche-fuji,scroll,scroll-sepolia,base,zksync-era-mainnet,lukso-mainnet,lukso-testnet,celo-mainnet,celo-testnet,sei-mainnet,metisAndromeda"
+  LOG_LEVEL = "debug"
+  NODE_OPTIONS = "--max-old-space-size=4096"
+  PORT = "8080"
+  STORAGE_DIR = "/mnt/indexer"
+  PASSPORT_SCORER_ID = 335

 [processes]
-  indexer = 'npm start -- --indexer --http'
-  web = 'npm start -- --http --http-wait-for-sync=false'
+  indexer = "npm start -- --indexer --http"
+  web = "npm start -- --http --http-wait-for-sync=false"

 [[mounts]]
-  source = 'indexer_staging'
-  destination = '/mnt/indexer'
-  initial_size = '50GB'
+  source = "indexer_staging"
+  destination = "/mnt/indexer"
+  initial_size = "50GB"
   auto_extend_size_threshold = 80
   auto_extend_size_increment = "5GB"
   auto_extend_size_limit = "100GB"
-  processes = ['indexer', 'web']
+  processes = ["indexer", "web"]

-[http_service]
+[[services]]
   internal_port = 8080
-  force_https = true
-  auto_stop_machines = true
-  auto_start_machines = true
-  min_machines_running = 2
-  processes = ['web']
-
-  [http_service.concurrency]
-    type = 'requests'
+  processes = ["indexer"]
+  protocol = "tcp"
+  script_checks = []
+
+  [services.concurrency]
     hard_limit = 250
     soft_limit = 200
+    type = "requests"

-[checks]
-  [checks.http]
+  [[services.ports]]
+    force_https = true
+    handlers = ["http"]
     port = 8080
-    type = 'http'
-    interval = '15s'
-    timeout = '10s'
-    grace_period = '30s'
-    method = 'get'
-    path = '/api/v1/status'
-    processes = ['web', 'indexer']
+
+  [[services.ports]]
+    handlers = ["tls", "http"]
+    port = 8081
+
+  [[services.tcp_checks]]
+    grace_period = "30s"
+    interval = "15s"
+    restart_limit = 0
+    timeout = "10s"
+
+[[services]]
+  internal_port = 8080
+  processes = ["web"]
+  protocol = "tcp"
+  script_checks = []
+
+  [services.concurrency]
+    hard_limit = 250
+    soft_limit = 200
+    type = "requests"
+
+  [[services.ports]]
+    force_https = true
+    handlers = ["http"]
+    port = 80
+
+  [[services.ports]]
+    handlers = ["tls", "http"]
+    port = 443
+
+  [[services.tcp_checks]]
+    grace_period = "30s"
+    interval = "15s"
+    restart_limit = 0
+    timeout = "10s"
+
+[checks.http]
+  port = 8080
+  type = "http"
+  interval = "15s"
+  timeout = "10s"
+  grace_period = "30s"
+  method = "get"
+  path = "/api/v1/status"
+  processes = ["web", "indexer"]

 [[vm]]
-  memory = '4gb'
-  cpu_kind = 'performance'
-  cpus = 2
+  memory = "4gb"
+  cpu_kind = "performance"
+  cpus = 2
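For reference, the `[checks.http]` block above probes `GET /api/v1/status` on the internal port 8080 with a 10s timeout. The sketch below is a standalone illustration of that probe, not part of the commit; the port, path, method, and timeout come from the config, while the localhost URL and the script itself are assumptions.

```ts
// Standalone sketch mirroring the [checks.http] probe configured in fly.toml.
// port 8080, GET /api/v1/status, 10s timeout are taken from the config above;
// everything else here is illustrative.
const checkStatus = async (): Promise<void> => {
  const url = "http://localhost:8080/api/v1/status"; // assumed reachable locally
  const response = await fetch(url, {
    method: "GET",
    signal: AbortSignal.timeout(10_000), // mirrors timeout = "10s"
  });
  if (!response.ok) {
    throw new Error(`status check failed: HTTP ${response.status}`);
  }
  console.log(`status check passed: HTTP ${response.status}`);
};

checkStatus().catch((err) => {
  console.error(err);
  process.exit(1);
});
```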

src/config.ts (+49 -45)

@@ -1848,6 +1848,44 @@ export type Config = {
 };

 export function getConfig(): Config {
+  const { values: args } = parseArgs({
+    options: {
+      "to-block": {
+        type: "string",
+      },
+      "from-block": {
+        type: "string",
+      },
+      "drop-db": {
+        type: "boolean",
+      },
+      "rm-cache": {
+        type: "boolean",
+      },
+      "log-level": {
+        type: "string",
+      },
+      "run-once": {
+        type: "boolean",
+      },
+      "no-cache": {
+        type: "boolean",
+      },
+      "http-wait-for-sync": {
+        type: "string",
+      },
+      http: {
+        type: "boolean",
+      },
+      indexer: {
+        type: "boolean",
+      },
+      port: {
+        type: "string",
+      },
+    },
+  });
+
   const buildTag = z
     .union([z.string(), z.null()])
     .default(null)
@@ -1858,7 +1896,17 @@ export function getConfig(): Config {
     .transform((value) => value === "true")
     .parse(process.env.ENABLE_RESOURCE_MONITOR);

-  const apiHttpPort = z.coerce.number().parse(process.env.PORT);
+  const portSchema = z.coerce.number().int().nonnegative().max(65535);
+
+  const portOverride = z
+    .union([portSchema, z.undefined()])
+    .optional()
+    .parse(args["port"]);
+
+  const apiHttpPort =
+    portOverride !== undefined
+      ? portOverride
+      : portSchema.parse(z.coerce.number().parse(process.env.PORT));

   const pinoPretty = z
     .enum(["true", "false"])
@@ -1899,50 +1947,6 @@ export function getConfig(): Config {
     .default(path.join(storageDir, "cache"))
     .parse(process.env.CACHE_DIR);

-  const { values: args } = parseArgs({
-    options: {
-      "to-block": {
-        type: "string",
-      },
-      "from-block": {
-        type: "string",
-      },
-      "drop-db": {
-        type: "boolean",
-      },
-      "drop-chain-db": {
-        type: "boolean",
-      },
-      "drop-ipfs-db": {
-        type: "boolean",
-      },
-      "drop-price-db": {
-        type: "boolean",
-      },
-      "rm-cache": {
-        type: "boolean",
-      },
-      "log-level": {
-        type: "string",
-      },
-      "run-once": {
-        type: "boolean",
-      },
-      "no-cache": {
-        type: "boolean",
-      },
-      "http-wait-for-sync": {
-        type: "string",
-      },
-      http: {
-        type: "boolean",
-      },
-      indexer: {
-        type: "boolean",
-      },
-    },
-  });
-
   const chains = z
     .string()
     .or(z.literal("all"))
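With this reordering, `parseArgs` runs at the top of `getConfig()`, and a `--port` flag, when given, takes precedence over the `PORT` environment variable, with both paths validated against the same 0–65535 port schema. Below is a condensed sketch of that precedence using the same `node:util` and `zod` APIs as the diff; the `strict: false` option and the final log line are illustrative simplifications, not part of the commit.

```ts
import { parseArgs } from "node:util";
import { z } from "zod";

// Condensed illustration of the port resolution added to getConfig();
// only the CLI/env precedence is shown.
const { values: args } = parseArgs({
  options: {
    port: { type: "string" }, // e.g. `npm run dev -- --port=8081`
  },
  strict: false, // tolerate the indexer's other flags in this stripped-down sketch
});

const portSchema = z.coerce.number().int().nonnegative().max(65535);

// The CLI flag wins when provided; otherwise fall back to the PORT env var.
const portOverride = portSchema.optional().parse(args.port);
const apiHttpPort = portOverride ?? portSchema.parse(process.env.PORT);

console.log(`HTTP API will listen on port ${apiHttpPort}`);
```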

src/index.ts (+3 -1)

@@ -39,7 +39,7 @@ import type { EventHandlerContext } from "./indexer/indexer.js";
 import { handleEvent as handleAlloV1Event } from "./indexer/allo/v1/handleEvent.js";
 import { handleEvent as handleAlloV2Event } from "./indexer/allo/v2/handleEvent.js";
 import { Database } from "./database/index.js";
-import { decodeJsonWithBigInts } from "./utils/index.js";
+import { decodeJsonWithBigInts, getExternalIP } from "./utils/index.js";
 import { Block } from "chainsauce/dist/cache.js";
 import { createPublicClient, http } from "viem";
 import { IndexerEvents } from "chainsauce/dist/indexer.js";
@@ -121,6 +121,8 @@ async function main(): Promise<void> {
     return decodeJsonWithBigInts(val);
   });

+  await getExternalIP(baseLogger);
+
   if (config.cacheDir) {
     if (config.removeCache) {
       await fs.rm(config.cacheDir, { recursive: true });

src/utils/index.ts (+28)

@@ -1,5 +1,8 @@
 // TODO: why is eslint not recognizing type narrowing?
 /* eslint-disable @typescript-eslint/no-unsafe-argument */
+
+import { Logger } from "pino";
+
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
 export function encodeJsonWithBigInts(value: unknown): string {
   return JSON.stringify(value, (_key, value) => {
@@ -31,3 +34,28 @@ export const UINT64_MAX = 18446744073709551615n;
 export const getDateFromTimestamp = (timestamp: bigint): Date | null => {
   return timestamp < UINT64_MAX ? new Date(Number(timestamp) * 1000) : null;
 };
+
+export const getExternalIP = async (logger: Logger): Promise<string> => {
+  const urls = ["https://api.ipify.org?format=json", "http://ipinfo.io/json"];
+  for (const url of urls) {
+    try {
+      logger.debug(`Attempting to fetch IP address from: ${url}`);
+      const response = await fetch(url);
+      if (response.ok) {
+        const { ip } = (await response.json()) as { ip: string };
+        logger.info(`Successfully fetched IP address: ${ip}`);
+        return ip;
+      }
+      throw new Error(`Request failed with status: ${response.status}`);
+    } catch (error) {
+      if (error instanceof Error) {
+        logger.error(`Failed to fetch from ${url}: ${error.message}`);
+      } else {
+        logger.error(`Failed to fetch from ${url}`);
+      }
+    }
+  }
+  throw new Error(
+    "Unable to fetch external IP address from both primary and fallback URLs."
+  );
+};
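`getExternalIP` tries api.ipify.org first, falls back to ipinfo.io, logs each attempt, and throws only when both fail. A minimal standalone usage sketch follows; the logger options and the extra log messages are illustrative, not part of the commit, which simply awaits `getExternalIP(baseLogger)` during startup without a surrounding try/catch.

```ts
import { pino } from "pino";
import { getExternalIP } from "./utils/index.js";

// Illustrative standalone usage; the commit calls getExternalIP(baseLogger)
// once in main() (src/index.ts), where a failure would propagate.
const logger = pino({ level: "debug" });

getExternalIP(logger)
  .then((ip) => logger.info(`Indexer egress IP: ${ip}`))
  .catch((err) => logger.error(err, "Could not determine external IP"));
```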
