Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions examples/block-finality-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Create a `.env` file in the root of your project and set the following environme

```env
RPC_WS_URL=...
RUST_LOG=...
```

This `RPC_WS_URL` should point to the RPC WebSocket endpoint you want to use for Solana block subscribing.
Expand Down
2 changes: 1 addition & 1 deletion examples/block-finality-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

let filters = Filters::new(
RpcBlockSubscribeFilter::All,
Expand Down
3 changes: 2 additions & 1 deletion examples/jupiter-swap-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ Create a `.env` file in the root of your project and set the following environme
```env
GEYSER_URL=...
X_TOKEN=...
RUST_LOG=...
```

- `GEYSER_URL` should point to the Yellowstone Geyser RPC URL you want to use for Solana transaction crawling.
Expand All @@ -45,4 +46,4 @@ This will start the Geyser client and the pipeline will begin processing transac

## Metrics

The example doesn't include a metrics implementation by default. However, you can easily integrate custom metrics or logging by passing your own metrics implementation to the pipeline.
The example doesn't include a metrics implementation by default. However, you can easily integrate custom metrics or logging by passing your own metrics implementation to the pipeline.
2 changes: 1 addition & 1 deletion examples/jupiter-swap-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

// NOTE: Workaround that resolves issue https://github.com/rustls/rustls/issues/1877
rustls::crypto::aws_lc_rs::default_provider()
Expand Down
1 change: 1 addition & 0 deletions examples/kamino-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ Create a `.env` file in the root of your project and set the following environme
```env
GEYSER_URL=...
X_TOKEN=...
RUST_LOG=...
```

- `GEYSER_URL` should point to the Yellowstone Geyser RPC URL you want to use for Solana transaction crawling.
Expand Down
2 changes: 1 addition & 1 deletion examples/kamino-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

let mut account_filters: HashMap<String, SubscribeRequestFilterAccounts> = HashMap::new();
account_filters.insert(
Expand Down
1 change: 1 addition & 0 deletions examples/meteora-activities/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Create a `.env` file in the root of your project and set the following environme

```env
RPC_URL=...
RUST_LOG=...
```

This `RPC_URL` should point to the RPC endpoint you want to use for Solana transaction crawling.
Expand Down
2 changes: 1 addition & 1 deletion examples/meteora-activities/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

let filters = Filters::new(None, None, None);

Expand Down
1 change: 1 addition & 0 deletions examples/moonshot-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Create a `.env` file in the root of your project and set the following environme

```env
RPC_WS_URL=...
RUST_LOG=...
```

This `RPC_WS_URL` should point to the RPC WebSocket endpoint you want to use for Solana block subscribing.
Expand Down
2 changes: 1 addition & 1 deletion examples/moonshot-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

let filters = Filters::new(
RpcBlockSubscribeFilter::MentionsAccountOrProgram(MOONSHOT_PROGRAM_ID.to_string()),
Expand Down
1 change: 1 addition & 0 deletions examples/openbook-v2-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Create a `.env` file in the root of your project and set the following environme

```env
RPC_WS_URL=...
RUST_LOG=...
```

This `RPC_WS_URL` should point to the RPC WebSocket endpoint you want to use for Solana block subscribing.
Expand Down
2 changes: 1 addition & 1 deletion examples/openbook-v2-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

let filters = Filters::new(
RpcBlockSubscribeFilter::MentionsAccountOrProgram(OPENBOOK_V2_PROGRAM_ID.to_string()),
Expand Down
3 changes: 2 additions & 1 deletion examples/raydium-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ Create a `.env` file in the root of your project and set the following environme
```env
GEYSER_URL=...
X_TOKEN=...
RUST_LOG=...
```

- `GEYSER_URL` should point to the Yellowstone Geyser RPC URL you want to use for Solana transaction crawling.
Expand All @@ -45,4 +46,4 @@ This will start the Geyser client and the pipeline will begin processing transac

## Metrics

The example doesn't include a metrics implementation by default. However, you can easily integrate custom metrics or logging by passing your own metrics implementation to the pipeline.
The example doesn't include a metrics implementation by default. However, you can easily integrate custom metrics or logging by passing your own metrics implementation to the pipeline.
2 changes: 1 addition & 1 deletion examples/raydium-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

// NOTE: Workaround that resolves issue https://github.com/rustls/rustls/issues/1877
rustls::crypto::aws_lc_rs::default_provider()
Expand Down
1 change: 1 addition & 0 deletions examples/raydium-clmm-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Create a `.env` file in the root of your project and set the following environme

```env
RPC_WS_URL=...
RUST_LOG=...
```

This `RPC_WS_URL` should point to the RPC WebSocket endpoint you want to use for Solana block subscribing.
Expand Down
2 changes: 1 addition & 1 deletion examples/raydium-clmm-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

let filters = Filters::new(
RpcBlockSubscribeFilter::MentionsAccountOrProgram(RAYDIUM_CLMM_PROGRAM_ID.to_string()),
Expand Down
1 change: 1 addition & 0 deletions examples/raydium-cpmm-alerts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Create a `.env` file in the root of your project and set the following environme

```env
RPC_WS_URL=...
RUST_LOG=...
```

This `RPC_WS_URL` should point to the RPC WebSocket endpoint you want to use for Solana block subscribing.
Expand Down
2 changes: 1 addition & 1 deletion examples/raydium-cpmm-alerts/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ use {

#[tokio::main]
pub async fn main() -> CarbonResult<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

let filters = Filters::new(
RpcBlockSubscribeFilter::MentionsAccountOrProgram(RAYDIUM_CPMM_PROGRAM_ID.to_string()),
Expand Down
1 change: 1 addition & 0 deletions examples/sharky-offers/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ Create a `.env` file in the root of your project and set the following environme

```env
RPC_URL=...
RUST_LOG=...
```

- `RPC_URL` should point to the Solana RPC URL you want to use for Solana program accounts crawling and live updates.
Expand Down
2 changes: 1 addition & 1 deletion examples/sharky-offers/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,8 +131,8 @@ instruction_decoder_collection!(

#[tokio::main]
async fn main() -> anyhow::Result<()> {
env_logger::init();
dotenv::dotenv().ok();
env_logger::init();

Pipeline::builder()
.datasource(GpaBackfillDatasource::new(
Expand Down
4 changes: 2 additions & 2 deletions examples/token-indexing/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ DATABASE_URL=...

- `GEYSER_URL` should point to the Yellowstone Geyser RPC URL you want to use for Solana transaction crawling.
- `X_TOKEN` is optional and can be used if your Geyser endpoint requires an authentication token.
- `DATABASE_URL` postgres connection url
- `DATABASE_URL` should be set to the Postgres connection URL for the database the indexer writes to.

### Step 3: Build the Project

Expand All @@ -45,4 +45,4 @@ cargo run --release

## Metrics

The example doesn't include a metrics implementation by default. However, you can easily integrate custom metrics or logging by passing your own metrics implementation to the pipeline.
The example doesn't include a metrics implementation by default. However, you can easily integrate custom metrics or logging by passing your own metrics implementation to the pipeline.
Loading