Skip to content

Commit afeca96

Browse files
authored
fix typos
1 parent 730a41b commit afeca96

6 files changed

Lines changed: 8 additions & 8 deletions

File tree

apps/docs/blog/2023-12-15-pln-ecosystem-analysis/index.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ We are excited to announce that we are now initiating coverage of the PLN open s
1111

1212
![image](./datashader.png)<p style={{textAlign: 'center'}}><small>Heatmap of all GitHub contributions to PLN open source projects since 2017.</small></p>
1313

14-
This report is a shallow dive on 188 open source software projects that we are currently tracking within the PLN ecosystem. We are also tracking 1,000+ projects in the broader crypto economy, which we will cover in seperate reports.
14+
This report is a shallow dive on 188 open source software projects that we are currently tracking within the PLN ecosystem. We are also tracking 1,000+ projects in the broader crypto economy, which we will cover in separate reports.
1515

1616
<!-- truncate -->
1717

ops/external-prs/src/base.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ export class NoUserError extends Error {}
4444

4545
export class NoCommandError extends Error {}
4646

47-
export type CommmentCommandHandler<T> = (command: CommentCommand) => Promise<T>;
47+
export type CommentCommandHandler<T> = (command: CommentCommand) => Promise<T>;
4848

4949
/**
5050
* Convenience utilities for external PRs
@@ -197,7 +197,7 @@ export class GHAppUtils {
197197

198198
async parseCommentForCommand<T>(
199199
commentId: number,
200-
handlers: Record<string, CommmentCommandHandler<T>>,
200+
handlers: Record<string, CommentCommandHandler<T>>,
201201
): Promise<T> {
202202
const comment = await this.octo.rest.issues.getComment({
203203
repo: this.repo.name,

warehouse/bq2cloudsql/cloudsql.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -242,7 +242,7 @@ def import_csv(self, csv_uri: str, table: str, columns: None | List[str] = None)
242242
pp.pprint(r)
243243
if r["status"] not in ["PENDING", "RUNNING"]:
244244
if r["status"] != "DONE":
245-
raise Exception("An error occured importing")
245+
raise Exception("An error occurred importing")
246246
print("done importing")
247247
return
248248
time.sleep(1)

warehouse/oso_dagster/cbt/operations/_cbt_merge.sql

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
{#
2-
source_table_fqn is optional and allows for the the the
2+
source_table_fqn is optional and allows for the
33
destination to compare columns to the source to ensure compatibility
44
#}
55
{% if not source_table_fqn %}

warehouse/oso_dagster/dlt_sources/sql_database/README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ To setup the SQL Database Verified Source read the [full documentation here.](ht
2626
Here's what the `secrets.toml` looks like:
2727

2828
```toml
29-
# Put your secret values and credentials here. do not share this file and do not upload it to github.
29+
# Put your secret values and credentials here. Do not share this file and do not upload it to github.
3030
# We will set up creds with the following connection URL, which is a public database
3131

3232
# The credentials are as follows
@@ -217,7 +217,7 @@ With dataset above and local postgres instance, connectorx is 2x faster than pya
217217

218218
### Postgres / MSSQL
219219

220-
No issues found. Postgres is the only backend where we observed 2x speedup with connector x. On other db systems it performs same as `pyarrrow` backend or slower.
220+
No issues found. Postgres is the only backend where we observed 2x speedup with connector x. On other db systems it performs same as `pyarrow` backend or slower.
221221

222222
## Learn more
223223

warehouse/oso_dagster/factories/goldsky/assets.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -612,7 +612,7 @@ async def load_worker_tables(
612612
context.log.info(f"Worker[{worker.name}] completed latest data load")
613613
else:
614614
# Check if there are existing worker table. If so we continue from
615-
# there because likely some failures occured but new data isn't
615+
# there because likely some failures occurred but new data isn't
616616
# coming in.
617617
with self.bigquery.get_client() as client:
618618
# WARNING hardcoded for now to 8 workers as this seems to be the standard

0 commit comments

Comments (0)