Commit de0a3a4

misc: Update project description. Adapt code to lint rules.
vxern committed Jul 6, 2022
1 parent 9f6100c commit de0a3a4
Showing 5 changed files with 27 additions and 14 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,8 @@
+## 1.1.1
+
+- Updated project description.
+- Adapted code to lint rules.
+
 ## 1.1.0+3
 
 - Improved documentation.

2 changes: 1 addition & 1 deletion README.md
@@ -1,4 +1,4 @@
-## A simple yet complete, lightweight and sturdy `robots.txt` ruleset parser to ensure your application follows the standard protocol.
+## A lightweight `robots.txt` ruleset parser to ensure your application follows the standard protocol.
 
 ### Usage

3 changes: 1 addition & 2 deletions lib/robots_txt.dart
@@ -1,5 +1,4 @@
-/// Lightweight, sturdy and fully documented library for parsing the
-/// `robots.txt` file. Nothing more, nothing less.
+/// Lightweight, fully documented `robots.txt` file parser.
 library robots_txt;
 
 export 'src/parser.dart';

21 changes: 15 additions & 6 deletions lib/src/parser.dart
@@ -30,19 +30,22 @@ class Robots {
     bool quietMode = false,
     bool productionMode = true,
   }) : scraper = WebScraper(host),
-       log = Sprint('Robots',
-           quietMode: quietMode, productionMode: productionMode);
+       log = Sprint(
+         'Robots',
+         quietMode: quietMode,
+         productionMode: productionMode,
+       );
 
   /// Reads and parses the `robots.txt` file of the [host].
-  Future read({String? onlyRelevantTo}) async {
+  Future<void> read({String? onlyRelevantTo}) async {
     await scraper.loadWebPage('/robots.txt');
     final body = scraper.getElement('body', [])[0];
 
     final invalidRobotsFileError = "'$host' has an invalid `robots.txt`:";
 
     if (body.isEmpty) {
       log.warn('$invalidRobotsFileError No text elements found');
-      return rulesets;
+      return;
     }
 
     final content = body['title'] as String;
@@ -117,9 +120,15 @@ class Robots {
   /// Determines whether or not [path] may be traversed.
   bool canVisitPath(String path, {required String userAgent}) {
     final explicitAllowance = rulesets.getRule(
-        appliesTo: userAgent, concernsPath: path, andAllowsIt: true);
+      appliesTo: userAgent,
+      concernsPath: path,
+      andAllowsIt: true,
+    );
     final explicitDisallowance = rulesets.getRule(
-        appliesTo: userAgent, concernsPath: path, andAllowsIt: false);
+      appliesTo: userAgent,
+      concernsPath: path,
+      andAllowsIt: false,
+    );
 
     final allowancePriority = explicitAllowance?.priority ?? -1;
     final disallowancePriority = explicitDisallowance?.priority ?? -1;
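
For context, a minimal usage sketch of the API this commit touches. The diff only shows that the constructor forwards `host` to `WebScraper(host)`, so the `host:` named parameter, the example URL, the `/gist/` path, and the `*` user agent are all illustrative assumptions, not part of the commit:

```dart
import 'package:robots_txt/robots_txt.dart';

Future<void> main() async {
  // Hypothetical construction; `host` being a named parameter is an
  // assumption based on `scraper = WebScraper(host)` in the diff.
  final robots = Robots(host: 'https://example.com');

  // As of this commit, `read()` is typed `Future<void>` and simply
  // returns (rather than returning `rulesets`) on an invalid `robots.txt`.
  await robots.read();

  // `canVisitPath` weighs an explicit allowance against an explicit
  // disallowance; a missing rule's priority defaults to -1.
  final canVisit = robots.canVisitPath('/gist/', userAgent: '*');
  print(canVisit ? "May visit '/gist/'" : "May not visit '/gist/'");
}
```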
10 changes: 5 additions & 5 deletions pubspec.yaml
@@ -1,9 +1,9 @@
 name: robots_txt
-version: 1.1.0+3
+version: 1.1.1
 
 description: >-
-  A simple yet complete, lightweight and sturdy `robots.txt` ruleset parser
-  to ensure your application follows the standard.
+  A lightweight `robots.txt` ruleset parser to ensure your application adheres
+  to the de facto standard.
 homepage: https://github.com/wordcollector/robots_txt
 repository: https://github.com/wordcollector/robots_txt
@@ -13,8 +13,8 @@ environment:
   sdk: '>=2.13.0 <3.0.0'
 
 dependencies:
-  sprint: ^1.0.2+4
+  sprint: ^1.0.4
   web_scraper: ^0.1.4
 
 dev_dependencies:
-  words: ^0.0.2+1
+  words: ^0.1.1
