Compare commits: 0.2.3...ignore-0.1 (300 commits)
Commit SHA1s in this range:

487713aa34, e300541701, e9df420d2f, 201b4fc757, 90a11dec5e, 9456d95e8f,
0c298f60a6, 79271fcb33, fc975af8e9, 1425d6735e, aed3ccb9c7, 33c95d2919,
01deac9427, b4bc3b6349, 685cc6c562, 08c017330f, 2f3a8c7f69, 3ac1b68e54,
0ebd5465b7, 5cb4bb9ea0, c8a179b4da, 46f94826fd, 75f1855a91, fd9870d668,
a3a2708067, 78847b65c8, e962eea1cc, 95bc678403, 68af3bbdc4, 70b6bdb104,
c648eadbaa, d352b79294, 23aec58669, ae863bc7aa, f0d3cae569, 4ef4818130,
8db24e1353, 8bbe58d623, b3fd0df94b, c1b841e934, f5ede0e319, 6ecffec537,
80e91a1f1d, d570f78144, 7c37065911, 50f7a60a8d, 33ec988d70, adff43fbb4,
71585f6d47, 714ae82241, 49fd668712, 066f97d855, df1bf4a042, 4e8c0fc4ad,
da1764dfd1, 48a8a3a691, 796eaab0d7, bf49448e1e, cffba53379, 79d40d0e20,
525b278049, 16de47920c, a114b86063, a5a16ebb27, 8ac5bc0147, cf750a190f,
d825648b86, 22cb644eb6, e424f87487, f5b2c96b77, 6e209b6fdb, 72e3c54e0a,
b67886264f, e67ab459d3, 7a926d090d, 596f94aa7f, de55d37bea, fecef10c1c,
79e5e6671f, b04a68a782, e573ab5c60, f5a2d022ec, b1d1cd2366, f26e0f088f,
057ed6305a, 730beb9cb5, ed60ec736c, a7ca2d6563, a7d0e40668, 7a951f103a,
c3de1f58ea, e940bc956d, 8751e55706, 2143bcf9cb, a6a24bafb3, db27a33827,
083fb73790, 461e0c4e33, 82df3b7685, ece6011164, 00033e1875, 5aea517fb4,
073ff35ebb, c4633ff187, 97e6873b38, ed01e80a79, 8f7b9be356, 851799f42b,
b65a8c353b, 95cea77625, b187c1a817, f7a2fe30d4, aed315e80a, 2f0d9d411a,
163e00677a, d58236fbdc, 932875684e, b65bb37b14, de5cb7d22e, 7a682f465e,
084d3f4911, 9911cd0cd9, de91c26bb1, 5b1796d64d, d4527854de, 82ceb818f3,
dd5ded2f78, cbacf4f19e, 900ef0abc7, 8396d3ffaa, 652c70f207, bb70f96743,
6d346a09de, 699c76f45c, de33003527, 3e943636f4, 3f515afbb4, 30db03bb62,
d66812102b, 86f8c3c818, 5eb2ca4338, 20bcb8d883, 7282706b42, 160f04894f,
0473df1ef5, 301a3fd71d, d12bdf35a5, 08514e8e6c, 687e846944, b286fdcb88,
e3959d67a6, 7d475b0c70, 42223047a8, 42afb6faa5, c4a6733f3b, 9e04a8283c,
05b26d5986, 506f046b8b, ae592b11e3, a5e7f176f1, 0428bd1bec, 7f3e7d2faa,
8d5906d7fc, feda38852e, 59187902d0, aef46beaf2, f0e192943f, df72d8d1e0,
d06f84ced3, 9598331fa8, 883d8fc72f, e8a30cb893, 03f7605322, 61663e2307,
bd3e7eedb1, 1e6c2ac8e3, 0302d58eb8, e37f783fc0, 495e13cc61, 92dc402f7f,
a3f5e0c3d5, 39e1a0d694, e9cd0a1cc3, cc35ae0748, 5ee175beaf, 4b18f82899,
5462af4434, d2e70da040, 64dc9b6709, 9ffd4c421f, d862b80afb, 5b73dcc8ab,
2dce0dc0df, 2e5c3c05e8, 6884eea2f5, a3a2f0be6a, f24873c70b, 58126ffe15,
17644a76c0, 9fc9f368f5, 9cab076a72, 7aa9652f3c, 7187f61ca8, f869c58a5a,
3538ba3577, a454fa75b9, 18943b9317, 68427b5b79, 4ca15a8a51, 2daef51fe5,
43ed91dc5c, dada75d2a7, 76b9f01ad2, 8baa0e56b7, 301ee6d3f5, 77ad7588ae,
58aca2efb2, 351eddc17e, 277dda544c, 8c869cbd87, 598b162fea, 0222e024fe,
5bd0edbbe1, 4368913d8f, 02de97b8ce, 32db773d51, b272be25fa, f63c168563,
a05671c8d7, 1aeae3e22d, 60d537c43d, ef5c07476b, 4f6f34307c, 7cf560d27c,
15b263ff55, 53121e0733, 404785f950, 103c4c953c, 82abf883c5, a2315d5ee5,
201d0cb8c1, 6f45478a7d, 9c2c569624, a1e4e0f85c, caf31a769b, 920112e640,
a84ffe603b, e4f83f3161, fbca4a0332, 65c7df1c25, 18237da9b2, f147f3aa39,
599c4fc3f3, d85a6dd5c8, 40abade8ee, fca4fdf6ea, 16975797fe, 6507a48f97,
c8e2fa1869, f728708ce9, c302995d05, 4a77cc8100, dc86666044, 6b038511c7,
c767bccade, a075a462fa, 24f753c306, 1aae2759ad, 91646f6cca, 031ace209d,
942e9c4743, 12c9656b18, 8bf3760cdb, c96623e66a, 36f949633b, 811fcc1fe8,
79a8d0ab3f, fbf8265cde, d79add341b, 12b2b1f624, 3aaf550ca5, d4876cd064,
867a57e176, ec4904df33, c4ea157cb7, 0156967f4c, 3238707b0b, 31fbae597f,
f2e1711781, 94d600e6e1, b904c5d9dc, 5a29417796, f694800768, bd1c9e9499,
f04b0dd95c, 5487dffefa, 1fc6787648, 11e164aec9, 1c1331d926, cbe94823d2
.gitignore (3 changes, vendored)

```diff
@@ -3,3 +3,6 @@ tags
 target
 /grep/Cargo.lock
 /globset/Cargo.lock
+/ignore/Cargo.lock
+/termcolor/Cargo.lock
+/wincolor/Cargo.lock
```
.travis.yml (23 changes)

```diff
@@ -1,5 +1,4 @@
 language: rust
-cache: cargo
 
 env:
   global:
@@ -10,13 +9,13 @@ matrix:
     # (All *nix releases are done on the nightly channel to take advantage
     # of the regex library's multiple pattern SIMD search.)
     - os: linux
-      rust: nightly
+      rust: nightly-2017-03-13
      env: TARGET=i686-unknown-linux-musl
    - os: linux
-      rust: nightly
+      rust: nightly-2017-03-13
      env: TARGET=x86_64-unknown-linux-musl
    - os: osx
-      rust: nightly
+      rust: nightly-2017-03-13
      env: TARGET=x86_64-apple-darwin
    # Beta channel.
    - os: linux
@@ -25,19 +24,13 @@ matrix:
     - os: linux
       rust: beta
       env: TARGET=x86_64-unknown-linux-gnu
-    - os: osx
-      rust: beta
-      env: TARGET=x86_64-apple-darwin
     # Minimum Rust supported channel.
     - os: linux
-      rust: 1.9.0
-      env: TARGET=x86_64-unknown-linux-musl
-    - os: linux
-      rust: 1.9.0
+      rust: 1.12.0
       env: TARGET=x86_64-unknown-linux-gnu
-    - os: osx
-      rust: 1.9.0
-      env: TARGET=x86_64-apple-darwin
+    - os: linux
+      rust: 1.12.0
+      env: TARGET=x86_64-unknown-linux-musl
 
 before_install:
   - export PATH="$PATH:$HOME/.cargo/bin"
@@ -64,7 +57,7 @@ deploy:
     # channel to use to produce the release artifacts
     # NOTE make sure you only release *once* per target
     # TODO you may want to pick a different channel
-    condition: $TRAVIS_RUST_VERSION = nightly
+    condition: $TRAVIS_RUST_VERSION = nightly-2017-03-13
     tags: true
 
 branches:
```
CHANGELOG.md (354 changes)

```diff
@@ -1,3 +1,357 @@
+0.5.1 (2017-04-09)
+==================
+Feature enhancements:
+
+* Added or improved file type filtering for vim.
+* [FEATURE #34](https://github.com/BurntSushi/ripgrep/issues/34):
+  Add a `-o/--only-matching` flag.
+* [FEATURE #377](https://github.com/BurntSushi/ripgrep/issues/377):
+  Column numbers can now be customized with a color. (The default is
+  no color.)
+* [FEATURE #419](https://github.com/BurntSushi/ripgrep/issues/419):
+  Added `-0` short flag option for `--null`.
+
+Bug fixes:
+
+* [BUG #381](https://github.com/BurntSushi/ripgrep/issues/381):
+  Include license text in all subcrates.
+* [BUG #418](https://github.com/BurntSushi/ripgrep/issues/418),
+  [BUG #426](https://github.com/BurntSushi/ripgrep/issues/426),
+  [BUG #439](https://github.com/BurntSushi/ripgrep/issues/439):
+  Fix a few bugs with `-h/--help` output.
+
+
+0.5.0 (2017-03-12)
+==================
+This is a new minor version release of ripgrep that includes one minor breaking
+change, bug fixes and several new features including support for text encodings
+other than UTF-8.
+
+A notable accomplishment with respect to Rust is that ripgrep proper now only
+contains a single `unsafe` use (for accessing the contents of a memory map).
+
+The **breaking change** is:
+
+* [FEATURE #380](https://github.com/BurntSushi/ripgrep/issues/380):
+  Line numbers are now hidden by default when ripgrep is printing to a tty
+  **and** the only thing searched is stdin.
+
+Feature enhancements:
+
+* Added or improved file type filtering for Ceylon, CSS, Elixir, HTML, log,
+  SASS, SVG, Twig
+* [FEATURE #1](https://github.com/BurntSushi/ripgrep/issues/1):
+  Add support for additional text encodings, including automatic detection for
+  UTF-16 via BOM sniffing. Explicit text encoding support with the
+  `-E/--encoding` flag was also added for latin-1, GBK, EUC-JP
+  and Shift_JIS, among others. The full list can be found here:
+  https://encoding.spec.whatwg.org/#concept-encoding-get
+* [FEATURE #129](https://github.com/BurntSushi/ripgrep/issues/129):
+  Add a new `-M/--max-columns` flag that omits lines longer than the given
+  number of bytes. (Disabled by default!)
+* [FEATURE #369](https://github.com/BurntSushi/ripgrep/issues/369):
+  A new flag, `--max-filesize`, was added for limiting searches to files with
+  a maximum file size.
+
+Bug fixes:
+
+* [BUG #52](https://github.com/BurntSushi/ripgrep/issues/52),
+  [BUG #311](https://github.com/BurntSushi/ripgrep/issues/311):
+  Tweak how binary files are detected and handled. (We are slightly less
+  conservative and will no longer use memory without bound.)
+* [BUG #326](https://github.com/BurntSushi/ripgrep/issues/326):
+  When --files flag is given, we should never attempt to parse positional
+  arguments as regexes.
+* [BUG #327](https://github.com/BurntSushi/ripgrep/issues/327):
+  Permit the --heading flag to override the --no-heading flag.
+* [BUG #340](https://github.com/BurntSushi/ripgrep/pull/340):
+  Clarify that the `-u/--unrestricted` flags are aliases.
+* [BUG #343](https://github.com/BurntSushi/ripgrep/pull/343):
+  Global git ignore config should use `$HOME/.config/git/ignore` and not
+  `$HOME/git/ignore`.
+* [BUG #345](https://github.com/BurntSushi/ripgrep/pull/345):
+  Clarify docs for `-g/--glob` flag.
+* [BUG #381](https://github.com/BurntSushi/ripgrep/issues/381):
+  Add license files to each sub-crate.
+* [BUG #383](https://github.com/BurntSushi/ripgrep/issues/383):
+  Use latest version of clap (for argv parsing).
+* [BUG #392](https://github.com/BurntSushi/ripgrep/issues/391):
+  Fix translation of set globs (e.g., `{foo,bar,quux}`) to regexes.
+* [BUG #401](https://github.com/BurntSushi/ripgrep/pull/401):
+  Add PowerShell completion file to Windows release.
+* [BUG #405](https://github.com/BurntSushi/ripgrep/issues/405):
+  Fix bug when excluding absolute paths with the `-g/--glob` flag.
+
+
+0.4.0
+=====
+This is a new minor version release of ripgrep that includes a couple very
+minor breaking changes, a few new features and lots of bug fixes.
+
+This version of ripgrep upgrades its `regex` dependency from `0.1` to `0.2`,
+which includes a few minor syntax changes:
+
+* POSIX character classes now require double bracketing. Previously, the regex
+  `[:upper:]` would parse as the `upper` POSIX character class. Now it parses
+  as the character class containing the characters `:upper:`. The fix to this
+  change is to use `[[:upper:]]` instead. Note that variants like
+  `[[:upper:][:blank:]]` continue to work.
+* The character `[` must always be escaped inside a character class.
+* The characters `&`, `-` and `~` must be escaped if any one of them are
+  repeated consecutively. For example, `[&]`, `[\&]`, `[\&\&]`, `[&-&]` are all
+  equivalent while `[&&]` is illegal. (The motivation for this and the prior
+  change is to provide a backwards compatible path for adding character class
+  set notation.)
+
+Feature enhancements:
+
+* Added or improved file type filtering for Crystal, Kotlin, Perl, PowerShell,
+  Ruby, Swig
+* [FEATURE #83](https://github.com/BurntSushi/ripgrep/issues/83):
+  Type definitions can now include other type definitions.
+* [FEATURE #243](https://github.com/BurntSushi/ripgrep/issues/243):
+  **BREAKING CHANGE**: The `--column` flag now implies `--line-number`.
+* [FEATURE #263](https://github.com/BurntSushi/ripgrep/issues/263):
+  Add a new `--sort-files` flag.
+* [FEATURE #275](https://github.com/BurntSushi/ripgrep/issues/275):
+  Add a new `--path-separator` flag. Useful in cygwin.
+
+Bug fixes:
+
+* [BUG #182](https://github.com/BurntSushi/ripgrep/issues/182):
+  Redux: use more portable ANSI color escape sequences when possible.
+* [BUG #258](https://github.com/BurntSushi/ripgrep/issues/258):
+  Fix bug that caused ripgrep's parallel iterator to spin and burn CPU.
+* [BUG #262](https://github.com/BurntSushi/ripgrep/issues/262):
+  Document how to install shell completion files.
+* [BUG #266](https://github.com/BurntSushi/ripgrep/issues/266),
+  [BUG #293](https://github.com/BurntSushi/ripgrep/issues/293):
+  Fix handling of bold styling and change the default colors.
+* [BUG #268](https://github.com/BurntSushi/ripgrep/issues/268):
+  Make lack of backreference support more explicit.
+* [BUG #271](https://github.com/BurntSushi/ripgrep/issues/271):
+  Remove `~` dependency on clap.
+* [BUG #277](https://github.com/BurntSushi/ripgrep/issues/277):
+  Fix cosmetic issue in `globset` crate docs.
+* [BUG #279](https://github.com/BurntSushi/ripgrep/issues/279):
+  ripgrep did not terminate when `-q/--quiet` was given.
+* [BUG #281](https://github.com/BurntSushi/ripgrep/issues/281):
+  **BREAKING CHANGE**: Completely remove `^C` handling from ripgrep.
+* [BUG #284](https://github.com/BurntSushi/ripgrep/issues/284):
+  Make docs for `-g/--glob` clearer.
+* [BUG #286](https://github.com/BurntSushi/ripgrep/pull/286):
+  When stdout is redirected to a file, don't search that file.
+* [BUG #287](https://github.com/BurntSushi/ripgrep/pull/287):
+  Fix ZSH completions.
+* [BUG #295](https://github.com/BurntSushi/ripgrep/pull/295):
+  Remove superfluous `memmap` dependency in `grep` crate.
+* [BUG #308](https://github.com/BurntSushi/ripgrep/pull/308):
+  Improve docs for `-r/--replace`.
+* [BUG #313](https://github.com/BurntSushi/ripgrep/pull/313):
+  Update bytecount dep to latest version.
+* [BUG #318](https://github.com/BurntSushi/ripgrep/pull/318):
+  Fix invalid UTF-8 output bug in Windows consoles.
+
+
+0.3.2
+=====
+Feature enhancements:
+
+* Added or improved file type filtering for Less, Sass, stylus, Zsh
+
+Bug fixes:
+
+* [BUG #229](https://github.com/BurntSushi/ripgrep/issues/229):
+  Make smart case slightly less conservative.
+* [BUG #247](https://github.com/BurntSushi/ripgrep/issues/247):
+  Clarify use of --heading/--no-heading.
+* [BUG #251](https://github.com/BurntSushi/ripgrep/issues/251),
+  [BUG #264](https://github.com/BurntSushi/ripgrep/issues/264),
+  [BUG #267](https://github.com/BurntSushi/ripgrep/issues/267):
+  Fix matching bug caused by literal optimizations.
+* [BUG #256](https://github.com/BurntSushi/ripgrep/issues/256):
+  Fix bug that caused `rg foo` and `rg foo/` to have different behavior
+  when `foo` was a symlink.
+* [BUG #270](https://github.com/BurntSushi/ripgrep/issues/270):
+  Fix bug where patterns starting with a `-` couldn't be used with the
+  `-e/--regexp` flag. (This resolves a regression that was introduced in
+  ripgrep 0.3.0.)
+
+
+0.3.1
+=====
+Bug fixes:
+
+* [BUG #242](https://github.com/BurntSushi/ripgrep/issues/242):
+  ripgrep didn't respect `--colors foo:none` correctly. Now it does.
+
+
+0.3.0
+=====
+This is a new minor version release of ripgrep that includes two breaking
+changes with lots of bug fixes and some new features and performance
+improvements. Notably, if you had a problem with colors or piping on Windows
+before, then that should now be fixed in this release.
+
+**BREAKING CHANGES**:
+
+* ripgrep now requires Rust 1.11 to compile. Previously, it could build on
+  Rust 1.9. The cause of this was the move from
+  [Docopt to Clap](https://github.com/BurntSushi/ripgrep/pull/233)
+  for argument parsing.
+* The `-e/--regexp` flag can no longer accept a pattern starting with a `-`.
+  There are two work-arounds: `rg -- -foo` and `rg [-]foo` or `rg -e [-]foo`
+  will all search for the same `-foo` pattern. The cause of this was the move
+  from [Docopt to Clap](https://github.com/BurntSushi/ripgrep/pull/233)
+  for argument parsing.
+  [This may get fixed in the
+  future.](https://github.com/kbknapp/clap-rs/issues/742).
+
+Performance improvements:
+
+* [PERF #33](https://github.com/BurntSushi/ripgrep/issues/33):
+  ripgrep now performs similar to GNU grep on small corpora.
+* [PERF #136](https://github.com/BurntSushi/ripgrep/issues/136):
+  ripgrep no longer slows down because of argument parsing when given a large
+  argument list.
+
+Feature enhancements:
+
+* Added or improved file type filtering for Elixir.
+* [FEATURE #7](https://github.com/BurntSushi/ripgrep/issues/7):
+  Add a `-f/--file` flag that causes ripgrep to read patterns from a file.
+* [FEATURE #51](https://github.com/BurntSushi/ripgrep/issues/51):
+  Add a `--colors` flag that enables one to customize the colors used in
+  ripgrep's output.
+* [FEATURE #138](https://github.com/BurntSushi/ripgrep/issues/138):
+  Add a `--files-without-match` flag that shows only file paths that contain
+  zero matches.
+* [FEATURE #230](https://github.com/BurntSushi/ripgrep/issues/230):
+  Add completion files to the release (Bash, Fish and PowerShell).
+
+Bug fixes:
+
+* [BUG #37](https://github.com/BurntSushi/ripgrep/issues/37):
+  Use correct ANSI escape sequences when `TERM=screen.linux`.
+* [BUG #94](https://github.com/BurntSushi/ripgrep/issues/94):
+  ripgrep now detects stdin on Windows automatically.
+* [BUG #117](https://github.com/BurntSushi/ripgrep/issues/117):
+  Colors should now work correctly and automatically inside mintty.
+* [BUG #182](https://github.com/BurntSushi/ripgrep/issues/182):
+  Colors should now work within Emacs. In particular, `--color=always` will
+  emit colors regardless of the current environment.
+* [BUG #189](https://github.com/BurntSushi/ripgrep/issues/189):
+  Show less content when running `rg -h`. The full help content can be
+  accessed with `rg --help`.
+* [BUG #210](https://github.com/BurntSushi/ripgrep/issues/210):
+  Support non-UTF-8 file names on Unix platforms.
+* [BUG #231](https://github.com/BurntSushi/ripgrep/issues/231):
+  Switch from block buffering to line buffering.
+* [BUG #241](https://github.com/BurntSushi/ripgrep/issues/241):
+  Some error messages weren't suppressed when `--no-messages` was used.
+
+
+0.2.9
+=====
+Bug fixes:
+
+* [BUG #226](https://github.com/BurntSushi/ripgrep/issues/226):
+  File paths explicitly given on the command line weren't searched in parallel.
+  (This was a regression in `0.2.7`.)
+* [BUG #228](https://github.com/BurntSushi/ripgrep/issues/228):
+  If a directory was given to `--ignore-file`, ripgrep's memory usage would
+  grow without bound.
+
+
+0.2.8
+=====
+Bug fixes:
+
+* Fixed a bug with the SIMD/AVX features for using bytecount in commit
+  `4ca15a`.
+
+
+0.2.7
+=====
+Performance improvements:
+
+* [PERF #223](https://github.com/BurntSushi/ripgrep/pull/223):
+  Added a parallel recursive directory iterator. This results in major
+  performance improvements on large repositories.
+* [PERF #11](https://github.com/BurntSushi/ripgrep/pull/11):
+  ripgrep now uses the `bytecount` library for counting new lines. In some
+  cases, ripgrep runs twice as fast. Use
+  `RUSTFLAGS="-C target-cpu=native" cargo build --release --features 'simd-accel avx-accel'`
+  to get the fastest possible binary.
+
+Feature enhancements:
+
+* Added or improved file type filtering for Agda, Tex, Taskpaper, Markdown,
+  asciidoc, textile, rdoc, org, creole, wiki, pod, C#, PDF, C, C++.
+* [FEATURE #149](https://github.com/BurntSushi/ripgrep/issues/149):
+  Add a new `--no-messages` flag that suppresses error messages.
+  Note that `rg foo 2> /dev/null` also works.
+* [FEATURE #159](https://github.com/BurntSushi/ripgrep/issues/159):
+  Add a new `-m/--max-count` flag that limits the total number of matches
+  printed for each file searched.
+
+Bug fixes:
+
+* [BUG #199](https://github.com/BurntSushi/ripgrep/issues/199):
+  Fixed a bug where `-S/--smart-case` wasn't being applied correctly to
+  literal optimizations.
+* [BUG #203](https://github.com/BurntSushi/ripgrep/issues/203):
+  Mention the full name, ripgrep, in more places. It now appears in
+  the output of `--help` and `--version`. The repository URL is now also
+  in the output of `--help` and the man page.
+* [BUG #215](https://github.com/BurntSushi/ripgrep/issues/215):
+  Include small note about how to search for a pattern that starts with a `-`.
+
+
+0.2.6
+=====
+Feature enhancements:
+
+* Added or improved file type filtering for Fish.
+
+Bug fixes:
+
+* [BUG #206](https://github.com/BurntSushi/ripgrep/issues/206):
+  Fixed a regression with `-g/--glob` flag in `0.2.5`.
+
+
+0.2.5
+=====
+Feature enhancements:
+
+* Added or improved file type filtering for Groovy, Handlebars, Tcl, zsh and
+  Python.
+* [FEATURE #9](https://github.com/BurntSushi/ripgrep/issues/9):
+  Support global gitignore config and `.git/info/exclude` files.
+* [FEATURE #45](https://github.com/BurntSushi/ripgrep/issues/45):
+  Add --ignore-file flag for specifying additional ignore files.
+* [FEATURE #202](https://github.com/BurntSushi/ripgrep/pull/202):
+  Introduce a new
+  [`ignore`](https://github.com/BurntSushi/ripgrep/tree/master/ignore)
+  crate that encapsulates all of ripgrep's gitignore matching logic.
+
+Bug fixes:
+
+* [BUG #44](https://github.com/BurntSushi/ripgrep/issues/44):
+  ripgrep runs slowly when given lots of positional arguments that are
+  directories.
+* [BUG #119](https://github.com/BurntSushi/ripgrep/issues/119):
+  ripgrep didn't reset terminal colors if it was interrupted by `^C`.
+  Fixed in [PR #187](https://github.com/BurntSushi/ripgrep/pull/187).
+* [BUG #184](https://github.com/BurntSushi/ripgrep/issues/184):
+  Fixed a bug related to interpreting gitignore files in parent directories.
+
+
+0.2.4
+=====
+SKIPPED.
+
+
 0.2.3
 =====
 Bug fixes:
```
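The regex 0.2 syntax change called out in the 0.4.0 notes above (POSIX character classes now need double brackets) is easy to see in isolation. The following is a minimal illustrative sketch, not part of this diff; it assumes a project that depends on `regex = "0.2"` (later 1.x releases behave the same for this syntax):

```rust
extern crate regex;

use regex::Regex;

fn main() {
    // Pre-0.2 syntax: "[:upper:]" now parses as a class containing the
    // literal characters `:`, `u`, `p`, `e`, `r`. The POSIX class needs
    // the double-bracket form "[[:upper:]]".
    let upper = Regex::new(r"^[[:upper:]]+$").unwrap();
    assert!(upper.is_match("RIPGREP"));
    assert!(!upper.is_match("ripgrep"));

    // Combined variants keep working, e.g. upper-case letters or blanks.
    let upper_or_blank = Regex::new(r"^[[:upper:][:blank:]]+$").unwrap();
    assert!(upper_or_blank.is_match("RIP GREP"));
}
```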
311
Cargo.lock
generated
311
Cargo.lock
generated
@@ -1,61 +1,100 @@
|
|||||||
[root]
|
[root]
|
||||||
name = "ripgrep"
|
name = "ripgrep"
|
||||||
version = "0.2.2"
|
version = "0.5.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
"atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"docopt 0.6.86 (registry+https://github.com/rust-lang/crates.io-index)",
|
"bytecount 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clap 2.23.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"globset 0.1.0",
|
"encoding_rs 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"grep 0.1.3",
|
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"grep 0.1.6",
|
||||||
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
"ignore 0.1.9",
|
||||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
"lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"memmap 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"num_cpus 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"memmap 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)",
|
"num_cpus 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"term 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
"same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
"termcolor 0.3.2",
|
||||||
"walkdir 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aho-corasick"
|
name = "aho-corasick"
|
||||||
version = "0.5.3"
|
version = "0.6.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "deque"
|
name = "ansi_term"
|
||||||
version = "0.3.1"
|
version = "0.9.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "atty"
|
||||||
|
version = "0.2.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "docopt"
|
name = "bitflags"
|
||||||
version = "0.6.86"
|
version = "0.8.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "bytecount"
|
||||||
|
version = "0.1.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
"simd 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)",
|
]
|
||||||
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"strsim 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
[[package]]
|
||||||
|
name = "cfg-if"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "clap"
|
||||||
|
version = "2.23.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
dependencies = [
|
||||||
|
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "crossbeam"
|
||||||
|
version = "0.2.10"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "encoding_rs"
|
||||||
|
version = "0.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "env_logger"
|
name = "env_logger"
|
||||||
version = "0.3.5"
|
version = "0.4.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -65,35 +104,47 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "fs2"
|
name = "fs2"
|
||||||
version = "0.2.5"
|
version = "0.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "globset"
|
name = "globset"
|
||||||
version = "0.1.0"
|
version = "0.1.4"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "grep"
|
name = "grep"
|
||||||
version = "0.1.3"
|
version = "0.1.6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"memmap 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)",
|
"regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"regex-syntax 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ignore"
|
||||||
|
version = "0.1.9"
|
||||||
|
dependencies = [
|
||||||
|
"crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"globset 0.1.4",
|
||||||
|
"lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -107,76 +158,72 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lazy_static"
|
name = "lazy_static"
|
||||||
version = "0.2.1"
|
version = "0.2.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "libc"
|
||||||
version = "0.2.16"
|
version = "0.2.21"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "log"
|
name = "log"
|
||||||
version = "0.3.6"
|
version = "0.3.7"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memchr"
|
name = "memchr"
|
||||||
version = "0.1.11"
|
version = "1.0.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memmap"
|
name = "memmap"
|
||||||
version = "0.2.3"
|
version = "0.5.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"fs2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "num_cpus"
|
name = "num_cpus"
|
||||||
version = "1.1.0"
|
version = "1.3.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand"
|
|
||||||
version = "0.3.14"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "0.1.77"
|
version = "0.2.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
"memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"regex-syntax 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
"regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"simd 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
"simd 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
"thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.3.7"
|
version = "0.4.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rustc-serialize"
|
name = "same-file"
|
||||||
version = "0.3.19"
|
version = "0.1.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
dependencies = [
|
||||||
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "simd"
|
name = "simd"
|
||||||
@@ -185,46 +232,84 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "strsim"
|
name = "strsim"
|
||||||
version = "0.5.1"
|
version = "0.6.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "term"
|
name = "term_size"
|
||||||
version = "0.4.4"
|
version = "0.2.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "termcolor"
|
||||||
|
version = "0.3.2"
|
||||||
|
dependencies = [
|
||||||
|
"wincolor 0.1.3",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thread-id"
|
name = "thread-id"
|
||||||
version = "2.0.0"
|
version = "3.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "thread_local"
|
name = "thread_local"
|
||||||
version = "0.2.7"
|
version = "0.3.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-segmentation"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-width"
|
||||||
|
version = "0.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unreachable"
|
||||||
|
version = "0.1.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
dependencies = [
|
||||||
|
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "utf8-ranges"
|
name = "utf8-ranges"
|
||||||
version = "0.1.3"
|
version = "1.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "vec_map"
|
||||||
|
version = "0.7.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "void"
|
||||||
|
version = "1.0.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "walkdir"
|
name = "walkdir"
|
||||||
version = "0.1.8"
|
version = "1.0.7"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -238,30 +323,48 @@ name = "winapi-build"
|
|||||||
version = "0.1.1"
|
version = "0.1.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wincolor"
|
||||||
|
version = "0.1.3"
|
||||||
|
dependencies = [
|
||||||
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
]
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
|
"checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"
|
||||||
"checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
|
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
|
||||||
"checksum docopt 0.6.86 (registry+https://github.com/rust-lang/crates.io-index)" = "4a7ef30445607f6fc8720f0a0a2c7442284b629cf0d049286860fae23e71c4d9"
|
"checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"
|
||||||
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
|
"checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4"
|
||||||
|
"checksum bytecount 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1e8f09fbc8c6726a4b616dcfbd4f54491068d6bb1b93ac03c78ac18ff9a5924a"
|
||||||
|
"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
|
||||||
|
"checksum clap 2.23.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d480c39a2e5f9b3a3798c661613e1b0e7a7ae71e005102d4aa910fc3289df484"
|
||||||
|
"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
|
||||||
|
"checksum encoding_rs 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a1cca0a26f904955d80d70b9bff1019e4f4cbc06f2fcbccf8bd3d889cc1c9b7"
|
||||||
|
"checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
|
||||||
"checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"
|
"checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"
|
||||||
"checksum fs2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "bcd414e5a1a979b931bb92f41b7a54106d3f6d2e6c253e9ce943b7cd468251ef"
|
"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf"
|
||||||
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
||||||
"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
|
"checksum lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4732c563b9a21a406565c4747daa7b46742f082911ae4753f390dc9ec7ee1a97"
|
||||||
"checksum libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)" = "408014cace30ee0f767b1c4517980646a573ec61a57957aeeabcac8ac0a02e8d"
|
"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
|
||||||
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
|
"checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
|
||||||
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
|
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
|
||||||
"checksum memmap 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f20f72ed93291a72e22e8b16bb18762183bb4943f0f483da5b8be1a9e8192752"
|
"checksum memmap 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "46f3c7359028b31999287dae4e5047ddfe90a23b7dca2282ce759b491080c99b"
|
||||||
"checksum num_cpus 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8890e6084723d57d0df8d2720b0d60c6ee67d6c93e7169630e4371e88765dcad"
|
"checksum num_cpus 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a18c392466409c50b87369414a2680c93e739aedeb498eb2bff7d7eb569744e2"
|
||||||
"checksum rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2791d88c6defac799c3f20d74f094ca33b9332612d9aef9078519c82e4fe04a5"
|
"checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
|
||||||
"checksum regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)" = "64b03446c466d35b42f2a8b203c8e03ed8b91c0f17b56e1f84f7210a257aa665"
|
"checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
|
||||||
"checksum regex-syntax 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "48f0573bcee95a48da786f8823465b5f2a1fae288a55407aca991e5b3e0eae11"
|
"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
|
||||||
"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b"
|
|
||||||
"checksum simd 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "63b5847c2d766ca7ce7227672850955802fabd779ba616aeabead4c2c3877023"
|
"checksum simd 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "63b5847c2d766ca7ce7227672850955802fabd779ba616aeabead4c2c3877023"
|
||||||
"checksum strsim 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "50c069df92e4b01425a8bf3576d5d417943a6a7272fbabaf5bd80b1aaa76442e"
|
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
|
||||||
"checksum term 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3deff8a2b3b6607d6d7cc32ac25c0b33709453ca9cceac006caac51e963cf94a"
|
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
|
||||||
"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
|
"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
|
||||||
"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
|
"checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
|
||||||
"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
|
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
|
||||||
"checksum walkdir 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c66c0b9792f0a765345452775f3adbd28dde9d33f30d13e5dcc5ae17cf6f3780"
|
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
|
||||||
|
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
|
||||||
|
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
|
||||||
|
"checksum vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8cdc8b93bd0198ed872357fb2e667f7125646b1762f16d60b2c96350d361897"
|
||||||
|
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
|
||||||
|
"checksum walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "bb08f9e670fab86099470b97cd2b252d6527f0b3cc1401acdb595ffc9dd288ff"
|
||||||
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
|
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
|
||||||
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
||||||
|
Cargo.toml (37 changed lines)
@@ -1,6 +1,6 @@
 [package]
 name = "ripgrep"
-version = "0.2.3" #:version
+version = "0.5.0" #:version
 authors = ["Andrew Gallant <jamslam@gmail.com>"]
 description = """
 Line oriented search tool using Rust's regex library. Combines the raw
@@ -11,8 +11,10 @@ homepage = "https://github.com/BurntSushi/ripgrep"
 repository = "https://github.com/BurntSushi/ripgrep"
 readme = "README.md"
 keywords = ["regex", "grep", "egrep", "search", "pattern"]
+categories = ["command-line-utilities", "text-processing"]
 license = "Unlicense/MIT"
 exclude = ["HomebrewFormula"]
+build = "build.rs"

 [[bin]]
 bench = false
@@ -24,29 +26,30 @@ name = "integration"
 path = "tests/tests.rs"

 [dependencies]
-deque = "0.3"
+atty = "0.2.2"
-docopt = "0.6"
+bytecount = "0.1.4"
-env_logger = "0.3"
+clap = "2.23.1"
-globset = { version = "0.1.0", path = "globset" }
+encoding_rs = "0.5.0"
-grep = { version = "0.1.3", path = "grep" }
+env_logger = { version = "0.4", default-features = false }
+grep = { version = "0.1.5", path = "grep" }
+ignore = { version = "0.1.9", path = "ignore" }
 lazy_static = "0.2"
 libc = "0.2"
 log = "0.3"
-memchr = "0.1"
+memchr = "1"
-memmap = "0.2"
+memmap = "0.5"
 num_cpus = "1"
-regex = "0.1.77"
+regex = "0.2.1"
-rustc-serialize = "0.3"
+same-file = "0.1.1"
-term = "0.4"
+termcolor = { version = "0.3.0", path = "termcolor" }
-thread_local = "0.2.7"
-walkdir = "0.1"

-[target.'cfg(windows)'.dependencies]
+[build-dependencies]
-kernel32-sys = "0.2"
+clap = "2.23.1"
-winapi = "0.2"
+lazy_static = "0.2"

 [features]
-simd-accel = ["regex/simd-accel"]
+avx-accel = ["bytecount/avx-accel"]
+simd-accel = ["bytecount/simd-accel", "regex/simd-accel"]

 [profile.release]
 debug = true
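For reference, the reworked feature set above (`avx-accel`, `simd-accel`) is enabled at build time on a nightly compiler; the invocation below is the one the README changes further down also show:

```
RUSTFLAGS="-C target-cpu=native" cargo build --release --features 'simd-accel avx-accel'
```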
README.md (211 changed lines)
@@ -1,25 +1,32 @@
 ripgrep (rg)
 ------------
-`ripgrep` is a command line search tool that combines the usability of The
+`ripgrep` is a line oriented search tool that combines the usability of The
-Silver Searcher (an `ack` clone) with the raw speed of GNU grep. `ripgrep` has
+Silver Searcher (similar to `ack`) with the raw speed of GNU grep. `ripgrep`
-first class support on Windows, Mac and Linux, with binary downloads available
+works by recursively searching your current directory for a regex pattern.
-for [every release](https://github.com/BurntSushi/ripgrep/releases).
+`ripgrep` has first class support on Windows, Mac and Linux, with binary
+downloads available for
+[every release](https://github.com/BurntSushi/ripgrep/releases).

 [](https://travis-ci.org/BurntSushi/ripgrep)
 [](https://ci.appveyor.com/project/BurntSushi/ripgrep)
 [](https://crates.io/crates/ripgrep)

 Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).

+### CHANGELOG

+Please see the [CHANGELOG](CHANGELOG.md) for a release history.

 ### Screenshot of search results

 [](http://burntsushi.net/stuff/ripgrep1.png)

-### Quick example comparing tools
+### Quick examples comparing tools

 This example searches the entire Linux kernel source tree (after running
 `make defconfig && make -j8`) for `[A-Z]+_SUSPEND`, where all matches must be
-words. Timings were collected on a system with an Intel i7-6900K 3.2 GHz.
+words. Timings were collected on a system with an Intel i7-6900K 3.2 GHz, and
+ripgrep was compiled using the `compile` script in this repo.

 Please remember that a single benchmark is never enough! See my
 [blog post on `ripgrep`](http://blog.burntsushi.net/ripgrep/)
@@ -27,17 +34,41 @@ for a very detailed comparison with more benchmarks and analysis.

 | Tool | Command | Line count | Time |
 | ---- | ------- | ---------- | ---- |
-| ripgrep | `rg -n -w '[A-Z]+_SUSPEND'` | 450 | **0.245s** |
+| ripgrep (Unicode) | `rg -n -w '[A-Z]+_SUSPEND'` | 450 | **0.134s** |
 | [The Silver Searcher](https://github.com/ggreer/the_silver_searcher) | `ag -w '[A-Z]+_SUSPEND'` | 450 | 0.753s |
 | [git grep](https://www.kernel.org/pub/software/scm/git/docs/git-grep.html) | `LC_ALL=C git grep -E -n -w '[A-Z]+_SUSPEND'` | 450 | 0.823s |
 | [git grep (Unicode)](https://www.kernel.org/pub/software/scm/git/docs/git-grep.html) | `LC_ALL=en_US.UTF-8 git grep -E -n -w '[A-Z]+_SUSPEND'` | 450 | 2.880s |
 | [sift](https://github.com/svent/sift) | `sift --git -n -w '[A-Z]+_SUSPEND'` | 450 | 3.656s |
 | [The Platinum Searcher](https://github.com/monochromegane/the_platinum_searcher) | `pt -w -e '[A-Z]+_SUSPEND'` | 450 | 12.369s |
-| [ack](http://beyondgrep.com/) | `ack -w '[A-Z]+_SUSPEND'` | 1878 | 16.952s |
+| [ack](https://github.com/petdance/ack2) | `ack -w '[A-Z]+_SUSPEND'` | 1878 | 16.952s |

 (Yes, `ack` [has](https://github.com/petdance/ack2/issues/445) a
 [bug](https://github.com/petdance/ack2/issues/14).)

+Here's another benchmark that disregards gitignore files and searches with a
+whitelist instead. The corpus is the same as in the previous benchmark, and the
+flags passed to each command ensures that they are doing equivalent work:

+| Tool | Command | Line count | Time |
+| ---- | ------- | ---------- | ---- |
+| ripgrep | `rg -L -u -tc -n -w '[A-Z]+_SUSPEND'` | 404 | **0.108s** |
+| [ucg](https://github.com/gvansickle/ucg) | `ucg --type=cc -w '[A-Z]+_SUSPEND'` | 392 | 0.219s |
+| [GNU grep](https://www.gnu.org/software/grep/) | `egrep -R -n --include='*.c' --include='*.h' -w '[A-Z]+_SUSPEND'` | 404 | 0.733s |

+(`ucg` [has slightly different behavior in the presence of symbolic links](https://github.com/gvansickle/ucg/issues/106).)

+And finally, a straight up comparison between ripgrep and GNU grep on a single
+large file (~9.3GB,
+[`OpenSubtitles2016.raw.en.gz`](http://opus.lingfil.uu.se/OpenSubtitles2016/mono/OpenSubtitles2016.raw.en.gz)):

+| Tool | Command | Line count | Time |
+| ---- | ------- | ---------- | ---- |
+| ripgrep | `rg -w 'Sherlock [A-Z]\w+'` | 5268 | **2.520s** |
+| [GNU grep](https://www.gnu.org/software/grep/) | `LC_ALL=C egrep -w 'Sherlock [A-Z]\w+'` | 5268 | 7.143s |

+In the above benchmark, passing the `-n` flag (for showing line numbers)
+increases the times to `3.081s` for ripgrep and `11.403s` for GNU grep.

 ### Why should I use `ripgrep`?

 * It can replace both The Silver Searcher and GNU grep because it is faster
@@ -56,9 +87,34 @@ for a very detailed comparison with more benchmarks and analysis.
 of search results, searching multiple patterns, highlighting matches with
 color and full Unicode support. Unlike GNU grep, `ripgrep` stays fast while
 supporting Unicode (which is always on).
+* `ripgrep` supports searching files in text encodings other than UTF-8, such
+as UTF-16, latin-1, GBK, EUC-JP, Shift_JIS and more. (Some support for
+automatically detecting UTF-16 is provided. Other text encodings must be
+specifically specified with the `-E/--encoding` flag.)

-In other words, use `ripgrep` if you like speed, sane defaults, fewer bugs and
+In other words, use `ripgrep` if you like speed, filtering by default, fewer
-Unicode.
+bugs and Unicode support.

+### Why shouldn't I use `ripgrep`?

+I'd like to try to convince you why you *shouldn't* use `ripgrep`. This should
+give you a glimpse at some important downsides or missing features of
+`ripgrep`.

+* `ripgrep` uses a regex engine based on finite automata, so if you want fancy
+regex features such as backreferences or look around, `ripgrep` won't give
+them to you. `ripgrep` does support lots of things though, including, but not
+limited to: lazy quantification (e.g., `a+?`), repetitions (e.g., `a{2,5}`),
+begin/end assertions (e.g., `^\w+$`), word boundaries (e.g., `\bfoo\b`), and
+support for Unicode categories (e.g., `\p{Sc}` to match currency symbols or
+`\p{Lu}` to match any uppercase letter). (Fancier regexes will never be
+supported.)
+* `ripgrep` doesn't yet support searching compressed files. (Likely to be
+supported in the future.)
+* `ripgrep` doesn't have multiline search. (Unlikely to ever be supported.)

+In other words, if you like fancy regexes, searching compressed files or
+multiline search, then `ripgrep` may not quite meet your needs (yet).

 ### Is it really faster than everything else?

@@ -82,8 +138,9 @@ Summarizing, `ripgrep` is fast because:
 [`RegexSet`](https://doc.rust-lang.org/regex/regex/struct.RegexSet.html).
 That means a single file path can be matched against multiple glob patterns
 simultaneously.
-* Uses a Chase-Lev work-stealing queue for quickly distributing work to
+* It uses a lock-free parallel recursive directory iterator, courtesy of
-multiple threads.
+[`crossbeam`](https://docs.rs/crossbeam) and
+[`ignore`](https://docs.rs/ignore).

 ### Installation

@@ -112,13 +169,48 @@ $ brew tap burntsushi/ripgrep https://github.com/BurntSushi/ripgrep.git
 $ brew install burntsushi/ripgrep/ripgrep-bin
 ```

+If you're a **Windows Chocolatey** user, then you can install `ripgrep` from the [official repo](https://chocolatey.org/packages/ripgrep):

+```
+$ choco install ripgrep
+```

 If you're an **Arch Linux** user, then you can install `ripgrep` from the official repos:

 ```
 $ pacman -S ripgrep
 ```

-If you're a **Rust programmer**, `ripgrep` can be installed with `cargo`:
+If you're a **Gentoo** user, you can install `ripgrep` from the [official repo](https://packages.gentoo.org/packages/sys-apps/ripgrep):

+```
+$ emerge ripgrep
+```

+If you're a **Fedora 24+** user, you can install `ripgrep` from [copr](https://copr.fedorainfracloud.org/coprs/carlgeorge/ripgrep/):

+```
+$ dnf copr enable carlgeorge/ripgrep
+$ dnf install ripgrep
+```

+If you're a **RHEL/CentOS 7** user, you can install `ripgrep` from [copr](https://copr.fedorainfracloud.org/coprs/carlgeorge/ripgrep/):

+```
+$ yum-config-manager --add-repo=https://copr.fedorainfracloud.org/coprs/carlgeorge/ripgrep/repo/epel-7/carlgeorge-ripgrep-epel-7.repo
+$ yum install ripgrep
+```

+If you're a **Nix** user, you can install `ripgrep` from
+[nixpkgs](https://github.com/NixOS/nixpkgs/blob/master/pkgs/tools/text/ripgrep/default.nix):

+```
+$ nix-env --install ripgrep
+$ # (Or using the attribute name, which is also `ripgrep`.)
+```

+If you're a **Rust programmer**, `ripgrep` can be installed with `cargo`. Note
+that this requires you to have **Rust 1.12 or newer** installed.

 ```
 $ cargo install ripgrep
@@ -138,11 +230,10 @@ colorize your output and show line numbers, just like The Silver Searcher.
 Coloring works on Windows too! Colors can be controlled more granularly with
 the `--color` flag.

-One last thing before we get started: `ripgrep` assumes UTF-8 *everywhere*. It
+One last thing before we get started: generally speaking, `ripgrep` assumes the
-can still search files that are invalid UTF-8 (like, say, latin-1), but it will
+input is reading is UTF-8. However, if ripgrep notices a file is encoded as
-simply not work on UTF-16 encoded files or other more exotic encodings.
+UTF-16, then it will know how to search it. For other encodings, you'll need to
-[Support for other encodings may
+explicitly specify them with the `-E/--encoding` flag.
-happen.](https://github.com/BurntSushi/ripgrep/issues/1)

 To recursively search the current directory, while respecting all `.gitignore`
 files, ignore hidden files and directories and skip binary files:
@@ -208,6 +299,12 @@ Or exclude files matching a particular glob:
 $ rg foo -g '!*.min.js'
 ```

+Search and return paths matching a particular glob (i.e., `-g` flag in ag/ack):

+```
+$ rg -g 'doc*' --files
+```

 Search only HTML and CSS files:

 ```
@@ -236,11 +333,21 @@ extensions.
 The syntax supported is
 [documented as part of Rust's regex library](https://doc.rust-lang.org/regex/regex/index.html#syntax).

+### Shell completions

+Shell completion files are included in the release tarball for Bash, Fish, Zsh
+and PowerShell.

+For **bash**, move `rg.bash-completion` to `$XDG_CONFIG_HOME/bash_completion`
+or `/etc/bash_completion.d/`.

+For **fish**, move `rg.fish` to `$HOME/.config/fish/completions`.

 ### Building

 `ripgrep` is written in Rust, so you'll need to grab a
 [Rust installation](https://www.rust-lang.org/) in order to compile it.
-`ripgrep` compiles with Rust 1.9 (stable) or newer. Building is easy:
+`ripgrep` compiles with Rust 1.12 (stable) or newer. Building is easy:

 ```
 $ git clone https://github.com/BurntSushi/ripgrep
@@ -254,9 +361,12 @@ If you have a Rust nightly compiler, then you can enable optional SIMD
 acceleration like so:

 ```
-RUSTFLAGS="-C target-cpu=native" cargo build --release --features simd-accel
+RUSTFLAGS="-C target-cpu=native" cargo build --release --features 'simd-accel avx-accel'
 ```

+If your machine doesn't support AVX instructions, then simply remove
+`avx-accel` from the features list. Similarly for SIMD.

 ### Running tests

 `ripgrep` is relatively well tested, including both unit tests and integration
@@ -267,3 +377,62 @@ $ cargo test
 ```

 from the repository root.

+### Tips

+#### Windows Powershell

+##### Powershell Profile

+To customize powershell on start-up there is a special powershell script that has to be created.
+In order to find its location run command `Get-Command $profile | Select-Object -ExpandProperty Definition`
+See [more](https://technet.microsoft.com/en-us/library/bb613488(v=vs.85).aspx) for profile details.

+Any powershell code in this file gets evaluated at the start of console.
+This way you can have own aliases to be created at start.

+##### Setup function alias

+Often you can find a need to make alias for the favourite utility.

+But powershell function aliases do not behave like your typical linux shell alias.

+You always need to propagate arguments and **Stdin** input.
+But it cannot be done simply as `function grep() { $input | rg.exe --hidden $args }`

+Use below example as reference to how setup alias in powershell.

+```powershell
+function grep {
+    $count = @($input).Count
+    $input.Reset()
+
+    if ($count) {
+        $input | rg.exe --hidden $args
+    }
+    else {
+        rg.exe --hidden $args
+    }
+}
+```

+Powershell special variables:
+* input - is powershell **Stdin** object that allows you to access its content.
+* args - is array of arguments passed to this function.

+This alias checks whether there is **Stdin** input and propagates only if there is some lines.
+Otherwise empty `$input` will make powershell to trigger `rg` to search empty **Stdin**

+### Known issues

+#### I just hit Ctrl+C in the middle of ripgrep's output and now my terminal's foreground color is wrong!

+Type in `color` on Windows and `echo -ne "\033[0m"` on Unix to restore your
+original foreground color.

+PR [#187](https://github.com/BurntSushi/ripgrep/pull/187) fixed this, and it
+was later deprecated in
+[#281](https://github.com/BurntSushi/ripgrep/issues/281). A full explanation is
+available [here][msys issue explanation].

+[msys issue explanation]: https://github.com/BurntSushi/ripgrep/issues/281#issuecomment-269093893
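As a usage sketch of the new `-E/--encoding` flag described in the README changes above (the file name here is hypothetical, and any supported encoding label can stand in for `utf-16`):

```
$ rg -E utf-16 'Sherlock Holmes' some-utf16-file.txt
```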
appveyor.yml (10 changed lines)
@@ -30,6 +30,9 @@ test_script:
 - cargo test --verbose
 - cargo test --verbose --manifest-path grep/Cargo.toml
 - cargo test --verbose --manifest-path globset/Cargo.toml
+- cargo test --verbose --manifest-path ignore/Cargo.toml
+- cargo test --verbose --manifest-path wincolor/Cargo.toml
+- cargo test --verbose --manifest-path termcolor/Cargo.toml

 before_deploy:
 # Generate artifacts for release
@@ -37,6 +40,7 @@ before_deploy:
 - cargo build --release
 - mkdir staging
 - copy target\release\rg.exe staging
+- ps: copy target\release\build\ripgrep-*\out\_rg.ps1 staging
 - cd staging
 # release zipfile will look like 'rust-everywhere-v1.2.3-x86_64-pc-windows-msvc'
 - 7z a ../%PROJECT_NAME%-%APPVEYOR_REPO_TAG_NAME%-%TARGET%.zip *
@@ -59,7 +63,9 @@ deploy:

 branches:
 only:
-- appveyor
 - /\d+\.\d+\.\d+/
-except:
 - master
+# - appveyor
+# - /\d+\.\d+\.\d+/
+# except:
+# - master
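The expanded AppVeyor test matrix above can also be exercised locally; these are the same per-crate commands, issued from the repository root:

```
$ cargo test --verbose --manifest-path ignore/Cargo.toml
$ cargo test --verbose --manifest-path wincolor/Cargo.toml
$ cargo test --verbose --manifest-path termcolor/Cargo.toml
```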
@@ -26,10 +26,10 @@ SUBTITLES_DIR = 'subtitles'
 SUBTITLES_EN_NAME = 'OpenSubtitles2016.raw.en'
 SUBTITLES_EN_NAME_SAMPLE = 'OpenSubtitles2016.raw.sample.en'
 SUBTITLES_EN_NAME_GZ = '%s.gz' % SUBTITLES_EN_NAME
-SUBTITLES_EN_URL = 'http://opus.lingfil.uu.se/OpenSubtitles2016/mono/OpenSubtitles2016.raw.en.gz'
+SUBTITLES_EN_URL = 'http://opus.lingfil.uu.se/OpenSubtitles2016/mono/OpenSubtitles2016.raw.en.gz' # noqa
 SUBTITLES_RU_NAME = 'OpenSubtitles2016.raw.ru'
 SUBTITLES_RU_NAME_GZ = '%s.gz' % SUBTITLES_RU_NAME
-SUBTITLES_RU_URL = 'http://opus.lingfil.uu.se/OpenSubtitles2016/mono/OpenSubtitles2016.raw.ru.gz'
+SUBTITLES_RU_URL = 'http://opus.lingfil.uu.se/OpenSubtitles2016/mono/OpenSubtitles2016.raw.ru.gz' # noqa

 LINUX_DIR = 'linux'
 LINUX_CLONE = 'git://github.com/BurntSushi/linux'
@@ -755,7 +755,8 @@ class Benchmark(object):

 def __init__(self, name=None, pattern=None, commands=None,
 warmup_count=1, count=3, line_count=True,
-allow_missing_commands=False):
+allow_missing_commands=False,
+disabled_cmds=None):
 '''
 Create a single benchmark.

@@ -786,6 +787,11 @@ class Benchmark(object):
 :param bool line_count:
 When set, the lines of each search are counted and included
 in the samples produced.
+:param bool allow_missing_commands:
+When set, if a command is missing, then the benchmark
+will simply skip it.
+:param list(str) disabled_cmds:
+A list of commands to skip.
 '''
 self.name = name
 self.pattern = pattern
@@ -794,6 +800,7 @@ class Benchmark(object):
 self.count = count
 self.line_count = line_count
 self.allow_missing_commands = allow_missing_commands
+self.disabled_cmds = set(disabled_cmds or [])

 def raise_if_missing(self):
 '''
@@ -804,8 +811,11 @@ class Benchmark(object):
 least one command in this benchmark could not be found on this
 system.
 '''
-missing_commands = \
+missing_commands = []
-[c.binary_name for c in self.commands if not c.exists()]
+for c in self.commands:
+if c.binary_name in self.disabled_cmds or c.exists():
+continue
+missing_commands.append(c.binary_name)
 if not self.allow_missing_commands and len(missing_commands) > 0:
 raise MissingCommands(missing_commands)

@@ -821,6 +831,8 @@ class Benchmark(object):
 self.raise_if_missing()
 result = Result(self)
 for cmd in self.commands:
+if cmd.binary_name in self.disabled_cmds:
+continue
 if self.allow_missing_commands and not cmd.exists():
 # Skip this command if we're OK with it.
 continue
@@ -849,7 +861,7 @@ class Benchmark(object):
 :rtype: int
 '''
 if not cmd.exists():
-raise MissingCommand(cmd.cmd[0])
+raise MissingCommands([cmd.cmd[0]])
 cmd.kwargs['stderr'] = subprocess.DEVNULL
 if self.line_count:
 cmd.kwargs['stdout'] = subprocess.PIPE
@@ -936,8 +948,9 @@ class Result(object):
 A dictionary from command name to a set of line
 counts recorded.
 '''
-return {s['line_count'] for s in self.samples_for(cmd)
+return {s['line_count']
-if s['line_count'] is not None}
+for s in self.samples_for(cmd)
+if s['line_count'] is not None}

 def distribution_for(self, cmd):
 '''
@@ -1135,6 +1148,7 @@ def download(suite_dir, choices):

 def collect_benchmarks(suite_dir, filter_pat=None,
 allow_missing_commands=False,
+disabled_cmds=None,
 warmup_iter=1, bench_iter=3):
 '''
 Return an iterable of all runnable benchmarks.
@@ -1161,6 +1175,7 @@ def collect_benchmarks(suite_dir, filter_pat=None,
 benchmark.warmup_count = warmup_iter
 benchmark.count = bench_iter
 benchmark.allow_missing_commands = allow_missing_commands
+benchmark.disabled_cmds = disabled_cmds
 benchmark.raise_if_missing()
 except MissingDependencies as e:
 eprint(
@@ -1195,6 +1210,8 @@ def main():
 p.add_argument(
 '--allow-missing', action='store_true',
 help='Permit benchmarks to run even if some commands are missing.')
+p.add_argument(
+'--disabled', help='A list of comma separated commands to skip.')
 p.add_argument(
 '-f', '--force', action='store_true',
 help='Overwrite existing files if there is a conflict.')
@@ -1222,6 +1239,7 @@ def main():
 benchmarks = collect_benchmarks(
 args.dir, filter_pat=args.bench,
 allow_missing_commands=args.allow_missing,
+disabled_cmds=args.disabled.split(','),
 warmup_iter=args.warmup_iter, bench_iter=args.bench_iter)
 for b in benchmarks:
 print(b.name)
@@ -1248,6 +1266,7 @@ def main():
 benchmarks = collect_benchmarks(
 args.dir, filter_pat=args.bench,
 allow_missing_commands=args.allow_missing,
+disabled_cmds=args.disabled.split(','),
 warmup_iter=args.warmup_iter, bench_iter=args.bench_iter)
 for i, b in enumerate(benchmarks):
 result = b.run()
@@ -1265,8 +1284,6 @@ def main():
 if mean is None:
 # If we couldn't get a distribution for this command then
 # it was skipped.
-print('{name:{pad}} SKIPPED'.format(
-name=name, pad=max_name_len + 2))
 continue
 line_counts = result.line_counts_for(cmd)
 show_fast_cmd, show_line_counts = '', ''
|
|||||||
@@ -0,0 +1,157 @@
|
|||||||
|
benchmark,warmup_iter,iter,name,command,duration,lines,env
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09452986717224121,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08666801452636719,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08859610557556152,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07011771202087402,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06441712379455566,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06532430648803711,68,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10056233406066895,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09930968284606934,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09773039817810059,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07829093933105469,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0827643871307373,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08210110664367676,160,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06728911399841309,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06839728355407715,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.0736091136932373,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.3859975337982178,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.38575077056884766,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.4032607078552246,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.0657193660736084,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.058367013931274414,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.05761837959289551,16,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07888174057006836,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.08236145973205566,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07680559158325195,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3796377182006836,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3852665424346924,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.387775182723999,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06758904457092285,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06706357002258301,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.07329010963439941,370,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06952190399169922,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06766009330749512,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06621623039245605,16,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.29076576232910156,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.3039717674255371,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.3147861957550049,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16966867446899414,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16578006744384766,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16440153121948242,490,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2742593288421631,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.27411365509033203,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2799038887023926,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15713810920715332,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15285205841064453,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.14862322807312012,419,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.06728196144104004,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.06869316101074219,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07347917556762695,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.05894923210144043,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.0584101676940918,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.05851030349731445,1630,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16553878784179688,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16529393196105957,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16843223571777344,23,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16363120079040527,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16463160514831543,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16590571403503418,103,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07585549354553223,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07546257972717285,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07645726203918457,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.0733344554901123,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07160758972167969,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07302546501159668,174,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06922054290771484,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06507658958435059,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06478118896484375,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06373715400695801,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06354117393493652,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06615662574768066,168,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07121825218200684,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07050347328186035,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07254600524902344,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.06099557876586914,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.061118364334106445,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.062296390533447266,6,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2724471092224121,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.26970720291137695,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2697625160217285,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21796512603759766,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.32093358039855957,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.35228729248046875,848,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2418622970581055,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2306008338928223,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.1873059272766113,862,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22467422485351562,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23565077781677246,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21903586387634277,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.19292092323303223,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.19452929496765137,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.1927196979522705,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28882503509521484,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29108643531799316,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29820847511291504,629,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.32578349113464355,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2801830768585205,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.41840386390686035,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.42424988746643066,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3513953685760498,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.35476160049438477,642,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2841978073120117,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.18991756439208984,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.296999454498291,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2810351848602295,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.27904558181762695,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28331899642944336,629,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.065884590148926,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.9066839218139648,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8898587226867676,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8937196731567383,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8904955387115479,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8846819400787354,13,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2928280830383301,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2243812084197998,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2868325710296631,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2832787036895752,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2892146110534668,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22050261497497559,317,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1501314640045166,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3660097122192383,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3461437225341797,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.281209945678711,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.287156343460083,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2730507850646973,691,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.712820529937744,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.7904467582702637,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.657541036605835,735,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.28091931343078613,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2749307155609131,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.27948546409606934,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3499312400817871,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3630790710449219,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.35364317893981934,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36719226837158203,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3705906867980957,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3758120536804199,583,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1964221000671387,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1625583171844482,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1898295879364014,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.168842077255249,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2533905506134033,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2375917434692383,604,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.27202439308166504,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.26978445053100586,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.21065115928649902,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36217236518859863,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36077117919921875,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2883784770965576,579,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.496169090270996,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.315001964569092,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.3024141788482666,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.0939135551452637,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,2.8922672271728516,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.1002702713012695,,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3976116180419922,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.4013686180114746,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3070716857910156,278,
|
||||||
|
@@ -0,0 +1,126 @@
linux_alternates (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------
rg (ignore)         0.090 +/- 0.004 (lines: 68)
rg (whitelist)*     0.067 +/- 0.003 (lines: 68)*

linux_alternates_casei (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------------
rg (ignore)         0.099 +/- 0.001 (lines: 160)
rg (whitelist)*     0.081 +/- 0.002 (lines: 160)*

linux_literal (pattern: PM_RESUME)
----------------------------------
rg (ignore)         0.070 +/- 0.003 (lines: 16)
rg (ignore) (mmap)  0.392 +/- 0.010 (lines: 16)
rg (whitelist)*     0.061 +/- 0.004 (lines: 16)*

linux_literal_casei (pattern: PM_RESUME)
----------------------------------------
rg (ignore)         0.079 +/- 0.003 (lines: 370)
rg (ignore) (mmap)  0.384 +/- 0.004 (lines: 370)
rg (whitelist)*     0.069 +/- 0.003 (lines: 370)*

linux_literal_default (pattern: PM_RESUME)
------------------------------------------
rg*                 0.068 +/- 0.002 (lines: 16)*

linux_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
-----------------------------------------------------------------
rg (ignore)             0.303 +/- 0.012 (lines: 490)
rg (ignore) (ASCII)     0.167 +/- 0.003 (lines: 490)
rg (whitelist)          0.276 +/- 0.003 (lines: 419)
rg (whitelist) (ASCII)* 0.153 +/- 0.004 (lines: 419)*

linux_re_literal_suffix (pattern: [A-Z]+_RESUME)
------------------------------------------------
rg (ignore)         0.070 +/- 0.003 (lines: 1652)
rg (whitelist)*     0.059 +/- 0.000 (lines: 1630)*

linux_unicode_greek (pattern: \p{Greek})
----------------------------------------
rg*                 0.166 +/- 0.002 (lines: 23)*

linux_unicode_greek_casei (pattern: \p{Greek})
----------------------------------------------
rg*                 0.165 +/- 0.001 (lines: 103)*

linux_unicode_word (pattern: \wAh)
----------------------------------
rg (ignore)             0.076 +/- 0.001 (lines: 186)
rg (ignore) (ASCII)     0.073 +/- 0.001 (lines: 174)
rg (whitelist)          0.066 +/- 0.002 (lines: 180)
rg (whitelist) (ASCII)* 0.064 +/- 0.001 (lines: 168)*

linux_word (pattern: PM_RESUME)
-------------------------------
rg (ignore)         0.071 +/- 0.001 (lines: 6)
rg (whitelist)*     0.061 +/- 0.001 (lines: 6)*

subtitles_en_alternate (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------
rg (lines)          0.271 +/- 0.002 (lines: 848)*
rg*                 0.297 +/- 0.070 (lines: 848)

subtitles_en_alternate_casei (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------------
rg*                 2.220 +/- 0.029 (lines: 862)*

subtitles_en_literal (pattern: Sherlock Holmes)
-----------------------------------------------
rg                  0.226 +/- 0.008 (lines: 629)
rg (no mmap)*       0.193 +/- 0.001 (lines: 629)*
rg (lines)          0.293 +/- 0.005 (lines: 629)

subtitles_en_literal_casei (pattern: Sherlock Holmes)
-----------------------------------------------------
rg*                 0.341 +/- 0.070 (lines: 642)*
rg (lines)          0.377 +/- 0.041 (lines: 642)

subtitles_en_literal_word (pattern: Sherlock Holmes)
----------------------------------------------------
rg (ASCII)*         0.257 +/- 0.058 (lines: 629)*
rg                  0.281 +/- 0.002 (lines: 629)

subtitles_en_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
----------------------------------------------------------------------------------------
rg                  1.954 +/- 0.097 (lines: 13)
rg (ASCII)*         1.890 +/- 0.005 (lines: 13)*

subtitles_en_surrounding_words (pattern: \w+\s+Holmes\s+\w+)
------------------------------------------------------------
rg                  0.268 +/- 0.038 (lines: 317)
rg (ASCII)*         0.264 +/- 0.038 (lines: 317)*

subtitles_ru_alternate (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
-----------------------------------------------------------------------------------------------------------
rg (lines)*         1.287 +/- 0.119 (lines: 691)
rg                  1.280 +/- 0.007 (lines: 691)*

subtitles_ru_alternate_casei (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
-----------------------------------------------------------------------------------------------------------------
rg*                 3.720 +/- 0.067 (lines: 735)*

subtitles_ru_literal (pattern: Шерлок Холмс)
--------------------------------------------
rg*                 0.278 +/- 0.003 (lines: 583)*
rg (no mmap)        0.356 +/- 0.007 (lines: 583)
rg (lines)          0.371 +/- 0.004 (lines: 583)

subtitles_ru_literal_casei (pattern: Шерлок Холмс)
--------------------------------------------------
rg*                 1.183 +/- 0.018 (lines: 604)*
rg (lines)          1.220 +/- 0.045 (lines: 604)

subtitles_ru_literal_word (pattern: Шерлок Холмс)
-------------------------------------------------
rg (ASCII)*         0.251 +/- 0.035 (lines: 0)*
rg                  0.337 +/- 0.042 (lines: 579)

subtitles_ru_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
----------------------------------------------------------------------------------------
rg                  3.371 +/- 0.108 (lines: 41)
rg (ASCII)*         3.029 +/- 0.118 (lines: 0)*

subtitles_ru_surrounding_words (pattern: \w+\s+Холмс\s+\w+)
-----------------------------------------------------------
rg*                 0.369 +/- 0.053 (lines: 278)*
@@ -0,0 +1,157 @@
|
|||||||
|
benchmark,warmup_iter,iter,name,command,duration,lines,env
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09608030319213867,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08815908432006836,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08974266052246094,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06675052642822266,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06632375717163086,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06531620025634766,68,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09764790534973145,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10003781318664551,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10493707656860352,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07940077781677246,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07863998413085938,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07791614532470703,160,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06878829002380371,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06836318969726562,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.07277226448059082,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.379986047744751,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.40039825439453125,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.39777183532714844,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.059081315994262695,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.05873990058898926,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.0586698055267334,16,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07791399955749512,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.0774388313293457,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07851481437683105,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3788566589355469,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.385251522064209,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.38781046867370605,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06934094429016113,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.07142090797424316,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.07115054130554199,370,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06683826446533203,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.0690450668334961,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06625819206237793,16,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2812047004699707,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2557988166809082,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2892444133758545,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16913127899169922,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16315627098083496,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16622567176818848,490,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2771792411804199,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2861213684082031,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2742443084716797,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15151619911193848,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15233445167541504,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.1620476245880127,419,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07045555114746094,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07046008110046387,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07087540626525879,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.06178712844848633,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.0631401538848877,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.0627889633178711,1630,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16510963439941406,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16919803619384766,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16366028785705566,23,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.17235875129699707,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16506695747375488,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16702055931091309,103,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07636308670043945,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.0767667293548584,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07441020011901855,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07776570320129395,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07788562774658203,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07390785217285156,174,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06318306922912598,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06787896156311035,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06569766998291016,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06557774543762207,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06389331817626953,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06407284736633301,168,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06868839263916016,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07014894485473633,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06822323799133301,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.05816149711608887,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.0577540397644043,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.06107187271118164,6,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.38473939895629883,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2760040760040283,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.366499662399292,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.32836484909057617,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3063969612121582,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3368823528289795,848,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2332417964935303,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0444729328155518,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0396711826324463,862,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.13685226440429688,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.1380929946899414,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.13843274116516113,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.192030668258667,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.1971268653869629,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2931783199310303,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2777669429779053,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28525233268737793,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22691082954406738,629,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.31324243545532227,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4144246578216553,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.41251444816589355,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.492443323135376,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4691810607910156,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4890565872192383,642,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2987544536590576,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28134918212890625,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29971933364868164,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22062921524047852,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2716941833496094,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2172706127166748,629,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0828537940979004,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8791723251342773,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.079643964767456,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8388440608978271,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8970744609832764,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.6844482421875,13,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28177690505981445,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29820775985717773,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2861142158508301,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22010159492492676,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22217011451721191,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2219986915588379,317,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.146566390991211,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3609087467193604,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1496453285217285,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.281858205795288,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.365553855895996,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2781758308410645,691,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.622640609741211,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.7452948093414307,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.762295961380005,735,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.27922916412353516,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2578129768371582,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.21048188209533691,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.34738945960998535,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.368546724319458,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.35752224922180176,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2654876708984375,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2697427272796631,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3652024269104004,583,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.178579330444336,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1693329811096191,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.144824504852295,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.0454356670379639,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.0725409984588623,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2725732326507568,604,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.20514369010925293,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.18602967262268066,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2725963592529297,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.35959553718566895,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36090755462646484,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.35926032066345215,579,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.509491205215454,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.292212963104248,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.4941117763519287,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.0987064838409424,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.1049976348876953,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.098233222961426,,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3214902877807617,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.38519954681396484,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3827836513519287,278,
|
||||||
|
@@ -0,0 +1,126 @@
linux_alternates (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------
rg (ignore)         0.091 +/- 0.004 (lines: 68)
rg (whitelist)*     0.066 +/- 0.001 (lines: 68)*

linux_alternates_casei (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------------
rg (ignore)         0.101 +/- 0.004 (lines: 160)
rg (whitelist)*     0.079 +/- 0.001 (lines: 160)*

linux_literal (pattern: PM_RESUME)
----------------------------------
rg (ignore)         0.070 +/- 0.002 (lines: 16)
rg (ignore) (mmap)  0.393 +/- 0.011 (lines: 16)
rg (whitelist)*     0.059 +/- 0.000 (lines: 16)*

linux_literal_casei (pattern: PM_RESUME)
----------------------------------------
rg (ignore)         0.078 +/- 0.001 (lines: 370)
rg (ignore) (mmap)  0.384 +/- 0.005 (lines: 370)
rg (whitelist)*     0.071 +/- 0.001 (lines: 370)*

linux_literal_default (pattern: PM_RESUME)
------------------------------------------
rg*                 0.067 +/- 0.001 (lines: 16)*

linux_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
-----------------------------------------------------------------
rg (ignore)             0.275 +/- 0.017 (lines: 490)
rg (ignore) (ASCII)     0.166 +/- 0.003 (lines: 490)
rg (whitelist)          0.279 +/- 0.006 (lines: 419)
rg (whitelist) (ASCII)* 0.155 +/- 0.006 (lines: 419)*

linux_re_literal_suffix (pattern: [A-Z]+_RESUME)
------------------------------------------------
rg (ignore)         0.071 +/- 0.000 (lines: 1652)
rg (whitelist)*     0.063 +/- 0.001 (lines: 1630)*

linux_unicode_greek (pattern: \p{Greek})
----------------------------------------
rg*                 0.166 +/- 0.003 (lines: 23)*

linux_unicode_greek_casei (pattern: \p{Greek})
----------------------------------------------
rg*                 0.168 +/- 0.004 (lines: 103)*

linux_unicode_word (pattern: \wAh)
----------------------------------
rg (ignore)             0.076 +/- 0.001 (lines: 186)
rg (ignore) (ASCII)     0.077 +/- 0.002 (lines: 174)
rg (whitelist)*         0.066 +/- 0.002 (lines: 180)
rg (whitelist) (ASCII)  0.065 +/- 0.001 (lines: 168)*

linux_word (pattern: PM_RESUME)
-------------------------------
rg (ignore)         0.069 +/- 0.001 (lines: 6)
rg (whitelist)*     0.059 +/- 0.002 (lines: 6)*

subtitles_en_alternate (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------
rg (lines)*         0.342 +/- 0.058 (lines: 848)
rg                  0.324 +/- 0.016 (lines: 848)*

subtitles_en_alternate_casei (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------------
rg*                 2.106 +/- 0.110 (lines: 862)*

subtitles_en_literal (pattern: Sherlock Holmes)
-----------------------------------------------
rg*                 0.138 +/- 0.001 (lines: 629)*
rg (no mmap)        0.227 +/- 0.057 (lines: 629)
rg (lines)          0.263 +/- 0.032 (lines: 629)

subtitles_en_literal_casei (pattern: Sherlock Holmes)
-----------------------------------------------------
rg*                 0.380 +/- 0.058 (lines: 642)*
rg (lines)          0.484 +/- 0.013 (lines: 642)

subtitles_en_literal_word (pattern: Sherlock Holmes)
----------------------------------------------------
rg (ASCII)          0.293 +/- 0.010 (lines: 629)
rg*                 0.237 +/- 0.030 (lines: 629)*

subtitles_en_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
----------------------------------------------------------------------------------------
rg                  2.014 +/- 0.117 (lines: 13)
rg (ASCII)*         1.807 +/- 0.110 (lines: 13)*

subtitles_en_surrounding_words (pattern: \w+\s+Holmes\s+\w+)
------------------------------------------------------------
rg                  0.289 +/- 0.009 (lines: 317)
rg (ASCII)*         0.221 +/- 0.001 (lines: 317)*

subtitles_ru_alternate (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
-----------------------------------------------------------------------------------------------------------
rg (lines)*         1.219 +/- 0.123 (lines: 691)*
rg                  1.309 +/- 0.049 (lines: 691)

subtitles_ru_alternate_casei (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
-----------------------------------------------------------------------------------------------------------------
rg*                 3.710 +/- 0.076 (lines: 735)*

subtitles_ru_literal (pattern: Шерлок Холмс)
--------------------------------------------
rg*                 0.249 +/- 0.035 (lines: 583)*
rg (no mmap)        0.358 +/- 0.011 (lines: 583)
rg (lines)          0.300 +/- 0.056 (lines: 583)

subtitles_ru_literal_casei (pattern: Шерлок Холмс)
--------------------------------------------------
rg                  1.164 +/- 0.017 (lines: 604)
rg (lines)*         1.130 +/- 0.124 (lines: 604)*

subtitles_ru_literal_word (pattern: Шерлок Холмс)
-------------------------------------------------
rg (ASCII)*         0.221 +/- 0.045 (lines: 0)*
rg                  0.360 +/- 0.001 (lines: 579)

subtitles_ru_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
----------------------------------------------------------------------------------------
rg                  3.432 +/- 0.121 (lines: 41)
rg (ASCII)*         3.101 +/- 0.004 (lines: 0)*

subtitles_ru_surrounding_words (pattern: \w+\s+Холмс\s+\w+)
-----------------------------------------------------------
rg*                 0.363 +/- 0.036 (lines: 278)*
@@ -0,0 +1,157 @@
|
|||||||
|
benchmark,warmup_iter,iter,name,command,duration,lines,env
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10048675537109375,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09462523460388184,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0931856632232666,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06952047348022461,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07045698165893555,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06589603424072266,68,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09840559959411621,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0973203182220459,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09747123718261719,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07976746559143066,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07973408699035645,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0842599868774414,160,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06900453567504883,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06771540641784668,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.07333683967590332,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.38510584831237793,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.38396191596984863,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.37463903427124023,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.05757570266723633,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.058022260665893555,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.06006050109863281,16,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07654142379760742,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07764244079589844,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07787275314331055,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.38339757919311523,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.38019704818725586,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3887295722961426,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06747794151306152,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06868124008178711,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06679105758666992,370,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.07849764823913574,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.08336472511291504,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06723690032958984,16,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.25814294815063477,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.29274845123291016,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2985391616821289,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16458344459533691,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16898059844970703,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.1756742000579834,490,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.3002643585205078,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2709066867828369,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2683436870574951,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.1489565372467041,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.14751625061035156,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.1487743854522705,419,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.06930160522460938,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07447147369384766,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07432723045349121,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.06141376495361328,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.06345224380493164,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.05813455581665039,1630,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16566061973571777,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.17109084129333496,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16268444061279297,23,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16269755363464355,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16636371612548828,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16133809089660645,103,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07663178443908691,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07986211776733398,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07756590843200684,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07402157783508301,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07861495018005371,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07465910911560059,174,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06782341003417969,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06639862060546875,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06768679618835449,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06638240814208984,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06481051445007324,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06659054756164551,168,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06744074821472168,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06904149055480957,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07302141189575195,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.05972766876220703,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.0587460994720459,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.05879020690917969,6,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3649451732635498,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.37390756607055664,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3691575527191162,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3192598819732666,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3968648910522461,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.347275972366333,848,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.11361026763916,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.9423036575317383,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.9265573024749756,862,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21686100959777832,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23349666595458984,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2394559383392334,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2971608638763428,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2950170040130615,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2873063087463379,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2898998260498047,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21834325790405273,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2833542823791504,629,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3961493968963623,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.30686163902282715,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.44585490226745605,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.46064209938049316,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4497091770172119,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.34823131561279297,642,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2819490432739258,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.18886327743530273,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28542351722717285,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.18991541862487793,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.27848052978515625,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21271944046020508,629,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0810630321502686,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0093939304351807,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8533532619476318,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8952853679656982,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.7897896766662598,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.893296480178833,13,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.19786620140075684,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.1896834373474121,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29248762130737305,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2933495044708252,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29410600662231445,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23128199577331543,317,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.343696117401123,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3544535636901855,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3534214496612549,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2638463973999023,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2450191974639893,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2779006958007812,691,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.7900640964508057,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.812807321548462,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.7412266731262207,735,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2562215328216553,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2781085968017578,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.21145415306091309,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36469101905822754,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.37107086181640625,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.29900336265563965,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3739583492279053,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3521237373352051,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3766622543334961,583,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1903154850006104,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.196908950805664,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1714701652526855,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.0471339225769043,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2229478359222412,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1774308681488037,604,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.20592975616455078,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2680799961090088,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.29538846015930176,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36015796661376953,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3006131649017334,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36701369285583496,579,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.4495208263397217,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.4749486446380615,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.29917049407959,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.0949668884277344,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.1213910579681396,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,2.905003070831299,,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.39461803436279297,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3211812973022461,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3848116397857666,278,
|
||||||
|
@@ -0,0 +1,126 @@
linux_alternates (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------
rg (ignore)         0.096 +/- 0.004 (lines: 68)
rg (whitelist)*     0.069 +/- 0.002 (lines: 68)*

linux_alternates_casei (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------------
rg (ignore)         0.098 +/- 0.001 (lines: 160)
rg (whitelist)*     0.081 +/- 0.003 (lines: 160)*

linux_literal (pattern: PM_RESUME)
----------------------------------
rg (ignore)         0.070 +/- 0.003 (lines: 16)
rg (ignore) (mmap)  0.381 +/- 0.006 (lines: 16)
rg (whitelist)*     0.059 +/- 0.001 (lines: 16)*

linux_literal_casei (pattern: PM_RESUME)
----------------------------------------
rg (ignore)         0.077 +/- 0.001 (lines: 370)
rg (ignore) (mmap)  0.384 +/- 0.004 (lines: 370)
rg (whitelist)*     0.068 +/- 0.001 (lines: 370)*

linux_literal_default (pattern: PM_RESUME)
------------------------------------------
rg*                 0.076 +/- 0.008 (lines: 16)*

linux_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
-----------------------------------------------------------------
rg (ignore)             0.283 +/- 0.022 (lines: 490)
rg (ignore) (ASCII)     0.170 +/- 0.006 (lines: 490)
rg (whitelist)          0.280 +/- 0.018 (lines: 419)
rg (whitelist) (ASCII)* 0.148 +/- 0.001 (lines: 419)*

linux_re_literal_suffix (pattern: [A-Z]+_RESUME)
------------------------------------------------
rg (ignore)         0.073 +/- 0.003 (lines: 1652)
rg (whitelist)*     0.061 +/- 0.003 (lines: 1630)*

linux_unicode_greek (pattern: \p{Greek})
----------------------------------------
rg*                 0.166 +/- 0.004 (lines: 23)*

linux_unicode_greek_casei (pattern: \p{Greek})
----------------------------------------------
rg*                 0.163 +/- 0.003 (lines: 103)*

linux_unicode_word (pattern: \wAh)
----------------------------------
rg (ignore)             0.078 +/- 0.002 (lines: 186)
rg (ignore) (ASCII)     0.076 +/- 0.002 (lines: 174)
rg (whitelist)          0.067 +/- 0.001 (lines: 180)
rg (whitelist) (ASCII)* 0.066 +/- 0.001 (lines: 168)*

linux_word (pattern: PM_RESUME)
-------------------------------
rg (ignore)         0.070 +/- 0.003 (lines: 6)
rg (whitelist)*     0.059 +/- 0.001 (lines: 6)*

subtitles_en_alternate (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------
rg (lines)          0.369 +/- 0.004 (lines: 848)
rg*                 0.354 +/- 0.039 (lines: 848)*

subtitles_en_alternate_casei (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------------
rg*                 1.994 +/- 0.104 (lines: 862)*

subtitles_en_literal (pattern: Sherlock Holmes)
-----------------------------------------------
rg*                 0.230 +/- 0.012 (lines: 629)*
rg (no mmap)        0.293 +/- 0.005 (lines: 629)
rg (lines)          0.264 +/- 0.040 (lines: 629)

subtitles_en_literal_casei (pattern: Sherlock Holmes)
-----------------------------------------------------
rg*                 0.383 +/- 0.070 (lines: 642)*
rg (lines)          0.420 +/- 0.062 (lines: 642)

subtitles_en_literal_word (pattern: Sherlock Holmes)
----------------------------------------------------
rg (ASCII)*         0.252 +/- 0.055 (lines: 629)
rg                  0.227 +/- 0.046 (lines: 629)*

subtitles_en_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
----------------------------------------------------------------------------------------
rg                  1.981 +/- 0.116 (lines: 13)
rg (ASCII)*         1.859 +/- 0.060 (lines: 13)*

subtitles_en_surrounding_words (pattern: \w+\s+Holmes\s+\w+)
------------------------------------------------------------
rg*                 0.227 +/- 0.057 (lines: 317)*
rg (ASCII)          0.273 +/- 0.036 (lines: 317)

subtitles_ru_alternate (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
-----------------------------------------------------------------------------------------------------------
|
||||||
|
rg (lines) 1.351 +/- 0.006 (lines: 691)
|
||||||
|
rg* 1.262 +/- 0.016 (lines: 691)*
|
||||||
|
|
||||||
|
subtitles_ru_alternate_casei (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
|
||||||
|
-----------------------------------------------------------------------------------------------------------------
|
||||||
|
rg* 3.781 +/- 0.037 (lines: 735)*
|
||||||
|
|
||||||
|
subtitles_ru_literal (pattern: Шерлок Холмс)
|
||||||
|
--------------------------------------------
|
||||||
|
rg* 0.249 +/- 0.034 (lines: 583)*
|
||||||
|
rg (no mmap) 0.345 +/- 0.040 (lines: 583)
|
||||||
|
rg (lines) 0.368 +/- 0.013 (lines: 583)
|
||||||
|
|
||||||
|
subtitles_ru_literal_casei (pattern: Шерлок Холмс)
|
||||||
|
--------------------------------------------------
|
||||||
|
rg 1.186 +/- 0.013 (lines: 604)
|
||||||
|
rg (lines)* 1.149 +/- 0.091 (lines: 604)*
|
||||||
|
|
||||||
|
subtitles_ru_literal_word (pattern: Шерлок Холмс)
|
||||||
|
-------------------------------------------------
|
||||||
|
rg (ASCII)* 0.256 +/- 0.046 (lines: 0)*
|
||||||
|
rg 0.343 +/- 0.037 (lines: 579)
|
||||||
|
|
||||||
|
subtitles_ru_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
|
||||||
|
----------------------------------------------------------------------------------------
|
||||||
|
rg 3.408 +/- 0.095 (lines: 41)
|
||||||
|
rg (ASCII)* 3.040 +/- 0.118 (lines: 0)*
|
||||||
|
|
||||||
|
subtitles_ru_surrounding_words (pattern: \w+\s+Холмс\s+\w+)
|
||||||
|
-----------------------------------------------------------
|
||||||
|
rg* 0.367 +/- 0.040 (lines: 278)*
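
Each row in the raw.csv files here records one timed invocation (benchmark, warmup and measured iteration counts, command-variant name, exact command, duration in seconds, matched line count, environment), and the summary blocks above reduce the three measured iterations of each variant to mean +/- sample standard deviation. A minimal sketch of that reduction in Python, assuming a local copy of one raw.csv; this is illustrative only, not the benchsuite's own reporting code:

import csv
from collections import defaultdict
from statistics import mean, stdev

durations = defaultdict(list)   # (benchmark, variant) -> list of durations in seconds
match_lines = {}                # (benchmark, variant) -> reported line count

# "raw.csv" is a hypothetical local copy of any of the raw.csv files in this diff.
with open("raw.csv", newline="") as f:
    for row in csv.DictReader(f):
        key = (row["benchmark"], row["name"])
        durations[key].append(float(row["duration"]))
        match_lines[key] = row["lines"] or "0"

for (bench, variant), samples in sorted(durations.items()):
    print("%-28s %-24s %.3f +/- %.3f (lines: %s)"
          % (bench, variant, mean(samples), stdev(samples), match_lines[(bench, variant)]))

For example, the three linux_alternates "rg (ignore)" timings in the 2016-12-24-archlinux-cheetah-musl-system raw.csv below (0.0947, 0.0934, 0.0870) reduce to the 0.092 +/- 0.004 shown in its summary.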
157
benchsuite/runs/2016-12-24-archlinux-cheetah-musl-system/raw.csv
Normal file
@@ -0,0 +1,157 @@
benchmark,warmup_iter,iter,name,command,duration,lines,env
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09466052055358887,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09342074394226074,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0869603157043457,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06717634201049805,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06487321853637695,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06573486328125,68,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10077238082885742,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10428118705749512,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09920215606689453,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07973098754882812,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07897496223449707,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07888197898864746,160,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06830811500549316,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.0715939998626709,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06830549240112305,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.3897213935852051,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.39376020431518555,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.3769495487213135,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.060272932052612305,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.058103322982788086,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.06174445152282715,16,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07664990425109863,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07809257507324219,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.08361077308654785,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.38071417808532715,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.37515711784362793,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.38091325759887695,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06704211235046387,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06667947769165039,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06667375564575195,370,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06779932975769043,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06725239753723145,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06754946708679199,16,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2662222385406494,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.30044007301330566,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.30494165420532227,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.17267060279846191,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16266226768493652,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16399097442626953,490,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.25603818893432617,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.26952672004699707,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2737579345703125,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.14797663688659668,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15069222450256348,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15210580825805664,419,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.06923699378967285,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07373404502868652,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07130026817321777,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.05809664726257324,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.06291556358337402,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.062150001525878906,1630,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16234064102172852,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16682648658752441,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.1634657382965088,23,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16612553596496582,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.1632983684539795,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16044902801513672,103,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07571697235107422,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07967901229858398,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07469820976257324,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07374238967895508,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07341313362121582,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07449674606323242,174,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06552338600158691,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06324410438537598,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.0665595531463623,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06373834609985352,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06749844551086426,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06431031227111816,168,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.0686798095703125,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06870913505554199,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06892633438110352,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.05895066261291504,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.06163740158081055,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.058525800704956055,6,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.34538722038269043,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.265763521194458,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.26305389404296875,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.20859956741333008,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.20516705513000488,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.20496821403503418,848,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2090365886688232,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.226768732070923,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.226130962371826,862,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.24144577980041504,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21985626220703125,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21775150299072266,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2918863296508789,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.1962728500366211,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.19310998916625977,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.26285672187805176,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.18955564498901367,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.1910560131072998,629,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.40812134742736816,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.31265878677368164,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.31433773040771484,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3256862163543701,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.32616353034973145,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.32959580421447754,642,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.27936625480651855,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28154826164245605,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.20779705047607422,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.26377248764038086,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2896091938018799,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21306395530700684,629,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0740439891815186,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.875295639038086,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0514187812805176,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.6831274032592773,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.6856412887573242,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.6853716373443604,13,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2878584861755371,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22226691246032715,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.294330358505249,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3013031482696533,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2214052677154541,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.19563746452331543,317,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1506719589233398,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.345916509628296,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3316686153411865,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2255687713623047,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.0810630321502686,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2861762046813965,691,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.666182279586792,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.730118751525879,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.7910759449005127,735,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.265308141708374,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2775256633758545,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.29873085021972656,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3600039482116699,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3705918788909912,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.37277793884277344,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3728773593902588,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2903330326080322,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3601820468902588,583,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1635336875915527,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.9739360809326172,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.155383825302124,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2030081748962402,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.0454139709472656,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2500181198120117,604,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2797272205352783,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2795555591583252,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2683413028717041,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2863891124725342,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.35770344734191895,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3524661064147949,579,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.514166831970215,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.4967641830444336,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.2882306575775146,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,2.8897318840026855,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.1153793334960938,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.100428581237793,,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.4093492031097412,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.4054989814758301,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3883328437805176,278,
|
||||||
|
126
benchsuite/runs/2016-12-24-archlinux-cheetah-musl-system/summary
Normal file
126
benchsuite/runs/2016-12-24-archlinux-cheetah-musl-system/summary
Normal file
@@ -0,0 +1,126 @@
linux_alternates (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
|
||||||
|
-------------------------------------------------------------------------
|
||||||
|
rg (ignore) 0.092 +/- 0.004 (lines: 68)
|
||||||
|
rg (whitelist)* 0.066 +/- 0.001 (lines: 68)*
|
||||||
|
|
||||||
|
linux_alternates_casei (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
|
||||||
|
-------------------------------------------------------------------------------
|
||||||
|
rg (ignore) 0.101 +/- 0.003 (lines: 160)
|
||||||
|
rg (whitelist)* 0.079 +/- 0.000 (lines: 160)*
|
||||||
|
|
||||||
|
linux_literal (pattern: PM_RESUME)
|
||||||
|
----------------------------------
|
||||||
|
rg (ignore) 0.069 +/- 0.002 (lines: 16)
|
||||||
|
rg (ignore) (mmap) 0.387 +/- 0.009 (lines: 16)
|
||||||
|
rg (whitelist)* 0.060 +/- 0.002 (lines: 16)*
|
||||||
|
|
||||||
|
linux_literal_casei (pattern: PM_RESUME)
|
||||||
|
----------------------------------------
|
||||||
|
rg (ignore) 0.079 +/- 0.004 (lines: 370)
|
||||||
|
rg (ignore) (mmap) 0.379 +/- 0.003 (lines: 370)
|
||||||
|
rg (whitelist)* 0.067 +/- 0.000 (lines: 370)*
|
||||||
|
|
||||||
|
linux_literal_default (pattern: PM_RESUME)
|
||||||
|
------------------------------------------
|
||||||
|
rg* 0.068 +/- 0.000 (lines: 16)*
|
||||||
|
|
||||||
|
linux_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
|
||||||
|
-----------------------------------------------------------------
|
||||||
|
rg (ignore) 0.291 +/- 0.021 (lines: 490)
|
||||||
|
rg (ignore) (ASCII) 0.166 +/- 0.005 (lines: 490)
|
||||||
|
rg (whitelist) 0.266 +/- 0.009 (lines: 419)
|
||||||
|
rg (whitelist) (ASCII)* 0.150 +/- 0.002 (lines: 419)*
|
||||||
|
|
||||||
|
linux_re_literal_suffix (pattern: [A-Z]+_RESUME)
|
||||||
|
------------------------------------------------
|
||||||
|
rg (ignore) 0.071 +/- 0.002 (lines: 1652)
|
||||||
|
rg (whitelist)* 0.061 +/- 0.003 (lines: 1630)*
|
||||||
|
|
||||||
|
linux_unicode_greek (pattern: \p{Greek})
|
||||||
|
----------------------------------------
|
||||||
|
rg* 0.164 +/- 0.002 (lines: 23)*
|
||||||
|
|
||||||
|
linux_unicode_greek_casei (pattern: \p{Greek})
|
||||||
|
----------------------------------------------
|
||||||
|
rg* 0.163 +/- 0.003 (lines: 103)*
|
||||||
|
|
||||||
|
linux_unicode_word (pattern: \wAh)
|
||||||
|
----------------------------------
|
||||||
|
rg (ignore) 0.077 +/- 0.003 (lines: 186)
|
||||||
|
rg (ignore) (ASCII) 0.074 +/- 0.001 (lines: 174)
|
||||||
|
rg (whitelist)* 0.065 +/- 0.002 (lines: 180)*
|
||||||
|
rg (whitelist) (ASCII) 0.065 +/- 0.002 (lines: 168)
|
||||||
|
|
||||||
|
linux_word (pattern: PM_RESUME)
|
||||||
|
-------------------------------
|
||||||
|
rg (ignore) 0.069 +/- 0.000 (lines: 6)
|
||||||
|
rg (whitelist)* 0.060 +/- 0.002 (lines: 6)*
|
||||||
|
|
||||||
|
subtitles_en_alternate (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
|
||||||
|
---------------------------------------------------------------------------------------------------------------
|
||||||
|
rg (lines) 0.291 +/- 0.047 (lines: 848)
|
||||||
|
rg* 0.206 +/- 0.002 (lines: 848)*
|
||||||
|
|
||||||
|
subtitles_en_alternate_casei (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
|
||||||
|
---------------------------------------------------------------------------------------------------------------------
|
||||||
|
rg* 2.221 +/- 0.010 (lines: 862)*
|
||||||
|
|
||||||
|
subtitles_en_literal (pattern: Sherlock Holmes)
|
||||||
|
-----------------------------------------------
|
||||||
|
rg 0.226 +/- 0.013 (lines: 629)
|
||||||
|
rg (no mmap) 0.227 +/- 0.056 (lines: 629)
|
||||||
|
rg (lines)* 0.214 +/- 0.042 (lines: 629)*
|
||||||
|
|
||||||
|
subtitles_en_literal_casei (pattern: Sherlock Holmes)
|
||||||
|
-----------------------------------------------------
|
||||||
|
rg* 0.345 +/- 0.055 (lines: 642)
|
||||||
|
rg (lines) 0.327 +/- 0.002 (lines: 642)*
|
||||||
|
|
||||||
|
subtitles_en_literal_word (pattern: Sherlock Holmes)
|
||||||
|
----------------------------------------------------
|
||||||
|
rg (ASCII)* 0.256 +/- 0.042 (lines: 629)
|
||||||
|
rg 0.255 +/- 0.039 (lines: 629)*
|
||||||
|
|
||||||
|
subtitles_en_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
|
||||||
|
----------------------------------------------------------------------------------------
|
||||||
|
rg 2.000 +/- 0.109 (lines: 13)
|
||||||
|
rg (ASCII)* 1.685 +/- 0.001 (lines: 13)*
|
||||||
|
|
||||||
|
subtitles_en_surrounding_words (pattern: \w+\s+Holmes\s+\w+)
|
||||||
|
------------------------------------------------------------
|
||||||
|
rg 0.268 +/- 0.040 (lines: 317)
|
||||||
|
rg (ASCII)* 0.239 +/- 0.055 (lines: 317)*
|
||||||
|
|
||||||
|
subtitles_ru_alternate (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
|
||||||
|
-----------------------------------------------------------------------------------------------------------
|
||||||
|
rg (lines) 1.276 +/- 0.109 (lines: 691)
|
||||||
|
rg* 1.198 +/- 0.105 (lines: 691)*
|
||||||
|
|
||||||
|
subtitles_ru_alternate_casei (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
|
||||||
|
-----------------------------------------------------------------------------------------------------------------
|
||||||
|
rg* 3.729 +/- 0.062 (lines: 735)*
|
||||||
|
|
||||||
|
subtitles_ru_literal (pattern: Шерлок Холмс)
|
||||||
|
--------------------------------------------
|
||||||
|
rg* 0.281 +/- 0.017 (lines: 583)*
|
||||||
|
rg (no mmap) 0.368 +/- 0.007 (lines: 583)
|
||||||
|
rg (lines) 0.341 +/- 0.044 (lines: 583)
|
||||||
|
|
||||||
|
subtitles_ru_literal_casei (pattern: Шерлок Холмс)
|
||||||
|
--------------------------------------------------
|
||||||
|
rg* 1.098 +/- 0.107 (lines: 604)*
|
||||||
|
rg (lines) 1.166 +/- 0.107 (lines: 604)
|
||||||
|
|
||||||
|
subtitles_ru_literal_word (pattern: Шерлок Холмс)
|
||||||
|
-------------------------------------------------
|
||||||
|
rg (ASCII)* 0.276 +/- 0.007 (lines: 0)*
|
||||||
|
rg 0.332 +/- 0.040 (lines: 579)
|
||||||
|
|
||||||
|
subtitles_ru_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
|
||||||
|
----------------------------------------------------------------------------------------
|
||||||
|
rg 3.433 +/- 0.126 (lines: 41)
|
||||||
|
rg (ASCII)* 3.035 +/- 0.126 (lines: 0)*
|
||||||
|
|
||||||
|
subtitles_ru_surrounding_words (pattern: \w+\s+Холмс\s+\w+)
|
||||||
|
-----------------------------------------------------------
|
||||||
|
rg* 0.401 +/- 0.011 (lines: 278)*
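
The "(ASCII)" rows for subtitles_ru_literal_word and subtitles_ru_no_literal report "lines: 0" (and an empty line count in raw.csv): with Unicode disabled via (?-u), \w and \b cover only ASCII, so they cannot match the Cyrillic text. A quick illustration of the same effect, using Python's re.ASCII flag as a rough analogue of rg's (?-u); this is illustrative only and not part of the benchmark suite:

import re

text = "Шерлок Холмс"

# Unicode-aware \w (rg's default) matches Cyrillic words.
print(re.findall(r"\w+", text))            # ['Шерлок', 'Холмс']

# ASCII-only \w, analogous to (?-u)\w, matches nothing in this text,
# which is consistent with the "(lines: 0)" entries for the "(ASCII)" variants.
print(re.findall(r"\w+", text, re.ASCII))  # []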
157
benchsuite/runs/2016-12-24-archlinux-cheetah/raw.csv
Normal file
@@ -0,0 +1,157 @@
benchmark,warmup_iter,iter,name,command,duration,lines,env
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09865546226501465,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08865809440612793,68,
|
||||||
|
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0934293270111084,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07241153717041016,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06905841827392578,68,
|
||||||
|
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.06687068939208984,68,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.1011350154876709,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.1053009033203125,160,
|
||||||
|
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10577726364135742,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0832066535949707,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.0812225341796875,160,
|
||||||
|
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08363056182861328,160,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.07260608673095703,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06956887245178223,16,
|
||||||
|
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.07262110710144043,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.3854484558105469,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.3801109790802002,16,
|
||||||
|
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.38498902320861816,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.06220889091491699,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.059615373611450195,16,
|
||||||
|
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.06207394599914551,16,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.08192729949951172,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.08037471771240234,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.08067464828491211,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3890647888183594,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3882875442504883,370,
|
||||||
|
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3826119899749756,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.07255673408508301,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.07240700721740723,370,
|
||||||
|
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06744766235351562,370,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06866455078125,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06612515449523926,16,
|
||||||
|
linux_literal_default,1,3,rg,rg PM_RESUME,0.06630897521972656,16,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.30286335945129395,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2710304260253906,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.30267834663391113,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16382431983947754,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.1649789810180664,490,
|
||||||
|
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16989731788635254,490,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.3308746814727783,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.27356576919555664,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2729830741882324,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16254186630249023,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15763211250305176,419,
|
||||||
|
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15112638473510742,419,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07556724548339844,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07514452934265137,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.06890320777893066,1652,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.061008453369140625,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.06007099151611328,1630,
|
||||||
|
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.058913469314575195,1630,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.1675281524658203,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.1719217300415039,23,
|
||||||
|
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.1675257682800293,23,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16816997528076172,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16349577903747559,103,
|
||||||
|
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.1649951934814453,103,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07944488525390625,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.08015990257263184,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07640767097473145,186,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.0741121768951416,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07456159591674805,174,
|
||||||
|
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07508492469787598,174,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06891131401062012,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.0655059814453125,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06730937957763672,180,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.0630197525024414,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06429147720336914,168,
|
||||||
|
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06670451164245605,168,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07088422775268555,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06971001625061035,6,
|
||||||
|
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.06918931007385254,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.05994749069213867,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.06264781951904297,6,
|
||||||
|
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.061440467834472656,6,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4529764652252197,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28824853897094727,848,
|
||||||
|
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.287844181060791,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.34439826011657715,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.34816765785217285,848,
|
||||||
|
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23656105995178223,848,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.026144027709961,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2452991008758545,862,
|
||||||
|
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2087535858154297,862,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2194046974182129,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22587895393371582,629,
|
||||||
|
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22500324249267578,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3005552291870117,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2805304527282715,629,
|
||||||
|
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.30254316329956055,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2822296619415283,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2205369472503662,629,
|
||||||
|
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23558998107910156,629,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4217369556427002,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4352266788482666,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3286154270172119,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4068911075592041,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4721720218658447,642,
|
||||||
|
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.49277544021606445,642,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2922394275665283,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2809920310974121,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2835381031036377,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.28315305709838867,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29279136657714844,629,
|
||||||
|
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.29344797134399414,629,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0767383575439453,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0589702129364014,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8731834888458252,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.6820619106292725,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.8845677375793457,13,
|
||||||
|
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.6996817588806152,13,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2987375259399414,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2955625057220459,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2865116596221924,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.27387547492980957,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2899343967437744,317,
|
||||||
|
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.19668984413146973,317,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.4088802337646484,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3826014995574951,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.4164769649505615,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2548110485076904,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.080472707748413,691,
|
||||||
|
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3337528705596924,691,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.449100971221924,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.747535467147827,735,
|
||||||
|
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.7990715503692627,735,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.21280717849731445,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.18636178970336914,583,
|
||||||
|
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2740190029144287,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36304640769958496,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3737907409667969,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.37337779998779297,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2974729537963867,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2715010643005371,583,
|
||||||
|
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36052799224853516,583,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.206491231918335,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.202974557876587,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.195291519165039,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2875757217407227,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2895469665527344,604,
|
||||||
|
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.277585506439209,604,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2928614616394043,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2610359191894531,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2692301273345947,,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36007237434387207,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.37184953689575195,579,
|
||||||
|
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3902134895324707,579,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.2949090003967285,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.48958683013916,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.389604091644287,41,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,2.894768714904785,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,2.873474359512329,,
|
||||||
|
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.088130474090576,,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3918273448944092,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.38251543045043945,278,
|
||||||
|
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2919657230377197,278,
126
benchsuite/runs/2016-12-24-archlinux-cheetah/summary
Normal file
@@ -0,0 +1,126 @@
linux_alternates (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
|
||||||
|
-------------------------------------------------------------------------
|
||||||
|
rg (ignore) 0.094 +/- 0.005 (lines: 68)
|
||||||
|
rg (whitelist)* 0.069 +/- 0.003 (lines: 68)*
|
||||||
|
|
||||||
|
linux_alternates_casei (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
|
||||||
|
-------------------------------------------------------------------------------
|
||||||
|
rg (ignore) 0.104 +/- 0.003 (lines: 160)
|
||||||
|
rg (whitelist)* 0.083 +/- 0.001 (lines: 160)*
|
||||||
|
|
||||||
|
linux_literal (pattern: PM_RESUME)
|
||||||
|
----------------------------------
|
||||||
|
rg (ignore) 0.072 +/- 0.002 (lines: 16)
|
||||||
|
rg (ignore) (mmap) 0.384 +/- 0.003 (lines: 16)
|
||||||
|
rg (whitelist)* 0.061 +/- 0.001 (lines: 16)*
|
||||||
|
|
||||||
|
linux_literal_casei (pattern: PM_RESUME)
|
||||||
|
----------------------------------------
|
||||||
|
rg (ignore) 0.081 +/- 0.001 (lines: 370)
|
||||||
|
rg (ignore) (mmap) 0.387 +/- 0.004 (lines: 370)
|
||||||
|
rg (whitelist)* 0.071 +/- 0.003 (lines: 370)*
|
||||||
|
|
||||||
|
linux_literal_default (pattern: PM_RESUME)
|
||||||
|
------------------------------------------
|
||||||
|
rg* 0.067 +/- 0.001 (lines: 16)*
|
||||||
|
|
||||||
|
linux_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
|
||||||
|
-----------------------------------------------------------------
|
||||||
|
rg (ignore) 0.292 +/- 0.018 (lines: 490)
|
||||||
|
rg (ignore) (ASCII) 0.166 +/- 0.003 (lines: 490)
|
||||||
|
rg (whitelist) 0.292 +/- 0.033 (lines: 419)
|
||||||
|
rg (whitelist) (ASCII)* 0.157 +/- 0.006 (lines: 419)*
|
||||||
|
|
||||||
|
linux_re_literal_suffix (pattern: [A-Z]+_RESUME)
|
||||||
|
------------------------------------------------
|
||||||
|
rg (ignore) 0.073 +/- 0.004 (lines: 1652)
|
||||||
|
rg (whitelist)* 0.060 +/- 0.001 (lines: 1630)*
|
||||||
|
|
||||||
|
linux_unicode_greek (pattern: \p{Greek})
|
||||||
|
----------------------------------------
|
||||||
|
rg* 0.169 +/- 0.003 (lines: 23)*
|
||||||
|
|
||||||
|
linux_unicode_greek_casei (pattern: \p{Greek})
|
||||||
|
----------------------------------------------
|
||||||
|
rg* 0.166 +/- 0.002 (lines: 103)*
|
||||||
|
|
||||||
|
linux_unicode_word (pattern: \wAh)
|
||||||
|
----------------------------------
|
||||||
|
rg (ignore) 0.079 +/- 0.002 (lines: 186)
|
||||||
|
rg (ignore) (ASCII) 0.075 +/- 0.000 (lines: 174)
|
||||||
|
rg (whitelist) 0.067 +/- 0.002 (lines: 180)
|
||||||
|
rg (whitelist) (ASCII)* 0.065 +/- 0.002 (lines: 168)*
|
||||||
|
|
||||||
|
linux_word (pattern: PM_RESUME)
|
||||||
|
-------------------------------
|
||||||
|
rg (ignore) 0.070 +/- 0.001 (lines: 6)
|
||||||
|
rg (whitelist)* 0.061 +/- 0.001 (lines: 6)*
|
||||||
|
|
||||||
|
subtitles_en_alternate (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
|
||||||
|
---------------------------------------------------------------------------------------------------------------
|
||||||
|
rg (lines) 0.343 +/- 0.095 (lines: 848)
|
||||||
|
rg* 0.310 +/- 0.063 (lines: 848)*
|
||||||
|
|
||||||
|
subtitles_en_alternate_casei (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
|
||||||
|
---------------------------------------------------------------------------------------------------------------------
|
||||||
|
rg* 2.160 +/- 0.117 (lines: 862)*
|
||||||
|
|
||||||
|
subtitles_en_literal (pattern: Sherlock Holmes)
|
||||||
|
-----------------------------------------------
|
||||||
|
rg* 0.223 +/- 0.004 (lines: 629)*
|
||||||
|
rg (no mmap) 0.295 +/- 0.012 (lines: 629)
|
||||||
|
rg (lines) 0.246 +/- 0.032 (lines: 629)
|
||||||
|
|
||||||
|
subtitles_en_literal_casei (pattern: Sherlock Holmes)
|
||||||
|
-----------------------------------------------------
|
||||||
|
rg* 0.395 +/- 0.058 (lines: 642)*
|
||||||
|
rg (lines) 0.457 +/- 0.045 (lines: 642)
|
||||||
|
|
||||||
|
subtitles_en_literal_word (pattern: Sherlock Holmes)
|
||||||
|
----------------------------------------------------
|
||||||
|
rg (ASCII)* 0.286 +/- 0.006 (lines: 629)*
|
||||||
|
rg 0.290 +/- 0.006 (lines: 629)
|
||||||
|
|
||||||
|
subtitles_en_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
|
||||||
|
----------------------------------------------------------------------------------------
|
||||||
|
rg 2.003 +/- 0.113 (lines: 13)
|
||||||
|
rg (ASCII)* 1.755 +/- 0.112 (lines: 13)*
|
||||||
|
|
||||||
|
subtitles_en_surrounding_words (pattern: \w+\s+Holmes\s+\w+)
|
||||||
|
------------------------------------------------------------
|
||||||
|
rg 0.294 +/- 0.006 (lines: 317)
|
||||||
|
rg (ASCII)* 0.253 +/- 0.050 (lines: 317)*
|
||||||
|
|
||||||
|
subtitles_ru_alternate (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
|
||||||
|
-----------------------------------------------------------------------------------------------------------
|
||||||
|
rg (lines) 1.403 +/- 0.018 (lines: 691)
|
||||||
|
rg* 1.223 +/- 0.130 (lines: 691)*
|
||||||
|
|
||||||
|
subtitles_ru_alternate_casei (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
|
||||||
|
-----------------------------------------------------------------------------------------------------------------
|
||||||
|
rg* 3.665 +/- 0.189 (lines: 735)*
|
||||||
|
|
||||||
|
subtitles_ru_literal (pattern: Шерлок Холмс)
|
||||||
|
--------------------------------------------
|
||||||
|
rg* 0.224 +/- 0.045 (lines: 583)*
|
||||||
|
rg (no mmap) 0.370 +/- 0.006 (lines: 583)
|
||||||
|
rg (lines) 0.310 +/- 0.046 (lines: 583)
|
||||||
|
|
||||||
|
subtitles_ru_literal_casei (pattern: Шерлок Холмс)
|
||||||
|
--------------------------------------------------
|
||||||
|
rg* 1.202 +/- 0.006 (lines: 604)*
|
||||||
|
rg (lines) 1.285 +/- 0.006 (lines: 604)
|
||||||
|
|
||||||
|
subtitles_ru_literal_word (pattern: Шерлок Холмс)
|
||||||
|
-------------------------------------------------
|
||||||
|
rg (ASCII)* 0.274 +/- 0.017 (lines: 0)*
|
||||||
|
rg 0.374 +/- 0.015 (lines: 579)
|
||||||
|
|
||||||
|
subtitles_ru_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
|
||||||
|
----------------------------------------------------------------------------------------
|
||||||
|
rg 3.391 +/- 0.097 (lines: 41)
|
||||||
|
rg (ASCII)* 2.952 +/- 0.118 (lines: 0)*
|
||||||
|
|
||||||
|
subtitles_ru_surrounding_words (pattern: \w+\s+Холмс\s+\w+)
|
||||||
|
-----------------------------------------------------------
|
||||||
|
rg* 0.355 +/- 0.055 (lines: 278)*
|
||||||
157  benchsuite/runs/2016-12-30-archlinux-cheetah/raw.csv  Normal file
@@ -0,0 +1,157 @@
benchmark,warmup_iter,iter,name,command,duration,lines,env
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09816598892211914,68,
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08965039253234863,68,
linux_alternates,1,3,rg (ignore),rg -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.09101128578186035,68,
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07472872734069824,68,
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07628297805786133,68,
linux_alternates,1,3,rg (whitelist),rg --no-ignore -n ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07189559936523438,68,
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10288548469543457,160,
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.10204243659973145,160,
linux_alternates_casei,1,3,rg (ignore),rg -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.1031193733215332,160,
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08190178871154785,160,
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.08523178100585938,160,
linux_alternates_casei,1,3,rg (whitelist),rg --no-ignore -n -i ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT,0.07952761650085449,160,
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06972551345825195,16,
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.0691523551940918,16,
linux_literal,1,3,rg (ignore),rg -n PM_RESUME,0.06865429878234863,16,
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.39247632026672363,16,
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.4009978771209717,16,
linux_literal,1,3,rg (ignore) (mmap),rg -n --mmap PM_RESUME,0.40122294425964355,16,
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.062048912048339844,16,
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.05932760238647461,16,
linux_literal,1,3,rg (whitelist),rg -n --no-ignore -tall PM_RESUME,0.058171749114990234,16,
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.08261799812316895,370,
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.0780181884765625,370,
linux_literal_casei,1,3,rg (ignore),rg -n -i PM_RESUME,0.07934045791625977,370,
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.4008915424346924,370,
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.3899986743927002,370,
linux_literal_casei,1,3,rg (ignore) (mmap),rg -n -i --mmap PM_RESUME,0.39725732803344727,370,
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.07104611396789551,370,
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.0707247257232666,370,
linux_literal_casei,1,3,rg (whitelist),rg -n -i --no-ignore -tall PM_RESUME,0.06864142417907715,370,
linux_literal_default,1,3,rg,rg PM_RESUME,0.06764745712280273,16,
linux_literal_default,1,3,rg,rg PM_RESUME,0.06994485855102539,16,
linux_literal_default,1,3,rg,rg PM_RESUME,0.0682222843170166,16,
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.27941250801086426,490,
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.31389880180358887,490,
linux_no_literal,1,3,rg (ignore),rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.30502963066101074,490,
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16565680503845215,490,
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.16579079627990723,490,
linux_no_literal,1,3,rg (ignore) (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.169691801071167,490,
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2969038486480713,419,
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.2995884418487549,419,
linux_no_literal,1,3,rg (whitelist),rg -n --no-ignore -tall \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.27426910400390625,419,
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15207958221435547,419,
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15367984771728516,419,
linux_no_literal,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5},0.15249848365783691,419,
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07145977020263672,1652,
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.07139325141906738,1652,
linux_re_literal_suffix,1,3,rg (ignore),rg -n [A-Z]+_RESUME,0.0708932876586914,1652,
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.05987191200256348,1630,
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.06223797798156738,1630,
linux_re_literal_suffix,1,3,rg (whitelist),rg -n --no-ignore -tall [A-Z]+_RESUME,0.06115579605102539,1630,
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16427040100097656,23,
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.16524410247802734,23,
linux_unicode_greek,1,3,rg,rg -n \p{Greek},0.1664714813232422,23,
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16639113426208496,103,
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16803503036499023,103,
linux_unicode_greek_casei,1,3,rg,rg -n -i \p{Greek},0.16656923294067383,103,
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07580804824829102,186,
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07589507102966309,186,
linux_unicode_word,1,3,rg (ignore),rg -n \wAh,0.07574295997619629,186,
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07641291618347168,174,
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07523059844970703,174,
linux_unicode_word,1,3,rg (ignore) (ASCII),rg -n (?-u)\wAh,0.07748007774353027,174,
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06804847717285156,180,
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06720519065856934,180,
linux_unicode_word,1,3,rg (whitelist),rg -n --no-ignore -tall \wAh,0.06687021255493164,180,
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06528687477111816,168,
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.07101035118103027,168,
linux_unicode_word,1,3,rg (whitelist) (ASCII),rg -n --no-ignore -tall (?-u)\wAh,0.06446981430053711,168,
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07159972190856934,6,
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.0695488452911377,6,
linux_word,1,3,rg (ignore),rg -n -w PM_RESUME,0.07082796096801758,6,
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.06297016143798828,6,
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.06128263473510742,6,
linux_word,1,3,rg (whitelist),rg -n -w --no-ignore -tall PM_RESUME,0.06177973747253418,6,
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.36841607093811035,848,
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.30306172370910645,848,
subtitles_en_alternate,1,3,rg (lines),rg -n Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3288271427154541,848,
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3186373710632324,848,
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23814082145690918,848,
subtitles_en_alternate,1,3,rg,rg Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23152780532836914,848,
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2480580806732178,862,
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.2288904190063477,862,
subtitles_en_alternate_casei,1,3,rg,rg -n -i Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.287020206451416,862,
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21738362312316895,629,
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.16032648086547852,629,
subtitles_en_literal,1,3,rg,rg Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.15392351150512695,629,
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21230578422546387,629,
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.27013158798217773,629,
subtitles_en_literal,1,3,rg (no mmap),rg --no-mmap Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.19994258880615234,629,
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2728753089904785,629,
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23652935028076172,629,
subtitles_en_literal,1,3,rg (lines),rg -n Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2579770088195801,629,
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.3031468391418457,642,
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.40822505950927734,642,
subtitles_en_literal_casei,1,3,rg,rg -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.38727545738220215,642,
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4260599613189697,642,
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.4490511417388916,642,
subtitles_en_literal_casei,1,3,rg (lines),rg -n -i Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.49449872970581055,642,
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2707977294921875,629,
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2691836357116699,629,
subtitles_en_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Sherlock Holmes(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.24464011192321777,629,
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22373199462890625,629,
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.25702810287475586,629,
subtitles_en_literal_word,1,3,rg,rg -nw Sherlock Holmes /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.23047828674316406,629,
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.139404773712158,13,
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0484464168548584,13,
subtitles_en_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,2.0293972492218018,13,
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.840238094329834,13,
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.7812306880950928,13,
subtitles_en_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,1.7657690048217773,13,
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.26054978370666504,317,
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2869753837585449,317,
subtitles_en_surrounding_words,1,3,rg,rg -n \w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.22949600219726562,317,
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.21858429908752441,317,
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.2064223289489746,317,
subtitles_en_surrounding_words,1,3,rg (ASCII),rg -n (?-u)\w+\s+Holmes\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.sample.en,0.20789289474487305,317,
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.313758373260498,691,
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2925219535827637,691,
subtitles_ru_alternate,1,3,rg (lines),rg -n Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.3444299697875977,691,
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.264918565750122,691,
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.30733060836792,691,
subtitles_ru_alternate,1,3,rg,rg Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1466560363769531,691,
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.783818244934082,735,
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.76894474029541,735,
subtitles_ru_alternate_casei,1,3,rg,rg -n -i Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.788987398147583,735,
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2822730541229248,583,
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.25232434272766113,583,
subtitles_ru_literal,1,3,rg,rg Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2563645839691162,583,
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.34694504737854004,583,
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3202054500579834,583,
subtitles_ru_literal,1,3,rg (no mmap),rg --no-mmap Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3236703872680664,583,
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.36035776138305664,583,
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3270585536956787,583,
subtitles_ru_literal,1,3,rg (lines),rg -n Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3121967315673828,583,
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.0397696495056152,604,
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.158402442932129,604,
subtitles_ru_literal_casei,1,3,rg,rg -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1158676147460938,604,
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.2041549682617188,604,
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1511006355285645,604,
subtitles_ru_literal_casei,1,3,rg (lines),rg -n -i Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,1.1794021129608154,604,
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.19694828987121582,,
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.1980271339416504,,
subtitles_ru_literal_word,1,3,rg (ASCII),rg -n (?-u:\b)Шерлок Холмс(?-u:\b) /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.2128591537475586,,
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3568108081817627,579,
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3353869915008545,579,
subtitles_ru_literal_word,1,3,rg,rg -nw Шерлок Холмс /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3075387477874756,579,
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.5629587173461914,41,
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.5984435081481934,41,
subtitles_ru_no_literal,1,3,rg,rg -n \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.4725229740142822,41,
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.170077323913574,,
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.064476490020752,,
subtitles_ru_no_literal,1,3,rg (ASCII),rg -n (?-u)\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5} /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,3.159156084060669,,
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3924906253814697,278,
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.3874075412750244,278,
subtitles_ru_surrounding_words,1,3,rg,rg -n \w+\s+Холмс\s+\w+ /data/benchsuite/subtitles/OpenSubtitles2016.raw.ru,0.39940643310546875,278,
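Editor's aside (not part of the diff above): the raw.csv files added in this run share the column layout named in the header row, `benchmark,warmup_iter,iter,name,command,duration,lines,env`. The sketch below shows one way such a row could be read into a typed record. It is an illustration only, not the benchsuite's own code, and it assumes no field contains a comma (true for every row in this file); a real reader would use a CSV library.

```rust
// Hypothetical reader for the raw.csv row format shown above.
#[derive(Debug)]
struct Row {
    benchmark: String,
    warmup_iter: u32,
    iter: u32,
    name: String,
    command: String,
    duration: f64,
    lines: Option<u64>, // empty when no line count was recorded
    env: String,
}

fn parse_row(line: &str) -> Option<Row> {
    // Assumes fields contain no commas, which holds for this data set.
    let fields: Vec<&str> = line.split(',').collect();
    if fields.len() != 8 {
        return None;
    }
    Some(Row {
        benchmark: fields[0].to_string(),
        warmup_iter: fields[1].parse().ok()?,
        iter: fields[2].parse().ok()?,
        name: fields[3].to_string(),
        command: fields[4].to_string(),
        duration: fields[5].parse().ok()?,
        lines: fields[6].parse().ok(),
        env: fields[7].to_string(),
    })
}

fn main() {
    let line = "linux_literal_default,1,3,rg,rg PM_RESUME,0.06764745712280273,16,";
    println!("{:?}", parse_row(line).unwrap());
}
```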
126  benchsuite/runs/2016-12-30-archlinux-cheetah/summary  Normal file
@@ -0,0 +1,126 @@
linux_alternates (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------
rg (ignore)       0.093 +/- 0.005 (lines: 68)
rg (whitelist)*   0.074 +/- 0.002 (lines: 68)*

linux_alternates_casei (pattern: ERR_SYS|PME_TURN_OFF|LINK_REQ_RST|CFG_BME_EVT)
-------------------------------------------------------------------------------
rg (ignore)       0.103 +/- 0.001 (lines: 160)
rg (whitelist)*   0.082 +/- 0.003 (lines: 160)*

linux_literal (pattern: PM_RESUME)
----------------------------------
rg (ignore)          0.069 +/- 0.001 (lines: 16)
rg (ignore) (mmap)   0.398 +/- 0.005 (lines: 16)
rg (whitelist)*      0.060 +/- 0.002 (lines: 16)*

linux_literal_casei (pattern: PM_RESUME)
----------------------------------------
rg (ignore)          0.080 +/- 0.002 (lines: 370)
rg (ignore) (mmap)   0.396 +/- 0.006 (lines: 370)
rg (whitelist)*      0.070 +/- 0.001 (lines: 370)*

linux_literal_default (pattern: PM_RESUME)
------------------------------------------
rg*   0.069 +/- 0.001 (lines: 16)*

linux_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
-----------------------------------------------------------------
rg (ignore)              0.299 +/- 0.018 (lines: 490)
rg (ignore) (ASCII)      0.167 +/- 0.002 (lines: 490)
rg (whitelist)           0.290 +/- 0.014 (lines: 419)
rg (whitelist) (ASCII)*  0.153 +/- 0.001 (lines: 419)*

linux_re_literal_suffix (pattern: [A-Z]+_RESUME)
------------------------------------------------
rg (ignore)       0.071 +/- 0.000 (lines: 1652)
rg (whitelist)*   0.061 +/- 0.001 (lines: 1630)*

linux_unicode_greek (pattern: \p{Greek})
----------------------------------------
rg*   0.165 +/- 0.001 (lines: 23)*

linux_unicode_greek_casei (pattern: \p{Greek})
----------------------------------------------
rg*   0.167 +/- 0.001 (lines: 103)*

linux_unicode_word (pattern: \wAh)
----------------------------------
rg (ignore)              0.076 +/- 0.000 (lines: 186)
rg (ignore) (ASCII)      0.076 +/- 0.001 (lines: 174)
rg (whitelist)           0.067 +/- 0.001 (lines: 180)
rg (whitelist) (ASCII)*  0.067 +/- 0.004 (lines: 168)*

linux_word (pattern: PM_RESUME)
-------------------------------
rg (ignore)       0.071 +/- 0.001 (lines: 6)
rg (whitelist)*   0.062 +/- 0.001 (lines: 6)*

subtitles_en_alternate (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------
rg (lines)   0.333 +/- 0.033 (lines: 848)
rg*          0.263 +/- 0.048 (lines: 848)*

subtitles_en_alternate_casei (pattern: Sherlock Holmes|John Watson|Irene Adler|Inspector Lestrade|Professor Moriarty)
---------------------------------------------------------------------------------------------------------------------
rg*   2.255 +/- 0.030 (lines: 862)*

subtitles_en_literal (pattern: Sherlock Holmes)
-----------------------------------------------
rg*            0.177 +/- 0.035 (lines: 629)*
rg (no mmap)   0.227 +/- 0.037 (lines: 629)
rg (lines)     0.256 +/- 0.018 (lines: 629)

subtitles_en_literal_casei (pattern: Sherlock Holmes)
-----------------------------------------------------
rg*          0.366 +/- 0.056 (lines: 642)*
rg (lines)   0.457 +/- 0.035 (lines: 642)

subtitles_en_literal_word (pattern: Sherlock Holmes)
----------------------------------------------------
rg (ASCII)   0.262 +/- 0.015 (lines: 629)
rg*          0.237 +/- 0.018 (lines: 629)*

subtitles_en_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
----------------------------------------------------------------------------------------
rg            2.072 +/- 0.059 (lines: 13)
rg (ASCII)*   1.796 +/- 0.039 (lines: 13)*

subtitles_en_surrounding_words (pattern: \w+\s+Holmes\s+\w+)
------------------------------------------------------------
rg            0.259 +/- 0.029 (lines: 317)
rg (ASCII)*   0.211 +/- 0.007 (lines: 317)*

subtitles_ru_alternate (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
-----------------------------------------------------------------------------------------------------------
rg (lines)   1.317 +/- 0.026 (lines: 691)
rg*          1.240 +/- 0.083 (lines: 691)*

subtitles_ru_alternate_casei (pattern: Шерлок Холмс|Джон Уотсон|Ирен Адлер|инспектор Лестрейд|профессор Мориарти)
-----------------------------------------------------------------------------------------------------------------
rg*   3.781 +/- 0.010 (lines: 735)*

subtitles_ru_literal (pattern: Шерлок Холмс)
--------------------------------------------
rg*            0.264 +/- 0.016 (lines: 583)*
rg (no mmap)   0.330 +/- 0.015 (lines: 583)
rg (lines)     0.333 +/- 0.025 (lines: 583)

subtitles_ru_literal_casei (pattern: Шерлок Холмс)
--------------------------------------------------
rg*          1.105 +/- 0.060 (lines: 604)*
rg (lines)   1.178 +/- 0.027 (lines: 604)

subtitles_ru_literal_word (pattern: Шерлок Холмс)
-------------------------------------------------
rg (ASCII)*   0.203 +/- 0.009 (lines: 0)*
rg            0.333 +/- 0.025 (lines: 579)

subtitles_ru_no_literal (pattern: \w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5}\s+\w{5})
----------------------------------------------------------------------------------------
rg            3.545 +/- 0.065 (lines: 41)
rg (ASCII)*   3.131 +/- 0.058 (lines: 0)*

subtitles_ru_surrounding_words (pattern: \w+\s+Холмс\s+\w+)
-----------------------------------------------------------
rg*   0.393 +/- 0.006 (lines: 278)*
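Editor's aside (not part of the diff above): the summary files appear to condense the three timed runs per benchmark/tool pair in raw.csv into a single "mean +/- spread" figure; the numbers are consistent with the mean and sample standard deviation of the three durations. The sketch below illustrates that computation; it is an assumption about how the figures were derived, not the benchsuite's own code.

```rust
// Hypothetical reduction of three raw.csv timings to a "mean +/- stddev" figure.
fn mean_and_stddev(samples: &[f64]) -> (f64, f64) {
    let n = samples.len() as f64;
    let mean = samples.iter().sum::<f64>() / n;
    // Sample (Bessel-corrected) variance over the runs.
    let var = samples.iter().map(|s| (s - mean).powi(2)).sum::<f64>() / (n - 1.0);
    (mean, var.sqrt())
}

fn main() {
    // The three linux_alternates "rg (ignore)" durations from the 2016-12-30 raw.csv.
    let runs = [0.09816598892211914, 0.08965039253234863, 0.09101128578186035];
    let (mean, sd) = mean_and_stddev(&runs);
    println!("{:.3} +/- {:.3}", mean, sd); // prints approximately "0.093 +/- 0.005"
}
```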
27  build.rs  Normal file
@@ -0,0 +1,27 @@
#[macro_use]
extern crate clap;
#[macro_use]
extern crate lazy_static;

use std::env;
use std::fs;

use clap::Shell;

#[allow(dead_code)]
#[path = "src/app.rs"]
mod app;

fn main() {
    let outdir = match env::var_os("OUT_DIR") {
        None => return,
        Some(outdir) => outdir,
    };
    fs::create_dir_all(&outdir).unwrap();

    let mut app = app::app();
    app.gen_completions("rg", Shell::Bash, &outdir);
    app.gen_completions("rg", Shell::Fish, &outdir);
    app.gen_completions("rg", Shell::Zsh, &outdir);
    app.gen_completions("rg", Shell::PowerShell, &outdir);
}
@@ -16,9 +16,11 @@ mk_tarball() {
     local out_dir=$(pwd)
     local name="${PROJECT_NAME}-${TRAVIS_TAG}-${TARGET}"
     mkdir "$td/$name"
+    mkdir "$td/$name/complete"

     cp target/$TARGET/release/rg "$td/$name/"
     cp {doc/rg.1,README.md,UNLICENSE,COPYING,LICENSE-MIT} "$td/$name/"
+    cp target/$TARGET/release/build/ripgrep-*/out/{_rg,rg.bash-completion,rg.fish,_rg.ps1} "$td/$name/complete/"

     pushd $td
     tar czf "$out_dir/$name.tar.gz" *
@@ -17,9 +17,6 @@ install_c_toolchain() {
 }

 install_rustup() {
-    # uninstall the rust toolchain installed by travis, we are going to use rustup
-    sh ~/rust/lib/rustlib/uninstall.sh
-
     curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain=$TRAVIS_RUST_VERSION

     rustc -V
@@ -23,6 +23,10 @@ run_test_suite() {
     cargo test --target $TARGET --verbose --manifest-path grep/Cargo.toml
     cargo build --target $TARGET --verbose --manifest-path globset/Cargo.toml
     cargo test --target $TARGET --verbose --manifest-path globset/Cargo.toml
+    cargo build --target $TARGET --verbose --manifest-path ignore/Cargo.toml
+    cargo test --target $TARGET --verbose --manifest-path ignore/Cargo.toml
+    cargo build --target $TARGET --verbose --manifest-path termcolor/Cargo.toml
+    cargo test --target $TARGET --verbose --manifest-path termcolor/Cargo.toml

     # sanity check the file type
     file target/$TARGET/debug/rg
9  compile
@@ -1,5 +1,8 @@
 #!/bin/sh

-export RUSTFLAGS="-C target-feature=+ssse3"
-# export RUSTFLAGS="-C target-cpu=native"
-cargo build --release --features simd-accel
+# export RUSTFLAGS="-C target-feature=+ssse3"
+# cargo build --release --features 'simd-accel'
+export RUSTFLAGS="-C target-cpu=native"
+cargo build --release --features 'simd-accel avx-accel'
+# cargo build --release --features 'simd-accel avx-accel' --target x86_64-unknown-linux-musl
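Editor's aside (not part of the diff above): the `compile` script only toggles RUSTFLAGS and cargo feature flags such as `simd-accel` and `avx-accel`; the actual selection of an accelerated code path happens at compile time inside the crates via `cfg(feature = ...)`. The sketch below illustrates that general pattern. It is a hypothetical example, not ripgrep's implementation, and the function name is invented for illustration.

```rust
// Illustrative feature gating, in the style enabled by `--features 'simd-accel avx-accel'`.

#[cfg(feature = "avx-accel")]
fn count_newlines(haystack: &[u8]) -> usize {
    // A vectorized implementation would live here when the feature is enabled.
    // (Placeholder: the sketch falls back to the scalar loop.)
    haystack.iter().filter(|&&b| b == b'\n').count()
}

#[cfg(not(feature = "avx-accel"))]
fn count_newlines(haystack: &[u8]) -> usize {
    // Portable scalar fallback used when the feature is not enabled.
    haystack.iter().filter(|&&b| b == b'\n').count()
}

fn main() {
    let text = b"one\ntwo\nthree\n";
    assert_eq!(count_newlines(text), 3);
}
```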
222  doc/rg.1
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pandoc 1.17.2
+.\" Automatically generated by Pandoc 1.19.2.1
 .\"
 .TH "rg" "1"
 .hy
@@ -7,11 +7,11 @@
 rg \- recursively search current directory for lines matching a pattern
 .SH SYNOPSIS
 .PP
-rg [\f[I]options\f[]] \-e PATTERN ...
-[\f[I]<\f[]path\f[I]> ...\f[]]
-.PP
 rg [\f[I]options\f[]] <\f[I]pattern\f[]> [\f[I]<\f[]path\f[I]> ...\f[]]
 .PP
+rg [\f[I]options\f[]] (\-e PATTERN | \-f FILE) ...
+[\f[I]<\f[]path\f[I]> ...\f[]]
+.PP
 rg [\f[I]options\f[]] \-\-files [\f[I]<\f[]path\f[I]> ...\f[]]
 .PP
 rg [\f[I]options\f[]] \-\-type\-list
@@ -21,8 +21,15 @@ rg [\f[I]options\f[]] \-\-help
 rg [\f[I]options\f[]] \-\-version
 .SH DESCRIPTION
 .PP
-rg (ripgrep) combines the usability of The Silver Searcher (an ack
+ripgrep (rg) combines the usability of The Silver Searcher (an ack
 clone) with the raw speed of grep.
+.PP
+ripgrep\[aq]s regex engine uses finite automata and guarantees linear
+time searching.
+Because of this, features like backreferences and arbitrary lookaround
+are not supported.
+.PP
+Project home page: https://github.com/BurntSushi/ripgrep
 .SH COMMON OPTIONS
 .TP
 .B \-a, \-\-text
@@ -46,6 +53,7 @@ Valid values are never, always or auto.
 Use PATTERN to search.
 This option can be provided multiple times, where all patterns given are
 searched.
+This is also useful when searching for patterns that start with a dash.
 .RS
 .RE
 .TP
@@ -56,12 +64,29 @@ Treat the pattern as a literal string instead of a regular expression.
 .TP
 .B \-g, \-\-glob \f[I]GLOB\f[] ...
 Include or exclude files for searching that match the given glob.
-This always overrides any other ignore logic.
+This always overrides any other ignore logic if there is a conflict, but
+is otherwise applied in addition to ignore files (e.g., .gitignore or
+\&.ignore).
 Multiple glob flags may be used.
 Globbing rules match .gitignore globs.
 Precede a glob with a \[aq]!\[aq] to exclude it.
 .RS
 .RE
+.PP
+The \-\-glob flag subsumes the functionality of both the \-\-include and
+\-\-exclude flags commonly found in other tools.
+.IP
+.nf
+\f[C]
+Values\ given\ to\ \-g\ must\ be\ quoted\ or\ your\ shell\ will\ expand\ them\ and\ result
+in\ unexpected\ behavior.
+
+Combine\ with\ the\ \-\-files\ flag\ to\ return\ matched\ filenames
+(i.e.,\ to\ replicate\ ack/ag\[aq]s\ \-g\ flag).
+
+For\ example:\ rg\ \-g\ \[aq]\\<glob\\>\[aq]\ \-\-files
+\f[]
+.fi
 .TP
 .B \-h, \-\-help
 Show this usage message.
@@ -111,6 +136,12 @@ Two \-u flags will search hidden files and directories.
 Three \-u flags will search binary files.
 \-uu is equivalent to grep \-r, and \-uuu is equivalent to grep \-a \-r.
 .RS
+.PP
+Note that the \-u flags are convenient aliases for other combinations of
+flags.
+\-u aliases \[aq]\-\-no\-ignore\[aq].
+\-uu aliases \[aq]\-\-no\-ignore \-\-hidden\[aq].
+\-uuu aliases \[aq]\-\-no\-ignore \-\-hidden \-\-text\[aq].
 .RE
 .TP
 .B \-v, \-\-invert\-match
@@ -140,15 +171,38 @@ Show NUM lines before and after each match.
 .RS
 .RE
 .TP
+.B \-\-colors \f[I]SPEC\f[] ...
+This flag specifies color settings for use in the output.
+This flag may be provided multiple times.
+Settings are applied iteratively.
+Colors are limited to one of eight choices: red, blue, green, cyan,
+magenta, yellow, white and black.
+Styles are limited to nobold, bold, nointense or intense.
+.RS
+.PP
+The format of the flag is {type}:{attribute}:{value}.
+{type} should be one of path, line, column or match.
+{attribute} can be fg, bg or style.
+Value is either a color (for fg and bg) or a text style.
+A special format, {type}:none, will clear all color settings for {type}.
+.PP
+For example, the following command will change the match color to
+magenta and the background color for line numbers to yellow:
+.PP
+rg \-\-colors \[aq]match:fg:magenta\[aq] \-\-colors
+\[aq]line:bg:yellow\[aq] foo.
+.RE
+.TP
 .B \-\-column
 Show column numbers (1 based) in output.
 This only shows the column numbers for the first match on each line.
 Note that this doesn\[aq]t try to account for Unicode.
 One byte is equal to one column.
+This implies \-\-line\-number.
 .RS
 .RE
 .TP
-.B \-\-context\-separator \f[I]ARG\f[]
+.B \-\-context\-separator \f[I]SEPARATOR\f[]
 The string to use when separating non\-continuous context lines.
 Escape sequences may be used.
 [default: \-\-]
@@ -160,9 +214,31 @@ Show debug messages.
 .RS
 .RE
 .TP
+.B \-E, \-\-encoding \f[I]ENCODING\f[]
+Specify the text encoding that ripgrep will use on all files searched.
+The default value is \[aq]auto\[aq], which will cause ripgrep to do a
+best effort automatic detection of encoding on a per\-file basis.
+Other supported values can be found in the list of labels here:
+https://encoding.spec.whatwg.org/#concept\-encoding\-get
+.RS
+.RE
+.TP
+.B \-f, \-\-file FILE ...
+Search for patterns from the given file, with one pattern per line.
+When this flag is used multiple times or in combination with the
+\-e/\-\-regexp flag, then all patterns provided are searched.
+Empty pattern lines will match all input lines, and the newline is not
+counted as part of the pattern.
+.RS
+.RE
+.TP
 .B \-\-files
 Print each file that would be searched (but don\[aq]t search).
 .RS
+.PP
+Combine with the \-g flag to return matched paths, for example:
+.PP
+rg \-g \[aq]<glob>\[aq] \-\-files
 .RE
 .TP
 .B \-l, \-\-files\-with\-matches
@@ -170,6 +246,11 @@ Only show path of each file with matches.
 .RS
 .RE
 .TP
+.B \-\-files\-without\-match
+Only show path of each file with no matches.
+.RS
+.RE
+.TP
 .B \-H, \-\-with\-filename
 Prefix each match with the file name that contains it.
 This is the default when more than one file is searched.
@@ -183,13 +264,17 @@ This is the default when one file is searched.
 .RE
 .TP
 .B \-\-heading
-Show the file name above clusters of matches from each file.
+Show the file name above clusters of matches from each file instead of
+showing the file name for every match.
 This is the default mode at a tty.
 .RS
 .RE
 .TP
 .B \-\-no\-heading
-Don\[aq]t show any file name heading.
+Don\[aq]t group matches by each file.
+If \-H/\-\-with\-filename is enabled, then file names will be shown for
+every line matched.
+This is the default mode when not at a tty.
 .RS
 .RE
 .TP
@@ -199,11 +284,43 @@ Search hidden directories and files.
 .RS
 .RE
 .TP
+.B \-\-ignore\-file FILE ...
+Specify additional ignore files for filtering file paths.
+Ignore files should be in the gitignore format and are matched relative
+to the current working directory.
+These ignore files have lower precedence than all other ignore files.
+When specifying multiple ignore files, earlier files have lower
+precedence than later files.
+.RS
+.RE
+.TP
 .B \-L, \-\-follow
 Follow symlinks.
 .RS
 .RE
 .TP
+.B \-M, \-\-max\-columns \f[I]NUM\f[]
+Don\[aq]t print lines longer than this limit in bytes.
+Longer lines are omitted, and only the number of matches in that line is
+printed.
+.RS
+.RE
+.TP
+.B \-m, \-\-max\-count \f[I]NUM\f[]
+Limit the number of matching lines per file searched to NUM.
+.RS
+.RE
+.TP
+.B \-\-max\-filesize \f[I]NUM\f[]+\f[I]SUFFIX\f[]?
+Ignore files larger than \f[I]NUM\f[] in size.
+Directories will never be ignored.
+.RS
+.PP
+\f[I]SUFFIX\f[] is optional and may be one of K, M or G.
+These correspond to kilobytes, megabytes and gigabytes respectively.
+If omitted the input is treated as bytes.
+.RE
+.TP
 .B \-\-maxdepth \f[I]NUM\f[]
 Descend at most NUM directories below the command line arguments.
 A value of zero searches only the starting\-points themselves.
@@ -218,6 +335,11 @@ context related options.)
 .RS
 .RE
 .TP
+.B \-\-no\-messages
+Suppress all error messages.
+.RS
+.RE
+.TP
 .B \-\-no\-mmap
 Never use memory maps, even when they might be faster.
 .RS
@@ -240,7 +362,7 @@ Note that .ignore files will continue to be respected.
 .RS
 .RE
 .TP
-.B \-\-null
+.B \-0, \-\-null
 Whenever a file name is printed, follow it with a NUL byte.
 This includes printing filenames before matches, and when printing a
 list of matching files such as with \-\-count, \-\-files\-with\-matches
@@ -248,6 +370,22 @@ and \-\-files.
 .RS
 .RE
 .TP
+.B \-o, \-\-only\-matching
+Print only the matched (non\-empty) parts of a matching line, with each
+such part on a separate output line.
+.RS
+.RE
+.TP
+.B \-\-path\-separator \f[I]SEPARATOR\f[]
+The path separator to use when printing file paths.
+This defaults to your platform\[aq]s path separator, which is / on Unix
+and \\ on Windows.
+This flag is intended for overriding the default when the environment
+demands it (e.g., cygwin).
+A path separator is limited to a single byte.
+.RS
+.RE
+.TP
 .B \-p, \-\-pretty
 Alias for \-\-color=always \-\-heading \-n.
 .RS
@@ -260,6 +398,17 @@ Neither this flag nor any other flag will modify your files.
 .PP
 Capture group indices (e.g., $5) and names (e.g., $foo) are supported in
 the replacement string.
+.PP
+Note that the replacement by default replaces each match, and NOT the
+entire line.
+To replace the entire line, you should match the entire line.
+For example, to emit only the first phone numbers in each line:
+.IP
+.nf
+\f[C]
+rg\ \[aq]^.*([0\-9]{3}\-[0\-9]{3}\-[0\-9]{4}).*$\[aq]\ \-\-replace\ \[aq]$1\[aq]
+\f[]
+.fi
 .RE
 .TP
 .B \-s, \-\-case\-sensitive
@@ -275,9 +424,16 @@ This is overridden by either \-\-case\-sensitive or \-\-ignore\-case.
 .RS
 .RE
 .TP
+.B \-\-sort\-files
+Sort results by file path.
+Note that this currently disables all parallelism and runs search in a
+single thread.
+.RS
+.RE
+.TP
 .B \-j, \-\-threads \f[I]ARG\f[]
 The number of threads to use.
-0 means use the number of logical CPUs (capped at 6).
+0 means use the number of logical CPUs (capped at 12).
 [default: 0]
 .RS
 .RE
@@ -310,9 +466,38 @@ inside of ripgrep.
 Note that this must be passed to every invocation of rg.
 Type settings are NOT persisted.
 .RS
+.IP
+.nf
+\f[C]
+\ \ Example:\ `rg\ \-\-type\-add\ \[aq]foo:*.foo\[aq]\ \-tfoo\ PATTERN`
+\f[]
+.fi
 .PP
-Example:
-\f[C]rg\ \-\-type\-add\ \[aq]foo:*.foo\[aq]\ \-tfoo\ PATTERN\f[]
+\-\-type\-add can also be used to include rules from other types with
+the special include directive.
+The include directive permits specifying one or more other type names
+(separated by a comma) that have been defined and its rules will
+automatically be imported into the type specified.
+For example, to create a type called src that matches C++, Python and
+Markdown files, one can use:
+.IP
+.nf
+\f[C]
+\ \ `\-\-type\-add\ \[aq]src:include:cpp,py,md\[aq]`
+\f[]
+.fi
+.PP
+Additional glob rules can still be added to the src type by using the
+\-\-type\-add flag again:
+.IP
+.nf
+\f[C]
+\ \ `\-\-type\-add\ \[aq]src:include:cpp,py,md\[aq]\ \-\-type\-add\ \[aq]src:*.foo\[aq]`
+\f[]
+.fi
+.PP
+Note that type names must consist only of Unicode letters or numbers.
+Punctuation characters are not allowed.
 .RE
 .TP
 .B \-\-type\-clear \f[I]TYPE\f[] ...
@@ -322,3 +507,14 @@ ripgrep.
 Note that this must be passed to every invocation of rg.
 .RS
 .RE
+.SH SHELL COMPLETION
+.PP
+Shell completion files are included in the release tarball for Bash,
+Fish, Zsh and PowerShell.
+.PP
+For \f[B]bash\f[], move \f[C]rg.bash\-completion\f[] to
+\f[C]$XDG_CONFIG_HOME/bash_completion\f[] or
+\f[C]/etc/bash_completion.d/\f[].
+.PP
+For \f[B]fish\f[], move \f[C]rg.fish\f[] to
+\f[C]$HOME/.config/fish/completions\f[].
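Editor's aside (not part of the diff above): the new --colors documentation describes settings of the form {type}:{attribute}:{value}, e.g. "match:fg:magenta" or "line:bg:yellow". The sketch below shows how such a spec could be split into its three parts. It is a hypothetical illustration of the documented format, not ripgrep's parser, and the struct and function names are invented.

```rust
// Hypothetical parser for the documented {type}:{attribute}:{value} color spec.
#[derive(Debug)]
struct ColorSpec<'a> {
    ty: &'a str,        // path, line, column or match
    attribute: &'a str, // fg, bg or style; "none" clears settings for the type
    value: &'a str,     // a color name (for fg/bg) or a text style
}

fn parse_spec(spec: &str) -> Option<ColorSpec<'_>> {
    let mut parts = spec.splitn(3, ':');
    Some(ColorSpec {
        ty: parts.next()?,
        attribute: parts.next()?,
        value: parts.next().unwrap_or(""), // "{type}:none" has no third field
    })
}

fn main() {
    println!("{:?}", parse_spec("match:fg:magenta").unwrap());
    println!("{:?}", parse_spec("line:bg:yellow").unwrap());
}
```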
164  doc/rg.1.md
@@ -4,10 +4,10 @@ rg - recursively search current directory for lines matching a pattern

 # SYNOPSIS

-rg [*options*] -e PATTERN ... [*<*path*> ...*]
-
 rg [*options*] <*pattern*> [*<*path*> ...*]

+rg [*options*] (-e PATTERN | -f FILE) ... [*<*path*> ...*]
+
 rg [*options*] --files [*<*path*> ...*]

 rg [*options*] --type-list
@@ -18,9 +18,15 @@ rg [*options*] --version

 # DESCRIPTION

-rg (ripgrep) combines the usability of The Silver Searcher (an ack clone) with
+ripgrep (rg) combines the usability of The Silver Searcher (an ack clone) with
 the raw speed of grep.
+
+ripgrep's regex engine uses finite automata and guarantees linear time
+searching. Because of this, features like backreferences and arbitrary
+lookaround are not supported.
+
+Project home page: https://github.com/BurntSushi/ripgrep

 # COMMON OPTIONS

 -a, --text
@@ -35,15 +41,29 @@ the raw speed of grep.
|
|||||||
|
|
||||||
-e, --regexp *PATTERN* ...
|
-e, --regexp *PATTERN* ...
|
||||||
: Use PATTERN to search. This option can be provided multiple times, where all
|
: Use PATTERN to search. This option can be provided multiple times, where all
|
||||||
patterns given are searched.
|
patterns given are searched. This is also useful when searching for patterns
|
||||||
|
that start with a dash.
|
||||||
|
|
||||||
-F, --fixed-strings
|
-F, --fixed-strings
|
||||||
: Treat the pattern as a literal string instead of a regular expression.
|
: Treat the pattern as a literal string instead of a regular expression.
|
||||||
|
|
||||||
-g, --glob *GLOB* ...
|
-g, --glob *GLOB* ...
|
||||||
: Include or exclude files for searching that match the given glob. This always
|
: Include or exclude files for searching that match the given glob. This always
|
||||||
overrides any other ignore logic. Multiple glob flags may be used. Globbing
|
overrides any other ignore logic if there is a conflict, but is otherwise
|
||||||
rules match .gitignore globs. Precede a glob with a '!' to exclude it.
|
applied in addition to ignore files (e.g., .gitignore or .ignore). Multiple
|
||||||
|
glob flags may be used. Globbing rules match .gitignore globs. Precede a
|
||||||
|
glob with a '!' to exclude it.
|
||||||
|
|
||||||
|
The --glob flag subsumes the functionality of both the --include and
|
||||||
|
--exclude flags commonly found in other tools.
|
||||||
|
|
||||||
|
Values given to -g must be quoted or your shell will expand them and result
|
||||||
|
in unexpected behavior.
|
||||||
|
|
||||||
|
Combine with the --files flag to return matched filenames
|
||||||
|
(i.e., to replicate ack/ag's -g flag).
|
||||||
|
|
||||||
|
For example: rg -g '\<glob\>' --files
|
||||||
|
|
||||||
-h, --help
|
-h, --help
|
||||||
: Show this usage message.
|
: Show this usage message.
|
||||||
@@ -74,6 +94,10 @@ the raw speed of grep.
|
|||||||
-u flags will search binary files. -uu is equivalent to grep -r, and -uuu is
|
-u flags will search binary files. -uu is equivalent to grep -r, and -uuu is
|
||||||
equivalent to grep -a -r.
|
equivalent to grep -a -r.
|
||||||
|
|
||||||
|
Note that the -u flags are convenient aliases for other combinations of
|
||||||
|
flags. -u aliases '--no-ignore'. -uu aliases '--no-ignore --hidden'.
|
||||||
|
-uuu aliases '--no-ignore --hidden --text'.
|
||||||
|
|
||||||
-v, --invert-match
|
-v, --invert-match
|
||||||
: Invert matching.
|
: Invert matching.
|
||||||
|
|
||||||
@@ -92,24 +116,61 @@ the raw speed of grep.
|
|||||||
-C, --context *NUM*
|
-C, --context *NUM*
|
||||||
: Show NUM lines before and after each match.
|
: Show NUM lines before and after each match.
|
||||||
|
|
||||||
|
--colors *SPEC* ...
|
||||||
|
: This flag specifies color settings for use in the output. This flag may be
|
||||||
|
provided multiple times. Settings are applied iteratively. Colors are limited
|
||||||
|
to one of eight choices: red, blue, green, cyan, magenta, yellow, white and
|
||||||
|
black. Styles are limited to nobold, bold, nointense or intense.
|
||||||
|
|
||||||
|
The format of the flag is {type}:{attribute}:{value}. {type} should be one
|
||||||
|
of path, line, column or match. {attribute} can be fg, bg or style. Value
|
||||||
|
is either a color (for fg and bg) or a text style. A special format,
|
||||||
|
{type}:none, will clear all color settings for {type}.
|
||||||
|
|
||||||
|
For example, the following command will change the match color to magenta
|
||||||
|
and the background color for line numbers to yellow:
|
||||||
|
|
||||||
|
rg --colors 'match:fg:magenta' --colors 'line:bg:yellow' foo.
|
||||||
|
|
||||||
--column
|
--column
|
||||||
: Show column numbers (1 based) in output. This only shows the column
|
: Show column numbers (1 based) in output. This only shows the column
|
||||||
numbers for the first match on each line. Note that this doesn't try
|
numbers for the first match on each line. Note that this doesn't try
|
||||||
to account for Unicode. One byte is equal to one column.
|
to account for Unicode. One byte is equal to one column. This implies
|
||||||
|
--line-number.
|
||||||
|
|
||||||
--context-separator *ARG*
|
--context-separator *SEPARATOR*
|
||||||
: The string to use when separating non-continuous context lines. Escape
|
: The string to use when separating non-continuous context lines. Escape
|
||||||
sequences may be used. [default: --]
|
sequences may be used. [default: --]
|
||||||
|
|
||||||
--debug
|
--debug
|
||||||
: Show debug messages.
|
: Show debug messages.
|
||||||
|
|
||||||
|
-E, --encoding *ENCODING*
|
||||||
|
: Specify the text encoding that ripgrep will use on all files
|
||||||
|
searched. The default value is 'auto', which will cause ripgrep to do
|
||||||
|
a best effort automatic detection of encoding on a per-file basis.
|
||||||
|
Other supported values can be found in the list of labels here:
|
||||||
|
https://encoding.spec.whatwg.org/#concept-encoding-get
|
||||||
|
|
||||||
|
-f, --file FILE ...
|
||||||
|
: Search for patterns from the given file, with one pattern per line. When this
|
||||||
|
flag is used or multiple times or in combination with the -e/--regexp flag,
|
||||||
|
then all patterns provided are searched. Empty pattern lines will match all
|
||||||
|
input lines, and the newline is not counted as part of the pattern.
|
||||||
|
|
||||||
--files
|
--files
|
||||||
: Print each file that would be searched (but don't search).
|
: Print each file that would be searched (but don't search).
|
||||||
|
|
||||||
|
Combine with the -g flag to return matched paths, for example:
|
||||||
|
|
||||||
|
rg -g '\<glob\>' --files
|
||||||
|
|
||||||
-l, --files-with-matches
|
-l, --files-with-matches
|
||||||
: Only show path of each file with matches.
|
: Only show path of each file with matches.
|
||||||
|
|
||||||
|
--files-without-match
|
||||||
|
: Only show path of each file with no matches.
|
||||||
|
|
||||||
-H, --with-filename
|
-H, --with-filename
|
||||||
: Prefix each match with the file name that contains it. This is the
|
: Prefix each match with the file name that contains it. This is the
|
||||||
default when more than one file is searched.
|
default when more than one file is searched.
|
||||||
@@ -119,19 +180,43 @@ the raw speed of grep.
|
|||||||
one file is searched.
|
one file is searched.
|
||||||
|
|
||||||
--heading
|
--heading
|
||||||
: Show the file name above clusters of matches from each file.
|
: Show the file name above clusters of matches from each file instead of
|
||||||
This is the default mode at a tty.
|
showing the file name for every match. This is the default mode at a tty.
|
||||||
|
|
||||||
--no-heading
|
--no-heading
|
||||||
: Don't show any file name heading.
|
: Don't group matches by each file. If -H/--with-filename is enabled, then
|
||||||
|
file names will be shown for every line matched. This is the default more
|
||||||
|
when not at a tty.
|
||||||
|
|
||||||
--hidden
|
--hidden
|
||||||
: Search hidden directories and files. (Hidden directories and files are
|
: Search hidden directories and files. (Hidden directories and files are
|
||||||
skipped by default.)
|
skipped by default.)
|
||||||
|
|
||||||
|
--ignore-file FILE ...
|
||||||
|
: Specify additional ignore files for filtering file paths.
|
||||||
|
Ignore files should be in the gitignore format and are matched
|
||||||
|
relative to the current working directory. These ignore files
|
||||||
|
have lower precedence than all other ignore files. When
|
||||||
|
specifying multiple ignore files, earlier files have lower
|
||||||
|
precedence than later files.
|
||||||
|
|
||||||
-L, --follow
|
-L, --follow
|
||||||
: Follow symlinks.
|
: Follow symlinks.
|
||||||
|
|
||||||
|
-M, --max-columns *NUM*
|
||||||
|
: Don't print lines longer than this limit in bytes. Longer lines are omitted,
|
||||||
|
and only the number of matches in that line is printed.
|
||||||
|
|
||||||
|
-m, --max-count *NUM*
|
||||||
|
: Limit the number of matching lines per file searched to NUM.
|
||||||
|
|
||||||
|
--max-filesize *NUM*+*SUFFIX*?
|
||||||
|
: Ignore files larger than *NUM* in size. Directories will never be ignored.
|
||||||
|
|
||||||
|
*SUFFIX* is optional and may be one of K, M or G. These correspond to
|
||||||
|
kilobytes, megabytes and gigabytes respectively. If omitted the input is
|
||||||
|
treated as bytes.
|
||||||
|
|
||||||
--maxdepth *NUM*
|
--maxdepth *NUM*
|
||||||
: Descend at most NUM directories below the command line arguments.
|
: Descend at most NUM directories below the command line arguments.
|
||||||
A value of zero searches only the starting-points themselves.
|
A value of zero searches only the starting-points themselves.
|
||||||
@@ -141,6 +226,9 @@ the raw speed of grep.
|
|||||||
when ripgrep thinks it will be faster. (Note that mmap searching
|
when ripgrep thinks it will be faster. (Note that mmap searching
|
||||||
doesn't currently support the various context related options.)
|
doesn't currently support the various context related options.)
|
||||||
|
|
||||||
|
--no-messages
|
||||||
|
: Suppress all error messages.
|
||||||
|
|
||||||
--no-mmap
|
--no-mmap
|
||||||
: Never use memory maps, even when they might be faster.
|
: Never use memory maps, even when they might be faster.
|
||||||
|
|
||||||
@@ -155,12 +243,22 @@ the raw speed of grep.
|
|||||||
: Don't respect version control ignore files (e.g., .gitignore).
|
: Don't respect version control ignore files (e.g., .gitignore).
|
||||||
Note that .ignore files will continue to be respected.
|
Note that .ignore files will continue to be respected.
|
||||||
|
|
||||||
--null
|
-0, --null
|
||||||
: Whenever a file name is printed, follow it with a NUL byte.
|
: Whenever a file name is printed, follow it with a NUL byte.
|
||||||
This includes printing filenames before matches, and when printing
|
This includes printing filenames before matches, and when printing
|
||||||
a list of matching files such as with --count, --files-with-matches
|
a list of matching files such as with --count, --files-with-matches
|
||||||
and --files.
|
and --files.
|
||||||
|
|
||||||
|
-o, --only-matching
|
||||||
|
: Print only the matched (non-empty) parts of a matching line, with each such
|
||||||
|
part on a separate output line.
|
||||||
|
|
||||||
|
--path-separator *SEPARATOR*
|
||||||
|
: The path separator to use when printing file paths. This defaults to your
|
||||||
|
platform's path separator, which is / on Unix and \\ on Windows. This flag is
|
||||||
|
intended for overriding the default when the environment demands it (e.g.,
|
||||||
|
cygwin). A path separator is limited to a single byte.
|
||||||
|
|
||||||
-p, --pretty
|
-p, --pretty
|
||||||
: Alias for --color=always --heading -n.
|
: Alias for --color=always --heading -n.
|
||||||
|
|
||||||
@@ -171,6 +269,12 @@ the raw speed of grep.
|
|||||||
Capture group indices (e.g., $5) and names (e.g., $foo) are supported
|
Capture group indices (e.g., $5) and names (e.g., $foo) are supported
|
||||||
in the replacement string.
|
in the replacement string.
|
||||||
|
|
||||||
|
Note that the replacement by default replaces each match, and NOT the
|
||||||
|
entire line. To replace the entire line, you should match the entire line.
|
||||||
|
For example, to emit only the first phone numbers in each line:
|
||||||
|
|
||||||
|
rg '^.*([0-9]{3}-[0-9]{3}-[0-9]{4}).*$' --replace '$1'
|
||||||
|
|
||||||
-s, --case-sensitive
|
-s, --case-sensitive
|
||||||
: Search case sensitively. This overrides --ignore-case and --smart-case.
|
: Search case sensitively. This overrides --ignore-case and --smart-case.
|
||||||
|
|
||||||
@@ -179,9 +283,13 @@ the raw speed of grep.
|
|||||||
Search case sensitively otherwise. This is overridden by either
|
Search case sensitively otherwise. This is overridden by either
|
||||||
--case-sensitive or --ignore-case.
|
--case-sensitive or --ignore-case.
|
||||||
|
|
||||||
|
--sort-files
|
||||||
|
: Sort results by file path. Note that this currently
|
||||||
|
disables all parallelism and runs search in a single thread.
|
||||||
|
|
||||||
-j, --threads *ARG*
|
-j, --threads *ARG*
|
||||||
: The number of threads to use. 0 means use the number of logical CPUs
|
: The number of threads to use. 0 means use the number of logical CPUs
|
||||||
(capped at 6). [default: 0]
|
(capped at 12). [default: 0]
|
||||||
|
|
||||||
--version
|
--version
|
||||||
: Show the version number of ripgrep and exit.
|
: Show the version number of ripgrep and exit.
|
||||||
@@ -203,9 +311,37 @@ the raw speed of grep.
|
|||||||
this must be passed to every invocation of rg. Type settings are NOT
|
this must be passed to every invocation of rg. Type settings are NOT
|
||||||
persisted.
|
persisted.
|
||||||
|
|
||||||
Example: `rg --type-add 'foo:*.foo' -tfoo PATTERN`
|
Example: `rg --type-add 'foo:*.foo' -tfoo PATTERN`
|
||||||
|
|
||||||
|
--type-add can also be used to include rules from other types
|
||||||
|
with the special include directive. The include directive
|
||||||
|
permits specifying one or more other type names (separated by a
|
||||||
|
comma) that have been defined and its rules will automatically
|
||||||
|
be imported into the type specified. For example, to create a
|
||||||
|
type called src that matches C++, Python and Markdown files, one
|
||||||
|
can use:
|
||||||
|
|
||||||
|
`--type-add 'src:include:cpp,py,md'`
|
||||||
|
|
||||||
|
Additional glob rules can still be added to the src type by
|
||||||
|
using the --type-add flag again:
|
||||||
|
|
||||||
|
`--type-add 'src:include:cpp,py,md' --type-add 'src:*.foo'`
|
||||||
|
|
||||||
|
Note that type names must consist only of Unicode letters or
|
||||||
|
numbers. Punctuation characters are not allowed.
|
||||||
|
|
||||||
--type-clear *TYPE* ...
|
--type-clear *TYPE* ...
|
||||||
: Clear the file type globs previously defined for TYPE. This only clears
|
: Clear the file type globs previously defined for TYPE. This only clears
|
||||||
the default type definitions that are found inside of ripgrep. Note
|
the default type definitions that are found inside of ripgrep. Note
|
||||||
that this must be passed to every invocation of rg.
|
that this must be passed to every invocation of rg.
|
||||||
|
|
||||||
|
# SHELL COMPLETION
|
||||||
|
|
||||||
|
Shell completion files are included in the release tarball for Bash, Fish, Zsh
|
||||||
|
and PowerShell.
|
||||||
|
|
||||||
|
For **bash**, move `rg.bash-completion` to `$XDG_CONFIG_HOME/bash_completion`
|
||||||
|
or `/etc/bash_completion.d/`.
|
||||||
|
|
||||||
|
For **fish**, move `rg.fish` to `$HOME/.config/fish/completions`.
|
||||||
|
|||||||
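The `-r/--replace` flag documented above uses the regex crate's capture-group expansion syntax (`$1`, `$foo`). As an editorial illustration only, here is the same expansion done directly with the regex crate (regex 0.2 era API; the pattern and input are invented for the example):

```rust
extern crate regex;

use regex::Regex;

fn main() {
    // Capture the first phone-number-shaped token on the line.
    let re = Regex::new(r"^.*([0-9]{3}-[0-9]{3}-[0-9]{4}).*$").unwrap();
    let line = "call me at 555-867-5309 after noon";
    // "$1" expands to the first capture group, mirroring
    // `rg '...' --replace '$1'` on the command line.
    println!("{}", re.replace(line, "$1"));
}
```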
globset/COPYING (new file, 3 lines)
@@ -0,0 +1,3 @@
This project is dual-licensed under the Unlicense and MIT licenses.

You may use this code under the terms of either license.
globset/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "globset"
-version = "0.1.0" #:version
+version = "0.1.4" #:version
 authors = ["Andrew Gallant <jamslam@gmail.com>"]
 description = """
 Cross platform single glob and glob set matching. Glob set matching is the
@@ -19,12 +19,14 @@ name = "globset"
 bench = false

 [dependencies]
-aho-corasick = "0.5.3"
+aho-corasick = "0.6.0"
 fnv = "1.0"
-lazy_static = "0.2"
 log = "0.3"
-memchr = "0.1"
+memchr = "1"
-regex = "0.1.77"
+regex = "0.2.1"

 [dev-dependencies]
 glob = "0.2"
+
+[features]
+simd-accel = ["regex/simd-accel"]
|||||||
21
globset/LICENSE-MIT
Normal file
21
globset/LICENSE-MIT
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2015 Andrew Gallant
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
24
globset/UNLICENSE
Normal file
24
globset/UNLICENSE
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
This is free and unencumbered software released into the public domain.
|
||||||
|
|
||||||
|
Anyone is free to copy, modify, publish, use, compile, sell, or
|
||||||
|
distribute this software, either in source code form or as a compiled
|
||||||
|
binary, for any purpose, commercial or non-commercial, and by any
|
||||||
|
means.
|
||||||
|
|
||||||
|
In jurisdictions that recognize copyright laws, the author or authors
|
||||||
|
of this software dedicate any and all copyright interest in the
|
||||||
|
software to the public domain. We make this dedication for the benefit
|
||||||
|
of the public at large and to the detriment of our heirs and
|
||||||
|
successors. We intend this dedication to be an overt act of
|
||||||
|
relinquishment in perpetuity of all present and future rights to this
|
||||||
|
software under copyright law.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||||||
|
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||||
|
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||||
|
OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
For more information, please refer to <http://unlicense.org/>
|
||||||
@@ -11,6 +11,9 @@ extern crate lazy_static;
 extern crate regex;
 extern crate test;

+use std::ffi::OsStr;
+use std::path::Path;
+
 use globset::{Candidate, Glob, GlobMatcher, GlobSet, GlobSetBuilder};

 const EXT: &'static str = "some/a/bigger/path/to/the/crazy/needle.txt";
@@ -1,5 +1,6 @@
 use std::ffi::{OsStr, OsString};
 use std::fmt;
+use std::hash;
 use std::iter;
 use std::ops::{Deref, DerefMut};
 use std::path::{Path, is_separator};
@@ -76,7 +77,7 @@ impl MatchStrategy {
 ///
 /// It cannot be used directly to match file paths, but it can be converted
 /// to a regular expression string or a matcher.
-#[derive(Clone, Debug, Eq, PartialEq)]
+#[derive(Clone, Debug, Eq)]
 pub struct Glob {
     glob: String,
     re: String,
@@ -84,6 +85,19 @@ pub struct Glob {
     tokens: Tokens,
 }

+impl PartialEq for Glob {
+    fn eq(&self, other: &Glob) -> bool {
+        self.glob == other.glob && self.opts == other.opts
+    }
+}
+
+impl hash::Hash for Glob {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) {
+        self.glob.hash(state);
+        self.opts.hash(state);
+    }
+}
+
 impl fmt::Display for Glob {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.glob.fmt(f)
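The manual `PartialEq`/`Hash` implementations above key equality and hashing on the original pattern plus its options rather than on derived fields. A small sketch of what that enables, for example de-duplicating globs in a `HashSet` (illustrative usage only, assuming the globset 0.1 API shown in this diff):

```rust
extern crate globset;

use std::collections::HashSet;
use globset::{Glob, GlobBuilder};

fn main() {
    // Two globs with the same pattern and options compare equal and hash
    // identically, so a HashSet de-duplicates them.
    let a = Glob::new("*.rs").unwrap();
    let b = Glob::new("*.rs").unwrap();
    assert_eq!(a, b);

    // Changing an option (here: case insensitivity) makes them distinct.
    let c = GlobBuilder::new("*.rs")
        .case_insensitive(true)
        .build()
        .unwrap();
    assert_ne!(a, c);

    let mut set = HashSet::new();
    set.insert(a);
    set.insert(b);
    set.insert(c);
    assert_eq!(set.len(), 2);
}
```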
@@ -173,7 +187,7 @@ pub struct GlobBuilder<'a> {
     opts: GlobOptions,
 }

-#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
 struct GlobOptions {
     /// Whether to match case insensitively.
     case_insensitive: bool,
@@ -645,9 +659,18 @@ impl Tokens {
                 for pat in patterns {
                     let mut altre = String::new();
                     self.tokens_to_regex(options, &pat, &mut altre);
-                    parts.push(altre);
+                    if !altre.is_empty() {
+                        parts.push(altre);
+                    }
+                }
+
+                // It is possible to have an empty set in which case the
+                // resulting alternation '()' would be an error.
+                if !parts.is_empty() {
+                    re.push('(');
+                    re.push_str(&parts.join("|"));
+                    re.push(')');
                 }
-                re.push_str(&parts.join("|"));
             }
         }
     }
@@ -666,7 +689,7 @@ fn bytes_to_escaped_literal(bs: &[u8]) -> String {
     let mut s = String::with_capacity(bs.len());
     for &b in bs {
         if b <= 0x7F {
-            s.push_str(&regex::quote(&(b as char).to_string()));
+            s.push_str(&regex::escape(&(b as char).to_string()));
         } else {
            s.push_str(&format!("\\x{:02x}", b));
        }
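For reference, `regex::quote` was renamed to `regex::escape` in regex 0.2, which is all this hunk tracks. A one-line check of what the function does (illustrative only):

```rust
extern crate regex;

fn main() {
    // Metacharacters in the literal are backslash-escaped.
    assert_eq!(regex::escape("a.b*c"), r"a\.b\*c");
}
```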
@@ -14,8 +14,6 @@ src/**/foo.rs
 and a path `src/bar/baz/foo.rs`, then the set would report the first and third
 globs as matching.

-Single glob matching is also provided and is done by converting globs to
-
 # Example: one glob

 This example shows how to match a single glob against a single file path.
@@ -103,8 +101,6 @@ or to enable case insensitive matching.
 extern crate aho_corasick;
 extern crate fnv;
 #[macro_use]
-extern crate lazy_static;
-#[macro_use]
 extern crate log;
 extern crate memchr;
 extern crate regex;
@@ -130,13 +126,6 @@ pub use glob::{Glob, GlobBuilder, GlobMatcher};
 mod glob;
 mod pathutil;

-macro_rules! eprintln {
-    ($($tt:tt)*) => {{
-        use std::io::Write;
-        let _ = writeln!(&mut ::std::io::stderr(), $($tt)*);
-    }}
-}
-
 /// Represents an error that can occur when parsing a glob pattern.
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub enum Error {
@@ -211,7 +200,7 @@ fn new_regex(pat: &str) -> Result<Regex, Error> {
         .dot_matches_new_line(true)
         .size_limit(10 * (1 << 20))
         .dfa_size_limit(10 * (1 << 20))
-        .compile()
+        .build()
         .map_err(|err| Error::Regex(err.to_string()))
 }

@@ -226,10 +215,21 @@ type Fnv = hash::BuildHasherDefault<fnv::FnvHasher>;
 /// single pass.
 #[derive(Clone, Debug)]
 pub struct GlobSet {
+    len: usize,
     strats: Vec<GlobSetMatchStrategy>,
 }

 impl GlobSet {
+    /// Returns true if this set is empty, and therefore matches nothing.
+    pub fn is_empty(&self) -> bool {
+        self.len == 0
+    }
+
+    /// Returns the number of globs in this set.
+    pub fn len(&self) -> usize {
+        self.len
+    }
+
     /// Returns true if any glob in this set matches the path given.
     pub fn is_match<P: AsRef<Path>>(&self, path: P) -> bool {
         self.is_match_candidate(&Candidate::new(path.as_ref()))
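A quick illustration of the two new accessors together with the empty-set behaviour introduced in the hunks below (a sketch against the globset 0.1 API in this diff):

```rust
extern crate globset;

use globset::GlobSetBuilder;

fn main() {
    // A set built from zero globs reports empty and matches nothing.
    let set = GlobSetBuilder::new().build().unwrap();
    assert!(set.is_empty());
    assert_eq!(set.len(), 0);
    assert!(!set.is_match("any/path"));
}
```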
@@ -240,6 +240,9 @@ impl GlobSet {
     /// This takes a Candidate as input, which can be used to amortize the
     /// cost of preparing a path for matching.
     pub fn is_match_candidate(&self, path: &Candidate) -> bool {
+        if self.is_empty() {
+            return false;
+        }
         for strat in &self.strats {
             if strat.is_match(path) {
                 return true;
@@ -250,9 +253,6 @@ impl GlobSet {

     /// Returns the sequence number of every glob pattern that matches the
     /// given path.
-    ///
-    /// This takes a Candidate as input, which can be used to amortize the
-    /// cost of preparing a path for matching.
     pub fn matches<P: AsRef<Path>>(&self, path: P) -> Vec<usize> {
         self.matches_candidate(&Candidate::new(path.as_ref()))
     }
@@ -264,6 +264,9 @@ impl GlobSet {
     /// cost of preparing a path for matching.
     pub fn matches_candidate(&self, path: &Candidate) -> Vec<usize> {
         let mut into = vec![];
+        if self.is_empty() {
+            return into;
+        }
         self.matches_candidate_into(path, &mut into);
         into
     }
@@ -274,12 +277,32 @@ impl GlobSet {
     /// `into` is cleared before matching begins, and contains the set of
     /// sequence numbers (in ascending order) after matching ends. If no globs
     /// were matched, then `into` will be empty.
+    pub fn matches_into<P: AsRef<Path>>(
+        &self,
+        path: P,
+        into: &mut Vec<usize>,
+    ) {
+        self.matches_candidate_into(&Candidate::new(path.as_ref()), into);
+    }
+
+    /// Adds the sequence number of every glob pattern that matches the given
+    /// path to the vec given.
+    ///
+    /// `into` is cleared before matching begins, and contains the set of
+    /// sequence numbers (in ascending order) after matching ends. If no globs
+    /// were matched, then `into` will be empty.
+    ///
+    /// This takes a Candidate as input, which can be used to amortize the
+    /// cost of preparing a path for matching.
     pub fn matches_candidate_into(
         &self,
         path: &Candidate,
         into: &mut Vec<usize>,
     ) {
         into.clear();
+        if self.is_empty() {
+            return;
+        }
         for strat in &self.strats {
             strat.matches_into(path, into);
         }
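The new `matches_into` lets callers reuse a single index buffer across many paths instead of allocating a fresh `Vec` per call. A usage sketch (globset 0.1 API as added here; the globs and paths are made up for illustration):

```rust
extern crate globset;

use globset::{Glob, GlobSetBuilder};

fn main() {
    let set = GlobSetBuilder::new()
        .add(Glob::new("*.rs").unwrap())
        .add(Glob::new("src/**").unwrap())
        .build()
        .unwrap();

    let paths = ["src/main.rs", "README.md", "src/lib.rs"];
    let mut matches = Vec::new();
    for path in &paths {
        // `matches_into` clears `matches` and fills it with the indices of
        // every glob that matched, so the buffer is reused on each iteration.
        set.matches_into(path, &mut matches);
        println!("{}: {:?}", path, matches);
    }
}
```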
@@ -288,6 +311,9 @@ impl GlobSet {
     }

     fn new(pats: &[Glob]) -> Result<GlobSet, Error> {
+        if pats.is_empty() {
+            return Ok(GlobSet { len: 0, strats: vec![] });
+        }
         let mut lits = LiteralStrategy::new();
         let mut base_lits = BasenameLiteralStrategy::new();
         let mut exts = ExtensionStrategy::new();
@@ -330,6 +356,7 @@ impl GlobSet {
             prefixes.literals.len(), suffixes.literals.len(),
             required_exts.0.len(), regexes.literals.len());
         Ok(GlobSet {
+            len: pats.len(),
             strats: vec![
                 GlobSetMatchStrategy::Extension(exts),
                 GlobSetMatchStrategy::BasenameLiteral(base_lits),
@@ -750,4 +777,11 @@ mod tests {
         assert_eq!(0, matches[0]);
         assert_eq!(2, matches[1]);
     }
+
+    #[test]
+    fn empty_set_works() {
+        let set = GlobSetBuilder::new().build().unwrap();
+        assert!(!set.is_match(""));
+        assert!(!set.is_match("a"));
+    }
 }
@@ -89,16 +89,14 @@ pub fn path_bytes(path: &Path) -> Cow<[u8]> {
     os_str_bytes(path.as_os_str())
 }

-/// Return the raw bytes of the given OS string, transcoded to UTF-8 if
-/// necessary.
+/// Return the raw bytes of the given OS string, possibly transcoded to UTF-8.
 #[cfg(unix)]
 pub fn os_str_bytes(s: &OsStr) -> Cow<[u8]> {
     use std::os::unix::ffi::OsStrExt;
     Cow::Borrowed(s.as_bytes())
 }

-/// Return the raw bytes of the given OS string, transcoded to UTF-8 if
-/// necessary.
+/// Return the raw bytes of the given OS string, possibly transcoded to UTF-8.
 #[cfg(not(unix))]
 pub fn os_str_bytes(s: &OsStr) -> Cow<[u8]> {
     // TODO(burntsushi): On Windows, OS strings are WTF-8, which is a superset
grep/COPYING (new file, 3 lines)
@@ -0,0 +1,3 @@
This project is dual-licensed under the Unlicense and MIT licenses.

You may use this code under the terms of either license.
grep/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "grep"
-version = "0.1.3" #:version
+version = "0.1.6" #:version
 authors = ["Andrew Gallant <jamslam@gmail.com>"]
 description = """
 Fast line oriented regex searching as a library.
@@ -14,7 +14,6 @@ license = "Unlicense/MIT"

 [dependencies]
 log = "0.3"
-memchr = "0.1"
-memmap = "0.2"
-regex = "0.1.77"
-regex-syntax = "0.3.5"
+memchr = "1"
+regex = "0.2.1"
+regex-syntax = "0.4.0"
|||||||
21
grep/LICENSE-MIT
Normal file
21
grep/LICENSE-MIT
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2015 Andrew Gallant
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
24
grep/UNLICENSE
Normal file
24
grep/UNLICENSE
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
This is free and unencumbered software released into the public domain.
|
||||||
|
|
||||||
|
Anyone is free to copy, modify, publish, use, compile, sell, or
|
||||||
|
distribute this software, either in source code form or as a compiled
|
||||||
|
binary, for any purpose, commercial or non-commercial, and by any
|
||||||
|
means.
|
||||||
|
|
||||||
|
In jurisdictions that recognize copyright laws, the author or authors
|
||||||
|
of this software dedicate any and all copyright interest in the
|
||||||
|
software to the public domain. We make this dedication for the benefit
|
||||||
|
of the public at large and to the detriment of our heirs and
|
||||||
|
successors. We intend this dedication to be an overt act of
|
||||||
|
relinquishment in perpetuity of all present and future rights to this
|
||||||
|
software under copyright law.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||||||
|
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||||
|
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||||
|
OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
For more information, please refer to <http://unlicense.org/>
|
||||||
@@ -78,6 +78,6 @@ impl From<regex::Error> for Error {

 impl From<syntax::Error> for Error {
     fn from(err: syntax::Error) -> Error {
-        Error::Regex(regex::Error::Syntax(err))
+        Error::Regex(regex::Error::Syntax(err.to_string()))
     }
 }
@@ -9,7 +9,7 @@ principled.
 */
 use std::cmp;

-use regex::bytes::Regex;
+use regex::bytes::RegexBuilder;
 use syntax::{
     Expr, Literals, Lit,
     ByteClass, ByteRange, CharClass, ClassRange, Repeater,
@@ -33,7 +33,7 @@ impl LiteralSets {
         }
     }

-    pub fn to_regex(&self) -> Option<Regex> {
+    pub fn to_regex_builder(&self) -> Option<RegexBuilder> {
         if self.prefixes.all_complete() && !self.prefixes.is_empty() {
             debug!("literal prefixes detected: {:?}", self.prefixes);
             // When this is true, the regex engine will do a literal scan.
@@ -79,14 +79,16 @@ impl LiteralSets {
             debug!("required literals found: {:?}", req_lits);
             let alts: Vec<String> =
                 req_lits.into_iter().map(|x| bytes_to_regex(x)).collect();
-            // Literals always compile.
-            Some(Regex::new(&alts.join("|")).unwrap())
+            let mut builder = RegexBuilder::new(&alts.join("|"));
+            builder.unicode(false);
+            Some(builder)
         } else if lit.is_empty() {
             None
         } else {
-            // Literals always compile.
             debug!("required literal found: {:?}", show(lit));
-            Some(Regex::new(&bytes_to_regex(lit)).unwrap())
+            let mut builder = RegexBuilder::new(&bytes_to_regex(&lit));
+            builder.unicode(false);
+            Some(builder)
         }
     }
 }
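The method above now hands back a configured `RegexBuilder` instead of a compiled `Regex`, so the caller can apply its own limits before compiling. A minimal standalone sketch of the byte-oriented, non-Unicode literal regex it sets up (regex 0.2 API; the literals are invented for illustration):

```rust
extern crate regex;

use regex::bytes::RegexBuilder;

fn main() {
    // Join the required literals into an alternation and compile it with
    // Unicode mode off, mirroring `to_regex_builder` above.
    let alts = vec!["foo", "bar"];
    let mut builder = RegexBuilder::new(&alts.join("|"));
    builder.unicode(false);
    let re = builder.build().unwrap();
    assert!(re.is_match(b"a line containing bar"));
}
```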
@@ -144,14 +144,19 @@ impl GrepBuilder {
         let expr = try!(self.parse());
         let literals = LiteralSets::create(&expr);
         let re = try!(self.regex(&expr));
-        let required = literals.to_regex().or_else(|| {
-            let expr = match strip_unicode_word_boundaries(&expr) {
-                None => return None,
-                Some(expr) => expr,
-            };
-            debug!("Stripped Unicode word boundaries. New AST:\n{:?}", expr);
-            self.regex(&expr).ok()
-        });
+        let required = match literals.to_regex_builder() {
+            Some(builder) => Some(try!(self.regex_build(builder))),
+            None => {
+                match strip_unicode_word_boundaries(&expr) {
+                    None => None,
+                    Some(expr) => {
+                        debug!("Stripped Unicode word boundaries. \
+                                New AST:\n{:?}", expr);
+                        self.regex(&expr).ok()
+                    }
+                }
+            }
+        };
         Ok(Grep {
             re: re,
             required: required,
@@ -162,16 +167,18 @@ impl GrepBuilder {
     /// Creates a new regex from the given expression with the current
     /// configuration.
     fn regex(&self, expr: &Expr) -> Result<Regex> {
-        let casei =
-            self.opts.case_insensitive
-            || (self.opts.case_smart && !has_uppercase_literal(expr));
-        RegexBuilder::new(&expr.to_string())
-            .case_insensitive(casei)
+        let mut builder = RegexBuilder::new(&expr.to_string());
+        builder.unicode(true);
+        self.regex_build(builder)
+    }
+
+    /// Builds a new regex from the given builder using the caller's settings.
+    fn regex_build(&self, mut builder: RegexBuilder) -> Result<Regex> {
+        builder
             .multi_line(true)
-            .unicode(true)
             .size_limit(self.opts.size_limit)
             .dfa_size_limit(self.opts.dfa_size_limit)
-            .compile()
+            .build()
             .map_err(From::from)
     }
@@ -182,12 +189,30 @@ impl GrepBuilder {
             try!(syntax::ExprBuilder::new()
                 .allow_bytes(true)
                 .unicode(true)
-                .case_insensitive(self.opts.case_insensitive)
+                .case_insensitive(try!(self.is_case_insensitive()))
                 .parse(&self.pattern));
         let expr = try!(nonl::remove(expr, self.opts.line_terminator));
         debug!("regex ast:\n{:#?}", expr);
         Ok(expr)
     }
+
+    /// Determines whether the case insensitive flag should be enabled or not.
+    ///
+    /// An error is returned if the regex could not be parsed.
+    fn is_case_insensitive(&self) -> Result<bool> {
+        if self.opts.case_insensitive {
+            return Ok(true);
+        }
+        if !self.opts.case_smart {
+            return Ok(false);
+        }
+        let expr =
+            try!(syntax::ExprBuilder::new()
+                 .allow_bytes(true)
+                 .unicode(true)
+                 .parse(&self.pattern));
+        Ok(!has_uppercase_literal(&expr))
+    }
 }

 impl Grep {
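To make the smart-case plumbing above concrete, here is a hedged usage sketch of the grep crate's builder as defined in this repository (grep 0.1 era API; the pattern and haystack are invented for the example). With `case_smart(true)`, an all-lowercase pattern like this one searches case insensitively:

```rust
extern crate grep;

use grep::GrepBuilder;

fn main() {
    let searcher = GrepBuilder::new("foo")
        .case_smart(true)
        .build()
        .unwrap();

    let haystack = b"bar\nFOO fighters\nbaz\n";
    // Each yielded match describes the byte range of a matching line.
    for m in searcher.iter(haystack) {
        println!("match at bytes {}..{}", m.start(), m.end());
    }
}
```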
@@ -294,12 +319,29 @@ impl<'b, 's> Iterator for Iter<'b, 's> {

 fn has_uppercase_literal(expr: &Expr) -> bool {
     use syntax::Expr::*;
+    fn byte_is_upper(b: u8) -> bool { b'A' <= b && b <= b'Z' }
     match *expr {
         Literal { ref chars, casei } => {
             casei || chars.iter().any(|c| c.is_uppercase())
         }
         LiteralBytes { ref bytes, casei } => {
-            casei || bytes.iter().any(|&b| b'A' <= b && b <= b'Z')
+            casei || bytes.iter().any(|&b| byte_is_upper(b))
+        }
+        Class(ref ranges) => {
+            for r in ranges {
+                if r.start.is_uppercase() || r.end.is_uppercase() {
+                    return true;
+                }
+            }
+            false
+        }
+        ClassBytes(ref ranges) => {
+            for r in ranges {
+                if byte_is_upper(r.start) || byte_is_upper(r.end) {
+                    return true;
+                }
+            }
+            false
         }
         Group { ref e, .. } => has_uppercase_literal(e),
         Repeat { ref e, .. } => has_uppercase_literal(e),
@@ -328,11 +370,11 @@ mod tests {
     fn find_lines(pat: &str, haystack: &[u8]) -> Vec<Match> {
         let re = Regex::new(pat).unwrap();
         let mut lines = vec![];
-        for (s, e) in re.find_iter(haystack) {
-            let start = memrchr(b'\n', &haystack[..s])
+        for m in re.find_iter(haystack) {
+            let start = memrchr(b'\n', &haystack[..m.start()])
                 .map_or(0, |i| i + 1);
-            let end = memchr(b'\n', &haystack[e..])
-                .map_or(haystack.len(), |i| e + i + 1);
+            let end = memchr(b'\n', &haystack[m.end()..])
+                .map_or(haystack.len(), |i| m.end() + i + 1);
             lines.push(Match {
                 start: start,
                 end: end,
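This test change tracks the regex 0.2 API, where `find_iter` yields `Match` values instead of `(start, end)` tuples. A tiny standalone sketch of that API (regex 0.2; the pattern and input are illustrative):

```rust
extern crate regex;

use regex::bytes::Regex;

fn main() {
    let haystack: &[u8] = b"one two";
    let re = Regex::new(r"\w+").unwrap();
    for m in re.find_iter(haystack) {
        // `start()`/`end()` give byte offsets into the haystack.
        println!("{}..{} {:?}", m.start(), m.end(), &haystack[m.start()..m.end()]);
    }
}
```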
ignore/COPYING (new file, 3 lines)
@@ -0,0 +1,3 @@
This project is dual-licensed under the Unlicense and MIT licenses.

You may use this code under the terms of either license.
ignore/Cargo.toml (new file)
@@ -0,0 +1,37 @@
[package]
name = "ignore"
version = "0.1.9" #:version
authors = ["Andrew Gallant <jamslam@gmail.com>"]
description = """
A fast library for efficiently matching ignore files such as `.gitignore`
against file paths.
"""
documentation = "https://docs.rs/ignore"
homepage = "https://github.com/BurntSushi/ripgrep/tree/master/ignore"
repository = "https://github.com/BurntSushi/ripgrep/tree/master/ignore"
readme = "README.md"
keywords = ["glob", "ignore", "gitignore", "pattern", "file"]
license = "Unlicense/MIT"

[lib]
name = "ignore"
bench = false

[dependencies]
crossbeam = "0.2"
globset = { version = "0.1.4", path = "../globset" }
lazy_static = "0.2"
log = "0.3"
memchr = "1"
regex = "0.2.1"
thread_local = "0.3.2"
walkdir = "1.0.7"

[dev-dependencies]
tempdir = "0.3.5"

[features]
simd-accel = ["globset/simd-accel"]

[profile.release]
debug = true
21
ignore/LICENSE-MIT
Normal file
21
ignore/LICENSE-MIT
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2015 Andrew Gallant
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
ignore/README.md (new file)
@@ -0,0 +1,66 @@
ignore
======
The ignore crate provides a fast recursive directory iterator that respects
various filters such as globs, file types and `.gitignore` files. This crate
also provides lower level direct access to gitignore and file type matchers.

[](https://travis-ci.org/BurntSushi/ripgrep)
[](https://ci.appveyor.com/project/BurntSushi/ripgrep)
[](https://crates.io/crates/ignore)

Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).

### Documentation

[https://docs.rs/ignore](https://docs.rs/ignore)

### Usage

Add this to your `Cargo.toml`:

```toml
[dependencies]
ignore = "0.1"
```

and this to your crate root:

```rust
extern crate ignore;
```

### Example

This example shows the most basic usage of this crate. This code will
recursively traverse the current directory while automatically filtering out
files and directories according to ignore globs found in files like
`.ignore` and `.gitignore`:


```rust,no_run
use ignore::Walk;

for result in Walk::new("./") {
    // Each item yielded by the iterator is either a directory entry or an
    // error, so either print the path or the error.
    match result {
        Ok(entry) => println!("{}", entry.path().display()),
        Err(err) => println!("ERROR: {}", err),
    }
}
```

### Example: advanced

By default, the recursive directory iterator will ignore hidden files and
directories. This can be disabled by building the iterator with `WalkBuilder`:

```rust,no_run
use ignore::WalkBuilder;

for result in WalkBuilder::new("./").hidden(false).build() {
    println!("{:?}", result);
}
```

See the documentation for `WalkBuilder` for many other options.
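As a follow-on to the README's examples, here is a hedged sketch that combines `WalkBuilder` with ordinary caller-side filtering; only the `hidden(false)`/`build()` calls shown above are assumed, and the extension check is plain user code, not part of the crate:

```rust
extern crate ignore;

use ignore::WalkBuilder;

fn main() {
    // Walk the tree with hidden entries included, then keep only Rust
    // sources; ignore rules from .gitignore/.ignore still apply.
    for result in WalkBuilder::new("./").hidden(false).build() {
        match result {
            Ok(entry) => {
                let path = entry.path();
                if path.extension().map_or(false, |e| e == "rs") {
                    println!("{}", path.display());
                }
            }
            Err(err) => println!("ERROR: {}", err),
        }
    }
}
```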
24
ignore/UNLICENSE
Normal file
24
ignore/UNLICENSE
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
This is free and unencumbered software released into the public domain.
|
||||||
|
|
||||||
|
Anyone is free to copy, modify, publish, use, compile, sell, or
|
||||||
|
distribute this software, either in source code form or as a compiled
|
||||||
|
binary, for any purpose, commercial or non-commercial, and by any
|
||||||
|
means.
|
||||||
|
|
||||||
|
In jurisdictions that recognize copyright laws, the author or authors
|
||||||
|
of this software dedicate any and all copyright interest in the
|
||||||
|
software to the public domain. We make this dedication for the benefit
|
||||||
|
of the public at large and to the detriment of our heirs and
|
||||||
|
successors. We intend this dedication to be an overt act of
|
||||||
|
relinquishment in perpetuity of all present and future rights to this
|
||||||
|
software under copyright law.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||||
|
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||||||
|
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||||
|
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||||
|
OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
For more information, please refer to <http://unlicense.org/>
|
||||||
92
ignore/examples/walk.rs
Normal file
92
ignore/examples/walk.rs
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
#![allow(dead_code, unused_imports, unused_mut, unused_variables)]
|
||||||
|
|
||||||
|
extern crate crossbeam;
|
||||||
|
extern crate ignore;
|
||||||
|
extern crate walkdir;
|
||||||
|
|
||||||
|
use std::env;
|
||||||
|
use std::io::{self, Write};
|
||||||
|
use std::path::Path;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||||
|
use std::thread;
|
||||||
|
|
||||||
|
use crossbeam::sync::MsQueue;
|
||||||
|
use ignore::WalkBuilder;
|
||||||
|
use walkdir::WalkDir;
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
let mut path = env::args().nth(1).unwrap();
|
||||||
|
let mut parallel = false;
|
||||||
|
let mut simple = false;
|
||||||
|
let queue: Arc<MsQueue<Option<DirEntry>>> = Arc::new(MsQueue::new());
|
||||||
|
if path == "parallel" {
|
||||||
|
path = env::args().nth(2).unwrap();
|
||||||
|
parallel = true;
|
||||||
|
} else if path == "walkdir" {
|
||||||
|
path = env::args().nth(2).unwrap();
|
||||||
|
simple = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
let stdout_queue = queue.clone();
|
||||||
|
let stdout_thread = thread::spawn(move || {
|
||||||
|
let mut stdout = io::BufWriter::new(io::stdout());
|
||||||
|
while let Some(dent) = stdout_queue.pop() {
|
||||||
|
write_path(&mut stdout, dent.path());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if parallel {
|
||||||
|
let walker = WalkBuilder::new(path).threads(6).build_parallel();
|
||||||
|
walker.run(|| {
|
||||||
|
let queue = queue.clone();
|
||||||
|
Box::new(move |result| {
|
||||||
|
use ignore::WalkState::*;
|
||||||
|
|
||||||
|
queue.push(Some(DirEntry::Y(result.unwrap())));
|
||||||
|
Continue
|
||||||
|
})
|
||||||
|
});
|
||||||
|
} else if simple {
|
||||||
|
let mut stdout = io::BufWriter::new(io::stdout());
|
||||||
|
let walker = WalkDir::new(path);
|
||||||
|
for result in walker {
|
||||||
|
queue.push(Some(DirEntry::X(result.unwrap())));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let mut stdout = io::BufWriter::new(io::stdout());
|
||||||
|
let walker = WalkBuilder::new(path).build();
|
||||||
|
for result in walker {
|
||||||
|
queue.push(Some(DirEntry::Y(result.unwrap())));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
queue.push(None);
|
||||||
|
stdout_thread.join().unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
enum DirEntry {
|
||||||
|
X(walkdir::DirEntry),
|
||||||
|
Y(ignore::DirEntry),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DirEntry {
|
||||||
|
fn path(&self) -> &Path {
|
||||||
|
match *self {
|
||||||
|
DirEntry::X(ref x) => x.path(),
|
||||||
|
DirEntry::Y(ref y) => y.path(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
fn write_path<W: Write>(mut wtr: W, path: &Path) {
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
wtr.write(path.as_os_str().as_bytes()).unwrap();
|
||||||
|
wtr.write(b"\n").unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(unix))]
|
||||||
|
fn write_path<W: Write>(mut wtr: W, path: &Path) {
|
||||||
|
wtr.write(path.to_string_lossy().as_bytes()).unwrap();
|
||||||
|
wtr.write(b"\n").unwrap();
|
||||||
|
}
|
||||||
800
ignore/src/dir.rs
Normal file
800
ignore/src/dir.rs
Normal file
@@ -0,0 +1,800 @@
|
|||||||
|
// This module provides a data structure, `Ignore`, that connects "directory
|
||||||
|
// traversal" with "ignore matchers." Specifically, it knows about gitignore
|
||||||
|
// semantics and precedence, and is organized based on directory hierarchy.
|
||||||
|
// Namely, every matcher logically corresponds to ignore rules from a single
|
||||||
|
// directory, and points to the matcher for its corresponding parent directory.
|
||||||
|
// In this sense, `Ignore` is a *persistent* data structure.
|
||||||
|
//
|
||||||
|
// This design was specifically chosen to make it possible to use this data
|
||||||
|
// structure in a parallel directory iterator.
|
||||||
|
//
|
||||||
|
// My initial intention was to expose this module as part of this crate's
|
||||||
|
// public API, but I think the data structure's public API is too complicated
|
||||||
|
// with non-obvious failure modes. Alas, such things haven't been documented
|
||||||
|
// well.
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::ffi::OsString;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::sync::{Arc, RwLock};
|
||||||
|
|
||||||
|
use gitignore::{self, Gitignore, GitignoreBuilder};
|
||||||
|
use pathutil::{is_hidden, strip_prefix};
|
||||||
|
use overrides::{self, Override};
|
||||||
|
use types::{self, Types};
|
||||||
|
use {Error, Match, PartialErrorBuilder};
|
||||||
|
|
||||||
|
/// IgnoreMatch represents information about where a match came from when using
|
||||||
|
/// the `Ignore` matcher.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct IgnoreMatch<'a>(IgnoreMatchInner<'a>);
|
||||||
|
|
||||||
|
/// IgnoreMatchInner describes precisely where the match information came from.
|
||||||
|
/// This is private to allow expansion to more matchers in the future.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
enum IgnoreMatchInner<'a> {
|
||||||
|
Override(overrides::Glob<'a>),
|
||||||
|
Gitignore(&'a gitignore::Glob),
|
||||||
|
Types(types::Glob<'a>),
|
||||||
|
Hidden,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> IgnoreMatch<'a> {
|
||||||
|
fn overrides(x: overrides::Glob<'a>) -> IgnoreMatch<'a> {
|
||||||
|
IgnoreMatch(IgnoreMatchInner::Override(x))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn gitignore(x: &'a gitignore::Glob) -> IgnoreMatch<'a> {
|
||||||
|
IgnoreMatch(IgnoreMatchInner::Gitignore(x))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn types(x: types::Glob<'a>) -> IgnoreMatch<'a> {
|
||||||
|
IgnoreMatch(IgnoreMatchInner::Types(x))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn hidden() -> IgnoreMatch<'static> {
|
||||||
|
IgnoreMatch(IgnoreMatchInner::Hidden)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Options for the ignore matcher, shared between the matcher itself and the
|
||||||
|
/// builder.
|
||||||
|
#[derive(Clone, Copy, Debug)]
|
||||||
|
struct IgnoreOptions {
|
||||||
|
/// Whether to ignore hidden file paths or not.
|
||||||
|
hidden: bool,
|
||||||
|
/// Whether to read .ignore files.
|
||||||
|
ignore: bool,
|
||||||
|
/// Whether to read git's global gitignore file.
|
||||||
|
git_global: bool,
|
||||||
|
/// Whether to read .gitignore files.
|
||||||
|
git_ignore: bool,
|
||||||
|
/// Whether to read .git/info/exclude files.
|
||||||
|
git_exclude: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IgnoreOptions {
|
||||||
|
/// Returns true if at least one type of ignore rules should be matched.
|
||||||
|
fn has_any_ignore_options(&self) -> bool {
|
||||||
|
self.ignore || self.git_global || self.git_ignore || self.git_exclude
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Ignore is a matcher useful for recursively walking one or more directories.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Ignore(Arc<IgnoreInner>);
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
struct IgnoreInner {
|
||||||
|
/// A map of all existing directories that have already been
|
||||||
|
/// compiled into matchers.
|
||||||
|
///
|
||||||
|
/// Note that this is never used during matching, only when adding new
|
||||||
|
/// parent directory matchers. This avoids needing to rebuild glob sets for
|
||||||
|
/// parent directories if many paths are being searched.
|
||||||
|
compiled: Arc<RwLock<HashMap<OsString, Ignore>>>,
|
||||||
|
/// The path to the directory that this matcher was built from.
|
||||||
|
dir: PathBuf,
|
||||||
|
/// An override matcher (default is empty).
|
||||||
|
overrides: Arc<Override>,
|
||||||
|
/// A file type matcher.
|
||||||
|
types: Arc<Types>,
|
||||||
|
/// The parent directory to match next.
|
||||||
|
///
|
||||||
|
/// If this is the root directory or there are otherwise no more
|
||||||
|
/// directories to match, then `parent` is `None`.
|
||||||
|
parent: Option<Ignore>,
|
||||||
|
/// Whether this is an absolute parent matcher, as added by `add_parents`.
|
||||||
|
is_absolute_parent: bool,
|
||||||
|
/// The absolute base path of this matcher. Populated only if parent
|
||||||
|
/// directories are added.
|
||||||
|
absolute_base: Option<Arc<PathBuf>>,
|
||||||
|
/// Explicit ignore matchers specified by the caller.
|
||||||
|
explicit_ignores: Arc<Vec<Gitignore>>,
|
||||||
|
/// The matcher for .ignore files.
|
||||||
|
ignore_matcher: Gitignore,
|
||||||
|
/// A global gitignore matcher, usually from $XDG_CONFIG_HOME/git/ignore.
|
||||||
|
git_global_matcher: Arc<Gitignore>,
|
||||||
|
/// The matcher for .gitignore files.
|
||||||
|
git_ignore_matcher: Gitignore,
|
||||||
|
/// Special matcher for `.git/info/exclude` files.
|
||||||
|
git_exclude_matcher: Gitignore,
|
||||||
|
/// Whether this directory contains a .git sub-directory.
|
||||||
|
has_git: bool,
|
||||||
|
/// Ignore config.
|
||||||
|
opts: IgnoreOptions,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ignore {
|
||||||
|
/// Return the directory path of this matcher.
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub fn path(&self) -> &Path {
|
||||||
|
&self.0.dir
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return true if this matcher has no parent.
|
||||||
|
pub fn is_root(&self) -> bool {
|
||||||
|
self.0.parent.is_none()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if this matcher was added via the `add_parents` method.
|
||||||
|
pub fn is_absolute_parent(&self) -> bool {
|
||||||
|
self.0.is_absolute_parent
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return this matcher's parent, if one exists.
|
||||||
|
pub fn parent(&self) -> Option<Ignore> {
|
||||||
|
self.0.parent.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new `Ignore` matcher with the parent directories of `dir`.
|
||||||
|
///
|
||||||
|
/// Note that this can only be called on an `Ignore` matcher with no
|
||||||
|
/// parents (i.e., `is_root` returns `true`). This will panic otherwise.
|
||||||
|
pub fn add_parents<P: AsRef<Path>>(
|
||||||
|
&self,
|
||||||
|
path: P,
|
||||||
|
) -> (Ignore, Option<Error>) {
|
||||||
|
if !self.is_root() {
|
||||||
|
panic!("Ignore::add_parents called on non-root matcher");
|
||||||
|
}
|
||||||
|
let absolute_base = match path.as_ref().canonicalize() {
|
||||||
|
Ok(path) => Arc::new(path),
|
||||||
|
Err(_) => {
|
||||||
|
// There's not much we can do here, so just return our
|
||||||
|
// existing matcher. We drop the error to be consistent
|
||||||
|
// with our general pattern of ignoring I/O errors when
|
||||||
|
// processing ignore files.
|
||||||
|
return (self.clone(), None);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
// List of parents, from child to root.
|
||||||
|
let mut parents = vec![];
|
||||||
|
let mut path = &**absolute_base;
|
||||||
|
while let Some(parent) = path.parent() {
|
||||||
|
parents.push(parent);
|
||||||
|
path = parent;
|
||||||
|
}
|
||||||
|
let mut errs = PartialErrorBuilder::default();
|
||||||
|
let mut ig = self.clone();
|
||||||
|
for parent in parents.into_iter().rev() {
|
||||||
|
let mut compiled = self.0.compiled.write().unwrap();
|
||||||
|
if let Some(prebuilt) = compiled.get(parent.as_os_str()) {
|
||||||
|
ig = prebuilt.clone();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let (mut igtmp, err) = ig.add_child_path(parent);
|
||||||
|
errs.maybe_push(err);
|
||||||
|
igtmp.is_absolute_parent = true;
|
||||||
|
igtmp.absolute_base = Some(absolute_base.clone());
|
||||||
|
ig = Ignore(Arc::new(igtmp));
|
||||||
|
compiled.insert(parent.as_os_str().to_os_string(), ig.clone());
|
||||||
|
}
|
||||||
|
(ig, errs.into_error_option())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new `Ignore` matcher for the given child directory.
|
||||||
|
///
|
||||||
|
/// Since building the matcher may require reading from multiple
|
||||||
|
/// files, it's possible that this method partially succeeds. Therefore,
|
||||||
|
/// a matcher is always returned (which may match nothing) and an error is
|
||||||
|
/// returned if it exists.
|
||||||
|
///
|
||||||
|
/// Note that all I/O errors are completely ignored.
|
||||||
|
pub fn add_child<P: AsRef<Path>>(
|
||||||
|
&self,
|
||||||
|
dir: P,
|
||||||
|
) -> (Ignore, Option<Error>) {
|
||||||
|
let (ig, err) = self.add_child_path(dir.as_ref());
|
||||||
|
(Ignore(Arc::new(ig)), err)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Like add_child, but takes a full path and returns an IgnoreInner.
|
||||||
|
fn add_child_path(&self, dir: &Path) -> (IgnoreInner, Option<Error>) {
|
||||||
|
static IG_NAMES: &'static [&'static str] = &[".rgignore", ".ignore"];
|
||||||
|
|
||||||
|
let mut errs = PartialErrorBuilder::default();
|
||||||
|
let ig_matcher =
|
||||||
|
if !self.0.opts.ignore {
|
||||||
|
Gitignore::empty()
|
||||||
|
} else {
|
||||||
|
let (m, err) = create_gitignore(&dir, IG_NAMES);
|
||||||
|
errs.maybe_push(err);
|
||||||
|
m
|
||||||
|
};
|
||||||
|
let gi_matcher =
|
||||||
|
if !self.0.opts.git_ignore {
|
||||||
|
Gitignore::empty()
|
||||||
|
} else {
|
||||||
|
let (m, err) = create_gitignore(&dir, &[".gitignore"]);
|
||||||
|
errs.maybe_push(err);
|
||||||
|
m
|
||||||
|
};
|
||||||
|
let gi_exclude_matcher =
|
||||||
|
if !self.0.opts.git_exclude {
|
||||||
|
Gitignore::empty()
|
||||||
|
} else {
|
||||||
|
let (m, err) = create_gitignore(&dir, &[".git/info/exclude"]);
|
||||||
|
errs.maybe_push(err);
|
||||||
|
m
|
||||||
|
};
|
||||||
|
let ig = IgnoreInner {
|
||||||
|
compiled: self.0.compiled.clone(),
|
||||||
|
dir: dir.to_path_buf(),
|
||||||
|
overrides: self.0.overrides.clone(),
|
||||||
|
types: self.0.types.clone(),
|
||||||
|
parent: Some(self.clone()),
|
||||||
|
is_absolute_parent: false,
|
||||||
|
absolute_base: self.0.absolute_base.clone(),
|
||||||
|
explicit_ignores: self.0.explicit_ignores.clone(),
|
||||||
|
ignore_matcher: ig_matcher,
|
||||||
|
git_global_matcher: self.0.git_global_matcher.clone(),
|
||||||
|
git_ignore_matcher: gi_matcher,
|
||||||
|
git_exclude_matcher: gi_exclude_matcher,
|
||||||
|
has_git: dir.join(".git").is_dir(),
|
||||||
|
opts: self.0.opts,
|
||||||
|
};
|
||||||
|
(ig, errs.into_error_option())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a match indicating whether the given file path should be
|
||||||
|
/// ignored or not.
|
||||||
|
///
|
||||||
|
/// The match contains information about its origin.
|
||||||
|
pub fn matched<'a, P: AsRef<Path>>(
|
||||||
|
&'a self,
|
||||||
|
path: P,
|
||||||
|
is_dir: bool,
|
||||||
|
) -> Match<IgnoreMatch<'a>> {
|
||||||
|
// We need to be careful with our path. If it has a leading ./, then
|
||||||
|
// strip it because it causes nothing but trouble.
|
||||||
|
let mut path = path.as_ref();
|
||||||
|
if let Some(p) = strip_prefix("./", path) {
|
||||||
|
path = p;
|
||||||
|
}
|
||||||
|
// Match against the override patterns. If an override matches
|
||||||
|
// regardless of whether it's whitelist/ignore, then we quit and
|
||||||
|
// return that result immediately. Overrides have the highest
|
||||||
|
// precedence.
|
||||||
|
if !self.0.overrides.is_empty() {
|
||||||
|
let mat =
|
||||||
|
self.0.overrides.matched(path, is_dir)
|
||||||
|
.map(IgnoreMatch::overrides);
|
||||||
|
if !mat.is_none() {
|
||||||
|
return mat;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut whitelisted = Match::None;
|
||||||
|
if self.0.opts.has_any_ignore_options() {
|
||||||
|
let mat = self.matched_ignore(path, is_dir);
|
||||||
|
if mat.is_ignore() {
|
||||||
|
return mat;
|
||||||
|
} else if mat.is_whitelist() {
|
||||||
|
whitelisted = mat;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !self.0.types.is_empty() {
|
||||||
|
let mat =
|
||||||
|
self.0.types.matched(path, is_dir).map(IgnoreMatch::types);
|
||||||
|
if mat.is_ignore() {
|
||||||
|
return mat;
|
||||||
|
} else if mat.is_whitelist() {
|
||||||
|
whitelisted = mat;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if whitelisted.is_none() && self.0.opts.hidden && is_hidden(path) {
|
||||||
|
return Match::Ignore(IgnoreMatch::hidden());
|
||||||
|
}
|
||||||
|
whitelisted
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Performs matching only on the ignore files for this directory and
|
||||||
|
/// all parent directories.
|
||||||
|
fn matched_ignore<'a>(
|
||||||
|
&'a self,
|
||||||
|
path: &Path,
|
||||||
|
is_dir: bool,
|
||||||
|
) -> Match<IgnoreMatch<'a>> {
|
||||||
|
let (mut m_ignore, mut m_gi, mut m_gi_exclude, mut m_explicit) =
|
||||||
|
(Match::None, Match::None, Match::None, Match::None);
|
||||||
|
let mut saw_git = false;
|
||||||
|
for ig in self.parents().take_while(|ig| !ig.0.is_absolute_parent) {
|
||||||
|
if m_ignore.is_none() {
|
||||||
|
m_ignore =
|
||||||
|
ig.0.ignore_matcher.matched(path, is_dir)
|
||||||
|
.map(IgnoreMatch::gitignore);
|
||||||
|
}
|
||||||
|
if !saw_git && m_gi.is_none() {
|
||||||
|
m_gi =
|
||||||
|
ig.0.git_ignore_matcher.matched(path, is_dir)
|
||||||
|
.map(IgnoreMatch::gitignore);
|
||||||
|
}
|
||||||
|
if !saw_git && m_gi_exclude.is_none() {
|
||||||
|
m_gi_exclude =
|
||||||
|
ig.0.git_exclude_matcher.matched(path, is_dir)
|
||||||
|
.map(IgnoreMatch::gitignore);
|
||||||
|
}
|
||||||
|
saw_git = saw_git || ig.0.has_git;
|
||||||
|
}
|
||||||
|
if let Some(abs_parent_path) = self.absolute_base() {
|
||||||
|
let path = abs_parent_path.join(path);
|
||||||
|
for ig in self.parents().skip_while(|ig|!ig.0.is_absolute_parent) {
|
||||||
|
if m_ignore.is_none() {
|
||||||
|
m_ignore =
|
||||||
|
ig.0.ignore_matcher.matched(&path, is_dir)
|
||||||
|
.map(IgnoreMatch::gitignore);
|
||||||
|
}
|
||||||
|
if !saw_git && m_gi.is_none() {
|
||||||
|
m_gi =
|
||||||
|
ig.0.git_ignore_matcher.matched(&path, is_dir)
|
||||||
|
.map(IgnoreMatch::gitignore);
|
||||||
|
}
|
||||||
|
if !saw_git && m_gi_exclude.is_none() {
|
||||||
|
m_gi_exclude =
|
||||||
|
ig.0.git_exclude_matcher.matched(&path, is_dir)
|
||||||
|
.map(IgnoreMatch::gitignore);
|
||||||
|
}
|
||||||
|
saw_git = saw_git || ig.0.has_git;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for gi in self.0.explicit_ignores.iter().rev() {
|
||||||
|
if !m_explicit.is_none() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
m_explicit = gi.matched(&path, is_dir).map(IgnoreMatch::gitignore);
|
||||||
|
}
|
||||||
|
let m_global = self.0.git_global_matcher.matched(&path, is_dir)
|
||||||
|
.map(IgnoreMatch::gitignore);
|
||||||
|
|
||||||
|
m_ignore.or(m_gi).or(m_gi_exclude).or(m_global).or(m_explicit)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns an iterator over parent ignore matchers, including this one.
|
||||||
|
pub fn parents(&self) -> Parents {
|
||||||
|
Parents(Some(self))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the absolute base path of the first absolute parent, if
/// one exists.
|
||||||
|
fn absolute_base(&self) -> Option<&Path> {
|
||||||
|
self.0.absolute_base.as_ref().map(|p| &***p)
|
||||||
|
}
|
||||||
|
}
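A small sketch of the precedence implemented by `matched` above: an explicit whitelist from an ignore file wins over the hidden-file rule, while other dotfiles are still ignored. This assumes `is_hidden` treats leading-dot names as hidden (its implementation is not part of this diff), and the directory contents are hypothetical:

```rust
use dir::IgnoreBuilder;

fn example(dir_with_gitignore: &std::path::Path) {
    // Suppose `dir_with_gitignore` contains a .gitignore with the single
    // line "!.keepme".
    let (ig, _err) = IgnoreBuilder::new().build().add_child(dir_with_gitignore);
    // Whitelisted by .gitignore, so the hidden-file rule does not apply.
    assert!(ig.matched(".keepme", false).is_whitelist());
    // No ignore rule matches, so the default hidden-file rule kicks in.
    assert!(ig.matched(".other", false).is_ignore());
}
```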
|
||||||
|
|
||||||
|
/// An iterator over all parents of an ignore matcher, including itself.
|
||||||
|
///
|
||||||
|
/// The lifetime `'a` refers to the lifetime of the initial `Ignore` matcher.
|
||||||
|
pub struct Parents<'a>(Option<&'a Ignore>);
|
||||||
|
|
||||||
|
impl<'a> Iterator for Parents<'a> {
|
||||||
|
type Item = &'a Ignore;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<&'a Ignore> {
|
||||||
|
match self.0.take() {
|
||||||
|
None => None,
|
||||||
|
Some(ig) => {
|
||||||
|
self.0 = ig.0.parent.as_ref();
|
||||||
|
Some(ig)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A builder for creating an Ignore matcher.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct IgnoreBuilder {
|
||||||
|
/// The root directory path for this ignore matcher.
|
||||||
|
dir: PathBuf,
|
||||||
|
/// An override matcher (default is empty).
|
||||||
|
overrides: Arc<Override>,
|
||||||
|
/// A type matcher (default is empty).
|
||||||
|
types: Arc<Types>,
|
||||||
|
/// Explicit ignore matchers.
|
||||||
|
explicit_ignores: Vec<Gitignore>,
|
||||||
|
/// Ignore config.
|
||||||
|
opts: IgnoreOptions,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IgnoreBuilder {
|
||||||
|
/// Create a new builder for an `Ignore` matcher.
|
||||||
|
///
|
||||||
|
/// All relative file paths are resolved with respect to the current
|
||||||
|
/// working directory.
|
||||||
|
pub fn new() -> IgnoreBuilder {
|
||||||
|
IgnoreBuilder {
|
||||||
|
dir: Path::new("").to_path_buf(),
|
||||||
|
overrides: Arc::new(Override::empty()),
|
||||||
|
types: Arc::new(Types::empty()),
|
||||||
|
explicit_ignores: vec![],
|
||||||
|
opts: IgnoreOptions {
|
||||||
|
hidden: true,
|
||||||
|
ignore: true,
|
||||||
|
git_global: true,
|
||||||
|
git_ignore: true,
|
||||||
|
git_exclude: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds a new `Ignore` matcher.
|
||||||
|
///
|
||||||
|
/// The matcher returned won't match anything until ignore rules from
|
||||||
|
/// directories are added to it.
|
||||||
|
pub fn build(&self) -> Ignore {
|
||||||
|
let git_global_matcher =
|
||||||
|
if !self.opts.git_global {
|
||||||
|
Gitignore::empty()
|
||||||
|
} else {
|
||||||
|
let (gi, err) = Gitignore::global();
|
||||||
|
if let Some(err) = err {
|
||||||
|
debug!("{}", err);
|
||||||
|
}
|
||||||
|
gi
|
||||||
|
};
|
||||||
|
Ignore(Arc::new(IgnoreInner {
|
||||||
|
compiled: Arc::new(RwLock::new(HashMap::new())),
|
||||||
|
dir: self.dir.clone(),
|
||||||
|
overrides: self.overrides.clone(),
|
||||||
|
types: self.types.clone(),
|
||||||
|
parent: None,
|
||||||
|
is_absolute_parent: true,
|
||||||
|
absolute_base: None,
|
||||||
|
explicit_ignores: Arc::new(self.explicit_ignores.clone()),
|
||||||
|
ignore_matcher: Gitignore::empty(),
|
||||||
|
git_global_matcher: Arc::new(git_global_matcher),
|
||||||
|
git_ignore_matcher: Gitignore::empty(),
|
||||||
|
git_exclude_matcher: Gitignore::empty(),
|
||||||
|
has_git: false,
|
||||||
|
opts: self.opts,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add an override matcher.
|
||||||
|
///
|
||||||
|
/// By default, no override matcher is used.
|
||||||
|
///
|
||||||
|
/// This overrides any previous setting.
|
||||||
|
pub fn overrides(&mut self, overrides: Override) -> &mut IgnoreBuilder {
|
||||||
|
self.overrides = Arc::new(overrides);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a file type matcher.
|
||||||
|
///
|
||||||
|
/// By default, no file type matcher is used.
|
||||||
|
///
|
||||||
|
/// This overrides any previous setting.
|
||||||
|
pub fn types(&mut self, types: Types) -> &mut IgnoreBuilder {
|
||||||
|
self.types = Arc::new(types);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Adds the given gitignore matcher as a global ignore matcher.
|
||||||
|
pub fn add_ignore(&mut self, ig: Gitignore) -> &mut IgnoreBuilder {
|
||||||
|
self.explicit_ignores.push(ig);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Enables ignoring hidden files.
|
||||||
|
///
|
||||||
|
/// This is enabled by default.
|
||||||
|
pub fn hidden(&mut self, yes: bool) -> &mut IgnoreBuilder {
|
||||||
|
self.opts.hidden = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Enables reading `.ignore` files.
|
||||||
|
///
|
||||||
|
/// `.ignore` files have the same semantics as `gitignore` files and are
|
||||||
|
/// supported by search tools such as ripgrep and The Silver Searcher.
|
||||||
|
///
|
||||||
|
/// This is enabled by default.
|
||||||
|
pub fn ignore(&mut self, yes: bool) -> &mut IgnoreBuilder {
|
||||||
|
self.opts.ignore = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a global gitignore matcher.
|
||||||
|
///
|
||||||
|
/// Its precedence is lower than both normal `.gitignore` files and
|
||||||
|
/// `.git/info/exclude` files.
|
||||||
|
///
|
||||||
|
/// This overwrites any previous global gitignore setting.
|
||||||
|
///
|
||||||
|
/// This is enabled by default.
|
||||||
|
pub fn git_global(&mut self, yes: bool) -> &mut IgnoreBuilder {
|
||||||
|
self.opts.git_global = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Enables reading `.gitignore` files.
|
||||||
|
///
|
||||||
|
/// `.gitignore` files have match semantics as described in the `gitignore`
|
||||||
|
/// man page.
|
||||||
|
///
|
||||||
|
/// This is enabled by default.
|
||||||
|
pub fn git_ignore(&mut self, yes: bool) -> &mut IgnoreBuilder {
|
||||||
|
self.opts.git_ignore = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Enables reading `.git/info/exclude` files.
|
||||||
|
///
|
||||||
|
/// `.git/info/exclude` files have match semantics as described in the
|
||||||
|
/// `gitignore` man page.
|
||||||
|
///
|
||||||
|
/// This is enabled by default.
|
||||||
|
pub fn git_exclude(&mut self, yes: bool) -> &mut IgnoreBuilder {
|
||||||
|
self.opts.git_exclude = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
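For reference, a short sketch of toggling the builder options documented above (again only usable inside the crate, since the module is private); every option defaults to enabled:

```rust
use dir::IgnoreBuilder;

fn example() {
    // Keep .gitignore handling, but surface hidden files, skip .ignore files
    // and skip the user's global gitignore.
    let ig = IgnoreBuilder::new()
        .hidden(false)
        .ignore(false)
        .git_global(false)
        .build();
    // The matcher still matches nothing until a directory is added.
    let (_ig_child, _err) = ig.add_child("./");
}
```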
|
||||||
|
|
||||||
|
/// Creates a new gitignore matcher for the directory given.
|
||||||
|
///
|
||||||
|
/// Ignore globs are extracted from each of the file names in `dir` in the
|
||||||
|
/// order given (earlier names have lower precedence than later names).
|
||||||
|
///
|
||||||
|
/// I/O errors are ignored.
|
||||||
|
pub fn create_gitignore(
|
||||||
|
dir: &Path,
|
||||||
|
names: &[&str],
|
||||||
|
) -> (Gitignore, Option<Error>) {
|
||||||
|
let mut builder = GitignoreBuilder::new(dir);
|
||||||
|
let mut errs = PartialErrorBuilder::default();
|
||||||
|
for name in names {
|
||||||
|
let gipath = dir.join(name);
|
||||||
|
errs.maybe_push_ignore_io(builder.add(gipath));
|
||||||
|
}
|
||||||
|
let gi = match builder.build() {
|
||||||
|
Ok(gi) => gi,
|
||||||
|
Err(err) => {
|
||||||
|
errs.push(err);
|
||||||
|
GitignoreBuilder::new(dir).build().unwrap()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
(gi, errs.into_error_option())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::fs::{self, File};
|
||||||
|
use std::io::Write;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use tempdir::TempDir;
|
||||||
|
|
||||||
|
use dir::IgnoreBuilder;
|
||||||
|
use gitignore::Gitignore;
|
||||||
|
use Error;
|
||||||
|
|
||||||
|
fn wfile<P: AsRef<Path>>(path: P, contents: &str) {
|
||||||
|
let mut file = File::create(path).unwrap();
|
||||||
|
file.write_all(contents.as_bytes()).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn mkdirp<P: AsRef<Path>>(path: P) {
|
||||||
|
fs::create_dir_all(path).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn partial(err: Error) -> Vec<Error> {
|
||||||
|
match err {
|
||||||
|
Error::Partial(errs) => errs,
|
||||||
|
_ => panic!("expected partial error but got {:?}", err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn explicit_ignore() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join("not-an-ignore"), "foo\n!bar");
|
||||||
|
|
||||||
|
let (gi, err) = Gitignore::new(td.path().join("not-an-ignore"));
|
||||||
|
assert!(err.is_none());
|
||||||
|
let (ig, err) = IgnoreBuilder::new()
|
||||||
|
.add_ignore(gi).build().add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig.matched("foo", false).is_ignore());
|
||||||
|
assert!(ig.matched("bar", false).is_whitelist());
|
||||||
|
assert!(ig.matched("baz", false).is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn git_exclude() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
mkdirp(td.path().join(".git/info"));
|
||||||
|
wfile(td.path().join(".git/info/exclude"), "foo\n!bar");
|
||||||
|
|
||||||
|
let (ig, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig.matched("foo", false).is_ignore());
|
||||||
|
assert!(ig.matched("bar", false).is_whitelist());
|
||||||
|
assert!(ig.matched("baz", false).is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn gitignore() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".gitignore"), "foo\n!bar");
|
||||||
|
|
||||||
|
let (ig, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig.matched("foo", false).is_ignore());
|
||||||
|
assert!(ig.matched("bar", false).is_whitelist());
|
||||||
|
assert!(ig.matched("baz", false).is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn ignore() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".ignore"), "foo\n!bar");
|
||||||
|
|
||||||
|
let (ig, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig.matched("foo", false).is_ignore());
|
||||||
|
assert!(ig.matched("bar", false).is_whitelist());
|
||||||
|
assert!(ig.matched("baz", false).is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tests that an .ignore will override a .gitignore.
|
||||||
|
#[test]
|
||||||
|
fn ignore_over_gitignore() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".gitignore"), "foo");
|
||||||
|
wfile(td.path().join(".ignore"), "!foo");
|
||||||
|
|
||||||
|
let (ig, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig.matched("foo", false).is_whitelist());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tests that exclude has lower precedence than both .ignore and .gitignore.
|
||||||
|
#[test]
|
||||||
|
fn exclude_lowest() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".gitignore"), "!foo");
|
||||||
|
wfile(td.path().join(".ignore"), "!bar");
|
||||||
|
mkdirp(td.path().join(".git/info"));
|
||||||
|
wfile(td.path().join(".git/info/exclude"), "foo\nbar\nbaz");
|
||||||
|
|
||||||
|
let (ig, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig.matched("baz", false).is_ignore());
|
||||||
|
assert!(ig.matched("foo", false).is_whitelist());
|
||||||
|
assert!(ig.matched("bar", false).is_whitelist());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn errored() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".gitignore"), "f**oo");
|
||||||
|
|
||||||
|
let (_, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_some());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn errored_both() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".gitignore"), "f**oo");
|
||||||
|
wfile(td.path().join(".ignore"), "fo**o");
|
||||||
|
|
||||||
|
let (_, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert_eq!(2, partial(err.expect("an error")).len());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn errored_partial() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".gitignore"), "f**oo\nbar");
|
||||||
|
|
||||||
|
let (ig, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_some());
|
||||||
|
assert!(ig.matched("bar", false).is_ignore());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn errored_partial_and_ignore() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
wfile(td.path().join(".gitignore"), "f**oo\nbar");
|
||||||
|
wfile(td.path().join(".ignore"), "!bar");
|
||||||
|
|
||||||
|
let (ig, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_some());
|
||||||
|
assert!(ig.matched("bar", false).is_whitelist());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn not_present_empty() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
|
||||||
|
let (_, err) = IgnoreBuilder::new().build().add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn stops_at_git_dir() {
|
||||||
|
// This tests that .gitignore files beyond a .git barrier aren't
|
||||||
|
// matched, but .ignore files are.
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
mkdirp(td.path().join(".git"));
|
||||||
|
mkdirp(td.path().join("foo/.git"));
|
||||||
|
wfile(td.path().join(".gitignore"), "foo");
|
||||||
|
wfile(td.path().join(".ignore"), "bar");
|
||||||
|
|
||||||
|
let ig0 = IgnoreBuilder::new().build();
|
||||||
|
let (ig1, err) = ig0.add_child(td.path());
|
||||||
|
assert!(err.is_none());
|
||||||
|
let (ig2, err) = ig1.add_child(ig1.path().join("foo"));
|
||||||
|
assert!(err.is_none());
|
||||||
|
|
||||||
|
assert!(ig1.matched("foo", false).is_ignore());
|
||||||
|
assert!(ig2.matched("foo", false).is_none());
|
||||||
|
|
||||||
|
assert!(ig1.matched("bar", false).is_ignore());
|
||||||
|
assert!(ig2.matched("bar", false).is_ignore());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn absolute_parent() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
mkdirp(td.path().join(".git"));
|
||||||
|
mkdirp(td.path().join("foo"));
|
||||||
|
wfile(td.path().join(".gitignore"), "bar");
|
||||||
|
|
||||||
|
// First, check that the parent gitignore file isn't detected if the
|
||||||
|
// parent isn't added. This establishes a baseline.
|
||||||
|
let ig0 = IgnoreBuilder::new().build();
|
||||||
|
let (ig1, err) = ig0.add_child(td.path().join("foo"));
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig1.matched("bar", false).is_none());
|
||||||
|
|
||||||
|
// Second, check that adding a parent directory actually works.
|
||||||
|
let ig0 = IgnoreBuilder::new().build();
|
||||||
|
let (ig1, err) = ig0.add_parents(td.path().join("foo"));
|
||||||
|
assert!(err.is_none());
|
||||||
|
let (ig2, err) = ig1.add_child(td.path().join("foo"));
|
||||||
|
assert!(err.is_none());
|
||||||
|
assert!(ig2.matched("bar", false).is_ignore());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn absolute_parent_anchored() {
|
||||||
|
let td = TempDir::new("ignore-test-").unwrap();
|
||||||
|
mkdirp(td.path().join(".git"));
|
||||||
|
mkdirp(td.path().join("src/llvm"));
|
||||||
|
wfile(td.path().join(".gitignore"), "/llvm/\nfoo");
|
||||||
|
|
||||||
|
let ig0 = IgnoreBuilder::new().build();
|
||||||
|
let (ig1, err) = ig0.add_parents(td.path().join("src"));
|
||||||
|
assert!(err.is_none());
|
||||||
|
let (ig2, err) = ig1.add_child("src");
|
||||||
|
assert!(err.is_none());
|
||||||
|
|
||||||
|
assert!(ig1.matched("llvm", true).is_none());
|
||||||
|
assert!(ig2.matched("llvm", true).is_none());
|
||||||
|
assert!(ig2.matched("src/llvm", true).is_none());
|
||||||
|
assert!(ig2.matched("foo", false).is_ignore());
|
||||||
|
assert!(ig2.matched("src/foo", false).is_ignore());
|
||||||
|
}
|
||||||
|
}
|
||||||
610  ignore/src/gitignore.rs  Normal file
@@ -0,0 +1,610 @@
/*!
The gitignore module provides a way to match globs from a gitignore file
against file paths.

Note that this module implements the specification as described in the
`gitignore` man page from scratch. That is, this module does *not* shell out to
the `git` command line tool.
*/
|
|
||||||
|
use std::cell::RefCell;
|
||||||
|
use std::env;
|
||||||
|
use std::fs::File;
|
||||||
|
use std::io::{self, BufRead, Read};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::str;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use globset::{Candidate, GlobBuilder, GlobSet, GlobSetBuilder};
|
||||||
|
use regex::bytes::Regex;
|
||||||
|
use thread_local::ThreadLocal;
|
||||||
|
|
||||||
|
use pathutil::{is_file_name, strip_prefix};
|
||||||
|
use {Error, Match, PartialErrorBuilder};
|
||||||
|
|
||||||
|
/// Glob represents a single glob in a gitignore file.
|
||||||
|
///
|
||||||
|
/// This is used to report information about the highest precedence glob that
|
||||||
|
/// matched in one or more gitignore files.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Glob {
|
||||||
|
/// The file path that this glob was extracted from.
|
||||||
|
from: Option<PathBuf>,
|
||||||
|
/// The original glob string.
|
||||||
|
original: String,
|
||||||
|
/// The actual glob string used to convert to a regex.
|
||||||
|
actual: String,
|
||||||
|
/// Whether this is a whitelisted glob or not.
|
||||||
|
is_whitelist: bool,
|
||||||
|
/// Whether this glob should only match directories or not.
|
||||||
|
is_only_dir: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Glob {
|
||||||
|
/// Returns the file path that defined this glob.
|
||||||
|
pub fn from(&self) -> Option<&Path> {
|
||||||
|
self.from.as_ref().map(|p| &**p)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The original glob as it was defined in a gitignore file.
|
||||||
|
pub fn original(&self) -> &str {
|
||||||
|
&self.original
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The actual glob that was compiled to respect gitignore
|
||||||
|
/// semantics.
|
||||||
|
pub fn actual(&self) -> &str {
|
||||||
|
&self.actual
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether this was a whitelisted glob or not.
|
||||||
|
pub fn is_whitelist(&self) -> bool {
|
||||||
|
self.is_whitelist
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether this glob must match a directory or not.
|
||||||
|
pub fn is_only_dir(&self) -> bool {
|
||||||
|
self.is_only_dir
|
||||||
|
}
|
||||||
|
}
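Since each match reports the `Glob` it came from, a caller can explain why a path was matched. A minimal sketch using the accessors above; the gitignore location and queried path are hypothetical:

```rust
use ignore::gitignore::Gitignore;
use ignore::Match;

fn explain(path: &str) {
    let (gi, _err) = Gitignore::new("./.gitignore");
    match gi.matched(path, false) {
        Match::Ignore(glob) => {
            println!("{} ignored by {:?} (glob {})",
                     path, glob.from(), glob.original());
        }
        Match::Whitelist(glob) => {
            println!("{} whitelisted by glob {}", path, glob.original());
        }
        Match::None => println!("{} not matched", path),
    }
}
```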
|
||||||
|
|
||||||
|
/// Gitignore is a matcher for the globs in one or more gitignore files
|
||||||
|
/// in the same directory.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Gitignore {
|
||||||
|
set: GlobSet,
|
||||||
|
root: PathBuf,
|
||||||
|
globs: Vec<Glob>,
|
||||||
|
num_ignores: u64,
|
||||||
|
num_whitelists: u64,
|
||||||
|
matches: Arc<ThreadLocal<RefCell<Vec<usize>>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Gitignore {
|
||||||
|
/// Creates a new gitignore matcher from the gitignore file path given.
|
||||||
|
///
|
||||||
|
/// If it's desirable to include multiple gitignore files in a single
|
||||||
|
/// matcher, or read gitignore globs from a different source, then
|
||||||
|
/// use `GitignoreBuilder`.
|
||||||
|
///
|
||||||
|
/// This always returns a valid matcher, even if it's empty. In particular,
|
||||||
|
/// a Gitignore file can be partially valid, e.g., when one glob is invalid
|
||||||
|
/// but the rest aren't.
|
||||||
|
///
|
||||||
|
/// Note that I/O errors are ignored. For more granular control over
|
||||||
|
/// errors, use `GitignoreBuilder`.
|
||||||
|
pub fn new<P: AsRef<Path>>(
|
||||||
|
gitignore_path: P,
|
||||||
|
) -> (Gitignore, Option<Error>) {
|
||||||
|
let path = gitignore_path.as_ref();
|
||||||
|
let parent = path.parent().unwrap_or(Path::new("/"));
|
||||||
|
let mut builder = GitignoreBuilder::new(parent);
|
||||||
|
let mut errs = PartialErrorBuilder::default();
|
||||||
|
errs.maybe_push_ignore_io(builder.add(path));
|
||||||
|
match builder.build() {
|
||||||
|
Ok(gi) => (gi, errs.into_error_option()),
|
||||||
|
Err(err) => {
|
||||||
|
errs.push(err);
|
||||||
|
(Gitignore::empty(), errs.into_error_option())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new gitignore matcher from the global ignore file, if one
|
||||||
|
/// exists.
|
||||||
|
///
|
||||||
|
/// The global config file path is specified by git's `core.excludesFile`
|
||||||
|
/// config option.
|
||||||
|
///
|
||||||
|
/// Git's config file location is `$HOME/.gitconfig`. If `$HOME/.gitconfig`
|
||||||
|
/// does not exist or does not specify `core.excludesFile`, then
|
||||||
|
/// `$XDG_CONFIG_HOME/git/ignore` is read. If `$XDG_CONFIG_HOME` is not
|
||||||
|
/// set or is empty, then `$HOME/.config/git/ignore` is used instead.
|
||||||
|
pub fn global() -> (Gitignore, Option<Error>) {
|
||||||
|
match gitconfig_excludes_path() {
|
||||||
|
None => (Gitignore::empty(), None),
|
||||||
|
Some(path) => {
|
||||||
|
if !path.is_file() {
|
||||||
|
(Gitignore::empty(), None)
|
||||||
|
} else {
|
||||||
|
Gitignore::new(path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Creates a new empty gitignore matcher that never matches anything.
|
||||||
|
///
|
||||||
|
/// Its path is empty.
|
||||||
|
pub fn empty() -> Gitignore {
|
||||||
|
GitignoreBuilder::new("").build().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the directory containing this gitignore matcher.
|
||||||
|
///
|
||||||
|
/// All matches are done relative to this path.
|
||||||
|
pub fn path(&self) -> &Path {
|
||||||
|
&*self.root
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if and only if this gitignore has zero globs, and
|
||||||
|
/// therefore never matches any file path.
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.set.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the total number of globs, which should be equivalent to
|
||||||
|
/// `num_ignores + num_whitelists`.
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.set.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the total number of ignore globs.
|
||||||
|
pub fn num_ignores(&self) -> u64 {
|
||||||
|
self.num_ignores
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the total number of whitelisted globs.
|
||||||
|
pub fn num_whitelists(&self) -> u64 {
|
||||||
|
self.num_whitelists
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns whether the given file path matched a pattern in this gitignore
|
||||||
|
/// matcher.
|
||||||
|
///
|
||||||
|
/// `is_dir` should be true if the path refers to a directory and false
|
||||||
|
/// otherwise.
|
||||||
|
///
|
||||||
|
/// The given path is matched relative to the path given when building
|
||||||
|
/// the matcher. Specifically, before matching `path`, its prefix (as
|
||||||
|
/// determined by a common suffix of the directory containing this
|
||||||
|
/// gitignore) is stripped. If there is no common suffix/prefix overlap,
|
||||||
|
/// then `path` is assumed to be relative to this matcher.
|
||||||
|
pub fn matched<P: AsRef<Path>>(
|
||||||
|
&self,
|
||||||
|
path: P,
|
||||||
|
is_dir: bool,
|
||||||
|
) -> Match<&Glob> {
|
||||||
|
if self.is_empty() {
|
||||||
|
return Match::None;
|
||||||
|
}
|
||||||
|
self.matched_stripped(self.strip(path.as_ref()), is_dir)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Like matched, but takes a path that has already been stripped.
|
||||||
|
fn matched_stripped<P: AsRef<Path>>(
|
||||||
|
&self,
|
||||||
|
path: P,
|
||||||
|
is_dir: bool,
|
||||||
|
) -> Match<&Glob> {
|
||||||
|
if self.is_empty() {
|
||||||
|
return Match::None;
|
||||||
|
}
|
||||||
|
let path = path.as_ref();
|
||||||
|
let _matches = self.matches.get_default();
|
||||||
|
let mut matches = _matches.borrow_mut();
|
||||||
|
let candidate = Candidate::new(path);
|
||||||
|
self.set.matches_candidate_into(&candidate, &mut *matches);
|
||||||
|
for &i in matches.iter().rev() {
|
||||||
|
let glob = &self.globs[i];
|
||||||
|
if !glob.is_only_dir() || is_dir {
|
||||||
|
return if glob.is_whitelist() {
|
||||||
|
Match::Whitelist(glob)
|
||||||
|
} else {
|
||||||
|
Match::Ignore(glob)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Match::None
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Strips the given path such that it's suitable for matching with this
|
||||||
|
/// gitignore matcher.
|
||||||
|
fn strip<'a, P: 'a + AsRef<Path> + ?Sized>(
|
||||||
|
&'a self,
|
||||||
|
path: &'a P,
|
||||||
|
) -> &'a Path {
|
||||||
|
let mut path = path.as_ref();
|
||||||
|
// A leading ./ is completely superfluous. We also strip it from
|
||||||
|
// our gitignore root path, so we need to strip it from our candidate
|
||||||
|
// path too.
|
||||||
|
if let Some(p) = strip_prefix("./", path) {
|
||||||
|
path = p;
|
||||||
|
}
|
||||||
|
// Strip any common prefix between the candidate path and the root
|
||||||
|
// of the gitignore, to make sure we get relative matching right.
|
||||||
|
// BUT, a file name might not have any directory components to it,
|
||||||
|
// in which case, we don't want to accidentally strip any part of the
|
||||||
|
// file name.
|
||||||
|
if !is_file_name(path) {
|
||||||
|
if let Some(p) = strip_prefix(&self.root, path) {
|
||||||
|
path = p;
|
||||||
|
// If we're left with a leading slash, get rid of it.
|
||||||
|
if let Some(p) = strip_prefix("/", path) {
|
||||||
|
path = p;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
path
|
||||||
|
}
|
||||||
|
}
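The `strip` logic above means that absolute queries and root-relative queries behave the same. A small sketch, assuming a matcher rooted at a hypothetical project directory:

```rust
use ignore::gitignore::GitignoreBuilder;

fn example() {
    // Rooted at /home/user/project, containing the single glob "target".
    let mut builder = GitignoreBuilder::new("/home/user/project");
    builder.add_line(None, "target").unwrap();
    let gi = builder.build().unwrap();
    // The common prefix with the root is stripped before matching, so both
    // of these queries are equivalent.
    assert!(gi.matched("/home/user/project/target", true).is_ignore());
    assert!(gi.matched("target", true).is_ignore());
}
```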
|
||||||
|
|
||||||
|
/// Builds a matcher for a single set of globs from a .gitignore file.
|
||||||
|
pub struct GitignoreBuilder {
|
||||||
|
builder: GlobSetBuilder,
|
||||||
|
root: PathBuf,
|
||||||
|
globs: Vec<Glob>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GitignoreBuilder {
|
||||||
|
/// Create a new builder for a gitignore file.
|
||||||
|
///
|
||||||
|
/// The path given should be the path at which the globs for this gitignore
|
||||||
|
/// file should be matched. Note that paths are always matched relative
|
||||||
|
/// to the root path given here. Generally, the root path should correspond
|
||||||
|
/// to the *directory* containing a `.gitignore` file.
|
||||||
|
pub fn new<P: AsRef<Path>>(root: P) -> GitignoreBuilder {
|
||||||
|
let root = root.as_ref();
|
||||||
|
GitignoreBuilder {
|
||||||
|
builder: GlobSetBuilder::new(),
|
||||||
|
root: strip_prefix("./", root).unwrap_or(root).to_path_buf(),
|
||||||
|
globs: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds a new matcher from the globs added so far.
|
||||||
|
///
|
||||||
|
/// Once a matcher is built, no new globs can be added to it.
|
||||||
|
pub fn build(&self) -> Result<Gitignore, Error> {
|
||||||
|
let nignore = self.globs.iter().filter(|g| !g.is_whitelist()).count();
|
||||||
|
let nwhite = self.globs.iter().filter(|g| g.is_whitelist()).count();
|
||||||
|
let set = try!(
|
||||||
|
self.builder.build().map_err(|err| Error::Glob(err.to_string())));
|
||||||
|
Ok(Gitignore {
|
||||||
|
set: set,
|
||||||
|
root: self.root.clone(),
|
||||||
|
globs: self.globs.clone(),
|
||||||
|
num_ignores: nignore as u64,
|
||||||
|
num_whitelists: nwhite as u64,
|
||||||
|
matches: Arc::new(ThreadLocal::default()),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add each glob from the file path given.
|
||||||
|
///
|
||||||
|
/// The file given should be formatted as a `gitignore` file.
|
||||||
|
///
|
||||||
|
/// Note that partial errors can be returned. For example, if there was
|
||||||
|
/// a problem adding one glob, an error for that will be returned, but
|
||||||
|
/// all other valid globs will still be added.
|
||||||
|
pub fn add<P: AsRef<Path>>(&mut self, path: P) -> Option<Error> {
|
||||||
|
let path = path.as_ref();
|
||||||
|
let file = match File::open(path) {
|
||||||
|
Err(err) => return Some(Error::Io(err).with_path(path)),
|
||||||
|
Ok(file) => file,
|
||||||
|
};
|
||||||
|
let rdr = io::BufReader::new(file);
|
||||||
|
let mut errs = PartialErrorBuilder::default();
|
||||||
|
for (i, line) in rdr.lines().enumerate() {
|
||||||
|
let lineno = (i + 1) as u64;
|
||||||
|
let line = match line {
|
||||||
|
Ok(line) => line,
|
||||||
|
Err(err) => {
|
||||||
|
errs.push(Error::Io(err).tagged(path, lineno));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if let Err(err) = self.add_line(Some(path.to_path_buf()), &line) {
|
||||||
|
errs.push(err.tagged(path, lineno));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
errs.into_error_option()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add each glob line from the string given.
|
||||||
|
///
|
||||||
|
/// If this string came from a particular `gitignore` file, then its path
|
||||||
|
/// should be provided here.
|
||||||
|
///
|
||||||
|
/// The string given should be formatted as a `gitignore` file.
|
||||||
|
#[cfg(test)]
|
||||||
|
fn add_str(
|
||||||
|
&mut self,
|
||||||
|
from: Option<PathBuf>,
|
||||||
|
gitignore: &str,
|
||||||
|
) -> Result<&mut GitignoreBuilder, Error> {
|
||||||
|
for line in gitignore.lines() {
|
||||||
|
try!(self.add_line(from.clone(), line));
|
||||||
|
}
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a line from a gitignore file to this builder.
|
||||||
|
///
|
||||||
|
/// If this line came from a particular `gitignore` file, then its path
|
||||||
|
/// should be provided here.
|
||||||
|
///
|
||||||
|
/// If the line could not be parsed as a glob, then an error is returned.
|
||||||
|
pub fn add_line(
|
||||||
|
&mut self,
|
||||||
|
from: Option<PathBuf>,
|
||||||
|
mut line: &str,
|
||||||
|
) -> Result<&mut GitignoreBuilder, Error> {
|
||||||
|
if line.starts_with("#") {
|
||||||
|
return Ok(self);
|
||||||
|
}
|
||||||
|
if !line.ends_with("\\ ") {
|
||||||
|
line = line.trim_right();
|
||||||
|
}
|
||||||
|
if line.is_empty() {
|
||||||
|
return Ok(self);
|
||||||
|
}
|
||||||
|
let mut glob = Glob {
|
||||||
|
from: from,
|
||||||
|
original: line.to_string(),
|
||||||
|
actual: String::new(),
|
||||||
|
is_whitelist: false,
|
||||||
|
is_only_dir: false,
|
||||||
|
};
|
||||||
|
let mut literal_separator = false;
|
||||||
|
let has_slash = line.chars().any(|c| c == '/');
|
||||||
|
let mut is_absolute = false;
|
||||||
|
if line.starts_with("\\!") || line.starts_with("\\#") {
|
||||||
|
line = &line[1..];
|
||||||
|
is_absolute = line.chars().nth(0) == Some('/');
|
||||||
|
} else {
|
||||||
|
if line.starts_with("!") {
|
||||||
|
glob.is_whitelist = true;
|
||||||
|
line = &line[1..];
|
||||||
|
}
|
||||||
|
if line.starts_with("/") {
|
||||||
|
// `man gitignore` says that if a glob starts with a slash,
|
||||||
|
// then the glob can only match the beginning of a path
|
||||||
|
// (relative to the location of gitignore). We achieve this by
|
||||||
|
// simply banning wildcards from matching /.
|
||||||
|
literal_separator = true;
|
||||||
|
line = &line[1..];
|
||||||
|
is_absolute = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If it ends with a slash, then this should only match directories,
|
||||||
|
// but the slash should otherwise not be used while globbing.
|
||||||
|
if let Some((i, c)) = line.char_indices().rev().nth(0) {
|
||||||
|
if c == '/' {
|
||||||
|
glob.is_only_dir = true;
|
||||||
|
line = &line[..i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If there is a literal slash, then we note that so that globbing
|
||||||
|
// doesn't let wildcards match slashes.
|
||||||
|
glob.actual = line.to_string();
|
||||||
|
if has_slash {
|
||||||
|
literal_separator = true;
|
||||||
|
}
|
||||||
|
// If there was a leading slash, then this is a glob that must
|
||||||
|
// match the entire path name. Otherwise, we should let it match
|
||||||
|
// anywhere, so use a **/ prefix.
|
||||||
|
if !is_absolute {
|
||||||
|
// ... but only if we don't already have a **/ prefix.
|
||||||
|
if !glob.actual.starts_with("**/") {
|
||||||
|
glob.actual = format!("**/{}", glob.actual);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If the glob ends with `/**`, then we should only match everything
|
||||||
|
// inside a directory, but not the directory itself. Standard globs
|
||||||
|
// will match the directory. So we add `/*` to force the issue.
|
||||||
|
if glob.actual.ends_with("/**") {
|
||||||
|
glob.actual = format!("{}/*", glob.actual);
|
||||||
|
}
|
||||||
|
let parsed = try!(
|
||||||
|
GlobBuilder::new(&glob.actual)
|
||||||
|
.literal_separator(literal_separator)
|
||||||
|
.build()
|
||||||
|
.map_err(|err| Error::Glob(err.to_string())));
|
||||||
|
self.builder.add(parsed);
|
||||||
|
self.globs.push(glob);
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
}
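A sketch of the glob rewriting performed by `add_line` above, expressed through match results; the root path is hypothetical:

```rust
use ignore::gitignore::GitignoreBuilder;

fn example() {
    let mut builder = GitignoreBuilder::new("/repo");
    // "foo" has no slash, so it is rewritten to "**/foo" and matches anywhere.
    builder.add_line(None, "foo").unwrap();
    // "/bar" is anchored to the root, and wildcards may not cross "/".
    builder.add_line(None, "/bar").unwrap();
    // "build/" may only match directories.
    builder.add_line(None, "build/").unwrap();
    let gi = builder.build().unwrap();

    assert!(gi.matched("a/b/foo", false).is_ignore());
    assert!(gi.matched("bar", false).is_ignore());
    assert!(!gi.matched("nested/bar", false).is_ignore());
    assert!(gi.matched("build", true).is_ignore());
    assert!(!gi.matched("build", false).is_ignore());
}
```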
|
||||||
|
|
||||||
|
/// Return the file path of the current environment's global gitignore file.
|
||||||
|
///
|
||||||
|
/// Note that the file path returned may not exist.
|
||||||
|
fn gitconfig_excludes_path() -> Option<PathBuf> {
|
||||||
|
gitconfig_contents()
|
||||||
|
.and_then(|data| parse_excludes_file(&data))
|
||||||
|
.or_else(excludes_file_default)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the file contents of git's global config file, if one exists.
|
||||||
|
fn gitconfig_contents() -> Option<Vec<u8>> {
|
||||||
|
let home = match env::var_os("HOME") {
|
||||||
|
None => return None,
|
||||||
|
Some(home) => PathBuf::from(home),
|
||||||
|
};
|
||||||
|
let mut file = match File::open(home.join(".gitconfig")) {
|
||||||
|
Err(_) => return None,
|
||||||
|
Ok(file) => io::BufReader::new(file),
|
||||||
|
};
|
||||||
|
let mut contents = vec![];
|
||||||
|
file.read_to_end(&mut contents).ok().map(|_| contents)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the default file path for a global .gitignore file.
|
||||||
|
///
|
||||||
|
/// Specifically, this respects XDG_CONFIG_HOME.
|
||||||
|
fn excludes_file_default() -> Option<PathBuf> {
|
||||||
|
env::var_os("XDG_CONFIG_HOME")
|
||||||
|
.and_then(|x| if x.is_empty() { None } else { Some(PathBuf::from(x)) })
|
||||||
|
.or_else(|| env::home_dir().map(|p| p.join(".config")))
|
||||||
|
.map(|x| x.join("git/ignore"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract git's `core.excludesfile` config setting from the raw file contents
|
||||||
|
/// given.
|
||||||
|
fn parse_excludes_file(data: &[u8]) -> Option<PathBuf> {
|
||||||
|
// N.B. This is the lazy approach, and isn't technically correct, but
|
||||||
|
// probably works in more circumstances. I guess we would ideally have
|
||||||
|
// a full INI parser. Yuck.
|
||||||
|
lazy_static! {
|
||||||
|
static ref RE: Regex = Regex::new(
|
||||||
|
r"(?ium)^\s*excludesfile\s*=\s*(.+)\s*$").unwrap();
|
||||||
|
};
|
||||||
|
let caps = match RE.captures(data) {
|
||||||
|
None => return None,
|
||||||
|
Some(caps) => caps,
|
||||||
|
};
|
||||||
|
str::from_utf8(&caps[1]).ok().map(|s| PathBuf::from(expand_tilde(s)))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Expands ~ in file paths to the value of $HOME.
|
||||||
|
fn expand_tilde(path: &str) -> String {
|
||||||
|
let home = match env::var("HOME") {
|
||||||
|
Err(_) => return path.to_string(),
|
||||||
|
Ok(home) => home,
|
||||||
|
};
|
||||||
|
path.replace("~", &home)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::path::Path;
|
||||||
|
use super::{Gitignore, GitignoreBuilder};
|
||||||
|
|
||||||
|
fn gi_from_str<P: AsRef<Path>>(root: P, s: &str) -> Gitignore {
|
||||||
|
let mut builder = GitignoreBuilder::new(root);
|
||||||
|
builder.add_str(None, s).unwrap();
|
||||||
|
builder.build().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! ignored {
|
||||||
|
($name:ident, $root:expr, $gi:expr, $path:expr) => {
|
||||||
|
ignored!($name, $root, $gi, $path, false);
|
||||||
|
};
|
||||||
|
($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => {
|
||||||
|
#[test]
|
||||||
|
fn $name() {
|
||||||
|
let gi = gi_from_str($root, $gi);
|
||||||
|
assert!(gi.matched($path, $is_dir).is_ignore());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! not_ignored {
|
||||||
|
($name:ident, $root:expr, $gi:expr, $path:expr) => {
|
||||||
|
not_ignored!($name, $root, $gi, $path, false);
|
||||||
|
};
|
||||||
|
($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => {
|
||||||
|
#[test]
|
||||||
|
fn $name() {
|
||||||
|
let gi = gi_from_str($root, $gi);
|
||||||
|
assert!(!gi.matched($path, $is_dir).is_ignore());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const ROOT: &'static str = "/home/foobar/rust/rg";
|
||||||
|
|
||||||
|
ignored!(ig1, ROOT, "months", "months");
|
||||||
|
ignored!(ig2, ROOT, "*.lock", "Cargo.lock");
|
||||||
|
ignored!(ig3, ROOT, "*.rs", "src/main.rs");
|
||||||
|
ignored!(ig4, ROOT, "src/*.rs", "src/main.rs");
|
||||||
|
ignored!(ig5, ROOT, "/*.c", "cat-file.c");
|
||||||
|
ignored!(ig6, ROOT, "/src/*.rs", "src/main.rs");
|
||||||
|
ignored!(ig7, ROOT, "!src/main.rs\n*.rs", "src/main.rs");
|
||||||
|
ignored!(ig8, ROOT, "foo/", "foo", true);
|
||||||
|
ignored!(ig9, ROOT, "**/foo", "foo");
|
||||||
|
ignored!(ig10, ROOT, "**/foo", "src/foo");
|
||||||
|
ignored!(ig11, ROOT, "**/foo/**", "src/foo/bar");
|
||||||
|
ignored!(ig12, ROOT, "**/foo/**", "wat/src/foo/bar/baz");
|
||||||
|
ignored!(ig13, ROOT, "**/foo/bar", "foo/bar");
|
||||||
|
ignored!(ig14, ROOT, "**/foo/bar", "src/foo/bar");
|
||||||
|
ignored!(ig15, ROOT, "abc/**", "abc/x");
|
||||||
|
ignored!(ig16, ROOT, "abc/**", "abc/x/y");
|
||||||
|
ignored!(ig17, ROOT, "abc/**", "abc/x/y/z");
|
||||||
|
ignored!(ig18, ROOT, "a/**/b", "a/b");
|
||||||
|
ignored!(ig19, ROOT, "a/**/b", "a/x/b");
|
||||||
|
ignored!(ig20, ROOT, "a/**/b", "a/x/y/b");
|
||||||
|
ignored!(ig21, ROOT, r"\!xy", "!xy");
|
||||||
|
ignored!(ig22, ROOT, r"\#foo", "#foo");
|
||||||
|
ignored!(ig23, ROOT, "foo", "./foo");
|
||||||
|
ignored!(ig24, ROOT, "target", "grep/target");
|
||||||
|
ignored!(ig25, ROOT, "Cargo.lock", "./tabwriter-bin/Cargo.lock");
|
||||||
|
ignored!(ig26, ROOT, "/foo/bar/baz", "./foo/bar/baz");
|
||||||
|
ignored!(ig27, ROOT, "foo/", "xyz/foo", true);
|
||||||
|
ignored!(ig28, ROOT, "src/*.rs", "src/grep/src/main.rs");
|
||||||
|
ignored!(ig29, "./src", "/llvm/", "./src/llvm", true);
|
||||||
|
ignored!(ig30, ROOT, "node_modules/ ", "node_modules", true);
|
||||||
|
|
||||||
|
not_ignored!(ignot1, ROOT, "amonths", "months");
|
||||||
|
not_ignored!(ignot2, ROOT, "monthsa", "months");
|
||||||
|
not_ignored!(ignot3, ROOT, "/src/*.rs", "src/grep/src/main.rs");
|
||||||
|
not_ignored!(ignot4, ROOT, "/*.c", "mozilla-sha1/sha1.c");
|
||||||
|
not_ignored!(ignot5, ROOT, "/src/*.rs", "src/grep/src/main.rs");
|
||||||
|
not_ignored!(ignot6, ROOT, "*.rs\n!src/main.rs", "src/main.rs");
|
||||||
|
not_ignored!(ignot7, ROOT, "foo/", "foo", false);
|
||||||
|
not_ignored!(ignot8, ROOT, "**/foo/**", "wat/src/afoo/bar/baz");
|
||||||
|
not_ignored!(ignot9, ROOT, "**/foo/**", "wat/src/fooa/bar/baz");
|
||||||
|
not_ignored!(ignot10, ROOT, "**/foo/bar", "foo/src/bar");
|
||||||
|
not_ignored!(ignot11, ROOT, "#foo", "#foo");
|
||||||
|
not_ignored!(ignot12, ROOT, "\n\n\n", "foo");
|
||||||
|
not_ignored!(ignot13, ROOT, "foo/**", "foo", true);
|
||||||
|
not_ignored!(
|
||||||
|
ignot14, "./third_party/protobuf", "m4/ltoptions.m4",
|
||||||
|
"./third_party/protobuf/csharp/src/packages/repositories.config");
|
||||||
|
not_ignored!(ignot15, ROOT, "!/bar", "foo/bar");
|
||||||
|
|
||||||
|
fn bytes(s: &str) -> Vec<u8> {
|
||||||
|
s.to_string().into_bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn path_string<P: AsRef<Path>>(path: P) -> String {
|
||||||
|
path.as_ref().to_str().unwrap().to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_excludes_file1() {
|
||||||
|
let data = bytes("[core]\nexcludesFile = /foo/bar");
|
||||||
|
let got = super::parse_excludes_file(&data).unwrap();
|
||||||
|
assert_eq!(path_string(got), "/foo/bar");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_excludes_file2() {
|
||||||
|
let data = bytes("[core]\nexcludesFile = ~/foo/bar");
|
||||||
|
let got = super::parse_excludes_file(&data).unwrap();
|
||||||
|
assert_eq!(path_string(got), super::expand_tilde("~/foo/bar"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn parse_excludes_file3() {
|
||||||
|
let data = bytes("[core]\nexcludeFile = /foo/bar");
|
||||||
|
assert!(super::parse_excludes_file(&data).is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/106
|
||||||
|
#[test]
|
||||||
|
fn regression_106() {
|
||||||
|
gi_from_str("/", " ");
|
||||||
|
}
|
||||||
|
}
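Putting the public pieces of this module together, a minimal sketch of matching a path against a specific gitignore file plus the user's global ignore file; the file locations are hypothetical:

```rust
use ignore::gitignore::Gitignore;
use ignore::Match;

fn example() {
    // Per-project matcher, built directly from a gitignore file on disk.
    let (project, err) = Gitignore::new("./my-project/.gitignore");
    if let Some(err) = err {
        println!("partial error: {}", err);
    }
    // Global matcher from core.excludesFile / XDG config, if one exists.
    let (global, _err) = Gitignore::global();

    let path = "my-project/target/debug/foo";
    // Ask the project file first; fall back to the global file only if the
    // project file says nothing, mirroring gitignore precedence.
    let m = match project.matched(path, false) {
        Match::None => global.matched(path, false),
        m => m,
    };
    println!("ignored? {}", m.is_ignore());
}
```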
|
||||||
391  ignore/src/lib.rs  Normal file
@@ -0,0 +1,391 @@
/*!
|
||||||
|
The ignore crate provides a fast recursive directory iterator that respects
|
||||||
|
various filters such as globs, file types and `.gitignore` files. The precise
|
||||||
|
matching rules and precedence is explained in the documentation for
|
||||||
|
`WalkBuilder`.
|
||||||
|
|
||||||
|
Secondarily, this crate exposes gitignore and file type matchers for use cases
|
||||||
|
that demand more fine-grained control.
|
||||||
|
|
||||||
|
# Example
|
||||||
|
|
||||||
|
This example shows the most basic usage of this crate. This code will
|
||||||
|
recursively traverse the current directory while automatically filtering out
|
||||||
|
files and directories according to ignore globs found in files like
|
||||||
|
`.ignore` and `.gitignore`:
|
||||||
|
|
||||||
|
|
||||||
|
```rust,no_run
|
||||||
|
use ignore::Walk;
|
||||||
|
|
||||||
|
for result in Walk::new("./") {
|
||||||
|
// Each item yielded by the iterator is either a directory entry or an
|
||||||
|
// error, so either print the path or the error.
|
||||||
|
match result {
|
||||||
|
Ok(entry) => println!("{}", entry.path().display()),
|
||||||
|
Err(err) => println!("ERROR: {}", err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
# Example: advanced
|
||||||
|
|
||||||
|
By default, the recursive directory iterator will ignore hidden files and
|
||||||
|
directories. This can be disabled by building the iterator with `WalkBuilder`:
|
||||||
|
|
||||||
|
```rust,no_run
|
||||||
|
use ignore::WalkBuilder;
|
||||||
|
|
||||||
|
for result in WalkBuilder::new("./").hidden(false).build() {
|
||||||
|
println!("{:?}", result);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
See the documentation for `WalkBuilder` for many other options.
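
# Example: walking in parallel

A rough sketch of the parallel walker; this assumes that `WalkBuilder::build_parallel`
returns a `WalkParallel` whose `run` method accepts a per-thread closure factory and
that each visitor closure returns a `WalkState`, as exported from the `walk` module:

```rust,ignore
use ignore::{WalkBuilder, WalkState};

WalkBuilder::new("./").build_parallel().run(|| {
    // Called once per worker thread to create that thread's visitor.
    Box::new(|result| {
        match result {
            Ok(entry) => println!("{}", entry.path().display()),
            Err(err) => println!("ERROR: {}", err),
        }
        WalkState::Continue
    })
});
```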
|
||||||
|
*/
|
||||||
|
|
||||||
|
#![deny(missing_docs)]
|
||||||
|
|
||||||
|
extern crate crossbeam;
|
||||||
|
extern crate globset;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate lazy_static;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate log;
|
||||||
|
extern crate memchr;
|
||||||
|
extern crate regex;
|
||||||
|
#[cfg(test)]
|
||||||
|
extern crate tempdir;
|
||||||
|
extern crate thread_local;
|
||||||
|
extern crate walkdir;
|
||||||
|
|
||||||
|
use std::error;
|
||||||
|
use std::fmt;
|
||||||
|
use std::io;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
pub use walk::{DirEntry, Walk, WalkBuilder, WalkParallel, WalkState};
|
||||||
|
|
||||||
|
mod dir;
|
||||||
|
pub mod gitignore;
|
||||||
|
mod pathutil;
|
||||||
|
pub mod overrides;
|
||||||
|
pub mod types;
|
||||||
|
mod walk;
|
||||||
|
|
||||||
|
/// Represents an error that can occur when parsing an ignore file, building a
/// matcher, or walking a directory tree.
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum Error {
|
||||||
|
/// A collection of "soft" errors. These occur when adding an ignore
|
||||||
|
/// file partially succeeds.
|
||||||
|
Partial(Vec<Error>),
|
||||||
|
/// An error associated with a specific line number.
|
||||||
|
WithLineNumber {
|
||||||
|
/// The line number.
|
||||||
|
line: u64,
|
||||||
|
/// The underlying error.
|
||||||
|
err: Box<Error>,
|
||||||
|
},
|
||||||
|
/// An error associated with a particular file path.
|
||||||
|
WithPath {
|
||||||
|
/// The file path.
|
||||||
|
path: PathBuf,
|
||||||
|
/// The underlying error.
|
||||||
|
err: Box<Error>,
|
||||||
|
},
|
||||||
|
/// An error associated with a particular directory depth when recursively
|
||||||
|
/// walking a directory.
|
||||||
|
WithDepth {
|
||||||
|
/// The directory depth.
|
||||||
|
depth: usize,
|
||||||
|
/// The underlying error.
|
||||||
|
err: Box<Error>,
|
||||||
|
},
|
||||||
|
/// An error that occurs when a file loop is detected when traversing
|
||||||
|
/// symbolic links.
|
||||||
|
Loop {
|
||||||
|
/// The ancestor file path in the loop.
|
||||||
|
ancestor: PathBuf,
|
||||||
|
/// The child file path in the loop.
|
||||||
|
child: PathBuf,
|
||||||
|
},
|
||||||
|
/// An error that occurs when doing I/O, such as reading an ignore file.
|
||||||
|
Io(io::Error),
|
||||||
|
/// An error that occurs when trying to parse a glob.
|
||||||
|
Glob(String),
|
||||||
|
/// A type selection for a file type that is not defined.
|
||||||
|
UnrecognizedFileType(String),
|
||||||
|
/// A user specified file type definition could not be parsed.
|
||||||
|
InvalidDefinition,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Error {
|
||||||
|
/// Returns true if this is a partial error.
|
||||||
|
///
|
||||||
|
/// A partial error occurs when only some operations failed while others
|
||||||
|
/// may have succeeded. For example, an ignore file may contain an invalid
|
||||||
|
/// glob among otherwise valid globs.
|
||||||
|
pub fn is_partial(&self) -> bool {
|
||||||
|
match *self {
|
||||||
|
Error::Partial(_) => true,
|
||||||
|
Error::WithLineNumber { ref err, .. } => err.is_partial(),
|
||||||
|
Error::WithPath { ref err, .. } => err.is_partial(),
|
||||||
|
Error::WithDepth { ref err, .. } => err.is_partial(),
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if this error is exclusively an I/O error.
|
||||||
|
pub fn is_io(&self) -> bool {
|
||||||
|
match *self {
|
||||||
|
Error::Partial(ref errs) => errs.len() == 1 && errs[0].is_io(),
|
||||||
|
Error::WithLineNumber { ref err, .. } => err.is_io(),
|
||||||
|
Error::WithPath { ref err, .. } => err.is_io(),
|
||||||
|
Error::WithDepth { ref err, .. } => err.is_io(),
|
||||||
|
Error::Loop { .. } => false,
|
||||||
|
Error::Io(_) => true,
|
||||||
|
Error::Glob(_) => false,
|
||||||
|
Error::UnrecognizedFileType(_) => false,
|
||||||
|
Error::InvalidDefinition => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a depth associated with recursively walking a directory (if
|
||||||
|
/// this error was generated from a recursive directory iterator).
|
||||||
|
pub fn depth(&self) -> Option<usize> {
|
||||||
|
match *self {
|
||||||
|
Error::WithPath { ref err, .. } => err.depth(),
|
||||||
|
Error::WithDepth { depth, .. } => Some(depth),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Turn an error into a tagged error with the given file path.
|
||||||
|
fn with_path<P: AsRef<Path>>(self, path: P) -> Error {
|
||||||
|
Error::WithPath {
|
||||||
|
path: path.as_ref().to_path_buf(),
|
||||||
|
err: Box::new(self),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Turn an error into a tagged error with the given depth.
|
||||||
|
fn with_depth(self, depth: usize) -> Error {
|
||||||
|
Error::WithDepth {
|
||||||
|
depth: depth,
|
||||||
|
err: Box::new(self),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Turn an error into a tagged error with the given file path and line
|
||||||
|
/// number. If path is empty, then it is omitted from the error.
|
||||||
|
fn tagged<P: AsRef<Path>>(self, path: P, lineno: u64) -> Error {
|
||||||
|
let errline = Error::WithLineNumber {
|
||||||
|
line: lineno,
|
||||||
|
err: Box::new(self),
|
||||||
|
};
|
||||||
|
if path.as_ref().as_os_str().is_empty() {
|
||||||
|
return errline;
|
||||||
|
}
|
||||||
|
errline.with_path(path)
|
||||||
|
}
|
||||||
|
}
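
// Illustrative sketch (a hypothetical helper, not used elsewhere in this
// crate): one way a caller might triage errors from a walk using the
// accessors defined above.
#[allow(dead_code)]
fn describe(err: &Error) -> String {
    if err.is_partial() {
        format!("partially failed: {}", err)
    } else if let Some(depth) = err.depth() {
        format!("failed at depth {}: {}", depth, err)
    } else {
        format!("failed: {}", err)
    }
}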
|
||||||
|
|
||||||
|
impl error::Error for Error {
|
||||||
|
fn description(&self) -> &str {
|
||||||
|
match *self {
|
||||||
|
Error::Partial(_) => "partial error",
|
||||||
|
Error::WithLineNumber { ref err, .. } => err.description(),
|
||||||
|
Error::WithPath { ref err, .. } => err.description(),
|
||||||
|
Error::WithDepth { ref err, .. } => err.description(),
|
||||||
|
Error::Loop { .. } => "file system loop found",
|
||||||
|
Error::Io(ref err) => err.description(),
|
||||||
|
Error::Glob(ref msg) => msg,
|
||||||
|
Error::UnrecognizedFileType(_) => "unrecognized file type",
|
||||||
|
Error::InvalidDefinition => "invalid definition",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for Error {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
match *self {
|
||||||
|
Error::Partial(ref errs) => {
|
||||||
|
let msgs: Vec<String> =
|
||||||
|
errs.iter().map(|err| err.to_string()).collect();
|
||||||
|
write!(f, "{}", msgs.join("\n"))
|
||||||
|
}
|
||||||
|
Error::WithLineNumber { line, ref err } => {
|
||||||
|
write!(f, "line {}: {}", line, err)
|
||||||
|
}
|
||||||
|
Error::WithPath { ref path, ref err } => {
|
||||||
|
write!(f, "{}: {}", path.display(), err)
|
||||||
|
}
|
||||||
|
Error::WithDepth { ref err, .. } => err.fmt(f),
|
||||||
|
Error::Loop { ref ancestor, ref child } => {
|
||||||
|
write!(f, "File system loop found: \
|
||||||
|
{} points to an ancestor {}",
|
||||||
|
child.display(), ancestor.display())
|
||||||
|
}
|
||||||
|
Error::Io(ref err) => err.fmt(f),
|
||||||
|
Error::Glob(ref msg) => write!(f, "{}", msg),
|
||||||
|
Error::UnrecognizedFileType(ref ty) => {
|
||||||
|
write!(f, "unrecognized file type: {}", ty)
|
||||||
|
}
|
||||||
|
Error::InvalidDefinition => {
|
||||||
|
write!(f, "invalid definition (format is type:glob, e.g., \
|
||||||
|
html:*.html)")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<io::Error> for Error {
|
||||||
|
fn from(err: io::Error) -> Error {
|
||||||
|
Error::Io(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<walkdir::Error> for Error {
|
||||||
|
fn from(err: walkdir::Error) -> Error {
|
||||||
|
let depth = err.depth();
|
||||||
|
if let (Some(anc), Some(child)) = (err.loop_ancestor(), err.path()) {
|
||||||
|
return Error::WithDepth {
|
||||||
|
depth: depth,
|
||||||
|
err: Box::new(Error::Loop {
|
||||||
|
ancestor: anc.to_path_buf(),
|
||||||
|
child: child.to_path_buf(),
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
let path = err.path().map(|p| p.to_path_buf());
|
||||||
|
let mut ig_err = Error::Io(io::Error::from(err));
|
||||||
|
if let Some(path) = path {
|
||||||
|
ig_err = Error::WithPath {
|
||||||
|
path: path,
|
||||||
|
err: Box::new(ig_err),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
ig_err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Default)]
|
||||||
|
struct PartialErrorBuilder(Vec<Error>);
|
||||||
|
|
||||||
|
impl PartialErrorBuilder {
|
||||||
|
fn push(&mut self, err: Error) {
|
||||||
|
self.0.push(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push_ignore_io(&mut self, err: Error) {
|
||||||
|
if !err.is_io() {
|
||||||
|
self.push(err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn maybe_push(&mut self, err: Option<Error>) {
|
||||||
|
if let Some(err) = err {
|
||||||
|
self.push(err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn maybe_push_ignore_io(&mut self, err: Option<Error>) {
|
||||||
|
if let Some(err) = err {
|
||||||
|
self.push_ignore_io(err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn into_error_option(mut self) -> Option<Error> {
|
||||||
|
if self.0.is_empty() {
|
||||||
|
None
|
||||||
|
} else if self.0.len() == 1 {
|
||||||
|
Some(self.0.pop().unwrap())
|
||||||
|
} else {
|
||||||
|
Some(Error::Partial(self.0))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The result of a glob match.
|
||||||
|
///
|
||||||
|
/// The type parameter `T` typically refers to a type that provides more
|
||||||
|
/// information about a particular match. For example, it might identify
|
||||||
|
/// the specific gitignore file and the specific glob pattern that caused
|
||||||
|
/// the match.
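///
/// A rough usage sketch, assuming the `gitignore` module's builder API as used
/// elsewhere in this crate (`GitignoreBuilder::new`, `add_line`, `build`):
///
/// ```rust,ignore
/// use ignore::gitignore::GitignoreBuilder;
///
/// let mut builder = GitignoreBuilder::new("/project");
/// builder.add_line(None, "*.log").unwrap();
/// let matcher = builder.build().unwrap();
/// assert!(matcher.matched("debug.log", false).is_ignore());
/// assert!(matcher.matched("src/main.rs", false).is_none());
/// ```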
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub enum Match<T> {
|
||||||
|
/// The path didn't match any glob.
|
||||||
|
None,
|
||||||
|
/// The highest precedent glob matched indicates the path should be
|
||||||
|
/// ignored.
|
||||||
|
Ignore(T),
|
||||||
|
/// The highest precedent glob matched indicates the path should be
|
||||||
|
/// whitelisted.
|
||||||
|
Whitelist(T),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Match<T> {
|
||||||
|
/// Returns true if the match result didn't match any globs.
|
||||||
|
pub fn is_none(&self) -> bool {
|
||||||
|
match *self {
|
||||||
|
Match::None => true,
|
||||||
|
Match::Ignore(_) | Match::Whitelist(_) => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the match result implies the path should be ignored.
|
||||||
|
pub fn is_ignore(&self) -> bool {
|
||||||
|
match *self {
|
||||||
|
Match::Ignore(_) => true,
|
||||||
|
Match::None | Match::Whitelist(_) => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if the match result implies the path should be
|
||||||
|
/// whitelisted.
|
||||||
|
pub fn is_whitelist(&self) -> bool {
|
||||||
|
match *self {
|
||||||
|
Match::Whitelist(_) => true,
|
||||||
|
Match::None | Match::Ignore(_) => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inverts the match so that `Ignore` becomes `Whitelist` and
|
||||||
|
/// `Whitelist` becomes `Ignore`. A non-match remains the same.
|
||||||
|
pub fn invert(self) -> Match<T> {
|
||||||
|
match self {
|
||||||
|
Match::None => Match::None,
|
||||||
|
Match::Ignore(t) => Match::Whitelist(t),
|
||||||
|
Match::Whitelist(t) => Match::Ignore(t),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the value inside this match if it exists.
|
||||||
|
pub fn inner(&self) -> Option<&T> {
|
||||||
|
match *self {
|
||||||
|
Match::None => None,
|
||||||
|
Match::Ignore(ref t) => Some(t),
|
||||||
|
Match::Whitelist(ref t) => Some(t),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply the given function to the value inside this match.
|
||||||
|
///
|
||||||
|
/// If the match has no value, then return the match unchanged.
|
||||||
|
pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Match<U> {
|
||||||
|
match self {
|
||||||
|
Match::None => Match::None,
|
||||||
|
Match::Ignore(t) => Match::Ignore(f(t)),
|
||||||
|
Match::Whitelist(t) => Match::Whitelist(f(t)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the match if it is not none. Otherwise, return other.
|
||||||
|
pub fn or(self, other: Self) -> Self {
|
||||||
|
if self.is_none() {
|
||||||
|
other
|
||||||
|
} else {
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}

ignore/src/overrides.rs (new file, 223 lines)
@@ -0,0 +1,223 @@
/*!
|
||||||
|
The overrides module provides a way to specify a set of override globs.
|
||||||
|
This provides functionality similar to `--include` or `--exclude` in command
|
||||||
|
line tools.
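
A short sketch of the intended usage (paths and globs are illustrative):

```rust,ignore
use ignore::overrides::OverrideBuilder;

let mut builder = OverrideBuilder::new("/project");
builder.add("*.rs").unwrap();        // behaves like --include '*.rs'
builder.add("!target/**").unwrap();  // behaves like --exclude 'target/**'
let overrides = builder.build().unwrap();

assert!(overrides.matched("src/lib.rs", false).is_whitelist());
assert!(overrides.matched("notes.txt", false).is_ignore());
```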
|
||||||
|
*/
|
||||||
|
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use gitignore::{self, Gitignore, GitignoreBuilder};
|
||||||
|
use {Error, Match};
|
||||||
|
|
||||||
|
/// Glob represents a single glob in an override matcher.
|
||||||
|
///
|
||||||
|
/// This is used to report information about the highest precedent glob
|
||||||
|
/// that matched.
|
||||||
|
///
|
||||||
|
/// Note that not all matches necessarily correspond to a specific glob. For
|
||||||
|
/// example, if there are one or more whitelist globs and a file path doesn't
|
||||||
|
/// match any glob in the set, then the file path is considered to be ignored.
|
||||||
|
///
|
||||||
|
/// The lifetime `'a` refers to the lifetime of the matcher that produced
|
||||||
|
/// this glob.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Glob<'a>(GlobInner<'a>);
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
enum GlobInner<'a> {
|
||||||
|
/// No glob matched, but the file path should still be ignored.
|
||||||
|
UnmatchedIgnore,
|
||||||
|
/// A glob matched.
|
||||||
|
Matched(&'a gitignore::Glob),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Glob<'a> {
|
||||||
|
fn unmatched() -> Glob<'a> {
|
||||||
|
Glob(GlobInner::UnmatchedIgnore)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Manages a set of overrides provided explicitly by the end user.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Override(Gitignore);
|
||||||
|
|
||||||
|
impl Override {
|
||||||
|
/// Returns an empty matcher that never matches any file path.
|
||||||
|
pub fn empty() -> Override {
|
||||||
|
Override(Gitignore::empty())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the directory of this override set.
|
||||||
|
///
|
||||||
|
/// All matches are done relative to this path.
|
||||||
|
pub fn path(&self) -> &Path {
|
||||||
|
self.0.path()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if and only if this matcher is empty.
|
||||||
|
///
|
||||||
|
/// When a matcher is empty, it will never match any file path.
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.0.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the total number of ignore globs.
|
||||||
|
pub fn num_ignores(&self) -> u64 {
|
||||||
|
self.0.num_whitelists()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the total number of whitelisted globs.
|
||||||
|
pub fn num_whitelists(&self) -> u64 {
|
||||||
|
self.0.num_ignores()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns whether the given file path matched a pattern in this override
|
||||||
|
/// matcher.
|
||||||
|
///
|
||||||
|
/// `is_dir` should be true if the path refers to a directory and false
|
||||||
|
/// otherwise.
|
||||||
|
///
|
||||||
|
/// If there are no overrides, then this always returns `Match::None`.
|
||||||
|
///
|
||||||
|
/// If there is at least one whitelist override and `is_dir` is false, then
|
||||||
|
/// this never returns `Match::None`, since non-matches are interpreted as
|
||||||
|
/// ignored.
|
||||||
|
///
|
||||||
|
/// The given path is matched to the globs relative to the path given
|
||||||
|
/// when building the override matcher. Specifically, before matching
|
||||||
|
/// `path`, its prefix (as determined by a common suffix of the directory
|
||||||
|
/// given) is stripped. If there is no common suffix/prefix overlap, then
|
||||||
|
/// `path` is assumed to reside in the same directory as the root path for
|
||||||
|
/// this set of overrides.
|
||||||
|
pub fn matched<'a, P: AsRef<Path>>(
|
||||||
|
&'a self,
|
||||||
|
path: P,
|
||||||
|
is_dir: bool,
|
||||||
|
) -> Match<Glob<'a>> {
|
||||||
|
if self.is_empty() {
|
||||||
|
return Match::None;
|
||||||
|
}
|
||||||
|
let mat = self.0.matched(path, is_dir).invert();
|
||||||
|
if mat.is_none() && self.num_whitelists() > 0 && !is_dir {
|
||||||
|
return Match::Ignore(Glob::unmatched());
|
||||||
|
}
|
||||||
|
mat.map(move |giglob| Glob(GlobInner::Matched(giglob)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds a matcher for a set of glob overrides.
|
||||||
|
pub struct OverrideBuilder {
|
||||||
|
builder: GitignoreBuilder,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OverrideBuilder {
|
||||||
|
/// Create a new override builder.
|
||||||
|
///
|
||||||
|
/// Matching is done relative to the directory path provided.
|
||||||
|
pub fn new<P: AsRef<Path>>(path: P) -> OverrideBuilder {
|
||||||
|
OverrideBuilder {
|
||||||
|
builder: GitignoreBuilder::new(path),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Builds a new override matcher from the globs added so far.
|
||||||
|
///
|
||||||
|
/// Once a matcher is built, no new globs can be added to it.
|
||||||
|
pub fn build(&self) -> Result<Override, Error> {
|
||||||
|
Ok(Override(try!(self.builder.build())))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a glob to the set of overrides.
|
||||||
|
///
|
||||||
|
/// Globs provided here have precisely the same semantics as a single
|
||||||
|
/// line in a `gitignore` file, where the meaning of `!` is inverted:
|
||||||
|
/// namely, `!` at the beginning of a glob will ignore a file. Without `!`,
|
||||||
|
/// all matches of the glob provided are treated as whitelist matches.
|
||||||
|
pub fn add(&mut self, glob: &str) -> Result<&mut OverrideBuilder, Error> {
|
||||||
|
try!(self.builder.add_line(None, glob));
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::{Override, OverrideBuilder};
|
||||||
|
|
||||||
|
const ROOT: &'static str = "/home/andrew/foo";
|
||||||
|
|
||||||
|
fn ov(globs: &[&str]) -> Override {
|
||||||
|
let mut builder = OverrideBuilder::new(ROOT);
|
||||||
|
for glob in globs {
|
||||||
|
builder.add(glob).unwrap();
|
||||||
|
}
|
||||||
|
builder.build().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn empty() {
|
||||||
|
let ov = ov(&[]);
|
||||||
|
assert!(ov.matched("a.foo", false).is_none());
|
||||||
|
assert!(ov.matched("a", false).is_none());
|
||||||
|
assert!(ov.matched("", false).is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple() {
|
||||||
|
let ov = ov(&["*.foo", "!*.bar"]);
|
||||||
|
assert!(ov.matched("a.foo", false).is_whitelist());
|
||||||
|
assert!(ov.matched("a.foo", true).is_whitelist());
|
||||||
|
assert!(ov.matched("a.rs", false).is_ignore());
|
||||||
|
assert!(ov.matched("a.rs", true).is_none());
|
||||||
|
assert!(ov.matched("a.bar", false).is_ignore());
|
||||||
|
assert!(ov.matched("a.bar", true).is_ignore());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn only_ignores() {
|
||||||
|
let ov = ov(&["!*.bar"]);
|
||||||
|
assert!(ov.matched("a.rs", false).is_none());
|
||||||
|
assert!(ov.matched("a.rs", true).is_none());
|
||||||
|
assert!(ov.matched("a.bar", false).is_ignore());
|
||||||
|
assert!(ov.matched("a.bar", true).is_ignore());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn precedence() {
|
||||||
|
let ov = ov(&["*.foo", "!*.bar.foo"]);
|
||||||
|
assert!(ov.matched("a.foo", false).is_whitelist());
|
||||||
|
assert!(ov.matched("a.baz", false).is_ignore());
|
||||||
|
assert!(ov.matched("a.bar.foo", false).is_ignore());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn gitignore() {
|
||||||
|
let ov = ov(&["/foo", "bar/*.rs", "baz/**"]);
|
||||||
|
assert!(ov.matched("bar/wat/lib.rs", false).is_ignore());
|
||||||
|
assert!(ov.matched("wat/bar/lib.rs", false).is_whitelist());
|
||||||
|
assert!(ov.matched("foo", false).is_whitelist());
|
||||||
|
assert!(ov.matched("wat/foo", false).is_ignore());
|
||||||
|
assert!(ov.matched("baz", false).is_ignore());
|
||||||
|
assert!(ov.matched("baz/a", false).is_whitelist());
|
||||||
|
assert!(ov.matched("baz/a/b", false).is_whitelist());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn allow_directories() {
|
||||||
|
// This tests that directories are NOT ignored when they are unmatched.
|
||||||
|
let ov = ov(&["*.rs"]);
|
||||||
|
assert!(ov.matched("foo.rs", false).is_whitelist());
|
||||||
|
assert!(ov.matched("foo.c", false).is_ignore());
|
||||||
|
assert!(ov.matched("foo", false).is_ignore());
|
||||||
|
assert!(ov.matched("foo", true).is_none());
|
||||||
|
assert!(ov.matched("src/foo.rs", false).is_whitelist());
|
||||||
|
assert!(ov.matched("src/foo.c", false).is_ignore());
|
||||||
|
assert!(ov.matched("src/foo", false).is_ignore());
|
||||||
|
assert!(ov.matched("src/foo", true).is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn absolute_path() {
|
||||||
|
let ov = ov(&["!/bar"]);
|
||||||
|
assert!(ov.matched("./foo/bar", false).is_none());
|
||||||
|
}
|
||||||
|
}

ignore/src/pathutil.rs (new file, 108 lines)
@@ -0,0 +1,108 @@
use std::ffi::OsStr;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
/// Returns true if and only if this file path is considered to be hidden.
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
|
||||||
|
if let Some(name) = file_name(path.as_ref()) {
|
||||||
|
name.as_bytes().get(0) == Some(&b'.')
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if and only if this file path is considered to be hidden.
|
||||||
|
#[cfg(not(unix))]
|
||||||
|
pub fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
|
||||||
|
if let Some(name) = file_name(path.as_ref()) {
|
||||||
|
name.to_str().map(|s| s.starts_with(".")).unwrap_or(false)
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Strip `prefix` from the `path` and return the remainder.
|
||||||
|
///
|
||||||
|
/// If `path` doesn't have a prefix `prefix`, then return `None`.
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub fn strip_prefix<'a, P: AsRef<Path> + ?Sized>(
|
||||||
|
prefix: &'a P,
|
||||||
|
path: &'a Path,
|
||||||
|
) -> Option<&'a Path> {
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
|
||||||
|
let prefix = prefix.as_ref().as_os_str().as_bytes();
|
||||||
|
let path = path.as_os_str().as_bytes();
|
||||||
|
if prefix.len() > path.len() || prefix != &path[0..prefix.len()] {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(&Path::new(OsStr::from_bytes(&path[prefix.len()..])))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Strip `prefix` from the `path` and return the remainder.
|
||||||
|
///
|
||||||
|
/// If `path` doesn't have a prefix `prefix`, then return `None`.
|
||||||
|
#[cfg(not(unix))]
|
||||||
|
pub fn strip_prefix<'a, P: AsRef<Path> + ?Sized>(
|
||||||
|
prefix: &'a P,
|
||||||
|
path: &'a Path,
|
||||||
|
) -> Option<&'a Path> {
|
||||||
|
path.strip_prefix(prefix).ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if this file path is just a file name, i.e., its parent is
|
||||||
|
/// the empty string.
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub fn is_file_name<P: AsRef<Path>>(path: P) -> bool {
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
use memchr::memchr;
|
||||||
|
|
||||||
|
let path = path.as_ref().as_os_str().as_bytes();
|
||||||
|
memchr(b'/', path).is_none()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if this file path is just a file name, i.e., its parent is
|
||||||
|
/// the empty string.
|
||||||
|
#[cfg(not(unix))]
|
||||||
|
pub fn is_file_name<P: AsRef<Path>>(path: P) -> bool {
|
||||||
|
path.as_ref().parent().map(|p| p.as_os_str().is_empty()).unwrap_or(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The final component of the path, if it is a normal file.
|
||||||
|
///
|
||||||
|
/// If the path terminates in ., .., or consists solely of a root or prefix,
|
||||||
|
/// file_name will return None.
|
||||||
|
#[cfg(unix)]
|
||||||
|
pub fn file_name<'a, P: AsRef<Path> + ?Sized>(
|
||||||
|
path: &'a P,
|
||||||
|
) -> Option<&'a OsStr> {
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
use memchr::memrchr;
|
||||||
|
|
||||||
|
let path = path.as_ref().as_os_str().as_bytes();
|
||||||
|
if path.is_empty() {
|
||||||
|
return None;
|
||||||
|
} else if path.len() == 1 && path[0] == b'.' {
|
||||||
|
return None;
|
||||||
|
} else if path.last() == Some(&b'.') {
|
||||||
|
return None;
|
||||||
|
} else if path.len() >= 2 && &path[path.len() - 2..] == &b".."[..] {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let last_slash = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0);
|
||||||
|
Some(OsStr::from_bytes(&path[last_slash..]))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The final component of the path, if it is a normal file.
|
||||||
|
///
|
||||||
|
/// If the path terminates in ., .., or consists solely of a root or prefix,
|
||||||
|
/// file_name will return None.
|
||||||
|
#[cfg(not(unix))]
|
||||||
|
pub fn file_name<'a, P: AsRef<Path> + ?Sized>(
|
||||||
|
path: &'a P,
|
||||||
|
) -> Option<&'a OsStr> {
|
||||||
|
path.as_ref().file_name()
|
||||||
|
}
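
// Illustrative behavior sketch (a hypothetical test module, added only to
// demonstrate the documented edge cases of the helpers above).
#[cfg(test)]
mod tests {
    use std::path::Path;

    use super::{file_name, is_file_name, strip_prefix};

    #[test]
    fn sketch() {
        // `file_name` returns None for paths ending in `.` or `..`.
        assert!(file_name(Path::new(".")).is_none());
        assert!(file_name(Path::new("foo/..")).is_none());
        assert_eq!(file_name(Path::new("foo/bar.rs")).unwrap(), "bar.rs");

        // A bare file name contains no separators.
        assert!(is_file_name("bar.rs"));
        assert!(!is_file_name("foo/bar.rs"));

        // Stripping a prefix yields the remainder, or None on a mismatch.
        assert_eq!(strip_prefix("foo/", Path::new("foo/bar")).unwrap(),
                   Path::new("bar"));
        assert!(strip_prefix("baz/", Path::new("foo/bar")).is_none());
    }
}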

ignore/src/types.rs (new file, 674 lines)
@@ -0,0 +1,674 @@
/*!
|
||||||
|
The types module provides a way of associating globs on file names to file
|
||||||
|
types.
|
||||||
|
|
||||||
|
This can be used to match specific types of files. For example, among
|
||||||
|
the default file types provided, the Rust file type is defined to be `*.rs`
|
||||||
|
with name `rust`. Similarly, the C file type is defined to be `*.{c,h}` with
|
||||||
|
name `c`.
|
||||||
|
|
||||||
|
Note that the set of default types may change over time.
|
||||||
|
|
||||||
|
# Example
|
||||||
|
|
||||||
|
This shows how to create and use a simple file type matcher using the default
|
||||||
|
file types defined in this crate.
|
||||||
|
|
||||||
|
```
|
||||||
|
use ignore::types::TypesBuilder;
|
||||||
|
|
||||||
|
let mut builder = TypesBuilder::new();
|
||||||
|
builder.add_defaults();
|
||||||
|
builder.select("rust");
|
||||||
|
let matcher = builder.build().unwrap();
|
||||||
|
|
||||||
|
assert!(matcher.matched("foo.rs", false).is_whitelist());
|
||||||
|
assert!(matcher.matched("foo.c", false).is_ignore());
|
||||||
|
```
|
||||||
|
|
||||||
|
# Example: negation
|
||||||
|
|
||||||
|
This is like the previous example, but shows how negating a file type works.
|
||||||
|
That is, this will let us match file paths that *don't* correspond to a
|
||||||
|
particular file type.
|
||||||
|
|
||||||
|
```
|
||||||
|
use ignore::types::TypesBuilder;
|
||||||
|
|
||||||
|
let mut builder = TypesBuilder::new();
|
||||||
|
builder.add_defaults();
|
||||||
|
builder.negate("c");
|
||||||
|
let matcher = builder.build().unwrap();
|
||||||
|
|
||||||
|
assert!(matcher.matched("foo.rs", false).is_none());
|
||||||
|
assert!(matcher.matched("foo.c", false).is_ignore());
|
||||||
|
```
|
||||||
|
|
||||||
|
# Example: custom file type definitions
|
||||||
|
|
||||||
|
This shows how to extend this library's default file type definitions with
|
||||||
|
your own.
|
||||||
|
|
||||||
|
```
|
||||||
|
use ignore::types::TypesBuilder;
|
||||||
|
|
||||||
|
let mut builder = TypesBuilder::new();
|
||||||
|
builder.add_defaults();
|
||||||
|
builder.add("foo", "*.foo");
|
||||||
|
// Another way of adding a file type definition.
|
||||||
|
// This is useful when accepting input from an end user.
|
||||||
|
builder.add_def("bar:*.bar");
|
||||||
|
// Note: we only select `foo`, not `bar`.
|
||||||
|
builder.select("foo");
|
||||||
|
let matcher = builder.build().unwrap();
|
||||||
|
|
||||||
|
assert!(matcher.matched("x.foo", false).is_whitelist());
|
||||||
|
// This is ignored because we only selected the `foo` file type.
|
||||||
|
assert!(matcher.matched("x.bar", false).is_ignore());
|
||||||
|
```
|
||||||
|
|
||||||
|
We can also add file type definitions based on other definitions.
|
||||||
|
|
||||||
|
```
|
||||||
|
use ignore::types::TypesBuilder;
|
||||||
|
|
||||||
|
let mut builder = TypesBuilder::new();
|
||||||
|
builder.add_defaults();
|
||||||
|
builder.add("foo", "*.foo");
|
||||||
|
builder.add_def("bar:include:foo,cpp");
|
||||||
|
builder.select("bar");
|
||||||
|
let matcher = builder.build().unwrap();
|
||||||
|
|
||||||
|
assert!(matcher.matched("x.foo", false).is_whitelist());
|
||||||
|
assert!(matcher.matched("y.cpp", false).is_whitelist());
|
||||||
|
```
|
||||||
|
*/
|
||||||
|
|
||||||
|
use std::cell::RefCell;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::path::Path;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use globset::{GlobBuilder, GlobSet, GlobSetBuilder};
|
||||||
|
use regex::Regex;
|
||||||
|
use thread_local::ThreadLocal;
|
||||||
|
|
||||||
|
use pathutil::file_name;
|
||||||
|
use {Error, Match};
|
||||||
|
|
||||||
|
const DEFAULT_TYPES: &'static [(&'static str, &'static [&'static str])] = &[
|
||||||
|
("agda", &["*.agda", "*.lagda"]),
|
||||||
|
("asciidoc", &["*.adoc", "*.asc", "*.asciidoc"]),
|
||||||
|
("asm", &["*.asm", "*.s", "*.S"]),
|
||||||
|
("awk", &["*.awk"]),
|
||||||
|
("c", &["*.c", "*.h", "*.H"]),
|
||||||
|
("cbor", &["*.cbor"]),
|
||||||
|
("ceylon", &["*.ceylon"]),
|
||||||
|
("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]),
|
||||||
|
("cmake", &["*.cmake", "CMakeLists.txt"]),
|
||||||
|
("coffeescript", &["*.coffee"]),
|
||||||
|
("creole", &["*.creole"]),
|
||||||
|
("config", &["*.config"]),
|
||||||
|
("cpp", &[
|
||||||
|
"*.C", "*.cc", "*.cpp", "*.cxx",
|
||||||
|
"*.h", "*.H", "*.hh", "*.hpp",
|
||||||
|
]),
|
||||||
|
("crystal", &["Projectfile", "*.cr"]),
|
||||||
|
("cs", &["*.cs"]),
|
||||||
|
("csharp", &["*.cs"]),
|
||||||
|
("css", &["*.css", "*.scss"]),
|
||||||
|
("cython", &["*.pyx"]),
|
||||||
|
("dart", &["*.dart"]),
|
||||||
|
("d", &["*.d"]),
|
||||||
|
("elisp", &["*.el"]),
|
||||||
|
("elixir", &["*.ex", "*.eex", "*.exs"]),
|
||||||
|
("erlang", &["*.erl", "*.hrl"]),
|
||||||
|
("fish", &["*.fish"]),
|
||||||
|
("fortran", &[
|
||||||
|
"*.f", "*.F", "*.f77", "*.F77", "*.pfo",
|
||||||
|
"*.f90", "*.F90", "*.f95", "*.F95",
|
||||||
|
]),
|
||||||
|
("fsharp", &["*.fs", "*.fsx", "*.fsi"]),
|
||||||
|
("go", &["*.go"]),
|
||||||
|
("groovy", &["*.groovy", "*.gradle"]),
|
||||||
|
("h", &["*.h", "*.hpp"]),
|
||||||
|
("hbs", &["*.hbs"]),
|
||||||
|
("haskell", &["*.hs", "*.lhs"]),
|
||||||
|
("html", &["*.htm", "*.html", "*.ejs"]),
|
||||||
|
("java", &["*.java"]),
|
||||||
|
("jinja", &["*.jinja", "*.jinja2"]),
|
||||||
|
("js", &[
|
||||||
|
"*.js", "*.jsx", "*.vue",
|
||||||
|
]),
|
||||||
|
("json", &["*.json"]),
|
||||||
|
("jsonl", &["*.jsonl"]),
|
||||||
|
("kotlin", &["*.kt", "*.kts"]),
|
||||||
|
("less", &["*.less"]),
|
||||||
|
("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]),
|
||||||
|
("log", &["*.log"]),
|
||||||
|
("lua", &["*.lua"]),
|
||||||
|
("m4", &["*.ac", "*.m4"]),
|
||||||
|
("make", &["gnumakefile", "Gnumakefile", "makefile", "Makefile", "*.mk", "*.mak"]),
|
||||||
|
("markdown", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]),
|
||||||
|
("md", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]),
|
||||||
|
("matlab", &["*.m"]),
|
||||||
|
("mk", &["mkfile"]),
|
||||||
|
("ml", &["*.ml"]),
|
||||||
|
("nim", &["*.nim"]),
|
||||||
|
("objc", &["*.h", "*.m"]),
|
||||||
|
("objcpp", &["*.h", "*.mm"]),
|
||||||
|
("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]),
|
||||||
|
("org", &["*.org"]),
|
||||||
|
("perl", &["*.perl", "*.pl", "*.PL", "*.plh", "*.plx", "*.pm", "*.t"]),
|
||||||
|
("pdf", &["*.pdf"]),
|
||||||
|
("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]),
|
||||||
|
("pod", &["*.pod"]),
|
||||||
|
("ps", &["*.cdxml", "*.ps1", "*.ps1xml", "*.psd1", "*.psm1"]),
|
||||||
|
("py", &["*.py"]),
|
||||||
|
("readme", &["README*", "*README"]),
|
||||||
|
("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]),
|
||||||
|
("rdoc", &["*.rdoc"]),
|
||||||
|
("rst", &["*.rst"]),
|
||||||
|
("ruby", &["Gemfile", "*.gemspec", ".irbrc", "Rakefile", "*.rb"]),
|
||||||
|
("rust", &["*.rs"]),
|
||||||
|
("sass", &["*.sass", "*.scss"]),
|
||||||
|
("scala", &["*.scala"]),
|
||||||
|
("sh", &["*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh"]),
|
||||||
|
("spark", &["*.spark"]),
|
||||||
|
("stylus", &["*.styl"]),
|
||||||
|
("sql", &["*.sql"]),
|
||||||
|
("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]),
|
||||||
|
("svg", &["*.svg"]),
|
||||||
|
("swift", &["*.swift"]),
|
||||||
|
("swig", &["*.def", "*.i"]),
|
||||||
|
("taskpaper", &["*.taskpaper"]),
|
||||||
|
("tcl", &["*.tcl"]),
|
||||||
|
("tex", &["*.tex", "*.ltx", "*.cls", "*.sty", "*.bib"]),
|
||||||
|
("textile", &["*.textile"]),
|
||||||
|
("ts", &["*.ts", "*.tsx"]),
|
||||||
|
("txt", &["*.txt"]),
|
||||||
|
("toml", &["*.toml", "Cargo.lock"]),
|
||||||
|
("twig", &["*.twig"]),
|
||||||
|
("vala", &["*.vala"]),
|
||||||
|
("vb", &["*.vb"]),
|
||||||
|
("vim", &["*.vim"]),
|
||||||
|
("vimscript", &["*.vim"]),
|
||||||
|
("wiki", &["*.mediawiki", "*.wiki"]),
|
||||||
|
("xml", &["*.xml"]),
|
||||||
|
("yacc", &["*.y"]),
|
||||||
|
("yaml", &["*.yaml", "*.yml"]),
|
||||||
|
("zsh", &["zshenv", ".zshenv", "zprofile", ".zprofile", "zshrc", ".zshrc", "zlogin", ".zlogin", "zlogout", ".zlogout", "*.zsh"]),
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Glob represents a single glob in a set of file type definitions.
|
||||||
|
///
|
||||||
|
/// There may be more than one glob for a particular file type.
|
||||||
|
///
|
||||||
|
/// This is used to report information about the highest precedent glob
|
||||||
|
/// that matched.
|
||||||
|
///
|
||||||
|
/// Note that not all matches necessarily correspond to a specific glob.
|
||||||
|
/// For example, if there are one or more selections and a file path doesn't
|
||||||
|
/// match any of those selections, then the file path is considered to be
|
||||||
|
/// ignored.
|
||||||
|
///
|
||||||
|
/// The lifetime `'a` refers to the lifetime of the underlying file type
|
||||||
|
/// definition, which corresponds to the lifetime of the file type matcher.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Glob<'a>(GlobInner<'a>);
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
enum GlobInner<'a> {
|
||||||
|
/// No glob matched, but the file path should still be ignored.
|
||||||
|
UnmatchedIgnore,
|
||||||
|
/// A glob matched.
|
||||||
|
Matched {
|
||||||
|
/// The file type definition which provided the glob.
|
||||||
|
def: &'a FileTypeDef,
|
||||||
|
/// The index of the glob that matched inside the file type definition.
|
||||||
|
which: usize,
|
||||||
|
/// Whether the selection was negated or not.
|
||||||
|
negated: bool,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Glob<'a> {
|
||||||
|
fn unmatched() -> Glob<'a> {
|
||||||
|
Glob(GlobInner::UnmatchedIgnore)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A single file type definition.
|
||||||
|
///
|
||||||
|
/// File type definitions can be retrieved in aggregate from a file type
|
||||||
|
/// matcher. A file type definition is also reported when it is responsible
|
||||||
|
/// for a match.
|
||||||
|
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||||
|
pub struct FileTypeDef {
|
||||||
|
name: String,
|
||||||
|
globs: Vec<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FileTypeDef {
|
||||||
|
/// Return the name of this file type.
|
||||||
|
pub fn name(&self) -> &str {
|
||||||
|
&self.name
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the globs used to recognize this file type.
|
||||||
|
pub fn globs(&self) -> &[String] {
|
||||||
|
&self.globs
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Types is a file type matcher.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Types {
|
||||||
|
/// All of the file type definitions, sorted lexicographically by name.
|
||||||
|
defs: Vec<FileTypeDef>,
|
||||||
|
/// All of the selections made by the user.
|
||||||
|
selections: Vec<Selection<FileTypeDef>>,
|
||||||
|
/// Whether there is at least one Selection::Select in our selections.
|
||||||
|
/// When this is true, a Match::None is converted to Match::Ignore.
|
||||||
|
has_selected: bool,
|
||||||
|
/// A mapping from glob index in the set to two indices. The first is an
|
||||||
|
/// index into `selections` and the second is an index into the
|
||||||
|
/// corresponding file type definition's list of globs.
|
||||||
|
glob_to_selection: Vec<(usize, usize)>,
|
||||||
|
/// The set of all glob selections, used for actual matching.
|
||||||
|
set: GlobSet,
|
||||||
|
/// Temporary storage for globs that match.
|
||||||
|
matches: Arc<ThreadLocal<RefCell<Vec<usize>>>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Indicates the type of a selection for a particular file type.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
enum Selection<T> {
|
||||||
|
Select(String, T),
|
||||||
|
Negate(String, T),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Selection<T> {
|
||||||
|
fn is_negated(&self) -> bool {
|
||||||
|
match *self {
|
||||||
|
Selection::Select(..) => false,
|
||||||
|
Selection::Negate(..) => true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
match *self {
|
||||||
|
Selection::Select(ref name, _) => name,
|
||||||
|
Selection::Negate(ref name, _) => name,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Selection<U> {
|
||||||
|
match self {
|
||||||
|
Selection::Select(name, inner) => {
|
||||||
|
Selection::Select(name, f(inner))
|
||||||
|
}
|
||||||
|
Selection::Negate(name, inner) => {
|
||||||
|
Selection::Negate(name, f(inner))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inner(&self) -> &T {
|
||||||
|
match *self {
|
||||||
|
Selection::Select(_, ref inner) => inner,
|
||||||
|
Selection::Negate(_, ref inner) => inner,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Types {
|
||||||
|
/// Creates a new file type matcher that never matches any path and
|
||||||
|
/// contains no file type definitions.
|
||||||
|
pub fn empty() -> Types {
|
||||||
|
Types {
|
||||||
|
defs: vec![],
|
||||||
|
selections: vec![],
|
||||||
|
has_selected: false,
|
||||||
|
glob_to_selection: vec![],
|
||||||
|
set: GlobSetBuilder::new().build().unwrap(),
|
||||||
|
matches: Arc::new(ThreadLocal::default()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if and only if this matcher has zero selections.
|
||||||
|
pub fn is_empty(&self) -> bool {
|
||||||
|
self.selections.is_empty()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the number of selections used in this matcher.
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.selections.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the set of current file type definitions.
|
||||||
|
///
|
||||||
|
/// Definitions and globs are sorted.
|
||||||
|
pub fn definitions(&self) -> &[FileTypeDef] {
|
||||||
|
&self.defs
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a match for the given path against this file type matcher.
|
||||||
|
///
|
||||||
|
/// The path is considered whitelisted if it matches a selected file type.
|
||||||
|
/// The path is considered ignored if it matches a negated file type.
|
||||||
|
/// If at least one file type is selected and `path` doesn't match, then
|
||||||
|
/// the path is also considered ignored.
|
||||||
|
pub fn matched<'a, P: AsRef<Path>>(
|
||||||
|
&'a self,
|
||||||
|
path: P,
|
||||||
|
is_dir: bool,
|
||||||
|
) -> Match<Glob<'a>> {
|
||||||
|
// File types don't apply to directories, and we can't do anything
|
||||||
|
// if our glob set is empty.
|
||||||
|
if is_dir || self.set.is_empty() {
|
||||||
|
return Match::None;
|
||||||
|
}
|
||||||
|
// We only want to match against the file name, so extract it.
|
||||||
|
// If one doesn't exist, then we can't match it.
|
||||||
|
let name = match file_name(path.as_ref()) {
|
||||||
|
Some(name) => name,
|
||||||
|
None if self.has_selected => {
|
||||||
|
return Match::Ignore(Glob::unmatched());
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
return Match::None;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let mut matches = self.matches.get_default().borrow_mut();
|
||||||
|
self.set.matches_into(name, &mut *matches);
|
||||||
|
// The highest precedent match is the last one.
|
||||||
|
if let Some(&i) = matches.last() {
|
||||||
|
let (isel, iglob) = self.glob_to_selection[i];
|
||||||
|
let sel = &self.selections[isel];
|
||||||
|
let glob = Glob(GlobInner::Matched {
|
||||||
|
def: sel.inner(),
|
||||||
|
which: iglob,
|
||||||
|
negated: sel.is_negated(),
|
||||||
|
});
|
||||||
|
return if sel.is_negated() {
|
||||||
|
Match::Ignore(glob)
|
||||||
|
} else {
|
||||||
|
Match::Whitelist(glob)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if self.has_selected {
|
||||||
|
Match::Ignore(Glob::unmatched())
|
||||||
|
} else {
|
||||||
|
Match::None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// TypesBuilder builds a type matcher from a set of file type definitions and
|
||||||
|
/// a set of file type selections.
|
||||||
|
pub struct TypesBuilder {
|
||||||
|
types: HashMap<String, FileTypeDef>,
|
||||||
|
selections: Vec<Selection<()>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TypesBuilder {
|
||||||
|
/// Create a new builder for a file type matcher.
|
||||||
|
///
|
||||||
|
/// The builder contains *no* type definitions to start with. A set
|
||||||
|
/// of default type definitions can be added with `add_defaults`, and
|
||||||
|
/// additional type definitions can be added with `add` and `add_def`.
|
||||||
|
pub fn new() -> TypesBuilder {
|
||||||
|
TypesBuilder {
|
||||||
|
types: HashMap::new(),
|
||||||
|
selections: vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build the current set of file type definitions *and* selections into
|
||||||
|
/// a file type matcher.
|
||||||
|
pub fn build(&self) -> Result<Types, Error> {
|
||||||
|
let defs = self.definitions();
|
||||||
|
let has_selected = self.selections.iter().any(|s| !s.is_negated());
|
||||||
|
|
||||||
|
let mut selections = vec![];
|
||||||
|
let mut glob_to_selection = vec![];
|
||||||
|
let mut build_set = GlobSetBuilder::new();
|
||||||
|
for (isel, selection) in self.selections.iter().enumerate() {
|
||||||
|
let def = match self.types.get(selection.name()) {
|
||||||
|
Some(def) => def.clone(),
|
||||||
|
None => {
|
||||||
|
let name = selection.name().to_string();
|
||||||
|
return Err(Error::UnrecognizedFileType(name));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
for (iglob, glob) in def.globs.iter().enumerate() {
|
||||||
|
build_set.add(try!(
|
||||||
|
GlobBuilder::new(glob)
|
||||||
|
.literal_separator(true)
|
||||||
|
.build()
|
||||||
|
.map_err(|err| Error::Glob(err.to_string()))));
|
||||||
|
glob_to_selection.push((isel, iglob));
|
||||||
|
}
|
||||||
|
selections.push(selection.clone().map(move |_| def));
|
||||||
|
}
|
||||||
|
let set = try!(build_set.build().map_err(|err| {
|
||||||
|
Error::Glob(err.to_string())
|
||||||
|
}));
|
||||||
|
Ok(Types {
|
||||||
|
defs: defs,
|
||||||
|
selections: selections,
|
||||||
|
has_selected: has_selected,
|
||||||
|
glob_to_selection: glob_to_selection,
|
||||||
|
set: set,
|
||||||
|
matches: Arc::new(ThreadLocal::default()),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the set of current file type definitions.
|
||||||
|
///
|
||||||
|
/// Definitions and globs are sorted.
|
||||||
|
pub fn definitions(&self) -> Vec<FileTypeDef> {
|
||||||
|
let mut defs = vec![];
|
||||||
|
for def in self.types.values() {
|
||||||
|
let mut def = def.clone();
|
||||||
|
def.globs.sort();
|
||||||
|
defs.push(def);
|
||||||
|
}
|
||||||
|
defs.sort_by(|def1, def2| def1.name().cmp(def2.name()));
|
||||||
|
defs
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Select the file type given by `name`.
|
||||||
|
///
|
||||||
|
/// If `name` is `all`, then all file types currently defined are selected.
|
||||||
|
pub fn select(&mut self, name: &str) -> &mut TypesBuilder {
|
||||||
|
if name == "all" {
|
||||||
|
for name in self.types.keys() {
|
||||||
|
self.selections.push(Selection::Select(name.to_string(), ()));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
self.selections.push(Selection::Select(name.to_string(), ()));
|
||||||
|
}
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Ignore the file type given by `name`.
|
||||||
|
///
|
||||||
|
/// If `name` is `all`, then all file types currently defined are negated.
|
||||||
|
pub fn negate(&mut self, name: &str) -> &mut TypesBuilder {
|
||||||
|
if name == "all" {
|
||||||
|
for name in self.types.keys() {
|
||||||
|
self.selections.push(Selection::Negate(name.to_string(), ()));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
self.selections.push(Selection::Negate(name.to_string(), ()));
|
||||||
|
}
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Clear any file type definitions for the type name given.
|
||||||
|
pub fn clear(&mut self, name: &str) -> &mut TypesBuilder {
|
||||||
|
self.types.remove(name);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a new file type definition. `name` can be arbitrary and `pat`
|
||||||
|
/// should be a glob recognizing file paths belonging to the `name` type.
|
||||||
|
///
|
||||||
|
/// If `name` is `all` or otherwise contains any character that is not a
|
||||||
|
/// Unicode letter or number, then an error is returned.
|
||||||
|
pub fn add(&mut self, name: &str, glob: &str) -> Result<(), Error> {
|
||||||
|
lazy_static! {
|
||||||
|
static ref RE: Regex = Regex::new(r"^[\pL\pN]+$").unwrap();
|
||||||
|
};
|
||||||
|
if name == "all" || !RE.is_match(name) {
|
||||||
|
return Err(Error::InvalidDefinition);
|
||||||
|
}
|
||||||
|
let (key, glob) = (name.to_string(), glob.to_string());
|
||||||
|
self.types.entry(key).or_insert_with(|| {
|
||||||
|
FileTypeDef { name: name.to_string(), globs: vec![] }
|
||||||
|
}).globs.push(glob);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a new file type definition specified in string form. There are two
|
||||||
|
/// valid formats:
|
||||||
|
/// 1. `{name}:{glob}`. This defines a 'root' definition that associates the
|
||||||
|
/// given name with the given glob.
|
||||||
|
/// 2. `{name}:include:{comma-separated list of already defined names}`.
|
||||||
|
/// This defines an 'include' definition that associates the given name
|
||||||
|
/// with the definitions of the given existing types.
|
||||||
|
/// Names may not include any characters that are not
|
||||||
|
/// Unicode letters or numbers.
|
||||||
|
pub fn add_def(&mut self, def: &str) -> Result<(), Error> {
|
||||||
|
let parts: Vec<&str> = def.split(':').collect();
|
||||||
|
match parts.len() {
|
||||||
|
2 => {
|
||||||
|
let name = parts[0];
|
||||||
|
let glob = parts[1];
|
||||||
|
if name.is_empty() || glob.is_empty() {
|
||||||
|
return Err(Error::InvalidDefinition);
|
||||||
|
}
|
||||||
|
self.add(name, glob)
|
||||||
|
}
|
||||||
|
3 => {
|
||||||
|
let name = parts[0];
|
||||||
|
let types_string = parts[2];
|
||||||
|
if name.is_empty() || parts[1] != "include" || types_string.is_empty() {
|
||||||
|
return Err(Error::InvalidDefinition);
|
||||||
|
}
|
||||||
|
let types = types_string.split(',');
|
||||||
|
// Check ahead of time to ensure that all types specified are
|
||||||
|
// present and fail fast if not.
|
||||||
|
if types.clone().any(|t| !self.types.contains_key(t)) {
|
||||||
|
return Err(Error::InvalidDefinition);
|
||||||
|
}
|
||||||
|
for type_name in types {
|
||||||
|
let globs = self.types.get(type_name).unwrap().globs.clone();
|
||||||
|
for glob in globs {
|
||||||
|
try!(self.add(name, &glob));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
_ => Err(Error::InvalidDefinition)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Add a set of default file type definitions.
|
||||||
|
pub fn add_defaults(&mut self) -> &mut TypesBuilder {
|
||||||
|
static MSG: &'static str = "adding a default type should never fail";
|
||||||
|
for &(name, exts) in DEFAULT_TYPES {
|
||||||
|
for ext in exts {
|
||||||
|
self.add(name, ext).expect(MSG);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
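
// Illustrative sketch (a hypothetical helper, not part of the public API):
// listing all default definitions, similar to what a `--type-list` style
// flag might print.
#[allow(dead_code)]
fn print_type_list() {
    let mut builder = TypesBuilder::new();
    builder.add_defaults();
    for def in builder.definitions() {
        println!("{}: {}", def.name(), def.globs().join(", "));
    }
}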
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::TypesBuilder;
|
||||||
|
|
||||||
|
macro_rules! matched {
|
||||||
|
($name:ident, $types:expr, $sel:expr, $selnot:expr,
|
||||||
|
$path:expr) => {
|
||||||
|
matched!($name, $types, $sel, $selnot, $path, true);
|
||||||
|
};
|
||||||
|
(not, $name:ident, $types:expr, $sel:expr, $selnot:expr,
|
||||||
|
$path:expr) => {
|
||||||
|
matched!($name, $types, $sel, $selnot, $path, false);
|
||||||
|
};
|
||||||
|
($name:ident, $types:expr, $sel:expr, $selnot:expr,
|
||||||
|
$path:expr, $matched:expr) => {
|
||||||
|
#[test]
|
||||||
|
fn $name() {
|
||||||
|
let mut btypes = TypesBuilder::new();
|
||||||
|
for tydef in $types {
|
||||||
|
btypes.add_def(tydef).unwrap();
|
||||||
|
}
|
||||||
|
for sel in $sel {
|
||||||
|
btypes.select(sel);
|
||||||
|
}
|
||||||
|
for selnot in $selnot {
|
||||||
|
btypes.negate(selnot);
|
||||||
|
}
|
||||||
|
let types = btypes.build().unwrap();
|
||||||
|
let mat = types.matched($path, false);
|
||||||
|
assert_eq!($matched, !mat.is_ignore());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn types() -> Vec<&'static str> {
|
||||||
|
vec![
|
||||||
|
"html:*.html",
|
||||||
|
"html:*.htm",
|
||||||
|
"rust:*.rs",
|
||||||
|
"js:*.js",
|
||||||
|
"foo:*.{rs,foo}",
|
||||||
|
"combo:include:html,rust"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
matched!(match1, types(), vec!["rust"], vec![], "lib.rs");
|
||||||
|
matched!(match2, types(), vec!["html"], vec![], "index.html");
|
||||||
|
matched!(match3, types(), vec!["html"], vec![], "index.htm");
|
||||||
|
matched!(match4, types(), vec!["html", "rust"], vec![], "main.rs");
|
||||||
|
matched!(match5, types(), vec![], vec![], "index.html");
|
||||||
|
matched!(match6, types(), vec![], vec!["rust"], "index.html");
|
||||||
|
matched!(match7, types(), vec!["foo"], vec!["rust"], "main.foo");
|
||||||
|
matched!(match8, types(), vec!["combo"], vec![], "index.html");
|
||||||
|
matched!(match9, types(), vec!["combo"], vec![], "lib.rs");
|
||||||
|
|
||||||
|
matched!(not, matchnot1, types(), vec!["rust"], vec![], "index.html");
|
||||||
|
matched!(not, matchnot2, types(), vec![], vec!["rust"], "main.rs");
|
||||||
|
matched!(not, matchnot3, types(), vec!["foo"], vec!["rust"], "main.rs");
|
||||||
|
matched!(not, matchnot4, types(), vec!["rust"], vec!["foo"], "main.rs");
|
||||||
|
matched!(not, matchnot5, types(), vec!["rust"], vec!["foo"], "main.foo");
|
||||||
|
matched!(not, matchnot6, types(), vec!["combo"], vec![], "leftpad.js");
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_invalid_defs() {
|
||||||
|
let mut btypes = TypesBuilder::new();
|
||||||
|
for tydef in types() {
|
||||||
|
btypes.add_def(tydef).unwrap();
|
||||||
|
}
|
||||||
|
// Preserve the original definitions for later comparison.
|
||||||
|
let original_defs = btypes.definitions();
|
||||||
|
let bad_defs = vec![
|
||||||
|
// Reference to type that does not exist
|
||||||
|
"combo:include:html,python",
|
||||||
|
// Bad format
|
||||||
|
"combo:foobar:html,rust",
|
||||||
|
""
|
||||||
|
];
|
||||||
|
for def in bad_defs {
|
||||||
|
assert!(btypes.add_def(def).is_err());
|
||||||
|
// Ensure that nothing changed, even if some of the includes were valid.
|
||||||
|
assert_eq!(btypes.definitions(), original_defs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}

ignore/src/walk.rs (new file, 1565 lines)
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
 # Contributor: Andrew Gallant <jamslam@gmail.com>
 # Maintainer: Andrew Gallant
 pkgname=ripgrep
-pkgver=0.1.16
+pkgver=0.2.3
 pkgrel=1
 pkgdesc="A search tool that combines the usability of The Silver Searcher with the raw speed of grep."
 arch=('i686' 'x86_64')
@@ -9,7 +9,7 @@ url="https://github.com/BurntSushi/ripgrep"
 license=('UNLICENSE')
 makedepends=('cargo')
 source=("https://github.com/BurntSushi/$pkgname/archive/$pkgver.tar.gz")
-sha256sums=('6f877018742c9a7557102ccebeedb40d7c779b470a5910a7bdab50ca2ce21532')
+sha256sums=('a88531558d2023df76190ea2e52bee50d739eabece8a57df29abbad0c6bdb917')

 build() {
   cd "$pkgname-$pkgver"
@@ -29,8 +29,9 @@ package() {

   install -Dm755 "target/release/rg" "$pkgdir/usr/bin/rg"
   install -Dm644 "doc/rg.1" "$pkgdir/usr/share/man/man1/rg.1"
-  install -Dm644 "README-NEW.md" "$pkgdir/usr/share/doc/ripgrep/README.md"
+  install -Dm644 "README.md" "$pkgdir/usr/share/doc/ripgrep/README.md"
   install -Dm644 "COPYING" "$pkgdir/usr/share/doc/ripgrep/COPYING"
   install -Dm644 "LICENSE-MIT" "$pkgdir/usr/share/doc/ripgrep/LICENSE-MIT"
   install -Dm644 "UNLICENSE" "$pkgdir/usr/share/doc/ripgrep/UNLICENSE"
+  install -Dm644 "CHANGELOG.md" "$pkgdir/usr/share/doc/ripgrep/CHANGELOG.md"
 }

@@ -1,14 +1,18 @@
 class RipgrepBin < Formula
-  version '0.2.1'
+  version '0.5.0'
   desc "Search tool like grep and The Silver Searcher."
   homepage "https://github.com/BurntSushi/ripgrep"
   url "https://github.com/BurntSushi/ripgrep/releases/download/#{version}/ripgrep-#{version}-x86_64-apple-darwin.tar.gz"
-  sha256 "f8b208239b988708da2e58f848a75bf70ad144e201b3ed99cd323cc5a699625f"
+  sha256 "5bfa8872c4f2a5d010ddec1c213d518056e62d4dd3b3f23a0ef099b85343dbdd"

   conflicts_with "ripgrep"

   def install
     bin.install "rg"
     man1.install "rg.1"
+
+    bash_completion.install "complete/rg.bash-completion"
+    fish_completion.install "complete/rg.fish"
+    zsh_completion.install "complete/_rg"
   end
 end

src/app.rs (new file, 511 lines)
@@ -0,0 +1,511 @@
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use clap::{App, AppSettings, Arg, ArgSettings};
|
||||||
|
|
||||||
|
const ABOUT: &'static str = "
|
||||||
|
ripgrep (rg) recursively searches your current directory for a regex pattern.
|
||||||
|
|
||||||
|
ripgrep's regex engine uses finite automata and guarantees linear time
|
||||||
|
searching. Because of this, features like backreferences and arbitrary
|
||||||
|
lookaround are not supported.
|
||||||
|
|
||||||
|
Project home page: https://github.com/BurntSushi/ripgrep
|
||||||
|
|
||||||
|
Use -h for short descriptions and --help for more details.";
|
||||||
|
|
||||||
|
const USAGE: &'static str = "
|
||||||
|
rg [OPTIONS] <pattern> [<path> ...]
|
||||||
|
rg [OPTIONS] [-e PATTERN | -f FILE ]... [<path> ...]
|
||||||
|
rg [OPTIONS] --files [<path> ...]
|
||||||
|
rg [OPTIONS] --type-list";
|
||||||
|
|
||||||
|
const TEMPLATE: &'static str = "\
|
||||||
|
{bin} {version}
|
||||||
|
{author}
|
||||||
|
{about}
|
||||||
|
|
||||||
|
USAGE:{usage}
|
||||||
|
|
||||||
|
ARGS:
|
||||||
|
{positionals}
|
||||||
|
|
||||||
|
OPTIONS:
|
||||||
|
{unified}";
|
||||||
|
|
||||||
|
/// Build a clap application parameterized by usage strings.
|
||||||
|
///
|
||||||
|
/// The function given should take a clap argument name and return a help
|
||||||
|
/// string. `app` will panic if a usage string is not defined.
|
||||||
|
///
|
||||||
|
/// This is an intentionally stand-alone module so that it can be used easily
|
||||||
|
/// in a `build.rs` script to build shell completion files.
|
||||||
|
pub fn app() -> App<'static, 'static> {
|
||||||
|
let arg = |name| {
|
||||||
|
Arg::with_name(name).help(USAGES[name].short).long_help(USAGES[name].long)
|
||||||
|
};
|
||||||
|
let flag = |name| arg(name).long(name);
|
||||||
|
|
||||||
|
App::new("ripgrep")
|
||||||
|
.author(crate_authors!())
|
||||||
|
.version(crate_version!())
|
||||||
|
.about(ABOUT)
|
||||||
|
.max_term_width(100)
|
||||||
|
.setting(AppSettings::UnifiedHelpMessage)
|
||||||
|
.usage(USAGE)
|
||||||
|
.template(TEMPLATE)
|
||||||
|
.help_message("Prints help information. Use --help for more details.")
|
||||||
|
// First, set up primary positional/flag arguments.
|
||||||
|
.arg(arg("pattern")
|
||||||
|
.required_unless_one(&[
|
||||||
|
"file", "files", "help-short", "help", "regexp", "type-list",
|
||||||
|
"ripgrep-version",
|
||||||
|
]))
|
||||||
|
.arg(arg("path").multiple(true))
|
||||||
|
.arg(flag("regexp").short("e")
|
||||||
|
.takes_value(true).multiple(true).number_of_values(1)
|
||||||
|
.set(ArgSettings::AllowLeadingHyphen)
|
||||||
|
.value_name("pattern"))
|
||||||
|
.arg(flag("files")
|
||||||
|
// This should also conflict with `pattern`, but the first file
|
||||||
|
// path will actually be in `pattern`.
|
||||||
|
.conflicts_with_all(&["file", "regexp", "type-list"]))
|
||||||
|
.arg(flag("type-list")
|
||||||
|
.conflicts_with_all(&["file", "files", "pattern", "regexp"]))
|
||||||
|
// Second, set up common flags.
|
||||||
|
.arg(flag("text").short("a"))
|
||||||
|
.arg(flag("count").short("c"))
|
||||||
|
.arg(flag("color")
|
||||||
|
.value_name("WHEN")
|
||||||
|
.takes_value(true)
|
||||||
|
.hide_possible_values(true)
|
||||||
|
.possible_values(&["never", "auto", "always", "ansi"]))
|
||||||
|
.arg(flag("colors").value_name("SPEC")
|
||||||
|
.takes_value(true).multiple(true).number_of_values(1))
|
||||||
|
.arg(flag("encoding").short("E").value_name("ENCODING")
|
||||||
|
.takes_value(true).number_of_values(1))
|
||||||
|
.arg(flag("fixed-strings").short("F"))
|
||||||
|
.arg(flag("glob").short("g")
|
||||||
|
.takes_value(true).multiple(true).number_of_values(1)
|
||||||
|
.value_name("GLOB"))
|
||||||
|
.arg(flag("ignore-case").short("i"))
|
||||||
|
.arg(flag("line-number").short("n"))
|
||||||
|
.arg(flag("no-line-number").short("N"))
|
||||||
|
.arg(flag("quiet").short("q"))
|
||||||
|
.arg(flag("type").short("t")
|
||||||
|
.takes_value(true).multiple(true).number_of_values(1)
|
||||||
|
.value_name("TYPE"))
|
||||||
|
.arg(flag("type-not").short("T")
|
||||||
|
.takes_value(true).multiple(true).number_of_values(1)
|
||||||
|
.value_name("TYPE"))
|
||||||
|
.arg(flag("unrestricted").short("u")
|
||||||
|
.multiple(true))
|
||||||
|
.arg(flag("invert-match").short("v"))
|
||||||
|
.arg(flag("word-regexp").short("w"))
|
||||||
|
// Third, set up less common flags.
|
||||||
|
.arg(flag("after-context").short("A")
|
||||||
|
.value_name("NUM").takes_value(true)
|
||||||
|
.validator(validate_number))
|
||||||
|
.arg(flag("before-context").short("B")
|
||||||
|
.value_name("NUM").takes_value(true)
|
||||||
|
.validator(validate_number))
|
||||||
|
.arg(flag("context").short("C")
|
||||||
|
.value_name("NUM").takes_value(true)
|
||||||
|
.validator(validate_number))
|
||||||
|
.arg(flag("column"))
|
||||||
|
.arg(flag("context-separator")
|
||||||
|
.value_name("SEPARATOR").takes_value(true))
|
||||||
|
.arg(flag("debug"))
|
||||||
|
.arg(flag("file").short("f")
|
||||||
|
.value_name("FILE").takes_value(true)
|
||||||
|
.multiple(true).number_of_values(1))
|
||||||
|
.arg(flag("files-with-matches").short("l"))
|
||||||
|
.arg(flag("files-without-match"))
|
||||||
|
.arg(flag("with-filename").short("H"))
|
||||||
|
.arg(flag("no-filename"))
|
||||||
|
.arg(flag("heading").overrides_with("no-heading"))
|
||||||
|
.arg(flag("no-heading").overrides_with("heading"))
|
||||||
|
.arg(flag("hidden"))
|
||||||
|
.arg(flag("ignore-file")
|
||||||
|
.value_name("FILE").takes_value(true)
|
||||||
|
.multiple(true).number_of_values(1))
|
||||||
|
.arg(flag("follow").short("L"))
|
||||||
|
.arg(flag("max-count")
|
||||||
|
.short("m").value_name("NUM").takes_value(true)
|
||||||
|
.validator(validate_number))
|
||||||
|
.arg(flag("max-filesize")
|
||||||
|
.value_name("NUM+SUFFIX?").takes_value(true))
|
||||||
|
.arg(flag("maxdepth")
|
||||||
|
.value_name("NUM").takes_value(true)
|
||||||
|
.validator(validate_number))
|
||||||
|
.arg(flag("mmap"))
|
||||||
|
.arg(flag("no-messages"))
|
||||||
|
.arg(flag("no-mmap"))
|
||||||
|
.arg(flag("no-ignore"))
|
||||||
|
.arg(flag("no-ignore-parent"))
|
||||||
|
.arg(flag("no-ignore-vcs"))
|
||||||
|
.arg(flag("null").short("0"))
|
||||||
|
.arg(flag("only-matching").short("o").conflicts_with("replace"))
|
||||||
|
.arg(flag("path-separator").value_name("SEPARATOR").takes_value(true))
|
||||||
|
.arg(flag("pretty").short("p"))
|
||||||
|
.arg(flag("replace").short("r").value_name("ARG").takes_value(true))
|
||||||
|
.arg(flag("case-sensitive").short("s"))
|
||||||
|
.arg(flag("smart-case").short("S"))
|
||||||
|
.arg(flag("sort-files"))
|
||||||
|
.arg(flag("threads")
|
||||||
|
.short("j").value_name("ARG").takes_value(true)
|
||||||
|
.validator(validate_number))
|
||||||
|
.arg(flag("vimgrep"))
|
||||||
|
.arg(flag("max-columns").short("M")
|
||||||
|
.value_name("NUM").takes_value(true)
|
||||||
|
.validator(validate_number))
|
||||||
|
.arg(flag("type-add")
|
||||||
|
.value_name("TYPE").takes_value(true)
|
||||||
|
.multiple(true).number_of_values(1))
|
||||||
|
.arg(flag("type-clear")
|
||||||
|
.value_name("TYPE").takes_value(true)
|
||||||
|
.multiple(true).number_of_values(1))
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Usage {
|
||||||
|
short: &'static str,
|
||||||
|
long: &'static str,
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! doc {
|
||||||
|
($map:expr, $name:expr, $short:expr) => {
|
||||||
|
doc!($map, $name, $short, $short)
|
||||||
|
};
|
||||||
|
($map:expr, $name:expr, $short:expr, $long:expr) => {
|
||||||
|
$map.insert($name, Usage {
|
||||||
|
short: $short,
|
||||||
|
long: concat!($long, "\n "),
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref USAGES: HashMap<&'static str, Usage> = {
|
||||||
|
let mut h = HashMap::new();
|
||||||
|
doc!(h, "help-short",
|
||||||
|
"Show short help output.",
|
||||||
|
"Show short help output. Use --help to show more details.");
|
||||||
|
doc!(h, "help",
|
||||||
|
"Show verbose help output.",
|
||||||
|
"When given, more details about flags are provided.");
|
||||||
|
doc!(h, "ripgrep-version",
|
||||||
|
"Prints version information.");
|
||||||
|
|
||||||
|
doc!(h, "pattern",
|
||||||
|
"A regular expression used for searching.",
|
||||||
|
"A regular expression used for searching. Multiple patterns \
|
||||||
|
may be given. To match a pattern beginning with a -, use [-].");
|
||||||
|
doc!(h, "regexp",
|
||||||
|
"A regular expression used for searching.",
|
||||||
|
"A regular expression used for searching. Multiple patterns \
|
||||||
|
may be given. To match a pattern beginning with a -, use [-].");
|
||||||
|
doc!(h, "path",
|
||||||
|
"A file or directory to search.",
|
||||||
|
"A file or directory to search. Directories are searched \
|
||||||
|
recursively.");
|
||||||
|
doc!(h, "files",
|
||||||
|
"Print each file that would be searched.",
|
||||||
|
"Print each file that would be searched without actually \
|
||||||
|
performing the search. This is useful to determine whether a \
|
||||||
|
particular file is being searched or not.");
|
||||||
|
doc!(h, "type-list",
|
||||||
|
"Show all supported file types.",
|
||||||
|
"Show all supported file types and their corresponding globs.");
|
||||||
|
|
||||||
|
doc!(h, "text",
|
||||||
|
"Search binary files as if they were text.");
|
||||||
|
doc!(h, "count",
|
||||||
|
"Only show count of matches for each file.");
|
||||||
|
doc!(h, "color",
|
||||||
|
"When to use color. [default: auto]",
|
||||||
|
"When to use color in the output. The possible values are \
|
||||||
|
never, auto, always or ansi. The default is auto. When always \
|
||||||
|
is used, coloring is attempted based on your environment. When \
|
||||||
|
ansi is used, coloring is forcefully done using ANSI escape color \
|
||||||
|
codes.");
|
||||||
|
doc!(h, "colors",
|
||||||
|
"Configure color settings and styles.",
|
||||||
|
"This flag specifies color settings for use in the output. \
|
||||||
|
This flag may be provided multiple times. Settings are applied \
|
||||||
|
iteratively. Colors are limited to one of eight choices: \
|
||||||
|
red, blue, green, cyan, magenta, yellow, white and black. \
|
||||||
|
Styles are limited to nobold, bold, nointense or intense.\n\n\
|
||||||
|
The format of the flag is {type}:{attribute}:{value}. {type} \
|
||||||
|
should be one of path, line, column or match. {attribute} can \
|
||||||
|
be fg, bg or style. {value} is either a color (for fg and bg) \
|
||||||
|
or a text style. A special format, {type}:none, will clear all \
|
||||||
|
color settings for {type}.\n\nFor example, the following \
|
||||||
|
command will change the match color to magenta and the \
|
||||||
|
background color for line numbers to yellow:\n\n\
|
||||||
|
rg --colors 'match:fg:magenta' --colors 'line:bg:yellow' foo.");
|
||||||
|
doc!(h, "encoding",
|
||||||
|
"Specify the text encoding of files to search.",
|
||||||
|
"Specify the text encoding that ripgrep will use on all files \
|
||||||
|
searched. The default value is 'auto', which will cause ripgrep \
|
||||||
|
to do a best effort automatic detection of encoding on a \
|
||||||
|
per-file basis. Other supported values can be found in the list \
|
||||||
|
of labels here: \
|
||||||
|
https://encoding.spec.whatwg.org/#concept-encoding-get");
|
||||||
|
doc!(h, "fixed-strings",
|
||||||
|
"Treat the pattern as a literal string.",
|
||||||
|
"Treat the pattern as a literal string instead of a regular \
|
||||||
|
expression. When this flag is used, special regular expression \
|
||||||
|
meta characters such as (){}*+. do not need to be escaped.");
|
||||||
|
doc!(h, "glob",
|
||||||
|
"Include or exclude files/directories.",
|
||||||
|
"Include or exclude files/directories for searching that \
|
||||||
|
match the given glob. This always overrides any other \
|
||||||
|
ignore logic. Multiple glob flags may be used. Globbing \
|
||||||
|
rules match .gitignore globs. Precede a glob with a ! \
|
||||||
|
to exclude it.");
|
||||||
|
doc!(h, "ignore-case",
|
||||||
|
"Case insensitive search.",
|
||||||
|
"Case insensitive search. This is overridden by \
|
||||||
|
--case-sensitive.");
|
||||||
|
doc!(h, "line-number",
|
||||||
|
"Show line numbers.",
|
||||||
|
"Show line numbers (1-based). This is enabled by default when \
|
||||||
|
searching in a tty.");
|
||||||
|
doc!(h, "no-line-number",
|
||||||
|
"Suppress line numbers.",
|
||||||
|
"Suppress line numbers. This is enabled by default when NOT \
|
||||||
|
searching in a tty.");
|
||||||
|
doc!(h, "quiet",
|
||||||
|
"Do not print anything to stdout.",
|
||||||
|
"Do not print anything to stdout. If a match is found in a file, \
|
||||||
|
stop searching. This is useful when ripgrep is used only for \
|
||||||
|
its exit code.");
|
||||||
|
doc!(h, "type",
|
||||||
|
"Only search files matching TYPE.",
|
||||||
|
"Only search files matching TYPE. Multiple type flags may be \
|
||||||
|
provided. Use the --type-list flag to list all available \
|
||||||
|
types.");
|
||||||
|
doc!(h, "type-not",
|
||||||
|
"Do not search files matching TYPE.",
|
||||||
|
"Do not search files matching TYPE. Multiple type-not flags may \
|
||||||
|
be provided. Use the --type-list flag to list all available \
|
||||||
|
types.");
|
||||||
|
doc!(h, "unrestricted",
|
||||||
|
"Reduce the level of \"smart\" searching.",
|
||||||
|
"Reduce the level of \"smart\" searching. A single -u \
|
||||||
|
won't respect .gitignore (etc.) files. Two -u flags will \
|
||||||
|
additionally search hidden files and directories. Three \
|
||||||
|
-u flags will additionally search binary files. -uu is \
|
||||||
|
roughly equivalent to grep -r and -uuu is roughly \
|
||||||
|
equivalent to grep -a -r.");
|
||||||
|
doc!(h, "invert-match",
|
||||||
|
"Invert matching.",
|
||||||
|
"Invert matching. Show lines that don't match given patterns.");
|
||||||
|
doc!(h, "word-regexp",
|
||||||
|
"Only show matches surrounded by word boundaries.",
|
||||||
|
"Only show matches surrounded by word boundaries. This is \
|
||||||
|
equivalent to putting \\b before and after all of the search \
|
||||||
|
patterns.");
|
||||||
|
|
||||||
|
doc!(h, "after-context",
|
||||||
|
"Show NUM lines after each match.");
|
||||||
|
doc!(h, "before-context",
|
||||||
|
"Show NUM lines before each match.");
|
||||||
|
doc!(h, "context",
|
||||||
|
"Show NUM lines before and after each match.");
|
||||||
|
doc!(h, "column",
|
||||||
|
"Show column numbers",
|
||||||
|
"Show column numbers (1-based). This only shows the column \
|
||||||
|
numbers for the first match on each line. This does not try \
|
||||||
|
to account for Unicode. One byte is equal to one column. This \
|
||||||
|
implies --line-number.");
|
||||||
|
doc!(h, "context-separator",
|
||||||
|
"Set the context separator string. [default: --]",
|
||||||
|
"The string used to separate non-contiguous context lines in the \
|
||||||
|
output. Escape sequences like \\x7F or \\t may be used. The \
|
||||||
|
default value is --.");
|
||||||
|
doc!(h, "debug",
|
||||||
|
"Show debug messages.",
|
||||||
|
"Show debug messages. Please use this when filing a bug report.");
|
||||||
|
doc!(h, "file",
|
||||||
|
"Search for patterns from the given file.",
|
||||||
|
"Search for patterns from the given file, with one pattern per \
|
||||||
|
line. When this flag is used multiple times or in \
|
||||||
|
combination with the -e/--regexp flag, then all patterns \
|
||||||
|
provided are searched. Empty pattern lines will match all input \
|
||||||
|
lines, and the newline is not counted as part of the pattern.");
|
||||||
|
doc!(h, "files-with-matches",
|
||||||
|
"Only show the paths with at least one match.");
|
||||||
|
doc!(h, "files-without-match",
|
||||||
|
"Only show the paths that contains zero matches.");
|
||||||
|
doc!(h, "with-filename",
|
||||||
|
"Show file name for each match.",
|
||||||
|
"Prefix each match with the file name that contains it. This is \
|
||||||
|
the default when more than one file is searched.");
|
||||||
|
doc!(h, "no-filename",
|
||||||
|
"Never show the file name for a match.",
|
||||||
|
"Never show the file name for a match. This is the default when \
|
||||||
|
one file is searched.");
|
||||||
|
doc!(h, "heading",
|
||||||
|
"Show matches grouped by each file.",
|
||||||
|
"This shows the file name above clusters of matches from each \
|
||||||
|
file instead of showing the file name for every match. This is \
|
||||||
|
the default mode at a tty.");
|
||||||
|
doc!(h, "no-heading",
|
||||||
|
"Don't group matches by each file.",
|
||||||
|
"Don't group matches by each file. If -H/--with-filename is \
|
||||||
|
enabled, then file names will be shown for every line matched. \
|
||||||
|
This is the default mode when not at a tty.");
|
||||||
|
doc!(h, "hidden",
|
||||||
|
"Search hidden files and directories.",
|
||||||
|
"Search hidden files and directories. By default, hidden files \
|
||||||
|
and directories are skipped.");
|
||||||
|
doc!(h, "ignore-file",
|
||||||
|
"Specify additional ignore files.",
|
||||||
|
"Specify additional ignore files for filtering file paths. \
|
||||||
|
Ignore files should be in the gitignore format and are matched \
|
||||||
|
relative to the current working directory. These ignore files \
|
||||||
|
have lower precedence than all other ignore files. When \
|
||||||
|
specifying multiple ignore files, earlier files have lower \
|
||||||
|
precedence than later files.");
|
||||||
|
doc!(h, "follow",
|
||||||
|
"Follow symbolic links.");
|
||||||
|
doc!(h, "max-count",
|
||||||
|
"Limit the number of matches.",
|
||||||
|
"Limit the number of matching lines per file searched to NUM.");
|
||||||
|
doc!(h, "max-filesize",
|
||||||
|
"Ignore files larger than NUM in size.",
|
||||||
|
"Ignore files larger than NUM in size. Does not ignore \
|
||||||
|
directories. \
|
||||||
|
\n\nThe input format accepts suffixes of K, M or G which \
|
||||||
|
correspond to kilobytes, megabytes and gigabytes. If no suffix \
|
||||||
|
is provided the input is treated as bytes. \
|
||||||
|
\n\nExample: --max-filesize 50K or --max-filesize 80M");
|
||||||
|
doc!(h, "maxdepth",
|
||||||
|
"Descend at most NUM directories.",
|
||||||
|
"Limit the depth of directory traversal to NUM levels beyond \
|
||||||
|
the paths given. A value of zero only searches the \
|
||||||
|
starting-points themselves.\n\nFor example, \
|
||||||
|
'rg --maxdepth 0 dir/' is a no-op because dir/ will not be \
|
||||||
|
descended into. 'rg --maxdepth 1 dir/' will search only the \
|
||||||
|
direct children of dir/.");
|
||||||
|
doc!(h, "mmap",
|
||||||
|
"Searching using memory maps when possible.",
|
||||||
|
"Search using memory maps when possible. This is enabled by \
|
||||||
|
default when ripgrep thinks it will be faster. Note that memory \
|
||||||
|
map searching doesn't currently support all options, so if an \
|
||||||
|
incompatible option (e.g., --context) is given with --mmap, \
|
||||||
|
then memory maps will not be used.");
|
||||||
|
doc!(h, "no-messages",
|
||||||
|
"Suppress all error messages.",
|
||||||
|
"Suppress all error messages. This is equivalent to redirecting \
|
||||||
|
stderr to /dev/null.");
|
||||||
|
doc!(h, "no-mmap",
|
||||||
|
"Never use memory maps.",
|
||||||
|
"Never use memory maps, even when they might be faster.");
|
||||||
|
doc!(h, "no-ignore",
|
||||||
|
"Don't respect ignore files.",
|
||||||
|
"Don't respect ignore files (.gitignore, .ignore, etc.). This \
|
||||||
|
implies --no-ignore-parent and --no-ignore-vcs.");
|
||||||
|
doc!(h, "no-ignore-parent",
|
||||||
|
"Don't respect ignore files in parent directories.",
|
||||||
|
"Don't respect ignore files (.gitignore, .ignore, etc.) in \
|
||||||
|
parent directories.");
|
||||||
|
doc!(h, "no-ignore-vcs",
|
||||||
|
"Don't respect VCS ignore files",
|
||||||
|
"Don't respect version control ignore files (.gitignore, etc.). \
|
||||||
|
This implies --no-ignore-parent. Note that .ignore files will \
|
||||||
|
continue to be respected.");
|
||||||
|
doc!(h, "null",
|
||||||
|
"Print NUL byte after file names",
|
||||||
|
"Whenever a file name is printed, follow it with a NUL byte. \
|
||||||
|
This includes printing file names before matches, and when \
|
||||||
|
printing a list of matching files such as with --count, \
|
||||||
|
--files-with-matches and --files. This option is useful for use \
|
||||||
|
with xargs.");
|
||||||
|
doc!(h, "only-matching",
|
||||||
|
"Print only matched parts of a line.",
|
||||||
|
"Print only the matched (non-empty) parts of a matching line, \
|
||||||
|
with each such part on a separate output line.");
|
||||||
|
doc!(h, "path-separator",
|
||||||
|
"Path separator to use when printing file paths.",
|
||||||
|
"The path separator to use when printing file paths. This \
|
||||||
|
defaults to your platform's path separator, which is / on Unix \
|
||||||
|
and \\ on Windows. This flag is intended for overriding the \
|
||||||
|
default when the environment demands it (e.g., cygwin). A path \
|
||||||
|
separator is limited to a single byte.");
|
||||||
|
doc!(h, "pretty",
|
||||||
|
"Alias for --color always --heading -n.");
|
||||||
|
doc!(h, "replace",
|
||||||
|
"Replace matches with string given.",
|
||||||
|
"Replace every match with the string given when printing \
|
||||||
|
results. Neither this flag nor any other flag will modify your \
|
||||||
|
files.\n\nCapture group indices (e.g., $5) and names \
|
||||||
|
(e.g., $foo) are supported in the replacement string.\n\n\
|
||||||
|
Note that the replacement by default replaces each match, and \
|
||||||
|
NOT the entire line. To replace the entire line, you should \
|
||||||
|
match the entire line.");
|
||||||
|
doc!(h, "case-sensitive",
|
||||||
|
"Search case sensitively.",
|
||||||
|
"Search case sensitively. This overrides -i/--ignore-case and \
|
||||||
|
-S/--smart-case.");
|
||||||
|
doc!(h, "smart-case",
|
||||||
|
"Smart case search.",
|
||||||
|
"Searches case insensitively if the pattern is all lowercase. \
|
||||||
|
Search case sensitively otherwise. This is overridden by \
|
||||||
|
either -s/--case-sensitive or -i/--ignore-case.");
|
||||||
|
doc!(h, "sort-files",
|
||||||
|
"Sort results by file path. Implies --threads=1.",
|
||||||
|
"Sort results by file path. Note that this currently \
|
||||||
|
disables all parallelism and runs search in a single thread.");
|
||||||
|
doc!(h, "threads",
|
||||||
|
"The approximate number of threads to use.",
|
||||||
|
"The approximate number of threads to use. A value of 0 (which \
|
||||||
|
is the default) causes ripgrep to choose the thread count \
|
||||||
|
using heuristics.");
|
||||||
|
doc!(h, "vimgrep",
|
||||||
|
"Show results in vim compatible format.",
|
||||||
|
"Show results with every match on its own line, including \
|
||||||
|
line numbers and column numbers. With this option, a line with \
|
||||||
|
more than one match will be printed more than once.");
|
||||||
|
doc!(h, "max-columns",
|
||||||
|
"Don't print lines longer than this limit in bytes.",
|
||||||
|
"Don't print lines longer than this limit in bytes. Longer lines \
|
||||||
|
are omitted, and only the number of matches in that line is \
|
||||||
|
printed.");
|
||||||
|
|
||||||
|
doc!(h, "type-add",
|
||||||
|
"Add a new glob for a file type.",
|
||||||
|
"Add a new glob for a particular file type. Only one glob can be \
|
||||||
|
added at a time. Multiple --type-add flags can be provided. \
|
||||||
|
Unless --type-clear is used, globs are added to any existing \
|
||||||
|
globs defined inside of ripgrep.\n\nNote that this MUST be \
|
||||||
|
passed to every invocation of ripgrep. Type settings are NOT \
|
||||||
|
persisted.\n\nExample: \
|
||||||
|
rg --type-add 'foo:*.foo' -tfoo PATTERN.\n\n\
|
||||||
|
--type-add can also be used to include rules from other types \
|
||||||
|
with the special include directive. The include directive \
|
||||||
|
permits specifying one or more other type names (separated by a \
|
||||||
|
comma) that have been defined and its rules will automatically \
|
||||||
|
be imported into the type specified. For example, to create a \
|
||||||
|
type called src that matches C++, Python and Markdown files, \
|
||||||
|
one can use:\n\n\
|
||||||
|
--type-add 'src:include:cpp,py,md'\n\n\
|
||||||
|
Additional glob rules can still be added to the src type by \
|
||||||
|
using the --type-add flag again:\n\n\
|
||||||
|
--type-add 'src:include:cpp,py,md' --type-add 'src:*.foo'\n\n\
|
||||||
|
Note that type names must consist only of Unicode letters or \
|
||||||
|
numbers. Punctuation characters are not allowed.");
|
||||||
|
doc!(h, "type-clear",
|
||||||
|
"Clear globs for given file type.",
|
||||||
|
"Clear the file type globs previously defined for TYPE. This \
|
||||||
|
only clears the default type definitions that are found inside \
|
||||||
|
of ripgrep.\n\nNote that this MUST be passed to every \
|
||||||
|
invocation of ripgrep. Type settings are NOT persisted.");
|
||||||
|
|
||||||
|
h
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn validate_number(s: String) -> Result<(), String> {
|
||||||
|
s.parse::<usize>().map(|_|()).map_err(|err| err.to_string())
|
||||||
|
}
|
||||||
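The doc comment on app() above notes that the module is deliberately stand-alone so it can also be driven from a build.rs script to produce the shell completion files that the updated Homebrew formula installs. A minimal sketch of such a build script follows; it is not part of this diff, and it assumes clap 2.x's gen_completions API plus a #[path] include of src/app.rs, so treat the names and layout as illustrative.

// build.rs (sketch): emit shell completions from the stand-alone app() builder.
// Assumes clap 2.x (crate_* macros, Shell, gen_completions) and lazy_static,
// which src/app.rs itself depends on; not taken from this diff.
#[macro_use]
extern crate clap;
#[macro_use]
extern crate lazy_static;

use std::env;
use std::fs;

use clap::Shell;

#[allow(dead_code)]
#[path = "src/app.rs"]
mod app;

fn main() {
    // Cargo sets OUT_DIR for build scripts; the completion files land there.
    let outdir = match env::var_os("OUT_DIR") {
        None => return,
        Some(outdir) => outdir,
    };
    fs::create_dir_all(&outdir).unwrap();

    let mut app = app::app();
    app.gen_completions("rg", Shell::Bash, outdir.clone());
    app.gen_completions("rg", Shell::Fish, outdir.clone());
    app.gen_completions("rg", Shell::Zsh, outdir);
}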
1440 src/args.rs (file diff suppressed because it is too large)
69 src/atty.rs (deleted)
@@ -1,69 +0,0 @@
/*!
This atty module contains functions for detecting whether ripgrep is being fed
from (or to) a terminal. Windows and Unix do this differently, so implement
both here.
*/

#[cfg(unix)]
pub fn stdin_is_readable() -> bool {
    use std::fs::File;
    use std::os::unix::fs::FileTypeExt;
    use std::os::unix::io::{FromRawFd, IntoRawFd};
    use libc;

    let file = unsafe { File::from_raw_fd(libc::STDIN_FILENO) };
    let md = file.metadata();
    let _ = file.into_raw_fd();
    let ft = match md {
        Err(_) => return false,
        Ok(md) => md.file_type(),
    };
    ft.is_file() || ft.is_fifo()
}

#[cfg(windows)]
pub fn stdin_is_readable() -> bool {
    // ???
    true
}

/// Returns true if there is a tty on stdin.
#[cfg(unix)]
pub fn on_stdin() -> bool {
    use libc;
    0 < unsafe { libc::isatty(libc::STDIN_FILENO) }
}

/// Returns true if there is a tty on stdout.
#[cfg(unix)]
pub fn on_stdout() -> bool {
    use libc;
    0 < unsafe { libc::isatty(libc::STDOUT_FILENO) }
}

/// Returns true if there is a tty on stdin.
#[cfg(windows)]
pub fn on_stdin() -> bool {
    // BUG: https://github.com/BurntSushi/ripgrep/issues/19
    // It's not clear to me how to determine whether there is a tty on stdin.
    // Checking GetConsoleMode(GetStdHandle(stdin)) != 0 appears to report
    // that stdin is a pipe, even if it's not in a cygwin terminal, for
    // example.
    //
    // To fix this, we just assume there is always a tty on stdin. If Windows
    // users need to search stdin, they'll have to pass -. Ug.
    true
}

/// Returns true if there is a tty on stdout.
#[cfg(windows)]
pub fn on_stdout() -> bool {
    use kernel32;
    use winapi;

    unsafe {
        let fd = winapi::winbase::STD_OUTPUT_HANDLE;
        let mut out = 0;
        kernel32::GetConsoleMode(kernel32::GetStdHandle(fd), &mut out) != 0
    }
}
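These helpers are what let ripgrep pick different defaults for interactive terminals versus pipes. The sketch below is illustrative only (the real decision logic lives in src/args.rs, whose diff is suppressed above); it shows the kind of calls a consumer of this module makes, where `atty` is the crate-local module removed in this change, not the external atty crate.

// Illustrative consumers of the crate-local atty module shown above.
// The function names and policy here are hypothetical, not ripgrep's own.
fn wants_pretty_output() -> bool {
    // Headings, line numbers and colors only make sense for a human at a tty.
    atty::on_stdout()
}

fn should_search_cwd(no_paths_given: bool) -> bool {
    // If no paths were given and stdin is an interactive terminal rather than
    // a readable file or pipe, default to searching the current directory
    // instead of blocking on stdin.
    no_paths_given && (atty::on_stdin() || !atty::stdin_is_readable())
}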
456 src/decoder.rs (new file)
@@ -0,0 +1,456 @@
use std::cmp;
|
||||||
|
use std::io::{self, Read};
|
||||||
|
|
||||||
|
use encoding_rs::{Decoder, Encoding, UTF_8};
|
||||||
|
|
||||||
|
/// A BOM is at least 2 bytes and at most 3 bytes.
|
||||||
|
///
|
||||||
|
/// If fewer than 2 bytes are available to be read at the beginning of a
|
||||||
|
/// reader, then a BOM is `None`.
|
||||||
|
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||||
|
struct Bom {
|
||||||
|
bytes: [u8; 3],
|
||||||
|
len: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bom {
|
||||||
|
fn as_slice(&self) -> &[u8] {
|
||||||
|
&self.bytes[0..self.len]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn decoder(&self) -> Option<Decoder> {
|
||||||
|
let bom = self.as_slice();
|
||||||
|
if bom.len() < 3 {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
if let Some((enc, _)) = Encoding::for_bom(bom) {
|
||||||
|
if enc != UTF_8 {
|
||||||
|
return Some(enc.new_decoder_with_bom_removal());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// BomPeeker wraps `R` and satisfies the `io::Read` interface while also
|
||||||
|
/// providing a peek at the BOM if one exists. Peeking at the BOM does not
|
||||||
|
/// advance the reader.
|
||||||
|
struct BomPeeker<R> {
|
||||||
|
rdr: R,
|
||||||
|
bom: Option<Bom>,
|
||||||
|
nread: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<R: io::Read> BomPeeker<R> {
|
||||||
|
/// Create a new BomPeeker.
|
||||||
|
///
|
||||||
|
/// The first three bytes can be read using the `peek_bom` method, but
|
||||||
|
/// will not advance the reader.
|
||||||
|
fn new(rdr: R) -> BomPeeker<R> {
|
||||||
|
BomPeeker { rdr: rdr, bom: None, nread: 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Peek at the first three bytes of the underlying reader.
|
||||||
|
///
|
||||||
|
/// This does not advance the reader provided by `BomPeeker`.
|
||||||
|
///
|
||||||
|
/// If the underlying reader does not have at least two bytes available,
|
||||||
|
/// then `None` is returned.
|
||||||
|
fn peek_bom(&mut self) -> io::Result<Bom> {
|
||||||
|
if let Some(bom) = self.bom {
|
||||||
|
return Ok(bom);
|
||||||
|
}
|
||||||
|
self.bom = Some(Bom { bytes: [0; 3], len: 0 });
|
||||||
|
let mut buf = [0u8; 3];
|
||||||
|
let bom_len = try!(read_full(&mut self.rdr, &mut buf));
|
||||||
|
self.bom = Some(Bom { bytes: buf, len: bom_len });
|
||||||
|
Ok(self.bom.unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<R: io::Read> io::Read for BomPeeker<R> {
|
||||||
|
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
|
||||||
|
if self.nread < 3 {
|
||||||
|
let bom = try!(self.peek_bom());
|
||||||
|
let bom = bom.as_slice();
|
||||||
|
if self.nread < bom.len() {
|
||||||
|
let rest = &bom[self.nread..];
|
||||||
|
let len = cmp::min(buf.len(), rest.len());
|
||||||
|
buf[..len].copy_from_slice(&rest[..len]);
|
||||||
|
self.nread += len;
|
||||||
|
return Ok(len);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let nread = try!(self.rdr.read(buf));
|
||||||
|
self.nread += nread;
|
||||||
|
Ok(nread)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Like io::Read::read_exact, except it never returns UnexpectedEof and
|
||||||
|
/// instead returns the number of bytes read if EOF is seen before filling
|
||||||
|
/// `buf`.
|
||||||
|
fn read_full<R: io::Read>(
|
||||||
|
mut rdr: R,
|
||||||
|
mut buf: &mut [u8],
|
||||||
|
) -> io::Result<usize> {
|
||||||
|
let mut nread = 0;
|
||||||
|
while !buf.is_empty() {
|
||||||
|
match rdr.read(buf) {
|
||||||
|
Ok(0) => break,
|
||||||
|
Ok(n) => {
|
||||||
|
nread += n;
|
||||||
|
let tmp = buf;
|
||||||
|
buf = &mut tmp[n..];
|
||||||
|
}
|
||||||
|
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
|
||||||
|
Err(e) => return Err(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(nread)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A reader that transcodes to UTF-8. The source encoding is determined by
|
||||||
|
/// inspecting the BOM from the stream read from `R`, if one exists. If a
|
||||||
|
/// UTF-16 BOM exists, then the source stream is transcoded to UTF-8 with
|
||||||
|
/// invalid UTF-16 sequences translated to the Unicode replacement character.
|
||||||
|
/// In all other cases, the underlying reader is passed through unchanged.
|
||||||
|
///
|
||||||
|
/// `R` is the type of the underlying reader and `B` is the type of an internal
|
||||||
|
/// buffer used to store the results of transcoding.
|
||||||
|
///
|
||||||
|
/// Note that not all methods on `io::Read` work with this implementation.
|
||||||
|
/// For example, the `bytes` adapter method attempts to read a single byte at
|
||||||
|
/// a time, but this implementation requires a buffer of size at least `4`. If
|
||||||
|
/// a buffer of size less than 4 is given, then an error is returned.
|
||||||
|
pub struct DecodeReader<R, B> {
|
||||||
|
/// The underlying reader, wrapped in a peeker for reading a BOM if one
|
||||||
|
/// exists.
|
||||||
|
rdr: BomPeeker<R>,
|
||||||
|
/// The internal buffer to store transcoded bytes before they are read by
|
||||||
|
/// callers.
|
||||||
|
buf: B,
|
||||||
|
/// The current position in `buf`. Subsequent reads start here.
|
||||||
|
pos: usize,
|
||||||
|
/// The number of transcoded bytes in `buf`. Subsequent reads end here.
|
||||||
|
buflen: usize,
|
||||||
|
/// Whether this is the first read or not (in which we inspect the BOM).
|
||||||
|
first: bool,
|
||||||
|
/// Whether a "last" read has occurred. After this point, EOF will always
|
||||||
|
/// be returned.
|
||||||
|
last: bool,
|
||||||
|
/// The underlying text decoder derived from the BOM, if one exists.
|
||||||
|
decoder: Option<Decoder>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<R: io::Read, B: AsMut<[u8]>> DecodeReader<R, B> {
|
||||||
|
/// Create a new transcoder that converts a source stream to valid UTF-8.
|
||||||
|
///
|
||||||
|
/// If an encoding is specified, then it is used to transcode `rdr` to
|
||||||
|
/// UTF-8. Otherwise, if no encoding is specified, and if a UTF-16 BOM is
|
||||||
|
/// found, then the corresponding UTF-16 encoding is used to transcode
|
||||||
|
/// `rdr` to UTF-8. In all other cases, `rdr` is assumed to be at least
|
||||||
|
/// ASCII-compatible and passed through untouched.
|
||||||
|
///
|
||||||
|
/// Errors in the encoding of `rdr` are handled with the Unicode
|
||||||
|
/// replacement character. If no encoding of `rdr` is specified, then
|
||||||
|
/// errors are not handled.
|
||||||
|
pub fn new(
|
||||||
|
rdr: R,
|
||||||
|
buf: B,
|
||||||
|
enc: Option<&'static Encoding>,
|
||||||
|
) -> DecodeReader<R, B> {
|
||||||
|
DecodeReader {
|
||||||
|
rdr: BomPeeker::new(rdr),
|
||||||
|
buf: buf,
|
||||||
|
buflen: 0,
|
||||||
|
pos: 0,
|
||||||
|
first: enc.is_none(),
|
||||||
|
last: false,
|
||||||
|
decoder: enc.map(|enc| enc.new_decoder_with_bom_removal()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Fill the internal buffer from the underlying reader.
|
||||||
|
///
|
||||||
|
/// If there are unread bytes in the internal buffer, then we move them
|
||||||
|
/// to the beginning of the internal buffer and fill the remainder.
|
||||||
|
///
|
||||||
|
/// If the internal buffer is too small to read additional bytes, then an
|
||||||
|
/// error is returned.
|
||||||
|
#[inline(always)] // massive perf benefit (???)
|
||||||
|
fn fill(&mut self) -> io::Result<()> {
|
||||||
|
if self.pos < self.buflen {
|
||||||
|
if self.buflen >= self.buf.as_mut().len() {
|
||||||
|
return Err(io::Error::new(
|
||||||
|
io::ErrorKind::Other,
|
||||||
|
"DecodeReader: internal buffer exhausted"));
|
||||||
|
}
|
||||||
|
let newlen = self.buflen - self.pos;
|
||||||
|
let mut tmp = Vec::with_capacity(newlen);
|
||||||
|
tmp.extend_from_slice(&self.buf.as_mut()[self.pos..self.buflen]);
|
||||||
|
self.buf.as_mut()[..newlen].copy_from_slice(&tmp);
|
||||||
|
self.buflen = newlen;
|
||||||
|
} else {
|
||||||
|
self.buflen = 0;
|
||||||
|
}
|
||||||
|
self.pos = 0;
|
||||||
|
self.buflen +=
|
||||||
|
try!(self.rdr.read(&mut self.buf.as_mut()[self.buflen..]));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Transcode the inner stream to UTF-8 in `buf`. This assumes that there
|
||||||
|
/// is a decoder capable of transcoding the inner stream to UTF-8. This
|
||||||
|
/// returns the number of bytes written to `buf`.
|
||||||
|
///
|
||||||
|
/// When this function returns, exactly one of the following things will
|
||||||
|
/// be true:
|
||||||
|
///
|
||||||
|
/// 1. A non-zero number of bytes were written to `buf`.
|
||||||
|
/// 2. The underlying reader reached EOF.
|
||||||
|
/// 3. An error is returned: the internal buffer ran out of room.
|
||||||
|
/// 4. An I/O error occurred.
|
||||||
|
///
|
||||||
|
/// Note that `buf` must have at least 4 bytes of space.
|
||||||
|
fn transcode(&mut self, buf: &mut [u8]) -> io::Result<usize> {
|
||||||
|
assert!(buf.len() >= 4);
|
||||||
|
if self.last {
|
||||||
|
return Ok(0);
|
||||||
|
}
|
||||||
|
if self.pos >= self.buflen {
|
||||||
|
try!(self.fill());
|
||||||
|
}
|
||||||
|
let mut nwrite = 0;
|
||||||
|
loop {
|
||||||
|
let (_, nin, nout, _) =
|
||||||
|
self.decoder.as_mut().unwrap().decode_to_utf8(
|
||||||
|
&self.buf.as_mut()[self.pos..self.buflen], buf, false);
|
||||||
|
self.pos += nin;
|
||||||
|
nwrite += nout;
|
||||||
|
// If we've written at least one byte to the caller-provided
|
||||||
|
// buffer, then our mission is complete.
|
||||||
|
if nwrite > 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
// Otherwise, we know that our internal buffer has insufficient
|
||||||
|
// data to transcode at least one char, so we attempt to refill it.
|
||||||
|
try!(self.fill());
|
||||||
|
// Quit on EOF.
|
||||||
|
if self.buflen == 0 {
|
||||||
|
self.pos = 0;
|
||||||
|
self.last = true;
|
||||||
|
let (_, _, nout, _) =
|
||||||
|
self.decoder.as_mut().unwrap().decode_to_utf8(
|
||||||
|
&[], buf, true);
|
||||||
|
return Ok(nout);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(nwrite)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline(never)] // impacts perf...
|
||||||
|
fn detect(&mut self) -> io::Result<()> {
|
||||||
|
let bom = try!(self.rdr.peek_bom());
|
||||||
|
self.decoder = bom.decoder();
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<R: io::Read, B: AsMut<[u8]>> io::Read for DecodeReader<R, B> {
|
||||||
|
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
|
||||||
|
if self.first {
|
||||||
|
self.first = false;
|
||||||
|
try!(self.detect());
|
||||||
|
}
|
||||||
|
if self.decoder.is_none() {
|
||||||
|
return self.rdr.read(buf);
|
||||||
|
}
|
||||||
|
// When decoding UTF-8, we need at least 4 bytes of space to guarantee
|
||||||
|
// that we can decode at least one codepoint. If we don't have it, we
|
||||||
|
// can either return `0` for the number of bytes read or return an
|
||||||
|
// error. Since `0` would be interpreted as a possibly premature EOF,
|
||||||
|
// we opt for an error.
|
||||||
|
if buf.len() < 4 {
|
||||||
|
return Err(io::Error::new(
|
||||||
|
io::ErrorKind::Other,
|
||||||
|
"DecodeReader: byte buffer must have length at least 4"));
|
||||||
|
}
|
||||||
|
self.transcode(buf)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use std::io::Read;
|
||||||
|
|
||||||
|
use encoding_rs::Encoding;
|
||||||
|
|
||||||
|
use super::{Bom, BomPeeker, DecodeReader};
|
||||||
|
|
||||||
|
fn read_to_string<R: Read>(mut rdr: R) -> String {
|
||||||
|
let mut s = String::new();
|
||||||
|
rdr.read_to_string(&mut s).unwrap();
|
||||||
|
s
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn peeker_empty() {
|
||||||
|
let buf = [];
|
||||||
|
let mut peeker = BomPeeker::new(&buf[..]);
|
||||||
|
assert_eq!(Bom { bytes: [0; 3], len: 0}, peeker.peek_bom().unwrap());
|
||||||
|
|
||||||
|
let mut tmp = [0; 100];
|
||||||
|
assert_eq!(0, peeker.read(&mut tmp).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn peeker_one() {
|
||||||
|
let buf = [1];
|
||||||
|
let mut peeker = BomPeeker::new(&buf[..]);
|
||||||
|
assert_eq!(
|
||||||
|
Bom { bytes: [1, 0, 0], len: 1},
|
||||||
|
peeker.peek_bom().unwrap());
|
||||||
|
|
||||||
|
let mut tmp = [0; 100];
|
||||||
|
assert_eq!(1, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(1, tmp[0]);
|
||||||
|
assert_eq!(0, peeker.read(&mut tmp).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn peeker_two() {
|
||||||
|
let buf = [1, 2];
|
||||||
|
let mut peeker = BomPeeker::new(&buf[..]);
|
||||||
|
assert_eq!(
|
||||||
|
Bom { bytes: [1, 2, 0], len: 2},
|
||||||
|
peeker.peek_bom().unwrap());
|
||||||
|
|
||||||
|
let mut tmp = [0; 100];
|
||||||
|
assert_eq!(2, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(1, tmp[0]);
|
||||||
|
assert_eq!(2, tmp[1]);
|
||||||
|
assert_eq!(0, peeker.read(&mut tmp).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn peeker_three() {
|
||||||
|
let buf = [1, 2, 3];
|
||||||
|
let mut peeker = BomPeeker::new(&buf[..]);
|
||||||
|
assert_eq!(
|
||||||
|
Bom { bytes: [1, 2, 3], len: 3},
|
||||||
|
peeker.peek_bom().unwrap());
|
||||||
|
|
||||||
|
let mut tmp = [0; 100];
|
||||||
|
assert_eq!(3, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(1, tmp[0]);
|
||||||
|
assert_eq!(2, tmp[1]);
|
||||||
|
assert_eq!(3, tmp[2]);
|
||||||
|
assert_eq!(0, peeker.read(&mut tmp).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn peeker_four() {
|
||||||
|
let buf = [1, 2, 3, 4];
|
||||||
|
let mut peeker = BomPeeker::new(&buf[..]);
|
||||||
|
assert_eq!(
|
||||||
|
Bom { bytes: [1, 2, 3], len: 3},
|
||||||
|
peeker.peek_bom().unwrap());
|
||||||
|
|
||||||
|
let mut tmp = [0; 100];
|
||||||
|
assert_eq!(3, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(1, tmp[0]);
|
||||||
|
assert_eq!(2, tmp[1]);
|
||||||
|
assert_eq!(3, tmp[2]);
|
||||||
|
assert_eq!(1, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(4, tmp[0]);
|
||||||
|
assert_eq!(0, peeker.read(&mut tmp).unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn peeker_one_at_a_time() {
|
||||||
|
let buf = [1, 2, 3, 4];
|
||||||
|
let mut peeker = BomPeeker::new(&buf[..]);
|
||||||
|
|
||||||
|
let mut tmp = [0; 1];
|
||||||
|
assert_eq!(0, peeker.read(&mut tmp[..0]).unwrap());
|
||||||
|
assert_eq!(0, tmp[0]);
|
||||||
|
assert_eq!(1, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(1, tmp[0]);
|
||||||
|
assert_eq!(1, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(2, tmp[0]);
|
||||||
|
assert_eq!(1, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(3, tmp[0]);
|
||||||
|
assert_eq!(1, peeker.read(&mut tmp).unwrap());
|
||||||
|
assert_eq!(4, tmp[0]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// In cases where all we have is a bom, we expect the bytes to be
|
||||||
|
// passed through unchanged.
|
||||||
|
#[test]
|
||||||
|
fn trans_utf16_bom() {
|
||||||
|
let srcbuf = vec![0xFF, 0xFE];
|
||||||
|
let mut dstbuf = vec![0; 8 * (1<<10)];
|
||||||
|
let mut rdr = DecodeReader::new(&*srcbuf, vec![0; 8 * (1<<10)], None);
|
||||||
|
let n = rdr.read(&mut dstbuf).unwrap();
|
||||||
|
assert_eq!(&*srcbuf, &dstbuf[..n]);
|
||||||
|
|
||||||
|
let srcbuf = vec![0xFE, 0xFF];
|
||||||
|
let mut rdr = DecodeReader::new(&*srcbuf, vec![0; 8 * (1<<10)], None);
|
||||||
|
let n = rdr.read(&mut dstbuf).unwrap();
|
||||||
|
assert_eq!(&*srcbuf, &dstbuf[..n]);
|
||||||
|
|
||||||
|
let srcbuf = vec![0xEF, 0xBB, 0xBF];
|
||||||
|
let mut rdr = DecodeReader::new(&*srcbuf, vec![0; 8 * (1<<10)], None);
|
||||||
|
let n = rdr.read(&mut dstbuf).unwrap();
|
||||||
|
assert_eq!(&*srcbuf, &dstbuf[..n]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test basic UTF-16 decoding.
|
||||||
|
#[test]
|
||||||
|
fn trans_utf16_basic() {
|
||||||
|
let srcbuf = vec![0xFF, 0xFE, 0x61, 0x00];
|
||||||
|
let mut rdr = DecodeReader::new(&*srcbuf, vec![0; 8 * (1<<10)], None);
|
||||||
|
assert_eq!("a", read_to_string(&mut rdr));
|
||||||
|
|
||||||
|
let srcbuf = vec![0xFE, 0xFF, 0x00, 0x61];
|
||||||
|
let mut rdr = DecodeReader::new(&*srcbuf, vec![0; 8 * (1<<10)], None);
|
||||||
|
assert_eq!("a", read_to_string(&mut rdr));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test incomplete UTF-16 decoding. This ensures we see a replacement char
|
||||||
|
// if the stream ends with an unpaired code unit.
|
||||||
|
#[test]
|
||||||
|
fn trans_utf16_incomplete() {
|
||||||
|
let srcbuf = vec![0xFF, 0xFE, 0x61, 0x00, 0x00];
|
||||||
|
let mut rdr = DecodeReader::new(&*srcbuf, vec![0; 8 * (1<<10)], None);
|
||||||
|
assert_eq!("a\u{FFFD}", read_to_string(&mut rdr));
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! test_trans_simple {
|
||||||
|
($name:ident, $enc:expr, $srcbytes:expr, $dst:expr) => {
|
||||||
|
#[test]
|
||||||
|
fn $name() {
|
||||||
|
let srcbuf = &$srcbytes[..];
|
||||||
|
let enc = Encoding::for_label($enc.as_bytes());
|
||||||
|
let mut rdr = DecodeReader::new(
|
||||||
|
&*srcbuf, vec![0; 8 * (1<<10)], enc);
|
||||||
|
assert_eq!($dst, read_to_string(&mut rdr));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// This isn't exhaustive obviously, but it lets us test base level support.
|
||||||
|
test_trans_simple!(trans_simple_auto, "does not exist", b"\xD0\x96", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_utf8, "utf-8", b"\xD0\x96", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_utf16le, "utf-16le", b"\x16\x04", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_utf16be, "utf-16be", b"\x04\x16", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_chinese, "chinese", b"\xA7\xA8", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_korean, "korean", b"\xAC\xA8", "Ж");
|
||||||
|
test_trans_simple!(
|
||||||
|
trans_simple_big5_hkscs, "big5-hkscs", b"\xC7\xFA", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_gbk, "gbk", b"\xA7\xA8", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_sjis, "sjis", b"\x84\x47", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_eucjp, "euc-jp", b"\xA7\xA8", "Ж");
|
||||||
|
test_trans_simple!(trans_simple_latin1, "latin1", b"\xA9", "©");
|
||||||
|
}
|
||||||
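Taken together, the types above let the search code treat every input as UTF-8: DecodeReader sniffs a UTF-16 BOM through BomPeeker (or honors an explicit encoding label) and transcodes with replacement characters, and in all other cases passes bytes through untouched. The sketch below mirrors the unit tests at the end of the file; the helper name, file path and buffer size are illustrative rather than taken from this diff, and DecodeReader is the crate-local type defined above.

// Sketch: wrap any io::Read in DecodeReader so downstream code sees UTF-8.
use std::fs::File;
use std::io::{self, Read};

use encoding_rs::Encoding;

use decoder::DecodeReader; // crate-local module from this diff

fn read_as_utf8(path: &str, label: Option<&str>) -> io::Result<String> {
    // An explicit label (e.g. "utf-16le", "sjis") overrides BOM sniffing;
    // `None` means "sniff a UTF-16 BOM, otherwise pass bytes through".
    let enc = label.and_then(|l| Encoding::for_label(l.as_bytes()));
    let file = try!(File::open(path));
    // The second argument is the internal transcoding buffer, as in the tests.
    let mut rdr = DecodeReader::new(file, vec![0; 8 * (1 << 10)], enc);
    let mut out = String::new();
    try!(rdr.read_to_string(&mut out));
    Ok(out)
}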
455 src/gitignore.rs (deleted)
@@ -1,455 +0,0 @@
/*!
The gitignore module provides a way of reading a gitignore file and applying
it to a particular file name to determine whether it should be ignored or not.
The motivation for this submodule is performance and portability:

1. There is a gitignore crate on crates.io, but it uses the standard `glob`
crate and checks patterns one-by-one. This is a reasonable implementation,
but not suitable for the performance we need here.
2. We could shell out to a `git` sub-command like ls-files or status, but it
seems better to not rely on the existence of external programs for a search
tool. Besides, we need to implement this logic anyway to support things like
an .ignore file.

The key implementation detail here is that a single gitignore file is compiled
into a single RegexSet, which can be used to report which globs match a
particular file name. We can then do a quick post-processing step to implement
additional rules such as whitelists (prefix of `!`) or directory-only globs
(suffix of `/`).
*/
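In practice that means one GitignoreBuilder per gitignore file, one compiled Gitignore matcher, and then repeated matched() queries, with the whitelist and directory-only rules applied to the match results. A small sketch of that flow, in the spirit of the tests at the bottom of this file; the root path and patterns are made up for illustration, and the types are the crate-local ones removed in this change.

// Sketch of using the matcher described above (crate-local types).
use gitignore::{Error, GitignoreBuilder};

fn gitignore_example() -> Result<(), Error> {
    let mut builder = GitignoreBuilder::new("/home/user/project");
    // add_str adds one pattern per line; add_path does the same for a file.
    try!(builder.add_str("target/\n*.log\n!keep.log\n"));
    let gi = try!(builder.build());

    // `is_dir` matters because trailing-slash globs only match directories.
    assert!(gi.matched("/home/user/project/target", true).is_ignored());
    assert!(gi.matched("/home/user/project/debug.log", false).is_ignored());
    // A leading `!` whitelists a path even if an earlier glob ignored it.
    assert!(!gi.matched("/home/user/project/keep.log", false).is_ignored());
    Ok(())
}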
|
|
||||||
// TODO(burntsushi): Implement something similar, but for Mercurial. We can't
|
|
||||||
// use this exact implementation because hgignore files are different.
|
|
||||||
|
|
||||||
use std::cell::RefCell;
|
|
||||||
use std::error::Error as StdError;
|
|
||||||
use std::fmt;
|
|
||||||
use std::fs::File;
|
|
||||||
use std::io::{self, BufRead};
|
|
||||||
use std::path::{Path, PathBuf};
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use globset::{self, Candidate, GlobBuilder, GlobSet, GlobSetBuilder};
|
|
||||||
use regex;
|
|
||||||
use thread_local::ThreadLocal;
|
|
||||||
|
|
||||||
use pathutil::{is_file_name, strip_prefix};
|
|
||||||
|
|
||||||
/// Represents an error that can occur when parsing a gitignore file.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
Glob(globset::Error),
|
|
||||||
Regex(regex::Error),
|
|
||||||
Io(io::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl StdError for Error {
|
|
||||||
fn description(&self) -> &str {
|
|
||||||
match *self {
|
|
||||||
Error::Glob(ref err) => err.description(),
|
|
||||||
Error::Regex(ref err) => err.description(),
|
|
||||||
Error::Io(ref err) => err.description(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Error {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match *self {
|
|
||||||
Error::Glob(ref err) => err.fmt(f),
|
|
||||||
Error::Regex(ref err) => err.fmt(f),
|
|
||||||
Error::Io(ref err) => err.fmt(f),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<globset::Error> for Error {
|
|
||||||
fn from(err: globset::Error) -> Error {
|
|
||||||
Error::Glob(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<regex::Error> for Error {
|
|
||||||
fn from(err: regex::Error) -> Error {
|
|
||||||
Error::Regex(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<io::Error> for Error {
|
|
||||||
fn from(err: io::Error) -> Error {
|
|
||||||
Error::Io(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Gitignore is a matcher for the glob patterns in a single gitignore file.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Gitignore {
|
|
||||||
set: GlobSet,
|
|
||||||
root: PathBuf,
|
|
||||||
patterns: Vec<Pattern>,
|
|
||||||
num_ignores: u64,
|
|
||||||
num_whitelist: u64,
|
|
||||||
matches: Arc<ThreadLocal<RefCell<Vec<usize>>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Gitignore {
|
|
||||||
/// Create a new gitignore glob matcher from the given root directory and
|
|
||||||
/// string containing the contents of a gitignore file.
|
|
||||||
#[allow(dead_code)]
|
|
||||||
fn from_str<P: AsRef<Path>>(
|
|
||||||
root: P,
|
|
||||||
gitignore: &str,
|
|
||||||
) -> Result<Gitignore, Error> {
|
|
||||||
let mut builder = GitignoreBuilder::new(root);
|
|
||||||
try!(builder.add_str(gitignore));
|
|
||||||
builder.build()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if and only if the given file path should be ignored
|
|
||||||
/// according to the globs in this gitignore. `is_dir` should be true if
|
|
||||||
/// the path refers to a directory and false otherwise.
|
|
||||||
///
|
|
||||||
/// Before matching path, its prefix (as determined by a common suffix
|
|
||||||
/// of the directory containing this gitignore) is stripped. If there is
|
|
||||||
/// no common suffix/prefix overlap, then path is assumed to reside in the
|
|
||||||
/// same directory as this gitignore file.
|
|
||||||
pub fn matched<P: AsRef<Path>>(&self, path: P, is_dir: bool) -> Match {
|
|
||||||
let mut path = path.as_ref();
|
|
||||||
if let Some(p) = strip_prefix("./", path) {
|
|
||||||
path = p;
|
|
||||||
}
|
|
||||||
// Strip any common prefix between the candidate path and the root
|
|
||||||
// of the gitignore, to make sure we get relative matching right.
|
|
||||||
// BUT, a file name might not have any directory components to it,
|
|
||||||
// in which case, we don't want to accidentally strip any part of the
|
|
||||||
// file name.
|
|
||||||
if !is_file_name(path) {
|
|
||||||
if let Some(p) = strip_prefix(&self.root, path) {
|
|
||||||
path = p;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if let Some(p) = strip_prefix("/", path) {
|
|
||||||
path = p;
|
|
||||||
}
|
|
||||||
self.matched_stripped(path, is_dir)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Like matched, but takes a path that has already been stripped.
|
|
||||||
pub fn matched_stripped(&self, path: &Path, is_dir: bool) -> Match {
|
|
||||||
let _matches = self.matches.get_default();
|
|
||||||
let mut matches = _matches.borrow_mut();
|
|
||||||
let candidate = Candidate::new(path);
|
|
||||||
self.set.matches_candidate_into(&candidate, &mut *matches);
|
|
||||||
for &i in matches.iter().rev() {
|
|
||||||
let pat = &self.patterns[i];
|
|
||||||
if !pat.only_dir || is_dir {
|
|
||||||
return if pat.whitelist {
|
|
||||||
Match::Whitelist(pat)
|
|
||||||
} else {
|
|
||||||
Match::Ignored(pat)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Match::None
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the total number of ignore patterns.
|
|
||||||
pub fn num_ignores(&self) -> u64 {
|
|
||||||
self.num_ignores
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The result of a glob match.
|
|
||||||
///
|
|
||||||
/// The lifetime `'a` refers to the lifetime of the pattern that resulted in
|
|
||||||
/// a match (whether ignored or whitelisted).
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum Match<'a> {
|
|
||||||
/// The path didn't match any glob in the gitignore file.
|
|
||||||
None,
|
|
||||||
/// The last glob matched indicates the path should be ignored.
|
|
||||||
Ignored(&'a Pattern),
|
|
||||||
/// The last glob matched indicates the path should be whitelisted.
|
|
||||||
Whitelist(&'a Pattern),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Match<'a> {
|
|
||||||
/// Returns true if the match result implies the path should be ignored.
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub fn is_ignored(&self) -> bool {
|
|
||||||
match *self {
|
|
||||||
Match::Ignored(_) => true,
|
|
||||||
Match::None | Match::Whitelist(_) => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if the match result didn't match any globs.
|
|
||||||
pub fn is_none(&self) -> bool {
|
|
||||||
match *self {
|
|
||||||
Match::None => true,
|
|
||||||
Match::Ignored(_) | Match::Whitelist(_) => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Inverts the match so that Ignored becomes Whitelisted and Whitelisted
|
|
||||||
/// becomes Ignored. A non-match remains the same.
|
|
||||||
pub fn invert(self) -> Match<'a> {
|
|
||||||
match self {
|
|
||||||
Match::None => Match::None,
|
|
||||||
Match::Ignored(pat) => Match::Whitelist(pat),
|
|
||||||
Match::Whitelist(pat) => Match::Ignored(pat),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// GitignoreBuilder constructs a matcher for a single set of globs from a
|
|
||||||
/// .gitignore file.
|
|
||||||
pub struct GitignoreBuilder {
|
|
||||||
builder: GlobSetBuilder,
|
|
||||||
root: PathBuf,
|
|
||||||
patterns: Vec<Pattern>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Pattern represents a single pattern in a gitignore file. It doesn't
|
|
||||||
/// know how to do glob matching directly, but it does store additional
/// options on a pattern, such as whether it's whitelisted.
#[derive(Clone, Debug)]
pub struct Pattern {
    /// The file path that this pattern was extracted from (may be empty).
    pub from: PathBuf,
    /// The original glob pattern string.
    pub original: String,
    /// The actual glob pattern string used to convert to a regex.
    pub pat: String,
    /// Whether this is a whitelisted pattern or not.
    pub whitelist: bool,
    /// Whether this pattern should only match directories or not.
    pub only_dir: bool,
}

impl GitignoreBuilder {
    /// Create a new builder for a gitignore file.
    ///
    /// The path given should be the path at which the globs for this gitignore
    /// file should be matched.
    pub fn new<P: AsRef<Path>>(root: P) -> GitignoreBuilder {
        let root = strip_prefix("./", root.as_ref()).unwrap_or(root.as_ref());
        GitignoreBuilder {
            builder: GlobSetBuilder::new(),
            root: root.to_path_buf(),
            patterns: vec![],
        }
    }

    /// Builds a new matcher from the glob patterns added so far.
    ///
    /// Once a matcher is built, no new glob patterns can be added to it.
    pub fn build(self) -> Result<Gitignore, Error> {
        let nignores = self.patterns.iter().filter(|p| !p.whitelist).count();
        let nwhitelist = self.patterns.iter().filter(|p| p.whitelist).count();
        Ok(Gitignore {
            set: try!(self.builder.build()),
            root: self.root,
            patterns: self.patterns,
            num_ignores: nignores as u64,
            num_whitelist: nwhitelist as u64,
            matches: Arc::new(ThreadLocal::default()),
        })
    }

    /// Add each pattern line from the file path given.
    pub fn add_path<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
        let rdr = io::BufReader::new(try!(File::open(&path)));
        debug!("gitignore: {}", path.as_ref().display());
        for (i, line) in rdr.lines().enumerate() {
            let line = match line {
                Ok(line) => line,
                Err(err) => {
                    debug!("error reading line {} in {}: {}",
                           i, path.as_ref().display(), err);
                    continue;
                }
            };
            if let Err(err) = self.add(&path, &line) {
                debug!("error adding gitignore pattern: '{}': {}", line, err);
            }
        }
        Ok(())
    }

    /// Add each pattern line from the string given.
    pub fn add_str(&mut self, gitignore: &str) -> Result<(), Error> {
        for line in gitignore.lines() {
            try!(self.add("", line));
        }
        Ok(())
    }

    /// Add a line from a gitignore file to this builder.
    ///
    /// If the line could not be parsed as a glob, then an error is returned.
    pub fn add<P: AsRef<Path>>(
        &mut self,
        from: P,
        mut line: &str,
    ) -> Result<(), Error> {
        if line.starts_with("#") {
            return Ok(());
        }
        if !line.ends_with("\\ ") {
            line = line.trim_right();
        }
        if line.is_empty() {
            return Ok(());
        }
        let mut pat = Pattern {
            from: from.as_ref().to_path_buf(),
            original: line.to_string(),
            pat: String::new(),
            whitelist: false,
            only_dir: false,
        };
        let mut literal_separator = false;
        let has_slash = line.chars().any(|c| c == '/');
        let is_absolute = line.chars().nth(0).unwrap() == '/';
        if line.starts_with("\\!") || line.starts_with("\\#") {
            line = &line[1..];
        } else {
            if line.starts_with("!") {
                pat.whitelist = true;
                line = &line[1..];
            }
            if line.starts_with("/") {
                // `man gitignore` says that if a glob starts with a slash,
                // then the glob can only match the beginning of a path
                // (relative to the location of gitignore). We achieve this by
                // simply banning wildcards from matching /.
                literal_separator = true;
                line = &line[1..];
            }
        }
        // If it ends with a slash, then this should only match directories,
        // but the slash should otherwise not be used while globbing.
        if let Some((i, c)) = line.char_indices().rev().nth(0) {
            if c == '/' {
                pat.only_dir = true;
                line = &line[..i];
            }
        }
        // If there is a literal slash, then we note that so that globbing
        // doesn't let wildcards match slashes.
        pat.pat = line.to_string();
        if has_slash {
            literal_separator = true;
        }
        // If there was a leading slash, then this is a pattern that must
        // match the entire path name. Otherwise, we should let it match
        // anywhere, so use a **/ prefix.
        if !is_absolute {
            // ... but only if we don't already have a **/ prefix.
            if !pat.pat.starts_with("**/") {
                pat.pat = format!("**/{}", pat.pat);
            }
        }
        // If the pattern ends with `/**`, then we should only match everything
        // inside a directory, but not the directory itself. Standard globs
        // will match the directory. So we add `/*` to force the issue.
        if pat.pat.ends_with("/**") {
            pat.pat = format!("{}/*", pat.pat);
        }
        let parsed = try!(
            GlobBuilder::new(&pat.pat)
                .literal_separator(literal_separator)
                .build());
        self.builder.add(parsed);
        self.patterns.push(pat);
        Ok(())
    }
}
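A quick restatement of what `add` does to a few representative gitignore lines may help when reading the tests below. These mappings follow directly from the normalization steps above; they are a summary of the logic, not output captured from the program:

    "# comment"   -> skipped entirely
    "foo"         -> **/foo
    "!foo"        -> **/foo       (whitelist = true)
    "/src/*.rs"   -> src/*.rs     (wildcards may not match '/')
    "foo/"        -> **/foo       (only_dir = true)
    r"\!xy"       -> **/!xy       (escaped '!', so not a whitelist pattern)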
#[cfg(test)]
mod tests {
    use super::Gitignore;

    macro_rules! ignored {
        ($name:ident, $root:expr, $gi:expr, $path:expr) => {
            ignored!($name, $root, $gi, $path, false);
        };
        ($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => {
            #[test]
            fn $name() {
                let gi = Gitignore::from_str($root, $gi).unwrap();
                assert!(gi.matched($path, $is_dir).is_ignored());
            }
        };
    }

    macro_rules! not_ignored {
        ($name:ident, $root:expr, $gi:expr, $path:expr) => {
            not_ignored!($name, $root, $gi, $path, false);
        };
        ($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => {
            #[test]
            fn $name() {
                let gi = Gitignore::from_str($root, $gi).unwrap();
                assert!(!gi.matched($path, $is_dir).is_ignored());
            }
        };
    }

    const ROOT: &'static str = "/home/foobar/rust/rg";

    ignored!(ig1, ROOT, "months", "months");
    ignored!(ig2, ROOT, "*.lock", "Cargo.lock");
    ignored!(ig3, ROOT, "*.rs", "src/main.rs");
    ignored!(ig4, ROOT, "src/*.rs", "src/main.rs");
    ignored!(ig5, ROOT, "/*.c", "cat-file.c");
    ignored!(ig6, ROOT, "/src/*.rs", "src/main.rs");
    ignored!(ig7, ROOT, "!src/main.rs\n*.rs", "src/main.rs");
    ignored!(ig8, ROOT, "foo/", "foo", true);
    ignored!(ig9, ROOT, "**/foo", "foo");
    ignored!(ig10, ROOT, "**/foo", "src/foo");
    ignored!(ig11, ROOT, "**/foo/**", "src/foo/bar");
    ignored!(ig12, ROOT, "**/foo/**", "wat/src/foo/bar/baz");
    ignored!(ig13, ROOT, "**/foo/bar", "foo/bar");
    ignored!(ig14, ROOT, "**/foo/bar", "src/foo/bar");
    ignored!(ig15, ROOT, "abc/**", "abc/x");
    ignored!(ig16, ROOT, "abc/**", "abc/x/y");
    ignored!(ig17, ROOT, "abc/**", "abc/x/y/z");
    ignored!(ig18, ROOT, "a/**/b", "a/b");
    ignored!(ig19, ROOT, "a/**/b", "a/x/b");
    ignored!(ig20, ROOT, "a/**/b", "a/x/y/b");
    ignored!(ig21, ROOT, r"\!xy", "!xy");
    ignored!(ig22, ROOT, r"\#foo", "#foo");
    ignored!(ig23, ROOT, "foo", "./foo");
    ignored!(ig24, ROOT, "target", "grep/target");
    ignored!(ig25, ROOT, "Cargo.lock", "./tabwriter-bin/Cargo.lock");
    ignored!(ig26, ROOT, "/foo/bar/baz", "./foo/bar/baz");
    ignored!(ig27, ROOT, "foo/", "xyz/foo", true);
    ignored!(ig28, ROOT, "src/*.rs", "src/grep/src/main.rs");
    ignored!(ig29, "./src", "/llvm/", "./src/llvm", true);
    ignored!(ig30, ROOT, "node_modules/ ", "node_modules", true);

    not_ignored!(ignot1, ROOT, "amonths", "months");
    not_ignored!(ignot2, ROOT, "monthsa", "months");
    not_ignored!(ignot3, ROOT, "/src/*.rs", "src/grep/src/main.rs");
    not_ignored!(ignot4, ROOT, "/*.c", "mozilla-sha1/sha1.c");
    not_ignored!(ignot5, ROOT, "/src/*.rs", "src/grep/src/main.rs");
    not_ignored!(ignot6, ROOT, "*.rs\n!src/main.rs", "src/main.rs");
    not_ignored!(ignot7, ROOT, "foo/", "foo", false);
    not_ignored!(ignot8, ROOT, "**/foo/**", "wat/src/afoo/bar/baz");
    not_ignored!(ignot9, ROOT, "**/foo/**", "wat/src/fooa/bar/baz");
    not_ignored!(ignot10, ROOT, "**/foo/bar", "foo/src/bar");
    not_ignored!(ignot11, ROOT, "#foo", "#foo");
    not_ignored!(ignot12, ROOT, "\n\n\n", "foo");
    not_ignored!(ignot13, ROOT, "foo/**", "foo", true);
    not_ignored!(
        ignot14, "./third_party/protobuf", "m4/ltoptions.m4",
        "./third_party/protobuf/csharp/src/packages/repositories.config");

    // See: https://github.com/BurntSushi/ripgrep/issues/106
    #[test]
    fn regression_106() {
        Gitignore::from_str("/", " ").unwrap();
    }
}
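For readers skimming the diff, here is a minimal sketch of how this builder is driven outside the test macros above. It only uses calls that appear in this file (GitignoreBuilder::new, add_str, build and Gitignore::matched); the root path and the patterns are placeholder values, not anything taken from the repository:

    use gitignore::GitignoreBuilder;

    fn lock_file_example() {
        let mut builder = GitignoreBuilder::new("/home/foobar/rust/rg");
        // Later lines take precedence, so the whitelist overrides `*.lock`.
        builder.add_str("*.lock\n!Cargo.lock").unwrap();
        let gi = builder.build().unwrap();
        assert!(gi.matched("yarn.lock", false).is_ignored());
        assert!(!gi.matched("Cargo.lock", false).is_ignored());
    }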
490 src/ignore.rs
@@ -1,490 +0,0 @@
/*!
The ignore module is responsible for managing the state required to determine
whether a *single* file path should be searched or not.

In general, there are two ways to ignore a particular file:

1. Specify an ignore rule in some "global" configuration, such as a
   $HOME/.ignore or on the command line.
2. A specific ignore file (like .gitignore) found during directory traversal.

The `IgnoreDir` type handles ignore patterns for any one particular directory
(including "global" ignore patterns), while the `Ignore` type handles a stack
of `IgnoreDir`s for use during directory traversal.
*/

use std::error::Error as StdError;
use std::ffi::OsString;
use std::fmt;
use std::io;
use std::path::{Path, PathBuf};

use gitignore::{self, Gitignore, GitignoreBuilder, Match, Pattern};
use pathutil::{file_name, is_hidden};
use types::Types;

const IGNORE_NAMES: &'static [&'static str] = &[
    ".gitignore",
    ".ignore",
    ".rgignore",
];

/// Represents an error that can occur when parsing a gitignore file.
#[derive(Debug)]
pub enum Error {
    Gitignore(gitignore::Error),
    Io {
        path: PathBuf,
        err: io::Error,
    },
}

impl Error {
    fn from_io<P: AsRef<Path>>(path: P, err: io::Error) -> Error {
        Error::Io { path: path.as_ref().to_path_buf(), err: err }
    }
}

impl StdError for Error {
    fn description(&self) -> &str {
        match *self {
            Error::Gitignore(ref err) => err.description(),
            Error::Io { ref err, .. } => err.description(),
        }
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::Gitignore(ref err) => err.fmt(f),
            Error::Io { ref path, ref err } => {
                write!(f, "{}: {}", path.display(), err)
            }
        }
    }
}

impl From<gitignore::Error> for Error {
    fn from(err: gitignore::Error) -> Error {
        Error::Gitignore(err)
    }
}

/// Ignore represents a collection of ignore patterns organized by directory.
/// In particular, a stack is maintained, where the top of the stack
/// corresponds to the current directory being searched and the bottom of the
/// stack represents the root of a search. Ignore patterns at the top of the
/// stack take precedence over ignore patterns at the bottom of the stack.
pub struct Ignore {
    /// A stack of ignore patterns at each directory level of traversal.
    /// A directory that contributes no ignore patterns is `None`.
    stack: Vec<IgnoreDir>,
    /// A stack of parent directories above the root of the current search.
    parent_stack: Vec<IgnoreDir>,
    /// A set of override globs that are always checked first. A match (whether
    /// it's whitelist or blacklist) trumps anything in stack.
    overrides: Overrides,
    /// A file type matcher.
    types: Types,
    /// Whether to ignore hidden files or not.
    ignore_hidden: bool,
    /// When true, don't look at .gitignore or .ignore files for ignore
    /// rules.
    no_ignore: bool,
    /// When true, don't look at .gitignore files for ignore rules.
    no_ignore_vcs: bool,
}

impl Ignore {
    /// Create an empty set of ignore patterns.
    pub fn new() -> Ignore {
        Ignore {
            stack: vec![],
            parent_stack: vec![],
            overrides: Overrides::new(None),
            types: Types::empty(),
            ignore_hidden: true,
            no_ignore: false,
            no_ignore_vcs: true,
        }
    }

    /// Set whether hidden files/folders should be ignored (defaults to true).
    pub fn ignore_hidden(&mut self, yes: bool) -> &mut Ignore {
        self.ignore_hidden = yes;
        self
    }

    /// When set, ignore files are ignored.
    pub fn no_ignore(&mut self, yes: bool) -> &mut Ignore {
        self.no_ignore = yes;
        self
    }

    /// When set, VCS ignore files are ignored.
    pub fn no_ignore_vcs(&mut self, yes: bool) -> &mut Ignore {
        self.no_ignore_vcs = yes;
        self
    }

    /// Add a set of globs that overrides all other match logic.
    pub fn add_override(&mut self, gi: Gitignore) -> &mut Ignore {
        self.overrides = Overrides::new(Some(gi));
        self
    }

    /// Add a file type matcher. The file type matcher has the lowest
    /// precedence.
    pub fn add_types(&mut self, types: Types) -> &mut Ignore {
        self.types = types;
        self
    }

    /// Push parent directories of `path` on to the stack.
    pub fn push_parents<P: AsRef<Path>>(
        &mut self,
        path: P,
    ) -> Result<(), Error> {
        let path = try!(path.as_ref().canonicalize().map_err(|err| {
            Error::from_io(path.as_ref(), err)
        }));
        let mut path = &*path;
        let mut saw_git = path.join(".git").is_dir();
        let mut ignore_names = IGNORE_NAMES.to_vec();
        if self.no_ignore_vcs {
            ignore_names.retain(|&name| name != ".gitignore");
        }
        let mut ignore_dir_results = vec![];
        while let Some(parent) = path.parent() {
            if self.no_ignore {
                ignore_dir_results.push(Ok(IgnoreDir::empty(parent)));
            } else {
                if saw_git {
                    ignore_names.retain(|&name| name != ".gitignore");
                } else {
                    saw_git = parent.join(".git").is_dir();
                }
                let ignore_dir_result =
                    IgnoreDir::with_ignore_names(parent, ignore_names.iter());
                ignore_dir_results.push(ignore_dir_result);
            }
            path = parent;
        }

        for ignore_dir_result in ignore_dir_results.into_iter().rev() {
            self.parent_stack.push(try!(ignore_dir_result));
        }
        Ok(())
    }

    /// Add a directory to the stack.
    ///
    /// Note that even if this returns an error, the directory is added to the
    /// stack (and therefore should be popped).
    pub fn push<P: AsRef<Path>>(&mut self, path: P) -> Result<(), Error> {
        if self.no_ignore {
            self.stack.push(IgnoreDir::empty(path));
            Ok(())
        } else if self.no_ignore_vcs {
            self.push_ignore_dir(IgnoreDir::without_vcs(path))
        } else {
            self.push_ignore_dir(IgnoreDir::new(path))
        }
    }

    /// Pushes the result of building a directory matcher on to the stack.
    ///
    /// If the result given contains an error, then it is returned.
    pub fn push_ignore_dir(
        &mut self,
        result: Result<IgnoreDir, Error>,
    ) -> Result<(), Error> {
        match result {
            Ok(id) => {
                self.stack.push(id);
                Ok(())
            }
            Err(err) => {
                // Don't leave the stack in an inconsistent state.
                self.stack.push(IgnoreDir::empty("error"));
                Err(err)
            }
        }
    }

    /// Pop a directory from the stack.
    ///
    /// This panics if the stack is empty.
    pub fn pop(&mut self) {
        self.stack.pop().expect("non-empty stack");
    }

    /// Returns true if and only if the given file path should be ignored.
    pub fn ignored<P: AsRef<Path>>(&self, path: P, is_dir: bool) -> bool {
        let path = path.as_ref();
        let mat = self.overrides.matched(path, is_dir);
        if let Some(is_ignored) = self.ignore_match(path, mat) {
            return is_ignored;
        }
        let mut whitelisted = false;
        if !self.no_ignore {
            for id in self.stack.iter().rev() {
                let mat = id.matched(path, is_dir);
                if let Some(is_ignored) = self.ignore_match(path, mat) {
                    if is_ignored {
                        return true;
                    }
                    // If this path is whitelisted by an ignore, then
                    // fallthrough and let the file type matcher have a say.
                    whitelisted = true;
                    break;
                }
            }
            // If the file has been whitelisted, then we have to stop checking
            // parent directories. The only thing that can override a whitelist
            // at this point is a type filter.
            if !whitelisted {
                let mut path = path.to_path_buf();
                for id in self.parent_stack.iter().rev() {
                    if let Some(ref dirname) = id.name {
                        path = Path::new(dirname).join(path);
                    }
                    let mat = id.matched(&*path, is_dir);
                    if let Some(is_ignored) = self.ignore_match(&*path, mat) {
                        if is_ignored {
                            return true;
                        }
                        // If this path is whitelisted by an ignore, then
                        // fallthrough and let the file type matcher have a
                        // say.
                        whitelisted = true;
                        break;
                    }
                }
            }
        }
        let mat = self.types.matched(path, is_dir);
        if let Some(is_ignored) = self.ignore_match(path, mat) {
            if is_ignored {
                return true;
            }
            whitelisted = true;
        }
        if !whitelisted && self.ignore_hidden && is_hidden(&path) {
            debug!("{} ignored because it is hidden", path.display());
            return true;
        }
        false
    }

    /// Returns true if the given match says the given pattern should be
    /// ignored or false if the given pattern should be explicitly whitelisted.
    /// Returns None otherwise.
    pub fn ignore_match<P: AsRef<Path>>(
        &self,
        path: P,
        mat: Match,
    ) -> Option<bool> {
        let path = path.as_ref();
        match mat {
            Match::Whitelist(ref pat) => {
                debug!("{} whitelisted by {:?}", path.display(), pat);
                Some(false)
            }
            Match::Ignored(ref pat) => {
                debug!("{} ignored by {:?}", path.display(), pat);
                Some(true)
            }
            Match::None => None,
        }
    }
}

/// IgnoreDir represents a set of ignore patterns retrieved from a single
/// directory.
#[derive(Debug)]
pub struct IgnoreDir {
    /// The path to this directory as given.
    path: PathBuf,
    /// The directory name, if one exists.
    name: Option<OsString>,
    /// A single accumulation of glob patterns for this directory, matched
    /// using gitignore semantics.
    ///
    /// This will include patterns from rgignore as well. The patterns are
    /// ordered so that precedence applies automatically (e.g., rgignore
    /// patterns procede gitignore patterns).
    gi: Option<Gitignore>,
    // TODO(burntsushi): Matching other types of glob patterns that don't
    // conform to gitignore will probably require refactoring this approach.
}

impl IgnoreDir {
    /// Create a new matcher for the given directory.
    pub fn new<P: AsRef<Path>>(path: P) -> Result<IgnoreDir, Error> {
        IgnoreDir::with_ignore_names(path, IGNORE_NAMES.iter())
    }

    /// Create a new matcher for the given directory.
    ///
    /// Don't respect VCS ignore files.
    pub fn without_vcs<P: AsRef<Path>>(path: P) -> Result<IgnoreDir, Error> {
        let names = IGNORE_NAMES.iter().filter(|name| **name != ".gitignore");
        IgnoreDir::with_ignore_names(path, names)
    }

    /// Create a new IgnoreDir that never matches anything with the given path.
    pub fn empty<P: AsRef<Path>>(path: P) -> IgnoreDir {
        IgnoreDir {
            path: path.as_ref().to_path_buf(),
            name: file_name(path.as_ref()).map(|s| s.to_os_string()),
            gi: None,
        }
    }

    /// Create a new matcher for the given directory using only the ignore
    /// patterns found in the file names given.
    ///
    /// If no ignore glob patterns could be found in the directory then `None`
    /// is returned.
    ///
    /// Note that the order of the names given is meaningful. Names appearing
    /// later in the list have precedence over names appearing earlier in the
    /// list.
    pub fn with_ignore_names<P: AsRef<Path>, S, I>(
        path: P,
        names: I,
    ) -> Result<IgnoreDir, Error>
    where P: AsRef<Path>, S: AsRef<str>, I: Iterator<Item=S> {
        let mut id = IgnoreDir::empty(path);
        let mut ok = false;
        let mut builder = GitignoreBuilder::new(&id.path);
        // The ordering here is important. Later globs have higher precedence.
        for name in names {
            ok = builder.add_path(id.path.join(name.as_ref())).is_ok() || ok;
        }
        if !ok {
            return Ok(id);
        }
        id.gi = Some(try!(builder.build()));
        Ok(id)
    }

    /// Returns true if and only if the given file path should be ignored
    /// according to the globs in this directory. `is_dir` should be true if
    /// the path refers to a directory and false otherwise.
    ///
    /// Before matching path, its prefix (as determined by a common suffix
    /// of this directory) is stripped. If there is
    /// no common suffix/prefix overlap, then path is assumed to reside
    /// directly in this directory.
    ///
    /// If the given path has a `./` prefix then it is stripped before
    /// matching.
    pub fn matched<P: AsRef<Path>>(&self, path: P, is_dir: bool) -> Match {
        self.gi.as_ref()
            .map(|gi| gi.matched(path, is_dir))
            .unwrap_or(Match::None)
    }
}

/// Manages a set of overrides provided explicitly by the end user.
struct Overrides {
    gi: Option<Gitignore>,
    unmatched_pat: Pattern,
}

impl Overrides {
    /// Creates a new set of overrides from the gitignore matcher provided.
    /// If no matcher is provided, then the resulting overrides have no effect.
    fn new(gi: Option<Gitignore>) -> Overrides {
        Overrides {
            gi: gi,
            unmatched_pat: Pattern {
                from: Path::new("<argv>").to_path_buf(),
                original: "<none>".to_string(),
                pat: "<none>".to_string(),
                whitelist: false,
                only_dir: false,
            },
        }
    }

    /// Returns a match for the given path against this set of overrides.
    ///
    /// If there are no overrides, then this always returns Match::None.
    ///
    /// If there is at least one positive override, then this never returns
    /// Match::None (and interpreting non-matches as ignored) unless is_dir
    /// is true.
    pub fn matched<P: AsRef<Path>>(&self, path: P, is_dir: bool) -> Match {
        let path = path.as_ref();
        self.gi.as_ref()
            .map(|gi| {
                let mat = gi.matched_stripped(path, is_dir).invert();
                if mat.is_none() && !is_dir {
                    if gi.num_ignores() > 0 {
                        return Match::Ignored(&self.unmatched_pat);
                    }
                }
                mat
            })
            .unwrap_or(Match::None)
    }
}

#[cfg(test)]
mod tests {
    use std::path::Path;
    use gitignore::GitignoreBuilder;
    use super::IgnoreDir;

    macro_rules! ignored_dir {
        ($name:ident, $root:expr, $gi:expr, $xi:expr, $path:expr) => {
            #[test]
            fn $name() {
                let mut builder = GitignoreBuilder::new(&$root);
                builder.add_str($gi).unwrap();
                builder.add_str($xi).unwrap();
                let gi = builder.build().unwrap();
                let id = IgnoreDir {
                    path: Path::new($root).to_path_buf(),
                    name: Path::new($root).file_name().map(|s| {
                        s.to_os_string()
                    }),
                    gi: Some(gi),
                };
                assert!(id.matched($path, false).is_ignored());
            }
        };
    }

    macro_rules! not_ignored_dir {
        ($name:ident, $root:expr, $gi:expr, $xi:expr, $path:expr) => {
            #[test]
            fn $name() {
                let mut builder = GitignoreBuilder::new(&$root);
                builder.add_str($gi).unwrap();
                builder.add_str($xi).unwrap();
                let gi = builder.build().unwrap();
                let id = IgnoreDir {
                    path: Path::new($root).to_path_buf(),
                    name: Path::new($root).file_name().map(|s| {
                        s.to_os_string()
                    }),
                    gi: Some(gi),
                };
                assert!(!id.matched($path, false).is_ignored());
            }
        };
    }

    const ROOT: &'static str = "/home/foobar/rust/rg";

    ignored_dir!(id1, ROOT, "src/main.rs", "", "src/main.rs");
    ignored_dir!(id2, ROOT, "", "src/main.rs", "src/main.rs");
    ignored_dir!(id3, ROOT, "!src/main.rs", "*.rs", "src/main.rs");

    not_ignored_dir!(idnot1, ROOT, "*.rs", "!src/main.rs", "src/main.rs");
}
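To make the push/pop discipline of the `Ignore` type above concrete, here is an illustrative hand-rolled walk over it. This is not code from the repository: the directory iteration and error handling are deliberately simplified, and the driver only logs errors from `push` because, as its doc comment says, the directory is pushed even on error and must still be popped.

    use std::fs;
    use std::path::Path;

    use ignore::Ignore;

    fn walk(ig: &mut Ignore, dir: &Path) {
        // Even on error the directory was pushed, so it must be popped below.
        if let Err(err) = ig.push(dir) {
            eprintln!("{}: {}", dir.display(), err);
        }
        if let Ok(entries) = fs::read_dir(dir) {
            for entry in entries.filter_map(|e| e.ok()) {
                let path = entry.path();
                let is_dir = path.is_dir();
                // Overrides, per-directory ignores, parent ignores, file types
                // and the hidden-file rule are all consulted here.
                if ig.ignored(&path, is_dir) {
                    continue;
                }
                if is_dir {
                    walk(ig, &path);
                } else {
                    println!("{}", path.display());
                }
            }
        }
        ig.pop();
    }

    fn list_unignored_files() {
        let mut ig = Ignore::new();
        // `Ignore::new` starts with no_ignore_vcs = true; turn .gitignore
        // handling back on for this example.
        ig.no_ignore_vcs(false);
        walk(&mut ig, Path::new("."));
    }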
570 src/main.rs
@@ -1,10 +1,11 @@
|
|||||||
extern crate deque;
|
extern crate atty;
|
||||||
extern crate docopt;
|
extern crate bytecount;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate clap;
|
||||||
|
extern crate encoding_rs;
|
||||||
extern crate env_logger;
|
extern crate env_logger;
|
||||||
extern crate globset;
|
|
||||||
extern crate grep;
|
extern crate grep;
|
||||||
#[cfg(windows)]
|
extern crate ignore;
|
||||||
extern crate kernel32;
|
|
||||||
#[macro_use]
|
#[macro_use]
|
||||||
extern crate lazy_static;
|
extern crate lazy_static;
|
||||||
extern crate libc;
|
extern crate libc;
|
||||||
@@ -14,37 +15,19 @@ extern crate memchr;
|
|||||||
extern crate memmap;
|
extern crate memmap;
|
||||||
extern crate num_cpus;
|
extern crate num_cpus;
|
||||||
extern crate regex;
|
extern crate regex;
|
||||||
extern crate rustc_serialize;
|
extern crate same_file;
|
||||||
extern crate term;
|
extern crate termcolor;
|
||||||
extern crate thread_local;
|
|
||||||
extern crate walkdir;
|
|
||||||
#[cfg(windows)]
|
|
||||||
extern crate winapi;
|
|
||||||
|
|
||||||
use std::error::Error;
|
use std::error::Error;
|
||||||
use std::fs::File;
|
|
||||||
use std::io;
|
|
||||||
use std::path::Path;
|
|
||||||
use std::process;
|
use std::process;
|
||||||
use std::result;
|
use std::result;
|
||||||
use std::sync::{Arc, Mutex};
|
use std::sync::Arc;
|
||||||
use std::sync::atomic::{AtomicBool, Ordering};
|
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||||
|
use std::sync::mpsc;
|
||||||
use std::thread;
|
use std::thread;
|
||||||
use std::cmp;
|
|
||||||
|
|
||||||
use deque::{Stealer, Stolen};
|
|
||||||
use grep::Grep;
|
|
||||||
use memmap::{Mmap, Protection};
|
|
||||||
use term::Terminal;
|
|
||||||
use walkdir::DirEntry;
|
|
||||||
|
|
||||||
use args::Args;
|
use args::Args;
|
||||||
use out::{ColoredTerminal, Out};
|
use worker::Work;
|
||||||
use pathutil::strip_prefix;
|
|
||||||
use printer::Printer;
|
|
||||||
use search_stream::InputBuffer;
|
|
||||||
#[cfg(windows)]
|
|
||||||
use terminal_win::WindowsBuffer;
|
|
||||||
|
|
||||||
macro_rules! errored {
|
macro_rules! errored {
|
||||||
($($tt:tt)*) => {
|
($($tt:tt)*) => {
|
||||||
@@ -59,25 +42,21 @@ macro_rules! eprintln {
|
|||||||
}}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
mod app;
|
||||||
mod args;
|
mod args;
|
||||||
mod atty;
|
mod decoder;
|
||||||
mod gitignore;
|
|
||||||
mod ignore;
|
|
||||||
mod out;
|
|
||||||
mod pathutil;
|
mod pathutil;
|
||||||
mod printer;
|
mod printer;
|
||||||
mod search_buffer;
|
mod search_buffer;
|
||||||
mod search_stream;
|
mod search_stream;
|
||||||
#[cfg(windows)]
|
mod unescape;
|
||||||
mod terminal_win;
|
mod worker;
|
||||||
mod types;
|
|
||||||
mod walk;
|
|
||||||
|
|
||||||
pub type Result<T> = result::Result<T, Box<Error + Send + Sync>>;
|
pub type Result<T> = result::Result<T, Box<Error + Send + Sync>>;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
match Args::parse().and_then(run) {
|
match Args::parse().map(Arc::new).and_then(run) {
|
||||||
Ok(count) if count == 0 => process::exit(1),
|
Ok(0) => process::exit(1),
|
||||||
Ok(_) => process::exit(0),
|
Ok(_) => process::exit(0),
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
eprintln!("{}", err);
|
eprintln!("{}", err);
|
||||||
@@ -86,154 +65,177 @@ fn main() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run(args: Args) -> Result<u64> {
|
fn run(args: Arc<Args>) -> Result<u64> {
|
||||||
let args = Arc::new(args);
|
if args.never_match() {
|
||||||
let paths = args.paths();
|
return Ok(0);
|
||||||
let threads = cmp::max(1, args.threads() - 1);
|
}
|
||||||
let isone =
|
let threads = args.threads();
|
||||||
paths.len() == 1 && (paths[0] == Path::new("-") || paths[0].is_file());
|
|
||||||
if args.files() {
|
if args.files() {
|
||||||
return run_files(args.clone());
|
if threads == 1 || args.is_one_path() {
|
||||||
}
|
run_files_one_thread(args)
|
||||||
if args.type_list() {
|
|
||||||
return run_types(args.clone());
|
|
||||||
}
|
|
||||||
if threads == 1 || isone {
|
|
||||||
return run_one_thread(args.clone());
|
|
||||||
}
|
|
||||||
|
|
||||||
let out = Arc::new(Mutex::new(args.out()));
|
|
||||||
let quiet_matched = QuietMatched::new(args.quiet());
|
|
||||||
let mut workers = vec![];
|
|
||||||
|
|
||||||
let workq = {
|
|
||||||
let (workq, stealer) = deque::new();
|
|
||||||
for _ in 0..threads {
|
|
||||||
let worker = MultiWorker {
|
|
||||||
chan_work: stealer.clone(),
|
|
||||||
quiet_matched: quiet_matched.clone(),
|
|
||||||
out: out.clone(),
|
|
||||||
outbuf: Some(args.outbuf()),
|
|
||||||
worker: Worker {
|
|
||||||
args: args.clone(),
|
|
||||||
inpbuf: args.input_buffer(),
|
|
||||||
grep: args.grep(),
|
|
||||||
match_count: 0,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
workers.push(thread::spawn(move || worker.run()));
|
|
||||||
}
|
|
||||||
workq
|
|
||||||
};
|
|
||||||
let mut paths_searched: u64 = 0;
|
|
||||||
for p in paths {
|
|
||||||
if quiet_matched.has_match() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if p == Path::new("-") {
|
|
||||||
paths_searched += 1;
|
|
||||||
workq.push(Work::Stdin);
|
|
||||||
} else {
|
} else {
|
||||||
for ent in try!(args.walker(p)) {
|
run_files_parallel(args)
|
||||||
if quiet_matched.has_match() {
|
}
|
||||||
break;
|
} else if args.type_list() {
|
||||||
|
run_types(args)
|
||||||
|
} else if threads == 1 || args.is_one_path() {
|
||||||
|
run_one_thread(args)
|
||||||
|
} else {
|
||||||
|
run_parallel(args)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run_parallel(args: Arc<Args>) -> Result<u64> {
|
||||||
|
let bufwtr = Arc::new(args.buffer_writer());
|
||||||
|
let quiet_matched = args.quiet_matched();
|
||||||
|
let paths_searched = Arc::new(AtomicUsize::new(0));
|
||||||
|
let match_count = Arc::new(AtomicUsize::new(0));
|
||||||
|
|
||||||
|
args.walker_parallel().run(|| {
|
||||||
|
let args = args.clone();
|
||||||
|
let quiet_matched = quiet_matched.clone();
|
||||||
|
let paths_searched = paths_searched.clone();
|
||||||
|
let match_count = match_count.clone();
|
||||||
|
let bufwtr = bufwtr.clone();
|
||||||
|
let mut buf = bufwtr.buffer();
|
||||||
|
let mut worker = args.worker();
|
||||||
|
Box::new(move |result| {
|
||||||
|
use ignore::WalkState::*;
|
||||||
|
|
||||||
|
if quiet_matched.has_match() {
|
||||||
|
return Quit;
|
||||||
|
}
|
||||||
|
let dent = match get_or_log_dir_entry(
|
||||||
|
result,
|
||||||
|
args.stdout_handle(),
|
||||||
|
args.no_messages(),
|
||||||
|
) {
|
||||||
|
None => return Continue,
|
||||||
|
Some(dent) => dent,
|
||||||
|
};
|
||||||
|
paths_searched.fetch_add(1, Ordering::SeqCst);
|
||||||
|
buf.clear();
|
||||||
|
{
|
||||||
|
// This block actually executes the search and prints the
|
||||||
|
// results into outbuf.
|
||||||
|
let mut printer = args.printer(&mut buf);
|
||||||
|
let count =
|
||||||
|
if dent.is_stdin() {
|
||||||
|
worker.run(&mut printer, Work::Stdin)
|
||||||
|
} else {
|
||||||
|
worker.run(&mut printer, Work::DirEntry(dent))
|
||||||
|
};
|
||||||
|
match_count.fetch_add(count as usize, Ordering::SeqCst);
|
||||||
|
if quiet_matched.set_match(count > 0) {
|
||||||
|
return Quit;
|
||||||
}
|
}
|
||||||
paths_searched += 1;
|
}
|
||||||
workq.push(Work::File(ent));
|
// BUG(burntsushi): We should handle this error instead of ignoring
|
||||||
|
// it. See: https://github.com/BurntSushi/ripgrep/issues/200
|
||||||
|
let _ = bufwtr.print(&buf);
|
||||||
|
Continue
|
||||||
|
})
|
||||||
|
});
|
||||||
|
if !args.paths().is_empty() && paths_searched.load(Ordering::SeqCst) == 0 {
|
||||||
|
if !args.no_messages() {
|
||||||
|
eprint_nothing_searched();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(match_count.load(Ordering::SeqCst) as u64)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run_one_thread(args: Arc<Args>) -> Result<u64> {
|
||||||
|
let stdout = args.stdout();
|
||||||
|
let mut stdout = stdout.lock();
|
||||||
|
let mut worker = args.worker();
|
||||||
|
let mut paths_searched: u64 = 0;
|
||||||
|
let mut match_count = 0;
|
||||||
|
for result in args.walker() {
|
||||||
|
let dent = match get_or_log_dir_entry(
|
||||||
|
result,
|
||||||
|
args.stdout_handle(),
|
||||||
|
args.no_messages(),
|
||||||
|
) {
|
||||||
|
None => continue,
|
||||||
|
Some(dent) => dent,
|
||||||
|
};
|
||||||
|
let mut printer = args.printer(&mut stdout);
|
||||||
|
if match_count > 0 {
|
||||||
|
if args.quiet() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
if let Some(sep) = args.file_separator() {
|
||||||
|
printer = printer.file_separator(sep);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
paths_searched += 1;
|
||||||
|
match_count +=
|
||||||
|
if dent.is_stdin() {
|
||||||
|
worker.run(&mut printer, Work::Stdin)
|
||||||
|
} else {
|
||||||
|
worker.run(&mut printer, Work::DirEntry(dent))
|
||||||
|
};
|
||||||
}
|
}
|
||||||
if !paths.is_empty() && paths_searched == 0 {
|
if !args.paths().is_empty() && paths_searched == 0 {
|
||||||
eprintln!("No files were searched, which means ripgrep probably \
|
if !args.no_messages() {
|
||||||
applied a filter you didn't expect. \
|
eprint_nothing_searched();
|
||||||
Try running again with --debug.");
|
}
|
||||||
}
|
|
||||||
for _ in 0..workers.len() {
|
|
||||||
workq.push(Work::Quit);
|
|
||||||
}
|
|
||||||
let mut match_count = 0;
|
|
||||||
for worker in workers {
|
|
||||||
match_count += worker.join().unwrap();
|
|
||||||
}
|
}
|
||||||
Ok(match_count)
|
Ok(match_count)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_one_thread(args: Arc<Args>) -> Result<u64> {
|
fn run_files_parallel(args: Arc<Args>) -> Result<u64> {
|
||||||
let mut worker = Worker {
|
let print_args = args.clone();
|
||||||
args: args.clone(),
|
let (tx, rx) = mpsc::channel::<ignore::DirEntry>();
|
||||||
inpbuf: args.input_buffer(),
|
let print_thread = thread::spawn(move || {
|
||||||
grep: args.grep(),
|
let stdout = print_args.stdout();
|
||||||
match_count: 0,
|
let mut printer = print_args.printer(stdout.lock());
|
||||||
};
|
let mut file_count = 0;
|
||||||
let paths = args.paths();
|
for dent in rx.iter() {
|
||||||
let mut term = args.stdout();
|
printer.path(dent.path());
|
||||||
|
file_count += 1;
|
||||||
let mut paths_searched: u64 = 0;
|
|
||||||
for p in paths {
|
|
||||||
if args.quiet() && worker.match_count > 0 {
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
if p == Path::new("-") {
|
file_count
|
||||||
paths_searched += 1;
|
});
|
||||||
let mut printer = args.printer(&mut term);
|
args.walker_parallel().run(move || {
|
||||||
if worker.match_count > 0 {
|
let args = args.clone();
|
||||||
if let Some(sep) = args.file_separator() {
|
let tx = tx.clone();
|
||||||
printer = printer.file_separator(sep);
|
Box::new(move |result| {
|
||||||
}
|
if let Some(dent) = get_or_log_dir_entry(
|
||||||
|
result,
|
||||||
|
args.stdout_handle(),
|
||||||
|
args.no_messages(),
|
||||||
|
) {
|
||||||
|
tx.send(dent).unwrap();
|
||||||
}
|
}
|
||||||
worker.do_work(&mut printer, WorkReady::Stdin);
|
ignore::WalkState::Continue
|
||||||
} else {
|
})
|
||||||
for ent in try!(args.walker(p)) {
|
});
|
||||||
paths_searched += 1;
|
Ok(print_thread.join().unwrap())
|
||||||
let mut printer = args.printer(&mut term);
|
|
||||||
if worker.match_count > 0 {
|
|
||||||
if args.quiet() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if let Some(sep) = args.file_separator() {
|
|
||||||
printer = printer.file_separator(sep);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let file = match File::open(ent.path()) {
|
|
||||||
Ok(file) => file,
|
|
||||||
Err(err) => {
|
|
||||||
eprintln!("{}: {}", ent.path().display(), err);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
worker.do_work(&mut printer, WorkReady::DirFile(ent, file));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !paths.is_empty() && paths_searched == 0 {
|
|
||||||
eprintln!("No files were searched, which means ripgrep probably \
|
|
||||||
applied a filter you didn't expect. \
|
|
||||||
Try running again with --debug.");
|
|
||||||
}
|
|
||||||
Ok(worker.match_count)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_files(args: Arc<Args>) -> Result<u64> {
|
fn run_files_one_thread(args: Arc<Args>) -> Result<u64> {
|
||||||
let term = args.stdout();
|
let stdout = args.stdout();
|
||||||
let mut printer = args.printer(term);
|
let mut printer = args.printer(stdout.lock());
|
||||||
let mut file_count = 0;
|
let mut file_count = 0;
|
||||||
for p in args.paths() {
|
for result in args.walker() {
|
||||||
if p == Path::new("-") {
|
let dent = match get_or_log_dir_entry(
|
||||||
printer.path(&Path::new("<stdin>"));
|
result,
|
||||||
file_count += 1;
|
args.stdout_handle(),
|
||||||
} else {
|
args.no_messages(),
|
||||||
for ent in try!(args.walker(p)) {
|
) {
|
||||||
printer.path(ent.path());
|
None => continue,
|
||||||
file_count += 1;
|
Some(dent) => dent,
|
||||||
}
|
};
|
||||||
}
|
printer.path(dent.path());
|
||||||
|
file_count += 1;
|
||||||
}
|
}
|
||||||
Ok(file_count)
|
Ok(file_count)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn run_types(args: Arc<Args>) -> Result<u64> {
|
fn run_types(args: Arc<Args>) -> Result<u64> {
|
||||||
let term = args.stdout();
|
let stdout = args.stdout();
|
||||||
let mut printer = args.printer(term);
|
let mut printer = args.printer(stdout.lock());
|
||||||
let mut ty_count = 0;
|
let mut ty_count = 0;
|
||||||
for def in args.type_defs() {
|
for def in args.type_defs() {
|
||||||
printer.type_def(def);
|
printer.type_def(def);
|
||||||
@@ -242,167 +244,85 @@ fn run_types(args: Arc<Args>) -> Result<u64> {
|
|||||||
Ok(ty_count)
|
Ok(ty_count)
|
||||||
}
|
}
|
||||||
|
|
||||||
enum Work {
|
fn get_or_log_dir_entry(
|
||||||
Stdin,
|
result: result::Result<ignore::DirEntry, ignore::Error>,
|
||||||
File(DirEntry),
|
stdout_handle: Option<&same_file::Handle>,
|
||||||
Quit,
|
no_messages: bool,
|
||||||
}
|
) -> Option<ignore::DirEntry> {
|
||||||
|
match result {
|
||||||
enum WorkReady {
|
Err(err) => {
|
||||||
Stdin,
|
if !no_messages {
|
||||||
DirFile(DirEntry, File),
|
|
||||||
}
|
|
||||||
|
|
||||||
struct MultiWorker {
|
|
||||||
chan_work: Stealer<Work>,
|
|
||||||
quiet_matched: QuietMatched,
|
|
||||||
out: Arc<Mutex<Out>>,
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
outbuf: Option<ColoredTerminal<term::TerminfoTerminal<Vec<u8>>>>,
|
|
||||||
#[cfg(windows)]
|
|
||||||
outbuf: Option<ColoredTerminal<WindowsBuffer>>,
|
|
||||||
worker: Worker,
|
|
||||||
}
|
|
||||||
|
|
||||||
struct Worker {
|
|
||||||
args: Arc<Args>,
|
|
||||||
inpbuf: InputBuffer,
|
|
||||||
grep: Grep,
|
|
||||||
match_count: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl MultiWorker {
|
|
||||||
fn run(mut self) -> u64 {
|
|
||||||
loop {
|
|
||||||
if self.quiet_matched.has_match() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
let work = match self.chan_work.steal() {
|
|
||||||
Stolen::Empty | Stolen::Abort => continue,
|
|
||||||
Stolen::Data(Work::Quit) => break,
|
|
||||||
Stolen::Data(Work::Stdin) => WorkReady::Stdin,
|
|
||||||
Stolen::Data(Work::File(ent)) => {
|
|
||||||
match File::open(ent.path()) {
|
|
||||||
Ok(file) => WorkReady::DirFile(ent, file),
|
|
||||||
Err(err) => {
|
|
||||||
eprintln!("{}: {}", ent.path().display(), err);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let mut outbuf = self.outbuf.take().unwrap();
|
|
||||||
outbuf.clear();
|
|
||||||
let mut printer = self.worker.args.printer(outbuf);
|
|
||||||
self.worker.do_work(&mut printer, work);
|
|
||||||
if self.quiet_matched.set_match(self.worker.match_count > 0) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
let outbuf = printer.into_inner();
|
|
||||||
if !outbuf.get_ref().is_empty() {
|
|
||||||
let mut out = self.out.lock().unwrap();
|
|
||||||
out.write(&outbuf);
|
|
||||||
}
|
|
||||||
self.outbuf = Some(outbuf);
|
|
||||||
}
|
|
||||||
self.worker.match_count
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Worker {
|
|
||||||
fn do_work<W: Terminal + Send>(
|
|
||||||
&mut self,
|
|
||||||
printer: &mut Printer<W>,
|
|
||||||
work: WorkReady,
|
|
||||||
) {
|
|
||||||
let result = match work {
|
|
||||||
WorkReady::Stdin => {
|
|
||||||
let stdin = io::stdin();
|
|
||||||
let stdin = stdin.lock();
|
|
||||||
self.search(printer, &Path::new("<stdin>"), stdin)
|
|
||||||
}
|
|
||||||
WorkReady::DirFile(ent, file) => {
|
|
||||||
let mut path = ent.path();
|
|
||||||
if let Some(p) = strip_prefix("./", path) {
|
|
||||||
path = p;
|
|
||||||
}
|
|
||||||
if self.args.mmap() {
|
|
||||||
self.search_mmap(printer, path, &file)
|
|
||||||
} else {
|
|
||||||
self.search(printer, path, file)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
match result {
|
|
||||||
Ok(count) => {
|
|
||||||
self.match_count += count;
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
eprintln!("{}", err);
|
eprintln!("{}", err);
|
||||||
}
|
}
|
||||||
|
None
|
||||||
}
|
}
|
||||||
}
|
Ok(dent) => {
|
||||||
|
if let Some(err) = dent.error() {
|
||||||
fn search<R: io::Read, W: Terminal + Send>(
|
if !no_messages {
|
||||||
&mut self,
|
eprintln!("{}", err);
|
||||||
printer: &mut Printer<W>,
|
}
|
||||||
path: &Path,
|
}
|
||||||
rdr: R,
|
let ft = match dent.file_type() {
|
||||||
) -> Result<u64> {
|
None => return Some(dent), // entry is stdin
|
||||||
self.args.searcher(
|
Some(ft) => ft,
|
||||||
&mut self.inpbuf,
|
};
|
||||||
printer,
|
// A depth of 0 means the user gave the path explicitly, so we
|
||||||
&self.grep,
|
// should always try to search it.
|
||||||
path,
|
if dent.depth() == 0 && !ft.is_dir() {
|
||||||
rdr,
|
return Some(dent);
|
||||||
).run().map_err(From::from)
|
} else if !ft.is_file() {
|
||||||
}
|
return None;
|
||||||
|
}
|
||||||
fn search_mmap<W: Terminal + Send>(
|
// If we are redirecting stdout to a file, then don't search that
|
||||||
&mut self,
|
// file.
|
||||||
printer: &mut Printer<W>,
|
if is_stdout_file(&dent, stdout_handle, no_messages) {
|
||||||
path: &Path,
|
return None;
|
||||||
file: &File,
|
}
|
||||||
) -> Result<u64> {
|
Some(dent)
|
||||||
if try!(file.metadata()).len() == 0 {
|
|
||||||
// Opening a memory map with an empty file results in an error.
|
|
||||||
// However, this may not actually be an empty file! For example,
|
|
||||||
// /proc/cpuinfo reports itself as an empty file, but it can
|
|
||||||
// produce data when it's read from. Therefore, we fall back to
|
|
||||||
// regular read calls.
|
|
||||||
return self.search(printer, path, file);
|
|
||||||
}
|
|
||||||
let mmap = try!(Mmap::open(file, Protection::Read));
|
|
||||||
Ok(self.args.searcher_buffer(
|
|
||||||
printer,
|
|
||||||
&self.grep,
|
|
||||||
path,
|
|
||||||
unsafe { mmap.as_slice() },
|
|
||||||
).run())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
struct QuietMatched(Arc<Option<AtomicBool>>);
|
|
||||||
|
|
||||||
impl QuietMatched {
|
|
||||||
fn new(quiet: bool) -> QuietMatched {
|
|
||||||
let atomic = if quiet { Some(AtomicBool::new(false)) } else { None };
|
|
||||||
QuietMatched(Arc::new(atomic))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn has_match(&self) -> bool {
|
|
||||||
match *self.0 {
|
|
||||||
None => false,
|
|
||||||
Some(ref matched) => matched.load(Ordering::SeqCst),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_match(&self, yes: bool) -> bool {
|
|
||||||
match *self.0 {
|
|
||||||
None => false,
|
|
||||||
Some(_) if !yes => false,
|
|
||||||
Some(ref m) => { m.store(true, Ordering::SeqCst); true }
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn is_stdout_file(
|
||||||
|
dent: &ignore::DirEntry,
|
||||||
|
stdout_handle: Option<&same_file::Handle>,
|
||||||
|
no_messages: bool,
|
||||||
|
) -> bool {
|
||||||
|
let stdout_handle = match stdout_handle {
|
||||||
|
None => return false,
|
||||||
|
Some(stdout_handle) => stdout_handle,
|
||||||
|
};
|
||||||
|
// If we know for sure that these two things aren't equal, then avoid
|
||||||
|
// the costly extra stat call to determine equality.
|
||||||
|
if !maybe_dent_eq_handle(dent, stdout_handle) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
match same_file::Handle::from_path(dent.path()) {
|
||||||
|
Ok(h) => stdout_handle == &h,
|
||||||
|
Err(err) => {
|
||||||
|
if !no_messages {
|
||||||
|
eprintln!("{}: {}", dent.path().display(), err);
|
||||||
|
}
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
fn maybe_dent_eq_handle(
|
||||||
|
dent: &ignore::DirEntry,
|
||||||
|
handle: &same_file::Handle,
|
||||||
|
) -> bool {
|
||||||
|
dent.ino() == Some(handle.ino())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(unix))]
|
||||||
|
fn maybe_dent_eq_handle(_: &ignore::DirEntry, _: &same_file::Handle) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn eprint_nothing_searched() {
|
||||||
|
eprintln!("No files were searched, which means ripgrep probably \
|
||||||
|
applied a filter you didn't expect. \
|
||||||
|
Try running again with --debug.");
|
||||||
|
}
|
||||||
|
|||||||
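The hunks above route every directory entry through is_stdout_file so that a command like `rg pattern > out.txt` does not end up searching its own output file. As a rough standalone sketch of that check (assuming only the same_file crate's Handle equality, which the new code relies on; this is not the repository's exact function):

    extern crate same_file;

    use std::path::Path;

    use same_file::Handle;

    /// Returns true when `path` refers to the same file that stdout is
    /// redirected to, in which case the caller should skip searching it.
    fn is_redirect_target(stdout: Option<&Handle>, path: &Path) -> bool {
        let stdout = match stdout {
            None => return false, // stdout is a tty; nothing to compare against
            Some(h) => h,
        };
        match Handle::from_path(path) {
            Ok(h) => *stdout == h,
            Err(_) => false,
        }
    }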
374 src/out.rs
@@ -1,374 +0,0 @@
|
|||||||
use std::io::{self, Write};
|
|
||||||
|
|
||||||
use term::{self, Terminal};
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
use term::terminfo::TermInfo;
|
|
||||||
#[cfg(windows)]
|
|
||||||
use term::WinConsole;
|
|
||||||
|
|
||||||
#[cfg(windows)]
|
|
||||||
use terminal_win::WindowsBuffer;
|
|
||||||
|
|
||||||
/// Out controls the actual output of all search results for a particular file
|
|
||||||
/// to the end user.
|
|
||||||
///
|
|
||||||
/// (The difference between Out and Printer is that a Printer works with
|
|
||||||
/// individual search results where as Out works with search results for each
|
|
||||||
/// file as a whole. For example, it knows when to print a file separator.)
|
|
||||||
pub struct Out {
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
term: ColoredTerminal<term::TerminfoTerminal<io::BufWriter<io::Stdout>>>,
|
|
||||||
#[cfg(windows)]
|
|
||||||
term: ColoredTerminal<WinConsole<io::Stdout>>,
|
|
||||||
printed: bool,
|
|
||||||
file_separator: Option<Vec<u8>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Out {
|
|
||||||
/// Create a new Out that writes to the wtr given.
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
pub fn new(color: bool) -> Out {
|
|
||||||
let wtr = io::BufWriter::new(io::stdout());
|
|
||||||
Out {
|
|
||||||
term: ColoredTerminal::new(wtr, color),
|
|
||||||
printed: false,
|
|
||||||
file_separator: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new Out that writes to the wtr given.
|
|
||||||
#[cfg(windows)]
|
|
||||||
pub fn new(color: bool) -> Out {
|
|
||||||
Out {
|
|
||||||
term: ColoredTerminal::new_stdout(color),
|
|
||||||
printed: false,
|
|
||||||
file_separator: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// If set, the separator is printed between matches from different files.
|
|
||||||
/// By default, no separator is printed.
|
|
||||||
pub fn file_separator(mut self, sep: Vec<u8>) -> Out {
|
|
||||||
self.file_separator = Some(sep);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Write the search results of a single file to the underlying wtr and
|
|
||||||
/// flush wtr.
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
pub fn write(
|
|
||||||
&mut self,
|
|
||||||
buf: &ColoredTerminal<term::TerminfoTerminal<Vec<u8>>>,
|
|
||||||
) {
|
|
||||||
self.write_sep();
|
|
||||||
match *buf {
|
|
||||||
ColoredTerminal::Colored(ref tt) => {
|
|
||||||
let _ = self.term.write_all(tt.get_ref());
|
|
||||||
}
|
|
||||||
ColoredTerminal::NoColor(ref buf) => {
|
|
||||||
let _ = self.term.write_all(buf);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self.write_done();
|
|
||||||
}
|
|
||||||
/// Write the search results of a single file to the underlying wtr and
|
|
||||||
/// flush wtr.
|
|
||||||
#[cfg(windows)]
|
|
||||||
pub fn write(
|
|
||||||
&mut self,
|
|
||||||
buf: &ColoredTerminal<WindowsBuffer>,
|
|
||||||
) {
|
|
||||||
self.write_sep();
|
|
||||||
match *buf {
|
|
||||||
ColoredTerminal::Colored(ref tt) => {
|
|
||||||
tt.print_stdout(&mut self.term);
|
|
||||||
}
|
|
||||||
ColoredTerminal::NoColor(ref buf) => {
|
|
||||||
let _ = self.term.write_all(buf);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self.write_done();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_sep(&mut self) {
|
|
||||||
if let Some(ref sep) = self.file_separator {
|
|
||||||
if self.printed {
|
|
||||||
let _ = self.term.write_all(sep);
|
|
||||||
let _ = self.term.write_all(b"\n");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_done(&mut self) {
|
|
||||||
let _ = self.term.flush();
|
|
||||||
self.printed = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// ColoredTerminal provides optional colored output through the term::Terminal
|
|
||||||
/// trait. In particular, it will dynamically configure itself to use coloring
|
|
||||||
/// if it's available in the environment.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum ColoredTerminal<T: Terminal + Send> {
|
|
||||||
Colored(T),
|
|
||||||
NoColor(T::Output),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
impl<W: io::Write + Send> ColoredTerminal<term::TerminfoTerminal<W>> {
|
|
||||||
/// Create a new output buffer.
|
|
||||||
///
|
|
||||||
/// When color is true, the buffer will attempt to support coloring.
|
|
||||||
pub fn new(wtr: W, color: bool) -> Self {
|
|
||||||
lazy_static! {
|
|
||||||
// Only pay for parsing the terminfo once.
|
|
||||||
static ref TERMINFO: Option<TermInfo> = {
|
|
||||||
match TermInfo::from_env() {
|
|
||||||
Ok(info) => Some(info),
|
|
||||||
Err(err) => {
|
|
||||||
debug!("error loading terminfo for coloring: {}", err);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
// If we want color, build a term::TerminfoTerminal and see if the
|
|
||||||
// current environment supports coloring. If not, bail with NoColor. To
|
|
||||||
// avoid losing our writer (ownership), do this the long way.
|
|
||||||
if !color {
|
|
||||||
return ColoredTerminal::NoColor(wtr);
|
|
||||||
}
|
|
||||||
let terminfo = match *TERMINFO {
|
|
||||||
None => return ColoredTerminal::NoColor(wtr),
|
|
||||||
Some(ref ti) => {
|
|
||||||
// Ug, this should go away with the next release of `term`.
|
|
||||||
TermInfo {
|
|
||||||
names: ti.names.clone(),
|
|
||||||
bools: ti.bools.clone(),
|
|
||||||
numbers: ti.numbers.clone(),
|
|
||||||
strings: ti.strings.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
let tt = term::TerminfoTerminal::new_with_terminfo(wtr, terminfo);
|
|
||||||
if !tt.supports_color() {
|
|
||||||
debug!("environment doesn't support coloring");
|
|
||||||
return ColoredTerminal::NoColor(tt.into_inner());
|
|
||||||
}
|
|
||||||
ColoredTerminal::Colored(tt)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(windows))]
|
|
||||||
impl ColoredTerminal<term::TerminfoTerminal<Vec<u8>>> {
|
|
||||||
/// Clear the give buffer of all search results such that it is reusable
|
|
||||||
/// in another search.
|
|
||||||
pub fn clear(&mut self) {
|
|
||||||
match *self {
|
|
||||||
ColoredTerminal::Colored(ref mut tt) => {
|
|
||||||
tt.get_mut().clear();
|
|
||||||
}
|
|
||||||
ColoredTerminal::NoColor(ref mut buf) => {
|
|
||||||
buf.clear();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(windows)]
|
|
||||||
impl ColoredTerminal<WindowsBuffer> {
|
|
||||||
/// Create a new output buffer.
|
|
||||||
///
|
|
||||||
/// When color is true, the buffer will attempt to support coloring.
|
|
||||||
pub fn new_buffer(color: bool) -> Self {
|
|
||||||
if !color {
|
|
||||||
ColoredTerminal::NoColor(vec![])
|
|
||||||
} else {
|
|
||||||
ColoredTerminal::Colored(WindowsBuffer::new())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Clear the give buffer of all search results such that it is reusable
|
|
||||||
-    /// in another search.
-    pub fn clear(&mut self) {
-        match *self {
-            ColoredTerminal::Colored(ref mut win) => win.clear(),
-            ColoredTerminal::NoColor(ref mut buf) => buf.clear(),
-        }
-    }
-}
-
-#[cfg(windows)]
-impl ColoredTerminal<WinConsole<io::Stdout>> {
-    /// Create a new output buffer.
-    ///
-    /// When color is true, the buffer will attempt to support coloring.
-    pub fn new_stdout(color: bool) -> Self {
-        if !color {
-            return ColoredTerminal::NoColor(io::stdout());
-        }
-        match WinConsole::new(io::stdout()) {
-            Ok(win) => ColoredTerminal::Colored(win),
-            Err(_) => ColoredTerminal::NoColor(io::stdout()),
-        }
-    }
-}
-
-impl<T: Terminal + Send> ColoredTerminal<T> {
-    fn map_result<F>(
-        &mut self,
-        mut f: F,
-    ) -> term::Result<()>
-    where F: FnMut(&mut T) -> term::Result<()> {
-        match *self {
-            ColoredTerminal::Colored(ref mut w) => f(w),
-            ColoredTerminal::NoColor(_) => Err(term::Error::NotSupported),
-        }
-    }
-
-    fn map_bool<F>(
-        &self,
-        mut f: F,
-    ) -> bool
-    where F: FnMut(&T) -> bool {
-        match *self {
-            ColoredTerminal::Colored(ref w) => f(w),
-            ColoredTerminal::NoColor(_) => false,
-        }
-    }
-}
-
-impl<T: Terminal + Send> io::Write for ColoredTerminal<T> {
-    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
-        match *self {
-            ColoredTerminal::Colored(ref mut w) => w.write(buf),
-            ColoredTerminal::NoColor(ref mut w) => w.write(buf),
-        }
-    }
-
-    fn flush(&mut self) -> io::Result<()> {
-        Ok(())
-    }
-}
-
-impl<T: Terminal + Send> term::Terminal for ColoredTerminal<T> {
-    type Output = T::Output;
-
-    fn fg(&mut self, fg: term::color::Color) -> term::Result<()> {
-        self.map_result(|w| w.fg(fg))
-    }
-
-    fn bg(&mut self, bg: term::color::Color) -> term::Result<()> {
-        self.map_result(|w| w.bg(bg))
-    }
-
-    fn attr(&mut self, attr: term::Attr) -> term::Result<()> {
-        self.map_result(|w| w.attr(attr))
-    }
-
-    fn supports_attr(&self, attr: term::Attr) -> bool {
-        self.map_bool(|w| w.supports_attr(attr))
-    }
-
-    fn reset(&mut self) -> term::Result<()> {
-        self.map_result(|w| w.reset())
-    }
-
-    fn supports_reset(&self) -> bool {
-        self.map_bool(|w| w.supports_reset())
-    }
-
-    fn supports_color(&self) -> bool {
-        self.map_bool(|w| w.supports_color())
-    }
-
-    fn cursor_up(&mut self) -> term::Result<()> {
-        self.map_result(|w| w.cursor_up())
-    }
-
-    fn delete_line(&mut self) -> term::Result<()> {
-        self.map_result(|w| w.delete_line())
-    }
-
-    fn carriage_return(&mut self) -> term::Result<()> {
-        self.map_result(|w| w.carriage_return())
-    }
-
-    fn get_ref(&self) -> &Self::Output {
-        match *self {
-            ColoredTerminal::Colored(ref w) => w.get_ref(),
-            ColoredTerminal::NoColor(ref w) => w,
-        }
-    }
-
-    fn get_mut(&mut self) -> &mut Self::Output {
-        match *self {
-            ColoredTerminal::Colored(ref mut w) => w.get_mut(),
-            ColoredTerminal::NoColor(ref mut w) => w,
-        }
-    }
-
-    fn into_inner(self) -> Self::Output {
-        match self {
-            ColoredTerminal::Colored(w) => w.into_inner(),
-            ColoredTerminal::NoColor(w) => w,
-        }
-    }
-}
-
-impl<'a, T: Terminal + Send> term::Terminal for &'a mut ColoredTerminal<T> {
-    type Output = T::Output;
-
-    fn fg(&mut self, fg: term::color::Color) -> term::Result<()> {
-        (**self).fg(fg)
-    }
-
-    fn bg(&mut self, bg: term::color::Color) -> term::Result<()> {
-        (**self).bg(bg)
-    }
-
-    fn attr(&mut self, attr: term::Attr) -> term::Result<()> {
-        (**self).attr(attr)
-    }
-
-    fn supports_attr(&self, attr: term::Attr) -> bool {
-        (**self).supports_attr(attr)
-    }
-
-    fn reset(&mut self) -> term::Result<()> {
-        (**self).reset()
-    }
-
-    fn supports_reset(&self) -> bool {
-        (**self).supports_reset()
-    }
-
-    fn supports_color(&self) -> bool {
-        (**self).supports_color()
-    }
-
-    fn cursor_up(&mut self) -> term::Result<()> {
-        (**self).cursor_up()
-    }
-
-    fn delete_line(&mut self) -> term::Result<()> {
-        (**self).delete_line()
-    }
-
-    fn carriage_return(&mut self) -> term::Result<()> {
-        (**self).carriage_return()
-    }
-
-    fn get_ref(&self) -> &Self::Output {
-        (**self).get_ref()
-    }
-
-    fn get_mut(&mut self) -> &mut Self::Output {
-        (**self).get_mut()
-    }
-
-    fn into_inner(self) -> Self::Output {
-        // Good golly miss molly...
-        unimplemented!()
-    }
-}
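The removed ColoredTerminal wrapper above existed only to fall back to plain output when a terminfo or WinConsole terminal could not be obtained. The rest of this comparison moves to the termcolor crate, which folds that fallback into the writer itself. The following is a minimal illustrative sketch of that pattern, not code from this diff; the use of `ColorChoice::Auto` and a bare `main` are assumptions for the example:

```rust
// Illustrative sketch only: pick a colored or plain stdout writer with
// termcolor instead of the removed ColoredTerminal enum.
use std::io::Write;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

fn main() -> std::io::Result<()> {
    // ColorChoice::Auto degrades to plain text when the terminal (or a pipe)
    // does not support colors, so no separate NoColor branch is needed here.
    let mut out = StandardStream::stdout(ColorChoice::Auto);
    out.set_color(ColorSpec::new().set_fg(Some(Color::Red)).set_bold(true))?;
    write!(out, "matched text")?;
    out.reset()?;
    writeln!(out)?;
    Ok(())
}
```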
@@ -1,6 +1,6 @@
 /*!
 The pathutil module provides platform specific operations on paths that are
-typically faster than the same operations as provided in std::path. In
+typically faster than the same operations as provided in `std::path`. In
 particular, we really want to avoid the costly operation of parsing the path
 into its constituent components. We give up on Windows, but on Unix, we deal
 with the raw bytes directly.
@@ -8,7 +8,6 @@ with the raw bytes directly.
 On large repositories (like chromium), this can have a ~25% performance
 improvement on just listing the files to search (!).
 */
-use std::ffi::OsStr;
 use std::path::Path;

 /// Strip `prefix` from the `path` and return the remainder.
@@ -19,6 +18,7 @@ pub fn strip_prefix<'a, P: AsRef<Path> + ?Sized>(
     prefix: &'a P,
     path: &'a Path,
 ) -> Option<&'a Path> {
+    use std::ffi::OsStr;
     use std::os::unix::ffi::OsStrExt;

     let prefix = prefix.as_ref().as_os_str().as_bytes();
@@ -26,7 +26,7 @@ pub fn strip_prefix<'a, P: AsRef<Path> + ?Sized>(
     if prefix.len() > path.len() || prefix != &path[0..prefix.len()] {
         None
     } else {
-        Some(&Path::new(OsStr::from_bytes(&path[prefix.len()..])))
+        Some(Path::new(OsStr::from_bytes(&path[prefix.len()..])))
     }
 }

@@ -40,79 +40,3 @@ pub fn strip_prefix<'a, P: AsRef<Path> + ?Sized>(
 ) -> Option<&'a Path> {
     path.strip_prefix(prefix).ok()
 }
-
-/// The final component of the path, if it is a normal file.
-///
-/// If the path terminates in ., .., or consists solely of a root of prefix,
-/// file_name will return None.
-#[cfg(unix)]
-pub fn file_name<'a, P: AsRef<Path> + ?Sized>(
-    path: &'a P,
-) -> Option<&'a OsStr> {
-    use std::os::unix::ffi::OsStrExt;
-    use memchr::memrchr;
-
-    let path = path.as_ref().as_os_str().as_bytes();
-    if path.is_empty() {
-        return None;
-    } else if path.len() == 1 && path[0] == b'.' {
-        return None;
-    } else if path.last() == Some(&b'.') {
-        return None;
-    } else if path.len() >= 2 && &path[path.len() - 2..] == &b".."[..] {
-        return None;
-    }
-    let last_slash = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0);
-    Some(OsStr::from_bytes(&path[last_slash..]))
-}
-
-/// The final component of the path, if it is a normal file.
-///
-/// If the path terminates in ., .., or consists solely of a root of prefix,
-/// file_name will return None.
-#[cfg(not(unix))]
-pub fn file_name<'a, P: AsRef<Path> + ?Sized>(
-    path: &'a P,
-) -> Option<&'a OsStr> {
-    path.as_ref().file_name()
-}
-
-/// Returns true if and only if this file path is considered to be hidden.
-#[cfg(unix)]
-pub fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
-    use std::os::unix::ffi::OsStrExt;
-
-    if let Some(name) = file_name(path.as_ref()) {
-        name.as_bytes().get(0) == Some(&b'.')
-    } else {
-        false
-    }
-}
-
-/// Returns true if and only if this file path is considered to be hidden.
-#[cfg(not(unix))]
-pub fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
-    if let Some(name) = file_name(path.as_ref()) {
-        name.to_str().map(|s| s.starts_with(".")).unwrap_or(false)
-    } else {
-        false
-    }
-}
-
-/// Returns true if this file path is just a file name. i.e., Its parent is
-/// the empty string.
-#[cfg(unix)]
-pub fn is_file_name<P: AsRef<Path>>(path: P) -> bool {
-    use std::os::unix::ffi::OsStrExt;
-    use memchr::memchr;
-
-    let path = path.as_ref().as_os_str().as_bytes();
-    memchr(b'/', path).is_none()
-}
-
-/// Returns true if this file path is just a file name. i.e., Its parent is
-/// the empty string.
-#[cfg(not(unix))]
-pub fn is_file_name<P: AsRef<Path>>(path: P) -> bool {
-    path.as_ref().parent().map(|p| p.as_os_str().is_empty()).unwrap_or(false)
-}
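The `strip_prefix` helper kept above is what the printer uses to trim a leading `./` from displayed paths. A minimal usage sketch follows; it is a hypothetical standalone copy of the Unix fast path shown in the hunk above (ripgrep's real version lives in src/pathutil.rs and the non-Unix version falls back to `Path::strip_prefix`):

```rust
use std::path::Path;

// Standalone copy of the byte-level prefix strip shown above, for illustration.
fn strip_prefix<'a>(prefix: &str, path: &'a Path) -> Option<&'a Path> {
    use std::ffi::OsStr;
    use std::os::unix::ffi::OsStrExt;

    let prefix = prefix.as_bytes();
    let bytes = path.as_os_str().as_bytes();
    if prefix.len() > bytes.len() || prefix != &bytes[..prefix.len()] {
        None
    } else {
        // No allocation and no component parsing: just reinterpret the tail.
        Some(Path::new(OsStr::from_bytes(&bytes[prefix.len()..])))
    }
}

fn main() {
    let p = Path::new("./src/main.rs");
    assert_eq!(strip_prefix("./", p), Some(Path::new("src/main.rs")));
}
```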
699  src/printer.rs
@@ -1,11 +1,33 @@
+use std::error;
+use std::fmt;
 use std::path::Path;
+use std::str::FromStr;

-use regex::bytes::Regex;
+use regex::bytes::{Regex, Replacer, Captures};
-use term::{Attr, Terminal};
+use termcolor::{Color, ColorSpec, ParseColorError, WriteColor};
-use term::color;

 use pathutil::strip_prefix;
-use types::FileTypeDef;
+use ignore::types::FileTypeDef;

+/// CountingReplacer implements the Replacer interface for Regex,
+/// and counts how often replacement is being performed.
+struct CountingReplacer<'r> {
+    replace: &'r [u8],
+    count: &'r mut usize,
+}
+
+impl<'r> CountingReplacer<'r> {
+    fn new(replace: &'r [u8], count: &'r mut usize) -> CountingReplacer<'r> {
+        CountingReplacer { replace: replace, count: count }
+    }
+}
+
+impl<'r> Replacer for CountingReplacer<'r> {
+    fn replace_append(&mut self, caps: &Captures, dst: &mut Vec<u8>) {
+        *self.count += 1;
+        caps.expand(self.replace, dst);
+    }
+}
+
 /// Printer encapsulates all output logic for searching.
 ///
@@ -36,42 +58,22 @@ pub struct Printer<W> {
     /// Whether to print NUL bytes after a file path instead of new lines
     /// or `:`.
     null: bool,
+    /// Print only the matched (non-empty) parts of a matching line
+    only_matching: bool,
     /// A string to use as a replacement of each match in a matching line.
     replace: Option<Vec<u8>>,
     /// Whether to prefix each match with the corresponding file name.
     with_filename: bool,
-    /// The choice of colors.
+    /// The color specifications.
-    color_choice: ColorChoice
+    colors: ColorSpecs,
+    /// The separator to use for file paths. If empty, this is ignored.
+    path_separator: Option<u8>,
+    /// Restrict lines to this many columns.
+    max_columns: Option<usize>
 }

-struct ColorChoice {
+impl<W: WriteColor> Printer<W> {
-    matched_line: color::Color,
+    /// Create a new printer that writes to wtr with the given color settings.
-    heading: color::Color,
-    line_number: color::Color
-}
-
-impl ColorChoice {
-    #[cfg(unix)]
-    pub fn new() -> ColorChoice {
-        ColorChoice {
-            matched_line: color::RED,
-            heading: color::GREEN,
-            line_number: color::BLUE
-        }
-    }
-
-    #[cfg(not(unix))]
-    pub fn new() -> ColorChoice {
-        ColorChoice {
-            matched_line: color::BRIGHT_RED,
-            heading: color::BRIGHT_GREEN,
-            line_number: color::BRIGHT_BLUE
-        }
-    }
-}
-
-impl<W: Terminal + Send> Printer<W> {
-    /// Create a new printer that writes to wtr.
     pub fn new(wtr: W) -> Printer<W> {
         Printer {
             wtr: wtr,
@@ -83,12 +85,21 @@ impl<W: Terminal + Send> Printer<W> {
             heading: false,
             line_per_match: false,
             null: false,
+            only_matching: false,
             replace: None,
             with_filename: false,
-            color_choice: ColorChoice::new()
+            colors: ColorSpecs::default(),
+            path_separator: None,
+            max_columns: None,
         }
     }

+    /// Set the color specifications.
+    pub fn colors(mut self, colors: ColorSpecs) -> Printer<W> {
+        self.colors = colors;
+        self
+    }
+
     /// When set, column numbers will be printed for the first match on each
     /// line.
     pub fn column(mut self, yes: bool) -> Printer<W> {
@@ -136,11 +147,21 @@ impl<W: Terminal + Send> Printer<W> {
         self
     }

+    /// Print only the matched (non-empty) parts of a matching line
+    pub fn only_matching(mut self, yes: bool) -> Printer<W> {
+        self.only_matching = yes;
+        self
+    }
+
+    /// A separator to use when printing file paths. When empty, use the
+    /// default separator for the current platform. (/ on Unix, \ on Windows.)
+    pub fn path_separator(mut self, sep: Option<u8>) -> Printer<W> {
+        self.path_separator = sep;
+        self
+    }
+
     /// Replace every match in each matching line with the replacement string
     /// given.
-    ///
-    /// The replacement string syntax is documented here:
-    /// https://doc.rust-lang.org/regex/regex/bytes/struct.Captures.html#method.expand
     pub fn replace(mut self, replacement: Vec<u8>) -> Printer<W> {
         self.replace = Some(replacement);
         self
@@ -152,12 +173,19 @@ impl<W: Terminal + Send> Printer<W> {
         self
     }

+    /// Configure the max. number of columns used for printing matching lines.
+    pub fn max_columns(mut self, max_columns: Option<usize>) -> Printer<W> {
+        self.max_columns = max_columns;
+        self
+    }
+
     /// Returns true if and only if something has been printed.
     pub fn has_printed(&self) -> bool {
         self.has_printed
     }

     /// Flushes the underlying writer and returns it.
+    #[allow(dead_code)]
     pub fn into_inner(mut self) -> W {
         let _ = self.wtr.flush();
         self.wtr
@@ -168,11 +196,11 @@ impl<W: Terminal + Send> Printer<W> {
         self.write(def.name().as_bytes());
         self.write(b": ");
         let mut first = true;
-        for pat in def.patterns() {
+        for glob in def.globs() {
             if !first {
                 self.write(b", ");
             }
-            self.write(pat.as_bytes());
+            self.write(glob.as_bytes());
             first = false;
         }
         self.write_eol();
@@ -182,22 +210,14 @@ impl<W: Terminal + Send> Printer<W> {
     pub fn path<P: AsRef<Path>>(&mut self, path: P) {
         let path = strip_prefix("./", path.as_ref()).unwrap_or(path.as_ref());
         self.write_path(path);
-        if self.null {
+        self.write_path_eol();
-            self.write(b"\x00");
-        } else {
-            self.write_eol();
-        }
     }

     /// Prints the given path and a count of the number of matches found.
     pub fn path_count<P: AsRef<Path>>(&mut self, path: P, count: u64) {
         if self.with_filename {
             self.write_path(path);
-            if self.null {
+            self.write_path_sep(b':');
-                self.write(b"\x00");
-            } else {
-                self.write(b":");
-            }
         }
         self.write(count.to_string().as_bytes());
         self.write_eol();
@@ -205,13 +225,11 @@ impl<W: Terminal + Send> Printer<W> {

     /// Prints the context separator.
     pub fn context_separate(&mut self) {
-        // N.B. We can't use `write` here because of borrowing restrictions.
         if self.context_separator.is_empty() {
             return;
         }
-        self.has_printed = true;
         let _ = self.wtr.write_all(&self.context_separator);
-        let _ = self.wtr.write_all(&[self.eol]);
+        self.write_eol();
     }

     pub fn matched<P: AsRef<Path>>(
@@ -223,19 +241,24 @@ impl<W: Terminal + Send> Printer<W> {
         end: usize,
         line_number: Option<u64>,
     ) {
-        if !self.line_per_match {
+        if !self.line_per_match && !self.only_matching {
             let column =
                 if self.column {
                     Some(re.find(&buf[start..end])
-                           .map(|(s, _)| s).unwrap_or(0) as u64)
+                           .map(|m| m.start()).unwrap_or(0) as u64)
                 } else {
                     None
                 };
             return self.write_match(
                 re, path, buf, start, end, line_number, column);
         }
-        for (s, _) in re.find_iter(&buf[start..end]) {
+        for m in re.find_iter(&buf[start..end]) {
-            let column = if self.column { Some(s as u64) } else { None };
+            let column =
+                if self.column {
+                    Some(m.start() as u64)
+                } else {
+                    None
+                };
             self.write_match(
                 re, path.as_ref(), buf, start, end, line_number, column);
         }
@@ -253,44 +276,71 @@ impl<W: Terminal + Send> Printer<W> {
     ) {
         if self.heading && self.with_filename && !self.has_printed {
             self.write_file_sep();
-            self.write_heading(path.as_ref());
+            self.write_path(path);
+            self.write_path_eol();
         } else if !self.heading && self.with_filename {
-            self.write_non_heading_path(path.as_ref());
+            self.write_path(path);
+            self.write_path_sep(b':');
         }
         if let Some(line_number) = line_number {
             self.line_number(line_number, b':');
         }
         if let Some(c) = column {
-            self.write((c + 1).to_string().as_bytes());
+            self.column_number(c + 1, b':');
-            self.write(b":");
         }
         if self.replace.is_some() {
-            let line = re.replace_all(
+            let mut count = 0;
-                &buf[start..end], &**self.replace.as_ref().unwrap());
+            let line = {
+                let replacer = CountingReplacer::new(
+                    self.replace.as_ref().unwrap(), &mut count);
+                re.replace_all(&buf[start..end], replacer)
+            };
+            if self.max_columns.map_or(false, |m| line.len() > m) {
+                let msg = format!(
+                    "[Omitted long line with {} replacements]", count);
+                self.write_colored(msg.as_bytes(), |colors| colors.matched());
+                self.write_eol();
+                return;
+            }
             self.write(&line);
+            if line.last() != Some(&self.eol) {
+                self.write_eol();
+            }
         } else {
-            self.write_matched_line(re, &buf[start..end]);
+            let line_buf = if self.only_matching {
-        }
+                let m = re.find(&buf[start..end]).unwrap();
-        if buf[start..end].last() != Some(&self.eol) {
+                &buf[start + m.start()..start + m.end()]
-            self.write_eol();
+            } else {
+                &buf[start..end]
+            };
+            self.write_matched_line(re, line_buf);
+            // write_matched_line guarantees to write a newline.
         }
     }

     fn write_matched_line(&mut self, re: &Regex, buf: &[u8]) {
-        if !self.wtr.supports_color() {
+        if self.max_columns.map_or(false, |m| buf.len() > m) {
-            self.write(buf);
+            let count = re.find_iter(buf).count();
+            let msg = format!("[Omitted long line with {} matches]", count);
+            self.write_colored(msg.as_bytes(), |colors| colors.matched());
+            self.write_eol();
             return;
         }
-        let mut last_written = 0;
+        if !self.wtr.supports_color() || self.colors.matched().is_none() {
-        for (s, e) in re.find_iter(buf) {
+            self.write(buf);
-            self.write(&buf[last_written..s]);
+        } else {
-            let _ = self.wtr.fg(self.color_choice.matched_line);
+            let mut last_written = 0;
-            let _ = self.wtr.attr(Attr::Bold);
+            for m in re.find_iter(buf) {
-            self.write(&buf[s..e]);
+                self.write(&buf[last_written..m.start()]);
-            let _ = self.wtr.reset();
+                self.write_colored(
-            last_written = e;
+                    &buf[m.start()..m.end()], |colors| colors.matched());
+                last_written = m.end();
+            }
+            self.write(&buf[last_written..]);
+        }
+        if buf.last() != Some(&self.eol) {
+            self.write_eol();
         }
-        self.write(&buf[last_written..]);
     }

     pub fn context<P: AsRef<Path>>(
@@ -303,79 +353,83 @@ impl<W: Terminal + Send> Printer<W> {
     ) {
         if self.heading && self.with_filename && !self.has_printed {
             self.write_file_sep();
-            self.write_heading(path.as_ref());
+            self.write_path(path);
+            self.write_path_eol();
         } else if !self.heading && self.with_filename {
-            self.write_path(path.as_ref());
+            self.write_path(path);
-            if self.null {
+            self.write_path_sep(b'-');
-                self.write(b"\x00");
-            } else {
-                self.write(b"-");
-            }
         }
         if let Some(line_number) = line_number {
             self.line_number(line_number, b'-');
         }
+        if self.max_columns.map_or(false, |m| end - start > m) {
+            self.write(format!("[Omitted long context line]").as_bytes());
+            self.write_eol();
+            return;
+        }
         self.write(&buf[start..end]);
         if buf[start..end].last() != Some(&self.eol) {
             self.write_eol();
         }
     }

-    fn write_heading<P: AsRef<Path>>(&mut self, path: P) {
+    fn separator(&mut self, sep: &[u8]) {
-        if self.wtr.supports_color() {
+        self.write(&sep);
-            let _ = self.wtr.fg(self.color_choice.heading);
+    }
-            let _ = self.wtr.attr(Attr::Bold);
+    fn write_path_sep(&mut self, sep: u8) {
+        if self.null {
+            self.write(b"\x00");
+        } else {
+            self.separator(&[sep]);
         }
-        self.write_path(path.as_ref());
+    }

+    fn write_path_eol(&mut self) {
         if self.null {
             self.write(b"\x00");
         } else {
             self.write_eol();
         }
-        if self.wtr.supports_color() {
-            let _ = self.wtr.reset();
-        }
-    }
-
-    fn write_non_heading_path<P: AsRef<Path>>(&mut self, path: P) {
-        if self.wtr.supports_color() {
-            let _ = self.wtr.fg(self.color_choice.heading);
-            let _ = self.wtr.attr(Attr::Bold);
-        }
-        self.write_path(path.as_ref());
-        if self.wtr.supports_color() {
-            let _ = self.wtr.reset();
-        }
-        if self.null {
-            self.write(b"\x00");
-        } else {
-            self.write(b":");
-        }
-    }
-
-    fn line_number(&mut self, n: u64, sep: u8) {
-        if self.wtr.supports_color() {
-            let _ = self.wtr.fg(self.color_choice.line_number);
-            let _ = self.wtr.attr(Attr::Bold);
-        }
-        self.write(n.to_string().as_bytes());
-        if self.wtr.supports_color() {
-            let _ = self.wtr.reset();
-        }
-        self.write(&[sep]);
     }

     #[cfg(unix)]
     fn write_path<P: AsRef<Path>>(&mut self, path: P) {
         use std::os::unix::ffi::OsStrExt;

         let path = path.as_ref().as_os_str().as_bytes();
-        self.write(path);
+        self.write_path_replace_separator(path);
     }

     #[cfg(not(unix))]
     fn write_path<P: AsRef<Path>>(&mut self, path: P) {
-        self.write(path.as_ref().to_string_lossy().as_bytes());
+        let path = path.as_ref().to_string_lossy();
+        self.write_path_replace_separator(path.as_bytes());
+    }
+
+    fn write_path_replace_separator(&mut self, path: &[u8]) {
+        match self.path_separator {
+            None => self.write_colored(path, |colors| colors.path()),
+            Some(sep) => {
+                let transformed_path: Vec<_> = path.iter().map(|&b| {
+                    if b == b'/' || (cfg!(windows) && b == b'\\') {
+                        sep
+                    } else {
+                        b
+                    }
+                }).collect();
+                self.write_colored(&transformed_path, |colors| colors.path());
+            }
+        }
+    }
+
+    fn line_number(&mut self, n: u64, sep: u8) {
+        self.write_colored(n.to_string().as_bytes(), |colors| colors.line());
+        self.separator(&[sep]);
+    }
+
+    fn column_number(&mut self, n: u64, sep: u8) {
+        self.write_colored(n.to_string().as_bytes(), |colors| colors.column());
+        self.separator(&[sep]);
     }

     fn write(&mut self, buf: &[u8]) {
@@ -388,6 +442,14 @@ impl<W: Terminal + Send> Printer<W> {
         self.write(&[eol]);
     }

+    fn write_colored<F>(&mut self, buf: &[u8], get_color: F)
+        where F: Fn(&ColorSpecs) -> &ColorSpec
+    {
+        let _ = self.wtr.set_color( get_color(&self.colors) );
+        self.write(buf);
+        let _ = self.wtr.reset();
+    }
+
     fn write_file_sep(&mut self) {
         if let Some(ref sep) = self.file_separator {
             self.has_printed = true;
@@ -396,3 +458,392 @@ impl<W: Terminal + Send> Printer<W> {
         }
     }
 }
+
+/// An error that can occur when parsing color specifications.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum Error {
+    /// This occurs when an unrecognized output type is used.
+    UnrecognizedOutType(String),
+    /// This occurs when an unrecognized spec type is used.
+    UnrecognizedSpecType(String),
+    /// This occurs when an unrecognized color name is used.
+    UnrecognizedColor(String, String),
+    /// This occurs when an unrecognized style attribute is used.
+    UnrecognizedStyle(String),
+    /// This occurs when the format of a color specification is invalid.
+    InvalidFormat(String),
+}
+
+impl error::Error for Error {
+    fn description(&self) -> &str {
+        match *self {
+            Error::UnrecognizedOutType(_) => "unrecognized output type",
+            Error::UnrecognizedSpecType(_) => "unrecognized spec type",
+            Error::UnrecognizedColor(_, _) => "unrecognized color name",
+            Error::UnrecognizedStyle(_) => "unrecognized style attribute",
+            Error::InvalidFormat(_) => "invalid color spec",
+        }
+    }
+
+    fn cause(&self) -> Option<&error::Error> {
+        None
+    }
+}
+
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            Error::UnrecognizedOutType(ref name) => {
+                write!(f, "Unrecognized output type '{}'. Choose from: \
+                           path, line, column, match.", name)
+            }
+            Error::UnrecognizedSpecType(ref name) => {
+                write!(f, "Unrecognized spec type '{}'. Choose from: \
+                           fg, bg, style, none.", name)
+            }
+            Error::UnrecognizedColor(_, ref msg) => {
+                write!(f, "{}", msg)
+            }
+            Error::UnrecognizedStyle(ref name) => {
+                write!(f, "Unrecognized style attribute '{}'. Choose from: \
+                           nobold, bold, nointense, intense.", name)
+            }
+            Error::InvalidFormat(ref original) => {
+                write!(
+                    f,
+                    "Invalid color speci format: '{}'. Valid format \
+                     is '(path|line|column|match):(fg|bg|style):(value)'.",
+                    original)
+            }
+        }
+    }
+}
+
+impl From<ParseColorError> for Error {
+    fn from(err: ParseColorError) -> Error {
+        Error::UnrecognizedColor(err.invalid().to_string(), err.to_string())
+    }
+}
+
+/// A merged set of color specifications.
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+pub struct ColorSpecs {
+    path: ColorSpec,
+    line: ColorSpec,
+    column: ColorSpec,
+    matched: ColorSpec,
+}
+
+/// A single color specification provided by the user.
+///
+/// A `ColorSpecs` can be built by merging a sequence of `Spec`s.
+///
+/// ## Example
+///
+/// The only way to build a `Spec` is to parse it from a string. Once multiple
+/// `Spec`s have been constructed, then can be merged into a single
+/// `ColorSpecs` value.
+///
+/// ```rust
+/// use termcolor::{Color, ColorSpecs, Spec};
+///
+/// let spec1: Spec = "path:fg:blue".parse().unwrap();
+/// let spec2: Spec = "match:bg:green".parse().unwrap();
+/// let specs = ColorSpecs::new(&[spec1, spec2]);
+///
+/// assert_eq!(specs.path().fg(), Some(Color::Blue));
+/// assert_eq!(specs.matched().bg(), Some(Color::Green));
+/// ```
+///
+/// ## Format
+///
+/// The format of a `Spec` is a triple: `{type}:{attribute}:{value}`. Each
+/// component is defined as follows:
+///
+/// * `{type}` can be one of `path`, `line`, `column` or `match`.
+/// * `{attribute}` can be one of `fg`, `bg` or `style`. `{attribute}` may also
+///   be the special value `none`, in which case, `{value}` can be omitted.
+/// * `{value}` is either a color name (for `fg`/`bg`) or a style instruction.
+///
+/// `{type}` controls which part of the output should be styled and is
+/// application dependent.
+///
+/// When `{attribute}` is `none`, then this should cause any existing color
+/// settings to be cleared.
+///
+/// `{value}` should be a color when `{attribute}` is `fg` or `bg`, or it
+/// should be a style instruction when `{attribute}` is `style`. When
+/// `{attribute}` is `none`, `{value}` must be omitted.
+///
+/// Valid colors are `black`, `blue`, `green`, `red`, `cyan`, `magenta`,
+/// `yellow`, `white`.
+///
+/// Valid style instructions are `nobold` and `bold`.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Spec {
+    ty: OutType,
+    value: SpecValue,
+}
+
+/// The actual value given by the specification.
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum SpecValue {
+    None,
+    Fg(Color),
+    Bg(Color),
+    Style(Style),
+}
+
+/// The set of configurable portions of ripgrep's output.
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum OutType {
+    Path,
+    Line,
+    Column,
+    Match,
+}
+
+/// The specification type.
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum SpecType {
+    Fg,
+    Bg,
+    Style,
+    None,
+}
+
+/// The set of available styles for use in the terminal.
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum Style {
+    Bold,
+    NoBold,
+    Intense,
+    NoIntense,
+}
+
+impl ColorSpecs {
+    /// Create color specifications from a list of user supplied
+    /// specifications.
+    pub fn new(user_specs: &[Spec]) -> ColorSpecs {
+        let mut specs = ColorSpecs::default();
+        for user_spec in user_specs {
+            match user_spec.ty {
+                OutType::Path => user_spec.merge_into(&mut specs.path),
+                OutType::Line => user_spec.merge_into(&mut specs.line),
+                OutType::Column => user_spec.merge_into(&mut specs.column),
+                OutType::Match => user_spec.merge_into(&mut specs.matched),
+            }
+        }
+        specs
+    }
+
+    /// Return the color specification for coloring file paths.
+    fn path(&self) -> &ColorSpec {
+        &self.path
+    }
+
+    /// Return the color specification for coloring line numbers.
+    fn line(&self) -> &ColorSpec {
+        &self.line
+    }
+
+    /// Return the color specification for coloring column numbers.
+    fn column(&self) -> &ColorSpec {
+        &self.column
+    }
+
+    /// Return the color specification for coloring matched text.
+    fn matched(&self) -> &ColorSpec {
+        &self.matched
+    }
+}
+
+impl Spec {
+    /// Merge this spec into the given color specification.
+    fn merge_into(&self, cspec: &mut ColorSpec) {
+        self.value.merge_into(cspec);
+    }
+}
+
+impl SpecValue {
+    /// Merge this spec value into the given color specification.
+    fn merge_into(&self, cspec: &mut ColorSpec) {
+        match *self {
+            SpecValue::None => cspec.clear(),
+            SpecValue::Fg(ref color) => { cspec.set_fg(Some(color.clone())); }
+            SpecValue::Bg(ref color) => { cspec.set_bg(Some(color.clone())); }
+            SpecValue::Style(ref style) => {
+                match *style {
+                    Style::Bold => { cspec.set_bold(true); }
+                    Style::NoBold => { cspec.set_bold(false); }
+                    Style::Intense => { cspec.set_intense(true); }
+                    Style::NoIntense => { cspec.set_intense(false); }
+                }
+            }
+        }
+    }
+}
+
+impl FromStr for Spec {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<Spec, Error> {
+        let pieces: Vec<&str> = s.split(':').collect();
+        if pieces.len() <= 1 || pieces.len() > 3 {
+            return Err(Error::InvalidFormat(s.to_string()));
+        }
+        let otype: OutType = try!(pieces[0].parse());
+        match try!(pieces[1].parse()) {
+            SpecType::None => Ok(Spec { ty: otype, value: SpecValue::None }),
+            SpecType::Style => {
+                if pieces.len() < 3 {
+                    return Err(Error::InvalidFormat(s.to_string()));
+                }
+                let style: Style = try!(pieces[2].parse());
+                Ok(Spec { ty: otype, value: SpecValue::Style(style) })
+            }
+            SpecType::Fg => {
+                if pieces.len() < 3 {
+                    return Err(Error::InvalidFormat(s.to_string()));
+                }
+                let color: Color = try!(pieces[2].parse());
+                Ok(Spec { ty: otype, value: SpecValue::Fg(color) })
+            }
+            SpecType::Bg => {
+                if pieces.len() < 3 {
+                    return Err(Error::InvalidFormat(s.to_string()));
+                }
+                let color: Color = try!(pieces[2].parse());
+                Ok(Spec { ty: otype, value: SpecValue::Bg(color) })
+            }
+        }
+    }
+}
+
+impl FromStr for OutType {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<OutType, Error> {
+        match &*s.to_lowercase() {
+            "path" => Ok(OutType::Path),
+            "line" => Ok(OutType::Line),
+            "column" => Ok(OutType::Column),
+            "match" => Ok(OutType::Match),
+            _ => Err(Error::UnrecognizedOutType(s.to_string())),
+        }
+    }
+}
+
+impl FromStr for SpecType {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<SpecType, Error> {
+        match &*s.to_lowercase() {
+            "fg" => Ok(SpecType::Fg),
+            "bg" => Ok(SpecType::Bg),
+            "style" => Ok(SpecType::Style),
+            "none" => Ok(SpecType::None),
+            _ => Err(Error::UnrecognizedSpecType(s.to_string())),
+        }
+    }
+}
+
+impl FromStr for Style {
+    type Err = Error;
+
+    fn from_str(s: &str) -> Result<Style, Error> {
+        match &*s.to_lowercase() {
+            "bold" => Ok(Style::Bold),
+            "nobold" => Ok(Style::NoBold),
+            "intense" => Ok(Style::Intense),
+            "nointense" => Ok(Style::NoIntense),
+            _ => Err(Error::UnrecognizedStyle(s.to_string())),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use termcolor::{Color, ColorSpec};
+    use super::{ColorSpecs, Error, OutType, Spec, SpecValue, Style};
+
+    #[test]
+    fn merge() {
+        let user_specs: &[Spec] = &[
+            "match:fg:blue".parse().unwrap(),
+            "match:none".parse().unwrap(),
+            "match:style:bold".parse().unwrap(),
+        ];
+        let mut expect_matched = ColorSpec::new();
+        expect_matched.set_bold(true);
+        assert_eq!(ColorSpecs::new(user_specs), ColorSpecs {
+            path: ColorSpec::default(),
+            line: ColorSpec::default(),
+            column: ColorSpec::default(),
+            matched: expect_matched,
+        });
+    }
+
+    #[test]
+    fn specs() {
+        let spec: Spec = "path:fg:blue".parse().unwrap();
+        assert_eq!(spec, Spec {
+            ty: OutType::Path,
+            value: SpecValue::Fg(Color::Blue),
+        });
+
+        let spec: Spec = "path:bg:red".parse().unwrap();
+        assert_eq!(spec, Spec {
+            ty: OutType::Path,
+            value: SpecValue::Bg(Color::Red),
+        });
+
+        let spec: Spec = "match:style:bold".parse().unwrap();
+        assert_eq!(spec, Spec {
+            ty: OutType::Match,
+            value: SpecValue::Style(Style::Bold),
+        });
+
+        let spec: Spec = "match:style:intense".parse().unwrap();
+        assert_eq!(spec, Spec {
+            ty: OutType::Match,
+            value: SpecValue::Style(Style::Intense),
+        });
+
+        let spec: Spec = "line:none".parse().unwrap();
+        assert_eq!(spec, Spec {
+            ty: OutType::Line,
+            value: SpecValue::None,
+        });
+
+        let spec: Spec = "column:bg:green".parse().unwrap();
+        assert_eq!(spec, Spec {
+            ty: OutType::Column,
+            value: SpecValue::Bg(Color::Green),
+        });
+    }
+
+    #[test]
+    fn spec_errors() {
+        let err = "line:nonee".parse::<Spec>().unwrap_err();
+        assert_eq!(err, Error::UnrecognizedSpecType("nonee".to_string()));
+
+        let err = "".parse::<Spec>().unwrap_err();
+        assert_eq!(err, Error::InvalidFormat("".to_string()));
+
+        let err = "foo".parse::<Spec>().unwrap_err();
+        assert_eq!(err, Error::InvalidFormat("foo".to_string()));
+
+        let err = "line:style:italic".parse::<Spec>().unwrap_err();
+        assert_eq!(err, Error::UnrecognizedStyle("italic".to_string()));
+
+        let err = "line:fg:brown".parse::<Spec>().unwrap_err();
+        match err {
+            Error::UnrecognizedColor(name, _) => assert_eq!(name, "brown"),
+            err => assert!(false, "unexpected error: {:?}", err),
+        }
+
+        let err = "foo:fg:brown".parse::<Spec>().unwrap_err();
+        assert_eq!(err, Error::UnrecognizedOutType("foo".to_string()));
+    }
+}
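Taken together, the new color handling in src/printer.rs parses each user-supplied `type:attribute:value` triple into a `Spec`, merges the `Spec`s into one `ColorSpecs`, and hands the result to the printer through the new `colors` builder method. A minimal sketch of that flow follows, written as if it were one more test inside the `tests` module added above; the specific spec strings and the `NoColor` test writer are illustrative choices, not taken from this diff:

```rust
// Sketch: wiring the pieces added above together inside src/printer.rs's tests.
#[test]
fn wire_up_colors() {
    // Parse user-style spec strings into Specs, then merge them.
    let user_specs: Vec<Spec> = vec![
        "path:fg:green".parse().unwrap(),
        "match:style:bold".parse().unwrap(),
    ];
    let colors = ColorSpecs::new(&user_specs);

    // Any WriteColor implementation works; NoColor wraps a plain Vec<u8>.
    let wtr = termcolor::NoColor::new(vec![]);
    let printer = Printer::new(wtr).with_filename(true).colors(colors);
    assert!(!printer.has_printed());
}
```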
src/search_buffer.rs
@@ -1,16 +1,16 @@
 /*!
-The search_buffer module is responsible for searching a single file all in a
+The `search_buffer` module is responsible for searching a single file all in a
 single buffer. Typically, the source of the buffer is a memory map. This can
 be useful for when memory maps are faster than streaming search.

-Note that this module doesn't quite support everything that search_stream does.
+Note that this module doesn't quite support everything that `search_stream`
-Notably, showing contexts.
+does. Notably, showing contexts.
 */
 use std::cmp;
 use std::path::Path;

 use grep::Grep;
-use term::Terminal;
+use termcolor::WriteColor;

 use printer::Printer;
 use search_stream::{IterLines, Options, count_lines, is_binary};
@@ -26,7 +26,7 @@ pub struct BufferSearcher<'a, W: 'a> {
     last_line: usize,
 }

-impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
+impl<'a, W: WriteColor> BufferSearcher<'a, W> {
     pub fn new(
         printer: &'a mut Printer<W>,
         grep: &'a Grep,
@@ -61,6 +61,15 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
         self
     }

+    /// If enabled, searching will print the path of files that *don't* match
+    /// the given pattern.
+    ///
+    /// Disabled by default.
+    pub fn files_without_matches(mut self, yes: bool) -> Self {
+        self.opts.files_without_matches = yes;
+        self
+    }
+
     /// Set the end-of-line byte used by this searcher.
     pub fn eol(mut self, eol: u8) -> Self {
         self.opts.eol = eol;
@@ -81,6 +90,14 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
         self
     }

+    /// Limit the number of matches to the given count.
+    ///
+    /// The default is None, which corresponds to no limit.
+    pub fn max_count(mut self, count: Option<u64>) -> Self {
+        self.opts.max_count = count;
+        self
+    }
+
     /// If enabled, don't show any output and quit searching after the first
     /// match is found.
     pub fn quiet(mut self, yes: bool) -> Self {
@@ -96,8 +113,8 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {

     #[inline(never)]
     pub fn run(mut self) -> u64 {
-        let binary_upto = cmp::min(4096, self.buf.len());
+        let binary_upto = cmp::min(10240, self.buf.len());
-        if !self.opts.text && is_binary(&self.buf[..binary_upto]) {
+        if !self.opts.text && is_binary(&self.buf[..binary_upto], true) {
             return 0;
         }

@@ -111,11 +128,11 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
                 self.print_match(m.start(), m.end());
             }
             last_end = m.end();
-            if self.opts.stop_after_first_match() {
+            if self.opts.terminate(self.match_count) {
                 break;
             }
         }
-        if self.opts.invert_match {
+        if self.opts.invert_match && !self.opts.terminate(self.match_count) {
             let upto = self.buf.len();
             self.print_inverted_matches(last_end, upto);
         }
@@ -125,6 +142,9 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
         if self.opts.files_with_matches && self.match_count > 0 {
             self.printer.path(self.path);
         }
+        if self.opts.files_without_matches && self.match_count == 0 {
+            self.printer.path(self.path);
+        }
         self.match_count
     }

@@ -146,6 +166,9 @@ impl<'a, W: Send + Terminal> BufferSearcher<'a, W> {
         debug_assert!(self.opts.invert_match);
         let mut it = IterLines::new(self.opts.eol, start);
         while let Some((s, e)) = it.next(&self.buf[..end]) {
+            if self.opts.terminate(self.match_count) {
+                return;
+            }
             self.print_match(s, e);
         }
     }
@@ -173,10 +196,9 @@ mod tests {
     use std::path::Path;

     use grep::GrepBuilder;
-    use term::{Terminal, TerminfoTerminal};

-    use out::ColoredTerminal;
     use printer::Printer;
+    use termcolor;

     use super::BufferSearcher;

@@ -193,15 +215,14 @@ and exhibited clearly, with a label attached.\
         &Path::new("/baz.rs")
     }

-    type TestSearcher<'a> =
+    type TestSearcher<'a> = BufferSearcher<'a, termcolor::NoColor<Vec<u8>>>;
-        BufferSearcher<'a, ColoredTerminal<TerminfoTerminal<Vec<u8>>>>;

     fn search<F: FnMut(TestSearcher) -> TestSearcher>(
         pat: &str,
         haystack: &str,
         mut map: F,
     ) -> (u64, String) {
-        let outbuf = ColoredTerminal::NoColor(vec![]);
+        let outbuf = termcolor::NoColor::new(vec![]);
         let mut pp = Printer::new(outbuf).with_filename(true);
         let grep = GrepBuilder::new(pat).build().unwrap();
         let count = {
@@ -266,6 +287,34 @@ and exhibited clearly, with a label attached.\
         assert_eq!(out, "/baz.rs\n");
     }

+    #[test]
+    fn files_without_matches() {
+        let (count, out) = search(
+            "zzzz", SHERLOCK, |s| s.files_without_matches(true));
+        assert_eq!(0, count);
+        assert_eq!(out, "/baz.rs\n");
+    }
+
+    #[test]
+    fn max_count() {
+        let (count, out) = search(
+            "Sherlock", SHERLOCK, |s| s.max_count(Some(1)));
+        assert_eq!(1, count);
+        assert_eq!(out, "\
+/baz.rs:For the Doctor Watsons of this world, as opposed to the Sherlock
+");
+    }
+
+    #[test]
+    fn invert_match_max_count() {
+        let (count, out) = search(
+            "zzzz", SHERLOCK, |s| s.invert_match(true).max_count(Some(1)));
+        assert_eq!(1, count);
+        assert_eq!(out, "\
+/baz.rs:For the Doctor Watsons of this world, as opposed to the Sherlock
+");
+    }
+
     #[test]
     fn invert_match() {
         let (count, out) = search(
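Both searchers now funnel their early-exit decisions through the new `Options::terminate` helper (defined in the `search_stream` hunk below): stop once an option like `--files-with-matches` or `--quiet` has seen its first match, or once `max_count` matches have been emitted. A standalone restatement of that rule, for illustration only (the `Opts` struct and `main` driver here are hypothetical, not ripgrep's types):

```rust
// Hypothetical standalone copy of the early-exit rule used by both searchers.
struct Opts {
    stop_after_first_match: bool, // e.g. --quiet or --files-with-matches
    max_count: Option<u64>,       // e.g. a --max-count style limit
}

impl Opts {
    fn terminate(&self, match_count: u64) -> bool {
        // First-match options only apply once at least one match was seen.
        if match_count > 0 && self.stop_after_first_match {
            return true;
        }
        // A match-count limit terminates as soon as it is reached.
        self.max_count.map_or(false, |max| match_count >= max)
    }
}

fn main() {
    let opts = Opts { stop_after_first_match: false, max_count: Some(1) };
    assert!(!opts.terminate(0));
    assert!(opts.terminate(1));
}
```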
src/search_stream.rs
@@ -1,5 +1,5 @@
 /*!
-The search_stream module is responsible for searching a single file and
+The `search_stream` module is responsible for searching a single file and
 printing matches. In particular, it searches the file in a streaming fashion
 using `read` calls and a (roughly) fixed size buffer.
 */
@@ -10,9 +10,10 @@ use std::fmt;
 use std::io;
 use std::path::{Path, PathBuf};

+use bytecount;
 use grep::{Grep, Match};
 use memchr::{memchr, memrchr};
-use term::Terminal;
+use termcolor::WriteColor;

 use printer::Printer;

@@ -81,9 +82,11 @@ pub struct Options {
     pub before_context: usize,
     pub count: bool,
     pub files_with_matches: bool,
+    pub files_without_matches: bool,
     pub eol: u8,
     pub invert_match: bool,
     pub line_number: bool,
+    pub max_count: Option<u64>,
     pub quiet: bool,
     pub text: bool,
 }
@@ -95,9 +98,11 @@ impl Default for Options {
             before_context: 0,
             count: false,
             files_with_matches: false,
+            files_without_matches: false,
             eol: b'\n',
             invert_match: false,
             line_number: false,
+            max_count: None,
             quiet: false,
             text: false,
         }
@@ -106,20 +111,32 @@ impl Default for Options {
 }

 impl Options {
-    /// Several options (--quiet, --count, --files-with-matches) imply that
+    /// Several options (--quiet, --count, --files-with-matches,
-    /// we shouldn't ever display matches.
+    /// --files-without-match) imply that we shouldn't ever display matches.
     pub fn skip_matches(&self) -> bool {
-        self.count || self.files_with_matches || self.quiet
+        self.count || self.files_with_matches || self.files_without_matches
+            || self.quiet
     }

-    /// Some options (--quiet, --files-with-matches) imply that we can stop
+    /// Some options (--quiet, --files-with-matches, --files-without-match)
-    /// searching after the first match.
+    /// imply that we can stop searching after the first match.
     pub fn stop_after_first_match(&self) -> bool {
-        self.files_with_matches || self.quiet
+        self.files_with_matches || self.files_without_matches || self.quiet
+    }
+
+    /// Returns true if the search should terminate based on the match count.
+    pub fn terminate(&self, match_count: u64) -> bool {
+        if match_count > 0 && self.stop_after_first_match() {
+            return true;
+        }
+        if self.max_count.map_or(false, |max| match_count >= max) {
+            return true;
+        }
+        false
     }
 }

-impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
+impl<'a, R: io::Read, W: WriteColor> Searcher<'a, R, W> {
     /// Create a new searcher.
     ///
     /// `inp` is a reusable input buffer that is used as scratch space by this
@@ -185,6 +202,14 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
         self
     }

+    /// If enabled, searching will print the path of files without any matches.
+    ///
+    /// Disabled by default.
+    pub fn files_without_matches(mut self, yes: bool) -> Self {
+        self.opts.files_without_matches = yes;
+        self
+    }
+
     /// Set the end-of-line byte used by this searcher.
     pub fn eol(mut self, eol: u8) -> Self {
         self.opts.eol = eol;
@@ -205,6 +230,14 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
         self
     }

+    /// Limit the number of matches to the given count.
+    ///
+    /// The default is None, which corresponds to no limit.
+    pub fn max_count(mut self, count: Option<u64>) -> Self {
+        self.opts.max_count = count;
+        self
+    }
+
     /// If enabled, don't show any output and quit searching after the first
     /// match is found.
     pub fn quiet(mut self, yes: bool) -> Self {
@@ -215,6 +248,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
     /// If enabled, search binary files as if they were text.
     pub fn text(mut self, yes: bool) -> Self {
         self.opts.text = yes;
+        self.inp.text(yes);
         self
     }

@@ -233,13 +267,10 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
             if !try!(self.fill()) {
                 break;
             }
-            if !self.opts.text && self.inp.is_binary {
-                break;
-            }
             while !self.terminate() && self.inp.pos < self.inp.lastnl {
                 let matched = self.grep.read_match(
                     &mut self.last_match,
-                    &mut self.inp.buf[..self.inp.lastnl],
+                    &self.inp.buf[..self.inp.lastnl],
                     self.inp.pos);
                 if self.opts.invert_match {
                     let upto =
@@ -274,13 +305,15 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
             } else if self.opts.files_with_matches {
                 self.printer.path(self.path);
             }
+        } else if self.opts.files_without_matches {
+            self.printer.path(self.path);
         }
         Ok(self.match_count)
     }

     #[inline(always)]
     fn terminate(&self) -> bool {
-        self.match_count > 0 && self.opts.stop_after_first_match()
+        self.opts.terminate(self.match_count)
     }

     #[inline(always)]
@@ -296,12 +329,12 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
                 lines);
         }
         if keep < self.last_printed {
-            self.last_printed = self.last_printed - keep;
+            self.last_printed -= keep;
         } else {
             self.last_printed = 0;
         }
         if keep <= self.last_line {
-            self.last_line = self.last_line - keep;
+            self.last_line -= keep;
         } else {
             self.count_lines(keep);
             self.last_line = 0;
@@ -317,6 +350,9 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
         debug_assert!(self.opts.invert_match);
         let mut it = IterLines::new(self.opts.eol, self.inp.pos);
         while let Some((start, end)) = it.next(&self.inp.buf[..upto]) {
+            if self.terminate() {
+                return;
+            }
             self.print_match(start, end);
             self.inp.pos = end;
         }
@@ -419,7 +455,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
     }
 }

-/// InputBuffer encapsulates the logic of maintaining a ~fixed sized buffer
+/// `InputBuffer` encapsulates the logic of maintaining a ~fixed sized buffer
 /// on which to search. There are three key pieces of complexity:
 ///
 /// 1. We must be able to handle lines that are longer than the size of the
@@ -435,7 +471,7 @@ impl<'a, R: io::Read, W: Terminal + Send> Searcher<'a, R, W> {
 /// may occur at the beginning of a buffer, in which case, lines at the end
 /// of the previous contents of the buffer need to be printed.
 ///
-/// An InputBuffer is designed to be reused and isn't tied to any particular
+/// An `InputBuffer` is designed to be reused and isn't tied to any particular
 /// reader.
 pub struct InputBuffer {
     /// The number of bytes to attempt to read at a time. Once set, this is
|
/// The number of bytes to attempt to read at a time. Once set, this is
|
||||||
@@ -463,10 +499,8 @@ pub struct InputBuffer {
|
|||||||
end: usize,
|
end: usize,
|
||||||
/// Set to true if and only if no reads have occurred yet.
|
/// Set to true if and only if no reads have occurred yet.
|
||||||
first: bool,
|
first: bool,
|
||||||
/// Set to true if and only if the contents of buf are determined to be
|
/// Set to true if all binary data should be treated as if it were text.
|
||||||
/// "binary" (i.e., not searchable text). Note that its value may be
|
text: bool,
|
||||||
/// falsely negative *or* falsely positive. It is only a heuristic.
|
|
||||||
is_binary: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl InputBuffer {
|
impl InputBuffer {
|
||||||
@@ -494,13 +528,23 @@ impl InputBuffer {
|
|||||||
lastnl: 0,
|
lastnl: 0,
|
||||||
end: 0,
|
end: 0,
|
||||||
first: true,
|
first: true,
|
||||||
is_binary: false,
|
text: false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Set the end-of-line terminator used by this input buffer.
|
/// Set the end-of-line terminator used by this input buffer.
|
||||||
pub fn eol(&mut self, eol: u8) {
|
pub fn eol(&mut self, eol: u8) -> &mut Self {
|
||||||
self.eol = eol;
|
self.eol = eol;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, search binary files as if they were text.
|
||||||
|
///
|
||||||
|
/// Note that this may cause the buffer to load the entire contents of a
|
||||||
|
/// file into memory.
|
||||||
|
pub fn text(&mut self, yes: bool) -> &mut Self {
|
||||||
|
self.text = yes;
|
||||||
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resets this buffer so that it may be reused with a new reader.
|
/// Resets this buffer so that it may be reused with a new reader.
|
||||||
@@ -509,7 +553,6 @@ impl InputBuffer {
|
|||||||
self.lastnl = 0;
|
self.lastnl = 0;
|
||||||
self.end = 0;
|
self.end = 0;
|
||||||
self.first = true;
|
self.first = true;
|
||||||
self.is_binary = false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Fill the contents of this buffer with the reader given. The reader
|
/// Fill the contents of this buffer with the reader given. The reader
|
||||||
@@ -524,9 +567,10 @@ impl InputBuffer {
|
|||||||
keep_from: usize,
|
keep_from: usize,
|
||||||
) -> Result<bool, io::Error> {
|
) -> Result<bool, io::Error> {
|
||||||
// Rollover bytes from buf[keep_from..end] and update our various
|
// Rollover bytes from buf[keep_from..end] and update our various
|
||||||
// pointers. N.B. This could be done with the unsafe ptr::copy, but
|
// pointers. N.B. This could be done with the ptr::copy, but I haven't
|
||||||
// I haven't been able to produce a benchmark that notices a difference
|
// been able to produce a benchmark that notices a difference in
|
||||||
// in performance. (Invariably, ptr::copy is also clearer IMO.)
|
// performance. (Invariably, ptr::copy is seems clearer IMO, but it is
|
||||||
|
// not safe.)
|
||||||
self.tmp.clear();
|
self.tmp.clear();
|
||||||
self.tmp.extend_from_slice(&self.buf[keep_from..self.end]);
|
self.tmp.extend_from_slice(&self.buf[keep_from..self.end]);
|
||||||
self.buf[0..self.tmp.len()].copy_from_slice(&self.tmp);
|
self.buf[0..self.tmp.len()].copy_from_slice(&self.tmp);
|
||||||
@@ -543,8 +587,10 @@ impl InputBuffer {
|
|||||||
}
|
}
|
||||||
let n = try!(rdr.read(
|
let n = try!(rdr.read(
|
||||||
&mut self.buf[self.end..self.end + self.read_size]));
|
&mut self.buf[self.end..self.end + self.read_size]));
|
||||||
if self.first && is_binary(&self.buf[self.end..self.end + n]) {
|
if !self.text {
|
||||||
self.is_binary = true;
|
if is_binary(&self.buf[self.end..self.end + n], self.first) {
|
||||||
|
return Ok(false);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
self.first = false;
|
self.first = false;
|
||||||
// We assume that reading 0 bytes means we've hit EOF.
|
// We assume that reading 0 bytes means we've hit EOF.
|
||||||
@@ -574,97 +620,17 @@ impl InputBuffer {
|
|||||||
///
|
///
|
||||||
/// Note that this may return both false positives and false negatives.
|
/// Note that this may return both false positives and false negatives.
|
||||||
#[inline(always)]
|
#[inline(always)]
|
||||||
pub fn is_binary(buf: &[u8]) -> bool {
|
pub fn is_binary(buf: &[u8], first: bool) -> bool {
|
||||||
if buf.len() >= 4 && &buf[0..4] == b"%PDF" {
|
if first && buf.len() >= 4 && &buf[0..4] == b"%PDF" {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
memchr(b'\x00', &buf[0..cmp::min(1024, buf.len())]).is_some()
|
memchr(b'\x00', buf).is_some()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Count the number of lines in the given buffer.
|
/// Count the number of lines in the given buffer.
|
||||||
#[inline(never)]
|
|
||||||
|
|
||||||
#[inline(never)]
|
#[inline(never)]
|
||||||
pub fn count_lines(buf: &[u8], eol: u8) -> u64 {
|
pub fn count_lines(buf: &[u8], eol: u8) -> u64 {
|
||||||
// This was adapted from code in the memchr crate. The specific benefit
|
bytecount::count(buf, eol) as u64
|
||||||
// here is that we can avoid a branch in the inner loop because all we're
|
|
||||||
// doing is counting.
|
|
||||||
|
|
||||||
// The technique to count EOL bytes was adapted from:
|
|
||||||
// http://bits.stephan-brumme.com/null.html
|
|
||||||
const LO_U64: u64 = 0x0101010101010101;
|
|
||||||
const HI_U64: u64 = 0x8080808080808080;
|
|
||||||
|
|
||||||
// use truncation
|
|
||||||
const LO_USIZE: usize = LO_U64 as usize;
|
|
||||||
const HI_USIZE: usize = HI_U64 as usize;
|
|
||||||
|
|
||||||
#[cfg(target_pointer_width = "32")]
|
|
||||||
const USIZE_BYTES: usize = 4;
|
|
||||||
#[cfg(target_pointer_width = "64")]
|
|
||||||
const USIZE_BYTES: usize = 8;
|
|
||||||
|
|
||||||
fn count_eol(eol: usize) -> u64 {
|
|
||||||
// Ideally, this would compile down to a POPCNT instruction, but
|
|
||||||
// it looks like you need to set RUSTFLAGS="-C target-cpu=native"
|
|
||||||
// (or target-feature=+popcnt) to get that to work. Bummer.
|
|
||||||
(eol.wrapping_sub(LO_USIZE) & !eol & HI_USIZE).count_ones() as u64
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_pointer_width = "32")]
|
|
||||||
fn repeat_byte(b: u8) -> usize {
|
|
||||||
let mut rep = (b as usize) << 8 | b as usize;
|
|
||||||
rep = rep << 16 | rep;
|
|
||||||
rep
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(target_pointer_width = "64")]
|
|
||||||
fn repeat_byte(b: u8) -> usize {
|
|
||||||
let mut rep = (b as usize) << 8 | b as usize;
|
|
||||||
rep = rep << 16 | rep;
|
|
||||||
rep = rep << 32 | rep;
|
|
||||||
rep
|
|
||||||
}
|
|
||||||
|
|
||||||
fn count_lines_slow(mut buf: &[u8], eol: u8) -> u64 {
|
|
||||||
let mut count = 0;
|
|
||||||
while let Some(pos) = memchr(eol, buf) {
|
|
||||||
count += 1;
|
|
||||||
buf = &buf[pos + 1..];
|
|
||||||
}
|
|
||||||
count
|
|
||||||
}
|
|
||||||
|
|
||||||
let len = buf.len();
|
|
||||||
let ptr = buf.as_ptr();
|
|
||||||
let mut count = 0;
|
|
||||||
|
|
||||||
// Search up to an aligned boundary...
|
|
||||||
let align = (ptr as usize) & (USIZE_BYTES - 1);
|
|
||||||
let mut i = 0;
|
|
||||||
if align > 0 {
|
|
||||||
i = cmp::min(USIZE_BYTES - align, len);
|
|
||||||
count += count_lines_slow(&buf[..i], eol);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ... and search the rest.
|
|
||||||
let repeated_eol = repeat_byte(eol);
|
|
||||||
|
|
||||||
if len >= 2 * USIZE_BYTES {
|
|
||||||
while i <= len - (2 * USIZE_BYTES) {
|
|
||||||
unsafe {
|
|
||||||
let u = *(ptr.offset(i as isize) as *const usize);
|
|
||||||
let v = *(ptr.offset((i + USIZE_BYTES) as isize)
|
|
||||||
as *const usize);
|
|
||||||
|
|
||||||
count += count_eol(u ^ repeated_eol);
|
|
||||||
count += count_eol(v ^ repeated_eol);
|
|
||||||
}
|
|
||||||
i += USIZE_BYTES * 2;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
count += count_lines_slow(&buf[i..], eol);
|
|
||||||
count
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Replaces a with b in buf.
|
/// Replaces a with b in buf.
|
||||||
@@ -804,10 +770,8 @@ mod tests {
|
|||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
|
|
||||||
use grep::GrepBuilder;
|
use grep::GrepBuilder;
|
||||||
use term::{Terminal, TerminfoTerminal};
|
|
||||||
|
|
||||||
use out::ColoredTerminal;
|
|
||||||
use printer::Printer;
|
use printer::Printer;
|
||||||
|
use termcolor;
|
||||||
|
|
||||||
use super::{InputBuffer, Searcher, start_of_previous_lines};
|
use super::{InputBuffer, Searcher, start_of_previous_lines};
|
||||||
|
|
||||||
@@ -847,7 +811,7 @@ fn main() {
|
|||||||
type TestSearcher<'a> = Searcher<
|
type TestSearcher<'a> = Searcher<
|
||||||
'a,
|
'a,
|
||||||
io::Cursor<Vec<u8>>,
|
io::Cursor<Vec<u8>>,
|
||||||
ColoredTerminal<TerminfoTerminal<Vec<u8>>>,
|
termcolor::NoColor<Vec<u8>>,
|
||||||
>;
|
>;
|
||||||
|
|
||||||
fn search_smallcap<F: FnMut(TestSearcher) -> TestSearcher>(
|
fn search_smallcap<F: FnMut(TestSearcher) -> TestSearcher>(
|
||||||
@@ -856,7 +820,7 @@ fn main() {
|
|||||||
mut map: F,
|
mut map: F,
|
||||||
) -> (u64, String) {
|
) -> (u64, String) {
|
||||||
let mut inp = InputBuffer::with_capacity(1);
|
let mut inp = InputBuffer::with_capacity(1);
|
||||||
let outbuf = ColoredTerminal::NoColor(vec![]);
|
let outbuf = termcolor::NoColor::new(vec![]);
|
||||||
let mut pp = Printer::new(outbuf).with_filename(true);
|
let mut pp = Printer::new(outbuf).with_filename(true);
|
||||||
let grep = GrepBuilder::new(pat).build().unwrap();
|
let grep = GrepBuilder::new(pat).build().unwrap();
|
||||||
let count = {
|
let count = {
|
||||||
@@ -873,7 +837,7 @@ fn main() {
|
|||||||
mut map: F,
|
mut map: F,
|
||||||
) -> (u64, String) {
|
) -> (u64, String) {
|
||||||
let mut inp = InputBuffer::with_capacity(4096);
|
let mut inp = InputBuffer::with_capacity(4096);
|
||||||
let outbuf = ColoredTerminal::NoColor(vec![]);
|
let outbuf = termcolor::NoColor::new(vec![]);
|
||||||
let mut pp = Printer::new(outbuf).with_filename(true);
|
let mut pp = Printer::new(outbuf).with_filename(true);
|
||||||
let grep = GrepBuilder::new(pat).build().unwrap();
|
let grep = GrepBuilder::new(pat).build().unwrap();
|
||||||
let count = {
|
let count = {
|
||||||
@@ -1040,6 +1004,34 @@ fn main() {
|
|||||||
assert_eq!(out, "/baz.rs\n");
|
assert_eq!(out, "/baz.rs\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn files_without_matches() {
|
||||||
|
let (count, out) = search_smallcap(
|
||||||
|
"zzzz", SHERLOCK, |s| s.files_without_matches(true));
|
||||||
|
assert_eq!(0, count);
|
||||||
|
assert_eq!(out, "/baz.rs\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn max_count() {
|
||||||
|
let (count, out) = search_smallcap(
|
||||||
|
"Sherlock", SHERLOCK, |s| s.max_count(Some(1)));
|
||||||
|
assert_eq!(1, count);
|
||||||
|
assert_eq!(out, "\
|
||||||
|
/baz.rs:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn invert_match_max_count() {
|
||||||
|
let (count, out) = search(
|
||||||
|
"zzzz", SHERLOCK, |s| s.invert_match(true).max_count(Some(1)));
|
||||||
|
assert_eq!(1, count);
|
||||||
|
assert_eq!(out, "\
|
||||||
|
/baz.rs:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn invert_match() {
|
fn invert_match() {
|
||||||
let (count, out) = search_smallcap(
|
let (count, out) = search_smallcap(
|
||||||
|
|||||||
@@ -1,176 +0,0 @@
|
|||||||
/*!
|
|
||||||
This module contains a Windows-only *in-memory* implementation of the
|
|
||||||
`term::Terminal` trait.
|
|
||||||
|
|
||||||
This particular implementation is a bit idiosyncratic, and the "in-memory"
|
|
||||||
specification is to blame. In particular, on Windows, coloring requires
|
|
||||||
communicating with the console synchronously as data is written to stdout.
|
|
||||||
This is anathema to how ripgrep fundamentally works: by writing search results
|
|
||||||
to intermediate thread local buffers in order to maximize parallelism.
|
|
||||||
|
|
||||||
Eliminating parallelism on Windows isn't an option, because that would negate
|
|
||||||
a tremendous performance benefit just for coloring.
|
|
||||||
|
|
||||||
We've worked around this by providing an implementation of `term::Terminal`
|
|
||||||
that records precisely where a color or a reset should be invoked, according
|
|
||||||
to a byte offset in the in memory buffer. When the buffer is actually printed,
|
|
||||||
we copy the bytes from the buffer to stdout incrementally while invoking the
|
|
||||||
corresponding console APIs for coloring at the right location.
|
|
||||||
|
|
||||||
(Another approach would be to do ANSI coloring unconditionally, then parse that
|
|
||||||
and translate it to console commands. The advantage of that approach is that
|
|
||||||
it doesn't require any additional memory for storing offsets. In practice
|
|
||||||
though, coloring is only used in the terminal, which tends to correspond to
|
|
||||||
searches that produce very few results with respect to the corpus searched.
|
|
||||||
Therefore, this is an acceptable trade off. Namely, we do not pay for it when
|
|
||||||
coloring is disabled.
|
|
||||||
*/
|
|
||||||
use std::io;
|
|
||||||
|
|
||||||
use term::{self, Terminal};
|
|
||||||
use term::color::Color;
|
|
||||||
|
|
||||||
/// An in-memory buffer that provides Windows console coloring.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct WindowsBuffer {
|
|
||||||
buf: Vec<u8>,
|
|
||||||
pos: usize,
|
|
||||||
colors: Vec<WindowsColor>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A color associated with a particular location in a buffer.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
struct WindowsColor {
|
|
||||||
pos: usize,
|
|
||||||
opt: WindowsOption,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A color or reset directive that can be translated into an instruction to
|
|
||||||
/// the Windows console.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
enum WindowsOption {
|
|
||||||
Foreground(Color),
|
|
||||||
Background(Color),
|
|
||||||
Reset,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl WindowsBuffer {
|
|
||||||
/// Create a new empty buffer for Windows console coloring.
|
|
||||||
pub fn new() -> WindowsBuffer {
|
|
||||||
WindowsBuffer {
|
|
||||||
buf: vec![],
|
|
||||||
pos: 0,
|
|
||||||
colors: vec![],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn push(&mut self, opt: WindowsOption) {
|
|
||||||
let pos = self.pos;
|
|
||||||
self.colors.push(WindowsColor { pos: pos, opt: opt });
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Print the contents to the given terminal.
|
|
||||||
pub fn print_stdout<T: Terminal + Send>(&self, tt: &mut T) {
|
|
||||||
if !tt.supports_color() {
|
|
||||||
let _ = tt.write_all(&self.buf);
|
|
||||||
let _ = tt.flush();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let mut last = 0;
|
|
||||||
for col in &self.colors {
|
|
||||||
let _ = tt.write_all(&self.buf[last..col.pos]);
|
|
||||||
match col.opt {
|
|
||||||
WindowsOption::Foreground(c) => {
|
|
||||||
let _ = tt.fg(c);
|
|
||||||
}
|
|
||||||
WindowsOption::Background(c) => {
|
|
||||||
let _ = tt.bg(c);
|
|
||||||
}
|
|
||||||
WindowsOption::Reset => {
|
|
||||||
let _ = tt.reset();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
last = col.pos;
|
|
||||||
}
|
|
||||||
let _ = tt.write_all(&self.buf[last..]);
|
|
||||||
let _ = tt.flush();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Clear the buffer.
|
|
||||||
pub fn clear(&mut self) {
|
|
||||||
self.buf.clear();
|
|
||||||
self.colors.clear();
|
|
||||||
self.pos = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl io::Write for WindowsBuffer {
|
|
||||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
|
||||||
let n = try!(self.buf.write(buf));
|
|
||||||
self.pos += n;
|
|
||||||
Ok(n)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn flush(&mut self) -> io::Result<()> {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Terminal for WindowsBuffer {
|
|
||||||
type Output = Vec<u8>;
|
|
||||||
|
|
||||||
fn fg(&mut self, fg: Color) -> term::Result<()> {
|
|
||||||
self.push(WindowsOption::Foreground(fg));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn bg(&mut self, bg: Color) -> term::Result<()> {
|
|
||||||
self.push(WindowsOption::Background(bg));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn attr(&mut self, _attr: term::Attr) -> term::Result<()> {
|
|
||||||
Err(term::Error::NotSupported)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn supports_attr(&self, _attr: term::Attr) -> bool {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
|
|
||||||
fn reset(&mut self) -> term::Result<()> {
|
|
||||||
self.push(WindowsOption::Reset);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn supports_reset(&self) -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn supports_color(&self) -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn cursor_up(&mut self) -> term::Result<()> {
|
|
||||||
Err(term::Error::NotSupported)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn delete_line(&mut self) -> term::Result<()> {
|
|
||||||
Err(term::Error::NotSupported)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn carriage_return(&mut self) -> term::Result<()> {
|
|
||||||
Err(term::Error::NotSupported)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_ref(&self) -> &Vec<u8> {
|
|
||||||
&self.buf
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_mut(&mut self) -> &mut Vec<u8> {
|
|
||||||
&mut self.buf
|
|
||||||
}
|
|
||||||
|
|
||||||
fn into_inner(self) -> Vec<u8> {
|
|
||||||
self.buf
|
|
||||||
}
|
|
||||||
}
|
|
||||||
455
src/types.rs
455
src/types.rs
@@ -1,455 +0,0 @@
|
|||||||
/*!
|
|
||||||
The types module provides a way of associating glob patterns on file names to
|
|
||||||
file types.
|
|
||||||
*/
|
|
||||||
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::error::Error as StdError;
|
|
||||||
use std::fmt;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
use regex;
|
|
||||||
|
|
||||||
use gitignore::{Match, Pattern};
|
|
||||||
use globset::{self, GlobBuilder, GlobSet, GlobSetBuilder};
|
|
||||||
|
|
||||||
const TYPE_EXTENSIONS: &'static [(&'static str, &'static [&'static str])] = &[
|
|
||||||
("asm", &["*.asm", "*.s", "*.S"]),
|
|
||||||
("awk", &["*.awk"]),
|
|
||||||
("c", &["*.c", "*.h", "*.H"]),
|
|
||||||
("cbor", &["*.cbor"]),
|
|
||||||
("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]),
|
|
||||||
("cmake", &["*.cmake", "CMakeLists.txt"]),
|
|
||||||
("coffeescript", &["*.coffee"]),
|
|
||||||
("config", &["*.config"]),
|
|
||||||
("cpp", &[
|
|
||||||
"*.C", "*.cc", "*.cpp", "*.cxx",
|
|
||||||
"*.h", "*.H", "*.hh", "*.hpp",
|
|
||||||
]),
|
|
||||||
("csharp", &["*.cs"]),
|
|
||||||
("css", &["*.css"]),
|
|
||||||
("cython", &["*.pyx"]),
|
|
||||||
("dart", &["*.dart"]),
|
|
||||||
("d", &["*.d"]),
|
|
||||||
("elisp", &["*.el"]),
|
|
||||||
("erlang", &["*.erl", "*.hrl"]),
|
|
||||||
("fortran", &[
|
|
||||||
"*.f", "*.F", "*.f77", "*.F77", "*.pfo",
|
|
||||||
"*.f90", "*.F90", "*.f95", "*.F95",
|
|
||||||
]),
|
|
||||||
("fsharp", &["*.fs", "*.fsx", "*.fsi"]),
|
|
||||||
("go", &["*.go"]),
|
|
||||||
("groovy", &["*.groovy"]),
|
|
||||||
("haskell", &["*.hs", "*.lhs"]),
|
|
||||||
("html", &["*.htm", "*.html"]),
|
|
||||||
("java", &["*.java"]),
|
|
||||||
("jinja", &["*.jinja", "*.jinja2"]),
|
|
||||||
("js", &[
|
|
||||||
"*.js", "*.jsx", "*.vue",
|
|
||||||
]),
|
|
||||||
("json", &["*.json"]),
|
|
||||||
("jsonl", &["*.jsonl"]),
|
|
||||||
("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]),
|
|
||||||
("lua", &["*.lua"]),
|
|
||||||
("m4", &["*.ac", "*.m4"]),
|
|
||||||
("make", &["gnumakefile", "Gnumakefile", "makefile", "Makefile", "*.mk"]),
|
|
||||||
("markdown", &["*.md"]),
|
|
||||||
("md", &["*.md"]),
|
|
||||||
("matlab", &["*.m"]),
|
|
||||||
("mk", &["mkfile"]),
|
|
||||||
("ml", &["*.ml"]),
|
|
||||||
("nim", &["*.nim"]),
|
|
||||||
("objc", &["*.h", "*.m"]),
|
|
||||||
("objcpp", &["*.h", "*.mm"]),
|
|
||||||
("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]),
|
|
||||||
("perl", &["*.perl", "*.pl", "*.PL", "*.plh", "*.plx", "*.pm"]),
|
|
||||||
("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]),
|
|
||||||
("py", &["*.py"]),
|
|
||||||
("readme", &["README*", "*README"]),
|
|
||||||
("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]),
|
|
||||||
("rst", &["*.rst"]),
|
|
||||||
("ruby", &["*.rb"]),
|
|
||||||
("rust", &["*.rs"]),
|
|
||||||
("scala", &["*.scala"]),
|
|
||||||
("sh", &["*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh"]),
|
|
||||||
("spark", &["*.spark"]),
|
|
||||||
("sql", &["*.sql"]),
|
|
||||||
("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]),
|
|
||||||
("swift", &["*.swift"]),
|
|
||||||
("tex", &["*.tex", "*.cls", "*.sty"]),
|
|
||||||
("ts", &["*.ts", "*.tsx"]),
|
|
||||||
("txt", &["*.txt"]),
|
|
||||||
("toml", &["*.toml", "Cargo.lock"]),
|
|
||||||
("vala", &["*.vala"]),
|
|
||||||
("vb", &["*.vb"]),
|
|
||||||
("vimscript", &["*.vim"]),
|
|
||||||
("xml", &["*.xml"]),
|
|
||||||
("yacc", &["*.y"]),
|
|
||||||
("yaml", &["*.yaml", "*.yml"]),
|
|
||||||
];
|
|
||||||
|
|
||||||
/// Describes all the possible failure conditions for building a file type
|
|
||||||
/// matcher.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
/// We tried to select (or negate) a file type that is not defined.
|
|
||||||
UnrecognizedFileType(String),
|
|
||||||
/// A user specified file type definition could not be parsed.
|
|
||||||
InvalidDefinition,
|
|
||||||
/// There was an error building the matcher (probably a bad glob).
|
|
||||||
Glob(globset::Error),
|
|
||||||
/// There was an error compiling a glob as a regex.
|
|
||||||
Regex(regex::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl StdError for Error {
|
|
||||||
fn description(&self) -> &str {
|
|
||||||
match *self {
|
|
||||||
Error::UnrecognizedFileType(_) => "unrecognized file type",
|
|
||||||
Error::InvalidDefinition => "invalid definition",
|
|
||||||
Error::Glob(ref err) => err.description(),
|
|
||||||
Error::Regex(ref err) => err.description(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Error {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match *self {
|
|
||||||
Error::UnrecognizedFileType(ref ty) => {
|
|
||||||
write!(f, "unrecognized file type: {}", ty)
|
|
||||||
}
|
|
||||||
Error::InvalidDefinition => {
|
|
||||||
write!(f, "invalid definition (format is type:glob, e.g., \
|
|
||||||
html:*.html)")
|
|
||||||
}
|
|
||||||
Error::Glob(ref err) => err.fmt(f),
|
|
||||||
Error::Regex(ref err) => err.fmt(f),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<globset::Error> for Error {
|
|
||||||
fn from(err: globset::Error) -> Error {
|
|
||||||
Error::Glob(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<regex::Error> for Error {
|
|
||||||
fn from(err: regex::Error) -> Error {
|
|
||||||
Error::Regex(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A single file type definition.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct FileTypeDef {
|
|
||||||
name: String,
|
|
||||||
pats: Vec<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FileTypeDef {
|
|
||||||
/// Return the name of this file type.
|
|
||||||
pub fn name(&self) -> &str {
|
|
||||||
&self.name
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return the glob patterns used to recognize this file type.
|
|
||||||
pub fn patterns(&self) -> &[String] {
|
|
||||||
&self.pats
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Types is a file type matcher.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Types {
|
|
||||||
defs: Vec<FileTypeDef>,
|
|
||||||
selected: Option<GlobSet>,
|
|
||||||
negated: Option<GlobSet>,
|
|
||||||
has_selected: bool,
|
|
||||||
unmatched_pat: Pattern,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Types {
|
|
||||||
/// Creates a new file type matcher from the given Gitignore matcher. If
|
|
||||||
/// not Gitignore matcher is provided, then the file type matcher has no
|
|
||||||
/// effect.
|
|
||||||
///
|
|
||||||
/// If has_selected is true, then at least one file type was selected.
|
|
||||||
/// Therefore, any non-matches should be ignored.
|
|
||||||
fn new(
|
|
||||||
selected: Option<GlobSet>,
|
|
||||||
negated: Option<GlobSet>,
|
|
||||||
has_selected: bool,
|
|
||||||
defs: Vec<FileTypeDef>,
|
|
||||||
) -> Types {
|
|
||||||
Types {
|
|
||||||
defs: defs,
|
|
||||||
selected: selected,
|
|
||||||
negated: negated,
|
|
||||||
has_selected: has_selected,
|
|
||||||
unmatched_pat: Pattern {
|
|
||||||
from: Path::new("<filetype>").to_path_buf(),
|
|
||||||
original: "<N/A>".to_string(),
|
|
||||||
pat: "<N/A>".to_string(),
|
|
||||||
whitelist: false,
|
|
||||||
only_dir: false,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Creates a new file type matcher that never matches.
|
|
||||||
pub fn empty() -> Types {
|
|
||||||
Types::new(None, None, false, vec![])
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns a match for the given path against this file type matcher.
|
|
||||||
///
|
|
||||||
/// The path is considered whitelisted if it matches a selected file type.
|
|
||||||
/// The path is considered ignored if it matched a negated file type.
|
|
||||||
/// If at least one file type is selected and path doesn't match, then
|
|
||||||
/// the path is also considered ignored.
|
|
||||||
pub fn matched<P: AsRef<Path>>(&self, path: P, is_dir: bool) -> Match {
|
|
||||||
// If we don't have any matcher, then we can't do anything.
|
|
||||||
if self.negated.is_none() && self.selected.is_none() {
|
|
||||||
return Match::None;
|
|
||||||
}
|
|
||||||
// File types don't apply to directories.
|
|
||||||
if is_dir {
|
|
||||||
return Match::None;
|
|
||||||
}
|
|
||||||
let path = path.as_ref();
|
|
||||||
let name = match path.file_name() {
|
|
||||||
Some(name) => name.to_string_lossy(),
|
|
||||||
None if self.has_selected => {
|
|
||||||
return Match::Ignored(&self.unmatched_pat);
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
return Match::None;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
if self.negated.as_ref().map(|s| s.is_match(&*name)).unwrap_or(false) {
|
|
||||||
return Match::Ignored(&self.unmatched_pat);
|
|
||||||
}
|
|
||||||
if self.selected.as_ref().map(|s|s.is_match(&*name)).unwrap_or(false) {
|
|
||||||
return Match::Whitelist(&self.unmatched_pat);
|
|
||||||
}
|
|
||||||
if self.has_selected {
|
|
||||||
Match::Ignored(&self.unmatched_pat)
|
|
||||||
} else {
|
|
||||||
Match::None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return the set of current file type definitions.
|
|
||||||
pub fn definitions(&self) -> &[FileTypeDef] {
|
|
||||||
&self.defs
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// TypesBuilder builds a type matcher from a set of file type definitions and
|
|
||||||
/// a set of file type selections.
|
|
||||||
pub struct TypesBuilder {
|
|
||||||
types: HashMap<String, Vec<String>>,
|
|
||||||
selected: Vec<String>,
|
|
||||||
negated: Vec<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TypesBuilder {
|
|
||||||
/// Create a new builder for a file type matcher.
|
|
||||||
pub fn new() -> TypesBuilder {
|
|
||||||
TypesBuilder {
|
|
||||||
types: HashMap::new(),
|
|
||||||
selected: vec![],
|
|
||||||
negated: vec![],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Build the current set of file type definitions *and* selections into
|
|
||||||
/// a file type matcher.
|
|
||||||
pub fn build(&self) -> Result<Types, Error> {
|
|
||||||
let selected_globs =
|
|
||||||
if self.selected.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
let mut bset = GlobSetBuilder::new();
|
|
||||||
for name in &self.selected {
|
|
||||||
let globs = match self.types.get(name) {
|
|
||||||
Some(globs) => globs,
|
|
||||||
None => {
|
|
||||||
let msg = name.to_string();
|
|
||||||
return Err(Error::UnrecognizedFileType(msg));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
for glob in globs {
|
|
||||||
let pat = try!(
|
|
||||||
GlobBuilder::new(glob)
|
|
||||||
.literal_separator(true).build());
|
|
||||||
bset.add(pat);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some(try!(bset.build()))
|
|
||||||
};
|
|
||||||
let negated_globs =
|
|
||||||
if self.negated.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
let mut bset = GlobSetBuilder::new();
|
|
||||||
for name in &self.negated {
|
|
||||||
let globs = match self.types.get(name) {
|
|
||||||
Some(globs) => globs,
|
|
||||||
None => {
|
|
||||||
let msg = name.to_string();
|
|
||||||
return Err(Error::UnrecognizedFileType(msg));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
for glob in globs {
|
|
||||||
let pat = try!(
|
|
||||||
GlobBuilder::new(glob)
|
|
||||||
.literal_separator(true).build());
|
|
||||||
bset.add(pat);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some(try!(bset.build()))
|
|
||||||
};
|
|
||||||
Ok(Types::new(
|
|
||||||
selected_globs,
|
|
||||||
negated_globs,
|
|
||||||
!self.selected.is_empty(),
|
|
||||||
self.definitions(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return the set of current file type definitions.
|
|
||||||
pub fn definitions(&self) -> Vec<FileTypeDef> {
|
|
||||||
let mut defs = vec![];
|
|
||||||
for (ref name, ref pats) in &self.types {
|
|
||||||
let mut pats = pats.to_vec();
|
|
||||||
pats.sort();
|
|
||||||
defs.push(FileTypeDef {
|
|
||||||
name: name.to_string(),
|
|
||||||
pats: pats,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
defs.sort_by(|def1, def2| def1.name().cmp(def2.name()));
|
|
||||||
defs
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Select the file type given by `name`.
|
|
||||||
///
|
|
||||||
/// If `name` is `all`, then all file types are selected.
|
|
||||||
pub fn select(&mut self, name: &str) -> &mut TypesBuilder {
|
|
||||||
if name == "all" {
|
|
||||||
for name in self.types.keys() {
|
|
||||||
self.selected.push(name.to_string());
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
self.selected.push(name.to_string());
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Ignore the file type given by `name`.
|
|
||||||
///
|
|
||||||
/// If `name` is `all`, then all file types are negated.
|
|
||||||
pub fn negate(&mut self, name: &str) -> &mut TypesBuilder {
|
|
||||||
if name == "all" {
|
|
||||||
for name in self.types.keys() {
|
|
||||||
self.negated.push(name.to_string());
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
self.negated.push(name.to_string());
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Clear any file type definitions for the type given.
|
|
||||||
pub fn clear(&mut self, name: &str) -> &mut TypesBuilder {
|
|
||||||
self.types.remove(name);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add a new file type definition. `name` can be arbitrary and `pat`
|
|
||||||
/// should be a glob recognizing file paths belonging to the `name` type.
|
|
||||||
pub fn add(&mut self, name: &str, pat: &str) -> &mut TypesBuilder {
|
|
||||||
self.types.entry(name.to_string())
|
|
||||||
.or_insert(vec![]).push(pat.to_string());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add a new file type definition specified in string form. The format
|
|
||||||
/// is `name:glob`. Names may not include a colon.
|
|
||||||
pub fn add_def(&mut self, def: &str) -> Result<(), Error> {
|
|
||||||
let name: String = def.chars().take_while(|&c| c != ':').collect();
|
|
||||||
let pat: String = def.chars().skip(name.chars().count() + 1).collect();
|
|
||||||
if name.is_empty() || pat.is_empty() {
|
|
||||||
return Err(Error::InvalidDefinition);
|
|
||||||
}
|
|
||||||
self.add(&name, &pat);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add a set of default file type definitions.
|
|
||||||
pub fn add_defaults(&mut self) -> &mut TypesBuilder {
|
|
||||||
for &(name, exts) in TYPE_EXTENSIONS {
|
|
||||||
for ext in exts {
|
|
||||||
self.add(name, ext);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::TypesBuilder;
|
|
||||||
|
|
||||||
macro_rules! matched {
|
|
||||||
($name:ident, $types:expr, $sel:expr, $selnot:expr,
|
|
||||||
$path:expr) => {
|
|
||||||
matched!($name, $types, $sel, $selnot, $path, true);
|
|
||||||
};
|
|
||||||
(not, $name:ident, $types:expr, $sel:expr, $selnot:expr,
|
|
||||||
$path:expr) => {
|
|
||||||
matched!($name, $types, $sel, $selnot, $path, false);
|
|
||||||
};
|
|
||||||
($name:ident, $types:expr, $sel:expr, $selnot:expr,
|
|
||||||
$path:expr, $matched:expr) => {
|
|
||||||
#[test]
|
|
||||||
fn $name() {
|
|
||||||
let mut btypes = TypesBuilder::new();
|
|
||||||
for tydef in $types {
|
|
||||||
btypes.add_def(tydef).unwrap();
|
|
||||||
}
|
|
||||||
for sel in $sel {
|
|
||||||
btypes.select(sel);
|
|
||||||
}
|
|
||||||
for selnot in $selnot {
|
|
||||||
btypes.negate(selnot);
|
|
||||||
}
|
|
||||||
let types = btypes.build().unwrap();
|
|
||||||
let mat = types.matched($path, false);
|
|
||||||
assert_eq!($matched, !mat.is_ignored());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn types() -> Vec<&'static str> {
|
|
||||||
vec![
|
|
||||||
"html:*.html",
|
|
||||||
"html:*.htm",
|
|
||||||
"rust:*.rs",
|
|
||||||
"js:*.js",
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
matched!(match1, types(), vec!["rust"], vec![], "lib.rs");
|
|
||||||
matched!(match2, types(), vec!["html"], vec![], "index.html");
|
|
||||||
matched!(match3, types(), vec!["html"], vec![], "index.htm");
|
|
||||||
matched!(match4, types(), vec!["html", "rust"], vec![], "main.rs");
|
|
||||||
matched!(match5, types(), vec![], vec![], "index.html");
|
|
||||||
matched!(match6, types(), vec![], vec!["rust"], "index.html");
|
|
||||||
|
|
||||||
matched!(not, matchnot1, types(), vec!["rust"], vec![], "index.html");
|
|
||||||
matched!(not, matchnot2, types(), vec![], vec!["rust"], "main.rs");
|
|
||||||
}
|
|
||||||
128
src/unescape.rs
Normal file
128
src/unescape.rs
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
/// A single state in the state machine used by `unescape`.
|
||||||
|
#[derive(Clone, Copy, Eq, PartialEq)]
|
||||||
|
enum State {
|
||||||
|
/// The state after seeing a `\`.
|
||||||
|
Escape,
|
||||||
|
/// The state after seeing a `\x`.
|
||||||
|
HexFirst,
|
||||||
|
/// The state after seeing a `\x[0-9A-Fa-f]`.
|
||||||
|
HexSecond(char),
|
||||||
|
/// Default state.
|
||||||
|
Literal,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Unescapes a string given on the command line. It supports a limited set of
|
||||||
|
/// escape sequences:
|
||||||
|
///
|
||||||
|
/// * \t, \r and \n are mapped to their corresponding ASCII bytes.
|
||||||
|
/// * \xZZ hexadecimal escapes are mapped to their byte.
|
||||||
|
pub fn unescape(s: &str) -> Vec<u8> {
|
||||||
|
use self::State::*;
|
||||||
|
|
||||||
|
let mut bytes = vec![];
|
||||||
|
let mut state = Literal;
|
||||||
|
for c in s.chars() {
|
||||||
|
match state {
|
||||||
|
Escape => {
|
||||||
|
match c {
|
||||||
|
'n' => { bytes.push(b'\n'); state = Literal; }
|
||||||
|
'r' => { bytes.push(b'\r'); state = Literal; }
|
||||||
|
't' => { bytes.push(b'\t'); state = Literal; }
|
||||||
|
'x' => { state = HexFirst; }
|
||||||
|
c => {
|
||||||
|
bytes.extend(format!(r"\{}", c).into_bytes());
|
||||||
|
state = Literal;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
HexFirst => {
|
||||||
|
match c {
|
||||||
|
'0'...'9' | 'A'...'F' | 'a'...'f' => {
|
||||||
|
state = HexSecond(c);
|
||||||
|
}
|
||||||
|
c => {
|
||||||
|
bytes.extend(format!(r"\x{}", c).into_bytes());
|
||||||
|
state = Literal;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
HexSecond(first) => {
|
||||||
|
match c {
|
||||||
|
'0'...'9' | 'A'...'F' | 'a'...'f' => {
|
||||||
|
let ordinal = format!("{}{}", first, c);
|
||||||
|
let byte = u8::from_str_radix(&ordinal, 16).unwrap();
|
||||||
|
bytes.push(byte);
|
||||||
|
state = Literal;
|
||||||
|
}
|
||||||
|
c => {
|
||||||
|
let original = format!(r"\x{}{}", first, c);
|
||||||
|
bytes.extend(original.into_bytes());
|
||||||
|
state = Literal;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Literal => {
|
||||||
|
match c {
|
||||||
|
'\\' => { state = Escape; }
|
||||||
|
c => { bytes.extend(c.to_string().as_bytes()); }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
match state {
|
||||||
|
Escape => bytes.push(b'\\'),
|
||||||
|
HexFirst => bytes.extend(b"\\x"),
|
||||||
|
HexSecond(c) => bytes.extend(format!("\\x{}", c).into_bytes()),
|
||||||
|
Literal => {}
|
||||||
|
}
|
||||||
|
bytes
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::unescape;
|
||||||
|
|
||||||
|
fn b(bytes: &'static [u8]) -> Vec<u8> {
|
||||||
|
bytes.to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_nul() {
|
||||||
|
assert_eq!(b(b"\x00"), unescape(r"\x00"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_nl() {
|
||||||
|
assert_eq!(b(b"\n"), unescape(r"\n"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_tab() {
|
||||||
|
assert_eq!(b(b"\t"), unescape(r"\t"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_carriage() {
|
||||||
|
assert_eq!(b(b"\r"), unescape(r"\r"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_nothing_simple() {
|
||||||
|
assert_eq!(b(b"\\a"), unescape(r"\a"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_nothing_hex0() {
|
||||||
|
assert_eq!(b(b"\\x"), unescape(r"\x"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_nothing_hex1() {
|
||||||
|
assert_eq!(b(b"\\xz"), unescape(r"\xz"));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn unescape_nothing_hex2() {
|
||||||
|
assert_eq!(b(b"\\xzz"), unescape(r"\xzz"));
|
||||||
|
}
|
||||||
|
}
|
||||||
140
src/walk.rs
140
src/walk.rs
@@ -1,140 +0,0 @@
|
|||||||
/*!
|
|
||||||
The walk module implements a recursive directory iterator (using the `walkdir`)
|
|
||||||
crate that can efficiently skip and ignore files and directories specified in
|
|
||||||
a user's ignore patterns.
|
|
||||||
*/
|
|
||||||
|
|
||||||
use walkdir::{self, DirEntry, WalkDir, WalkDirIterator};
|
|
||||||
|
|
||||||
use ignore::Ignore;
|
|
||||||
|
|
||||||
/// Iter is a recursive directory iterator over file paths in a directory.
|
|
||||||
/// Only file paths should be searched are yielded.
|
|
||||||
pub struct Iter {
|
|
||||||
ig: Ignore,
|
|
||||||
it: WalkEventIter,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Iter {
|
|
||||||
/// Create a new recursive directory iterator using the ignore patterns
|
|
||||||
/// and walkdir iterator given.
|
|
||||||
pub fn new(ig: Ignore, wd: WalkDir) -> Iter {
|
|
||||||
Iter {
|
|
||||||
ig: ig,
|
|
||||||
it: WalkEventIter::from(wd),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns true if this entry should be skipped.
|
|
||||||
#[inline(always)]
|
|
||||||
fn skip_entry(&self, ent: &DirEntry) -> bool {
|
|
||||||
if ent.depth() == 0 {
|
|
||||||
// Never skip the root directory.
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if self.ig.ignored(ent.path(), ent.file_type().is_dir()) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Iterator for Iter {
|
|
||||||
type Item = DirEntry;
|
|
||||||
|
|
||||||
#[inline(always)]
|
|
||||||
fn next(&mut self) -> Option<DirEntry> {
|
|
||||||
while let Some(ev) = self.it.next() {
|
|
||||||
match ev {
|
|
||||||
Err(err) => {
|
|
||||||
eprintln!("{}", err);
|
|
||||||
}
|
|
||||||
Ok(WalkEvent::Exit) => {
|
|
||||||
self.ig.pop();
|
|
||||||
}
|
|
||||||
Ok(WalkEvent::Dir(ent)) => {
|
|
||||||
if self.skip_entry(&ent) {
|
|
||||||
self.it.it.skip_current_dir();
|
|
||||||
// Still need to push this on the stack because we'll
|
|
||||||
// get a WalkEvent::Exit event for this dir. We don't
|
|
||||||
// care if it errors though.
|
|
||||||
let _ = self.ig.push(ent.path());
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if let Err(err) = self.ig.push(ent.path()) {
|
|
||||||
eprintln!("{}", err);
|
|
||||||
self.it.it.skip_current_dir();
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(WalkEvent::File(ent)) => {
|
|
||||||
if self.skip_entry(&ent) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
// If this isn't actually a file (e.g., a symlink), then
|
|
||||||
// skip it.
|
|
||||||
if !ent.file_type().is_file() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
return Some(ent);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// WalkEventIter transforms a WalkDir iterator into an iterator that more
|
|
||||||
/// accurately describes the directory tree. Namely, it emits events that are
|
|
||||||
/// one of three types: directory, file or "exit." An "exit" event means that
|
|
||||||
/// the entire contents of a directory have been enumerated.
|
|
||||||
struct WalkEventIter {
|
|
||||||
depth: usize,
|
|
||||||
it: walkdir::Iter,
|
|
||||||
next: Option<Result<DirEntry, walkdir::Error>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum WalkEvent {
|
|
||||||
Dir(DirEntry),
|
|
||||||
File(DirEntry),
|
|
||||||
Exit,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<WalkDir> for WalkEventIter {
|
|
||||||
fn from(it: WalkDir) -> WalkEventIter {
|
|
||||||
WalkEventIter { depth: 0, it: it.into_iter(), next: None }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Iterator for WalkEventIter {
|
|
||||||
type Item = walkdir::Result<WalkEvent>;
|
|
||||||
|
|
||||||
#[inline(always)]
|
|
||||||
fn next(&mut self) -> Option<walkdir::Result<WalkEvent>> {
|
|
||||||
let dent = self.next.take().or_else(|| self.it.next());
|
|
||||||
let depth = match dent {
|
|
||||||
None => 0,
|
|
||||||
Some(Ok(ref dent)) => dent.depth(),
|
|
||||||
Some(Err(ref err)) => err.depth(),
|
|
||||||
};
|
|
||||||
if depth < self.depth {
|
|
||||||
self.depth -= 1;
|
|
||||||
self.next = dent;
|
|
||||||
return Some(Ok(WalkEvent::Exit));
|
|
||||||
}
|
|
||||||
self.depth = depth;
|
|
||||||
match dent {
|
|
||||||
None => None,
|
|
||||||
Some(Err(err)) => Some(Err(err)),
|
|
||||||
Some(Ok(dent)) => {
|
|
||||||
if dent.file_type().is_dir() {
|
|
||||||
self.depth += 1;
|
|
||||||
Some(Ok(WalkEvent::Dir(dent)))
|
|
||||||
} else {
|
|
||||||
Some(Ok(WalkEvent::File(dent)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
313
src/worker.rs
Normal file
313
src/worker.rs
Normal file
@@ -0,0 +1,313 @@
|
|||||||
|
use std::fs::File;
|
||||||
|
use std::io;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
use encoding_rs::Encoding;
|
||||||
|
use grep::Grep;
|
||||||
|
use ignore::DirEntry;
|
||||||
|
use memmap::{Mmap, Protection};
|
||||||
|
use termcolor::WriteColor;
|
||||||
|
|
||||||
|
use decoder::DecodeReader;
|
||||||
|
use pathutil::strip_prefix;
|
||||||
|
use printer::Printer;
|
||||||
|
use search_buffer::BufferSearcher;
|
||||||
|
use search_stream::{InputBuffer, Searcher};
|
||||||
|
|
||||||
|
use Result;
|
||||||
|
|
||||||
|
pub enum Work {
|
||||||
|
Stdin,
|
||||||
|
DirEntry(DirEntry),
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct WorkerBuilder {
|
||||||
|
grep: Grep,
|
||||||
|
opts: Options,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
struct Options {
|
||||||
|
mmap: bool,
|
||||||
|
encoding: Option<&'static Encoding>,
|
||||||
|
after_context: usize,
|
||||||
|
before_context: usize,
|
||||||
|
count: bool,
|
||||||
|
files_with_matches: bool,
|
||||||
|
files_without_matches: bool,
|
||||||
|
eol: u8,
|
||||||
|
invert_match: bool,
|
||||||
|
line_number: bool,
|
||||||
|
max_count: Option<u64>,
|
||||||
|
no_messages: bool,
|
||||||
|
quiet: bool,
|
||||||
|
text: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for Options {
|
||||||
|
fn default() -> Options {
|
||||||
|
Options {
|
||||||
|
mmap: false,
|
||||||
|
encoding: None,
|
||||||
|
after_context: 0,
|
||||||
|
before_context: 0,
|
||||||
|
count: false,
|
||||||
|
files_with_matches: false,
|
||||||
|
files_without_matches: false,
|
||||||
|
eol: b'\n',
|
||||||
|
invert_match: false,
|
||||||
|
line_number: false,
|
||||||
|
max_count: None,
|
||||||
|
no_messages: false,
|
||||||
|
quiet: false,
|
||||||
|
text: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl WorkerBuilder {
|
||||||
|
/// Create a new builder for a worker.
|
||||||
|
///
|
||||||
|
/// A reusable input buffer and a grep matcher are required, but there
|
||||||
|
/// are numerous additional options that can be configured on this builder.
|
||||||
|
pub fn new(grep: Grep) -> WorkerBuilder {
|
||||||
|
WorkerBuilder {
|
||||||
|
grep: grep,
|
||||||
|
opts: Options::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create the worker from this builder.
|
||||||
|
pub fn build(self) -> Worker {
|
||||||
|
let mut inpbuf = InputBuffer::new();
|
||||||
|
inpbuf.eol(self.opts.eol);
|
||||||
|
Worker {
|
||||||
|
grep: self.grep,
|
||||||
|
inpbuf: inpbuf,
|
||||||
|
decodebuf: vec![0; 8 * (1<<10)],
|
||||||
|
opts: self.opts,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The number of contextual lines to show after each match. The default
|
||||||
|
/// is zero.
|
||||||
|
pub fn after_context(mut self, count: usize) -> Self {
|
||||||
|
self.opts.after_context = count;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The number of contextual lines to show before each match. The default
|
||||||
|
/// is zero.
|
||||||
|
pub fn before_context(mut self, count: usize) -> Self {
|
||||||
|
self.opts.before_context = count;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, searching will print a count instead of each match.
|
||||||
|
///
|
||||||
|
/// Disabled by default.
|
||||||
|
pub fn count(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.count = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the encoding to use to read each file.
|
||||||
|
///
|
||||||
|
/// If the encoding is `None` (the default), then the encoding is
|
||||||
|
/// automatically detected on a best-effort per-file basis.
|
||||||
|
pub fn encoding(mut self, enc: Option<&'static Encoding>) -> Self {
|
||||||
|
self.opts.encoding = enc;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, searching will print the path instead of each match.
|
||||||
|
///
|
||||||
|
/// Disabled by default.
|
||||||
|
pub fn files_with_matches(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.files_with_matches = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, searching will print the path of files without any matches.
|
||||||
|
///
|
||||||
|
/// Disabled by default.
|
||||||
|
pub fn files_without_matches(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.files_without_matches = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Set the end-of-line byte used by this searcher.
|
||||||
|
pub fn eol(mut self, eol: u8) -> Self {
|
||||||
|
self.opts.eol = eol;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, matching is inverted so that lines that *don't* match the
|
||||||
|
/// given pattern are treated as matches.
|
||||||
|
pub fn invert_match(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.invert_match = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, compute line numbers and prefix each line of output with
|
||||||
|
/// them.
|
||||||
|
pub fn line_number(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.line_number = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Limit the number of matches to the given count.
|
||||||
|
///
|
||||||
|
/// The default is None, which corresponds to no limit.
|
||||||
|
pub fn max_count(mut self, count: Option<u64>) -> Self {
|
||||||
|
self.opts.max_count = count;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, try to use memory maps for searching if possible.
|
||||||
|
pub fn mmap(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.mmap = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, error messages are suppressed.
|
||||||
|
///
|
||||||
|
/// This is disabled by default.
|
||||||
|
pub fn no_messages(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.no_messages = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, don't show any output and quit searching after the first
|
||||||
|
/// match is found.
|
||||||
|
pub fn quiet(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.quiet = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If enabled, search binary files as if they were text.
|
||||||
|
pub fn text(mut self, yes: bool) -> Self {
|
||||||
|
self.opts.text = yes;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Worker is responsible for executing searches on file paths, while choosing
|
||||||
|
/// streaming search or memory map search as appropriate.
|
||||||
|
pub struct Worker {
|
||||||
|
grep: Grep,
|
||||||
|
inpbuf: InputBuffer,
|
||||||
|
decodebuf: Vec<u8>,
|
||||||
|
opts: Options,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Worker {
|
||||||
|
/// Execute the worker with the given printer and work item.
|
||||||
|
///
|
||||||
|
/// A work item can either be stdin or a file path.
|
||||||
|
pub fn run<W: WriteColor>(
|
||||||
|
&mut self,
|
||||||
|
printer: &mut Printer<W>,
|
||||||
|
work: Work,
|
||||||
|
) -> u64 {
|
||||||
|
let result = match work {
|
||||||
|
Work::Stdin => {
|
||||||
|
let stdin = io::stdin();
|
||||||
|
let stdin = stdin.lock();
|
||||||
|
self.search(printer, Path::new("<stdin>"), stdin)
|
||||||
|
}
|
||||||
|
Work::DirEntry(dent) => {
|
||||||
|
let mut path = dent.path();
|
||||||
|
let file = match File::open(path) {
|
||||||
|
Ok(file) => file,
|
||||||
|
Err(err) => {
|
||||||
|
if !self.opts.no_messages {
|
||||||
|
eprintln!("{}: {}", path.display(), err);
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
if let Some(p) = strip_prefix("./", path) {
|
||||||
|
path = p;
|
||||||
|
}
|
||||||
|
if self.opts.mmap {
|
||||||
|
self.search_mmap(printer, path, &file)
|
||||||
|
} else {
|
||||||
|
self.search(printer, path, file)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
match result {
|
||||||
|
Ok(count) => {
|
||||||
|
count
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
if !self.opts.no_messages {
|
||||||
|
eprintln!("{}", err);
|
||||||
|
}
|
||||||
|
0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search<R: io::Read, W: WriteColor>(
|
||||||
|
&mut self,
|
||||||
|
printer: &mut Printer<W>,
|
||||||
|
path: &Path,
|
||||||
|
rdr: R,
|
||||||
|
) -> Result<u64> {
|
||||||
|
let rdr = DecodeReader::new(
|
||||||
|
rdr, &mut self.decodebuf, self.opts.encoding);
|
||||||
|
let searcher = Searcher::new(
|
||||||
|
&mut self.inpbuf, printer, &self.grep, path, rdr);
|
||||||
|
searcher
|
||||||
|
.after_context(self.opts.after_context)
|
||||||
|
.before_context(self.opts.before_context)
|
||||||
|
.count(self.opts.count)
|
||||||
|
.files_with_matches(self.opts.files_with_matches)
|
||||||
|
.files_without_matches(self.opts.files_without_matches)
|
||||||
|
.eol(self.opts.eol)
|
||||||
|
.line_number(self.opts.line_number)
|
||||||
|
.invert_match(self.opts.invert_match)
|
||||||
|
.max_count(self.opts.max_count)
|
||||||
|
.quiet(self.opts.quiet)
|
||||||
|
.text(self.opts.text)
|
||||||
|
.run()
|
||||||
|
.map_err(From::from)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_mmap<W: WriteColor>(
|
||||||
|
&mut self,
|
||||||
|
printer: &mut Printer<W>,
|
||||||
|
path: &Path,
|
||||||
|
file: &File,
|
||||||
|
) -> Result<u64> {
|
||||||
|
if try!(file.metadata()).len() == 0 {
|
||||||
|
// Opening a memory map with an empty file results in an error.
|
||||||
|
// However, this may not actually be an empty file! For example,
|
||||||
|
// /proc/cpuinfo reports itself as an empty file, but it can
|
||||||
|
// produce data when it's read from. Therefore, we fall back to
|
||||||
|
// regular read calls.
|
||||||
|
return self.search(printer, path, file);
|
||||||
|
}
|
||||||
|
let mmap = try!(Mmap::open(file, Protection::Read));
|
||||||
|
let buf = unsafe { mmap.as_slice() };
|
||||||
|
if buf.len() >= 3 && Encoding::for_bom(buf).is_some() {
|
||||||
|
// If we have a UTF-16 bom in our memory map, then we need to fall
|
||||||
|
// back to the stream reader, which will do transcoding.
|
||||||
|
return self.search(printer, path, file);
|
||||||
|
}
|
||||||
|
let searcher = BufferSearcher::new(printer, &self.grep, path, buf);
|
||||||
|
Ok(searcher
|
||||||
|
.count(self.opts.count)
|
||||||
|
.files_with_matches(self.opts.files_with_matches)
|
||||||
|
.files_without_matches(self.opts.files_without_matches)
|
||||||
|
.eol(self.opts.eol)
|
||||||
|
.line_number(self.opts.line_number)
|
||||||
|
.invert_match(self.opts.invert_match)
|
||||||
|
.max_count(self.opts.max_count)
|
||||||
|
.quiet(self.opts.quiet)
|
||||||
|
.text(self.opts.text)
|
||||||
|
.run())
|
||||||
|
}
|
||||||
|
}
|
||||||
3
termcolor/COPYING
Normal file
3
termcolor/COPYING
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
This project is dual-licensed under the Unlicense and MIT licenses.
|
||||||
|
|
||||||
|
You may use this code under the terms of either license.
|
||||||
20 termcolor/Cargo.toml Normal file
@@ -0,0 +1,20 @@
[package]
name = "termcolor"
version = "0.3.2" #:version
authors = ["Andrew Gallant <jamslam@gmail.com>"]
description = """
A simple cross platform library for writing colored text to a terminal.
"""
documentation = "https://docs.rs/termcolor"
homepage = "https://github.com/BurntSushi/ripgrep/tree/master/termcolor"
repository = "https://github.com/BurntSushi/ripgrep/tree/master/termcolor"
readme = "README.md"
keywords = ["windows", "win", "color", "ansi", "console"]
license = "Unlicense/MIT"

[lib]
name = "termcolor"
bench = false

[target.'cfg(windows)'.dependencies]
wincolor = { version = "0.1.3", path = "../wincolor" }
21 termcolor/LICENSE-MIT Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2015 Andrew Gallant

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
86 termcolor/README.md Normal file
@@ -0,0 +1,86 @@
termcolor
=========
A simple cross platform library for writing colored text to a terminal. This
library writes colored text either using standard ANSI escape sequences or
by interacting with the Windows console. Several convenient abstractions
are provided for use in single-threaded or multi-threaded command line
applications.

[](https://travis-ci.org/BurntSushi/ripgrep)
[](https://ci.appveyor.com/project/BurntSushi/ripgrep)
[](https://crates.io/crates/termcolor)

Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).

### Documentation

[https://docs.rs/termcolor](https://docs.rs/termcolor)

### Usage

Add this to your `Cargo.toml`:

```toml
[dependencies]
termcolor = "0.3"
```

and this to your crate root:

```rust
extern crate termcolor;
```

### Organization

The `WriteColor` trait extends the `io::Write` trait with methods for setting
colors or resetting them.

`StandardStream` and `StandardStreamLock` both satisfy `WriteColor` and are
analogous to `std::io::Stdout` and `std::io::StdoutLock`, or `std::io::Stderr`
and `std::io::StderrLock`.

`Buffer` is an in memory buffer that supports colored text. In a parallel
program, each thread might write to its own buffer. A buffer can be printed to
stdout or stderr using a `BufferWriter`. The advantage of this design is that
each thread can work in parallel on a buffer without having to synchronize
access to global resources such as the Windows console. Moreover, this design
also prevents interleaving of buffer output.

`Ansi` and `NoColor` both satisfy `WriteColor` for arbitrary implementors of
`io::Write`. These types are useful when you know exactly what you need. An
analogous type for the Windows console is not provided since it cannot exist.

### Example: using `StandardStream`

The `StandardStream` type in this crate works similarly to `std::io::Stdout`,
except it is augmented with methods for coloring by the `WriteColor` trait.
For example, to write some green text:

```rust
use std::io::Write;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

let mut stdout = StandardStream::stdout(ColorChoice::Always);
try!(stdout.set_color(ColorSpec::new().set_fg(Some(Color::Green))));
try!(writeln!(&mut stdout, "green text!"));
```

### Example: using `BufferWriter`

A `BufferWriter` can create buffers and write buffers to stdout or stderr. It
does *not* implement `io::Write` or `WriteColor` itself. Instead, `Buffer`
implements `io::Write` and `io::WriteColor`.

This example shows how to print some green text to stderr.

```rust
use std::io::Write;
use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};

let mut bufwtr = BufferWriter::stderr(ColorChoice::Always);
let mut buffer = bufwtr.buffer();
try!(buffer.set_color(ColorSpec::new().set_fg(Some(Color::Green))));
try!(writeln!(&mut buffer, "green text!"));
try!(bufwtr.print(&buffer));
```
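The Organization section of this README describes the multi-threaded use case, while both of its examples are single-threaded. A minimal sketch of the parallel pattern, assuming only the termcolor 0.3 API added in this diff (each worker fills its own `Buffer`; the parent serializes printing, so output from different workers never interleaves):

```rust
extern crate termcolor;

use std::io::Write;
use std::sync::mpsc;
use std::thread;

use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};

fn main() {
    let bufwtr = BufferWriter::stdout(ColorChoice::Always);
    let (tx, rx) = mpsc::channel();

    for i in 0..4 {
        let tx = tx.clone();
        // Each worker owns its buffer; no shared console state is touched.
        let mut buf = bufwtr.buffer();
        thread::spawn(move || {
            buf.set_color(ColorSpec::new().set_fg(Some(Color::Green))).unwrap();
            writeln!(buf, "result from worker {}", i).unwrap();
            tx.send(buf).unwrap();
        });
    }
    drop(tx);

    // Printing whole buffers at once prevents interleaved output.
    for buf in rx {
        bufwtr.print(&buf).unwrap();
    }
}
```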
24 termcolor/UNLICENSE Normal file
@@ -0,0 +1,24 @@
This is free and unencumbered software released into the public domain.

Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.

In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

For more information, please refer to <http://unlicense.org/>
1322 termcolor/src/lib.rs Normal file
File diff suppressed because it is too large
544 tests/tests.rs
@@ -54,6 +54,20 @@ fn path(unix: &str) -> String {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn paths(unix: &[&str]) -> Vec<String> {
|
||||||
|
let mut xs: Vec<_> = unix.iter().map(|s| path(s)).collect();
|
||||||
|
xs.sort();
|
||||||
|
xs
|
||||||
|
}
|
||||||
|
|
||||||
|
fn paths_from_stdout(stdout: String) -> Vec<String> {
|
||||||
|
let mut paths: Vec<_> = stdout.lines().map(|s| {
|
||||||
|
s.split(":").next().unwrap().to_string()
|
||||||
|
}).collect();
|
||||||
|
paths.sort();
|
||||||
|
paths
|
||||||
|
}
|
||||||
|
|
||||||
fn sort_lines(lines: &str) -> String {
|
fn sort_lines(lines: &str) -> String {
|
||||||
let mut lines: Vec<String> =
|
let mut lines: Vec<String> =
|
||||||
lines.trim().lines().map(|s| s.to_owned()).collect();
|
lines.trim().lines().map(|s| s.to_owned()).collect();
|
||||||
@@ -93,8 +107,8 @@ sherlock!(columns, |wd: WorkDir, mut cmd: Command| {
|
|||||||
cmd.arg("--column");
|
cmd.arg("--column");
|
||||||
let lines: String = wd.stdout(&mut cmd);
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
let expected = "\
|
let expected = "\
|
||||||
57:For the Doctor Watsons of this world, as opposed to the Sherlock
|
1:57:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||||
49:be, to a very large extent, the result of luck. Sherlock Holmes
|
3:49:be, to a very large extent, the result of luck. Sherlock Holmes
|
||||||
";
|
";
|
||||||
assert_eq!(lines, expected);
|
assert_eq!(lines, expected);
|
||||||
});
|
});
|
||||||
@@ -294,6 +308,17 @@ sherlock!(file_type_add, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
|||||||
assert_eq!(lines, "file.wat:Sherlock\n");
|
assert_eq!(lines, "file.wat:Sherlock\n");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
sherlock!(file_type_add_compose, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("file.py", "Sherlock");
|
||||||
|
wd.create("file.rs", "Sherlock");
|
||||||
|
wd.create("file.wat", "Sherlock");
|
||||||
|
cmd.arg("--type-add").arg("wat:*.wat");
|
||||||
|
cmd.arg("--type-add").arg("combo:include:wat,py").arg("-t").arg("combo");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
println!("{}", lines);
|
||||||
|
assert_eq!(sort_lines(&lines), "file.py:Sherlock\nfile.wat:Sherlock\n");
|
||||||
|
});
|
||||||
|
|
||||||
sherlock!(glob, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
sherlock!(glob, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
wd.create("file.py", "Sherlock");
|
wd.create("file.py", "Sherlock");
|
||||||
wd.create("file.rs", "Sherlock");
|
wd.create("file.rs", "Sherlock");
|
||||||
@@ -325,6 +350,15 @@ sherlock!(files_with_matches, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
|||||||
assert_eq!(lines, expected);
|
assert_eq!(lines, expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
sherlock!(files_without_matches, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("file.py", "foo");
|
||||||
|
cmd.arg("--files-without-match");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "file.py\n";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
sherlock!(after_context, |wd: WorkDir, mut cmd: Command| {
|
sherlock!(after_context, |wd: WorkDir, mut cmd: Command| {
|
||||||
cmd.arg("-A").arg("1");
|
cmd.arg("-A").arg("1");
|
||||||
let lines: String = wd.stdout(&mut cmd);
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
@@ -398,6 +432,63 @@ sherlock!(context_line_numbers, "world|attached",
|
|||||||
assert_eq!(lines, expected);
|
assert_eq!(lines, expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
sherlock!(max_filesize_parse_error_length, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
cmd.arg("--max-filesize").arg("44444444444444444444");
|
||||||
|
wd.assert_err(&mut cmd);
|
||||||
|
});
|
||||||
|
|
||||||
|
sherlock!(max_filesize_parse_error_suffix, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
cmd.arg("--max-filesize").arg("45k");
|
||||||
|
wd.assert_err(&mut cmd);
|
||||||
|
});
|
||||||
|
|
||||||
|
sherlock!(max_filesize_parse_no_suffix, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.remove("sherlock");
|
||||||
|
wd.create_size("foo", 40);
|
||||||
|
wd.create_size("bar", 60);
|
||||||
|
|
||||||
|
cmd.arg("--max-filesize").arg("50").arg("--files");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "\
|
||||||
|
foo
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
sherlock!(max_filesize_parse_k_suffix, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.remove("sherlock");
|
||||||
|
wd.create_size("foo", 3048);
|
||||||
|
wd.create_size("bar", 4100);
|
||||||
|
|
||||||
|
cmd.arg("--max-filesize").arg("4K").arg("--files");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "\
|
||||||
|
foo
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
sherlock!(max_filesize_parse_m_suffix, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.remove("sherlock");
|
||||||
|
wd.create_size("foo", 1000000);
|
||||||
|
wd.create_size("bar", 1400000);
|
||||||
|
|
||||||
|
cmd.arg("--max-filesize").arg("1M").arg("--files");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "\
|
||||||
|
foo
|
||||||
|
";
|
||||||
|
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
sherlock!(ignore_hidden, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
sherlock!(ignore_hidden, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
wd.remove("sherlock");
|
wd.remove("sherlock");
|
||||||
wd.create(".sherlock", hay::SHERLOCK);
|
wd.create(".sherlock", hay::SHERLOCK);
|
||||||
@@ -538,6 +629,7 @@ sherlock:be, to a very large extent, the result of luck. Sherlock Holmes
|
|||||||
assert_eq!(lines, expected);
|
assert_eq!(lines, expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
#[cfg(not(windows))]
|
||||||
sherlock!(symlink_nofollow, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
sherlock!(symlink_nofollow, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
wd.remove("sherlock");
|
wd.remove("sherlock");
|
||||||
wd.create_dir("foo");
|
wd.create_dir("foo");
|
||||||
@@ -549,6 +641,7 @@ sherlock!(symlink_nofollow, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
|||||||
wd.assert_err(&mut cmd);
|
wd.assert_err(&mut cmd);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
#[cfg(not(windows))]
|
||||||
sherlock!(symlink_follow, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
sherlock!(symlink_follow, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
wd.remove("sherlock");
|
wd.remove("sherlock");
|
||||||
wd.create_dir("foo");
|
wd.create_dir("foo");
|
||||||
@@ -745,7 +838,7 @@ clean!(regression_105_part2, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
|||||||
cmd.arg("--column");
|
cmd.arg("--column");
|
||||||
|
|
||||||
let lines: String = wd.stdout(&mut cmd);
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
assert_eq!(lines, "foo:3:zztest\n");
|
assert_eq!(lines, "foo:1:3:zztest\n");
|
||||||
});
|
});
|
||||||
|
|
||||||
// See: https://github.com/BurntSushi/ripgrep/issues/127
|
// See: https://github.com/BurntSushi/ripgrep/issues/127
|
||||||
@@ -775,6 +868,15 @@ clean!(regression_127, "Sherlock", ".", |wd: WorkDir, mut cmd: Command| {
|
|||||||
assert_eq!(lines, expected);
|
assert_eq!(lines, expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/128
|
||||||
|
clean!(regression_128, "x", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create_bytes("foo", b"01234567\x0b\n\x0b\n\x0b\n\x0b\nx");
|
||||||
|
cmd.arg("-n");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:5:x\n");
|
||||||
|
});
|
||||||
|
|
||||||
// See: https://github.com/BurntSushi/ripgrep/issues/131
|
// See: https://github.com/BurntSushi/ripgrep/issues/131
|
||||||
//
|
//
|
||||||
// TODO(burntsushi): Darwin doesn't like this test for some reason.
|
// TODO(burntsushi): Darwin doesn't like this test for some reason.
|
||||||
@@ -837,6 +939,220 @@ clean!(
|
|||||||
assert_eq!(lines, TESTCASE);
|
assert_eq!(lines, TESTCASE);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/184
|
||||||
|
clean!(regression_184, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create(".gitignore", ".*");
|
||||||
|
wd.create_dir("foo/bar");
|
||||||
|
wd.create("foo/bar/baz", "test");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, format!("{}:test\n", path("foo/bar/baz")));
|
||||||
|
|
||||||
|
cmd.current_dir(wd.path().join("./foo/bar"));
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "baz:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/199
|
||||||
|
clean!(regression_199, r"\btest\b", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "tEsT");
|
||||||
|
cmd.arg("--smart-case");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:tEsT\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/206
|
||||||
|
clean!(regression_206, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create_dir("foo");
|
||||||
|
wd.create("foo/bar.txt", "test");
|
||||||
|
cmd.arg("-g").arg("*.txt");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, format!("{}:test\n", path("foo/bar.txt")));
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/210
|
||||||
|
#[cfg(unix)]
|
||||||
|
#[test]
|
||||||
|
fn regression_210() {
|
||||||
|
use std::ffi::OsStr;
|
||||||
|
use std::os::unix::ffi::OsStrExt;
|
||||||
|
|
||||||
|
let badutf8 = OsStr::from_bytes(&b"foo\xffbar"[..]);
|
||||||
|
|
||||||
|
let wd = WorkDir::new("regression_210");
|
||||||
|
let mut cmd = wd.command();
|
||||||
|
wd.create(badutf8, "test");
|
||||||
|
cmd.arg("-H").arg("test").arg(badutf8);
|
||||||
|
|
||||||
|
let out = wd.output(&mut cmd);
|
||||||
|
assert_eq!(out.stdout, b"foo\xffbar:test\n".to_vec());
|
||||||
|
}
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/228
|
||||||
|
clean!(regression_228, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create_dir("foo");
|
||||||
|
cmd.arg("--ignore-file").arg("foo");
|
||||||
|
wd.assert_err(&mut cmd);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/229
|
||||||
|
clean!(regression_229, "[E]conomie", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "economie");
|
||||||
|
cmd.arg("-S");
|
||||||
|
wd.assert_err(&mut cmd);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/251
|
||||||
|
clean!(regression_251, "привет", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "привет\nПривет\nПрИвЕт");
|
||||||
|
cmd.arg("-i");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:привет\nfoo:Привет\nfoo:ПрИвЕт\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/256
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
clean!(regression_256, "test", "foo", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create_dir("bar");
|
||||||
|
wd.create("bar/baz", "test");
|
||||||
|
wd.link_dir("bar", "foo");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo/baz:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/256
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
clean!(regression_256_j1, "test", "foo", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create_dir("bar");
|
||||||
|
wd.create("bar/baz", "test");
|
||||||
|
wd.link_dir("bar", "foo");
|
||||||
|
cmd.arg("-j1");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo/baz:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/279
|
||||||
|
clean!(regression_279, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test");
|
||||||
|
cmd.arg("-q");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/405
|
||||||
|
clean!(regression_405, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create_dir("foo/bar");
|
||||||
|
wd.create_dir("bar/foo");
|
||||||
|
wd.create("foo/bar/file1.txt", "test");
|
||||||
|
wd.create("bar/foo/file2.txt", "test");
|
||||||
|
cmd.arg("-g").arg("!/foo/**");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, format!("{}:test\n", path("bar/foo/file2.txt")));
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/428
|
||||||
|
clean!(regression_428_color_context_path, "foo", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("sherlock", "foo\nbar");
|
||||||
|
cmd.arg("-A1").arg("-H").arg("--no-heading").arg("-N")
|
||||||
|
.arg("--colors=match:none").arg("--color=always");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = format!("\
|
||||||
|
{colored_path}:foo
|
||||||
|
{colored_path}-bar
|
||||||
|
", colored_path=format!("\x1b\x5b\x6d\x1b\x5b\x33\x35\x6d{path}\x1b\x5b\x6d", path=path("sherlock")));
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/428
|
||||||
|
clean!(regression_428_unrecognized_style, "Sherlok", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
cmd.arg("--colors=match:style:");
|
||||||
|
wd.assert_err(&mut cmd);
|
||||||
|
|
||||||
|
let output = cmd.output().unwrap();
|
||||||
|
let err = String::from_utf8_lossy(&output.stderr);
|
||||||
|
let expected = "\
|
||||||
|
Unrecognized style attribute ''. Choose from: nobold, bold, nointense, intense.
|
||||||
|
";
|
||||||
|
assert_eq!(err, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/1
|
||||||
|
clean!(feature_1_sjis, "Шерлок Холмс", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
let sherlock =
|
||||||
|
b"\x84Y\x84u\x84\x82\x84|\x84\x80\x84{ \x84V\x84\x80\x84|\x84}\x84\x83";
|
||||||
|
wd.create_bytes("foo", &sherlock[..]);
|
||||||
|
cmd.arg("-Esjis");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:Шерлок Холмс\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/1
|
||||||
|
clean!(feature_1_utf16_auto, "Шерлок Холмс", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
let sherlock =
|
||||||
|
b"\xff\xfe(\x045\x04@\x04;\x04>\x04:\x04 \x00%\x04>\x04;\x04<\x04A\x04";
|
||||||
|
wd.create_bytes("foo", &sherlock[..]);
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:Шерлок Холмс\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/1
|
||||||
|
clean!(feature_1_utf16_explicit, "Шерлок Холмс", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
let sherlock =
|
||||||
|
b"\xff\xfe(\x045\x04@\x04;\x04>\x04:\x04 \x00%\x04>\x04;\x04<\x04A\x04";
|
||||||
|
wd.create_bytes("foo", &sherlock[..]);
|
||||||
|
cmd.arg("-Eutf-16le");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:Шерлок Холмс\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/1
|
||||||
|
clean!(feature_1_eucjp, "Шерлок Холмс", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
let sherlock =
|
||||||
|
b"\xa7\xba\xa7\xd6\xa7\xe2\xa7\xdd\xa7\xe0\xa7\xdc \xa7\xb7\xa7\xe0\xa7\xdd\xa7\xde\xa7\xe3";
|
||||||
|
wd.create_bytes("foo", &sherlock[..]);
|
||||||
|
cmd.arg("-Eeuc-jp");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:Шерлок Холмс\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/7
|
||||||
|
sherlock!(feature_7, "-fpat", "sherlock", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("pat", "Sherlock\nHolmes");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "\
|
||||||
|
For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||||
|
Holmeses, success in the province of detective work must always
|
||||||
|
be, to a very large extent, the result of luck. Sherlock Holmes
|
||||||
|
";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/7
|
||||||
|
sherlock!(feature_7_dash, "-f-", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
let output = wd.pipe(&mut cmd, "Sherlock");
|
||||||
|
let lines = String::from_utf8_lossy(&output.stdout);
|
||||||
|
let expected = "\
|
||||||
|
sherlock:For the Doctor Watsons of this world, as opposed to the Sherlock
|
||||||
|
sherlock:be, to a very large extent, the result of luck. Sherlock Holmes
|
||||||
|
";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
// See: https://github.com/BurntSushi/ripgrep/issues/20
|
// See: https://github.com/BurntSushi/ripgrep/issues/20
|
||||||
sherlock!(feature_20_no_filename, "Sherlock", ".",
|
sherlock!(feature_20_no_filename, "Sherlock", ".",
|
||||||
|wd: WorkDir, mut cmd: Command| {
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
@@ -850,6 +1166,100 @@ be, to a very large extent, the result of luck. Sherlock Holmes
|
|||||||
assert_eq!(lines, expected);
|
assert_eq!(lines, expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/34
|
||||||
|
sherlock!(feature_34_only_matching, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
cmd.arg("--only-matching");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "\
|
||||||
|
sherlock:Sherlock
|
||||||
|
sherlock:Sherlock
|
||||||
|
";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/34
|
||||||
|
sherlock!(feature_34_only_matching_line_column, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
cmd.arg("--only-matching").arg("--column").arg("--line-number");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "\
|
||||||
|
sherlock:1:57:Sherlock
|
||||||
|
sherlock:3:49:Sherlock
|
||||||
|
";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/45
|
||||||
|
sherlock!(feature_45_relative_cwd, "test", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create(".not-an-ignore", "foo\n/bar");
|
||||||
|
wd.create_dir("bar");
|
||||||
|
wd.create_dir("baz/bar");
|
||||||
|
wd.create_dir("baz/baz/bar");
|
||||||
|
wd.create("bar/test", "test");
|
||||||
|
wd.create("baz/bar/test", "test");
|
||||||
|
wd.create("baz/baz/bar/test", "test");
|
||||||
|
wd.create("baz/foo", "test");
|
||||||
|
wd.create("baz/test", "test");
|
||||||
|
wd.create("foo", "test");
|
||||||
|
wd.create("test", "test");
|
||||||
|
|
||||||
|
// First, get a baseline without applying ignore rules.
|
||||||
|
let lines = paths_from_stdout(wd.stdout(&mut cmd));
|
||||||
|
assert_eq!(lines, paths(&[
|
||||||
|
"bar/test", "baz/bar/test", "baz/baz/bar/test", "baz/foo",
|
||||||
|
"baz/test", "foo", "test",
|
||||||
|
]));
|
||||||
|
|
||||||
|
// Now try again with the ignore file activated.
|
||||||
|
cmd.arg("--ignore-file").arg(".not-an-ignore");
|
||||||
|
let lines = paths_from_stdout(wd.stdout(&mut cmd));
|
||||||
|
assert_eq!(lines, paths(&[
|
||||||
|
"baz/bar/test", "baz/baz/bar/test", "baz/test", "test",
|
||||||
|
]));
|
||||||
|
|
||||||
|
// Now do it again, but inside the baz directory.
|
||||||
|
// Since the ignore file is interpreted relative to the CWD, this will
|
||||||
|
// cause the /bar anchored pattern to filter out baz/bar, which is a
|
||||||
|
// subtle difference between true parent ignore files and manually
|
||||||
|
// specified ignore files.
|
||||||
|
let mut cmd = wd.command();
|
||||||
|
cmd.arg("test").arg(".").arg("--ignore-file").arg("../.not-an-ignore");
|
||||||
|
cmd.current_dir(wd.path().join("baz"));
|
||||||
|
let lines = paths_from_stdout(wd.stdout(&mut cmd));
|
||||||
|
assert_eq!(lines, paths(&["baz/bar/test", "test"]));
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/45
|
||||||
|
sherlock!(feature_45_precedence_with_others, "test", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create(".not-an-ignore", "*.log");
|
||||||
|
wd.create(".ignore", "!imp.log");
|
||||||
|
wd.create("imp.log", "test");
|
||||||
|
wd.create("wat.log", "test");
|
||||||
|
|
||||||
|
cmd.arg("--ignore-file").arg(".not-an-ignore");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "imp.log:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/45
|
||||||
|
sherlock!(feature_45_precedence_internal, "test", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create(".not-an-ignore1", "*.log");
|
||||||
|
wd.create(".not-an-ignore2", "!imp.log");
|
||||||
|
wd.create("imp.log", "test");
|
||||||
|
wd.create("wat.log", "test");
|
||||||
|
|
||||||
|
cmd.arg("--ignore-file").arg(".not-an-ignore1");
|
||||||
|
cmd.arg("--ignore-file").arg(".not-an-ignore2");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "imp.log:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
// See: https://github.com/BurntSushi/ripgrep/issues/68
|
// See: https://github.com/BurntSushi/ripgrep/issues/68
|
||||||
clean!(feature_68_no_ignore_vcs, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
clean!(feature_68_no_ignore_vcs, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
wd.create(".gitignore", "foo");
|
wd.create(".gitignore", "foo");
|
||||||
@@ -884,6 +1294,16 @@ sherlock!(feature_89_files_with_matches, "Sherlock", ".",
|
|||||||
assert_eq!(lines, "sherlock\x00");
|
assert_eq!(lines, "sherlock\x00");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/89
|
||||||
|
sherlock!(feature_89_files_without_matches, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("file.py", "foo");
|
||||||
|
cmd.arg("--null").arg("--files-without-match");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "file.py\x00");
|
||||||
|
});
|
||||||
|
|
||||||
// See: https://github.com/BurntSushi/ripgrep/issues/89
|
// See: https://github.com/BurntSushi/ripgrep/issues/89
|
||||||
sherlock!(feature_89_count, "Sherlock", ".",
|
sherlock!(feature_89_count, "Sherlock", ".",
|
||||||
|wd: WorkDir, mut cmd: Command| {
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
@@ -947,6 +1367,91 @@ clean!(feature_109_case_sensitive_part2, "test", ".",
|
|||||||
wd.assert_err(&mut cmd);
|
wd.assert_err(&mut cmd);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/129
|
||||||
|
clean!(feature_129_matches, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test\ntest abcdefghijklmnopqrstuvwxyz test");
|
||||||
|
cmd.arg("-M26");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "foo:test\nfoo:[Omitted long line with 2 matches]\n";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/129
|
||||||
|
clean!(feature_129_context, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test\nabcdefghijklmnopqrstuvwxyz");
|
||||||
|
cmd.arg("-M20").arg("-C1");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "foo:test\nfoo-[Omitted long context line]\n";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/129
|
||||||
|
clean!(feature_129_replace, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test\ntest abcdefghijklmnopqrstuvwxyz test");
|
||||||
|
cmd.arg("-M26").arg("-rfoo");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
let expected = "foo:foo\nfoo:[Omitted long line with 2 replacements]\n";
|
||||||
|
assert_eq!(lines, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/159
|
||||||
|
clean!(feature_159_works, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test\ntest");
|
||||||
|
cmd.arg("-m1");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/159
|
||||||
|
clean!(feature_159_zero_max, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test\ntest");
|
||||||
|
cmd.arg("-m0");
|
||||||
|
wd.assert_err(&mut cmd);
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/243
|
||||||
|
clean!(feature_243_column_line, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test");
|
||||||
|
cmd.arg("--column");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "foo:1:1:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/263
|
||||||
|
clean!(feature_263_sort_files, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create("foo", "test");
|
||||||
|
wd.create("abc", "test");
|
||||||
|
wd.create("zoo", "test");
|
||||||
|
wd.create("bar", "test");
|
||||||
|
cmd.arg("--sort-files");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "abc:test\nbar:test\nfoo:test\nzoo:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/275
|
||||||
|
clean!(feature_275_pathsep, "test", ".", |wd: WorkDir, mut cmd: Command| {
|
||||||
|
wd.create_dir("foo");
|
||||||
|
wd.create("foo/bar", "test");
|
||||||
|
cmd.arg("--path-separator").arg("Z");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "fooZbar:test\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/419
|
||||||
|
sherlock!(feature_419_zero_as_shortcut_for_null, "Sherlock", ".",
|
||||||
|
|wd: WorkDir, mut cmd: Command| {
|
||||||
|
cmd.arg("-0").arg("--count");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "sherlock\x002\n");
|
||||||
|
});
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn binary_nosearch() {
|
fn binary_nosearch() {
|
||||||
let wd = WorkDir::new("binary_nosearch");
|
let wd = WorkDir::new("binary_nosearch");
|
||||||
@@ -1016,6 +1521,39 @@ fn regression_64() {
|
|||||||
assert_eq!(lines, path("foo/abc\n"));
|
assert_eq!(lines, path("foo/abc\n"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/270
|
||||||
|
#[test]
|
||||||
|
fn regression_270() {
|
||||||
|
let wd = WorkDir::new("regression_270");
|
||||||
|
wd.create("foo", "-test");
|
||||||
|
|
||||||
|
let mut cmd = wd.command();
|
||||||
|
cmd.arg("-e").arg("-test");
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, path("foo:-test\n"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// See: https://github.com/BurntSushi/ripgrep/issues/391
|
||||||
|
#[test]
|
||||||
|
fn regression_391() {
|
||||||
|
let wd = WorkDir::new("regression_391");
|
||||||
|
wd.create_dir(".git");
|
||||||
|
wd.create("lock", "");
|
||||||
|
wd.create("bar.py", "");
|
||||||
|
wd.create(".git/packed-refs", "");
|
||||||
|
wd.create(".git/description", "");
|
||||||
|
|
||||||
|
let mut cmd = wd.command();
|
||||||
|
cmd.arg("--no-ignore").arg("--hidden").arg("--follow").arg("--files")
|
||||||
|
.arg("--glob")
|
||||||
|
.arg("!{.git,node_modules,plugged}/**")
|
||||||
|
.arg("--glob")
|
||||||
|
.arg("*.{js,json,php,md,styl,scss,sass,pug,html,config,py,cpp,c,go,hs}");
|
||||||
|
|
||||||
|
let lines: String = wd.stdout(&mut cmd);
|
||||||
|
assert_eq!(lines, "bar.py\n");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn type_list() {
|
fn type_list() {
|
||||||
let wd = WorkDir::new("type_list");
|
let wd = WorkDir::new("type_list");
|
||||||
|
|||||||
@@ -43,9 +43,21 @@ impl WorkDir {
|
|||||||
|
|
||||||
/// Create a new file with the given name and contents in this directory.
|
/// Create a new file with the given name and contents in this directory.
|
||||||
pub fn create<P: AsRef<Path>>(&self, name: P, contents: &str) {
|
pub fn create<P: AsRef<Path>>(&self, name: P, contents: &str) {
|
||||||
|
self.create_bytes(name, contents.as_bytes());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new file with the given name and size.
|
||||||
|
pub fn create_size<P: AsRef<Path>>(&self, name: P, filesize: u64) {
|
||||||
|
let path = self.dir.join(name);
|
||||||
|
let file = nice_err(&path, File::create(&path));
|
||||||
|
nice_err(&path, file.set_len(filesize));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new file with the given name and contents in this directory.
|
||||||
|
pub fn create_bytes<P: AsRef<Path>>(&self, name: P, contents: &[u8]) {
|
||||||
let path = self.dir.join(name);
|
let path = self.dir.join(name);
|
||||||
let mut file = nice_err(&path, File::create(&path));
|
let mut file = nice_err(&path, File::create(&path));
|
||||||
nice_err(&path, file.write_all(contents.as_bytes()));
|
nice_err(&path, file.write_all(contents));
|
||||||
nice_err(&path, file.flush());
|
nice_err(&path, file.flush());
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -71,8 +83,29 @@ impl WorkDir {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the path to the ripgrep executable.
|
/// Returns the path to the ripgrep executable.
|
||||||
|
#[cfg(not(windows))]
|
||||||
pub fn bin(&self) -> PathBuf {
|
pub fn bin(&self) -> PathBuf {
|
||||||
self.root.join("rg")
|
let path = self.root.join("rg");
|
||||||
|
if !path.is_file() {
|
||||||
|
// Looks like a recent version of Cargo changed the cwd or the
|
||||||
|
// location of the test executable.
|
||||||
|
self.root.join("../rg")
|
||||||
|
} else {
|
||||||
|
path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the path to the ripgrep executable.
|
||||||
|
#[cfg(windows)]
|
||||||
|
pub fn bin(&self) -> PathBuf {
|
||||||
|
let path = self.root.join("rg.exe");
|
||||||
|
if !path.is_file() {
|
||||||
|
// Looks like a recent version of Cargo changed the cwd or the
|
||||||
|
// location of the test executable.
|
||||||
|
self.root.join("../rg.exe")
|
||||||
|
} else {
|
||||||
|
path
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the path to this directory.
|
/// Returns the path to this directory.
|
||||||
@@ -148,7 +181,41 @@ impl WorkDir {
|
|||||||
|
|
||||||
/// Gets the output of a command. If the command failed, then this panics.
|
/// Gets the output of a command. If the command failed, then this panics.
|
||||||
pub fn output(&self, cmd: &mut process::Command) -> process::Output {
|
pub fn output(&self, cmd: &mut process::Command) -> process::Output {
|
||||||
let o = cmd.output().unwrap();
|
let output = cmd.output().unwrap();
|
||||||
|
self.expect_success(cmd, output)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Pipe `input` to a command, and collect the output.
|
||||||
|
pub fn pipe(
|
||||||
|
&self,
|
||||||
|
cmd: &mut process::Command,
|
||||||
|
input: &str
|
||||||
|
) -> process::Output {
|
||||||
|
cmd.stdin(process::Stdio::piped());
|
||||||
|
cmd.stdout(process::Stdio::piped());
|
||||||
|
cmd.stderr(process::Stdio::piped());
|
||||||
|
|
||||||
|
let mut child = cmd.spawn().unwrap();
|
||||||
|
|
||||||
|
// Pipe input to child process using a separate thread to avoid
|
||||||
|
// risk of deadlock between parent and child process.
|
||||||
|
let mut stdin = child.stdin.take().expect("expected standard input");
|
||||||
|
let input = input.to_owned();
|
||||||
|
let worker = thread::spawn(move || {
|
||||||
|
write!(stdin, "{}", input)
|
||||||
|
});
|
||||||
|
|
||||||
|
let output = self.expect_success(cmd, child.wait_with_output().unwrap());
|
||||||
|
worker.join().unwrap().unwrap();
|
||||||
|
output
|
||||||
|
}
|
||||||
|
|
||||||
|
/// If `o` is not the output of a successful process run
|
||||||
|
fn expect_success(
|
||||||
|
&self,
|
||||||
|
cmd: &process::Command,
|
||||||
|
o: process::Output
|
||||||
|
) -> process::Output {
|
||||||
if !o.status.success() {
|
if !o.status.success() {
|
||||||
let suggest =
|
let suggest =
|
||||||
if o.stderr.is_empty() {
|
if o.stderr.is_empty() {
|
||||||
|
|||||||
3 wincolor/COPYING Normal file
@@ -0,0 +1,3 @@
This project is dual-licensed under the Unlicense and MIT licenses.

You may use this code under the terms of either license.
21 wincolor/Cargo.toml Normal file
@@ -0,0 +1,21 @@
[package]
name = "wincolor"
version = "0.1.3" #:version
authors = ["Andrew Gallant <jamslam@gmail.com>"]
description = """
A simple Windows specific API for controlling text color in a Windows console.
"""
documentation = "https://docs.rs/wincolor"
homepage = "https://github.com/BurntSushi/ripgrep/tree/master/wincolor"
repository = "https://github.com/BurntSushi/ripgrep/tree/master/wincolor"
readme = "README.md"
keywords = ["windows", "win", "color", "ansi", "console"]
license = "Unlicense/MIT"

[lib]
name = "wincolor"
bench = false

[dependencies]
kernel32-sys = "0.2.2"
winapi = "0.2.8"
21 wincolor/LICENSE-MIT Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2015 Andrew Gallant

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
44 wincolor/README.md Normal file
@@ -0,0 +1,44 @@
wincolor
========
A simple Windows specific API for controlling text color in a Windows console.
The purpose of this crate is to expose the full inflexibility of the Windows
console without any platform independent abstraction.

[](https://ci.appveyor.com/project/BurntSushi/ripgrep)
[](https://crates.io/crates/wincolor)

Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).

### Documentation

[https://docs.rs/wincolor](https://docs.rs/wincolor)

### Usage

Add this to your `Cargo.toml`:

```toml
[dependencies]
wincolor = "0.1"
```

and this to your crate root:

```rust
extern crate wincolor;
```

### Example

This is a simple example that shows how to write text with a foreground color
of cyan and the intense attribute set:

```rust
use wincolor::{Console, Color, Intense};

let mut con = Console::stdout().unwrap();
con.fg(Intense::Yes, Color::Cyan).unwrap();
println!("This text will be intense cyan.");
con.reset().unwrap();
println!("This text will be normal.");
```
24 wincolor/UNLICENSE Normal file
@@ -0,0 +1,24 @@
This is free and unencumbered software released into the public domain.

Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.

In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

For more information, please refer to <http://unlicense.org/>
32 wincolor/src/lib.rs Normal file
@@ -0,0 +1,32 @@
/*!
This crate provides a safe and simple Windows specific API to control
text attributes in the Windows console. Text attributes are limited to
foreground/background colors, as well as whether to make colors intense or not.

Note that on non-Windows platforms, this crate is empty but will compile.

# Example

```no_run
use wincolor::{Console, Color, Intense};

let mut con = Console::stdout().unwrap();
con.fg(Intense::Yes, Color::Cyan).unwrap();
println!("This text will be intense cyan.");
con.reset().unwrap();
println!("This text will be normal.");
```
*/

#![deny(missing_docs)]

#[cfg(windows)]
extern crate kernel32;
#[cfg(windows)]
extern crate winapi;

#[cfg(windows)]
pub use win::*;

#[cfg(windows)]
mod win;
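The lib.rs above notes that the crate is empty but still compiles on non-Windows platforms. A hedged sketch of how a caller might lean on that, gating the wincolor path behind `cfg(windows)` and falling back to an ANSI escape elsewhere; the `highlight` function is invented for illustration and is not part of this repository:

```rust
#[cfg(windows)]
extern crate wincolor;

#[cfg(windows)]
fn highlight(msg: &str) -> std::io::Result<()> {
    use wincolor::{Color, Console, Intense};
    // Use the Windows console API added in this diff.
    let mut con = Console::stdout()?;
    con.fg(Intense::Yes, Color::Yellow)?;
    println!("{}", msg);
    con.reset()
}

#[cfg(not(windows))]
fn highlight(msg: &str) -> std::io::Result<()> {
    // Plain ANSI escape sequence on non-Windows terminals.
    println!("\x1b[1;33m{}\x1b[0m", msg);
    Ok(())
}

fn main() {
    highlight("this line stands out").unwrap();
}
```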
237
wincolor/src/win.rs
Normal file
237
wincolor/src/win.rs
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
use std::io;
|
||||||
|
use std::mem;
|
||||||
|
|
||||||
|
use kernel32;
|
||||||
|
use winapi::{DWORD, HANDLE, WORD};
|
||||||
|
use winapi::winbase::{STD_ERROR_HANDLE, STD_OUTPUT_HANDLE};
|
||||||
|
use winapi::wincon::{
|
||||||
|
FOREGROUND_BLUE as FG_BLUE,
|
||||||
|
FOREGROUND_GREEN as FG_GREEN,
|
||||||
|
FOREGROUND_RED as FG_RED,
|
||||||
|
FOREGROUND_INTENSITY as FG_INTENSITY,
|
||||||
|
};
|
||||||
|
|
||||||
|
const FG_CYAN: DWORD = FG_BLUE | FG_GREEN;
|
||||||
|
const FG_MAGENTA: DWORD = FG_BLUE | FG_RED;
|
||||||
|
const FG_YELLOW: DWORD = FG_GREEN | FG_RED;
|
||||||
|
const FG_WHITE: DWORD = FG_BLUE | FG_GREEN | FG_RED;
|
||||||
|
|
||||||
|
/// A Windows console.
|
||||||
|
///
|
||||||
|
/// This represents a very limited set of functionality available to a Windows
|
||||||
|
/// console. In particular, it can only change text attributes such as color
|
||||||
|
/// and intensity.
|
||||||
|
///
|
||||||
|
/// There is no way to "write" to this console. Simply write to
|
||||||
|
/// stdout or stderr instead, while interleaving instructions to the console
|
||||||
|
/// to change text attributes.
|
||||||
|
///
|
||||||
|
/// A common pitfall when using a console is to forget to flush writes to
|
||||||
|
/// stdout before setting new text attributes.
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Console {
|
||||||
|
handle: HANDLE,
|
||||||
|
start_attr: TextAttributes,
|
||||||
|
cur_attr: TextAttributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
unsafe impl Send for Console {}
|
||||||
|
|
||||||
|
impl Drop for Console {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { kernel32::CloseHandle(self.handle); }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Console {
|
||||||
|
/// Get a console for a standard I/O stream.
|
||||||
|
fn create_for_stream(handle_id: DWORD) -> io::Result<Console> {
|
||||||
|
let mut info = unsafe { mem::zeroed() };
|
||||||
|
let (handle, res) = unsafe {
|
||||||
|
let handle = kernel32::GetStdHandle(handle_id);
|
||||||
|
(handle, kernel32::GetConsoleScreenBufferInfo(handle, &mut info))
|
||||||
|
};
|
||||||
|
if res == 0 {
|
||||||
|
return Err(io::Error::last_os_error());
|
||||||
|
}
|
||||||
|
let attr = TextAttributes::from_word(info.wAttributes);
|
||||||
|
Ok(Console {
|
||||||
|
handle: handle,
|
||||||
|
start_attr: attr,
|
||||||
|
cur_attr: attr,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new Console to stdout.
|
||||||
|
///
|
||||||
|
/// If there was a problem creating the console, then an error is returned.
|
||||||
|
pub fn stdout() -> io::Result<Console> {
|
||||||
|
Self::create_for_stream(STD_OUTPUT_HANDLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new Console to stderr.
|
||||||
|
///
|
||||||
|
/// If there was a problem creating the console, then an error is returned.
|
||||||
|
pub fn stderr() -> io::Result<Console> {
|
||||||
|
Self::create_for_stream(STD_ERROR_HANDLE)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Applies the current text attributes.
|
||||||
|
fn set(&mut self) -> io::Result<()> {
|
||||||
|
let attr = self.cur_attr.to_word();
|
||||||
|
let res = unsafe {
|
||||||
|
kernel32::SetConsoleTextAttribute(self.handle, attr)
|
||||||
|
};
|
||||||
|
if res == 0 {
|
||||||
|
return Err(io::Error::last_os_error());
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply the given intensity and color attributes to the console
|
||||||
|
/// foreground.
|
||||||
|
///
|
||||||
|
/// If there was a problem setting attributes on the console, then an error
|
||||||
|
/// is returned.
|
||||||
|
pub fn fg(&mut self, intense: Intense, color: Color) -> io::Result<()> {
|
||||||
|
self.cur_attr.fg_color = color;
|
||||||
|
self.cur_attr.fg_intense = intense;
|
||||||
|
self.set()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Apply the given intensity and color attributes to the console
|
||||||
|
/// background.
|
||||||
|
///
|
||||||
|
/// If there was a problem setting attributes on the console, then an error
|
||||||
|
/// is returned.
|
||||||
|
pub fn bg(&mut self, intense: Intense, color: Color) -> io::Result<()> {
|
||||||
|
self.cur_attr.bg_color = color;
|
||||||
|
self.cur_attr.bg_intense = intense;
|
||||||
|
self.set()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reset the console text attributes to their original settings.
|
||||||
|
///
|
||||||
|
/// The original settings correspond to the text attributes on the console
|
||||||
|
/// when this `Console` value was created.
|
||||||
|
///
|
||||||
|
/// If there was a problem setting attributes on the console, then an error
|
||||||
|
/// is returned.
|
||||||
|
pub fn reset(&mut self) -> io::Result<()> {
|
||||||
|
self.cur_attr = self.start_attr;
|
||||||
|
self.set()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A representation of text attributes for the Windows console.
|
||||||
|
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||||
|
struct TextAttributes {
|
||||||
|
fg_color: Color,
|
||||||
|
fg_intense: Intense,
|
||||||
|
bg_color: Color,
|
||||||
|
bg_intense: Intense,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TextAttributes {
|
||||||
|
fn to_word(&self) -> WORD {
|
||||||
|
let mut w = 0;
|
||||||
|
w |= self.fg_color.to_fg();
|
||||||
|
w |= self.fg_intense.to_fg();
|
||||||
|
w |= self.bg_color.to_bg();
|
||||||
|
w |= self.bg_intense.to_bg();
|
||||||
|
w as WORD
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_word(word: WORD) -> TextAttributes {
|
||||||
|
let attr = word as DWORD;
|
||||||
|
TextAttributes {
|
||||||
|
fg_color: Color::from_fg(attr),
|
||||||
|
fg_intense: Intense::from_fg(attr),
|
||||||
|
bg_color: Color::from_bg(attr),
|
||||||
|
bg_intense: Intense::from_bg(attr),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether to use intense colors or not.
|
||||||
|
#[allow(missing_docs)]
|
||||||
|
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||||
|
pub enum Intense {
|
||||||
|
Yes,
|
||||||
|
No,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Intense {
|
||||||
|
fn to_bg(&self) -> DWORD {
|
||||||
|
self.to_fg() << 4
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_bg(word: DWORD) -> Intense {
|
||||||
|
Intense::from_fg(word >> 4)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_fg(&self) -> DWORD {
|
||||||
|
match *self {
|
||||||
|
Intense::No => 0,
|
||||||
|
Intense::Yes => FG_INTENSITY,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_fg(word: DWORD) -> Intense {
|
||||||
|
if word & FG_INTENSITY > 0 {
|
||||||
|
Intense::Yes
|
||||||
|
} else {
|
||||||
|
Intense::No
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The set of available colors for use with a Windows console.
|
||||||
|
#[allow(missing_docs)]
|
||||||
|
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
|
||||||
|
pub enum Color {
|
||||||
|
Black,
|
||||||
|
Blue,
|
||||||
|
Green,
|
||||||
|
Red,
|
||||||
|
Cyan,
|
||||||
|
Magenta,
|
||||||
|
Yellow,
|
||||||
|
White,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Color {
|
||||||
|
fn to_bg(&self) -> DWORD {
|
||||||
|
self.to_fg() << 4
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_bg(word: DWORD) -> Color {
|
||||||
|
Color::from_fg(word >> 4)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn to_fg(&self) -> DWORD {
|
||||||
|
match *self {
|
||||||
|
Color::Black => 0,
|
||||||
|
Color::Blue => FG_BLUE,
|
||||||
|
Color::Green => FG_GREEN,
|
||||||
|
Color::Red => FG_RED,
|
||||||
|
Color::Cyan => FG_CYAN,
|
||||||
|
Color::Magenta => FG_MAGENTA,
|
||||||
|
Color::Yellow => FG_YELLOW,
|
||||||
|
Color::White => FG_WHITE,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_fg(word: DWORD) -> Color {
|
||||||
|
match word & 0b111 {
|
||||||
|
FG_BLUE => Color::Blue,
|
||||||
|
FG_GREEN => Color::Green,
|
||||||
|
FG_RED => Color::Red,
|
||||||
|
FG_CYAN => Color::Cyan,
|
||||||
|
FG_MAGENTA => Color::Magenta,
|
||||||
|
FG_YELLOW => Color::Yellow,
|
||||||
|
FG_WHITE => Color::White,
|
||||||
|
_ => Color::Black,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||