Compare commits
641 Commits
| Author | SHA1 | Date |
|---|---|---|
|
|
8f1308b90d | |
|
|
e856b05c2c | |
|
|
fe9e750dab | |
|
|
842ae1f754 | |
|
|
339634f7bc | |
|
|
c1632693bb | |
|
|
e19cb32009 | |
|
|
b69c8ef940 | |
|
|
2724711060 | |
|
|
0a68ab7f4c | |
|
|
8675178be1 | |
|
|
9bafb8a280 | |
|
|
8e5549862a | |
|
|
8797fcd517 | |
|
|
0995d4d669 | |
|
|
6c0c273a0b | |
|
|
3eeba790fd | |
|
|
61a29ec373 | |
|
|
9fbbbf7c73 | |
|
|
331b68d909 | |
|
|
8c3e093561 | |
|
|
e35e5e3af1 | |
|
|
906287b305 | |
|
|
0dbb6abcc5 | |
|
|
03e097cc82 | |
|
|
91c1964918 | |
|
|
373f3d389a | |
|
|
828c47109d | |
|
|
94245b25df | |
|
|
734542f0af | |
|
|
1f8166ba9f | |
|
|
6f1ddaa615 | |
|
|
7ee2891517 | |
|
|
b893e4aa20 | |
|
|
eff869906a | |
|
|
2a0107e189 | |
|
|
5d8c4de212 | |
|
|
1f9d7e8373 | |
|
|
184470f871 | |
|
|
342dbc85cc | |
|
|
457e16e84f | |
|
|
43b3736b75 | |
|
|
64a49ffe17 | |
|
|
0afd2006c6 | |
|
|
3c270173a7 | |
|
|
8d73f9ef4c | |
|
|
472c086805 | |
|
|
400c277f24 | |
|
|
e759564550 | |
|
|
deebe7137c | |
|
|
cb14ccbaaf | |
|
|
eb892795e9 | |
|
|
09de90066b | |
|
|
cd1f27c12b | |
|
|
b837de8358 | |
|
|
7a70f35883 | |
|
|
4b17dae385 | |
|
|
efefe3f54a | |
|
|
4b70a1fc25 | |
|
|
a7893f399e | |
|
|
1cb6c12851 | |
|
|
c4f7485ecf | |
|
|
228f50413e | |
|
|
d1867b1b51 | |
|
|
6d2497582e | |
|
|
885c43b8af | |
|
|
8ad47b0b23 | |
|
|
e93af99424 | |
|
|
5862ab4f92 | |
|
|
4110cac45c | |
|
|
d66b18e8ae | |
|
|
b532fc6a38 | |
|
|
99cf073835 | |
|
|
ec7e1b8b81 | |
|
|
a4aab38901 | |
|
|
5202900618 | |
|
|
26444a98ad | |
|
|
bced3242f3 | |
|
|
08aabdad76 | |
|
|
170ee0b928 | |
|
|
2c9a54438a | |
|
|
84f4886809 | |
|
|
e26fd6b643 | |
|
|
ce5de20f80 | |
|
|
3ff2d135b5 | |
|
|
1e65b4a209 | |
|
|
db3545b7b0 | |
|
|
1898a0c4a9 | |
|
|
0d32357b10 | |
|
|
1be2abb056 | |
|
|
fb392534ef | |
|
|
bd49aac9d1 | |
|
|
94838863fd | |
|
|
79973a58ea | |
|
|
b9a72b55ca | |
|
|
ef55f7ddd3 | |
|
|
28b78e7ddd | |
|
|
d2072e2cac | |
|
|
3edb73cb23 | |
|
|
6d1280ee9d | |
|
|
0c457e590a | |
|
|
dc307fc0fd | |
|
|
d6256e9fc6 | |
|
|
1645828527 | |
|
|
e774b08dc5 | |
|
|
99067b2e59 | |
|
|
f039b50c4e | |
|
|
7d5bd97142 | |
|
|
70b5055631 | |
|
|
1be25e70df | |
|
|
9000575f7c | |
|
|
220ebf935c | |
|
|
959c4a2b26 | |
|
|
443d43df21 | |
|
|
80080cd57c | |
|
|
80922a93fa | |
|
|
45494fc74b | |
|
|
d92e2339a1 | |
|
|
659bf92d99 | |
|
|
3e4d9bcd85 | |
|
|
d3076cdfe0 | |
|
|
51436cefe8 | |
|
|
08a8177286 | |
|
|
e6d5fd64e0 | |
|
|
ac9f3a7fd5 | |
|
|
289ab28b98 | |
|
|
46ad6c9a5e | |
|
|
d20dcbe8db | |
|
|
70c3c84196 | |
|
|
53840c6a98 | |
|
|
068fff8711 | |
|
|
5735d01804 | |
|
|
f60de0d8f8 | |
|
|
cb3ab91492 | |
|
|
4eea79ed6a | |
|
|
03c051a525 | |
|
|
eccdf80b95 | |
|
|
eb51bf9b1a | |
|
|
5d7b438fd6 | |
|
|
ef0b97fb57 | |
|
|
c6c3522159 | |
|
|
2908c8eaa8 | |
|
|
f05b8e0ed6 | |
|
|
01bca6b39f | |
|
|
d2835e56a4 | |
|
|
0cf110e69e | |
|
|
a88adb0488 | |
|
|
4010a58dde | |
|
|
b9e28b9b23 | |
|
|
d0e005da23 | |
|
|
7a4f19e6b3 | |
|
|
f958e7b96f | |
|
|
4c99bf3b75 | |
|
|
e3066a1d7a | |
|
|
f0510a169a | |
|
|
738df6c362 | |
|
|
83a38db110 | |
|
|
9e3448d992 | |
|
|
70e3c0ddd8 | |
|
|
017c08a45d | |
|
|
f32f4ffaee | |
|
|
7379ba7b19 | |
|
|
3aeb6d6356 | |
|
|
4246a7b16f | |
|
|
e44fe49c8f | |
|
|
52cd5fdfc1 | |
|
|
947f1ad2b6 | |
|
|
4d00884d8c | |
|
|
cfcc82aaca | |
|
|
0794e02b52 | |
|
|
975965abed | |
|
|
a678bed154 | |
|
|
4ec6f1eec0 | |
|
|
d1527376e7 | |
|
|
b99719ce60 | |
|
|
dc869852bc | |
|
|
3079e7a218 | |
|
|
5cd769c2f4 | |
|
|
977ad5c1a4 | |
|
|
57a0ccef38 | |
|
|
94c013886a | |
|
|
c5e209d78e | |
|
|
3e653c46b0 | |
|
|
91f3b16993 | |
|
|
0f3df0f4da | |
|
|
0e7219b191 | |
|
|
1d2c4b134f | |
|
|
b245c462c9 | |
|
|
876e58b159 | |
|
|
66d9733da7 | |
|
|
c55deab3a2 | |
|
|
edcb697793 | |
|
|
d314d75db1 | |
|
|
c89a52caf7 | |
|
|
9c18cfe273 | |
|
|
779d4c33f4 | |
|
|
072c24687b | |
|
|
b811b2bd47 | |
|
|
355bfbd328 | |
|
|
7b3632bdad | |
|
|
4fe41f09ff | |
|
|
cd7c52e4fa | |
|
|
86140af50e | |
|
|
e5cd5e5bfe | |
|
|
dc89f1cd27 | |
|
|
388a1e06d4 | |
|
|
61eeeb7876 | |
|
|
df7da4288c | |
|
|
70896f1da4 | |
|
|
0a38cad926 | |
|
|
1e38fb6f7b | |
|
|
9b3dc3e581 | |
|
|
37b30602fd | |
|
|
7afdee4c58 | |
|
|
d4d8e01e31 | |
|
|
e5e0da00fe | |
|
|
dc61cdc7a4 | |
|
|
0fa2e1afc7 | |
|
|
7ca90ba728 | |
|
|
cd6fa5bb30 | |
|
|
fa05641661 | |
|
|
97ba4e8616 | |
|
|
9882478fb5 | |
|
|
9f5b7e1846 | |
|
|
05afac7082 | |
|
|
ae362b0f02 | |
|
|
435540606e | |
|
|
96aa12c140 | |
|
|
9560355a7c | |
|
|
b44ac231c1 | |
|
|
7ff3924f0b | |
|
|
39c3729524 | |
|
|
faddcbd15f | |
|
|
78a2d309d1 | |
|
|
35940e7584 | |
|
|
524415b5d5 | |
|
|
8882310450 | |
|
|
6d15f1319e | |
|
|
69d3308c71 | |
|
|
5c57b20936 | |
|
|
e09319f29f | |
|
|
b15242881e | |
|
|
e02507e5a1 | |
|
|
284662e156 | |
|
|
1b9f823cef | |
|
|
f0f37d841c | |
|
|
58b20db9f1 | |
|
|
a98a113a4b | |
|
|
164d01d163 | |
|
|
ddd94474b8 | |
|
|
541b023b7f | |
|
|
9b502d9245 | |
|
|
b9c352fb7c | |
|
|
48ef668e1e | |
|
|
481c39ace3 | |
|
|
6b9305250d | |
|
|
87bd15f927 | |
|
|
db23ae933f | |
|
|
ad76b3685f | |
|
|
34cb23bc6e | |
|
|
702bfee988 | |
|
|
dfe8b1599d | |
|
|
ca094d8264 | |
|
|
5113dcfb36 | |
|
|
d3f4c65459 | |
|
|
2504f238e5 | |
|
|
9646055560 | |
|
|
80d4abae34 | |
|
|
19ae05d68a | |
|
|
5c62b2ab1b | |
|
|
6cc4d9e0c7 | |
|
|
1ddfc08d7d | |
|
|
cca68bb9ab | |
|
|
d6db0f7d79 | |
|
|
d60562130c | |
|
|
aa1945b017 | |
|
|
dafcaec192 | |
|
|
3c9eda75e9 | |
|
|
8635d68864 | |
|
|
6e7b3cecb8 | |
|
|
1e12c3f7a6 | |
|
|
9e40e0a0f4 | |
|
|
4706323976 | |
|
|
4721c7f553 | |
|
|
c82c00650a | |
|
|
9e54e68da5 | |
|
|
4423230c11 | |
|
|
a04fbe6ccc | |
|
|
f599ae5ff1 | |
|
|
de81f38622 | |
|
|
a40944d336 | |
|
|
e0f184f263 | |
|
|
6c1623a3ad | |
|
|
4428b15162 | |
|
|
2adc96833a | |
|
|
b7ce20b2ca | |
|
|
5e3828882e | |
|
|
78cba6b7ca | |
|
|
9be92b9834 | |
|
|
53cbd332ca | |
|
|
2ff2836159 | |
|
|
0d008b109e | |
|
|
a29faa8288 | |
|
|
809f8ba6c4 | |
|
|
1912cbdea4 | |
|
|
b1fb7ac2ff | |
|
|
b5726e5edf | |
|
|
9eb100c819 | |
|
|
86387d0baf | |
|
|
c6f9e2eac9 | |
|
|
73df548532 | |
|
|
ae87699824 | |
|
|
8568ef7d99 | |
|
|
c6f7a99b1c | |
|
|
b5bd536e6b | |
|
|
d029af3e89 | |
|
|
af2bb98901 | |
|
|
68c4edf8b6 | |
|
|
300d6eda21 | |
|
|
33b567d453 | |
|
|
c779d21c13 | |
|
|
d818c5ebf2 | |
|
|
072b581f98 | |
|
|
2de353d8d6 | |
|
|
ca2f19ae52 | |
|
|
b8bdfd8601 | |
|
|
a985a0891e | |
|
|
a688e268b3 | |
|
|
3a7384e5f1 | |
|
|
ca17c39172 | |
|
|
55f0628c2b | |
|
|
276167be9c | |
|
|
d87f4f2b60 | |
|
|
1684fbf866 | |
|
|
c0c5d829e2 | |
|
|
0a0e4fe606 | |
|
|
979f17cf3b | |
|
|
fe6e2e57c3 | |
|
|
2c303a2869 | |
|
|
0f395d037b | |
|
|
839eab1384 | |
|
|
98fbd525ee | |
|
|
046c2957f3 | |
|
|
18bae485ae | |
|
|
46023a86b6 | |
|
|
6f3b89c98a | |
|
|
0b7d925b50 | |
|
|
785346c12d | |
|
|
a998ec309c | |
|
|
557394dc56 | |
|
|
5990cf1e8e | |
|
|
cf393b8fec | |
|
|
662d80e1a6 | |
|
|
270fbf6473 | |
|
|
06b062c122 | |
|
|
6fa603981d | |
|
|
8f5d601758 | |
|
|
08aad5a755 | |
|
|
3ffb514f71 | |
|
|
24f64b3e32 | |
|
|
e84c5fce37 | |
|
|
e94e00af53 | |
|
|
185478cf8e | |
|
|
98d8120ccd | |
|
|
3804fd9a91 | |
|
|
bd46baa639 | |
|
|
c64e795447 | |
|
|
0e5769154c | |
|
|
d4b57510f1 | |
|
|
b06fb4e425 | |
|
|
1c2e99a5b3 | |
|
|
43e543acae | |
|
|
3f1f2534a3 | |
|
|
821062bb81 | |
|
|
7cd9f2acb0 | |
|
|
7b7a0d2c8e | |
|
|
f50d0e6c41 | |
|
|
bbe9e93164 | |
|
|
beb57d2e49 | |
|
|
a03aa3157f | |
|
|
4deba5f147 | |
|
|
af4c08a08b | |
|
|
deb1936027 | |
|
|
fb52343aa3 | |
|
|
fdf3655e63 | |
|
|
d83e7c1652 | |
|
|
8e0c7eff17 | |
|
|
b7406919dc | |
|
|
656abbbbf8 | |
|
|
ef751d34f2 | |
|
|
4ef9e6b0de | |
|
|
ecd59455b0 | |
|
|
15e6924338 | |
|
|
ad86a8b954 | |
|
|
61fdb6e206 | |
|
|
193de54b6d | |
|
|
b6c33d2901 | |
|
|
b65b03fe63 | |
|
|
5193ab8a97 | |
|
|
84965712f6 | |
|
|
5f0d55bcfa | |
|
|
277d19816e | |
|
|
a7b370bc3d | |
|
|
efd765eba7 | |
|
|
192e2c333e | |
|
|
89b4cec3cb | |
|
|
4660afb7d8 | |
|
|
e9eb7d32ce | |
|
|
f7075e1b64 | |
|
|
f32fdaa93a | |
|
|
1c8e3f8142 | |
|
|
298161114b | |
|
|
0d0335bca0 | |
|
|
1e2e380876 | |
|
|
bceb625984 | |
|
|
a5dda7ae91 | |
|
|
9e111a334b | |
|
|
74a3576132 | |
|
|
0646063509 | |
|
|
c6c1f3eef7 | |
|
|
47ab466d85 | |
|
|
378967c2a5 | |
|
|
2cc854bd6b | |
|
|
4d83f057ac | |
|
|
573ae6c488 | |
|
|
fce4347a3c | |
|
|
7b2076c113 | |
|
|
7e18e0eb4c | |
|
|
22100ceed3 | |
|
|
40102be04a | |
|
|
201ab43631 | |
|
|
defd1740b8 | |
|
|
4544ddc219 | |
|
|
7e87a88d71 | |
|
|
db4bb5ada6 | |
|
|
09b324f7d4 | |
|
|
35773d43da | |
|
|
eeda506990 | |
|
|
cda65e3da5 | |
|
|
d016276478 | |
|
|
2a1e06975d | |
|
|
930ed2ac7c | |
|
|
18367353df | |
|
|
ba3952d86b | |
|
|
c5b25fa494 | |
|
|
e1c4db4dab | |
|
|
12590137f5 | |
|
|
2680cc85fb | |
|
|
9aa8242d92 | |
|
|
c12304a71a | |
|
|
cf7032dd99 | |
|
|
f79bbfcdc1 | |
|
|
e966b9c169 | |
|
|
27badf6b3d | |
|
|
bb9dd410da | |
|
|
f1d4a841eb | |
|
|
80e61cd3be | |
|
|
f9617d4f64 | |
|
|
04472af9c0 | |
|
|
448da43bf7 | |
|
|
2add15e92c | |
|
|
efc6b12c65 | |
|
|
0ece8bf672 | |
|
|
079f14ec46 | |
|
|
cc57469a65 | |
|
|
33db232493 | |
|
|
99586a56cf | |
|
|
e3a4879fcd | |
|
|
c71cb72a29 | |
|
|
501cb3dce2 | |
|
|
fff8feb1f6 | |
|
|
5019e8a122 | |
|
|
2d5217c56a | |
|
|
b42a58c86d | |
|
|
e33d595201 | |
|
|
2016892e64 | |
|
|
44ad8f506a | |
|
|
cfa4097df9 | |
|
|
32a55103e1 | |
|
|
ab869ef974 | |
|
|
6d6e17c22f | |
|
|
267e5a6979 | |
|
|
ca781a3c3b | |
|
|
7f6f600fed | |
|
|
61ef8eb3c3 | |
|
|
255c9dea8c | |
|
|
c58b030456 | |
|
|
f86bdc1f84 | |
|
|
54f6bf3f79 | |
|
|
c2805f82e8 | |
|
|
58df6bbc2b | |
|
|
e8569b6f2c | |
|
|
d3998faf59 | |
|
|
a774a639d4 | |
|
|
a9a62c2a11 | |
|
|
54bebcf2f4 | |
|
|
2746e2964a | |
|
|
94b0afa07c | |
|
|
3204e88a5b | |
|
|
42793796fd | |
|
|
7021ec2449 | |
|
|
affbf5012e | |
|
|
2ee484d696 | |
|
|
8317cc1e29 | |
|
|
5ef6a2b8f3 | |
|
|
1a68d794d6 | |
|
|
418b6a3409 | |
|
|
15180f92d0 | |
|
|
c5fa9d05ca | |
|
|
b80f38dcb2 | |
|
|
8e2a492200 | |
|
|
25d445a6c5 | |
|
|
923fa4cd0e | |
|
|
579a3e794a | |
|
|
4e1e237c32 | |
|
|
4098b26b75 | |
|
|
7443de18e9 | |
|
|
7b8aed8a5f | |
|
|
005e3ed096 | |
|
|
d678908c00 | |
|
|
13eb6f0828 | |
|
|
3211b90b02 | |
|
|
ef124acf34 | |
|
|
8b9d4824f5 | |
|
|
c8aba1848b | |
|
|
174618cf6b | |
|
|
f15699a4f4 | |
|
|
3b187d1abc | |
|
|
850528fb87 | |
|
|
87f2f08f23 | |
|
|
3c4f3d5d58 | |
|
|
b728ce0659 | |
|
|
b0521d6f5c | |
|
|
c812216cc5 | |
|
|
9701e4face | |
|
|
b2ddd9a396 | |
|
|
67258b58a4 | |
|
|
d46775802e | |
|
|
9579f941be | |
|
|
e5736d3888 | |
|
|
4aaf0583c5 | |
|
|
906575df3a | |
|
|
8bd8b20f9c | |
|
|
beb4f3eaf6 | |
|
|
e32a84ea05 | |
|
|
249bab36eb | |
|
|
790305bc07 | |
|
|
030860c0a1 | |
|
|
2b24cca340 | |
|
|
08a12912c7 | |
|
|
9f75d5ed14 | |
|
|
a785a5931f | |
|
|
85ec59e255 | |
|
|
b344611371 | |
|
|
1de8ad0fc4 | |
|
|
b8ca8aade4 | |
|
|
e5ad3c471b | |
|
|
945a364970 | |
|
|
78119452aa | |
|
|
d42dadfc45 | |
|
|
2271de7a1d | |
|
|
1cd81385b2 | |
|
|
a9c00d9dbe | |
|
|
1f91d752f0 | |
|
|
ad5eb89c4c | |
|
|
b38faf7dcf | |
|
|
9b84a7817c | |
|
|
2486e49a29 | |
|
|
a4550899be | |
|
|
7cb006526e | |
|
|
ec2aa0871e | |
|
|
e618a5a593 | |
|
|
c065cbb92b | |
|
|
ab5fcbb90f | |
|
|
0b9dfedc20 | |
|
|
04ce7aa0bb | |
|
|
07274a9a2c | |
|
|
1b0e50854f | |
|
|
313d2a9080 | |
|
|
377e0766a2 | |
|
|
ba2eef7b0e | |
|
|
4b7fd8b59d | |
|
|
606743b99d | |
|
|
16e6ee639b | |
|
|
33bbb4e720 | |
|
|
41f798a34e | |
|
|
b485001fcb | |
|
|
8fee9a9714 | |
|
|
7867e26868 | |
|
|
ea1c421838 | |
|
|
63a1c1448a | |
|
|
3b713ed008 | |
|
|
e58057820f | |
|
|
36c274ec19 | |
|
|
af7565ec3d | |
|
|
07227e4a9a | |
|
|
6b0995599a | |
|
|
9f58f93562 | |
|
|
44bc8523dd | |
|
|
caed51e268 | |
|
|
b2a69b5198 | |
|
|
de8ebb1577 | |
|
|
fba6432f40 | |
|
|
0ecb496ae9 | |
|
|
7f29e8054d | |
|
|
47c01a5687 | |
|
|
3f01b819c6 | |
|
|
379725e796 | |
|
|
b78ced0c55 | |
|
|
19c4af1a2e | |
|
|
3c291a2d3f | |
|
|
ff1e9d5766 | |
|
|
3dbb0cb277 | |
|
|
0220c481ea | |
|
|
f2090bd198 | |
|
|
dc5d6d6d66 | |
|
|
0d953ff236 | |
|
|
3e978d774b | |
|
|
4b6e630e7f | |
|
|
1bfb8c18c0 | |
|
|
77c065ada3 | |
|
|
ed7f4374da | |
|
|
128eb0d125 | |
|
|
c545fb9683 | |
|
|
3832444429 | |
|
|
f5f64d55e0 | |
|
|
86397058fb | |
|
|
b6d460ca9c | |
|
|
f8e3bd7e29 | |
|
|
b83f5be912 | |
|
|
1698928bd3 | |
|
|
253e4d4e89 | |
|
|
f794e238a0 | |
|
|
3d283b699e | |
|
|
66bd3a8c86 | |
|
|
de21308219 | |
|
|
82d7e9ce43 | |
|
|
db1e82c2f4 | |
|
|
75a0bdc1f1 | |
|
|
79f5f18a69 | |
|
|
50816be0c1 |
|
|
@ -0,0 +1,19 @@
|
|||
FROM sherlock/sherlock as sherlock

# Install Node.js from the NodeSource repository — required by the Apify CLI.
RUN apt-get update; apt-get install curl gpg -y
RUN mkdir -p /etc/apt/keyrings
RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list

# Install Node.js together with the dependencies of the Actor shell script
# (bash, jq, jo, xz-utils) in a single layer. The original file repeated the
# same "apt-get install bash jq jo xz-utils nodejs" a second time further
# down; that redundant layer has been removed.
RUN apt-get update && apt-get install -y curl bash git jq jo xz-utils nodejs

# Install Apify CLI (node.js) for the Actor Runtime
RUN npm -g install apify-cli

# Copy Actor dir with the actorization shell script
COPY .actor/ .actor

ENTRYPOINT [".actor/actor.sh"]
|
||||
|
|
@ -0,0 +1,93 @@
|
|||
# Sherlock Actor on Apify
|
||||
|
||||
[](https://apify.com/netmilk/sherlock?fpr=sherlock)
|
||||
|
||||
This Actor wraps the [Sherlock Project](https://sherlockproject.xyz/) to provide serverless username reconnaissance across social networks in the cloud. It helps you find usernames across multiple social media platforms without installing and running the tool locally.
|
||||
|
||||
## What are Actors?
|
||||
[Actors](https://docs.apify.com/platform/actors?fpr=sherlock) are serverless microservices running on the [Apify Platform](https://apify.com/?fpr=sherlock). They are based on the [Actor SDK](https://docs.apify.com/sdk/js?fpr=sherlock) and can be found in the [Apify Store](https://apify.com/store?fpr=sherlock). Learn more about Actors in the [Apify Whitepaper](https://whitepaper.actor?fpr=sherlock).
|
||||
|
||||
## Usage
|
||||
|
||||
### Apify Console
|
||||
|
||||
1. Go to the Apify Actor page
|
||||
2. Click "Run"
|
||||
3. In the input form, fill in **Username(s)** to search for
|
||||
4. The Actor will run and produce its outputs in the default datastore
|
||||
|
||||
|
||||
### Apify CLI
|
||||
|
||||
```bash
|
||||
apify call YOUR_USERNAME/sherlock --input='{
|
||||
"usernames": ["johndoe", "janedoe"]
|
||||
}'
|
||||
```
|
||||
|
||||
### Using Apify API
|
||||
|
||||
```bash
|
||||
curl --request POST \
|
||||
--url "https://api.apify.com/v2/acts/YOUR_USERNAME~sherlock/run" \
|
||||
--header 'Content-Type: application/json' \
|
||||
--header 'Authorization: Bearer YOUR_API_TOKEN' \
|
||||
--data '{
|
||||
    "usernames": ["johndoe", "janedoe"]
  }'
|
||||
```
|
||||
|
||||
## Input Parameters
|
||||
|
||||
The Actor accepts a JSON schema with the following structure:
|
||||
|
||||
| Field | Type | Required | Default | Description |
|
||||
|-------|------|----------|---------|-------------|
|
||||
| `usernames` | array | Yes | - | List of usernames to search for |
|
||||
| `usernames[]` | string | Yes | - | Username to search for |
|
||||
|
||||
|
||||
### Example Input
|
||||
|
||||
```json
|
||||
{
|
||||
  "usernames": ["techuser", "designuser"]
|
||||
}
|
||||
```
|
||||
|
||||
## Output
|
||||
|
||||
The Actor stores its results in the default dataset:
|
||||
|
||||
### Dataset Record
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|-------|------|----------|-------------|
|
||||
| `username` | string | Yes | Username the search was conducted for |
|
||||
| `links` | array | Yes | Array with found links to the social media |
|
||||
| `links[]` | string | No | URL to the account |
|
||||
|
||||
### Example Dataset Item (JSON)
|
||||
|
||||
```json
|
||||
{
|
||||
"username": "johndoe",
|
||||
"links": [
|
||||
"https://github.com/johndoe"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Performance & Resources
|
||||
|
||||
- **Memory Requirements**:
|
||||
- Minimum: 512 MB RAM
|
||||
- Recommended: 1 GB RAM for multiple usernames
|
||||
- **Processing Time**:
|
||||
- Single username: ~1-2 minutes
|
||||
- Multiple usernames: 2-5 minutes
|
||||
- Varies based on number of sites checked and response times
|
||||
|
||||
|
||||
For more help, check the [Sherlock Project documentation](https://github.com/sherlock-project/sherlock) or raise an issue in the Actor's repository.
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"actorSpecification": 1,
|
||||
"name": "sherlock",
|
||||
"version": "0.0",
|
||||
"buildTag": "latest",
|
||||
"environmentVariables": {},
|
||||
"dockerFile": "./Dockerfile",
|
||||
"dockerContext": "../",
|
||||
"input": "./input_schema.json",
|
||||
"storages": {
|
||||
"dataset": "./dataset_schema.json"
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
#!/bin/bash
# Entry point of the Sherlock Actor: reads the Actor input, runs sherlock for
# every requested username, and pushes one dataset record per username.

# Flatten the "usernames" array from the Actor input into a space-separated
# list. $(...) replaces the legacy backtick substitution.
INPUT=$(apify actor:get-input | jq -r '.usernames[]' | xargs echo)
echo "INPUT: $INPUT"

# sherlock writes a <username>.txt result file for each username given.
# $INPUT is deliberately unquoted so the shell word-splits it into
# one argument per username.
sherlock $INPUT

for username in $INPUT; do
    # Escape the leading characters that have special meaning to jo
    # (@, :, %): https://github.com/jpmens/jo/blob/master/jo.md#description
    safe_username=$(echo "$username" | sed -e 's/^@/\\@/' -e 's/^:/\\:/' -e 's/%/\\%/')
    echo "pushing results for username: $username, content:"
    cat "$username.txt"
    # Drop the trailing summary line sherlock appends, wrap the remaining
    # URLs into a JSON array, attach the username, and push the record.
    sed '$d' "$username.txt" | jo -a | jo username="$safe_username" links:=- | apify actor:push-data
done
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
{
|
||||
"actorSpecification": 1,
|
||||
"fields":{
|
||||
  "title": "Sherlock actor dataset",
|
||||
  "description": "Schema of the dataset records produced by the Sherlock actor",
|
||||
"type": "object",
|
||||
"schemaVersion": 1,
|
||||
"properties": {
|
||||
"links": {
|
||||
"title": "Links to accounts",
|
||||
"type": "array",
|
||||
        "description": "A list of social media accounts found for the username"
|
||||
},
|
||||
"username": {
|
||||
"title": "Lookup username",
|
||||
"type": "string",
|
||||
"description": "Username the lookup was performed for"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"username",
|
||||
"links"
|
||||
]
|
||||
},
|
||||
"views": {
|
||||
"overview": {
|
||||
"title": "Overview",
|
||||
"transformation": {
|
||||
"fields": [
|
||||
"username",
|
||||
"links"
|
||||
        ]
|
||||
},
|
||||
"display": {
|
||||
"component": "table",
|
||||
"links": {
|
||||
"label": "Links"
|
||||
},
|
||||
"username":{
|
||||
"label": "Username"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"title": "Sherlock actor input",
|
||||
"description": "This is actor input schema",
|
||||
"type": "object",
|
||||
"schemaVersion": 1,
|
||||
"properties": {
|
||||
"usernames": {
|
||||
"title": "Usernames to hunt down",
|
||||
"type": "array",
|
||||
"description": "A list of usernames to be checked for existence across social media",
|
||||
"editor": "stringList",
|
||||
"prefill": ["johndoe"]
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"usernames"
|
||||
]
|
||||
}
|
||||
|
|
@ -5,4 +5,4 @@ tests/
|
|||
*.txt
|
||||
!/requirements.txt
|
||||
venv/
|
||||
|
||||
devel/
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
### REPOSITORY
|
||||
/.github/CODEOWNERS @sdushantha @ppfeister
|
||||
/.github/FUNDING.yml @sdushantha
|
||||
/LICENSE @sdushantha
|
||||
|
||||
### PACKAGING
|
||||
# Changes made to these items without code owner approval may negatively
|
||||
# impact packaging pipelines.
|
||||
/pyproject.toml @ppfeister @sdushantha
|
||||
|
||||
### REGRESSION
|
||||
/.github/workflows/regression.yml @ppfeister
|
||||
/tox.ini @ppfeister
|
||||
/pytest.ini @ppfeister
|
||||
/tests/ @ppfeister
|
||||
|
|
@ -0,0 +1 @@
|
|||
github: [ sdushantha, ppfeister, matheusfelipeog ]
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Report a bug in Sherlock's functionality
|
||||
title: ''
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
######################################################################
|
||||
WARNING!
|
||||
IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE
|
||||
######################################################################
|
||||
|
||||
-->
|
||||
|
||||
|
||||
## Checklist
|
||||
<!--
|
||||
Put x into all boxes (like this [x]) once you have completed what they say.
|
||||
Make sure complete everything in the checklist.
|
||||
-->
|
||||
|
||||
- [ ] I'm reporting a bug in Sherlock's functionality
|
||||
- [ ] The bug I'm reporting is not a false positive or a false negative
|
||||
- [ ] I've verified that I'm running the latest version of Sherlock
|
||||
- [ ] I've checked for similar bug reports including closed ones
|
||||
- [ ] I've checked for pull requests that attempt to fix this bug
|
||||
|
||||
## Description
|
||||
<!--
|
||||
Provide a detailed description of the bug that you have found in Sherlock.
|
||||
Provide the version of Sherlock you are running.
|
||||
-->
|
||||
|
||||
WRITE DESCRIPTION HERE
|
||||
|
|
@ -0,0 +1,71 @@
|
|||
name: Bug report
|
||||
description: File a bug report
|
||||
labels: ["bug"]
|
||||
body:
|
||||
- type: dropdown
|
||||
id: package
|
||||
attributes:
|
||||
label: Installation method
|
||||
description: |
|
||||
Some packages are maintained by the community, rather than by the Sherlock Project.
|
||||
Knowing which packages are affected helps us diagnose package-specific bugs.
|
||||
options:
|
||||
- Select one
|
||||
- PyPI (via pip)
|
||||
- Homebrew
|
||||
- Docker
|
||||
- Kali repository (via apt)
|
||||
- Built from source
|
||||
- Other (indicate below)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: package-version
|
||||
attributes:
|
||||
label: Package version
|
||||
description: |
|
||||
Knowing the version of the package you are using can help us diagnose your issue more quickly.
|
||||
You can find the version by running `sherlock --version`.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Description
|
||||
description: |
|
||||
Detailed descriptions that help contributors understand and reproduce your bug are much more likely to lead to a fix.
|
||||
Please include the following information:
|
||||
- What you were trying to do
|
||||
- What you expected to happen
|
||||
- What actually happened
|
||||
placeholder: |
|
||||
When doing {action}, the expected result should be {expected result}.
|
||||
When doing {action}, however, the actual result was {actual result}.
|
||||
This is undesirable because {reason}.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: steps-to-reproduce
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: Write a step by step list that will allow us to reproduce this bug.
|
||||
placeholder: |
|
||||
1. Do something
|
||||
2. Then do something else
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional-info
|
||||
attributes:
|
||||
label: Additional information
|
||||
description: If you have some additional information, please write it here.
|
||||
validations:
|
||||
required: false
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Code of Conduct
|
||||
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/sherlock-project/sherlock/blob/master/docs/CODE_OF_CONDUCT.md).
|
||||
options:
|
||||
- label: I agree to follow this project's Code of Conduct
|
||||
required: true
|
||||
|
|
@ -0,0 +1 @@
|
|||
blank_issues_enabled: false
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
name: False negative
|
||||
description: Report a site that is returning false negative results
|
||||
title: "False negative for: "
|
||||
labels: ["false negative"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Please include the site name in the title of your issue.
|
||||
Submit **one site per report** for faster resolution. If you have multiple sites in the same report, it often takes longer to fix.
|
||||
- type: textarea
|
||||
id: additional-info
|
||||
attributes:
|
||||
label: Additional info
|
||||
description: If you know why the site is returning false negatives, or noticed any patterns, please explain.
|
||||
placeholder: |
|
||||
Reddit is returning false negatives because...
|
||||
validations:
|
||||
required: false
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Code of Conduct
|
||||
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/sherlock-project/sherlock/blob/master/docs/CODE_OF_CONDUCT.md).
|
||||
options:
|
||||
- label: I agree to follow this project's Code of Conduct
|
||||
required: true
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
name: False positive
|
||||
description: Report a site that is returning false positive results
|
||||
title: "False positive for: "
|
||||
labels: ["false positive"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Please include the site name in the title of your issue.
|
||||
Submit **one site per report** for faster resolution. If you have multiple sites in the same report, it often takes longer to fix.
|
||||
- type: textarea
|
||||
id: additional-info
|
||||
attributes:
|
||||
label: Additional info
|
||||
description: If you know why the site is returning false positives, or noticed any patterns, please explain.
|
||||
placeholder: |
|
||||
Reddit is returning false positives because...
|
||||
False positives only occur after x searches...
|
||||
validations:
|
||||
required: false
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Code of Conduct
|
||||
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/sherlock-project/sherlock/blob/master/docs/CODE_OF_CONDUCT.md).
|
||||
options:
|
||||
- label: I agree to follow this project's Code of Conduct
|
||||
required: true
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
---
|
||||
name: Feature request
|
||||
about: Request a new functionality for Sherlock
|
||||
title: ''
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
######################################################################
|
||||
WARNING!
|
||||
IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE
|
||||
######################################################################
|
||||
|
||||
-->
|
||||
|
||||
## Checklist
|
||||
<!--
|
||||
Put x into all boxes (like this [x]) once you have completed what they say.
|
||||
Make sure complete everything in the checklist.
|
||||
-->
|
||||
- [ ] I'm reporting a feature request
|
||||
- [ ] I've checked for similar feature requests including closed ones
|
||||
|
||||
## Description
|
||||
<!--
|
||||
Provide a detailed description of the feature you would like Sherlock to have
|
||||
-->
|
||||
|
||||
WRITE DESCRIPTION HERE
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
name: Feature request
|
||||
description: Request a feature or enhancement
|
||||
labels: ["enhancement"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Concise and thoughtful titles help other contributors find and add your requested feature.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Description
|
||||
description: Describe the feature you are requesting
|
||||
placeholder: I'd like Sherlock to be able to do xyz
|
||||
validations:
|
||||
required: true
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Code of Conduct
|
||||
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/sherlock-project/sherlock/blob/master/docs/CODE_OF_CONDUCT.md).
|
||||
options:
|
||||
- label: I agree to follow this project's Code of Conduct
|
||||
required: true
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
---
|
||||
name: Question
|
||||
about: Ask us a question
|
||||
title: ''
|
||||
labels: question
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
######################################################################
|
||||
WARNING!
|
||||
IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE.
|
||||
######################################################################
|
||||
|
||||
-->
|
||||
|
||||
## Checklist
|
||||
<!--
|
||||
Put x into all boxes (like this [x]) once you have completed what they say.
|
||||
Make sure complete everything in the checklist.
|
||||
-->
|
||||
- [ ] I'm asking a question regarding Sherlock
|
||||
- [ ] My question is not a tech support question.
|
||||
|
||||
**We are not your tech support**.
|
||||
If you have questions related to `pip`, `git`, or something that is not related to Sherlock, please ask them on [Stack Overflow](https://stackoverflow.com/) or [r/learnpython](https://www.reddit.com/r/learnpython/)
|
||||
|
||||
|
||||
## Question
|
||||
|
||||
ASK YOUR QUESTION HERE
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
---
|
||||
name: Reporting false negative
|
||||
about: Reporting a site that is returning false positives
|
||||
title: ''
|
||||
labels: false negative
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
######################################################################
|
||||
WARNING!
|
||||
IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE
|
||||
######################################################################
|
||||
|
||||
-->
|
||||
|
||||
## Checklist
|
||||
<!--
|
||||
Put x into all boxes (like this [x]) once you have completed what they say.
|
||||
Make sure complete everything in the checklist.
|
||||
-->
|
||||
- [ ] I'm reporting a website that is returning **false negative** results
|
||||
- [ ] I've checked for similar site support requests including closed ones
|
||||
- [ ] I've checked for pull requests attempting to fix this false negative
|
||||
- [ ] I'm only reporting **one** site (create a separate issue for each site)
|
||||
|
||||
## Description
|
||||
<!--
|
||||
Provide the username that is causing Sherlock to return a false negative, along with any other information that might help us fix this false negative.
|
||||
-->
|
||||
|
||||
WRITE DESCRIPTION HERE
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
---
|
||||
name: Reporting false positive
|
||||
about: Reporting a site that is returning false positives
|
||||
title: ''
|
||||
labels: false positive
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
######################################################################
|
||||
WARNING!
|
||||
IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE
|
||||
######################################################################
|
||||
|
||||
-->
|
||||
|
||||
## Checklist
|
||||
<!--
|
||||
Put x into all boxes (like this [x]) once you have completed what they say.
|
||||
Make sure complete everything in the checklist.
|
||||
-->
|
||||
- [ ] I'm reporting a website that is returning **false positive** results
|
||||
- [ ] I've checked for similar site support requests including closed ones
|
||||
- [ ] I've checked for pull requests attempting to fix this false positive
|
||||
- [ ] I'm only reporting **one** site (create a separate issue for each site)
|
||||
|
||||
## Description
|
||||
<!--
|
||||
Provide the username that is causing Sherlock to return a false positive, along with any other information that might help us fix this false positive.
|
||||
-->
|
||||
|
||||
WRITE DESCRIPTION HERE
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
name: Reuest a new website
|
||||
description: Request that Sherlock add support for a new website
|
||||
title: "Requesting support for: "
|
||||
labels: ["site support request"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Ensure that the site name is in the title of your request. Requests without this information will be **closed**.
|
||||
- type: input
|
||||
id: site-url
|
||||
attributes:
|
||||
label: Site URL
|
||||
description: |
|
||||
What is the URL of the website indicated in your title?
|
||||
Websites sometimes have similar names. This helps constributors find the correct site.
|
||||
placeholder: https://reddit.com
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional-info
|
||||
attributes:
|
||||
label: Additional info
|
||||
description: If you have suggestions on how Sherlock should detect for usernames, please explain below
|
||||
placeholder: Sherlock can detect if a username exists on Reddit by checking for...
|
||||
validations:
|
||||
required: false
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Code of Conduct
|
||||
description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/sherlock-project/sherlock/blob/master/docs/CODE_OF_CONDUCT.md).
|
||||
options:
|
||||
- label: I agree to follow this project's Code of Conduct
|
||||
required: true
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
---
|
||||
name: Site support request
|
||||
about: Request support for a new site
|
||||
title: ''
|
||||
labels: site support request
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
######################################################################
|
||||
WARNING!
|
||||
IGNORING THE FOLLOWING TEMPLATE WILL RESULT IN ISSUE CLOSED AS INCOMPLETE
|
||||
######################################################################
|
||||
|
||||
-->
|
||||
|
||||
## Checklist
|
||||
<!--
|
||||
Put x into all boxes (like this [x]) once you have completed what they say.
|
||||
Make sure complete everything in the checklist.
|
||||
-->
|
||||
|
||||
- [ ] I'm requesting support for a new site
|
||||
- [ ] I've checked for similar site support requests including closed ones
|
||||
- [ ] I've checked that the site I am requesting has not been removed in the past and is not documented in [removed_sites.md](https://github.com/sherlock-project/sherlock/blob/master/removed_sites.md)
|
||||
- [ ] The site I am requesting support for is not a pornographic website
|
||||
- [ ] I'm only requesting support of **one** website (create a separate issue for each site)
|
||||
|
||||
## Description
|
||||
<!--
|
||||
Provide the url to the website and the name of the website.
|
||||
If there is anything else you want to mention regarding the site support request include that in this section.
|
||||
-->
|
||||
|
||||
URL:
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
## Security Policy
|
||||
|
||||
### Supported Versions
|
||||
|
||||
Sherlock is a forward looking project. Only the latest and most current version is supported.
|
||||
|
||||
### Reporting a Vulnerability
|
||||
|
||||
Security concerns can be submitted [__here__][report-url] without risk of exposing sensitive information. For issues that are low severity or unlikely to see exploitation, public issues are often acceptable.
|
||||
|
||||
[report-url]: https://github.com/sherlock-project/sherlock/security/advisories/new
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
name: Exclusions Updater
|
||||
|
||||
on:
|
||||
schedule:
|
||||
#- cron: '0 5 * * 0' # Runs at 05:00 every Sunday
|
||||
- cron: '0 5 * * *' # Runs at 05:00 every day
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-exclusions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.13'
|
||||
|
||||
- name: Install Poetry
|
||||
uses: abatilo/actions-poetry@v4
|
||||
with:
|
||||
poetry-version: 'latest'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
poetry install --no-interaction --with dev
|
||||
|
||||
- name: Run false positive tests
|
||||
run: |
|
||||
$(poetry env activate)
|
||||
pytest -q --tb no -m validate_targets_fp -n 20 | tee fp_test_results.txt
|
||||
deactivate
|
||||
|
||||
- name: Parse false positive detections by desired categories
|
||||
run: |
|
||||
grep -oP '(?<=test_false_pos\[)[^\]]+(?=\].*result was Claimed)' fp_test_results.txt \
|
||||
| sort -u > false_positive_exclusions.txt
|
||||
grep -oP '(?<=test_false_pos\[)[^\]]+(?=\].*result was WAF)' fp_test_results.txt \
|
||||
| sort -u > waf_hits.txt
|
||||
|
||||
- name: Detect if exclusions list changed
|
||||
id: detect_changes
|
||||
run: |
|
||||
git fetch origin exclusions || true
|
||||
|
||||
if git show origin/exclusions:false_positive_exclusions.txt >/dev/null 2>&1; then
|
||||
# If the exclusions branch and file exist, compare
|
||||
if git diff --quiet origin/exclusions -- false_positive_exclusions.txt; then
|
||||
echo "exclusions_changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "exclusions_changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
else
|
||||
# If the exclusions branch or file do not exist, treat as changed
|
||||
echo "exclusions_changed=true" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Quantify and display results
|
||||
run: |
|
||||
FP_COUNT=$(wc -l < false_positive_exclusions.txt | xargs)
|
||||
WAF_COUNT=$(wc -l < waf_hits.txt | xargs)
|
||||
echo ">>> Found $FP_COUNT false positives and $WAF_COUNT WAF hits."
|
||||
echo ">>> False positive exclusions:" && cat false_positive_exclusions.txt
|
||||
echo ">>> WAF hits:" && cat waf_hits.txt
|
||||
|
||||
- name: Commit and push exclusions list
|
||||
if: steps.detect_changes.outputs.exclusions_changed == 'true'
|
||||
run: |
|
||||
git config user.name "Paul Pfeister (automation)"
|
||||
git config user.email "code@pfeister.dev"
|
||||
|
||||
mv false_positive_exclusions.txt false_positive_exclusions.txt.tmp
|
||||
|
||||
git add -f false_positive_exclusions.txt.tmp # -f required to override .gitignore
|
||||
git stash push -m "stash false positive exclusion list" -- false_positive_exclusions.txt.tmp
|
||||
|
||||
git fetch origin exclusions || true # Allows creation of branch if deleted
|
||||
git checkout -B exclusions origin/exclusions || (git checkout --orphan exclusions && git rm -rf .)
|
||||
|
||||
git stash pop || true
|
||||
|
||||
mv false_positive_exclusions.txt.tmp false_positive_exclusions.txt
|
||||
|
||||
git rm -f false_positive_exclusions.txt.tmp || true
|
||||
git add false_positive_exclusions.txt
|
||||
git commit -m "auto: update exclusions list" || echo "No changes to commit"
|
||||
git push origin exclusions
|
||||
|
|
@ -1,38 +0,0 @@
|
|||
name: Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
- '3.12'
|
||||
- '3.11'
|
||||
- '3.10'
|
||||
- '3.9'
|
||||
- '3.8'
|
||||
- '3.7'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install ruff flake8 pytest
|
||||
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
|
||||
- name: Lint with ruff
|
||||
run: |
|
||||
# stop the build if there are Python syntax errors or undefined names
|
||||
ruff . --output-format=github --select=E9,F63,F7,F82
|
||||
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
|
||||
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
|
||||
- name: Sherlock Site Detect Tests
|
||||
run: |
|
||||
cd sherlock && python -m unittest tests.all.SherlockDetectTests --verbose
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
name: Nightly
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run Nightly Tests At 3AM (The Hour Of The Wolf) Every Day
|
||||
- cron: '0 3 * * *'
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
|
||||
- name: Sherlock Site Coverage Tests
|
||||
run: |
|
||||
cd sherlock && python -m unittest tests.all.SherlockSiteCoverageTests --verbose
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
name: Pull Request Action
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
getchange:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
matrix: ${{ steps.changes.outputs.matrix }}
|
||||
steps:
|
||||
- id: changes
|
||||
run: |
|
||||
URL="https://api.github.com/repos/sherlock-project/sherlock/pulls/${{ github.event.pull_request.number }}/files"
|
||||
FILES=$(curl -s -X GET -G $URL | jq -r '.[] | .filename')
|
||||
if echo $FILES | grep -q ".json"; then
|
||||
echo "::set-output name=matrix::{\"include\":[{\"python\":\"3.x\"}]}"
|
||||
else
|
||||
echo "::set-output name=matrix::{\"include\":[{\"python\":\"3.7\"},{\"python\":\"3.8\"}]},{\"python\":\"3.9\"},{\"python\":\"3.10\"}]},{\"python\":\"3.11\"},{\"python\":\"3.12\"}]}"
|
||||
fi
|
||||
tests:
|
||||
needs: [getchange]
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix: ${{ fromJson(needs.getchange.outputs.matrix) }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up Python ${{ matrix.python }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install ruff flake8 pytest
|
||||
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
|
||||
- name: Lint With Ruff
|
||||
run: |
|
||||
# stop the build if there are Python syntax errors or undefined names
|
||||
ruff check . --output-format=github --select=E9,F63,F7,F82
|
||||
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
|
||||
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
|
||||
- name: Sherlock Site Detect Tests
|
||||
run: |
|
||||
cd sherlock && python -m unittest tests.all.SherlockDetectTests --verbose
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
name: Regression Testing
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- release/**
|
||||
paths:
|
||||
- '.github/workflows/regression.yml'
|
||||
- '**/*.json'
|
||||
- '**/*.py'
|
||||
- '**/*.ini'
|
||||
- '**/*.toml'
|
||||
- 'Dockerfile'
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- release/**
|
||||
paths:
|
||||
- '.github/workflows/regression.yml'
|
||||
- '**/*.json'
|
||||
- '**/*.py'
|
||||
- '**/*.ini'
|
||||
- '**/*.toml'
|
||||
- 'Dockerfile'
|
||||
|
||||
jobs:
|
||||
tox-lint:
|
||||
runs-on: ubuntu-latest
|
||||
# Linting is ran through tox to ensure that the same linter
|
||||
# is used by local runners
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up linting environment
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- name: Install tox and related dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install tox
|
||||
- name: Run tox linting environment
|
||||
run: tox -e lint
|
||||
tox-matrix:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
# We want to know what specicic versions it fails on
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [
|
||||
ubuntu-latest,
|
||||
windows-latest,
|
||||
macos-latest,
|
||||
]
|
||||
python-version: [
|
||||
'3.10',
|
||||
'3.11',
|
||||
'3.12',
|
||||
'3.13',
|
||||
]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Set up environment ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install tox and related dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install tox
|
||||
pip install tox-gh-actions
|
||||
- name: Run tox
|
||||
run: tox
|
||||
docker-build-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
- name: Get version from pyproject.toml
|
||||
id: get-version
|
||||
run: |
|
||||
VERSION=$(grep -m1 'version = ' pyproject.toml | cut -d'"' -f2)
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
- name: Build Docker image
|
||||
run: |
|
||||
docker build \
|
||||
--build-arg VERSION_TAG=${{ steps.get-version.outputs.version }} \
|
||||
-t sherlock-test:latest .
|
||||
- name: Test Docker image runs
|
||||
run: docker run --rm sherlock-test:latest --version
|
||||
|
|
@ -1,13 +1,13 @@
|
|||
name: Update Site List
|
||||
name: Update Site List
|
||||
|
||||
# Trigger the workflow when changes are pushed to the main branch
|
||||
# and the changes include the sherlock/resources/data.json file
|
||||
# and the changes include the sherlock_project/resources/data.json file
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- master
|
||||
paths:
|
||||
- sherlock/resources/data.json
|
||||
- sherlock_project/resources/data.json
|
||||
|
||||
jobs:
|
||||
sync-json-data:
|
||||
|
|
@ -26,24 +26,21 @@ jobs:
|
|||
- name: Install Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.x'
|
||||
python-version: '3.x'
|
||||
|
||||
# Execute the site_list.py Python script
|
||||
- name: Execute site_list.py
|
||||
run: python site_list.py
|
||||
- name: Execute site-list.py
|
||||
run: python devel/site-list.py
|
||||
|
||||
# Commit any changes made by the script
|
||||
- name: Commit files
|
||||
run: |
|
||||
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
if ! git diff --exit-code; then
|
||||
git commit -a -m "Updated Site List"
|
||||
fi
|
||||
|
||||
# Push the changes to the remote repository
|
||||
- name: Push changes
|
||||
uses: ad-m/github-push-action@master
|
||||
- name: Pushes to another repository
|
||||
uses: sdushantha/github-action-push-to-another-repository@main
|
||||
env:
|
||||
SSH_DEPLOY_KEY: ${{ secrets.SSH_DEPLOY_KEY }}
|
||||
API_TOKEN_GITHUB: ${{ secrets.API_TOKEN_GITHUB }}
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: ${{ github.ref }}
|
||||
source-directory: 'output'
|
||||
destination-github-username: 'sherlock-project'
|
||||
commit-message: 'Updated site list'
|
||||
destination-repository-name: 'sherlockproject.xyz'
|
||||
user-email: siddharth.dushantha@gmail.com
|
||||
target-branch: master
|
||||
|
|
|
|||
|
|
@ -0,0 +1,126 @@
|
|||
name: Modified Target Validation
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- "sherlock_project/resources/data.json"
|
||||
|
||||
jobs:
|
||||
validate-modified-targets:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
# Checkout the base branch but fetch all history to avoid a second fetch call
|
||||
ref: ${{ github.base_ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.13"
|
||||
|
||||
- name: Install Poetry
|
||||
uses: abatilo/actions-poetry@v4
|
||||
with:
|
||||
poetry-version: "latest"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
poetry install --no-interaction --with dev
|
||||
|
||||
- name: Prepare JSON versions for comparison
|
||||
run: |
|
||||
# Fetch only the PR's branch head (single network call in this step)
|
||||
git fetch origin pull/${{ github.event.pull_request.number }}/head:pr
|
||||
|
||||
# Find the merge-base commit between the target branch and the PR branch
|
||||
MERGE_BASE=$(git merge-base origin/${{ github.base_ref }} pr)
|
||||
echo "Comparing PR head against merge-base commit: $MERGE_BASE"
|
||||
|
||||
# Safely extract the file from the PR's head and the merge-base commit
|
||||
git show pr:sherlock_project/resources/data.json > data.json.head
|
||||
git show $MERGE_BASE:sherlock_project/resources/data.json > data.json.base
|
||||
|
||||
# CRITICAL FIX: Overwrite the checked-out data.json with the one from the PR
|
||||
# This ensures that pytest runs against the new, updated file.
|
||||
cp data.json.head sherlock_project/resources/data.json
|
||||
|
||||
- name: Discover modified targets
|
||||
id: discover-modified
|
||||
run: |
|
||||
CHANGED=$(
|
||||
python - <<'EOF'
|
||||
import json
|
||||
import sys
|
||||
try:
|
||||
with open("data.json.base") as f: base = json.load(f)
|
||||
with open("data.json.head") as f: head = json.load(f)
|
||||
except FileNotFoundError as e:
|
||||
print(f"Error: Could not find {e.filename}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except json.JSONDecodeError as e:
|
||||
print(f"Error: Could not decode JSON from a file - {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
changed = []
|
||||
for k, v in head.items():
|
||||
if k not in base or base[k] != v:
|
||||
changed.append(k)
|
||||
|
||||
print(",".join(sorted(changed)))
|
||||
EOF
|
||||
)
|
||||
|
||||
# Preserve changelist
|
||||
echo -e ">>> Changed targets: \n$(echo $CHANGED | tr ',' '\n')"
|
||||
echo "changed_targets=$CHANGED" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Validate remote manifest against local schema
|
||||
if: steps.discover-modified.outputs.changed_targets != ''
|
||||
run: |
|
||||
poetry run pytest tests/test_manifest.py::test_validate_manifest_against_local_schema
|
||||
|
||||
# --- The rest of the steps below are unchanged ---
|
||||
|
||||
- name: Validate modified targets
|
||||
if: steps.discover-modified.outputs.changed_targets != ''
|
||||
continue-on-error: true
|
||||
run: |
|
||||
poetry run pytest -q --tb no -rA -m validate_targets -n 20 \
|
||||
--chunked-sites "${{ steps.discover-modified.outputs.changed_targets }}" \
|
||||
--junitxml=validation_results.xml
|
||||
|
||||
- name: Prepare validation summary
|
||||
if: steps.discover-modified.outputs.changed_targets != ''
|
||||
id: prepare-summary
|
||||
run: |
|
||||
summary=$(
|
||||
poetry run python devel/summarize_site_validation.py validation_results.xml || echo "Failed to generate summary of test results"
|
||||
)
|
||||
echo "$summary" > validation_summary.md
|
||||
|
||||
- name: Announce validation results
|
||||
if: steps.discover-modified.outputs.changed_targets != ''
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const body = fs.readFileSync('validation_summary.md', 'utf8');
|
||||
await github.rest.issues.createComment({
|
||||
issue_number: context.payload.pull_request.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body: body,
|
||||
});
|
||||
|
||||
- name: This step shows as ran when no modifications are found
|
||||
if: steps.discover-modified.outputs.changed_targets == ''
|
||||
run: |
|
||||
echo "No modified targets found"
|
||||
|
|
@ -1,8 +1,13 @@
|
|||
# Virtual Environment
|
||||
# Virtual Environments
|
||||
venv/
|
||||
bin/
|
||||
lib/
|
||||
pyvenv.cfg
|
||||
poetry.lock
|
||||
|
||||
# Regression Testing
|
||||
.coverage
|
||||
.tox/
|
||||
|
||||
# Editor Configurations
|
||||
.vscode/
|
||||
|
|
@ -14,6 +19,10 @@ __pycache__/
|
|||
# Pip
|
||||
src/
|
||||
|
||||
# Devel, Build, and Installation
|
||||
*.egg-info/
|
||||
dist/**
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
*.ipynb
|
||||
|
|
|
|||
|
|
@ -1,31 +0,0 @@
|
|||
# How To Contribute To Sherlock
|
||||
First off, thank you for the help!
|
||||
|
||||
There are many ways to contribute. Here is some high level grouping.
|
||||
|
||||
## Adding New Sites
|
||||
|
||||
Please look at the Wiki entry on
|
||||
[adding new sites](https://github.com/sherlock-project/sherlock/wiki/Adding-Sites-To-Sherlock)
|
||||
to understand the issues.
|
||||
|
||||
Any new sites that are added need to have a username that has been claimed, and one
|
||||
that is unclaimed documented in the site data. This allows the regression tests
|
||||
to ensure that everything is working.
|
||||
|
||||
It is required that a contributor test any new sites by either running the full tests, or running
|
||||
a site-specific query against the claimed and unclaimed usernames.
|
||||
|
||||
It is not required that a contributor run the
|
||||
[site_list.py](https://github.com/sherlock-project/sherlock/blob/master/site_list.py)
|
||||
script.
|
||||
|
||||
If there are performance problems with a site (e.g. slow to respond, unreliable uptime, ...), then
|
||||
the site may be removed from the list. The
|
||||
[removed_sites.md](https://github.com/sherlock-project/sherlock/blob/master/removed_sites.md)
|
||||
file contains sites that were included at one time in Sherlock, but had to be removed for
|
||||
one reason or another.
|
||||
|
||||
## Adding New Functionality
|
||||
|
||||
Please ensure that the content on your branch passes all tests before submitting a pull request.
|
||||
39
Dockerfile
39
Dockerfile
|
|
@ -1,26 +1,31 @@
|
|||
FROM python:3.11-slim-bullseye as build
|
||||
WORKDIR /wheels
|
||||
# Release instructions:
|
||||
# 1. Update the version tag in the Dockerfile to match the version in sherlock/__init__.py
|
||||
# 2. Update the VCS_REF tag to match the tagged version's FULL commit hash
|
||||
# 3. Build image with BOTH latest and version tags
|
||||
# i.e. `docker build -t sherlock/sherlock:0.16.0 -t sherlock/sherlock:latest .`
|
||||
|
||||
COPY requirements.txt /opt/sherlock/
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y build-essential \
|
||||
&& pip3 wheel -r /opt/sherlock/requirements.txt
|
||||
FROM python:3.12-slim-bullseye AS build
|
||||
WORKDIR /sherlock
|
||||
|
||||
FROM python:3.11-slim-bullseye
|
||||
WORKDIR /opt/sherlock
|
||||
RUN pip3 install --no-cache-dir --upgrade pip
|
||||
|
||||
ARG VCS_REF
|
||||
FROM python:3.12-slim-bullseye
|
||||
WORKDIR /sherlock
|
||||
|
||||
ARG VCS_REF= # CHANGE ME ON UPDATE
|
||||
ARG VCS_URL="https://github.com/sherlock-project/sherlock"
|
||||
ARG VERSION_TAG= # CHANGE ME ON UPDATE
|
||||
|
||||
ENV SHERLOCK_ENV=docker
|
||||
|
||||
LABEL org.label-schema.vcs-ref=$VCS_REF \
|
||||
org.label-schema.vcs-url=$VCS_URL
|
||||
org.label-schema.vcs-url=$VCS_URL \
|
||||
org.label-schema.name="Sherlock" \
|
||||
org.label-schema.version=$VERSION_TAG \
|
||||
website="https://sherlockproject.xyz"
|
||||
|
||||
COPY --from=build /wheels /wheels
|
||||
COPY . /opt/sherlock/
|
||||
RUN pip3 install --no-cache-dir sherlock-project==$VERSION_TAG
|
||||
|
||||
RUN pip3 install --no-cache-dir -r requirements.txt -f /wheels \
|
||||
&& rm -rf /wheels
|
||||
WORKDIR /sherlock
|
||||
|
||||
WORKDIR /opt/sherlock/sherlock
|
||||
|
||||
ENTRYPOINT ["python", "sherlock.py"]
|
||||
ENTRYPOINT ["sherlock"]
|
||||
|
|
|
|||
184
README.md
184
README.md
|
|
@ -1,184 +0,0 @@
|
|||
<p align=center>
|
||||
<br>
|
||||
<a href="https://sherlock-project.github.io/" target="_blank"><img src="https://user-images.githubusercontent.com/27065646/53551960-ae4dff80-3b3a-11e9-9075-cef786c69364.png"/></a>
|
||||
<br>
|
||||
<span>Hunt down social media accounts by username across <a href="https://github.com/sherlock-project/sherlock/blob/master/sites.md">social networks</a></span>
|
||||
<br>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="#installation">Installation</a>
|
||||
|
|
||||
<a href="#usage">Usage</a>
|
||||
|
|
||||
<a href="#docker-notes">Docker Notes</a>
|
||||
|
|
||||
<a href="#contributing">Contributing</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img width="70%" height="70%" src="https://user-images.githubusercontent.com/27065646/219638267-a5e11090-aa6e-4e77-87f7-0e95f6ad5978.png"/>
|
||||
</a>
|
||||
</p>
|
||||
|
||||
|
||||
## Installation
|
||||
|
||||
```console
|
||||
# clone the repo
|
||||
$ git clone https://github.com/sherlock-project/sherlock.git
|
||||
|
||||
# change the working directory to sherlock
|
||||
$ cd sherlock
|
||||
|
||||
# install the requirements
|
||||
$ python3 -m pip install -r requirements.txt
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```console
|
||||
$ python3 sherlock --help
|
||||
usage: sherlock [-h] [--version] [--verbose] [--folderoutput FOLDEROUTPUT]
|
||||
[--output OUTPUT] [--tor] [--unique-tor] [--csv] [--xlsx]
|
||||
[--site SITE_NAME] [--proxy PROXY_URL] [--json JSON_FILE]
|
||||
[--timeout TIMEOUT] [--print-all] [--print-found] [--no-color]
|
||||
[--browse] [--local] [--nsfw]
|
||||
USERNAMES [USERNAMES ...]
|
||||
|
||||
Sherlock: Find Usernames Across Social Networks (Version 0.14.3)
|
||||
|
||||
positional arguments:
|
||||
USERNAMES One or more usernames to check with social networks.
|
||||
Check similar usernames using {?} (replace to '_', '-', '.').
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--version Display version information and dependencies.
|
||||
--verbose, -v, -d, --debug
|
||||
Display extra debugging information and metrics.
|
||||
--folderoutput FOLDEROUTPUT, -fo FOLDEROUTPUT
|
||||
If using multiple usernames, the output of the results will be
|
||||
saved to this folder.
|
||||
--output OUTPUT, -o OUTPUT
|
||||
If using single username, the output of the result will be saved
|
||||
to this file.
|
||||
--tor, -t Make requests over Tor; increases runtime; requires Tor to be
|
||||
installed and in system path.
|
||||
--unique-tor, -u Make requests over Tor with new Tor circuit after each request;
|
||||
increases runtime; requires Tor to be installed and in system
|
||||
path.
|
||||
--csv Create Comma-Separated Values (CSV) File.
|
||||
--xlsx Create the standard file for the modern Microsoft Excel
|
||||
spreadsheet (xlsx).
|
||||
--site SITE_NAME Limit analysis to just the listed sites. Add multiple options to
|
||||
specify more than one site.
|
||||
--proxy PROXY_URL, -p PROXY_URL
|
||||
Make requests over a proxy. e.g. socks5://127.0.0.1:1080
|
||||
--json JSON_FILE, -j JSON_FILE
|
||||
Load data from a JSON file or an online, valid, JSON file.
|
||||
--timeout TIMEOUT Time (in seconds) to wait for response to requests (Default: 60)
|
||||
--print-all Output sites where the username was not found.
|
||||
--print-found Output sites where the username was found.
|
||||
--no-color Don't color terminal output
|
||||
--browse, -b Browse to all results on default browser.
|
||||
--local, -l Force the use of the local data.json file.
|
||||
--nsfw Include checking of NSFW sites from default list.
|
||||
```
|
||||
|
||||
To search for only one user:
|
||||
```
|
||||
python3 sherlock user123
|
||||
```
|
||||
|
||||
To search for more than one user:
|
||||
```
|
||||
python3 sherlock user1 user2 user3
|
||||
```
|
||||
|
||||
Accounts found will be stored in an individual text file with the corresponding username (e.g ```user123.txt```).
|
||||
|
||||
## Anaconda (Windows) Notes
|
||||
|
||||
If you are using Anaconda in Windows, using `python3` might not work. Use `python` instead.
|
||||
|
||||
## Docker Notes
|
||||
|
||||
If docker is installed you can build an image and run this as a container.
|
||||
|
||||
```
|
||||
docker build -t mysherlock-image .
|
||||
```
|
||||
|
||||
Once the image is built, sherlock can be invoked by running the following:
|
||||
|
||||
```
|
||||
docker run --rm -t mysherlock-image user123
|
||||
```
|
||||
|
||||
Use the following command to access the saved results:
|
||||
|
||||
```
|
||||
docker run --rm -t -v "$PWD/results:/opt/sherlock/results" mysherlock-image -o /opt/sherlock/results/text.txt user123
|
||||
```
|
||||
|
||||
Docker is instructed to create (or use) the folder `results` in the current working directory and to mount it at `/opt/sherlock/results` on the docker container by using the ```-v "$PWD/results:/opt/sherlock/results"``` options. `Sherlock` is instructed to export the result using the `-o /opt/sherlock/results/text.txt` option.
|
||||
|
||||
|
||||
### Using `docker-compose`
|
||||
|
||||
You can use the `docker-compose.yml` file from the repository and use this command:
|
||||
|
||||
```
|
||||
docker-compose run sherlock -o /opt/sherlock/results/text.txt user123
|
||||
```
|
||||
|
||||
## Contributing
|
||||
We would love to have you help us with the development of Sherlock. Each and every contribution is greatly valued!
|
||||
|
||||
Here are some things we would appreciate your help on:
|
||||
- Addition of new site support ¹
|
||||
- Bringing back site support of [sites that have been removed](removed_sites.md) in the past due to false positives
|
||||
|
||||
[1] Please look at the Wiki entry on [adding new sites](https://github.com/sherlock-project/sherlock/wiki/Adding-Sites-To-Sherlock)
|
||||
to understand the issues.
|
||||
|
||||
## Tests
|
||||
|
||||
Thank you for contributing to Sherlock!
|
||||
|
||||
Before creating a pull request with new development, please run the tests
|
||||
to ensure that everything is working great. It would also be a good idea to run the tests
|
||||
before starting development to distinguish problems between your
|
||||
environment and the Sherlock software.
|
||||
|
||||
The following is an example of the command line to run all the tests for
|
||||
Sherlock. This invocation hides the progress text that Sherlock normally
|
||||
outputs, and instead shows the verbose output of the tests.
|
||||
|
||||
```console
|
||||
$ cd sherlock/sherlock
|
||||
$ python3 -m unittest tests.all --verbose
|
||||
```
|
||||
|
||||
Note that we do currently have 100% test coverage. Unfortunately, some of
|
||||
the sites that Sherlock checks are not always reliable, so it is common
|
||||
to get response problems. Any problems in connection will show up as
|
||||
warnings in the tests instead of true errors.
|
||||
|
||||
If some sites are failing due to connection problems (site is down, in maintenance, etc)
|
||||
you can exclude them from tests by creating a `tests/.excluded_sites` file with a
|
||||
list of sites to ignore (one site name per line).
|
||||
|
||||
## Star History
|
||||
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=sherlock-project/sherlock&type=Date&theme=dark" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=sherlock-project/sherlock&type=Date" />
|
||||
<img alt="Sherlock Project Star History Chart" src="https://api.star-history.com/svg?repos=sherlock-project/sherlock&type=Date" />
|
||||
</picture>
|
||||
|
||||
## License
|
||||
|
||||
MIT © Sherlock Project<br/>
|
||||
Original Creator - [Siddharth Dushantha](https://github.com/sdushantha)
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
#!/usr/bin/env python
# This module generates the listing of supported sites which can be found in
# sites.mdx. It also organizes all the sites in alphanumeric order
import json
import os

# Path of the site manifest, relative to the repository root.
DATA_REL_URI: str = "sherlock_project/resources/data.json"

DEFAULT_ENCODING = "utf-8"

# Read the data.json file
with open(DATA_REL_URI, "r", encoding=DEFAULT_ENCODING) as data_file:
    data: dict = json.load(data_file)

# Removes schema-specific keywords for proper processing
social_networks = data.copy()
social_networks.pop('$schema', None)

# Sort the social networks in alphanumeric order
social_networks = sorted(social_networks.items())

# Make output dir where the site list will be written.
# makedirs with exist_ok=True avoids a FileExistsError when the
# script is re-run (os.mkdir would crash on the second invocation).
os.makedirs("output", exist_ok=True)

# Write the list of supported sites to sites.mdx
with open("output/sites.mdx", "w", encoding=DEFAULT_ENCODING) as site_file:
    site_file.write("---\n")
    site_file.write("title: 'List of supported sites'\n")
    site_file.write("sidebarTitle: 'Supported sites'\n")
    site_file.write("icon: 'globe'\n")
    site_file.write("description: 'Sherlock currently supports **400+** sites'\n")
    site_file.write("---\n\n")

    for social_network, info in social_networks:
        url_main = info["urlMain"]
        is_nsfw = "**(NSFW)**" if info.get("isNSFW") else ""
        site_file.write(f"1. [{social_network}]({url_main}) {is_nsfw}\n")

# Overwrite the data.json file with sorted data
with open(DATA_REL_URI, "w", encoding=DEFAULT_ENCODING) as data_file:
    sorted_data = json.dumps(data, indent=2, sort_keys=True)
    data_file.write(sorted_data)
    data_file.write("\n")  # Keep the newline after writing data

print("Finished updating supported site listing!")
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
#!/usr/bin/env python
|
||||
# This module summarizes the results of site validation tests queued by
|
||||
# workflow validate_modified_targets for presentation in Issue comments.
|
||||
|
||||
from defusedxml import ElementTree as ET
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
def summarize_junit_xml(xml_path: Path) -> str:
    """Summarize a JUnit XML report as a GitHub-flavored Markdown table.

    Parses the report queued by the validate_modified_targets workflow and
    builds one table row per site, showing the outcome of the false-positive
    (``test_false_pos``) and false-negative (``test_false_neg``) checks.

    Args:
        xml_path: Path to the JUnit XML report file.

    Returns:
        The Markdown summary as a single newline-joined string.

    Raises:
        ValueError: If the document contains no ``<testsuite>`` element.
    """
    tree = ET.parse(xml_path)
    root = tree.getroot()
    suite = root.find('testsuite')

    pass_message: str = ":heavy_check_mark: Pass"
    fail_message: str = ":x: Fail"

    if suite is None:
        raise ValueError("Invalid JUnit XML: No testsuite found")

    summary_lines: list[str] = [
        "#### Automatic validation of changes\n",
        "| Target | F+ Check | F- Check |",
        "|---|---|---|",
    ]

    # JUnit attribute values are strings; int() handles both the attribute
    # and the fallback default.
    failures = int(suite.get('failures', 0))
    errors_detected: bool = False

    # site name -> {"F+ Check": msg, "F- Check": msg}
    results: dict[str, dict[str, str]] = {}

    for testcase in suite.findall('testcase'):
        # Names look like "test_false_pos[SiteName]". Read the attribute
        # once and skip entries that are missing or not parametrized —
        # previously such entries crashed with AttributeError/IndexError.
        name = testcase.get('name')
        if not name or '[' not in name:
            continue
        parts = name.split('[')
        test_name = parts[0]
        site_name = parts[1].rstrip(']')

        failure = testcase.find('failure')
        error = testcase.find('error')
        passed = failure is None and error is None

        site_results = results.setdefault(site_name, {})
        if test_name == "test_false_neg":
            site_results['F- Check'] = pass_message if passed else fail_message
        elif test_name == "test_false_pos":
            site_results['F+ Check'] = pass_message if passed else fail_message

        if error is not None:
            errors_detected = True

    # A missing column means that check never ran for the site.
    for site_name, checks in results.items():
        summary_lines.append(f"| {site_name} | {checks.get('F+ Check', 'Error!')} | {checks.get('F- Check', 'Error!')} |")

    if failures > 0:
        summary_lines.append("\n___\n" +
            "\nFailures were detected on at least one updated target. Commits containing accuracy failures" +
            " will often not be merged (unless a rationale is provided, such as false negatives due to regional differences).")

    if errors_detected:
        summary_lines.append("\n___\n" +
            "\n**Errors were detected during validation. Please review the workflow logs.**")

    return "\n".join(summary_lines)
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point: expects exactly one argument, the JUnit XML report.
    args = sys.argv[1:]
    if len(args) != 1:
        print("Usage: summarize_site_validation.py <junit-xml-file>")
        sys.exit(1)

    report_path: Path = Path(args[0])
    if not report_path.is_file():
        print(f"Error: File '{report_path}' does not exist.")
        sys.exit(1)

    # Emit the Markdown summary on stdout for the workflow to capture.
    print(summarize_junit_xml(report_path))
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
version: '2'
|
||||
|
||||
services:
|
||||
sherlock:
|
||||
build: .
|
||||
volumes:
|
||||
- "./results:/opt/sherlock/results"
|
||||
|
|
@ -0,0 +1,143 @@
|
|||
<p align="center">
|
||||
<br>
|
||||
<a href="https://sherlock-project.github.io/" target="_blank"><img src="images/sherlock-logo.png" alt="sherlock"/></a>
|
||||
<br>
|
||||
<span>Hunt down social media accounts by username across <a href="https://sherlockproject.xyz/sites">400+ social networks</a></span>
|
||||
<br>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://sherlockproject.xyz/installation">Installation</a>
|
||||
•
|
||||
<a href="https://sherlockproject.xyz/usage">Usage</a>
|
||||
•
|
||||
<a href="https://sherlockproject.xyz/contribute">Contributing</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img width="70%" height="70%" src="images/demo.png" alt="demo"/>
|
||||
</p>
|
||||
|
||||
|
||||
## Installation
|
||||
|
||||
> [!WARNING]
|
||||
> Packages for ParrotOS and Ubuntu 24.04, maintained by a third party, appear to be __broken__.
|
||||
> Users of these systems should defer to pipx/pip or Docker.
|
||||
|
||||
| Method | Notes |
|
||||
| - | - |
|
||||
| `pipx install sherlock-project` | `pip` may be used in place of `pipx` |
|
||||
| `docker run -it --rm sherlock/sherlock` | |
|
||||
| `dnf install sherlock-project` | |
|
||||
|
||||
Community-maintained packages are available for Debian (>= 13), Ubuntu (>= 22.10), Homebrew, Kali, and BlackArch. These packages are not directly supported or maintained by the Sherlock Project.
|
||||
|
||||
See all alternative installation methods [here](https://sherlockproject.xyz/installation)
|
||||
|
||||
## General usage
|
||||
|
||||
To search for only one user:
|
||||
```bash
|
||||
sherlock user123
|
||||
```
|
||||
|
||||
To search for more than one user:
|
||||
```bash
|
||||
sherlock user1 user2 user3
|
||||
```
|
||||
|
||||
Accounts found will be stored in an individual text file with the corresponding username (e.g ```user123.txt```).
|
||||
|
||||
```console
|
||||
$ sherlock --help
|
||||
usage: sherlock [-h] [--version] [--verbose] [--folderoutput FOLDEROUTPUT]
|
||||
[--output OUTPUT] [--tor] [--unique-tor] [--csv] [--xlsx]
|
||||
[--site SITE_NAME] [--proxy PROXY_URL] [--json JSON_FILE]
|
||||
[--timeout TIMEOUT] [--print-all] [--print-found] [--no-color]
|
||||
[--browse] [--local] [--nsfw]
|
||||
USERNAMES [USERNAMES ...]
|
||||
|
||||
Sherlock: Find Usernames Across Social Networks (Version 0.14.3)
|
||||
|
||||
positional arguments:
|
||||
USERNAMES One or more usernames to check with social networks.
|
||||
Check similar usernames using {?} (replace to '_', '-', '.').
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--version Display version information and dependencies.
|
||||
--verbose, -v, -d, --debug
|
||||
Display extra debugging information and metrics.
|
||||
--folderoutput FOLDEROUTPUT, -fo FOLDEROUTPUT
|
||||
If using multiple usernames, the output of the results will be
|
||||
saved to this folder.
|
||||
--output OUTPUT, -o OUTPUT
|
||||
If using single username, the output of the result will be saved
|
||||
to this file.
|
||||
--tor, -t Make requests over Tor; increases runtime; requires Tor to be
|
||||
installed and in system path.
|
||||
--unique-tor, -u Make requests over Tor with new Tor circuit after each request;
|
||||
increases runtime; requires Tor to be installed and in system
|
||||
path.
|
||||
--csv Create Comma-Separated Values (CSV) File.
|
||||
--xlsx Create the standard file for the modern Microsoft Excel
|
||||
spreadsheet (xlsx).
|
||||
--site SITE_NAME Limit analysis to just the listed sites. Add multiple options to
|
||||
specify more than one site.
|
||||
--proxy PROXY_URL, -p PROXY_URL
|
||||
Make requests over a proxy. e.g. socks5://127.0.0.1:1080
|
||||
--json JSON_FILE, -j JSON_FILE
|
||||
Load data from a JSON file or an online, valid, JSON file.
|
||||
--timeout TIMEOUT Time (in seconds) to wait for response to requests (Default: 60)
|
||||
--print-all Output sites where the username was not found.
|
||||
--print-found Output sites where the username was found.
|
||||
--no-color Don't color terminal output
|
||||
--browse, -b Browse to all results on default browser.
|
||||
--local, -l Force the use of the local data.json file.
|
||||
--nsfw Include checking of NSFW sites from default list.
|
||||
```
|
||||
## Apify Actor Usage [](https://apify.com/netmilk/sherlock?fpr=sherlock)
|
||||
|
||||
<a href="https://apify.com/netmilk/sherlock?fpr=sherlock"><img src="https://apify.com/ext/run-on-apify.png" alt="Run Sherlock Actor on Apify" width="176" height="39" /></a>
|
||||
|
||||
You can run Sherlock in the cloud without installation using the [Sherlock Actor](https://apify.com/netmilk/sherlock?fpr=sherlock) on [Apify](https://apify.com?fpr=sherlock) free of charge.
|
||||
|
||||
``` bash
|
||||
$ echo '{"usernames":["user123"]}' | apify call -so netmilk/sherlock
|
||||
[{
|
||||
"username": "user123",
|
||||
"links": [
|
||||
"https://www.1337x.to/user/user123/",
|
||||
...
|
||||
]
|
||||
}]
|
||||
```
|
||||
|
||||
Read more about the [Sherlock Actor](../.actor/README.md), including how to use it programmatically via the Apify [API](https://apify.com/netmilk/sherlock/api?fpr=sherlock), [CLI](https://docs.apify.com/cli/?fpr=sherlock) and [JS/TS and Python SDKs](https://docs.apify.com/sdk?fpr=sherlock).
|
||||
|
||||
## Credits
|
||||
|
||||
Thank you to everyone who has contributed to Sherlock! ❤️
|
||||
|
||||
<a href="https://github.com/sherlock-project/sherlock/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?&columns=25&max=10000&&repo=sherlock-project/sherlock" alt="contributors"/>
|
||||
</a>
|
||||
|
||||
## Star History
|
||||
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=sherlock-project/sherlock&type=Date&theme=dark" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=sherlock-project/sherlock&type=Date" />
|
||||
<img alt="Sherlock Project Star History Chart" src="https://api.star-history.com/svg?repos=sherlock-project/sherlock&type=Date" />
|
||||
</picture>
|
||||
|
||||
## License
|
||||
|
||||
MIT © Sherlock Project<br/>
|
||||
Original Creator - [Siddharth Dushantha](https://github.com/sdushantha)
|
||||
|
||||
<!-- Reference Links -->
|
||||
|
||||
[ext_pypi]: https://pypi.org/project/sherlock-project/
|
||||
[ext_brew]: https://formulae.brew.sh/formula/sherlock
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 440 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 70 KiB |
|
|
@ -0,0 +1,42 @@
|
|||
<!-- This README should be a mini version at all times for use on pypi -->
|
||||
|
||||
<p align=center>
|
||||
<br>
|
||||
<a href="https://sherlock-project.github.io/" target="_blank"><img src="https://www.kali.org/tools/sherlock/images/sherlock-logo.svg" width="25%"/></a>
|
||||
<br>
|
||||
<strong><span>Hunt down social media accounts by username across <a href="https://github.com/sherlock-project/sherlock/blob/master/sites.md">400+ social networks</a></span></strong>
|
||||
<br><br>
|
||||
<span>Additional documentation can be found at our <a href="https://github.com/sherlock-project/sherlock/">GitHub repository</a></span>
|
||||
<br>
|
||||
</p>
|
||||
|
||||
## Usage
|
||||
|
||||
```console
|
||||
$ sherlock --help
|
||||
usage: sherlock [-h] [--version] [--verbose] [--folderoutput FOLDEROUTPUT]
|
||||
[--output OUTPUT] [--tor] [--unique-tor] [--csv] [--xlsx]
|
||||
[--site SITE_NAME] [--proxy PROXY_URL] [--json JSON_FILE]
|
||||
[--timeout TIMEOUT] [--print-all] [--print-found] [--no-color]
|
||||
[--browse] [--local] [--nsfw]
|
||||
USERNAMES [USERNAMES ...]
|
||||
```
|
||||
|
||||
To search for only one user:
|
||||
```bash
|
||||
$ sherlock user123
|
||||
```
|
||||
|
||||
To search for more than one user:
|
||||
```bash
|
||||
$ sherlock user1 user2 user3
|
||||
```
|
||||
<br>
|
||||
|
||||
___
|
||||
|
||||
<br>
|
||||
<p align="center">
|
||||
<img width="70%" height="70%" src="https://user-images.githubusercontent.com/27065646/219638267-a5e11090-aa6e-4e77-87f7-0e95f6ad5978.png"/>
|
||||
</a>
|
||||
</p>
|
||||
|
|
@ -84,22 +84,6 @@ As of 2020-02-23, all usernames are reported as not existing.
|
|||
},
|
||||
```
|
||||
|
||||
## Fanpop
|
||||
|
||||
As of 2020-02-23, all usernames are reported as not existing.
|
||||
|
||||
```json
|
||||
"fanpop": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "http://www.fanpop.com/",
|
||||
"rank": 9454,
|
||||
"url": "http://www.fanpop.com/fans/{}",
|
||||
"urlMain": "http://www.fanpop.com/",
|
||||
"username_claimed": "blue",
|
||||
"username_unclaimed": "noonewould_everusethis7"
|
||||
},
|
||||
```
|
||||
|
||||
## Canva
|
||||
|
||||
As of 2020-02-23, all usernames are reported as not existing.
|
||||
|
|
@ -618,7 +602,7 @@ removed
|
|||
|
||||
## Coderwall
|
||||
As of 2020-07-06, Coderwall returns false positives when checking for a username that contains a period.
|
||||
I have tried to find out what Coderwall's criteria is for a valid username, but unfortunately I have not been able to
|
||||
I have tried to find out what Coderwall's criteria is for a valid username, but unfortunately I have not been able to
|
||||
find it and because of this, the best thing we can do now is to remove it.
|
||||
```json
|
||||
"Coderwall": {
|
||||
|
|
@ -666,15 +650,15 @@ As of 2020-07-24, Zomato seems to be unstable. Majority of the time, Zomato take
|
|||
## Mixer
|
||||
As of 2020-07-22, the Mixer service has closed down.
|
||||
```json
|
||||
"mixer.com": {
|
||||
"errorType": "status_code",
|
||||
"rank": 1544,
|
||||
"url": "https://mixer.com/{}",
|
||||
"urlMain": "https://mixer.com/",
|
||||
"urlProbe": "https://mixer.com/api/v1/channels/{}",
|
||||
"username_claimed": "blue",
|
||||
"username_unclaimed": "noonewouldeverusethis7"
|
||||
},
|
||||
"mixer.com": {
|
||||
"errorType": "status_code",
|
||||
"rank": 1544,
|
||||
"url": "https://mixer.com/{}",
|
||||
"urlMain": "https://mixer.com/",
|
||||
"urlProbe": "https://mixer.com/api/v1/channels/{}",
|
||||
"username_claimed": "blue",
|
||||
"username_unclaimed": "noonewouldeverusethis7"
|
||||
},
|
||||
```
|
||||
|
||||
|
||||
|
|
@ -1273,19 +1257,6 @@ As of 2022-05-1, FanCentro returns false positives. Will later in new version of
|
|||
},
|
||||
```
|
||||
|
||||
## Codeforces
|
||||
As of 2022-05-01, Codeforces returns false positives
|
||||
```json
|
||||
"Codeforces": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://codeforces.com/",
|
||||
"url": "https://codeforces.com/profile/{}",
|
||||
"urlMain": "https://www.codeforces.com/",
|
||||
"username_claimed": "tourist",
|
||||
"username_unclaimed": "noonewouldeverusethis789"
|
||||
},
|
||||
```
|
||||
|
||||
## Smashcast
|
||||
As of 2022-05-01, Smashcast is down
|
||||
```json
|
||||
|
|
@ -1300,7 +1271,7 @@ As og 2022-05-01, Smashcast is down
|
|||
|
||||
## Countable
|
||||
|
||||
As of 2022-05-01, Countable returns false positives
|
||||
As of 2022-05-01, Countable returns false positives
|
||||
```json
|
||||
"Countable": {
|
||||
"errorType": "status_code",
|
||||
|
|
@ -1867,4 +1838,160 @@ __2024-04-24 :__ BCF seems to have gone defunct. Uncertain.
|
|||
"urlMain": "https://bitcoinforum.com",
|
||||
"username_claimed": "bitcoinforum.com"
|
||||
}
|
||||
```
|
||||
```
|
||||
|
||||
## Zhihu
|
||||
As of 24.06.2024, Zhihu returns false positives as they obfuscate the code that's returned. Checking for patterns may allow us to find a way to detect the existence of a user; this will need to be worked on later
|
||||
```json
|
||||
|
||||
"Zhihu": {
|
||||
"errorMsg": "用户不存在",
|
||||
"errorType": "message",
|
||||
"url": "https://www.zhihu.com/people/{}",
|
||||
"urlMain": "https://www.zhihu.com/",
|
||||
"username_claimed": "blue"
|
||||
}
|
||||
```
|
||||
|
||||
## Pentestit
|
||||
|
||||
As of 24.06.2024, Pentestit returns a 403. This is most likely due to a new site structure
|
||||
|
||||
```json
|
||||
"labpentestit": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://lab.pentestit.ru/{}",
|
||||
"url": "https://lab.pentestit.ru/profile/{}",
|
||||
"urlMain": "https://lab.pentestit.ru/",
|
||||
"username_claimed": "CSV"
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Euw
|
||||
__2024-06-09 :__ errorMsg detection doesn't work anymore, because the error message is included in the HTTP response body, even in a successful search
|
||||
```json
|
||||
"Euw": {
|
||||
"errorMsg": "This summoner is not registered at OP.GG. Please check spelling.",
|
||||
"errorType": "message",
|
||||
"url": "https://euw.op.gg/summoner/userName={}",
|
||||
"urlMain": "https://euw.op.gg/",
|
||||
"username_claimed": "blue"
|
||||
}
|
||||
```
|
||||
|
||||
## Etsy
|
||||
__2024-06-10 :__ Http request returns 403 forbidden, and tries to verify the connection, so it doesn't work anymore
|
||||
```json
|
||||
"Etsy": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.etsy.com/shop/{}",
|
||||
"urlMain": "https://www.etsy.com/",
|
||||
"username_claimed": "JennyKrafts"
|
||||
}
|
||||
```
|
||||
|
||||
## Alik.cz
|
||||
__2024-07-21 :__ Target is now BLACKLISTED from the default manifest due to the site receiving unnecessarily high traffic from Sherlock (by request of the site owners). This target is not permitted to be reactivated. Inclusion in unrelated manifests is not impacted, but it is discouraged.
|
||||
|
||||
## 8tracks
|
||||
__2025-02-02 :__ Might be dead again. Nobody knows for sure.
|
||||
```json
|
||||
"8tracks": {
|
||||
"errorType": "message",
|
||||
"errorMsg": "\"available\":true",
|
||||
"headers": {
|
||||
"Accept-Language": "en-US,en;q=0.5"
|
||||
},
|
||||
"url": "https://8tracks.com/{}",
|
||||
"urlProbe": "https://8tracks.com/users/check_username?login={}&format=jsonh",
|
||||
"urlMain": "https://8tracks.com/",
|
||||
"username_claimed": "blue"
|
||||
}
|
||||
```
|
||||
|
||||
## Shpock
|
||||
__2025-02-02 :__ Can likely be added back with a new endpoint (source username availability endpoint from mobile app reg flow?)
|
||||
```json
|
||||
"Shpock": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.shpock.com/shop/{}/items",
|
||||
"urlMain": "https://www.shpock.com/",
|
||||
"username_claimed": "user"
|
||||
}
|
||||
```
|
||||
|
||||
## Twitch
|
||||
__2025-02-02 :__
|
||||
```json
|
||||
"Twitch": {
|
||||
"errorType": "message",
|
||||
"errorMsg": "components.availability-tracking.warn-unavailable.component",
|
||||
"url": "https://www.twitch.tv/{}",
|
||||
"urlMain": "https://www.twitch.tv/",
|
||||
"urlProbe": "https://m.twitch.tv/{}",
|
||||
"username_claimed": "jenny"
|
||||
}
|
||||
```
|
||||
|
||||
## Fiverr
|
||||
__2025-02-02 :__ Fiverr added CSRF protections that messed with this test
|
||||
```json
|
||||
"Fiverr": {
|
||||
"errorMsg": "\"status\":\"success\"",
|
||||
"errorType": "message",
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
"Accept-Language": "en-US,en;q=0.9"
|
||||
},
|
||||
"regexCheck": "^[A-Za-z][A-Za-z\\d_]{5,14}$",
|
||||
"request_method": "POST",
|
||||
"request_payload": {
|
||||
"username": "{}"
|
||||
},
|
||||
"url": "https://www.fiverr.com/{}",
|
||||
"urlMain": "https://www.fiverr.com/",
|
||||
"urlProbe": "https://www.fiverr.com/validate_username",
|
||||
"username_claimed": "blueman"
|
||||
}
|
||||
```
|
||||
|
||||
## BabyRU
|
||||
__2025-02-02 :__ Just being problematic (possibly related to errorMsg encoding?)
|
||||
```json
|
||||
"babyRU": {
|
||||
"errorMsg": [
|
||||
"\u0421\u0442\u0440\u0430\u043d\u0438\u0446\u0430, \u043a\u043e\u0442\u043e\u0440\u0443\u044e \u0432\u044b \u0438\u0441\u043a\u0430\u043b\u0438, \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d\u0430",
|
||||
"Доступ с вашего IP-адреса временно ограничен"
|
||||
],
|
||||
"errorType": "message",
|
||||
"url": "https://www.baby.ru/u/{}/",
|
||||
"urlMain": "https://www.baby.ru/",
|
||||
"username_claimed": "blue"
|
||||
}
|
||||
```
|
||||
|
||||
## v0.dev
|
||||
__2025-02-16 :__ Unsure if any way to view profiles exists now
|
||||
```json
|
||||
"v0.dev": {
|
||||
"errorType": "message",
|
||||
"errorMsg": "<title>v0 by Vercel</title>",
|
||||
"url": "https://v0.dev/{}",
|
||||
"urlMain": "https://v0.dev",
|
||||
"username_claimed": "t3dotgg"
|
||||
}
|
||||
```
|
||||
|
||||
## TorrentGalaxy
|
||||
__2025-07-06 :__ Site appears to have gone offline in March and hasn't come back
|
||||
```json
|
||||
"TorrentGalaxy": {
|
||||
"errorMsg": "<title>TGx:Can't show details</title>",
|
||||
"errorType": "message",
|
||||
"regexCheck": "^[A-Za-z0-9]{3,15}$",
|
||||
"url": "https://torrentgalaxy.to/profile/{}",
|
||||
"urlMain": "https://torrentgalaxy.to/",
|
||||
"username_claimed": "GalaxyRG"
|
||||
},
|
||||
```
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 139 KiB |
|
|
@ -0,0 +1,68 @@
|
|||
[build-system]
|
||||
requires = [ "poetry-core>=1.2.0" ]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
# poetry-core 1.8 not available in .fc39. Can upgrade to 1.8.0 at .fc39 EOL
|
||||
|
||||
[tool.poetry-version-plugin]
|
||||
source = "init"
|
||||
|
||||
[tool.poetry]
|
||||
name = "sherlock-project"
|
||||
version = "0.16.0"
|
||||
description = "Hunt down social media accounts by username across social networks"
|
||||
license = "MIT"
|
||||
authors = [
|
||||
"Siddharth Dushantha <siddharth.dushantha@gmail.com>"
|
||||
]
|
||||
maintainers = [
|
||||
"Paul Pfeister <code@pfeister.dev>",
|
||||
"Matheus Felipe <matheusfelipeog@protonmail.com>",
|
||||
"Sondre Karlsen Dyrnes <sondre@villdyr.no>"
|
||||
]
|
||||
readme = "docs/pyproject/README.md"
|
||||
packages = [ { include = "sherlock_project"} ]
|
||||
keywords = [ "osint", "reconnaissance", "information gathering" ]
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: Information Technology",
|
||||
"Natural Language :: English",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Topic :: Security"
|
||||
]
|
||||
homepage = "https://sherlockproject.xyz/"
|
||||
repository = "https://github.com/sherlock-project/sherlock"
|
||||
|
||||
|
||||
[tool.poetry.urls]
|
||||
"Bug Tracker" = "https://github.com/sherlock-project/sherlock/issues"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.9"
|
||||
certifi = ">=2019.6.16"
|
||||
colorama = "^0.4.1"
|
||||
PySocks = "^1.7.0"
|
||||
requests = "^2.22.0"
|
||||
requests-futures = "^1.0.0"
|
||||
stem = "^1.8.0"
|
||||
pandas = "^2.2.1"
|
||||
openpyxl = "^3.0.10"
|
||||
tomli = "^2.2.1"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
jsonschema = "^4.0.0"
|
||||
rstr = "^3.2.2"
|
||||
pytest = "^8.4.2"
|
||||
pytest-xdist = "^3.8.0"
|
||||
|
||||
|
||||
[tool.poetry.group.ci.dependencies]
|
||||
defusedxml = "^0.7.1"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
sherlock = 'sherlock_project.sherlock:main'
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
[pytest]
|
||||
addopts = --strict-markers -m "not validate_targets"
|
||||
markers =
|
||||
online: mark tests are requiring internet access.
|
||||
validate_targets: mark tests for sweeping manifest validation (sends many requests).
|
||||
validate_targets_fp: validate_targets, false positive tests only.
|
||||
validate_targets_fn: validate_targets, false negative tests only.
|
||||
|
|
@ -1,860 +0,0 @@
|
|||
{
|
||||
"AdobeForums": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://forums.adobe.com/people/{}",
|
||||
"urlMain": "https://forums.adobe.com/",
|
||||
"username_claimed": "jack"
|
||||
},
|
||||
"AngelList": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://angel.co/u/{}",
|
||||
"urlMain": "https://angel.co/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Basecamp": {
|
||||
"errorMsg": "The account you were looking for doesn't exist",
|
||||
"errorType": "message",
|
||||
"url": "https://{}.basecamphq.com",
|
||||
"urlMain": "https://basecamp.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"BlackPlanet": {
|
||||
"errorMsg": "My Hits",
|
||||
"errorType": "message",
|
||||
"url": "http://blackplanet.com/{}",
|
||||
"urlMain": "http://blackplanet.com/"
|
||||
},
|
||||
"Canva": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://www.canva.com/{}",
|
||||
"url": "https://www.canva.com/{}",
|
||||
"urlMain": "https://www.canva.com/",
|
||||
"username_claimed": "jenny"
|
||||
},
|
||||
"Codementor": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.codementor.io/@{}",
|
||||
"urlMain": "https://www.codementor.io/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"EVE Online": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://eveonline.com",
|
||||
"url": "https://evewho.com/pilot/{}/",
|
||||
"urlMain": "https://eveonline.com",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"fanpop": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "http://www.fanpop.com/",
|
||||
"url": "http://www.fanpop.com/fans/{}",
|
||||
"urlMain": "http://www.fanpop.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Fotolog": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://fotolog.com/{}",
|
||||
"urlMain": "https://fotolog.com/"
|
||||
},
|
||||
"Foursquare": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://foursquare.com/{}",
|
||||
"urlMain": "https://foursquare.com/",
|
||||
"username_claimed": "dens"
|
||||
},
|
||||
"gpodder.net": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://gpodder.net/user/{}",
|
||||
"urlMain": "https://gpodder.net/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Investing.com": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.investing.com/traders/{}",
|
||||
"urlMain": "https://www.investing.com/",
|
||||
"username_claimed": "jenny"
|
||||
},
|
||||
"Khan Academy": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.khanacademy.org/profile/{}",
|
||||
"urlMain": "https://www.khanacademy.org/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"KiwiFarms": {
|
||||
"errorMsg": "The specified member cannot be found",
|
||||
"errorType": "message",
|
||||
"url": "https://kiwifarms.net/members/?username={}",
|
||||
"urlMain": "https://kiwifarms.net/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"NPM-Package": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.npmjs.com/package/{}",
|
||||
"urlMain": "https://www.npmjs.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Pexels": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.pexels.com/@{}",
|
||||
"urlMain": "https://www.pexels.com/",
|
||||
"username_claimed": "bruno"
|
||||
},
|
||||
"Pixabay": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://pixabay.com/en/users/{}",
|
||||
"urlMain": "https://pixabay.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"PowerShell Gallery": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.powershellgallery.com/profiles/{}",
|
||||
"urlMain": "https://www.powershellgallery.com",
|
||||
"username_claimed": "powershellteam"
|
||||
},
|
||||
"RamblerDating": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://dating.rambler.ru/page/{}",
|
||||
"url": "https://dating.rambler.ru/page/{}",
|
||||
"urlMain": "https://dating.rambler.ru/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Shockwave": {
|
||||
"errorMsg": "Oh no! You just finished all of the games on the internet!",
|
||||
"errorType": "message",
|
||||
"url": "http://www.shockwave.com/member/profiles/{}.jsp",
|
||||
"urlMain": "http://www.shockwave.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"StreamMe": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.stream.me/{}",
|
||||
"urlMain": "https://www.stream.me/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Teknik": {
|
||||
"errorMsg": "The user does not exist",
|
||||
"errorType": "message",
|
||||
"url": "https://user.teknik.io/{}",
|
||||
"urlMain": "https://teknik.io/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"YandexMarket": {
|
||||
"errorMsg": "\u0422\u0443\u0442 \u043d\u0438\u0447\u0435\u0433\u043e \u043d\u0435\u0442",
|
||||
"errorType": "message",
|
||||
"url": "https://market.yandex.ru/user/{}/achievements",
|
||||
"urlMain": "https://market.yandex.ru/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Insanejournal": {
|
||||
"errorMsg": "Unknown user",
|
||||
"errorType": "message",
|
||||
"url": "http://{}.insanejournal.com/profile",
|
||||
"urlMain": "insanejournal.com",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Trip": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.trip.skyscanner.com/user/{}",
|
||||
"urlMain": "https://www.trip.skyscanner.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"SportsTracker": {
|
||||
"errorUrl": "https://www.sports-tracker.com/page-not-found",
|
||||
"errorType": "response_url",
|
||||
"url": "https://www.sports-tracker.com/view_profile/{}",
|
||||
"urlMain": "https://www.sports-tracker.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"boingboing.net": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://bbs.boingboing.net/u/{}",
|
||||
"urlMain": "https://boingboing.net/",
|
||||
"username_claimed": "admin"
|
||||
},
|
||||
"elwoRU": {
|
||||
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
|
||||
"errorType": "message",
|
||||
"url": "https://elwo.ru/index/8-0-{}",
|
||||
"urlMain": "https://elwo.ru/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"ingvarr.net.ru": {
|
||||
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
|
||||
"errorType": "message",
|
||||
"url": "http://ingvarr.net.ru/index/8-0-{}",
|
||||
"urlMain": "http://ingvarr.net.ru/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"Redsun.tf": {
|
||||
"errorMsg": "The specified member cannot be found",
|
||||
"errorType": "message",
|
||||
"url": "https://forum.redsun.tf/members/?username={}",
|
||||
"urlMain": "https://redsun.tf/",
|
||||
"username_claimed": "dan"
|
||||
},
|
||||
"CreativeMarket": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://creativemarket.com/users/{}",
|
||||
"urlMain": "https://creativemarket.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"pvpru": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://pvpru.com/board/member.php?username={}&tab=aboutme#aboutme",
|
||||
"urlMain": "https://pvpru.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"easyen": {
|
||||
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
|
||||
"errorType": "message",
|
||||
"url": "https://easyen.ru/index/8-0-{}",
|
||||
"urlMain": "https://easyen.ru/",
|
||||
"username_claimed": "wd"
|
||||
},
|
||||
"pedsovet": {
|
||||
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
|
||||
"errorType": "message",
|
||||
"url": "http://pedsovet.su/index/8-0-{}",
|
||||
"urlMain": "http://pedsovet.su/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"radioskot": {
|
||||
"errorMsg": "\u041f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d",
|
||||
"errorType": "message",
|
||||
"url": "https://radioskot.ru/index/8-0-{}",
|
||||
"urlMain": "https://radioskot.ru/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"Coderwall": {
|
||||
"errorMsg": "404! Our feels when that url is used",
|
||||
"errorType": "message",
|
||||
"url": "https://coderwall.com/{}",
|
||||
"urlMain": "https://coderwall.com/",
|
||||
"username_claimed": "jenny"
|
||||
},
|
||||
"TamTam": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://tamtam.chat/",
|
||||
"url": "https://tamtam.chat/{}",
|
||||
"urlMain": "https://tamtam.chat/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Zomato": {
|
||||
"errorType": "status_code",
|
||||
"headers": {
|
||||
"Accept-Language": "en-US,en;q=0.9"
|
||||
},
|
||||
"url": "https://www.zomato.com/pl/{}/foodjourney",
|
||||
"urlMain": "https://www.zomato.com/",
|
||||
"username_claimed": "deepigoyal"
|
||||
},
|
||||
"mixer.com": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://mixer.com/{}",
|
||||
"urlMain": "https://mixer.com/",
|
||||
"urlProbe": "https://mixer.com/api/v1/channels/{}",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"KanoWorld": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://api.kano.me/progress/user/{}",
|
||||
"urlMain": "https://world.kano.me/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"YandexCollection": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://yandex.ru/collections/user/{}/",
|
||||
"urlMain": "https://yandex.ru/collections/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"PayPal": {
|
||||
"errorMsg": "<meta name=\"twitter:title\" content=\"Get your very own PayPal.Me link\" />",
|
||||
"errorType": "message",
|
||||
"url": "https://www.paypal.com/paypalme/{}",
|
||||
"headers": {
|
||||
"User-Agent": ""
|
||||
},
|
||||
"urlMain": "https://www.paypal.me/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"ImageShack": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://imageshack.us/",
|
||||
"url": "https://imageshack.us/user/{}",
|
||||
"urlMain": "https://imageshack.us/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Aptoide": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://{}.en.aptoide.com/",
|
||||
"urlMain": "https://en.aptoide.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Crunchyroll": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.crunchyroll.com/user/{}",
|
||||
"urlMain": "https://www.crunchyroll.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"T-MobileSupport": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://support.t-mobile.com/people/{}",
|
||||
"urlMain": "https://support.t-mobile.com",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"OpenCollective": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://opencollective.com/{}",
|
||||
"urlMain": "https://opencollective.com/",
|
||||
"username_claimed": "sindresorhus"
|
||||
},
|
||||
"SegmentFault": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://segmentfault.com/u/{}",
|
||||
"urlMain": "https://segmentfault.com/",
|
||||
"username_claimed": "bule"
|
||||
},
|
||||
"Viadeo": {
|
||||
"errorType": "status_code",
|
||||
"url": "http://fr.viadeo.com/en/profile/{}",
|
||||
"urlMain": "http://fr.viadeo.com/en/",
|
||||
"username_claimed": "franck.patissier"
|
||||
},
|
||||
"MeetMe": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://www.meetme.com/",
|
||||
"url": "https://www.meetme.com/{}",
|
||||
"urlMain": "https://www.meetme.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"tracr.co": {
|
||||
"errorMsg": "No search results",
|
||||
"errorType": "message",
|
||||
"regexCheck": "^[A-Za-z0-9]{2,32}$",
|
||||
"url": "https://tracr.co/users/1/{}",
|
||||
"urlMain": "https://tracr.co/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Taringa": {
|
||||
"errorType": "status_code",
|
||||
"regexCheck": "^[^.]*$",
|
||||
"url": "https://www.taringa.net/{}",
|
||||
"urlMain": "https://taringa.net/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Photobucket": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://photobucket.com/user/{}/library",
|
||||
"urlMain": "https://photobucket.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"4pda": {
|
||||
"errorMsg": "[1,false,0]",
|
||||
"errorType": "message",
|
||||
"url": "https://4pda.ru/forum/index.php?act=search&source=pst&noform=1&username={}",
|
||||
"urlMain": "https://4pda.ru/",
|
||||
"urlProbe": "https://4pda.ru/forum/index.php?act=auth&action=chkname&login={}",
|
||||
"username_claimed": "green"
|
||||
},
|
||||
"PokerStrategy": {
|
||||
"errorType": "status_code",
|
||||
"url": "http://www.pokerstrategy.net/user/{}/profile/",
|
||||
"urlMain": "http://www.pokerstrategy.net",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Filmogs": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.filmo.gs/users/{}",
|
||||
"urlMain": "https://www.filmo.gs/",
|
||||
"username_claimed": "cupparober"
|
||||
},
|
||||
"500px": {
|
||||
"errorMsg": "No message available",
|
||||
"errorType": "message",
|
||||
"url": "https://500px.com/p/{}",
|
||||
"urlMain": "https://500px.com/",
|
||||
"urlProbe": "https://api.500px.com/graphql?operationName=ProfileRendererQuery&variables=%7B%22username%22%3A%22{}%22%7D&extensions=%7B%22persistedQuery%22%3A%7B%22version%22%3A1%2C%22sha256Hash%22%3A%224d02ff5c13927a3ac73b3eef306490508bc765956940c31051468cf30402a503%22%7D%7D",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Badoo": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://badoo.com/profile/{}",
|
||||
"urlMain": "https://badoo.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Pling": {
|
||||
"errorMsg": "Resource not found",
|
||||
"errorType": "message",
|
||||
"url": "https://www.pling.com/u/{}/",
|
||||
"urlMain": "https://www.pling.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Realmeye": {
|
||||
"errorMsg": "Sorry, but we either:",
|
||||
"errorType": "message",
|
||||
"url": "https://www.realmeye.com/player/{}",
|
||||
"urlMain": "https://www.realmeye.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Travellerspoint": {
|
||||
"errorMsg": "Wooops. Sorry!",
|
||||
"errorType": "message",
|
||||
"url": "https://www.travellerspoint.com/users/{}",
|
||||
"urlMain": "https://www.travellerspoint.com",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"GDProfiles": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://gdprofiles.com/{}",
|
||||
"urlMain": "https://gdprofiles.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"AllTrails": {
|
||||
"errorMsg": "class=\"home index\"",
|
||||
"errorType": "message",
|
||||
"url": "https://www.alltrails.com/members/{}",
|
||||
"urlMain": "https://www.alltrails.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Cent": {
|
||||
"errorMsg": "<title>Cent</title>",
|
||||
"errorType": "message",
|
||||
"url": "https://beta.cent.co/@{}",
|
||||
"urlMain": "https://cent.co/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Anobii": {
|
||||
"errorType": "response_url",
|
||||
"url": "https://www.anobii.com/{}/profile",
|
||||
"urlMain": "https://www.anobii.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Kali community": {
|
||||
"errorMsg": "This user has not registered and therefore does not have a profile to view.",
|
||||
"errorType": "message",
|
||||
"url": "https://forums.kali.org/member.php?username={}",
|
||||
"urlMain": "https://forums.kali.org/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"NameMC (Minecraft.net skins)": {
|
||||
"errorMsg": "Profiles: 0 results",
|
||||
"errorType": "message",
|
||||
"url": "https://namemc.com/profile/{}",
|
||||
"urlMain": "https://namemc.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Steamid": {
|
||||
"errorMsg": "<link rel=\"canonical\" href=\"https://steamid.uk\" />",
|
||||
"errorType": "message",
|
||||
"url": "https://steamid.uk/profile/{}",
|
||||
"urlMain": "https://steamid.uk/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"TripAdvisor": {
|
||||
"errorMsg": "This page is on vacation\u2026",
|
||||
"errorType": "message",
|
||||
"url": "https://tripadvisor.com/members/{}",
|
||||
"urlMain": "https://tripadvisor.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"House-Mixes.com": {
|
||||
"errorMsg": "Profile Not Found",
|
||||
"errorType": "message",
|
||||
"regexCheck": "^[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*$",
|
||||
"url": "https://www.house-mixes.com/profile/{}",
|
||||
"urlMain": "https://www.house-mixes.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Quora": {
|
||||
"errorMsg": "Page Not Found",
|
||||
"errorType": "message",
|
||||
"url": "https://www.quora.com/profile/{}",
|
||||
"urlMain": "https://www.quora.com/",
|
||||
"username_claimed": "Matt-Riggsby"
|
||||
},
|
||||
"SparkPeople": {
|
||||
"errorMsg": "We couldn't find that user",
|
||||
"errorType": "message",
|
||||
"url": "https://www.sparkpeople.com/mypage.asp?id={}",
|
||||
"urlMain": "https://www.sparkpeople.com",
|
||||
"username_claimed": "adam"
|
||||
},
|
||||
"Cloob": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.cloob.com/name/{}",
|
||||
"urlMain": "https://www.cloob.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"TM-Ladder": {
|
||||
"errorMsg": "player unknown or invalid",
|
||||
"errorType": "message",
|
||||
"url": "http://en.tm-ladder.com/{}_rech.php",
|
||||
"urlMain": "http://en.tm-ladder.com/index.php",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"plug.dj": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://plug.dj/@/{}",
|
||||
"urlMain": "https://plug.dj/",
|
||||
"username_claimed": "plug-dj-rock"
|
||||
},
|
||||
"Facenama": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://facenama.com/404.html",
|
||||
"regexCheck": "^[-a-zA-Z0-9_]+$",
|
||||
"url": "https://facenama.com/{}",
|
||||
"urlMain": "https://facenama.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Designspiration": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.designspiration.net/{}/",
|
||||
"urlMain": "https://www.designspiration.net/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"CapFriendly": {
|
||||
"errorMsg": "<div class=\"err show p5\">No results found</div>",
|
||||
"errorType": "message",
|
||||
"regexCheck": "^[a-zA-Z][a-zA-Z0-9_]{2,79}$",
|
||||
"url": "https://www.capfriendly.com/users/{}",
|
||||
"urlMain": "https://www.capfriendly.com/",
|
||||
"username_claimed": "thisactuallyexists"
|
||||
},
|
||||
"Gab": {
|
||||
"errorMsg": "The page you are looking for isn't here.",
|
||||
"errorType": "message",
|
||||
"url": "https://gab.com/{}",
|
||||
"urlMain": "https://gab.com",
|
||||
"username_claimed": "a"
|
||||
},
|
||||
"FanCentro": {
|
||||
"errorMsg": "var environment",
|
||||
"errorType": "message",
|
||||
"url": "https://fancentro.com/{}",
|
||||
"urlMain": "https://fancentro.com/",
|
||||
"username_claimed": "nielsrosanna"
|
||||
},
|
||||
"Codeforces": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://codeforces.com/",
|
||||
"url": "https://codeforces.com/profile/{}",
|
||||
"urlMain": "https://www.codeforces.com/",
|
||||
"username_claimed": "tourist"
|
||||
},
|
||||
"Smashcast": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.smashcast.tv/api/media/live/{}",
|
||||
"urlMain": "https://www.smashcast.tv/",
|
||||
"username_claimed": "hello"
|
||||
},
|
||||
"Countable": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.countable.us/{}",
|
||||
"urlMain": "https://www.countable.us/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Spotify": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://open.spotify.com/user/{}",
|
||||
"urlMain": "https://open.spotify.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Raidforums": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://raidforums.com/User-{}",
|
||||
"urlMain": "https://raidforums.com/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"Pinterest": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.pinterest.com/{}/",
|
||||
"urlMain": "https://www.pinterest.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"PCPartPicker": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://pcpartpicker.com/user/{}",
|
||||
"urlMain": "https://pcpartpicker.com",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"eBay.com": {
|
||||
"errorMsg": "The User ID you entered was not found. Please check the User ID and try again.",
|
||||
"errorType": "message",
|
||||
"url": "https://www.ebay.com/usr/{}",
|
||||
"urlMain": "https://www.ebay.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"eBay.de": {
|
||||
"errorMsg": "Der eingegebene Nutzername wurde nicht gefunden. Bitte pr\u00fcfen Sie den Nutzernamen und versuchen Sie es erneut.",
|
||||
"errorType": "message",
|
||||
"url": "https://www.ebay.de/usr/{}",
|
||||
"urlMain": "https://www.ebay.de/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Ghost": {
|
||||
"errorMsg": "Domain Error",
|
||||
"errorType": "message",
|
||||
"url": "https://{}.ghost.io/",
|
||||
"urlMain": "https://ghost.org/",
|
||||
"username_claimed": "troyhunt"
|
||||
},
|
||||
"Atom Discussions": {
|
||||
"errorMsg": "Oops! That page doesn\u2019t exist or is private.",
|
||||
"errorType": "message",
|
||||
"url": "https://discuss.atom.io/u/{}/summary",
|
||||
"urlMain": "https://discuss.atom.io",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Gam1ng": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://gam1ng.com.br/user/{}",
|
||||
"urlMain": "https://gam1ng.com.br",
|
||||
"username_claimed": "PinKgirl"
|
||||
},
|
||||
"OGUsers": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://ogusers.com/{}",
|
||||
"urlMain": "https://ogusers.com/",
|
||||
"username_claimed": "ogusers"
|
||||
},
|
||||
"Otzovik": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://otzovik.com/profile/{}",
|
||||
"urlMain": "https://otzovik.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"radio_echo_msk": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://echo.msk.ru/users/{}",
|
||||
"urlMain": "https://echo.msk.ru/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Ello": {
|
||||
"errorMsg": "We couldn't find the page you're looking for",
|
||||
"errorType": "message",
|
||||
"url": "https://ello.co/{}",
|
||||
"urlMain": "https://ello.co/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"GitHub Support Community": {
|
||||
"errorMsg": "Oops! That page doesn\u2019t exist or is private.",
|
||||
"errorType": "message",
|
||||
"url": "https://github.community/u/{}/summary",
|
||||
"urlMain": "https://github.community",
|
||||
"username_claimed": "jperl"
|
||||
},
|
||||
"GuruShots": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://gurushots.com/{}/photos",
|
||||
"urlMain": "https://gurushots.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Google Developer": {
|
||||
"errorMsg": "Sorry, the profile was not found.",
|
||||
"errorType": "message",
|
||||
"url": "https://g.dev/{}",
|
||||
"urlMain": "https://g.dev/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"mastodon.technology": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://mastodon.technology/@{}",
|
||||
"urlMain": "https://mastodon.xyz/",
|
||||
"username_claimed": "ashfurrow"
|
||||
},
|
||||
"zoomit": {
|
||||
"errorMsg": "\u0645\u062a\u0627\u0633\u0641\u0627\u0646\u0647 \u0635\u0641\u062d\u0647 \u06cc\u0627\u0641\u062a \u0646\u0634\u062f",
|
||||
"errorType": "message",
|
||||
"url": "https://www.zoomit.ir/user/{}",
|
||||
"urlMain": "https://www.zoomit.ir",
|
||||
"username_claimed": "kossher"
|
||||
},
|
||||
"Facebook": {
|
||||
"errorType": "status_code",
|
||||
"regexCheck": "^[a-zA-Z0-9\\.]{3,49}(?<!\\.com|\\.org|\\.net)$",
|
||||
"url": "https://www.facebook.com/{}",
|
||||
"urlMain": "https://www.facebook.com/",
|
||||
"urlProbe": "https://www.facebook.com/{}/videos/",
|
||||
"username_claimed": "hackerman"
|
||||
},
|
||||
"BinarySearch": {
|
||||
"errorMsg": "{}",
|
||||
"errorType": "message",
|
||||
"regexCheck": "^[a-zA-Z0-9-_]{1,15}$",
|
||||
"url": "https://binarysearch.io/@/{}",
|
||||
"urlMain": "https://binarysearch.io/",
|
||||
"urlProbe": "https://binarysearch.io/api/users/{}/profile",
|
||||
"username_claimed": "Eyes_Wide_Shut"
|
||||
},
|
||||
"Arduino": {
|
||||
"errorType": "status_code",
|
||||
"regexCheck": "^(?![_-])[A-Za-z0-9_-]{3,}$",
|
||||
"url": "https://create.arduino.cc/projecthub/{}",
|
||||
"urlMain": "https://www.arduino.cc/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"koo": {
|
||||
"errorMsg": "This profile does not exist",
|
||||
"errorType": "message",
|
||||
"url": "https://www.kooapp.com/profile/{}",
|
||||
"urlMain": "https://www.kooapp.com",
|
||||
"urlProbe": "https://www.kooapp.com/apiV1/users/handle/{}/valid",
|
||||
"username_claimed": "john"
|
||||
},
|
||||
"We Heart It": {
|
||||
"errorMsg": "Oops! You've landed on a moving target!",
|
||||
"errorType": "message",
|
||||
"url": "https://weheartit.com/{}",
|
||||
"urlMain": "https://weheartit.com/",
|
||||
"username_claimed": "ventivogue"
|
||||
},
|
||||
"Tinder": {
|
||||
"errorMsg": [
|
||||
"<title data-react-helmet=\"true\">Tinder | Dating, Make Friends & Meet New People</title>",
|
||||
"<title data-react-helmet=\"true\">Tinder | Match. Chat. Date.</title>"
|
||||
],
|
||||
"errorType": "message",
|
||||
"url": "https://www.tinder.com/@{}",
|
||||
"urlMain": "https://tinder.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Coil": {
|
||||
"errorMsg": "User not found",
|
||||
"errorType": "message",
|
||||
"request_method": "POST",
|
||||
"request_payload": {
|
||||
"operationName": "getCreator",
|
||||
"query": "query getCreator($userShortName:String!){getCreator(userShortName:$userShortName){id}}",
|
||||
"variables": {
|
||||
"userShortName": "{}"
|
||||
}
|
||||
},
|
||||
"url": "https://coil.com/u/{}",
|
||||
"urlMain": "https://coil.com/",
|
||||
"urlProbe": "https://coil.com/gateway",
|
||||
"username_claimed": "adam"
|
||||
},
|
||||
"OnlyFans": {
|
||||
"errorType": "status_code",
|
||||
"isNSFW": true,
|
||||
"url": "https://onlyfans.com/{}",
|
||||
"urlMain": "https://onlyfans.com/",
|
||||
"urlProbe": "https://onlyfans.com/api2/v2/users/{}",
|
||||
"username_claimed": "theemilylynne"
|
||||
},
|
||||
"OK": {
|
||||
"errorType": "status_code",
|
||||
"regexCheck": "^[a-zA-Z][a-zA-Z0-9_.-]*$",
|
||||
"url": "https://ok.ru/{}",
|
||||
"urlMain": "https://ok.ru/",
|
||||
"username_claimed": "ok"
|
||||
},
|
||||
"forumhouseRU": {
|
||||
"errorMsg": "\u0423\u043a\u0430\u0437\u0430\u043d\u043d\u044b\u0439 \u043f\u043e\u043b\u044c\u0437\u043e\u0432\u0430\u0442\u0435\u043b\u044c \u043d\u0435 \u043d\u0430\u0439\u0434\u0435\u043d. \u041f\u043e\u0436\u0430\u043b\u0443\u0439\u0441\u0442\u0430, \u0432\u0432\u0435\u0434\u0438\u0442\u0435 \u0434\u0440\u0443\u0433\u043e\u0435 \u0438\u043c\u044f.",
|
||||
"errorType": "message",
|
||||
"url": "https://www.forumhouse.ru/members/?username={}",
|
||||
"urlMain": "https://www.forumhouse.ru/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"Enjin": {
|
||||
"errorMsg": "Yikes, there seems to have been an error. We've taken note and will check out the problem right away!",
|
||||
"errorType": "message",
|
||||
"url": "https://www.enjin.com/profile/{}",
|
||||
"urlMain": "https://www.enjin.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"IRL": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.irl.com/{}",
|
||||
"urlMain": "https://www.irl.com/",
|
||||
"username_claimed": "hacker"
|
||||
},
|
||||
"Munzee": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://www.munzee.com/m/{}",
|
||||
"urlMain": "https://www.munzee.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Quizlet": {
|
||||
"errorMsg": "Page Unavailable",
|
||||
"errorType": "message",
|
||||
"url": "https://quizlet.com/{}",
|
||||
"urlMain": "https://quizlet.com",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"GunsAndAmmo": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://forums.gunsandammo.com/profile/{}",
|
||||
"urlMain": "https://gunsandammo.com/",
|
||||
"username_claimed": "adam"
|
||||
},
|
||||
"TikTok": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://tiktok.com/@{}",
|
||||
"urlMain": "https://tiktok.com/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"Lolchess": {
|
||||
"errorMsg": "No search results",
|
||||
"errorType": "message",
|
||||
"url": "https://lolchess.gg/profile/na/{}",
|
||||
"urlMain": "https://lolchess.gg/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Virgool": {
|
||||
"errorMsg": "\u06f4\u06f0\u06f4",
|
||||
"errorType": "message",
|
||||
"url": "https://virgool.io/@{}",
|
||||
"urlMain": "https://virgool.io/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Whonix Forum": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://forums.whonix.org/u/{}/summary",
|
||||
"urlMain": "https://forums.whonix.org/",
|
||||
"username_claimed": "red"
|
||||
},
|
||||
"ebio.gg": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://ebio.gg/{}",
|
||||
"urlMain": "https://ebio.gg",
|
||||
"username_claimed": "dev"
|
||||
},
|
||||
"metacritic": {
|
||||
"errorMsg": "User not found",
|
||||
"errorType": "message",
|
||||
"regexCheck": "^(?![-_].)[A-Za-z0-9-_]{3,15}$",
|
||||
"url": "https://www.metacritic.com/user/{}",
|
||||
"urlMain": "https://www.metacritic.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"Oracle Communities": {
|
||||
"errorType": "status_code",
|
||||
"url": "https://community.oracle.com/people/{}",
|
||||
"urlMain": "https://community.oracle.com",
|
||||
"username_claimed": "dev"
|
||||
},
|
||||
"HexRPG": {
|
||||
"errorMsg": "Error : User ",
|
||||
"errorType": "message",
|
||||
"regexCheck": "^[a-zA-Z0-9_ ]{3,20}$",
|
||||
"url": "https://www.hexrpg.com/userinfo/{}",
|
||||
"urlMain": "https://www.hexrpg.com/",
|
||||
"username_claimed": "blue"
|
||||
},
|
||||
"G2G": {
|
||||
"errorType": "response_url",
|
||||
"errorUrl": "https://www.g2g.com/{}",
|
||||
"regexCheck": "^[A-Za-z][A-Za-z0-9_]{2,11}$",
|
||||
"url": "https://www.g2g.com/{}",
|
||||
"urlMain": "https://www.g2g.com/",
|
||||
"username_claimed": "user"
|
||||
},
|
||||
"BitCoinForum": {
|
||||
"errorMsg": "The user whose profile you are trying to view does not exist.",
|
||||
"errorType": "message",
|
||||
"url": "https://bitcoinforum.com/profile/{}",
|
||||
"urlMain": "https://bitcoinforum.com",
|
||||
"username_claimed": "bitcoinforum.com"
|
||||
}
|
||||
}
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
certifi>=2019.6.16
|
||||
colorama>=0.4.1
|
||||
PySocks>=1.7.0
|
||||
requests>=2.22.0
|
||||
requests-futures>=1.0.0
|
||||
stem>=1.8.0
|
||||
torrequest>=0.1.0
|
||||
pandas>=1.0.0
|
||||
openpyxl<=3.0.10
|
||||
exrex>=0.11.0
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
""" Sherlock Module
|
||||
|
||||
This module contains the main logic to search for usernames at social
|
||||
networks.
|
||||
|
||||
"""
|
||||
|
|
@ -1,80 +0,0 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "Sherlock Targets",
|
||||
"description": "Social media target to probe for existence of usernames",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"$schema": { "type": "string" }
|
||||
},
|
||||
"patternProperties": {
|
||||
"^(?!\\$).*?$": {
|
||||
"type": "object",
|
||||
"description": "User-friendly target name",
|
||||
"required": [ "url", "urlMain", "errorType", "username_claimed" ],
|
||||
"properties": {
|
||||
"url": { "type": "string" },
|
||||
"urlMain": { "type": "string" },
|
||||
"urlProbe": { "type": "string" },
|
||||
"username_claimed": { "type": "string" },
|
||||
"regexCheck": { "type": "string" },
|
||||
"isNSFW": { "type": "boolean" },
|
||||
"headers": { "type": "object" },
|
||||
"request_payload": { "type": "object" },
|
||||
"__comment__": {
|
||||
"type": "string",
|
||||
"description": "Used to clarify important target information if (and only if) a commit message would not suffice.\nThis key should not be parsed anywhere within Sherlock."
|
||||
},
|
||||
"tags": {
|
||||
"oneOf": [
|
||||
{ "$ref": "#/$defs/tag" },
|
||||
{ "type": "array", "items": { "$ref": "#/$defs/tag" } }
|
||||
]
|
||||
},
|
||||
"request_method": {
|
||||
"type": "string",
|
||||
"enum": [ "GET", "POST", "HEAD", "PUT" ]
|
||||
},
|
||||
"errorType": {
|
||||
"type": "string",
|
||||
"enum": [ "message", "response_url", "status_code" ]
|
||||
},
|
||||
"errorMsg": {
|
||||
"oneOf": [
|
||||
{ "type": "string" },
|
||||
{ "type": "array", "items": { "type": "string" } }
|
||||
]
|
||||
},
|
||||
"errorCode": {
|
||||
"oneOf": [
|
||||
{ "type": "integer" },
|
||||
{ "type": "array", "items": { "type": "integer" } }
|
||||
]
|
||||
},
|
||||
"errorUrl": { "type": "string" },
|
||||
"response_url": { "type": "string" }
|
||||
},
|
||||
"dependencies": {
|
||||
"errorMsg": {
|
||||
"properties" : { "errorType": { "const": "message" } }
|
||||
},
|
||||
"errorUrl": {
|
||||
"properties": { "errorType": { "const": "response_url" } }
|
||||
},
|
||||
"errorCode": {
|
||||
"properties": { "errorType": { "const": "status_code" } }
|
||||
}
|
||||
},
|
||||
"if": { "properties": { "errorType": { "const": "message" } } },
|
||||
"then": { "required": [ "errorMsg" ] },
|
||||
"else": {
|
||||
"if": { "properties": { "errorType": { "const": "response_url" } } },
|
||||
"then": { "required": [ "errorUrl" ] }
|
||||
},
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"$defs": {
|
||||
"tag": { "type": "string", "enum": [ "adult", "gaming" ] }
|
||||
}
|
||||
}
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
"""Sherlock Tests
|
||||
|
||||
This package contains various submodules used to run tests.
|
||||
"""
|
||||
|
|
@ -1,213 +0,0 @@
|
|||
"""Sherlock Tests
|
||||
|
||||
This module contains various tests.
|
||||
"""
|
||||
from tests.base import SherlockBaseTest
|
||||
import exrex
|
||||
|
||||
|
||||
class SherlockDetectTests(SherlockBaseTest):
    """Tests for Sherlock's username detection mechanisms.

    Each test pins one representative site per detection mechanism
    ("message" or "status_code") and verifies that a claimed username is
    reported as existing and that a randomly generated username is
    reported as absent.
    """

    @staticmethod
    def _probably_unclaimed_username(pattern):
        """Return a username matching *pattern* that is very unlikely to be taken.

        The pattern is a slightly modified, maximum-length variant of the
        site's own ``regexCheck``: the generated name is valid for the site
        but improbable as a real account.  This avoids hardcoding "known
        absent" usernames, which someone with ill intent could register to
        break the tests.
        """
        return exrex.getone(pattern)

    def test_detect_true_via_message(self):
        """Test Username Does Exist (Via Message).

        Ensures that the "message" detection mechanism correctly reports
        a username that does exist.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        site = "AllMyLinks"
        site_data = self.site_data_all[site]

        # Fail fast if the site's detection method has changed: this test
        # is only meaningful for message-based detection.
        self.assertEqual("message", site_data["errorType"])

        self.username_check([site_data["username_claimed"]], [site], exist_check=True)

    def test_detect_false_via_message(self):
        """Test Username Does Not Exist (Via Message).

        Ensures that the "message" detection mechanism correctly reports
        a username that does *not* exist.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        site = "AllMyLinks"
        site_data = self.site_data_all[site]

        # Fail fast if the site's detection method has changed.
        self.assertEqual("message", site_data["errorType"])

        valid_username = self._probably_unclaimed_username(r"^[a-z0-9][a-z0-9-]{32}$")
        self.username_check([valid_username], [site], exist_check=False)

    def test_detect_true_via_status_code(self):
        """Test Username Does Exist (Via Status Code).

        Ensures that the "status code" detection mechanism correctly
        reports a username that does exist.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        site = "BitBucket"
        site_data = self.site_data_all[site]

        # Fail fast if the site's detection method has changed: this test
        # is only meaningful for status-code-based detection.
        self.assertEqual("status_code", site_data["errorType"])

        self.username_check([site_data["username_claimed"]], [site], exist_check=True)

    def test_detect_false_via_status_code(self):
        """Test Username Does Not Exist (Via Status Code).

        Ensures that the "status code" detection mechanism correctly
        reports a username that does *not* exist.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        site = "BitBucket"
        site_data = self.site_data_all[site]

        # Fail fast if the site's detection method has changed.
        self.assertEqual("status_code", site_data["errorType"])

        valid_username = self._probably_unclaimed_username(r"^[a-zA-Z0-9-_]{30}")
        self.username_check([valid_username], [site], exist_check=False)
|
||||
|
||||
|
||||
class SherlockSiteCoverageTests(SherlockBaseTest):
    """Site-coverage tests over the whole site data file.

    For each detection mechanism, every site using that mechanism is
    probed with a username known to exist and with one that should not,
    and the classification is checked.  A final test verifies that every
    site has test data at all.
    """

    def test_coverage_false_via_status(self):
        """Test Username Does Not Exist Site Coverage (Via HTTP Status).

        Checks all sites with "status_code" detection to ensure that a
        username which does not exist is reported as absent.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        self.detect_type_check("status_code", exist_check=False)

    def test_coverage_true_via_status(self):
        """Test Username Does Exist Site Coverage (Via HTTP Status).

        Checks all sites with "status_code" detection to ensure that a
        username which does exist is reported as present.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        self.detect_type_check("status_code", exist_check=True)

    def test_coverage_false_via_message(self):
        """Test Username Does Not Exist Site Coverage (Via Error Message).

        Checks all sites with "message" detection to ensure that a
        username which does not exist is reported as absent.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        self.detect_type_check("message", exist_check=False)

    def test_coverage_true_via_message(self):
        """Test Username Does Exist Site Coverage (Via Error Message).

        Checks all sites with "message" detection to ensure that a
        username which does exist is reported as present.

        Triggers an assert if the detection mechanism did not work as
        expected.
        """
        self.detect_type_check("message", exist_check=True)

    def test_coverage_total(self):
        """Test Site Coverage Is Total.

        Ensures that every site in the data file has test data available.

        Triggers an assert if we do not have total coverage.
        """
        self.coverage_total_check()
|
||||
|
|
@ -1,224 +0,0 @@
|
|||
"""Sherlock Base Tests
|
||||
|
||||
This module contains various utilities for running tests.
|
||||
"""
|
||||
import os
|
||||
import os.path
|
||||
import unittest
|
||||
import sherlock
|
||||
from result import QueryStatus
|
||||
from notify import QueryNotify
|
||||
from sites import SitesInformation
|
||||
import warnings
|
||||
|
||||
|
||||
class SherlockBaseTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
"""Sherlock Base Test Setup.
|
||||
|
||||
Does common setup tasks for base Sherlock tests.
|
||||
|
||||
Keyword Arguments:
|
||||
self -- This object.
|
||||
|
||||
Return Value:
|
||||
Nothing.
|
||||
"""
|
||||
|
||||
# This ignores the ResourceWarning from an unclosed SSLSocket.
|
||||
# TODO: Figure out how to fix the code so this is not needed.
|
||||
warnings.simplefilter("ignore", ResourceWarning)
|
||||
|
||||
# Create object with all information about sites we are aware of.
|
||||
sites = SitesInformation(data_file_path=os.path.join(os.path.dirname(__file__), "../resources/data.json"))
|
||||
|
||||
# Create original dictionary from SitesInformation() object.
|
||||
# Eventually, the rest of the code will be updated to use the new object
|
||||
# directly, but this will glue the two pieces together.
|
||||
site_data_all = {}
|
||||
for site in sites:
|
||||
site_data_all[site.name] = site.information
|
||||
self.site_data_all = site_data_all
|
||||
|
||||
# Load excluded sites list, if any
|
||||
excluded_sites_path = os.path.join(os.path.dirname(os.path.realpath(sherlock.__file__)), "tests/.excluded_sites")
|
||||
try:
|
||||
with open(excluded_sites_path, "r", encoding="utf-8") as excluded_sites_file:
|
||||
self.excluded_sites = excluded_sites_file.read().splitlines()
|
||||
except FileNotFoundError:
|
||||
self.excluded_sites = []
|
||||
|
||||
# Create notify object for query results.
|
||||
self.query_notify = QueryNotify()
|
||||
|
||||
self.tor = False
|
||||
self.unique_tor = False
|
||||
self.timeout = None
|
||||
self.skip_error_sites = True
|
||||
|
||||
return
|
||||
|
||||
def site_data_filter(self, site_list):
|
||||
"""Filter Site Data.
|
||||
|
||||
Keyword Arguments:
|
||||
self -- This object.
|
||||
site_list -- List of strings corresponding to sites which
|
||||
should be filtered.
|
||||
|
||||
Return Value:
|
||||
Dictionary containing sub-set of site data specified by "site_list".
|
||||
"""
|
||||
|
||||
# Create new dictionary that has filtered site data based on input.
|
||||
# Note that any site specified which is not understood will generate
|
||||
# an error.
|
||||
site_data = {}
|
||||
for site in site_list:
|
||||
with self.subTest(f"Checking test vector Site '{site}' "
|
||||
f"exists in total site data."
|
||||
):
|
||||
site_data[site] = self.site_data_all[site]
|
||||
|
||||
return site_data
|
||||
|
||||
def username_check(self, username_list, site_list, exist_check=True):
|
||||
"""Username Exist Check.
|
||||
|
||||
Keyword Arguments:
|
||||
self -- This object.
|
||||
username_list -- List of strings corresponding to usernames
|
||||
which should exist on *all* of the sites.
|
||||
site_list -- List of strings corresponding to sites which
|
||||
should be filtered.
|
||||
exist_check -- Boolean which indicates if this should be
|
||||
a check for Username existence,
|
||||
or non-existence.
|
||||
|
||||
Return Value:
|
||||
Nothing.
|
||||
Will trigger an assert if Username does not have the expected
|
||||
existence state.
|
||||
"""
|
||||
|
||||
# Filter all site data down to just what is needed for this test.
|
||||
site_data = self.site_data_filter(site_list)
|
||||
|
||||
if exist_check:
|
||||
check_type_text = "claimed"
|
||||
exist_result_desired = QueryStatus.CLAIMED
|
||||
else:
|
||||
check_type_text = "available"
|
||||
exist_result_desired = QueryStatus.AVAILABLE
|
||||
|
||||
for username in username_list:
|
||||
results = sherlock.sherlock(username,
|
||||
site_data,
|
||||
self.query_notify,
|
||||
tor=self.tor,
|
||||
unique_tor=self.unique_tor,
|
||||
timeout=self.timeout
|
||||
)
|
||||
for site, result in results.items():
|
||||
with self.subTest(f"Checking Username '{username}' "
|
||||
f"{check_type_text} on Site '{site}'"
|
||||
):
|
||||
if (
|
||||
(self.skip_error_sites == True) and
|
||||
(result["status"].status == QueryStatus.UNKNOWN)
|
||||
):
|
||||
#Some error connecting to site.
|
||||
self.skipTest(f"Skipping Username '{username}' "
|
||||
f"{check_type_text} on Site '{site}': "
|
||||
f"Site returned error status."
|
||||
)
|
||||
|
||||
self.assertEqual(exist_result_desired,
|
||||
result["status"].status)
|
||||
|
||||
return
|
||||
|
||||
def detect_type_check(self, detect_type, exist_check=True):
|
||||
"""Username Exist Check.
|
||||
|
||||
Keyword Arguments:
|
||||
self -- This object.
|
||||
detect_type -- String corresponding to detection algorithm
|
||||
which is desired to be tested.
|
||||
Note that only sites which have documented
|
||||
usernames which exist and do not exist
|
||||
will be tested.
|
||||
exist_check -- Boolean which indicates if this should be
|
||||
a check for Username existence,
|
||||
or non-existence.
|
||||
|
||||
Return Value:
|
||||
Nothing.
|
||||
Runs tests on all sites using the indicated detection algorithm
|
||||
and which also has test vectors specified.
|
||||
Will trigger an assert if Username does not have the expected
|
||||
existence state.
|
||||
"""
|
||||
|
||||
# Dictionary of sites that should be tested for having a username.
|
||||
# This will allow us to test sites with a common username in parallel.
|
||||
sites_by_username = {}
|
||||
|
||||
for site, site_data in self.site_data_all.items():
|
||||
if (
|
||||
(site in self.excluded_sites) or
|
||||
(site_data["errorType"] != detect_type) or
|
||||
(site_data.get("username_claimed") is None) or
|
||||
(site_data.get("username_unclaimed") is None)
|
||||
):
|
||||
# This is either not a site we are interested in, or the
|
||||
# site does not contain the required information to do
|
||||
# the tests.
|
||||
pass
|
||||
else:
|
||||
# We should run a test on this site.
|
||||
|
||||
# Figure out which type of user
|
||||
if exist_check:
|
||||
username = site_data.get("username_claimed")
|
||||
else:
|
||||
username = site_data.get("username_unclaimed")
|
||||
|
||||
# Add this site to the list of sites corresponding to this
|
||||
# username.
|
||||
if username in sites_by_username:
|
||||
sites_by_username[username].append(site)
|
||||
else:
|
||||
sites_by_username[username] = [site]
|
||||
|
||||
# Check on the username availability against all of the sites.
|
||||
for username, site_list in sites_by_username.items():
|
||||
self.username_check([username],
|
||||
site_list,
|
||||
exist_check=exist_check
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
def coverage_total_check(self):
|
||||
"""Total Coverage Check.
|
||||
|
||||
Keyword Arguments:
|
||||
self -- This object.
|
||||
|
||||
Return Value:
|
||||
Nothing.
|
||||
Counts up all Sites with full test data available.
|
||||
Will trigger an assert if any Site does not have test coverage.
|
||||
"""
|
||||
|
||||
site_no_tests_list = []
|
||||
|
||||
for site, site_data in self.site_data_all.items():
|
||||
if site_data.get("username_claimed") is None:
|
||||
# Test information not available on this site.
|
||||
site_no_tests_list.append(site)
|
||||
|
||||
self.assertEqual("", ", ".join(site_no_tests_list))
|
||||
|
||||
return
|
||||
|
|
@ -1,29 +0,0 @@
|
|||
import importlib
|
||||
import unittest
|
||||
import sys
|
||||
sys.path.append('../')
|
||||
import sherlock as sh
|
||||
|
||||
checksymbols = []
|
||||
checksymbols = ["_", "-", "."]
|
||||
|
||||
"""Test for multiple usernames.
|
||||
|
||||
This test ensures that the function multiple_usernames works properly. More specific,
|
||||
different scenarios are tested and only usernames that contain this specific sequence: {?}
|
||||
should return positive.
|
||||
|
||||
Keyword Arguments:
|
||||
self -- This object.
|
||||
|
||||
Return Value:
|
||||
Nothing.
|
||||
"""
|
||||
class TestMultipleUsernames(unittest.TestCase):
|
||||
def test_area(self):
|
||||
test_usernames = ["test{?}test" , "test{?feo" , "test"]
|
||||
for name in test_usernames:
|
||||
if(sh.check_for_parameter(name)):
|
||||
self.assertAlmostEqual(sh.multiple_usernames(name), ["test_test" , "test-test" , "test.test"])
|
||||
else:
|
||||
self.assertAlmostEqual(name, name)
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
""" Sherlock Module
|
||||
|
||||
This module contains the main logic to search for usernames at social
|
||||
networks.
|
||||
|
||||
"""
|
||||
|
||||
from importlib.metadata import version as pkg_version, PackageNotFoundError
|
||||
import pathlib
|
||||
import tomli
|
||||
|
||||
|
||||
def get_version() -> str:
|
||||
"""Fetch the version number of the installed package."""
|
||||
try:
|
||||
return pkg_version("sherlock_project")
|
||||
except PackageNotFoundError:
|
||||
pyproject_path: pathlib.Path = pathlib.Path(__file__).resolve().parent.parent / "pyproject.toml"
|
||||
with pyproject_path.open("rb") as f:
|
||||
pyproject_data = tomli.load(f)
|
||||
return pyproject_data["tool"]["poetry"]["version"]
|
||||
|
||||
# This variable is only used to check for ImportErrors induced by users running as script rather than as module or package
|
||||
import_error_test_var = None
|
||||
|
||||
__shortname__ = "Sherlock"
|
||||
__longname__ = "Sherlock: Find Usernames Across Social Networks"
|
||||
__version__ = get_version()
|
||||
|
||||
forge_api_latest_release = "https://api.github.com/repos/sherlock-project/sherlock/releases/latest"
|
||||
|
|
@ -14,9 +14,9 @@ if __name__ == "__main__":
|
|||
# Check if the user is using the correct version of Python
|
||||
python_version = sys.version.split()[0]
|
||||
|
||||
if sys.version_info < (3, 6):
|
||||
print(f"Sherlock requires Python 3.6+\nYou are using Python {python_version}, which is not supported by Sherlock.")
|
||||
if sys.version_info < (3, 9):
|
||||
print(f"Sherlock requires Python 3.9+\nYou are using Python {python_version}, which is not supported by Sherlock.")
|
||||
sys.exit(1)
|
||||
|
||||
import sherlock
|
||||
from sherlock_project import sherlock
|
||||
sherlock.main()
|
||||
|
|
@ -3,7 +3,7 @@
|
|||
This module defines the objects for notifying the caller about the
|
||||
results of queries.
|
||||
"""
|
||||
from result import QueryStatus
|
||||
from sherlock_project.result import QueryStatus
|
||||
from colorama import Fore, Style
|
||||
import webbrowser
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,149 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "Sherlock Target Manifest",
|
||||
"description": "Social media targets to probe for the existence of known usernames",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"$schema": { "type": "string" }
|
||||
},
|
||||
"patternProperties": {
|
||||
"^(?!\\$).*?$": {
|
||||
"type": "object",
|
||||
"description": "Target name and associated information (key should be human readable name)",
|
||||
"required": ["url", "urlMain", "errorType", "username_claimed"],
|
||||
"properties": {
|
||||
"url": { "type": "string" },
|
||||
"urlMain": { "type": "string" },
|
||||
"urlProbe": { "type": "string" },
|
||||
"username_claimed": { "type": "string" },
|
||||
"regexCheck": { "type": "string" },
|
||||
"isNSFW": { "type": "boolean" },
|
||||
"headers": { "type": "object" },
|
||||
"request_payload": { "type": "object" },
|
||||
"__comment__": {
|
||||
"type": "string",
|
||||
"description": "Used to clarify important target information if (and only if) a commit message would not suffice.\nThis key should not be parsed anywhere within Sherlock."
|
||||
},
|
||||
"tags": {
|
||||
"oneOf": [
|
||||
{ "$ref": "#/$defs/tag" },
|
||||
{ "type": "array", "items": { "$ref": "#/$defs/tag" } }
|
||||
]
|
||||
},
|
||||
"request_method": {
|
||||
"type": "string",
|
||||
"enum": ["GET", "POST", "HEAD", "PUT"]
|
||||
},
|
||||
"errorType": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"enum": ["message", "response_url", "status_code"]
|
||||
},
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"enum": ["message", "response_url", "status_code"]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"errorMsg": {
|
||||
"oneOf": [
|
||||
{ "type": "string" },
|
||||
{ "type": "array", "items": { "type": "string" } }
|
||||
]
|
||||
},
|
||||
"errorCode": {
|
||||
"oneOf": [
|
||||
{ "type": "integer" },
|
||||
{ "type": "array", "items": { "type": "integer" } }
|
||||
]
|
||||
},
|
||||
"errorUrl": { "type": "string" },
|
||||
"response_url": { "type": "string" }
|
||||
},
|
||||
"dependencies": {
|
||||
"errorMsg": {
|
||||
"oneOf": [
|
||||
{ "properties": { "errorType": { "const": "message" } } },
|
||||
{
|
||||
"properties": {
|
||||
"errorType": {
|
||||
"type": "array",
|
||||
"contains": { "const": "message" }
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"errorUrl": {
|
||||
"oneOf": [
|
||||
{ "properties": { "errorType": { "const": "response_url" } } },
|
||||
{
|
||||
"properties": {
|
||||
"errorType": {
|
||||
"type": "array",
|
||||
"contains": { "const": "response_url" }
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"errorCode": {
|
||||
"oneOf": [
|
||||
{ "properties": { "errorType": { "const": "status_code" } } },
|
||||
{
|
||||
"properties": {
|
||||
"errorType": {
|
||||
"type": "array",
|
||||
"contains": { "const": "status_code" }
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"allOf": [
|
||||
{
|
||||
"if": {
|
||||
"anyOf": [
|
||||
{ "properties": { "errorType": { "const": "message" } } },
|
||||
{
|
||||
"properties": {
|
||||
"errorType": {
|
||||
"type": "array",
|
||||
"contains": { "const": "message" }
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"then": { "required": ["errorMsg"] }
|
||||
},
|
||||
{
|
||||
"if": {
|
||||
"anyOf": [
|
||||
{ "properties": { "errorType": { "const": "response_url" } } },
|
||||
{
|
||||
"properties": {
|
||||
"errorType": {
|
||||
"type": "array",
|
||||
"contains": { "const": "response_url" }
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"then": { "required": ["errorUrl"] }
|
||||
}
|
||||
],
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"$defs": {
|
||||
"tag": { "type": "string", "enum": ["adult", "gaming"] }
|
||||
}
|
||||
}
|
||||
|
|
@ -7,30 +7,43 @@ This module contains the main logic to search for usernames at social
|
|||
networks.
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
try:
|
||||
from sherlock_project.__init__ import import_error_test_var # noqa: F401
|
||||
except ImportError:
|
||||
print("Did you run Sherlock with `python3 sherlock/sherlock.py ...`?")
|
||||
print("This is an outdated method. Please see https://sherlockproject.xyz/installation for up to date instructions.")
|
||||
sys.exit(1)
|
||||
|
||||
import csv
|
||||
import signal
|
||||
import pandas as pd
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import sys
|
||||
from argparse import ArgumentParser, RawDescriptionHelpFormatter
|
||||
from json import loads as json_loads
|
||||
from time import monotonic
|
||||
from typing import Optional
|
||||
|
||||
import requests
|
||||
|
||||
from requests_futures.sessions import FuturesSession
|
||||
from torrequest import TorRequest
|
||||
from result import QueryStatus
|
||||
from result import QueryResult
|
||||
from notify import QueryNotifyPrint
|
||||
from sites import SitesInformation
|
||||
|
||||
from sherlock_project.__init__ import (
|
||||
__longname__,
|
||||
__shortname__,
|
||||
__version__,
|
||||
forge_api_latest_release,
|
||||
)
|
||||
|
||||
from sherlock_project.result import QueryStatus
|
||||
from sherlock_project.result import QueryResult
|
||||
from sherlock_project.notify import QueryNotify
|
||||
from sherlock_project.notify import QueryNotifyPrint
|
||||
from sherlock_project.sites import SitesInformation
|
||||
from colorama import init
|
||||
from argparse import ArgumentTypeError
|
||||
|
||||
module_name = "Sherlock: Find Usernames Across Social Networks"
|
||||
__version__ = "0.14.4"
|
||||
|
||||
|
||||
class SherlockFuturesSession(FuturesSession):
|
||||
def request(self, method, url, hooks=None, *args, **kwargs):
|
||||
|
|
@ -143,7 +156,6 @@ def check_for_parameter(username):
|
|||
return "{?}" in username
|
||||
|
||||
|
||||
checksymbols = []
|
||||
checksymbols = ["_", "-", "."]
|
||||
|
||||
|
||||
|
|
@ -156,14 +168,13 @@ def multiple_usernames(username):
|
|||
|
||||
|
||||
def sherlock(
|
||||
username,
|
||||
site_data,
|
||||
query_notify,
|
||||
tor=False,
|
||||
unique_tor=False,
|
||||
proxy=None,
|
||||
timeout=60,
|
||||
):
|
||||
username: str,
|
||||
site_data: dict[str, dict[str, str]],
|
||||
query_notify: QueryNotify,
|
||||
dump_response: bool = False,
|
||||
proxy: Optional[str] = None,
|
||||
timeout: int = 60,
|
||||
) -> dict[str, dict[str, str | QueryResult]]:
|
||||
"""Run Sherlock Analysis.
|
||||
|
||||
Checks for existence of username on various social media sites.
|
||||
|
|
@ -175,8 +186,6 @@ def sherlock(
|
|||
query_notify -- Object with base type of QueryNotify().
|
||||
This will be used to notify the caller about
|
||||
query results.
|
||||
tor -- Boolean indicating whether to use a tor circuit for the requests.
|
||||
unique_tor -- Boolean indicating whether to use a new tor circuit for each request.
|
||||
proxy -- String indicating the proxy URL
|
||||
timeout -- Time in seconds to wait before timing out request.
|
||||
Default is 60 seconds.
|
||||
|
|
@ -197,15 +206,9 @@ def sherlock(
|
|||
|
||||
# Notify caller that we are starting the query.
|
||||
query_notify.start(username)
|
||||
# Create session based on request methodology
|
||||
if tor or unique_tor:
|
||||
# Requests using Tor obfuscation
|
||||
underlying_request = TorRequest()
|
||||
underlying_session = underlying_request.session
|
||||
else:
|
||||
# Normal requests
|
||||
underlying_session = requests.session()
|
||||
underlying_request = requests.Request()
|
||||
|
||||
# Normal requests
|
||||
underlying_session = requests.session()
|
||||
|
||||
# Limit number of workers to 20.
|
||||
# This is probably vastly overkill.
|
||||
|
|
@ -232,7 +235,7 @@ def sherlock(
|
|||
# A user agent is needed because some sites don't return the correct
|
||||
# information since they think that we are bots (Which we actually are...)
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/116.0",
|
||||
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:129.0) Gecko/20100101 Firefox/129.0",
|
||||
}
|
||||
|
||||
if "headers" in net_info:
|
||||
|
|
@ -329,15 +332,10 @@ def sherlock(
|
|||
# Store future in data for access later
|
||||
net_info["request_future"] = future
|
||||
|
||||
# Reset identify for tor (if needed)
|
||||
if unique_tor:
|
||||
underlying_request.reset_identity()
|
||||
|
||||
# Add this site's results into final dictionary with all the other results.
|
||||
results_total[social_network] = results_site
|
||||
|
||||
# Open the file containing account links
|
||||
# Core logic: If tor requests, make them here. If multi-threaded requests, wait for responses
|
||||
for social_network, net_info in site_data.items():
|
||||
# Retrieve results again
|
||||
results_site = results_total.get(social_network)
|
||||
|
|
@ -351,6 +349,8 @@ def sherlock(
|
|||
|
||||
# Get the expected error type
|
||||
error_type = net_info["errorType"]
|
||||
if isinstance(error_type, str):
|
||||
error_type: list[str] = [error_type]
|
||||
|
||||
# Retrieve future and ensure it has finished
|
||||
future = net_info["request_future"]
|
||||
|
|
@ -377,12 +377,16 @@ def sherlock(
|
|||
query_status = QueryStatus.UNKNOWN
|
||||
error_context = None
|
||||
|
||||
# As WAFs advance and evolve, they will occasionally block Sherlock and lead to false positives
|
||||
# and negatives. Fingerprints should be added here to filter results that fail to bypass WAFs.
|
||||
# Fingerprints should be highly targetted. Comment at the end of each fingerprint to indicate target and date.
|
||||
# As WAFs advance and evolve, they will occasionally block Sherlock and
|
||||
# lead to false positives and negatives. Fingerprints should be added
|
||||
# here to filter results that fail to bypass WAFs. Fingerprints should
|
||||
# be highly targetted. Comment at the end of each fingerprint to
|
||||
# indicate target and date fingerprinted.
|
||||
WAFHitMsgs = [
|
||||
'.loading-spinner{visibility:hidden}body.no-js .challenge-running{display:none}body.dark{background-color:#222;color:#d9d9d9}body.dark a{color:#fff}body.dark a:hover{color:#ee730a;text-decoration:underline}body.dark .lds-ring div{border-color:#999 transparent transparent}body.dark .font-red{color:#b20f03}body.dark .big-button,body.dark .pow-button{background-color:#4693ff;color:#1d1d1d}body.dark #challenge-success-text{background-image:url(data:image/svg+xml;base64,', # 2024-04-08 Cloudflare
|
||||
'{return l.onPageView}}),Object.defineProperty(r,"perimeterxIdentifiers",{enumerable:' # 2024-04-09 PerimeterX / Human Security
|
||||
r'.loading-spinner{visibility:hidden}body.no-js .challenge-running{display:none}body.dark{background-color:#222;color:#d9d9d9}body.dark a{color:#fff}body.dark a:hover{color:#ee730a;text-decoration:underline}body.dark .lds-ring div{border-color:#999 transparent transparent}body.dark .font-red{color:#b20f03}body.dark', # 2024-05-13 Cloudflare
|
||||
r'<span id="challenge-error-text">', # 2024-11-11 Cloudflare error page
|
||||
r'AwsWafIntegration.forceRefreshToken', # 2024-11-11 Cloudfront (AWS)
|
||||
r'{return l.onPageView}}),Object.defineProperty(r,"perimeterxIdentifiers",{enumerable:' # 2024-04-09 PerimeterX / Human Security
|
||||
]
|
||||
|
||||
if error_text is not None:
|
||||
|
|
@ -391,61 +395,91 @@ def sherlock(
|
|||
elif any(hitMsg in r.text for hitMsg in WAFHitMsgs):
|
||||
query_status = QueryStatus.WAF
|
||||
|
||||
elif error_type == "message":
|
||||
# error_flag True denotes no error found in the HTML
|
||||
# error_flag False denotes error found in the HTML
|
||||
error_flag = True
|
||||
errors = net_info.get("errorMsg")
|
||||
# errors will hold the error message
|
||||
# it can be string or list
|
||||
# by isinstance method we can detect that
|
||||
# and handle the case for strings as normal procedure
|
||||
# and if its list we can iterate the errors
|
||||
if isinstance(errors, str):
|
||||
# Checks if the error message is in the HTML
|
||||
# if error is present we will set flag to False
|
||||
if errors in r.text:
|
||||
error_flag = False
|
||||
else:
|
||||
# If it's list, it will iterate all the error message
|
||||
for error in errors:
|
||||
if error in r.text:
|
||||
error_flag = False
|
||||
break
|
||||
if error_flag:
|
||||
query_status = QueryStatus.CLAIMED
|
||||
else:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
elif error_type == "status_code":
|
||||
error_codes = net_info.get("errorCode")
|
||||
query_status = QueryStatus.CLAIMED
|
||||
|
||||
# Type consistency, allowing for both singlets and lists in manifest
|
||||
if isinstance(error_codes, int):
|
||||
error_codes = [error_codes]
|
||||
|
||||
if error_codes is not None and r.status_code in error_codes:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
elif r.status_code >= 300 or r.status_code < 200:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
elif error_type == "response_url":
|
||||
# For this detection method, we have turned off the redirect.
|
||||
# So, there is no need to check the response URL: it will always
|
||||
# match the request. Instead, we will ensure that the response
|
||||
# code indicates that the request was successful (i.e. no 404, or
|
||||
# forward to some odd redirect).
|
||||
if 200 <= r.status_code < 300:
|
||||
query_status = QueryStatus.CLAIMED
|
||||
else:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
else:
|
||||
# It should be impossible to ever get here...
|
||||
raise ValueError(
|
||||
f"Unknown Error Type '{error_type}' for " f"site '{social_network}'"
|
||||
)
|
||||
if any(errtype not in ["message", "status_code", "response_url"] for errtype in error_type):
|
||||
error_context = f"Unknown error type '{error_type}' for {social_network}"
|
||||
query_status = QueryStatus.UNKNOWN
|
||||
else:
|
||||
if "message" in error_type:
|
||||
# error_flag True denotes no error found in the HTML
|
||||
# error_flag False denotes error found in the HTML
|
||||
error_flag = True
|
||||
errors = net_info.get("errorMsg")
|
||||
# errors will hold the error message
|
||||
# it can be string or list
|
||||
# by isinstance method we can detect that
|
||||
# and handle the case for strings as normal procedure
|
||||
# and if its list we can iterate the errors
|
||||
if isinstance(errors, str):
|
||||
# Checks if the error message is in the HTML
|
||||
# if error is present we will set flag to False
|
||||
if errors in r.text:
|
||||
error_flag = False
|
||||
else:
|
||||
# If it's list, it will iterate all the error message
|
||||
for error in errors:
|
||||
if error in r.text:
|
||||
error_flag = False
|
||||
break
|
||||
if error_flag:
|
||||
query_status = QueryStatus.CLAIMED
|
||||
else:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
|
||||
if "status_code" in error_type and query_status is not QueryStatus.AVAILABLE:
|
||||
error_codes = net_info.get("errorCode")
|
||||
query_status = QueryStatus.CLAIMED
|
||||
|
||||
# Type consistency, allowing for both singlets and lists in manifest
|
||||
if isinstance(error_codes, int):
|
||||
error_codes = [error_codes]
|
||||
|
||||
if error_codes is not None and r.status_code in error_codes:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
elif r.status_code >= 300 or r.status_code < 200:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
|
||||
if "response_url" in error_type and query_status is not QueryStatus.AVAILABLE:
|
||||
# For this detection method, we have turned off the redirect.
|
||||
# So, there is no need to check the response URL: it will always
|
||||
# match the request. Instead, we will ensure that the response
|
||||
# code indicates that the request was successful (i.e. no 404, or
|
||||
# forward to some odd redirect).
|
||||
if 200 <= r.status_code < 300:
|
||||
query_status = QueryStatus.CLAIMED
|
||||
else:
|
||||
query_status = QueryStatus.AVAILABLE
|
||||
|
||||
if dump_response:
|
||||
print("+++++++++++++++++++++")
|
||||
print(f"TARGET NAME : {social_network}")
|
||||
print(f"USERNAME : {username}")
|
||||
print(f"TARGET URL : {url}")
|
||||
print(f"TEST METHOD : {error_type}")
|
||||
try:
|
||||
print(f"STATUS CODES : {net_info['errorCode']}")
|
||||
except KeyError:
|
||||
pass
|
||||
print("Results...")
|
||||
try:
|
||||
print(f"RESPONSE CODE : {r.status_code}")
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
print(f"ERROR TEXT : {net_info['errorMsg']}")
|
||||
except KeyError:
|
||||
pass
|
||||
print(">>>>> BEGIN RESPONSE TEXT")
|
||||
try:
|
||||
print(r.text)
|
||||
except Exception:
|
||||
pass
|
||||
print("<<<<< END RESPONSE TEXT")
|
||||
print("VERDICT : " + str(query_status))
|
||||
print("+++++++++++++++++++++")
|
||||
|
||||
# Notify caller about results of query.
|
||||
result = QueryResult(
|
||||
result: QueryResult = QueryResult(
|
||||
username=username,
|
||||
site_name=social_network,
|
||||
site_url_user=url,
|
||||
|
|
@ -502,20 +536,14 @@ def handler(signal_received, frame):
|
|||
|
||||
|
||||
def main():
|
||||
version_string = (
|
||||
f"%(prog)s {__version__}\n"
|
||||
+ f"{requests.__description__}: {requests.__version__}\n"
|
||||
+ f"Python: {platform.python_version()}"
|
||||
)
|
||||
|
||||
parser = ArgumentParser(
|
||||
formatter_class=RawDescriptionHelpFormatter,
|
||||
description=f"{module_name} (Version {__version__})",
|
||||
description=f"{__longname__} (Version {__version__})",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--version",
|
||||
action="version",
|
||||
version=version_string,
|
||||
version=f"{__shortname__} v{__version__}",
|
||||
help="Display version information and dependencies.",
|
||||
)
|
||||
parser.add_argument(
|
||||
|
|
@ -540,22 +568,6 @@ def main():
|
|||
dest="output",
|
||||
help="If using single username, the output of the result will be saved to this file.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tor",
|
||||
"-t",
|
||||
action="store_true",
|
||||
dest="tor",
|
||||
default=False,
|
||||
help="Make requests over Tor; increases runtime; requires Tor to be installed and in system path.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--unique-tor",
|
||||
"-u",
|
||||
action="store_true",
|
||||
dest="unique_tor",
|
||||
default=False,
|
||||
help="Make requests over Tor with new Tor circuit after each request; increases runtime; requires Tor to be installed and in system path.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--csv",
|
||||
action="store_true",
|
||||
|
|
@ -587,13 +599,20 @@ def main():
|
|||
default=None,
|
||||
help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dump-response",
|
||||
action="store_true",
|
||||
dest="dump_response",
|
||||
default=False,
|
||||
help="Dump the HTTP response to stdout for targeted debugging.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--json",
|
||||
"-j",
|
||||
metavar="JSON_FILE",
|
||||
dest="json_file",
|
||||
default=None,
|
||||
help="Load data from a JSON file or an online, valid, JSON file.",
|
||||
help="Load data from a JSON file or an online, valid, JSON file. Upstream PR numbers also accepted.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--timeout",
|
||||
|
|
@ -656,6 +675,32 @@ def main():
|
|||
help="Include checking of NSFW sites from default list.",
|
||||
)
|
||||
|
||||
# TODO deprecated in favor of --txt, retained for workflow compatibility, to be removed
|
||||
# in future release
|
||||
parser.add_argument(
|
||||
"--no-txt",
|
||||
action="store_true",
|
||||
dest="no_txt",
|
||||
default=False,
|
||||
help="Disable creation of a txt file - WILL BE DEPRECATED",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--txt",
|
||||
action="store_true",
|
||||
dest="output_txt",
|
||||
default=False,
|
||||
help="Enable creation of a txt file",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--ignore-exclusions",
|
||||
action="store_true",
|
||||
dest="ignore_exclusions",
|
||||
default=False,
|
||||
help="Ignore upstream exclusions (may return more false positives)",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# If the user presses CTRL-C, exit gracefully without throwing errors
|
||||
|
|
@ -663,38 +708,23 @@ def main():
|
|||
|
||||
# Check for newer version of Sherlock. If it exists, let the user know about it
|
||||
try:
|
||||
r = requests.get(
|
||||
"https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/sherlock.py"
|
||||
)
|
||||
latest_release_raw = requests.get(forge_api_latest_release, timeout=10).text
|
||||
latest_release_json = json_loads(latest_release_raw)
|
||||
latest_remote_tag = latest_release_json["tag_name"]
|
||||
|
||||
remote_version = str(re.findall('__version__ = "(.*)"', r.text)[0])
|
||||
local_version = __version__
|
||||
|
||||
if remote_version != local_version:
|
||||
if latest_remote_tag[1:] != __version__:
|
||||
print(
|
||||
"Update Available!\n"
|
||||
+ f"You are running version {local_version}. Version {remote_version} is available at https://github.com/sherlock-project/sherlock"
|
||||
f"Update available! {__version__} --> {latest_remote_tag[1:]}"
|
||||
f"\n{latest_release_json['html_url']}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
print(f"A problem occurred while checking for an update: {error}")
|
||||
|
||||
# Argument check
|
||||
# TODO regex check on args.proxy
|
||||
if args.tor and (args.proxy is not None):
|
||||
raise Exception("Tor and Proxy cannot be set at the same time.")
|
||||
|
||||
# Make prompts
|
||||
if args.proxy is not None:
|
||||
print("Using the proxy: " + args.proxy)
|
||||
|
||||
if args.tor or args.unique_tor:
|
||||
print("Using Tor to make requests")
|
||||
|
||||
print(
|
||||
"Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors."
|
||||
)
|
||||
|
||||
if args.no_color:
|
||||
# Disable color output.
|
||||
init(strip=True, convert=False)
|
||||
|
|
@ -716,10 +746,32 @@ def main():
|
|||
try:
|
||||
if args.local:
|
||||
sites = SitesInformation(
|
||||
os.path.join(os.path.dirname(__file__), "resources/data.json")
|
||||
os.path.join(os.path.dirname(__file__), "resources/data.json"),
|
||||
honor_exclusions=False,
|
||||
)
|
||||
else:
|
||||
sites = SitesInformation(args.json_file)
|
||||
json_file_location = args.json_file
|
||||
if args.json_file:
|
||||
# If --json parameter is a number, interpret it as a pull request number
|
||||
if args.json_file.isnumeric():
|
||||
pull_number = args.json_file
|
||||
pull_url = f"https://api.github.com/repos/sherlock-project/sherlock/pulls/{pull_number}"
|
||||
pull_request_raw = requests.get(pull_url, timeout=10).text
|
||||
pull_request_json = json_loads(pull_request_raw)
|
||||
|
||||
# Check if it's a valid pull request
|
||||
if "message" in pull_request_json:
|
||||
print(f"ERROR: Pull request #{pull_number} not found.")
|
||||
sys.exit(1)
|
||||
|
||||
head_commit_sha = pull_request_json["head"]["sha"]
|
||||
json_file_location = f"https://raw.githubusercontent.com/sherlock-project/sherlock/{head_commit_sha}/sherlock_project/resources/data.json"
|
||||
|
||||
sites = SitesInformation(
|
||||
data_file_path=json_file_location,
|
||||
honor_exclusions=not args.ignore_exclusions,
|
||||
do_not_exclude=args.site_list,
|
||||
)
|
||||
except Exception as error:
|
||||
print(f"ERROR: {error}")
|
||||
sys.exit(1)
|
||||
|
|
@ -773,8 +825,7 @@ def main():
|
|||
username,
|
||||
site_data,
|
||||
query_notify,
|
||||
tor=args.tor,
|
||||
unique_tor=args.unique_tor,
|
||||
dump_response=args.dump_response,
|
||||
proxy=args.proxy,
|
||||
timeout=args.timeout,
|
||||
)
|
||||
|
|
@ -789,14 +840,15 @@ def main():
|
|||
else:
|
||||
result_file = f"{username}.txt"
|
||||
|
||||
with open(result_file, "w", encoding="utf-8") as file:
|
||||
exists_counter = 0
|
||||
for website_name in results:
|
||||
dictionary = results[website_name]
|
||||
if dictionary.get("status").status == QueryStatus.CLAIMED:
|
||||
exists_counter += 1
|
||||
file.write(dictionary["url_user"] + "\n")
|
||||
file.write(f"Total Websites Username Detected On : {exists_counter}\n")
|
||||
if args.output_txt:
|
||||
with open(result_file, "w", encoding="utf-8") as file:
|
||||
exists_counter = 0
|
||||
for website_name in results:
|
||||
dictionary = results[website_name]
|
||||
if dictionary.get("status").status == QueryStatus.CLAIMED:
|
||||
exists_counter += 1
|
||||
file.write(dictionary["url_user"] + "\n")
|
||||
file.write(f"Total Websites Username Detected On : {exists_counter}\n")
|
||||
|
||||
if args.csv:
|
||||
result_file = f"{username}.csv"
|
||||
|
|
@ -873,8 +925,8 @@ def main():
|
|||
{
|
||||
"username": usernames,
|
||||
"name": names,
|
||||
"url_main": url_main,
|
||||
"url_user": url_user,
|
||||
"url_main": [f'=HYPERLINK(\"{u}\")' for u in url_main],
|
||||
"url_user": [f'=HYPERLINK(\"{u}\")' for u in url_user],
|
||||
"exists": exists,
|
||||
"http_status": http_status,
|
||||
"response_time_s": response_time_s,
|
||||
|
|
@ -7,6 +7,10 @@ import json
|
|||
import requests
|
||||
import secrets
|
||||
|
||||
|
||||
MANIFEST_URL = "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock_project/resources/data.json"
|
||||
EXCLUSIONS_URL = "https://raw.githubusercontent.com/sherlock-project/sherlock/refs/heads/exclusions/false_positive_exclusions.txt"
|
||||
|
||||
class SiteInformation:
|
||||
def __init__(self, name, url_home, url_username_format, username_claimed,
|
||||
information, is_nsfw, username_unclaimed=secrets.token_urlsafe(10)):
|
||||
|
|
@ -67,12 +71,17 @@ class SiteInformation:
|
|||
Return Value:
|
||||
Nicely formatted string to get information about this object.
|
||||
"""
|
||||
|
||||
|
||||
return f"{self.name} ({self.url_home})"
|
||||
|
||||
|
||||
class SitesInformation:
|
||||
def __init__(self, data_file_path=None):
|
||||
def __init__(
|
||||
self,
|
||||
data_file_path: str|None = None,
|
||||
honor_exclusions: bool = True,
|
||||
do_not_exclude: list[str] = [],
|
||||
):
|
||||
"""Create Sites Information Object.
|
||||
|
||||
Contains information about all supported websites.
|
||||
|
|
@ -110,7 +119,7 @@ class SitesInformation:
|
|||
# The default data file is the live data.json which is in the GitHub repo. The reason why we are using
|
||||
# this instead of the local one is so that the user has the most up-to-date data. This prevents
|
||||
# users from creating issue about false positives which has already been fixed or having outdated data
|
||||
data_file_path = "https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock/resources/data.json"
|
||||
data_file_path = MANIFEST_URL
|
||||
|
||||
# Ensure that specified data file has correct extension.
|
||||
if not data_file_path.lower().endswith(".json"):
|
||||
|
|
@ -120,7 +129,7 @@ class SitesInformation:
|
|||
if data_file_path.lower().startswith("http"):
|
||||
# Reference is to a URL.
|
||||
try:
|
||||
response = requests.get(url=data_file_path)
|
||||
response = requests.get(url=data_file_path, timeout=30)
|
||||
except Exception as error:
|
||||
raise FileNotFoundError(
|
||||
f"Problem while attempting to access data file URL '{data_file_path}': {error}"
|
||||
|
|
@ -152,9 +161,31 @@ class SitesInformation:
|
|||
raise FileNotFoundError(f"Problem while attempting to access "
|
||||
f"data file '{data_file_path}'."
|
||||
)
|
||||
|
||||
|
||||
site_data.pop('$schema', None)
|
||||
|
||||
if honor_exclusions:
|
||||
try:
|
||||
response = requests.get(url=EXCLUSIONS_URL, timeout=10)
|
||||
if response.status_code == 200:
|
||||
exclusions = response.text.splitlines()
|
||||
exclusions = [exclusion.strip() for exclusion in exclusions]
|
||||
|
||||
for site in do_not_exclude:
|
||||
if site in exclusions:
|
||||
exclusions.remove(site)
|
||||
|
||||
for exclusion in exclusions:
|
||||
try:
|
||||
site_data.pop(exclusion, None)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
except Exception:
|
||||
# If there was any problem loading the exclusions, just continue without them
|
||||
print("Warning: Could not load exclusions, continuing without them.")
|
||||
honor_exclusions = False
|
||||
|
||||
self.sites = {}
|
||||
|
||||
# Add all site information from the json file to internal site list.
|
||||
|
|
@ -174,7 +205,7 @@ class SitesInformation:
|
|||
raise ValueError(
|
||||
f"Problem parsing json contents at '{data_file_path}': Missing attribute {error}."
|
||||
)
|
||||
except TypeError as error:
|
||||
except TypeError:
|
||||
print(f"Encountered TypeError parsing json contents for target '{site_name}' at {data_file_path}\nSkipping target.\n")
|
||||
|
||||
return
|
||||
|
|
@ -194,7 +225,7 @@ class SitesInformation:
|
|||
for site in self.sites:
|
||||
if self.sites[site].is_nsfw and site.casefold() not in do_not_remove:
|
||||
continue
|
||||
sites[site] = self.sites[site]
|
||||
sites[site] = self.sites[site]
|
||||
self.sites = sites
|
||||
|
||||
def site_name_list(self):
|
||||
31
site_list.py
31
site_list.py
|
|
@ -1,31 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# This module generates the listing of supported sites which can be found in
|
||||
# sites.md. It also organizes all the sites in alphanumeric order
|
||||
import json
|
||||
|
||||
# Read the data.json file
|
||||
with open("sherlock/resources/data.json", "r", encoding="utf-8") as data_file:
|
||||
data: dict = json.load(data_file)
|
||||
|
||||
# Removes schema-specific keywords for proper processing
|
||||
social_networks: dict = dict(data)
|
||||
social_networks.pop('$schema', None)
|
||||
|
||||
# Sort the social networks in alphanumeric order
|
||||
social_networks: list = sorted(social_networks.items())
|
||||
|
||||
# Write the list of supported sites to sites.md
|
||||
with open("sites.md", "w") as site_file:
|
||||
site_file.write(f"## List Of Supported Sites ({len(social_networks)} Sites In Total!)\n")
|
||||
for social_network, info in social_networks:
|
||||
url_main = info["urlMain"]
|
||||
is_nsfw = "**(NSFW)**" if info.get("isNSFW") else ""
|
||||
site_file.write(f"1.  [{social_network}]({url_main}) {is_nsfw}\n")
|
||||
|
||||
# Overwrite the data.json file with sorted data
|
||||
with open("sherlock/resources/data.json", "w") as data_file:
|
||||
sorted_data = json.dumps(data, indent=2, sort_keys=True)
|
||||
data_file.write(sorted_data)
|
||||
data_file.write("\n")
|
||||
|
||||
print("Finished updating supported site listing!")
|
||||
409
sites.md
409
sites.md
|
|
@ -1,409 +0,0 @@
|
|||
## List Of Supported Sites (408 Sites In Total!)
|
||||
1.  [1337x](https://www.1337x.to/)
|
||||
1.  [2Dimensions](https://2Dimensions.com/)
|
||||
1.  [3dnews](http://forum.3dnews.ru/)
|
||||
1.  [7Cups](https://www.7cups.com/)
|
||||
1.  [8tracks](https://8tracks.com/)
|
||||
1.  [9GAG](https://www.9gag.com/)
|
||||
1.  [APClips](https://apclips.com/) **(NSFW)**
|
||||
1.  [About.me](https://about.me/)
|
||||
1.  [Academia.edu](https://www.academia.edu/)
|
||||
1.  [AdmireMe.Vip](https://admireme.vip/) **(NSFW)**
|
||||
1.  [Air Pilot Life](https://airlinepilot.life/)
|
||||
1.  [Airbit](https://airbit.com/)
|
||||
1.  [Airliners](https://www.airliners.net/)
|
||||
1.  [Alik.cz](https://www.alik.cz/)
|
||||
1.  [All Things Worn](https://www.allthingsworn.com) **(NSFW)**
|
||||
1.  [AllMyLinks](https://allmylinks.com/)
|
||||
1.  [Amino](https://aminoapps.com)
|
||||
1.  [AniWorld](https://aniworld.to/)
|
||||
1.  [Anilist](https://anilist.co/)
|
||||
1.  [Apple Developer](https://developer.apple.com)
|
||||
1.  [Apple Discussions](https://discussions.apple.com)
|
||||
1.  [Archive of Our Own](https://archiveofourown.org/)
|
||||
1.  [Archive.org](https://archive.org)
|
||||
1.  [ArtStation](https://www.artstation.com/)
|
||||
1.  [Asciinema](https://asciinema.org)
|
||||
1.  [Ask Fedora](https://ask.fedoraproject.org/)
|
||||
1.  [AskFM](https://ask.fm/)
|
||||
1.  [Audiojungle](https://audiojungle.net/)
|
||||
1.  [Autofrage](https://www.autofrage.net/)
|
||||
1.  [Avizo](https://www.avizo.cz/)
|
||||
1.  [BLIP.fm](https://blip.fm/)
|
||||
1.  [BOOTH](https://booth.pm/)
|
||||
1.  [Bandcamp](https://www.bandcamp.com/)
|
||||
1.  [Bazar.cz](https://www.bazar.cz/)
|
||||
1.  [Behance](https://www.behance.net/)
|
||||
1.  [Bezuzyteczna](https://bezuzyteczna.pl)
|
||||
1.  [BiggerPockets](https://www.biggerpockets.com/)
|
||||
1.  [Bikemap](https://www.bikemap.net/)
|
||||
1.  [BioHacking](https://forum.dangerousthings.com/)
|
||||
1.  [BitBucket](https://bitbucket.org/)
|
||||
1.  [Bitwarden Forum](https://bitwarden.com/)
|
||||
1.  [Blipfoto](https://www.blipfoto.com/)
|
||||
1.  [Blogger](https://www.blogger.com/)
|
||||
1.  [BodyBuilding](https://bodyspace.bodybuilding.com/)
|
||||
1.  [BongaCams](https://pt.bongacams.com) **(NSFW)**
|
||||
1.  [Bookcrossing](https://www.bookcrossing.com/)
|
||||
1.  [BraveCommunity](https://community.brave.com/)
|
||||
1.  [BugCrowd](https://bugcrowd.com/)
|
||||
1.  [BuyMeACoffee](https://www.buymeacoffee.com/)
|
||||
1.  [BuzzFeed](https://buzzfeed.com/)
|
||||
1.  [CGTrader](https://www.cgtrader.com)
|
||||
1.  [CNET](https://www.cnet.com/)
|
||||
1.  [CSSBattle](https://cssbattle.dev)
|
||||
1.  [CTAN](https://ctan.org/)
|
||||
1.  [Caddy Community](https://caddy.community/)
|
||||
1.  [Car Talk Community](https://community.cartalk.com/)
|
||||
1.  [Carbonmade](https://carbonmade.com/)
|
||||
1.  [Career.habr](https://career.habr.com/)
|
||||
1.  [Championat](https://www.championat.com/)
|
||||
1.  [Chaos](https://chaos.social/)
|
||||
1.  [Chatujme.cz](https://chatujme.cz/)
|
||||
1.  [ChaturBate](https://chaturbate.com) **(NSFW)**
|
||||
1.  [Chess](https://www.chess.com/)
|
||||
1.  [Choice Community](https://choice.community/)
|
||||
1.  [Clapper](https://clapperapp.com/)
|
||||
1.  [CloudflareCommunity](https://community.cloudflare.com/)
|
||||
1.  [Clozemaster](https://www.clozemaster.com)
|
||||
1.  [Clubhouse](https://www.clubhouse.com)
|
||||
1.  [Code Snippet Wiki](https://codesnippets.fandom.com)
|
||||
1.  [Codeberg](https://codeberg.org/)
|
||||
1.  [Codecademy](https://www.codecademy.com/)
|
||||
1.  [Codechef](https://www.codechef.com/)
|
||||
1.  [Codeforces](https://codeforces.com/)
|
||||
1.  [Codepen](https://codepen.io/)
|
||||
1.  [Coders Rank](https://codersrank.io/)
|
||||
1.  [Coderwall](https://coderwall.com)
|
||||
1.  [Codewars](https://www.codewars.com)
|
||||
1.  [Coinvote](https://coinvote.cc/)
|
||||
1.  [ColourLovers](https://www.colourlovers.com/)
|
||||
1.  [Contently](https://contently.com/)
|
||||
1.  [Coroflot](https://coroflot.com/)
|
||||
1.  [Cracked](https://www.cracked.com/)
|
||||
1.  [Crevado](https://crevado.com/)
|
||||
1.  [Crowdin](https://crowdin.com/)
|
||||
1.  [Cryptomator Forum](https://community.cryptomator.org/)
|
||||
1.  [Cults3D](https://cults3d.com/en)
|
||||
1.  [CyberDefenders](https://cyberdefenders.org/)
|
||||
1.  [DEV Community](https://dev.to/)
|
||||
1.  [DMOJ](https://dmoj.ca/)
|
||||
1.  [DailyMotion](https://www.dailymotion.com/)
|
||||
1.  [Dealabs](https://www.dealabs.com/)
|
||||
1.  [DeviantART](https://deviantart.com)
|
||||
1.  [Discogs](https://www.discogs.com/)
|
||||
1.  [Discuss.Elastic.co](https://discuss.elastic.co/)
|
||||
1.  [Disqus](https://disqus.com/)
|
||||
1.  [Docker Hub](https://hub.docker.com/)
|
||||
1.  [Dribbble](https://dribbble.com/)
|
||||
1.  [Duolingo](https://duolingo.com/)
|
||||
1.  [Eintracht Frankfurt Forum](https://community.eintracht.de/)
|
||||
1.  [Envato Forum](https://forums.envato.com/)
|
||||
1.  [Erome](https://www.erome.com/) **(NSFW)**
|
||||
1.  [Etsy](https://www.etsy.com/)
|
||||
1.  [Euw](https://euw.op.gg/)
|
||||
1.  [Exposure](https://exposure.co/)
|
||||
1.  [EyeEm](https://www.eyeem.com/)
|
||||
1.  [F3.cool](https://f3.cool/)
|
||||
1.  [Fameswap](https://fameswap.com/)
|
||||
1.  [Fandom](https://www.fandom.com/)
|
||||
1.  [Finanzfrage](https://www.finanzfrage.net/)
|
||||
1.  [Fiverr](https://www.fiverr.com/)
|
||||
1.  [Flickr](https://www.flickr.com/)
|
||||
1.  [Flightradar24](https://www.flightradar24.com/)
|
||||
1.  [Flipboard](https://flipboard.com/)
|
||||
1.  [Football](https://www.rusfootball.info/)
|
||||
1.  [FortniteTracker](https://fortnitetracker.com/challenges)
|
||||
1.  [Forum Ophilia](https://www.forumophilia.com/) **(NSFW)**
|
||||
1.  [Fosstodon](https://fosstodon.org/)
|
||||
1.  [Freelance.habr](https://freelance.habr.com/)
|
||||
1.  [Freelancer](https://www.freelancer.com/)
|
||||
1.  [Freesound](https://freesound.org/)
|
||||
1.  [GNOME VCS](https://gitlab.gnome.org/)
|
||||
1.  [GaiaOnline](https://www.gaiaonline.com/)
|
||||
1.  [Gamespot](https://www.gamespot.com/)
|
||||
1.  [GeeksforGeeks](https://www.geeksforgeeks.org/)
|
||||
1.  [Genius (Artists)](https://genius.com/)
|
||||
1.  [Genius (Users)](https://genius.com/)
|
||||
1.  [Gesundheitsfrage](https://www.gesundheitsfrage.net/)
|
||||
1.  [GetMyUni](https://getmyuni.com/)
|
||||
1.  [Giant Bomb](https://www.giantbomb.com/)
|
||||
1.  [Giphy](https://giphy.com/)
|
||||
1.  [GitBook](https://gitbook.com/)
|
||||
1.  [GitHub](https://www.github.com/)
|
||||
1.  [GitLab](https://gitlab.com/)
|
||||
1.  [Gitee](https://gitee.com/)
|
||||
1.  [GoodReads](https://www.goodreads.com/)
|
||||
1.  [Google Play](https://play.google.com)
|
||||
1.  [Gradle](https://gradle.org/)
|
||||
1.  [Grailed](https://www.grailed.com/)
|
||||
1.  [Gravatar](http://en.gravatar.com/)
|
||||
1.  [Gumroad](https://www.gumroad.com/)
|
||||
1.  [Gutefrage](https://www.gutefrage.net/)
|
||||
1.  [HackTheBox](https://forum.hackthebox.eu/)
|
||||
1.  [Hackaday](https://hackaday.io/)
|
||||
1.  [HackenProof (Hackers)](https://hackenproof.com/)
|
||||
1.  [HackerEarth](https://hackerearth.com/)
|
||||
1.  [HackerNews](https://news.ycombinator.com/)
|
||||
1.  [HackerOne](https://hackerone.com/)
|
||||
1.  [HackerRank](https://hackerrank.com/)
|
||||
1.  [Harvard Scholar](https://scholar.harvard.edu/)
|
||||
1.  [Hashnode](https://hashnode.com)
|
||||
1.  [Heavy-R](https://www.heavy-r.com/) **(NSFW)**
|
||||
1.  [Holopin](https://holopin.io)
|
||||
1.  [Houzz](https://houzz.com/)
|
||||
1.  [HubPages](https://hubpages.com/)
|
||||
1.  [Hubski](https://hubski.com/)
|
||||
1.  [HudsonRock](https://hudsonrock.com)
|
||||
1.  [ICQ](https://icq.com/)
|
||||
1.  [IFTTT](https://www.ifttt.com/)
|
||||
1.  [IRC-Galleria](https://irc-galleria.net/)
|
||||
1.  [Icons8 Community](https://community.icons8.com/)
|
||||
1.  [Image Fap](https://www.imagefap.com/) **(NSFW)**
|
||||
1.  [ImgUp.cz](https://imgup.cz/)
|
||||
1.  [Imgur](https://imgur.com/)
|
||||
1.  [Instagram](https://instagram.com/)
|
||||
1.  [Instructables](https://www.instructables.com/)
|
||||
1.  [Intigriti](https://app.intigriti.com)
|
||||
1.  [Ionic Forum](https://forum.ionicframework.com/)
|
||||
1.  [Issuu](https://issuu.com/)
|
||||
1.  [Itch.io](https://itch.io/)
|
||||
1.  [Itemfix](https://www.itemfix.com/)
|
||||
1.  [Jellyfin Weblate](https://translate.jellyfin.org/)
|
||||
1.  [Jimdo](https://jimdosite.com/)
|
||||
1.  [Joplin Forum](https://discourse.joplinapp.org/)
|
||||
1.  [KEAKR](https://www.keakr.com/)
|
||||
1.  [Kaggle](https://www.kaggle.com/)
|
||||
1.  [Keybase](https://keybase.io/)
|
||||
1.  [Kick](https://kick.com/)
|
||||
1.  [Kik](http://kik.me/)
|
||||
1.  [Kongregate](https://www.kongregate.com/)
|
||||
1.  [LOR](https://linux.org.ru/)
|
||||
1.  [Launchpad](https://launchpad.net/)
|
||||
1.  [LeetCode](https://leetcode.com/)
|
||||
1.  [LessWrong](https://www.lesswrong.com/)
|
||||
1.  [Letterboxd](https://letterboxd.com/)
|
||||
1.  [LibraryThing](https://www.librarything.com/)
|
||||
1.  [Lichess](https://lichess.org)
|
||||
1.  [LinkedIn](https://linkedin.com)
|
||||
1.  [Linktree](https://linktr.ee/)
|
||||
1.  [Listed](https://listed.to/)
|
||||
1.  [LiveJournal](https://www.livejournal.com/)
|
||||
1.  [Lobsters](https://lobste.rs/)
|
||||
1.  [LottieFiles](https://lottiefiles.com/)
|
||||
1.  [LushStories](https://www.lushstories.com/) **(NSFW)**
|
||||
1.  [MMORPG Forum](https://forums.mmorpg.com/)
|
||||
1.  [Mapify](https://mapify.travel/)
|
||||
1.  [Medium](https://medium.com/)
|
||||
1.  [Memrise](https://www.memrise.com/)
|
||||
1.  [Minecraft](https://minecraft.net/)
|
||||
1.  [MixCloud](https://www.mixcloud.com/)
|
||||
1.  [Monkeytype](https://monkeytype.com/)
|
||||
1.  [Motherless](https://motherless.com/) **(NSFW)**
|
||||
1.  [Motorradfrage](https://www.motorradfrage.net/)
|
||||
1.  [MyAnimeList](https://myanimelist.net/)
|
||||
1.  [MyMiniFactory](https://www.myminifactory.com/)
|
||||
1.  [Mydramalist](https://mydramalist.com)
|
||||
1.  [Myspace](https://myspace.com/)
|
||||
1.  [NICommunityForum](https://www.native-instruments.com/forum/)
|
||||
1.  [NationStates Nation](https://nationstates.net)
|
||||
1.  [NationStates Region](https://nationstates.net)
|
||||
1.  [Naver](https://naver.com)
|
||||
1.  [Needrom](https://www.needrom.com/)
|
||||
1.  [Newgrounds](https://newgrounds.com)
|
||||
1.  [Nextcloud Forum](https://nextcloud.com/)
|
||||
1.  [Nightbot](https://nightbot.tv/)
|
||||
1.  [Ninja Kiwi](https://ninjakiwi.com/)
|
||||
1.  [NintendoLife](https://www.nintendolife.com/)
|
||||
1.  [NitroType](https://www.nitrotype.com/)
|
||||
1.  [NotABug.org](https://notabug.org/)
|
||||
1.  [Nyaa.si](https://nyaa.si/)
|
||||
1.  [OGUsers](https://ogu.gg/)
|
||||
1.  [OpenStreetMap](https://www.openstreetmap.org/)
|
||||
1.  [Opensource](https://opensource.com/)
|
||||
1.  [OurDJTalk](https://ourdjtalk.com/)
|
||||
1.  [PCGamer](https://pcgamer.com)
|
||||
1.  [PSNProfiles.com](https://psnprofiles.com/)
|
||||
1.  [Packagist](https://packagist.org/)
|
||||
1.  [Pastebin](https://pastebin.com/)
|
||||
1.  [Patreon](https://www.patreon.com/)
|
||||
1.  [PentesterLab](https://pentesterlab.com/)
|
||||
1.  [PepperIT](https://www.pepper.it)
|
||||
1.  [Periscope](https://www.periscope.tv/)
|
||||
1.  [Pinkbike](https://www.pinkbike.com/)
|
||||
1.  [PlayStore](https://play.google.com/store)
|
||||
1.  [PocketStars](https://pocketstars.com/) **(NSFW)**
|
||||
1.  [Pokemon Showdown](https://pokemonshowdown.com)
|
||||
1.  [Polarsteps](https://polarsteps.com/)
|
||||
1.  [Polygon](https://www.polygon.com/)
|
||||
1.  [Polymart](https://polymart.org/)
|
||||
1.  [Pornhub](https://pornhub.com/) **(NSFW)**
|
||||
1.  [ProductHunt](https://www.producthunt.com/)
|
||||
1.  [PromoDJ](http://promodj.com/)
|
||||
1.  [PyPi](https://pypi.org)
|
||||
1.  [Rajce.net](https://www.rajce.idnes.cz/)
|
||||
1.  [Rate Your Music](https://rateyourmusic.com/)
|
||||
1.  [Rclone Forum](https://forum.rclone.org/)
|
||||
1.  [RedTube](https://www.redtube.com/) **(NSFW)**
|
||||
1.  [Redbubble](https://www.redbubble.com/)
|
||||
1.  [Reddit](https://www.reddit.com/)
|
||||
1.  [Reisefrage](https://www.reisefrage.net/)
|
||||
1.  [Replit.com](https://replit.com/)
|
||||
1.  [ResearchGate](https://www.researchgate.net/)
|
||||
1.  [ReverbNation](https://www.reverbnation.com/)
|
||||
1.  [Roblox](https://www.roblox.com/)
|
||||
1.  [RocketTube](https://www.rockettube.com/) **(NSFW)**
|
||||
1.  [RoyalCams](https://royalcams.com)
|
||||
1.  [RubyGems](https://rubygems.org/)
|
||||
1.  [Rumble](https://rumble.com/)
|
||||
1.  [RuneScape](https://www.runescape.com/)
|
||||
1.  [SWAPD](https://swapd.co/)
|
||||
1.  [Sbazar.cz](https://www.sbazar.cz/)
|
||||
1.  [Scratch](https://scratch.mit.edu/)
|
||||
1.  [Scribd](https://www.scribd.com/)
|
||||
1.  [ShitpostBot5000](https://www.shitpostbot.com/)
|
||||
1.  [Shpock](https://www.shpock.com/)
|
||||
1.  [Signal](https://community.signalusers.org)
|
||||
1.  [Sketchfab](https://sketchfab.com/)
|
||||
1.  [Slack](https://slack.com)
|
||||
1.  [Slant](https://www.slant.co/)
|
||||
1.  [Slashdot](https://slashdot.org)
|
||||
1.  [SlideShare](https://slideshare.net/)
|
||||
1.  [Slides](https://slides.com/)
|
||||
1.  [SmugMug](https://smugmug.com)
|
||||
1.  [Smule](https://www.smule.com/)
|
||||
1.  [Snapchat](https://www.snapchat.com)
|
||||
1.  [SoundCloud](https://soundcloud.com/)
|
||||
1.  [SourceForge](https://sourceforge.net/)
|
||||
1.  [SoylentNews](https://soylentnews.org)
|
||||
1.  [Speedrun.com](https://speedrun.com/)
|
||||
1.  [Spells8](https://spells8.com)
|
||||
1.  [Splice](https://splice.com/)
|
||||
1.  [Splits.io](https://splits.io)
|
||||
1.  [Sporcle](https://www.sporcle.com/)
|
||||
1.  [Sportlerfrage](https://www.sportlerfrage.net/)
|
||||
1.  [SportsRU](https://www.sports.ru/)
|
||||
1.  [Spotify](https://open.spotify.com/)
|
||||
1.  [Star Citizen](https://robertsspaceindustries.com/)
|
||||
1.  [Steam Community (Group)](https://steamcommunity.com/)
|
||||
1.  [Steam Community (User)](https://steamcommunity.com/)
|
||||
1.  [Strava](https://www.strava.com/)
|
||||
1.  [SublimeForum](https://forum.sublimetext.com/)
|
||||
1.  [TETR.IO](https://tetr.io)
|
||||
1.  [TLDR Legal](https://tldrlegal.com/)
|
||||
1.  [TRAKTRAIN](https://traktrain.com/)
|
||||
1.  [Telegram](https://t.me/)
|
||||
1.  [Tellonym.me](https://tellonym.me/)
|
||||
1.  [Tenor](https://tenor.com/)
|
||||
1.  [ThemeForest](https://themeforest.net/)
|
||||
1.  [TnAFlix](https://www.tnaflix.com/) **(NSFW)**
|
||||
1.  [TorrentGalaxy](https://torrentgalaxy.to/)
|
||||
1.  [TradingView](https://www.tradingview.com/)
|
||||
1.  [Trakt](https://www.trakt.tv/)
|
||||
1.  [TrashboxRU](https://trashbox.ru/)
|
||||
1.  [Trawelling](https://traewelling.de/)
|
||||
1.  [Trello](https://trello.com/)
|
||||
1.  [TryHackMe](https://tryhackme.com/)
|
||||
1.  [Tuna](https://tuna.voicemod.net/)
|
||||
1.  [Tweakers](https://tweakers.net)
|
||||
1.  [Twitch](https://www.twitch.tv/)
|
||||
1.  [Twitter](https://twitter.com/)
|
||||
1.  [Typeracer](https://typeracer.com)
|
||||
1.  [Ultimate-Guitar](https://ultimate-guitar.com/)
|
||||
1.  [Unsplash](https://unsplash.com/)
|
||||
1.  [Untappd](https://untappd.com/)
|
||||
1.  [VK](https://vk.com/)
|
||||
1.  [VSCO](https://vsco.co/)
|
||||
1.  [Velomania](https://forum.velomania.ru/)
|
||||
1.  [Venmo](https://venmo.com/)
|
||||
1.  [Vero](https://vero.co/)
|
||||
1.  [Vimeo](https://vimeo.com/)
|
||||
1.  [VirusTotal](https://www.virustotal.com/)
|
||||
1.  [WICG Forum](https://discourse.wicg.io/)
|
||||
1.  [Warrior Forum](https://www.warriorforum.com/)
|
||||
1.  [Wattpad](https://www.wattpad.com/)
|
||||
1.  [WebNode](https://www.webnode.cz/)
|
||||
1.  [Weblate](https://hosted.weblate.org/)
|
||||
1.  [Weebly](https://weebly.com/)
|
||||
1.  [Wikidot](http://www.wikidot.com/)
|
||||
1.  [Wikipedia](https://www.wikipedia.org/)
|
||||
1.  [Windy](https://windy.com/)
|
||||
1.  [Wix](https://wix.com/)
|
||||
1.  [WolframalphaForum](https://community.wolfram.com/)
|
||||
1.  [WordPress](https://wordpress.com)
|
||||
1.  [WordPressOrg](https://wordpress.org/)
|
||||
1.  [Wordnik](https://www.wordnik.com/)
|
||||
1.  [Wykop](https://www.wykop.pl)
|
||||
1.  [Xbox Gamertag](https://xboxgamertag.com/)
|
||||
1.  [Xvideos](https://xvideos.com/) **(NSFW)**
|
||||
1.  [YandexMusic](https://music.yandex)
|
||||
1.  [YouNow](https://www.younow.com/)
|
||||
1.  [YouPic](https://youpic.com/)
|
||||
1.  [YouPorn](https://youporn.com) **(NSFW)**
|
||||
1.  [YouTube](https://www.youtube.com/)
|
||||
1.  [Zhihu](https://www.zhihu.com/)
|
||||
1.  [akniga](https://akniga.org/profile/blue/)
|
||||
1.  [authorSTREAM](http://www.authorstream.com/)
|
||||
1.  [babyRU](https://www.baby.ru/)
|
||||
1.  [babyblogRU](https://www.babyblog.ru/)
|
||||
1.  [chaos.social](https://chaos.social/)
|
||||
1.  [couchsurfing](https://www.couchsurfing.com/)
|
||||
1.  [d3RU](https://d3.ru/)
|
||||
1.  [dailykos](https://www.dailykos.com)
|
||||
1.  [datingRU](http://dating.ru)
|
||||
1.  [devRant](https://devrant.com/)
|
||||
1.  [drive2](https://www.drive2.ru/)
|
||||
1.  [eGPU](https://egpu.io/)
|
||||
1.  [eintracht](https://eintracht.de)
|
||||
1.  [fixya](https://www.fixya.com)
|
||||
1.  [fl](https://www.fl.ru/)
|
||||
1.  [forum_guns](https://forum.guns.ru/)
|
||||
1.  [freecodecamp](https://www.freecodecamp.org/)
|
||||
1.  [furaffinity](https://www.furaffinity.net)
|
||||
1.  [geocaching](https://www.geocaching.com/)
|
||||
1.  [gfycat](https://gfycat.com/)
|
||||
1.  [habr](https://habr.com/)
|
||||
1.  [hackster](https://www.hackster.io)
|
||||
1.  [hunting](https://www.hunting.ru/forum/)
|
||||
1.  [iMGSRC.RU](https://imgsrc.ru/)
|
||||
1.  [igromania](http://forum.igromania.ru/)
|
||||
1.  [interpals](https://www.interpals.net/)
|
||||
1.  [irecommend](https://irecommend.ru/)
|
||||
1.  [jbzd.com.pl](https://jbzd.com.pl/)
|
||||
1.  [jeuxvideo](https://www.jeuxvideo.com)
|
||||
1.  [kofi](https://ko-fi.com)
|
||||
1.  [kwork](https://www.kwork.ru/)
|
||||
1.  [labpentestit](https://lab.pentestit.ru/)
|
||||
1.  [last.fm](https://last.fm/)
|
||||
1.  [leasehackr](https://forum.leasehackr.com/)
|
||||
1.  [livelib](https://www.livelib.ru/)
|
||||
1.  [mastodon.cloud](https://mastodon.cloud/)
|
||||
1.  [mastodon.social](https://chaos.social/)
|
||||
1.  [mastodon.technology](https://mastodon.xyz/)
|
||||
1.  [mastodon.xyz](https://mastodon.xyz/)
|
||||
1.  [mercadolivre](https://www.mercadolivre.com.br)
|
||||
1.  [minds](https://www.minds.com)
|
||||
1.  [moikrug](https://moikrug.ru/)
|
||||
1.  [mstdn.io](https://mstdn.io/)
|
||||
1.  [nairaland.com](https://www.nairaland.com/)
|
||||
1.  [nnRU](https://www.nn.ru/)
|
||||
1.  [note](https://note.com/)
|
||||
1.  [npm](https://www.npmjs.com/)
|
||||
1.  [opennet](https://www.opennet.ru/)
|
||||
1.  [osu!](https://osu.ppy.sh/)
|
||||
1.  [phpRU](https://php.ru/forum/)
|
||||
1.  [pikabu](https://pikabu.ru/)
|
||||
1.  [pr0gramm](https://pr0gramm.com/)
|
||||
1.  [prog.hu](https://prog.hu/)
|
||||
1.  [queer.af](https://queer.af/)
|
||||
1.  [satsisRU](https://satsis.info/)
|
||||
1.  [sessionize](https://sessionize.com/)
|
||||
1.  [skyrock](https://skyrock.com/)
|
||||
1.  [social.tchncs.de](https://social.tchncs.de/)
|
||||
1.  [spletnik](https://spletnik.ru/)
|
||||
1.  [svidbook](https://www.svidbook.ru/)
|
||||
1.  [toster](https://www.toster.ru/)
|
||||
1.  [uid](https://uid.me/)
|
||||
1.  [wiki.vg](https://wiki.vg/)
|
||||
1.  [xHamster](https://xhamster.com) **(NSFW)**
|
||||
1.  [znanylekarz.pl](https://znanylekarz.pl)
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
import os
|
||||
import json
|
||||
import urllib
|
||||
import pytest
|
||||
from sherlock_project.sites import SitesInformation
|
||||
|
||||
def fetch_local_manifest(honor_exclusions: bool = True) -> dict[str, dict[str, str]]:
|
||||
sites_obj = SitesInformation(data_file_path=os.path.join(os.path.dirname(__file__), "../sherlock_project/resources/data.json"), honor_exclusions=honor_exclusions)
|
||||
sites_iterable: dict[str, dict[str, str]] = {site.name: site.information for site in sites_obj}
|
||||
return sites_iterable
|
||||
|
||||
@pytest.fixture()
|
||||
def sites_obj():
|
||||
sites_obj = SitesInformation(data_file_path=os.path.join(os.path.dirname(__file__), "../sherlock_project/resources/data.json"))
|
||||
yield sites_obj
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def sites_info():
|
||||
yield fetch_local_manifest()
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def remote_schema():
|
||||
schema_url: str = 'https://raw.githubusercontent.com/sherlock-project/sherlock/master/sherlock_project/resources/data.schema.json'
|
||||
with urllib.request.urlopen(schema_url) as remoteschema:
|
||||
schemadat = json.load(remoteschema)
|
||||
yield schemadat
|
||||
|
||||
def pytest_addoption(parser):
|
||||
parser.addoption(
|
||||
"--chunked-sites",
|
||||
action="store",
|
||||
default=None,
|
||||
help="For tests utilizing chunked sites, include only the (comma-separated) site(s) specified.",
|
||||
)
|
||||
|
||||
def pytest_generate_tests(metafunc):
    """Parametrize tests requesting `chunked_sites` with one site per test id.

    Honors the --chunked-sites option to limit parametrization to a
    comma-separated subset of site names.
    """
    if "chunked_sites" not in metafunc.fixturenames:
        return

    manifest = fetch_local_manifest(honor_exclusions=False)

    # Ingest and apply site selections
    requested: str | None = metafunc.config.getoption("--chunked-sites")
    if requested:
        wanted = {name.strip() for name in requested.split(",")}
        manifest = {name: info for name, info in manifest.items() if name in wanted}

    metafunc.parametrize(
        "chunked_sites",
        [{name: info} for name, info in manifest.items()],
        ids=list(manifest),
    )
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
import sherlock_project
|
||||
|
||||
#from sherlock.sites import SitesInformation
|
||||
#local_manifest = data_file_path=os.path.join(os.path.dirname(__file__), "../sherlock/resources/data.json")
|
||||
|
||||
def test_username_via_message():
    # NOTE(review): `sherlock_project.__main__` is a module, not a callable,
    # so this call would raise TypeError if executed. Presumably the intent
    # was to invoke the CLI entry point with "--version" — confirm and fix.
    sherlock_project.__main__("--version")
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
import os
|
||||
import platform
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
class Interactives:
    """Helpers for exercising the Sherlock CLI and package files in tests."""

    @staticmethod
    def run_cli(args: str = "") -> str:
        """Pass arguments to Sherlock as a normal user on the command line.

        Returns the decoded combined stdout/stderr of the CLI run.
        Raises InteractivesSubprocessError (carrying the decoded output)
        when the process exits nonzero.
        """
        # Adapt for platform differences (Windows likes to be special)
        if platform.system() == "Windows":
            command: str = f"py -m sherlock_project {args}"
        else:
            command: str = f"sherlock {args}"

        try:
            # shell=True is deliberate: `args` is a pre-formed command-line
            # string supplied by the tests themselves, not untrusted input.
            proc_out = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT)
            return proc_out.decode()
        except subprocess.CalledProcessError as e:
            raise InteractivesSubprocessError(e.output.decode())

    @staticmethod
    def walk_sherlock_for_files_with(pattern: str) -> list[str]:
        """Check all files within the Sherlock package for matching patterns."""
        compiled: re.Pattern = re.compile(pattern)  # do not shadow the parameter
        matching_files: list[str] = []
        for root, _dirs, files in os.walk("sherlock_project"):
            for file in files:
                file_path = os.path.join(root, file)
                if "__pycache__" in file_path:
                    continue
                with open(file_path, 'r', errors='ignore') as f:
                    if compiled.search(f.read()):
                        matching_files.append(file_path)
        return matching_files
|
||||
|
||||
class InteractivesSubprocessError(Exception):
    """Raised when a Sherlock CLI subprocess exits with a nonzero status."""
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
import os
|
||||
import json
|
||||
import pytest
|
||||
from jsonschema import validate
|
||||
|
||||
def test_validate_manifest_against_local_schema():
    """Ensures that the manifest matches the local schema, for situations where the schema is being changed."""
    here: str = os.path.dirname(__file__)
    json_path: str = os.path.join(here, '../sherlock_project/resources/data.json')
    schema_path: str = os.path.join(here, '../sherlock_project/resources/data.schema.json')

    with open(json_path, 'r') as manifest_file:
        manifest = json.load(manifest_file)
    with open(schema_path, 'r') as schema_file:
        schema = json.load(schema_file)

    validate(instance=manifest, schema=schema)
|
||||
|
||||
|
||||
@pytest.mark.online
def test_validate_manifest_against_remote_schema(remote_schema):
    """Ensures that the manifest matches the remote schema, so as to not unexpectedly break clients."""
    json_path: str = os.path.join(
        os.path.dirname(__file__),
        '../sherlock_project/resources/data.json',
    )

    with open(json_path, 'r') as manifest_file:
        manifest = json.load(manifest_file)

    validate(instance=manifest, schema=remote_schema)
|
||||
|
||||
# Ensure that the expected values are being returned by the site list
@pytest.mark.parametrize("target_name,target_expected_err_type", [
    ('GitHub', 'status_code'),
    ('GitLab', 'message'),
])
def test_site_list_iterability(sites_info, target_name, target_expected_err_type):
    """Spot-check that known sites expose the expected errorType in the manifest."""
    site_entry = sites_info[target_name]
    assert site_entry['errorType'] == target_expected_err_type
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
import pytest
|
||||
import random
|
||||
import string
|
||||
import re
|
||||
from sherlock_project.sherlock import sherlock
|
||||
from sherlock_project.notify import QueryNotify
|
||||
from sherlock_project.result import QueryStatus
|
||||
#from sherlock_interactives import Interactives
|
||||
|
||||
|
||||
def simple_query(sites_info: dict, site: str, username: str) -> QueryStatus:
    """Run sherlock against a single site and return the resulting QueryStatus."""
    results = sherlock(
        username=username,
        site_data={site: sites_info[site]},
        query_notify=QueryNotify(),
    )
    return results[site]['status'].status
|
||||
|
||||
|
||||
@pytest.mark.online
class TestLiveTargets:
    """Actively test probes against live and trusted targets"""

    def _assert_available_with_random_handles(self, sites_info, site: str, random_len: int) -> None:
        """Probe `site` with random handles until one reports AVAILABLE.

        Randomly generated usernames are simple alnum for simplicity and high
        compatibility. Several attempts may be made ~just in case~ a real
        username is generated. Shared by the likely-negative tests below to
        avoid duplicating the retry loop.
        """
        num_attempts: int = 3
        alphabet: str = string.ascii_letters + string.digits
        attempted_usernames: list[str] = []
        status: QueryStatus = QueryStatus.CLAIMED
        for _ in range(num_attempts):
            random_handle = ''.join(random.choice(alphabet) for _ in range(random_len))
            attempted_usernames.append(random_handle)
            status = simple_query(sites_info=sites_info, site=site, username=random_handle)
            if status is QueryStatus.AVAILABLE:
                break
        assert status is QueryStatus.AVAILABLE, f"Could not validate available username after {num_attempts} attempts with randomly generated usernames {attempted_usernames}."

    # Known positives should only use sites trusted to be reliable and unchanging
    @pytest.mark.parametrize('site,username',[
        ('GitLab', 'ppfeister'),
        ('AllMyLinks', 'blue'),
    ])
    def test_known_positives_via_message(self, sites_info, site, username):
        assert simple_query(sites_info=sites_info, site=site, username=username) is QueryStatus.CLAIMED

    # Known positives should only use sites trusted to be reliable and unchanging
    @pytest.mark.parametrize('site,username',[
        ('GitHub', 'ppfeister'),
        ('GitHub', 'sherlock-project'),
        ('Docker Hub', 'ppfeister'),
        ('Docker Hub', 'sherlock'),
    ])
    def test_known_positives_via_status_code(self, sites_info, site, username):
        assert simple_query(sites_info=sites_info, site=site, username=username) is QueryStatus.CLAIMED

    # Known positives should only use sites trusted to be reliable and unchanging
    @pytest.mark.parametrize('site,username',[
        ('Keybase', 'blue'),
        ('devRant', 'blue'),
    ])
    def test_known_positives_via_response_url(self, sites_info, site, username):
        assert simple_query(sites_info=sites_info, site=site, username=username) is QueryStatus.CLAIMED

    # Randomly generate usernames of high length and test for positive availability
    @pytest.mark.parametrize('site,random_len',[
        ('GitLab', 255),
        ('Codecademy', 30)
    ])
    def test_likely_negatives_via_message(self, sites_info, site, random_len):
        self._assert_available_with_random_handles(sites_info, site, random_len)

    # Randomly generate usernames of high length and test for positive availability
    @pytest.mark.parametrize('site,random_len',[
        ('GitHub', 39),
        ('Docker Hub', 30)
    ])
    def test_likely_negatives_via_status_code(self, sites_info, site, random_len):
        self._assert_available_with_random_handles(sites_info, site, random_len)
|
||||
|
||||
|
||||
def test_username_illegal_regex(sites_info):
    """A handle that fails a site's regexCheck must come back ILLEGAL."""
    target_site: str = 'BitBucket'
    bad_handle: str = '*#$Y&*JRE'
    # Ensure that the username actually fails regex before testing sherlock
    site_pattern = re.compile(sites_info[target_site]['regexCheck'])
    assert site_pattern.match(bad_handle) is None
    assert simple_query(sites_info=sites_info, site=target_site, username=bad_handle) is QueryStatus.ILLEGAL
|
||||
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
import pytest
|
||||
from sherlock_project import sherlock
|
||||
from sherlock_interactives import Interactives
|
||||
from sherlock_interactives import InteractivesSubprocessError
|
||||
|
||||
def test_remove_nsfw(sites_obj):
|
||||
nsfw_target: str = 'Pornhub'
|
||||
assert nsfw_target in {site.name: site.information for site in sites_obj}
|
||||
sites_obj.remove_nsfw_sites()
|
||||
assert nsfw_target not in {site.name: site.information for site in sites_obj}
|
||||
|
||||
|
||||
# Parametrized sites should *not* include Motherless, which is acting as the control
@pytest.mark.parametrize('nsfwsites', [
    ['Pornhub'],
    ['Pornhub', 'Xvideos'],
])
def test_nsfw_explicit_selection(sites_obj, nsfwsites):
    """Sites listed in do_not_remove must survive remove_nsfw_sites(); others must not."""
    def current_names():
        # Distinct loop variable avoids shadowing the outer `site`/`preserved`
        return {entry.name: entry.information for entry in sites_obj}

    for preserved in nsfwsites:
        assert preserved in current_names()
    sites_obj.remove_nsfw_sites(do_not_remove=nsfwsites)
    for preserved in nsfwsites:
        assert preserved in current_names()
    assert 'Motherless' not in current_names()
|
||||
|
||||
def test_wildcard_username_expansion():
    """Only the literal '{?}' token triggers expansion into _, -, and . variants."""
    assert sherlock.check_for_parameter('test{?}test') is True
    for non_token_handle in ('test{.}test', 'test{}test', 'testtest', 'test{?test', 'test?}test'):
        assert sherlock.check_for_parameter(non_token_handle) is False
    assert sherlock.multiple_usernames('test{?}test') == ["test_test", "test-test", "test.test"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('cliargs', [
    '',
    '--site urghrtuight --egiotr',
    '--',
])
def test_no_usernames_provided(cliargs):
    """Invoking the CLI without positional usernames must fail with argparse's error."""
    expected_error = r"error: the following arguments are required: USERNAMES"
    with pytest.raises(InteractivesSubprocessError, match=expected_error):
        Interactives.run_cli(cliargs)
|
||||
|
|
@ -0,0 +1,100 @@
|
|||
import pytest
|
||||
import re
|
||||
import rstr
|
||||
|
||||
from sherlock_project.sherlock import sherlock
|
||||
from sherlock_project.notify import QueryNotify
|
||||
from sherlock_project.result import QueryResult, QueryStatus
|
||||
|
||||
|
||||
# Tunables for the false-positive sweep.
FALSE_POSITIVE_ATTEMPTS: int = 2  # Random usernames MAY collide with real accounts, so retry
FALSE_POSITIVE_QUANTIFIER_UPPER_BOUND: int = 15  # Cap open-ended quantifiers `+` `*` `{n,}` (0 disables)
FALSE_POSITIVE_DEFAULT_PATTERN: str = r'^[a-zA-Z0-9]{7,20}$'  # Used in absence of a regexCheck entry


def set_pattern_upper_bound(pattern: str, upper_bound: int = FALSE_POSITIVE_QUANTIFIER_UPPER_BOUND) -> str:
    """Rewrite open-ended regex quantifiers (`+`, `*`, `{n,}`) with a bounded range."""
    def _bound_open_range(match: re.Match) -> str:
        nonlocal upper_bound
        floor = int(match.group(1))
        # Never let the cap fall below a declared floor; the raised cap is
        # deliberately reused by any later substitutions in this pattern.
        upper_bound = max(upper_bound, floor)
        return f'{{{floor},{upper_bound}}}'

    bounded = re.sub(r'(?<!\\)\{(\d+),\}', _bound_open_range, pattern)  # {n,}
    bounded = re.sub(r'(?<!\\)\+', f'{{1,{upper_bound}}}', bounded)     # +
    bounded = re.sub(r'(?<!\\)\*', f'{{0,{upper_bound}}}', bounded)     # *
    return bounded
|
||||
|
||||
def false_positive_check(sites_info: dict[str, dict[str, str]], site: str, pattern: str) -> QueryStatus:
    """Check if a site is likely to produce false positives.

    Generates a username from `pattern` and probes `site`, retrying up to
    FALSE_POSITIVE_ATTEMPTS times; returns early on AVAILABLE or WAF.
    """
    status: QueryStatus = QueryStatus.UNKNOWN

    for _ in range(FALSE_POSITIVE_ATTEMPTS):
        random_username: str = rstr.xeger(pattern)

        result: QueryResult | str = sherlock(
            username=random_username,
            site_data=sites_info,
            query_notify=QueryNotify(),
        )[site]['status']

        if not hasattr(result, 'status'):
            raise TypeError(f"Result for site {site} does not have 'status' attribute. Actual result: {result}")
        if type(result.status) is not QueryStatus:
            raise TypeError(f"Result status for site {site} is not of type QueryStatus. Actual type: {type(result.status)}")
        status = result.status

        # Stop probing as soon as the site looks clean (or WAF-blocked).
        if status in (QueryStatus.AVAILABLE, QueryStatus.WAF):
            return status

    return status
|
||||
|
||||
|
||||
def false_negative_check(sites_info: dict[str, dict[str, str]], site: str) -> QueryStatus:
    """Check if a site is likely to produce false negatives.

    Probes `site` with its manifest-declared username_claimed handle and
    returns the resulting QueryStatus.
    """
    result: QueryResult | str = sherlock(
        username=sites_info[site]['username_claimed'],
        site_data=sites_info,
        query_notify=QueryNotify(),
    )[site]['status']

    if not hasattr(result, 'status'):
        raise TypeError(f"Result for site {site} does not have 'status' attribute. Actual result: {result}")
    if type(result.status) is not QueryStatus:
        raise TypeError(f"Result status for site {site} is not of type QueryStatus. Actual type: {type(result.status)}")

    return result.status
|
||||
|
||||
@pytest.mark.validate_targets
@pytest.mark.online
class Test_All_Targets:
    """Manifest-wide sweeps for false-positive / false-negative prone targets."""

    @pytest.mark.validate_targets_fp
    def test_false_pos(self, chunked_sites: dict[str, dict[str, str]]):
        """Iterate through all sites in the manifest to discover possible false-positive inducing targets."""
        for site, site_data in chunked_sites.items():
            pattern: str = site_data.get('regexCheck', FALSE_POSITIVE_DEFAULT_PATTERN)

            if FALSE_POSITIVE_QUANTIFIER_UPPER_BOUND > 0:
                pattern = set_pattern_upper_bound(pattern)

            result: QueryStatus = false_positive_check(chunked_sites, site, pattern)
            assert result is QueryStatus.AVAILABLE, f"{site} produced false positive with pattern {pattern}, result was {result}"

    @pytest.mark.validate_targets_fn
    def test_false_neg(self, chunked_sites: dict[str, dict[str, str]]):
        """Iterate through all sites in the manifest to discover possible false-negative inducing targets."""
        for site in chunked_sites:
            result: QueryStatus = false_negative_check(chunked_sites, site)
            assert result is QueryStatus.CLAIMED, f"{site} produced false negative, result was {result}"
|
||||
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
import os
|
||||
from sherlock_interactives import Interactives
|
||||
import sherlock_project
|
||||
|
||||
def test_versioning() -> None:
    """Package __version__ must match the CLI output and live only in __init__.py."""
    # Ensure __version__ matches version presented to the user
    cli_output: str = Interactives.run_cli("--version")
    assert sherlock_project.__version__ in cli_output

    # Ensure __init__ is single source of truth for __version__ in package
    found: list = Interactives.walk_sherlock_for_files_with(r'__version__ *= *')
    expected: list = [
        # Normalization is REQUIRED for Windows ( / vs \ )
        os.path.normpath("sherlock_project/__init__.py"),
    ]
    # Sorting is REQUIRED for Mac
    assert sorted(found) == sorted(expected)
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
[tox]
|
||||
requires =
|
||||
tox >= 3
|
||||
envlist =
|
||||
lint
|
||||
py313
|
||||
py312
|
||||
py311
|
||||
py310
|
||||
|
||||
[testenv]
|
||||
description = Attempt to build and install the package
|
||||
deps =
|
||||
coverage
|
||||
jsonschema
|
||||
pytest
|
||||
rstr
|
||||
allowlist_externals = coverage
|
||||
commands =
|
||||
coverage run --source=sherlock_project --module pytest -v
|
||||
coverage report --show-missing
|
||||
|
||||
[testenv:offline]
|
||||
deps =
|
||||
jsonschema
|
||||
pytest
|
||||
commands =
|
||||
pytest -v -m "not online"
|
||||
|
||||
[testenv:lint]
|
||||
description = Lint with Ruff
|
||||
deps =
|
||||
ruff
|
||||
commands =
|
||||
ruff check
|
||||
|
||||
[gh-actions]
|
||||
python =
|
||||
3.13: py313
|
||||
3.12: py312
|
||||
3.11: py311
|
||||
3.10: py310
|
||||
Loading…
Reference in New Issue