Compare commits
376 Commits
WIP/extra-
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4b4bf20a6f | ||
|
|
9407f5c4b0 | ||
|
|
8715274c9a | ||
|
|
6ef6720ec0 | ||
|
|
c0c146b2b9 | ||
|
|
64510f7e7e | ||
|
|
4603828b92 | ||
|
|
cdf7875ddb | ||
|
|
50a11a5f50 | ||
|
|
145711bdc7 | ||
|
|
0e4958aa10 | ||
|
|
99653868c7 | ||
|
|
20d08290af | ||
|
|
90883843e6 | ||
|
|
eb63741e04 | ||
|
|
bbf519e110 | ||
|
|
f233de9bca | ||
|
|
da0204c156 | ||
|
|
7ca22daa9a | ||
|
|
7ca8412cc9 | ||
|
|
bc6afa635c | ||
|
|
1627bed25c | ||
|
|
00af9f35cd | ||
|
|
95469b971f | ||
|
|
c30bea943b | ||
|
|
2bcf0bc670 | ||
|
|
15d32a5551 | ||
|
|
42ecdd9ef6 | ||
|
|
89cdbedf11 | ||
|
|
27f719078a | ||
|
|
67cc4c5275 | ||
|
|
db242fb980 | ||
|
|
9512595e21 | ||
|
|
7ee81a14e7 | ||
|
|
c63adc6678 | ||
|
|
21ab305c43 | ||
|
|
2445381b8e | ||
|
|
09efe9904e | ||
|
|
203a634f7c | ||
|
|
2186fff4dc | ||
|
|
07b6ca3222 | ||
|
|
abab9f1940 | ||
|
|
99f1e2768f | ||
|
|
2ff8155a35 | ||
|
|
c0f7ef1079 | ||
|
|
9bb8b3f46d | ||
|
|
ce9201594e | ||
|
|
0ff76de682 | ||
|
|
cee06c987a | ||
|
|
0a419045c0 | ||
|
|
ba8880cff7 | ||
|
|
a6bc8d790e | ||
|
|
8c17303179 | ||
|
|
e529a2e229 | ||
|
|
43fb27eade | ||
|
|
aea93fedeb | ||
|
|
7d4eab28b4 | ||
|
|
316e0d97fd | ||
|
|
ad9ea9137b | ||
|
|
2628655cdd | ||
|
|
7c0f750f6f | ||
|
|
0716ff5265 | ||
|
|
ff18904d6c | ||
|
|
0172de829c | ||
|
|
4fd5ff4315 | ||
|
|
8feb67de38 | ||
|
|
d3cbc50fb7 | ||
|
|
82be27c263 | ||
|
|
f0ec686f23 | ||
|
|
6773e61a89 | ||
|
|
73d1c248ca | ||
|
|
80bcd1d619 | ||
|
|
6207157da7 | ||
|
|
8013a0a445 | ||
|
|
2f1bab569d | ||
|
|
e2d83b627a | ||
|
|
03e980eafc | ||
|
|
f4196c79d4 | ||
|
|
6a04cdef46 | ||
|
|
2fe645fa37 | ||
|
|
8f552147ed | ||
|
|
a12e7c5c22 | ||
|
|
f9e6878cea | ||
|
|
50cc152491 | ||
|
|
68f95d503c | ||
|
|
fa6ef6f929 | ||
|
|
c46ec14cb1 | ||
|
|
10cbc2a738 | ||
|
|
ed1d33d81a | ||
|
|
5b71ba96c5 | ||
|
|
b55a1cc1b3 | ||
|
|
ca3f8a4235 | ||
|
|
fd49c488b1 | ||
|
|
3d6539b673 | ||
|
|
17696d80cc | ||
|
|
322ef5e0ea | ||
|
|
d826e97c4d | ||
|
|
b090dcadb1 | ||
|
|
34ca09fc20 | ||
|
|
97fd19492e | ||
|
|
a2f5bcb263 | ||
|
|
5de879be9f | ||
|
|
8a0ec5a7a7 | ||
|
|
d2f58f40b5 | ||
|
|
891982321c | ||
|
|
bf41ce0466 | ||
|
|
cb8db0b1f5 | ||
|
|
cc510bd281 | ||
|
|
c30e3a015c | ||
|
|
7125eeff5d | ||
|
|
a59c33571a | ||
|
|
26d51708eb | ||
|
|
37f711d95b | ||
|
|
9f82e19957 | ||
|
|
a5a277c20a | ||
|
|
91404d1c44 | ||
|
|
7557cb9b3a | ||
|
|
8a7e431af5 | ||
|
|
70724be65a | ||
|
|
7e27143a90 | ||
|
|
e5738d1f5b | ||
|
|
69abef34c5 | ||
|
|
32138847dd | ||
|
|
22cf1244c7 | ||
|
|
1478fa738d | ||
|
|
de75d1c71f | ||
|
|
390b8b9df7 | ||
|
|
42d6e0394f | ||
|
|
d9135883de | ||
|
|
c9e57400fd | ||
|
|
e255c358cb | ||
|
|
e6eb91a33a | ||
|
|
e3214660d0 | ||
|
|
bb6845d044 | ||
|
|
fd0b1e7d7f | ||
|
|
9a59d12a00 | ||
|
|
20ef316081 | ||
|
|
45bf452f17 | ||
|
|
7234e11e87 | ||
|
|
9609cd3701 | ||
|
|
031b0e37bb | ||
|
|
f1fff427f8 | ||
|
|
584c145b15 | ||
|
|
d9e7bb57f3 | ||
|
|
c9631e6848 | ||
|
|
06965b0e82 | ||
|
|
be741a1087 | ||
|
|
52b3960ce3 | ||
|
|
d7bcbcb5b4 | ||
|
|
4d54cdccac | ||
|
|
da75fb25d7 | ||
|
|
42023039c1 | ||
|
|
affec03d6c | ||
|
|
ecaffda777 | ||
|
|
0d484e3ffa | ||
|
|
38d6b5b97e | ||
|
|
728a8237aa | ||
|
|
24ca677021 | ||
|
|
ed53c29a55 | ||
|
|
c4e29fda0f | ||
|
|
ff65696729 | ||
|
|
2304aaf17c | ||
|
|
d43a05a0ed | ||
|
|
ada1dccc5b | ||
|
|
7a8e694abc | ||
|
|
c764bbfcd4 | ||
|
|
523419df11 | ||
|
|
62a213d74d | ||
|
|
7fc8ba4c15 | ||
|
|
f63418039a | ||
|
|
3f80d58085 | ||
|
|
ee74c5866a | ||
|
|
41cd379805 | ||
|
|
0c5dbd66a3 | ||
|
|
18f72788aa | ||
|
|
cd3ab4a264 | ||
|
|
ab66def695 | ||
|
|
68778d3824 | ||
|
|
43851ae0fb | ||
|
|
91a6e6221c | ||
|
|
7f2da1905c | ||
|
|
7583843e80 | ||
|
|
e77075764e | ||
|
|
195577fe0c | ||
|
|
29398665b0 | ||
|
|
1780b2323d | ||
|
|
1e6802afbf | ||
|
|
2cd68c5b98 | ||
|
|
ac3a70ae0d | ||
|
|
761f9dab81 | ||
|
|
64fbcb3f7d | ||
|
|
2fb7dc9a2b | ||
|
|
0a1023ce19 | ||
|
|
f4dd5d7a31 | ||
|
|
aff5ea5f52 | ||
|
|
4c4813620d | ||
|
|
b9bc24156b | ||
|
|
13d0e4ac9f | ||
|
|
01fb32e43c | ||
|
|
ec30ef7b8b | ||
|
|
af70930be2 | ||
|
|
55299f001f | ||
|
|
7e51239c8d | ||
|
|
cb6a008680 | ||
|
|
55f30f5537 | ||
|
|
cc3ca64f25 | ||
|
|
8c0cf5f870 | ||
|
|
2f46de124d | ||
|
|
0db0b044b2 | ||
|
|
299925d22e | ||
|
|
db9b69fac3 | ||
|
|
be307e6876 | ||
|
|
e44f8e73aa | ||
|
|
9c4335f2af | ||
|
|
44cbe47983 | ||
|
|
5924d89f54 | ||
|
|
b5ec08da8b | ||
|
|
184b962731 | ||
|
|
d6035b8e9c | ||
|
|
2988486a65 | ||
|
|
5616b71564 | ||
|
|
5348175b5e | ||
|
|
f9b4edda01 | ||
|
|
50590334be | ||
|
|
33a35b94f5 | ||
|
|
a6711aeb63 | ||
|
|
f36ac56370 | ||
|
|
09e5b5d888 | ||
|
|
ad57aaa7f8 | ||
|
|
2bce29e3cb | ||
|
|
6b9aad81fd | ||
|
|
afc5b54207 | ||
|
|
71ce9a6623 | ||
|
|
d8bb822a4e | ||
|
|
5297b2ffe7 | ||
|
|
79c88a5d9a | ||
|
|
0e022f97a2 | ||
|
|
0d4942738d | ||
|
|
32d5962dc3 | ||
|
|
8a7b024e99 | ||
|
|
c169a3762d | ||
|
|
d26f5b26a7 | ||
|
|
c2a937ac8e | ||
|
|
59203acdfa | ||
|
|
63db1dbd85 | ||
|
|
e23347a936 | ||
|
|
d41af54513 | ||
|
|
73c44e39de | ||
|
|
7dd398a9c3 | ||
|
|
1a95a42853 | ||
|
|
3695c82472 | ||
|
|
6a4eb5aa8e | ||
|
|
1630f2a050 | ||
|
|
b1f8c0144b | ||
|
|
76cce7ef9a | ||
|
|
393dafbf84 | ||
|
|
6bd353273a | ||
|
|
8a60b77135 | ||
|
|
ab59afc150 | ||
|
|
b314168da1 | ||
|
|
33e1adb974 | ||
|
|
8688fd23f5 | ||
|
|
cd2681fd84 | ||
|
|
0cb9cedd8c | ||
|
|
20762dc3ad | ||
|
|
359c6ca476 | ||
|
|
f7573572e5 | ||
|
|
975c73fd2b | ||
|
|
acf4f90aee | ||
|
|
daf6850052 | ||
|
|
3aa385cf41 | ||
|
|
394b68fa29 | ||
|
|
6d8b2368e7 | ||
|
|
fbe774829d | ||
|
|
08eadbe02c | ||
|
|
273e2539cc | ||
|
|
6ba75cebf2 | ||
|
|
98a3243846 | ||
|
|
f9530f8b03 | ||
|
|
5145b661f4 | ||
|
|
54dc70e6b2 | ||
|
|
b68447ed60 | ||
|
|
94281bef08 | ||
|
|
56aac52f6b | ||
|
|
e4c99c755e | ||
|
|
7559716c5d | ||
|
|
63d09738d2 | ||
|
|
5496fb6e5c | ||
|
|
95a63825f0 | ||
|
|
fe6931efc6 | ||
|
|
db64e5a3d4 | ||
|
|
a779057762 | ||
|
|
52cb0fd618 | ||
|
|
0194ada819 | ||
|
|
d805848dd7 | ||
|
|
749a61358c | ||
|
|
b6b0c25966 | ||
|
|
09f5af26df | ||
|
|
805cc41bce | ||
|
|
4bd3085fe9 | ||
|
|
823873bce2 | ||
|
|
49cfad2dbe | ||
|
|
b6dfd3321a | ||
|
|
57481c8757 | ||
|
|
d13db8d438 | ||
|
|
5e755c1dc2 | ||
|
|
93aa496a30 | ||
|
|
5dc9a82718 | ||
|
|
86bb64a4d0 | ||
|
|
f573faf304 | ||
|
|
737639ae84 | ||
|
|
8ca747c02f | ||
|
|
759bb603df | ||
|
|
83c8d311b3 | ||
|
|
93ed8d6c44 | ||
|
|
981f79f676 | ||
|
|
55d59c2d07 | ||
|
|
502b9fd7f4 | ||
|
|
dbcbd36874 | ||
|
|
519d2f0f79 | ||
|
|
6b8c50a9aa | ||
|
|
e24934b7dd | ||
|
|
645dcf229f | ||
|
|
8ce46fbf89 | ||
|
|
fac47e27c6 | ||
|
|
2bf7454950 | ||
|
|
8d4c09ffd9 | ||
|
|
c55b23086d | ||
|
|
e2ec9b2f62 | ||
|
|
07656c6fea | ||
|
|
fe1e74a22f | ||
|
|
cb0143de40 | ||
|
|
9430fc7913 | ||
|
|
d3fa33b104 | ||
|
|
a8eadf40b0 | ||
|
|
9b9c31674d | ||
|
|
d38f2ae384 | ||
|
|
56ee47fef4 | ||
|
|
d63f891667 | ||
|
|
8263b9b7fd | ||
|
|
b0008823da | ||
|
|
1e8d6e868c | ||
|
|
efba42d59d | ||
|
|
15e5ba3cfc | ||
|
|
e47b39041b | ||
|
|
6a31927562 | ||
|
|
18faeb631b | ||
|
|
e1d9b13045 | ||
|
|
c5d8cd1641 | ||
|
|
5f3a99f2b9 | ||
|
|
0fd0e537f7 | ||
|
|
20cc52a304 | ||
|
|
361c65348d | ||
|
|
6cb133a46a | ||
|
|
ca5d43cea1 | ||
|
|
9e426ed55e | ||
|
|
f552e5cb69 | ||
|
|
95b13240c7 | ||
|
|
f96c7ba25f | ||
|
|
976ca7d47a | ||
|
|
af1689bd85 | ||
|
|
d2a56eb526 | ||
|
|
85af5f9327 | ||
|
|
8624237760 | ||
|
|
f0ff33e081 | ||
|
|
65ff004b63 | ||
|
|
127db45247 | ||
|
|
30c5dbb898 | ||
|
|
42eea33809 | ||
|
|
c46fed5ebb | ||
|
|
57fadde151 | ||
|
|
af409245fb | ||
|
|
77d4ca26a4 | ||
|
|
22995995d0 | ||
|
|
4d69848350 | ||
|
|
d03acb8a3c |
31
.env
@@ -2,11 +2,26 @@
|
||||
# Be sure to uncomment any line you populate
|
||||
# Everything is optional, but some features won't work without external API access
|
||||
|
||||
# GOOGLE_CLOUD_API_KEY=''
|
||||
# SHODAN_API_KEY=''
|
||||
# REACT_APP_SHODAN_API_KEY=''
|
||||
# WHO_API_KEY=''
|
||||
# REACT_APP_WHO_API_KEY=''
|
||||
# SECURITY_TRAILS_API_KEY=''
|
||||
# BUILT_WITH_API_KEY=''
|
||||
# CI=false
|
||||
# API Keys for external services (backend)
|
||||
GOOGLE_CLOUD_API_KEY=''
|
||||
TORRENT_IP_API_KEY=''
|
||||
SECURITY_TRAILS_API_KEY=''
|
||||
BUILT_WITH_API_KEY=''
|
||||
URL_SCAN_API_KEY=''
|
||||
TRANCO_USERNAME=''
|
||||
TRANCO_API_KEY=''
|
||||
CLOUDMERSIVE_API_KEY=''
|
||||
|
||||
# API Keys for external services (frontend)
|
||||
REACT_APP_SHODAN_API_KEY=''
|
||||
REACT_APP_WHO_API_KEY=''
|
||||
|
||||
# Configuration settings
|
||||
# CHROME_PATH='/usr/bin/chromium' # The path the the Chromium executable
|
||||
# PORT='3000' # Port to serve the API, when running server.js
|
||||
# DISABLE_GUI='false' # Disable the GUI, and only serve the API
|
||||
# API_TIMEOUT_LIMIT='10000' # The timeout limit for API requests, in milliseconds
|
||||
# API_CORS_ORIGIN='*' # Enable CORS, by setting your allowed hostname(s) here
|
||||
# API_ENABLE_RATE_LIMIT='true' # Enable rate limiting for the API
|
||||
# REACT_APP_API_ENDPOINT='/api' # The endpoint for the API (can be local or remote)
|
||||
# ENABLE_ANALYTICS='false' # Enable Plausible hit counter for the frontend
|
||||
|
||||
1148
.github/README.md
vendored
1
.github/screenshots/README.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||

|
||||
BIN
.github/screenshots/tiles/archives.png
vendored
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
.github/screenshots/tiles/block-lists.png
vendored
Normal file
|
After Width: | Height: | Size: 95 KiB |
BIN
.github/screenshots/tiles/carbon.png
vendored
Normal file
|
After Width: | Height: | Size: 40 KiB |
|
Before Width: | Height: | Size: 35 KiB After Width: | Height: | Size: 35 KiB |
BIN
.github/screenshots/tiles/dns-server.png
vendored
Normal file
|
After Width: | Height: | Size: 40 KiB |
|
Before Width: | Height: | Size: 53 KiB After Width: | Height: | Size: 53 KiB |
|
Before Width: | Height: | Size: 165 KiB After Width: | Height: | Size: 165 KiB |
|
Before Width: | Height: | Size: 44 KiB After Width: | Height: | Size: 44 KiB |
BIN
.github/screenshots/tiles/email-config.png
vendored
Normal file
|
After Width: | Height: | Size: 67 KiB |
|
Before Width: | Height: | Size: 73 KiB After Width: | Height: | Size: 73 KiB |
BIN
.github/screenshots/tiles/firewall.png
vendored
Normal file
|
After Width: | Height: | Size: 12 KiB |
|
Before Width: | Height: | Size: 105 KiB After Width: | Height: | Size: 105 KiB |
|
Before Width: | Height: | Size: 24 KiB After Width: | Height: | Size: 24 KiB |
BIN
.github/screenshots/tiles/hsts.png
vendored
Normal file
|
After Width: | Height: | Size: 27 KiB |
BIN
.github/screenshots/tiles/http-security.png
vendored
Normal file
|
After Width: | Height: | Size: 37 KiB |
BIN
.github/screenshots/tiles/linked-pages.png
vendored
Normal file
|
After Width: | Height: | Size: 64 KiB |
|
Before Width: | Height: | Size: 94 KiB After Width: | Height: | Size: 94 KiB |
|
Before Width: | Height: | Size: 15 KiB After Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 158 KiB After Width: | Height: | Size: 158 KiB |
BIN
.github/screenshots/tiles/ranking.png
vendored
Normal file
|
After Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 23 KiB After Width: | Height: | Size: 23 KiB |
|
Before Width: | Height: | Size: 114 KiB After Width: | Height: | Size: 114 KiB |
BIN
.github/screenshots/tiles/screenshot.png
vendored
Normal file
|
After Width: | Height: | Size: 79 KiB |
BIN
.github/screenshots/tiles/security-txt.png
vendored
Normal file
|
After Width: | Height: | Size: 63 KiB |
|
Before Width: | Height: | Size: 28 KiB After Width: | Height: | Size: 28 KiB |
BIN
.github/screenshots/tiles/sitemap.png
vendored
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
.github/screenshots/tiles/social-tags.png
vendored
Normal file
|
After Width: | Height: | Size: 207 KiB |
|
Before Width: | Height: | Size: 46 KiB After Width: | Height: | Size: 46 KiB |
|
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
BIN
.github/screenshots/tiles/tech-stack.png
vendored
Normal file
|
After Width: | Height: | Size: 146 KiB |
BIN
.github/screenshots/tiles/threats.png
vendored
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
.github/screenshots/tiles/tls-cipher-suites.png
vendored
Normal file
|
After Width: | Height: | Size: 64 KiB |
BIN
.github/screenshots/tiles/tls-handshake-simulation.png
vendored
Normal file
|
After Width: | Height: | Size: 90 KiB |
BIN
.github/screenshots/tiles/tls-security-config.png
vendored
Normal file
|
After Width: | Height: | Size: 127 KiB |
|
Before Width: | Height: | Size: 54 KiB After Width: | Height: | Size: 54 KiB |
|
Before Width: | Height: | Size: 123 KiB After Width: | Height: | Size: 123 KiB |
BIN
.github/screenshots/wc_carbon.png
vendored
|
Before Width: | Height: | Size: 31 KiB |
BIN
.github/screenshots/wc_dnssec-2.png
vendored
|
Before Width: | Height: | Size: 46 KiB |
BIN
.github/screenshots/wc_features-2.png
vendored
|
Before Width: | Height: | Size: 132 KiB |
BIN
.github/screenshots/web-check-screenshot1.png
vendored
Normal file
|
After Width: | Height: | Size: 3.0 MiB |
BIN
.github/screenshots/web-check-screenshot10.png
vendored
Normal file
|
After Width: | Height: | Size: 1.4 MiB |
BIN
.github/screenshots/web-check-screenshot2.png
vendored
Normal file
|
After Width: | Height: | Size: 1.7 MiB |
BIN
.github/screenshots/web-check-screenshot3.png
vendored
Normal file
|
After Width: | Height: | Size: 2.6 MiB |
BIN
.github/screenshots/web-check-screenshot4.png
vendored
Normal file
|
After Width: | Height: | Size: 810 KiB |
|
Before Width: | Height: | Size: 2.0 MiB After Width: | Height: | Size: 2.0 MiB |
37
.github/workflows/credits.yml
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
# Inserts list of community members into ./README.md
|
||||
name: 💓 Inserts Contributors & Sponsors
|
||||
on:
|
||||
workflow_dispatch: # Manual dispatch
|
||||
schedule:
|
||||
- cron: '45 1 * * 0' # At 01:45 on Sunday.
|
||||
|
||||
jobs:
|
||||
# Job #1 - Fetches sponsors and inserts table into readme
|
||||
insert-sponsors:
|
||||
runs-on: ubuntu-latest
|
||||
name: Inserts Sponsors 💓
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Updates readme with sponsors
|
||||
uses: JamesIves/github-sponsors-readme-action@v1
|
||||
with:
|
||||
token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
file: .github/README.md
|
||||
|
||||
# Job #2 - Fetches contributors and inserts table into readme
|
||||
insert-contributors:
|
||||
runs-on: ubuntu-latest
|
||||
name: Inserts Contributors 💓
|
||||
steps:
|
||||
- name: Updates readme with contributors
|
||||
uses: akhilmhdh/contributors-readme-action@v2.3.10
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
image_size: 80
|
||||
readme_path: .github/README.md
|
||||
columns_per_row: 6
|
||||
commit_message: 'docs: Updates contributors list'
|
||||
committer_username: liss-bot
|
||||
committer_email: liss-bot@d0h.co
|
||||
128
.github/workflows/deploy-aws.yml
vendored
Normal file
@@ -0,0 +1,128 @@
|
||||
name: 🚀 Deploy to AWS
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- '*'
|
||||
paths:
|
||||
- api/**
|
||||
- serverless.yml
|
||||
- package.json
|
||||
- .github/workflows/deploy-aws.yml
|
||||
|
||||
jobs:
|
||||
deploy-api:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 16
|
||||
|
||||
- name: Cache node_modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-yarn-
|
||||
|
||||
- name: Create GitHub deployment for API
|
||||
uses: chrnorm/deployment-action@releases/v2
|
||||
id: deployment_api
|
||||
with:
|
||||
token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
environment: AWS (Backend API)
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
- name: Install Serverless CLI and dependencies
|
||||
run: |
|
||||
npm i -g serverless
|
||||
yarn
|
||||
|
||||
- name: Deploy to AWS
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
run: serverless deploy
|
||||
|
||||
- name: Update GitHub deployment status (API)
|
||||
if: always()
|
||||
uses: chrnorm/deployment-status@v2
|
||||
with:
|
||||
token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
state: "${{ job.status }}"
|
||||
deployment_id: ${{ steps.deployment_api.outputs.deployment_id }}
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
deploy-frontend:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 16
|
||||
|
||||
- name: Cache node_modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: node_modules
|
||||
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-yarn-
|
||||
|
||||
- name: Create GitHub deployment for Frontend
|
||||
uses: chrnorm/deployment-action@v2
|
||||
id: deployment_frontend
|
||||
with:
|
||||
token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
environment: AWS (Frontend Web UI)
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
- name: Install dependencies and build
|
||||
run: |
|
||||
yarn install
|
||||
yarn build
|
||||
|
||||
- name: Setup AWS
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Upload to S3
|
||||
env:
|
||||
AWS_S3_BUCKET: 'web-check-frontend'
|
||||
run: aws s3 sync ./build/ s3://$AWS_S3_BUCKET/ --delete
|
||||
|
||||
- name: Invalidate CloudFront cache
|
||||
uses: chetan/invalidate-cloudfront-action@v2
|
||||
env:
|
||||
DISTRIBUTION: E30XKAM2TG9FD8
|
||||
PATHS: '/*'
|
||||
AWS_REGION: 'us-east-1'
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
||||
- name: Update GitHub deployment status (Frontend)
|
||||
if: always()
|
||||
uses: chrnorm/deployment-status@v2
|
||||
with:
|
||||
token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
state: "${{ job.status }}"
|
||||
deployment_id: ${{ steps.deployment_frontend.outputs.deployment_id }}
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
18
.github/workflows/docker.yml
vendored
@@ -23,14 +23,14 @@ jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
- name: Checkout 🛎️
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Extract tag name
|
||||
- name: Extract tag name 🏷️
|
||||
shell: bash
|
||||
run: echo "GIT_TAG=$(echo ${GITHUB_REF#refs/tags/} | sed 's/\//_/g')" >> $GITHUB_ENV
|
||||
|
||||
- name: Compute tags
|
||||
- name: Compute tags 🔖
|
||||
id: compute-tags
|
||||
run: |
|
||||
if [[ "${{ github.ref }}" == "refs/heads/master" ]]; then
|
||||
@@ -41,33 +41,33 @@ jobs:
|
||||
echo "DOCKERHUB_TAG=${DOCKERHUB_REGISTRY}/${DOCKER_USER}/${IMAGE_NAME}:${GIT_TAG}" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Set up QEMU
|
||||
- name: Set up QEMU 🐧
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
- name: Set up Docker Buildx 🐳
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
- name: Login to GitHub Container Registry 🔑
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ${{ env.GHCR_REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Login to DockerHub
|
||||
- name: Login to DockerHub 🔑
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ${{ env.DOCKERHUB_REGISTRY }}
|
||||
username: ${{ env.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
|
||||
- name: Build and push Docker images
|
||||
- name: Build and push Docker images 🛠️
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
push: true
|
||||
platforms: linux/amd64
|
||||
platforms: linux/amd64,linux/arm64/v8
|
||||
tags: |
|
||||
${{ env.GHCR_TAG }}
|
||||
${{ env.DOCKERHUB_TAG }}
|
||||
|
||||
2
.github/workflows/mirror.yml
vendored
@@ -8,7 +8,7 @@ jobs:
|
||||
codeberg:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
with: { fetch-depth: 0 }
|
||||
- uses: pixta-dev/repository-mirroring-action@v1
|
||||
with:
|
||||
|
||||
72
.gitignore
vendored
@@ -1,28 +1,62 @@
|
||||
|
||||
# Keys
|
||||
# ------------------------
|
||||
# ENVIRONMENT SETTINGS
|
||||
# ------------------------
|
||||
.env
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
.pnp.js
|
||||
# ------------------------
|
||||
# PRODUCTION
|
||||
# ------------------------
|
||||
/build/
|
||||
|
||||
# testing
|
||||
/coverage
|
||||
# ------------------------
|
||||
# BUILT FILES
|
||||
# ------------------------
|
||||
dist/
|
||||
.vercel/
|
||||
.netlify/
|
||||
.webpack/
|
||||
.serverless/
|
||||
.astro/
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
# ------------------------
|
||||
# DEPENDENCIES
|
||||
# ------------------------
|
||||
node_modules/
|
||||
.yarn/cache/
|
||||
.yarn/unplugged/
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnpm/
|
||||
.pnp.*
|
||||
|
||||
# ------------------------
|
||||
# LOGS
|
||||
# ------------------------
|
||||
logs/
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# ------------------------
|
||||
# TESTING
|
||||
# ------------------------
|
||||
coverage/
|
||||
.nyc_output/
|
||||
|
||||
# ------------------------
|
||||
# OS SPECIFIC
|
||||
# ------------------------
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# ------------------------
|
||||
# EDITORS
|
||||
# ------------------------
|
||||
.idea/
|
||||
.vscode/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# Local Netlify folder
|
||||
.netlify
|
||||
|
||||
66
Dockerfile
@@ -1,12 +1,62 @@
|
||||
FROM node:16-buster-slim AS base
|
||||
# Specify the Node.js version to use
|
||||
ARG NODE_VERSION=21
|
||||
|
||||
# Specify the Debian version to use, the default is "bullseye"
|
||||
ARG DEBIAN_VERSION=bullseye
|
||||
|
||||
# Use Node.js Docker image as the base image, with specific Node and Debian versions
|
||||
FROM node:${NODE_VERSION}-${DEBIAN_VERSION} AS build
|
||||
|
||||
# Set the container's default shell to Bash and enable some options
|
||||
SHELL ["/bin/bash", "-euo", "pipefail", "-c"]
|
||||
|
||||
# Install Chromium browser and Download and verify Google Chrome’s signing key
|
||||
RUN apt-get update -qq --fix-missing && \
|
||||
apt-get -qqy install --allow-unauthenticated gnupg wget && \
|
||||
wget --quiet --output-document=- https://dl-ssl.google.com/linux/linux_signing_key.pub | gpg --dearmor > /etc/apt/trusted.gpg.d/google-archive.gpg && \
|
||||
echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list && \
|
||||
apt-get update -qq && \
|
||||
apt-get -qqy --no-install-recommends install chromium traceroute python make g++ && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Run the Chromium browser's version command and redirect its output to the /etc/chromium-version file
|
||||
RUN /usr/bin/chromium --no-sandbox --version > /etc/chromium-version
|
||||
|
||||
# Set the working directory to /app
|
||||
WORKDIR /app
|
||||
FROM base AS builder
|
||||
COPY . .
|
||||
|
||||
# Copy package.json and yarn.lock to the working directory
|
||||
COPY package.json yarn.lock ./
|
||||
|
||||
# Run yarn install to install dependencies and clear yarn cache
|
||||
RUN apt-get update && \
|
||||
apt-get install -y chromium traceroute && \
|
||||
yarn install --frozen-lockfile --network-timeout 100000 && \
|
||||
rm -rf /app/node_modules/.cache
|
||||
|
||||
# Copy all files to working directory
|
||||
COPY . .
|
||||
|
||||
# Run yarn build to build the application
|
||||
RUN yarn build --production
|
||||
|
||||
# Final stage
|
||||
FROM node:${NODE_VERSION}-${DEBIAN_VERSION} AS final
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json yarn.lock ./
|
||||
COPY --from=build /app .
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends chromium traceroute && \
|
||||
chmod 755 /usr/bin/chromium && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
RUN npm install --force
|
||||
EXPOSE 8888
|
||||
rm -rf /var/lib/apt/lists/* /app/node_modules/.cache
|
||||
|
||||
# Exposed container port, the default is 3000, which can be modified through the environment variable PORT
|
||||
EXPOSE ${PORT:-3000}
|
||||
|
||||
# Set the environment variable CHROME_PATH to specify the path to the Chromium binaries
|
||||
ENV CHROME_PATH='/usr/bin/chromium'
|
||||
CMD ["npm", "run", "serve"]
|
||||
|
||||
# Define the command executed when the container starts and start the server.js of the Node.js application
|
||||
CMD ["yarn", "start"]
|
||||
|
||||
51
api/_common/aws-webpack.config.js
Normal file
@@ -0,0 +1,51 @@
|
||||
const path = require('path');
|
||||
const nodeExternals = require('webpack-node-externals');
|
||||
|
||||
module.exports = {
|
||||
target: 'node',
|
||||
mode: 'production',
|
||||
entry: {
|
||||
'carbon': './api/carbon.js',
|
||||
'cookies': './api/cookies.js',
|
||||
'dns-server': './api/dns-server.js',
|
||||
'dns': './api/dns.js',
|
||||
'dnssec': './api/dnssec.js',
|
||||
'features': './api/features.js',
|
||||
'get-ip': './api/get-ip.js',
|
||||
'headers': './api/headers.js',
|
||||
'hsts': './api/hsts.js',
|
||||
'linked-pages': './api/linked-pages.js',
|
||||
'mail-config': './api/mail-config.js',
|
||||
'ports': './api/ports.js',
|
||||
'quality': './api/quality.js',
|
||||
'redirects': './api/redirects.js',
|
||||
'robots-txt': './api/robots-txt.js',
|
||||
'screenshot': './api/screenshot.js',
|
||||
'security-txt': './api/security-txt.js',
|
||||
'sitemap': './api/sitemap.js',
|
||||
'social-tags': './api/social-tags.js',
|
||||
'ssl': './api/ssl.js',
|
||||
'status': './api/status.js',
|
||||
'tech-stack': './api/tech-stack.js',
|
||||
'trace-route': './api/trace-route.js',
|
||||
'txt-records': './api/txt-records.js',
|
||||
'whois': './api/whois.js',
|
||||
},
|
||||
externals: [nodeExternals()],
|
||||
output: {
|
||||
filename: '[name].js',
|
||||
path: path.resolve(__dirname, '.webpack'),
|
||||
libraryTarget: 'commonjs2'
|
||||
},
|
||||
module: {
|
||||
rules: [
|
||||
{
|
||||
test: /\.js$/,
|
||||
use: {
|
||||
loader: 'babel-loader'
|
||||
},
|
||||
exclude: /node_modules/,
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
155
api/_common/middleware.js
Normal file
@@ -0,0 +1,155 @@
|
||||
const normalizeUrl = (url) => {
|
||||
return url.startsWith('http') ? url : `https://${url}`;
|
||||
};
|
||||
|
||||
// If present, set a shorter timeout for API requests
|
||||
const TIMEOUT = process.env.API_TIMEOUT_LIMIT ? parseInt(process.env.API_TIMEOUT_LIMIT, 10) : 60000;
|
||||
|
||||
// If present, set CORS allowed origins for responses
|
||||
const ALLOWED_ORIGINS = process.env.API_CORS_ORIGIN || '*';
|
||||
|
||||
// Disable everything :( Setting this env var will turn off the instance, and show message
|
||||
const DISABLE_EVERYTHING = !!process.env.VITE_DISABLE_EVERYTHING;
|
||||
|
||||
// Set the platform currently being used
|
||||
let PLATFORM = 'NETLIFY';
|
||||
if (process.env.PLATFORM) { PLATFORM = process.env.PLATFORM.toUpperCase(); }
|
||||
else if (process.env.VERCEL) { PLATFORM = 'VERCEL'; }
|
||||
else if (process.env.WC_SERVER) { PLATFORM = 'NODE'; }
|
||||
|
||||
// Define the headers to be returned with each response
|
||||
const headers = {
|
||||
'Access-Control-Allow-Origin': ALLOWED_ORIGINS,
|
||||
'Access-Control-Allow-Credentials': true,
|
||||
'Content-Type': 'application/json;charset=UTF-8',
|
||||
};
|
||||
|
||||
const timeoutErrorMsg = 'You can re-trigger this request, by clicking "Retry"\n'
|
||||
+ 'If you\'re running your own instance of Web Check, then you can '
|
||||
+ 'resolve this issue, by increasing the timeout limit in the '
|
||||
+ '`API_TIMEOUT_LIMIT` environmental variable to a higher value (in milliseconds), '
|
||||
+ 'or if you\'re hosting on Vercel increase the maxDuration in vercel.json.\n\n'
|
||||
+ `The public instance currently has a lower timeout of ${TIMEOUT}ms `
|
||||
+ 'in order to keep running costs affordable, so that Web Check can '
|
||||
+ 'remain freely available for everyone.';
|
||||
|
||||
const disabledErrorMsg = 'Error - WebCheck Temporarily Disabled.\n\n'
|
||||
+ 'We\'re sorry, but due to the increased cost of running Web Check '
|
||||
+ 'we\'ve had to temporatily disable the public instand. '
|
||||
+ 'We\'re activley looking for affordable ways to keep Web Check running, '
|
||||
+ 'while free to use for everybody.\n'
|
||||
+ 'In the meantime, since we\'ve made our code free and open source, '
|
||||
+ 'you can get Web Check running on your own system, by following the instructions in our GitHub repo';
|
||||
|
||||
// A middleware function used by all API routes on all platforms
|
||||
const commonMiddleware = (handler) => {
|
||||
|
||||
// Create a timeout promise, to throw an error if a request takes too long
|
||||
const createTimeoutPromise = (timeoutMs) => {
|
||||
return new Promise((_, reject) => {
|
||||
setTimeout(() => {
|
||||
reject(new Error(`Request timed-out after ${timeoutMs} ms`));
|
||||
}, timeoutMs);
|
||||
});
|
||||
};
|
||||
|
||||
// Vercel
|
||||
const vercelHandler = async (request, response) => {
|
||||
|
||||
if (DISABLE_EVERYTHING) {
|
||||
response.status(503).json({ error: disabledErrorMsg });
|
||||
}
|
||||
|
||||
const queryParams = request.query || {};
|
||||
const rawUrl = queryParams.url;
|
||||
|
||||
if (!rawUrl) {
|
||||
return response.status(500).json({ error: 'No URL specified' });
|
||||
}
|
||||
|
||||
const url = normalizeUrl(rawUrl);
|
||||
|
||||
try {
|
||||
// Race the handler against the timeout
|
||||
const handlerResponse = await Promise.race([
|
||||
handler(url, request),
|
||||
createTimeoutPromise(TIMEOUT)
|
||||
]);
|
||||
|
||||
if (handlerResponse.body && handlerResponse.statusCode) {
|
||||
response.status(handlerResponse.statusCode).json(handlerResponse.body);
|
||||
} else {
|
||||
response.status(200).json(
|
||||
typeof handlerResponse === 'object' ? handlerResponse : JSON.parse(handlerResponse)
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
let errorCode = 500;
|
||||
if (error.message.includes('timed-out') || response.statusCode === 504) {
|
||||
errorCode = 408;
|
||||
error.message = `${error.message}\n\n${timeoutErrorMsg}`;
|
||||
}
|
||||
response.status(errorCode).json({ error: error.message });
|
||||
}
|
||||
};
|
||||
|
||||
// Netlify
|
||||
// Netlify-style handler using the AWS Lambda (event, context, callback)
// signature. Behaviorally identical to the original; the repeated
// callback responses are routed through a small local helper.
const netlifyHandler = async (event, context, callback) => {
  // Uniform JSON response via the Lambda callback.
  const respond = (statusCode, bodyObject) => {
    callback(null, {
      statusCode,
      body: JSON.stringify(bodyObject),
      headers,
    });
  };

  const queryParams = event.queryStringParameters || event.query || {};
  const rawUrl = queryParams.url;

  if (DISABLE_EVERYTHING) {
    respond(503, { error: 'Web-Check is temporarily disabled. Please try again later.' });
    return;
  }

  if (!rawUrl) {
    respond(500, { error: 'No URL specified' });
    return;
  }

  const url = normalizeUrl(rawUrl);

  try {
    // Race the handler against the timeout
    const handlerResponse = await Promise.race([
      handler(url, event, context),
      createTimeoutPromise(TIMEOUT),
    ]);

    if (handlerResponse.body && handlerResponse.statusCode) {
      // Already a fully-formed Lambda response — pass it straight through.
      callback(null, handlerResponse);
    } else {
      callback(null, {
        statusCode: 200,
        body: typeof handlerResponse === 'object' ? JSON.stringify(handlerResponse) : handlerResponse,
        headers,
      });
    }
  } catch (error) {
    respond(500, { error: error.message });
  }
};
|
||||
|
||||
// The format of the handlers varies between platforms
|
||||
const nativeMode = (['VERCEL', 'NODE'].includes(PLATFORM));
|
||||
return nativeMode ? vercelHandler : netlifyHandler;
|
||||
};
|
||||
|
||||
if (PLATFORM === 'NETLIFY') {
|
||||
module.exports = commonMiddleware;
|
||||
}
|
||||
|
||||
export default commonMiddleware;
|
||||
84
api/archives.js
Normal file
@@ -0,0 +1,84 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Parses a Wayback Machine CDX timestamp string (YYYYMMDDhhmmss) into a
// local-time Date.
const convertTimestampToDate = (timestamp) => {
  // Numeric slice helper over the fixed-width timestamp.
  const part = (start, end) => parseInt(timestamp.slice(start, end), 10);
  return new Date(
    part(0, 4),      // year
    part(4, 6) - 1,  // month — JS Date months are 0-indexed
    part(6, 8),      // day
    part(8, 10),     // hour
    part(10, 12),    // minute
    part(12, 14),    // second
  );
};
|
||||
|
||||
// Counts how many times the archived page content changed across scans.
// Each CDX row's digest (index 2) identifies the content; the first
// snapshot only sets the baseline, hence the counter starts at -1
// (an empty list yields -1, a single snapshot yields 0).
const countPageChanges = (results) => {
  let changes = -1;
  let lastDigest = null;
  for (const row of results) {
    if (row[2] !== lastDigest) {
      lastDigest = row[2];
      changes += 1;
    }
  }
  return changes;
};
|
||||
|
||||
// Returns the mean page size (bytes, rounded) across the given CDX scan
// rows, where index 3 is the recorded length.
// Fixes: the original returned NaN for an empty list (0/0) and let a
// single non-numeric length field (CDX can report '-') poison the whole
// average with NaN.
const getAveragePageSize = (scans) => {
  const sizes = scans
    .map((scan) => parseInt(scan[3], 10))
    .filter((size) => Number.isFinite(size)); // drop '-' / malformed rows
  if (sizes.length === 0) return 0;
  const totalSize = sizes.reduce((sum, size) => sum + size, 0);
  return Math.round(totalSize / sizes.length);
};
|
||||
|
||||
// Derives scan/change frequency stats from the first and last scan Dates,
// the total number of scans and the number of content changes. All
// figures are rounded to two decimal places.
const getScanFrequency = (firstScan, lastScan, totalScans, changeCount) => {
  const round2 = (value) => parseFloat(value.toFixed(2));
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const days = (lastScan - firstScan) / MS_PER_DAY;

  return {
    daysBetweenScans: round2(days / totalScans),
    daysBetweenChanges: round2(days / changeCount),
    scansPerDay: round2((totalScans - 1) / days),
    changesPerDay: round2(changeCount / days),
  };
};
|
||||
|
||||
// Fetches the Wayback Machine CDX index for a URL and summarizes the
// site's archive history (scan counts, change counts, sizes, frequency).
const wayBackHandler = async (url) => {
  const fields = 'timestamp,statuscode,digest,length,offset';
  const cdxUrl = `https://web.archive.org/cdx/search/cdx?url=${url}&output=json&fl=${fields}`;

  try {
    const { data } = await axios.get(cdxUrl);

    // The first row is the CDX column header, so fewer than two rows
    // means the site has no snapshots at all.
    if (!data || !Array.isArray(data) || data.length <= 1) {
      return { skipped: 'Site has never before been archived via the Wayback Machine' };
    }

    data.shift(); // drop the header row

    const firstScan = convertTimestampToDate(data[0][0]);
    const lastScan = convertTimestampToDate(data[data.length - 1][0]);
    const totalScans = data.length;
    const changeCount = countPageChanges(data);

    return {
      firstScan,
      lastScan,
      totalScans,
      changeCount,
      averagePageSize: getAveragePageSize(data),
      scanFrequency: getScanFrequency(firstScan, lastScan, totalScans, changeCount),
      scans: data,
      scanUrl: url,
    };
  } catch (err) {
    return { error: `Error fetching Wayback data: ${err.message}` };
  }
};
|
||||
|
||||
export const handler = middleware(wayBackHandler);
|
||||
export default handler;
|
||||
105
api/block-lists.js
Normal file
@@ -0,0 +1,105 @@
|
||||
import dns from 'dns';
|
||||
import { URL } from 'url';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Public DNS resolvers to query. The family/security-filtering variants
// answer differently (sinkhole IP, or a failed lookup) when they block a
// domain — that difference is what the blocklist check detects.
const DNS_SERVERS = [
  { name: 'AdGuard', ip: '176.103.130.130' },
  { name: 'AdGuard Family', ip: '176.103.130.132' },
  { name: 'CleanBrowsing Adult', ip: '185.228.168.10' },
  { name: 'CleanBrowsing Family', ip: '185.228.168.168' },
  { name: 'CleanBrowsing Security', ip: '185.228.168.9' },
  { name: 'CloudFlare', ip: '1.1.1.1' },
  { name: 'CloudFlare Family', ip: '1.1.1.3' },
  { name: 'Comodo Secure', ip: '8.26.56.26' },
  { name: 'Google DNS', ip: '8.8.8.8' },
  { name: 'Neustar Family', ip: '156.154.70.3' },
  { name: 'Neustar Protection', ip: '156.154.70.2' },
  { name: 'Norton Family', ip: '199.85.126.20' },
  { name: 'OpenDNS', ip: '208.67.222.222' },
  { name: 'OpenDNS Family', ip: '208.67.222.123' },
  { name: 'Quad9', ip: '9.9.9.9' },
  { name: 'Yandex Family', ip: '77.88.8.7' },
  { name: 'Yandex Safe', ip: '77.88.8.88' },
];
// Known sinkhole / block-page addresses. If a resolver's answer contains
// one of these, the queried domain is treated as blocked by that
// provider. NOTE(review): list appears hand-collected from vendor
// documentation — verify entries before relying on them.
const knownBlockIPs = [
  '146.112.61.106', // OpenDNS
  '185.228.168.10', // CleanBrowsing
  '8.26.56.26', // Comodo
  '9.9.9.9', // Quad9
  '208.69.38.170', // Some OpenDNS IPs
  '208.69.39.170', // Some OpenDNS IPs
  '208.67.222.222', // OpenDNS
  '208.67.222.123', // OpenDNS FamilyShield
  '199.85.126.10', // Norton
  '199.85.126.20', // Norton Family
  '156.154.70.22', // Neustar
  '77.88.8.7', // Yandex
  '77.88.8.8', // Yandex
  '::1', // Localhost IPv6
  '2a02:6b8::feed:0ff', // Yandex DNS
  '2a02:6b8::feed:bad', // Yandex Safe
  '2a02:6b8::feed:a11', // Yandex Family
  '2620:119:35::35', // OpenDNS
  '2620:119:53::53', // OpenDNS FamilyShield
  '2606:4700:4700::1111', // Cloudflare
  '2606:4700:4700::1001', // Cloudflare
  '2001:4860:4860::8888', // Google DNS
  '2a0d:2a00:1::', // AdGuard
  '2a0d:2a00:2::' // AdGuard Family
];
|
||||
|
||||
// Checks whether `domain` appears blocked when resolved via the DNS
// server at `serverIP`. A domain counts as blocked when it resolves to a
// known sinkhole IP, or when the server fails both A and AAAA lookups
// with ENOTFOUND/SERVFAIL. Never rejects — always resolves a boolean.
// Fixes: dns.resolve4/resolve6 accept no per-call `server` option (their
// options object only supports `ttl`), so the original silently queried
// the system's default resolver for every server. A dedicated
// dns.Resolver with setServers() is required to target a specific server.
const isDomainBlocked = async (domain, serverIP) => {
  const resolver = new dns.Resolver();
  resolver.setServers([serverIP]);

  const resolvesToBlockIP = (addresses) =>
    addresses.some((addr) => knownBlockIPs.includes(addr));

  return new Promise((resolve) => {
    resolver.resolve4(domain, (err, addresses) => {
      if (!err) {
        resolve(resolvesToBlockIP(addresses));
        return;
      }
      // IPv4 lookup failed — try IPv6 before deciding.
      resolver.resolve6(domain, (err6, addresses6) => {
        if (!err6) {
          resolve(resolvesToBlockIP(addresses6));
          return;
        }
        // Both failed: NXDOMAIN/SERVFAIL from a filtering resolver is
        // itself a blocking signal; other errors are treated as not blocked.
        resolve(err6.code === 'ENOTFOUND' || err6.code === 'SERVFAIL');
      });
    });
  });
};
|
||||
|
||||
// Queries every configured DNS server and reports, per server, whether
// the domain appears blocked. Result order matches DNS_SERVERS order.
// Fixes: the original awaited each of the ~17 lookups serially; they are
// independent, so running them concurrently cuts latency dramatically.
// isDomainBlocked only ever resolves (never rejects), so Promise.all
// cannot fail differently from the serial loop.
const checkDomainAgainstDnsServers = async (domain) => {
  return Promise.all(
    DNS_SERVERS.map(async (server) => ({
      server: server.name,
      serverIp: server.ip,
      isBlocked: await isDomainBlocked(domain, server.ip),
    })),
  );
};
|
||||
|
||||
// Entry point: pulls the hostname out of the supplied URL and runs the
// blocklist check against every configured DNS server.
export const blockListHandler = async (url) => {
  const { hostname } = new URL(url);
  return { blocklists: await checkDomainAgainstDnsServers(hostname) };
};
|
||||
|
||||
export const handler = middleware(blockListHandler);
|
||||
export default handler;
|
||||
|
||||
@@ -1,14 +1,7 @@
|
||||
const https = require('https');
|
||||
import https from 'https';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async (event, context) => {
|
||||
const { url } = event.queryStringParameters;
|
||||
|
||||
if (!url) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'url query parameter is required' }),
|
||||
};
|
||||
}
|
||||
const carbonHandler = async (url) => {
|
||||
|
||||
// First, get the size of the website's HTML
|
||||
const getHtmlSize = (url) => new Promise((resolve, reject) => {
|
||||
@@ -49,14 +42,11 @@ exports.handler = async (event, context) => {
|
||||
}
|
||||
|
||||
carbonData.scanUrl = url;
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(carbonData),
|
||||
};
|
||||
return carbonData;
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: `Error: ${error.message}` }),
|
||||
};
|
||||
throw new Error(`Error: ${error.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(carbonHandler);
|
||||
export default handler;
|
||||
@@ -1,60 +0,0 @@
|
||||
const axios = require('axios');
|
||||
const cheerio = require('cheerio');
|
||||
const urlLib = require('url');
|
||||
|
||||
exports.handler = async (event, context) => {
|
||||
let url = event.queryStringParameters.url;
|
||||
|
||||
// Check if url includes protocol
|
||||
if (!url.startsWith('http://') && !url.startsWith('https://')) {
|
||||
url = 'http://' + url;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await axios.get(url);
|
||||
const html = response.data;
|
||||
const $ = cheerio.load(html);
|
||||
const internalLinksMap = new Map();
|
||||
const externalLinksMap = new Map();
|
||||
|
||||
$('a[href]').each((i, link) => {
|
||||
const href = $(link).attr('href');
|
||||
const absoluteUrl = urlLib.resolve(url, href);
|
||||
|
||||
if (absoluteUrl.startsWith(url)) {
|
||||
const count = internalLinksMap.get(absoluteUrl) || 0;
|
||||
internalLinksMap.set(absoluteUrl, count + 1);
|
||||
} else if (href.startsWith('http://') || href.startsWith('https://')) {
|
||||
const count = externalLinksMap.get(absoluteUrl) || 0;
|
||||
externalLinksMap.set(absoluteUrl, count + 1);
|
||||
}
|
||||
});
|
||||
|
||||
// Convert maps to sorted arrays
|
||||
const internalLinks = [...internalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
|
||||
const externalLinks = [...externalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
|
||||
|
||||
if (internalLinks.length === 0 && externalLinks.length === 0) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({
|
||||
skipped: 'No internal or external links found. '
|
||||
+ 'This may be due to the website being dynamically rendered, using a client-side framework (like React), and without SSR enabled. '
|
||||
+ 'That would mean that the static HTML returned from the HTTP request doesn\'t contain any meaningful content for Web-Check to analyze. '
|
||||
+ 'You can rectify this by using a headless browser to render the page instead.',
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ internal: internalLinks, external: externalLinks }),
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: 'Failed fetching data' }),
|
||||
};
|
||||
}
|
||||
};
|
||||
58
api/cookies.js
Normal file
@@ -0,0 +1,58 @@
|
||||
import axios from 'axios';
|
||||
import puppeteer from 'puppeteer';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Loads the page in headless Chrome and returns its client-side cookies.
// Navigation is capped at 3 seconds; the browser is closed regardless of
// outcome (success, timeout, or navigation error).
const getPuppeteerCookies = async (url) => {
  const browser = await puppeteer.launch({
    headless: 'new',
    args: ['--no-sandbox', '--disable-setuid-sandbox'],
  });

  try {
    const page = await browser.newPage();
    const timeout = new Promise((_, reject) =>
      setTimeout(() => reject(new Error('Puppeteer took too long!')), 3000)
    );
    // Whichever settles first wins: page load or the 3s deadline.
    await Promise.race([page.goto(url, { waitUntil: 'networkidle2' }), timeout]);
    return await page.cookies();
  } finally {
    await browser.close();
  }
};
|
||||
|
||||
// Collects cookies for a URL from two sources: the Set-Cookie response
// headers (via axios) and the client-side cookie jar (via headless
// Chrome). The browser pass is best-effort; failures there are ignored.
const cookieHandler = async (url) => {
  let headerCookies = null;
  let clientCookies = null;

  try {
    const response = await axios.get(url, { withCredentials: true, maxRedirects: 5 });
    headerCookies = response.headers['set-cookie'];
  } catch (error) {
    // Distinguish the three axios failure modes for a clearer message.
    if (error.response) {
      return { error: `Request failed with status ${error.response.status}: ${error.message}` };
    }
    if (error.request) {
      return { error: `No response received: ${error.message}` };
    }
    return { error: `Error setting up request: ${error.message}` };
  }

  try {
    clientCookies = await getPuppeteerCookies(url);
  } catch (_) {
    clientCookies = null; // best-effort: browser cookies are optional
  }

  if (!headerCookies && (!clientCookies || clientCookies.length === 0)) {
    return { skipped: 'No cookies' };
  }

  return { headerCookies, clientCookies };
};
|
||||
|
||||
export const handler = middleware(cookieHandler);
|
||||
export default handler;
|
||||
@@ -1,11 +1,10 @@
|
||||
const dns = require('dns');
|
||||
const dnsPromises = dns.promises;
|
||||
// const https = require('https');
|
||||
const axios = require('axios');
|
||||
import { promises as dnsPromises, lookup } from 'dns';
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async (event) => {
|
||||
const domain = event.queryStringParameters.url.replace(/^(?:https?:\/\/)?/i, "");
|
||||
const dnsHandler = async (url) => {
|
||||
try {
|
||||
const domain = url.replace(/^(?:https?:\/\/)?/i, "");
|
||||
const addresses = await dnsPromises.resolve4(domain);
|
||||
const results = await Promise.all(addresses.map(async (address) => {
|
||||
const hostname = await dnsPromises.reverse(address).catch(() => null);
|
||||
@@ -22,6 +21,7 @@ exports.handler = async (event) => {
|
||||
dohDirectSupports,
|
||||
};
|
||||
}));
|
||||
|
||||
// let dohMozillaSupport = false;
|
||||
// try {
|
||||
// const mozillaList = await axios.get('https://firefox.settings.services.mozilla.com/v1/buckets/security-state/collections/onecrl/records');
|
||||
@@ -29,20 +29,18 @@ exports.handler = async (event) => {
|
||||
// } catch (error) {
|
||||
// console.error(error);
|
||||
// }
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({
|
||||
domain,
|
||||
dns: results,
|
||||
// dohMozillaSupport,
|
||||
}),
|
||||
domain,
|
||||
dns: results,
|
||||
// dohMozillaSupport,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({
|
||||
error: `An error occurred while resolving DNS. ${error.message}`,
|
||||
}),
|
||||
};
|
||||
throw new Error(`An error occurred while resolving DNS. ${error.message}`); // This will be caught and handled by the commonMiddleware
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
export const handler = middleware(dnsHandler);
|
||||
export default handler;
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
const dns = require('dns');
|
||||
const util = require('util');
|
||||
import dns from 'dns';
|
||||
import util from 'util';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
let hostname = event.queryStringParameters.url;
|
||||
const dnsHandler = async (url) => {
|
||||
let hostname = url;
|
||||
|
||||
// Handle URLs by extracting hostname
|
||||
if (hostname.startsWith('http://') || hostname.startsWith('https://')) {
|
||||
@@ -35,25 +36,20 @@ exports.handler = async function(event, context) {
|
||||
]);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({
|
||||
A: a,
|
||||
AAAA: aaaa,
|
||||
MX: mx,
|
||||
TXT: txt,
|
||||
NS: ns,
|
||||
CNAME: cname,
|
||||
SOA: soa,
|
||||
SRV: srv,
|
||||
PTR: ptr
|
||||
})
|
||||
A: a,
|
||||
AAAA: aaaa,
|
||||
MX: mx,
|
||||
TXT: txt,
|
||||
NS: ns,
|
||||
CNAME: cname,
|
||||
SOA: soa,
|
||||
SRV: srv,
|
||||
PTR: ptr
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({
|
||||
error: error.message
|
||||
})
|
||||
};
|
||||
throw new Error(error.message);
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(dnsHandler);
|
||||
export default handler;
|
||||
@@ -1,16 +1,7 @@
|
||||
const https = require('https');
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
let { url } = event.queryStringParameters;
|
||||
|
||||
if (!url) {
|
||||
return errorResponse('URL query parameter is required.');
|
||||
}
|
||||
|
||||
// Extract hostname from URL
|
||||
const parsedUrl = new URL(url);
|
||||
const domain = parsedUrl.hostname;
|
||||
import https from 'https';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
const dnsSecHandler = async (domain) => {
|
||||
const dnsTypes = ['DNSKEY', 'DS', 'RRSIG'];
|
||||
const records = {};
|
||||
|
||||
@@ -34,7 +25,11 @@ exports.handler = async function(event, context) {
|
||||
});
|
||||
|
||||
res.on('end', () => {
|
||||
resolve(JSON.parse(data));
|
||||
try {
|
||||
resolve(JSON.parse(data));
|
||||
} catch (error) {
|
||||
reject(new Error('Invalid JSON response'));
|
||||
}
|
||||
});
|
||||
|
||||
res.on('error', error => {
|
||||
@@ -48,22 +43,15 @@ exports.handler = async function(event, context) {
|
||||
if (dnsResponse.Answer) {
|
||||
records[type] = { isFound: true, answer: dnsResponse.Answer, response: dnsResponse.Answer };
|
||||
} else {
|
||||
records[type] = { isFound: false, answer: null, response: dnsResponse};
|
||||
records[type] = { isFound: false, answer: null, response: dnsResponse };
|
||||
}
|
||||
} catch (error) {
|
||||
return errorResponse(`Error fetching ${type} record: ${error.message}`);
|
||||
throw new Error(`Error fetching ${type} record: ${error.message}`); // This will be caught and handled by the commonMiddleware
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(records),
|
||||
};
|
||||
return records;
|
||||
};
|
||||
|
||||
const errorResponse = (message, statusCode = 444) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ error: message }),
|
||||
};
|
||||
};
|
||||
export const handler = middleware(dnsSecHandler);
|
||||
export default handler;
|
||||
@@ -1,22 +1,15 @@
|
||||
const https = require('https');
|
||||
import https from 'https';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async function (event, context) {
|
||||
const { url } = event.queryStringParameters;
|
||||
const featuresHandler = async (url) => {
|
||||
const apiKey = process.env.BUILT_WITH_API_KEY;
|
||||
|
||||
const errorResponse = (message, statusCode = 500) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ error: message }),
|
||||
};
|
||||
};
|
||||
|
||||
if (!url) {
|
||||
return errorResponse('URL query parameter is required', 400);
|
||||
throw new Error('URL query parameter is required');
|
||||
}
|
||||
|
||||
if (!apiKey) {
|
||||
return errorResponse('Missing BuiltWith API key in environment variables', 500);
|
||||
throw new Error('Missing BuiltWith API key in environment variables');
|
||||
}
|
||||
|
||||
const apiUrl = `https://api.builtwith.com/free1/api.json?KEY=${apiKey}&LOOKUP=${encodeURIComponent(url)}`;
|
||||
@@ -46,11 +39,11 @@ exports.handler = async function (event, context) {
|
||||
req.end();
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: response,
|
||||
};
|
||||
return response;
|
||||
} catch (error) {
|
||||
return errorResponse(`Error making request: ${error.message}`);
|
||||
throw new Error(`Error making request: ${error.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(featuresHandler);
|
||||
export default handler;
|
||||
@@ -1,33 +0,0 @@
|
||||
const dns = require('dns');
|
||||
|
||||
/* Lambda function to fetch the IP address of a given URL */
|
||||
exports.handler = function (event, context, callback) {
|
||||
const addressParam = event.queryStringParameters.url;
|
||||
|
||||
if (!addressParam) {
|
||||
callback(null, errorResponse('Address parameter is missing.'));
|
||||
return;
|
||||
}
|
||||
|
||||
const address = decodeURIComponent(addressParam)
|
||||
.replaceAll('https://', '')
|
||||
.replaceAll('http://', '');
|
||||
|
||||
dns.lookup(address, (err, ip, family) => {
|
||||
if (err) {
|
||||
callback(null, errorResponse(err.message));
|
||||
} else {
|
||||
callback(null, {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ ip, family }),
|
||||
});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const errorResponse = (message, statusCode = 444) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ error: message }),
|
||||
};
|
||||
};
|
||||
114
api/firewall.js
Normal file
@@ -0,0 +1,114 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Builds a positive WAF-detection result for the named firewall product.
const hasWaf = (waf) => ({ hasWaf: true, waf });
|
||||
|
||||
// Ordered WAF fingerprint rules — first match wins, preserving the exact
// order of the original if-cascade. Each rule either substring-matches a
// header's value or checks for a header's mere presence.
const WAF_CHECKS = [
  { waf: 'Cloudflare', match: (h) => h['server']?.includes('cloudflare') },
  { waf: 'AWS WAF', match: (h) => h['x-powered-by']?.includes('AWS Lambda') },
  { waf: 'Akamai', match: (h) => h['server']?.includes('AkamaiGHost') },
  { waf: 'Sucuri', match: (h) => h['server']?.includes('Sucuri') },
  { waf: 'Barracuda WAF', match: (h) => h['server']?.includes('BarracudaWAF') },
  { waf: 'F5 BIG-IP', match: (h) => h['server']?.includes('F5 BIG-IP') || h['server']?.includes('BIG-IP') },
  { waf: 'Sucuri CloudProxy WAF', match: (h) => Boolean(h['x-sucuri-id'] || h['x-sucuri-cache']) },
  { waf: 'Fortinet FortiWeb WAF', match: (h) => h['server']?.includes('FortiWeb') },
  { waf: 'Imperva SecureSphere WAF', match: (h) => h['server']?.includes('Imperva') },
  { waf: 'Sqreen', match: (h) => h['x-protected-by']?.includes('Sqreen') },
  { waf: 'Reblaze WAF', match: (h) => Boolean(h['x-waf-event-info']) },
  // NOTE(review): axios exposes set-cookie as an array, so .includes here
  // is an exact-element match, not a substring match — behavior preserved
  // from the original; confirm whether substring matching was intended.
  { waf: 'Citrix NetScaler', match: (h) => Boolean(h['set-cookie'] && h['set-cookie'].includes('_citrix_ns_id')) },
  { waf: 'WangZhanBao WAF', match: (h) => Boolean(h['x-denied-reason'] || h['x-wzws-requested-method']) },
  { waf: 'Webcoment Firewall', match: (h) => Boolean(h['x-webcoment']) },
  { waf: 'Yundun WAF', match: (h) => h['server']?.includes('Yundun') },
  { waf: 'Yundun WAF', match: (h) => Boolean(h['x-yd-waf-info'] || h['x-yd-info']) },
  { waf: 'Safe3 Web Application Firewall', match: (h) => h['server']?.includes('Safe3WAF') },
  { waf: 'NAXSI WAF', match: (h) => h['server']?.includes('NAXSI') },
  { waf: 'IBM WebSphere DataPower', match: (h) => Boolean(h['x-datapower-transactionid']) },
  { waf: 'QRATOR WAF', match: (h) => h['server']?.includes('QRATOR') },
  { waf: 'DDoS-Guard WAF', match: (h) => h['server']?.includes('ddos-guard') },
];

// Fetches the URL and inspects its response headers for known
// web-application-firewall fingerprints.
// Fixes: the original duplicated the header-check pattern 21 times;
// this keeps identical detection order/semantics via the table above.
const firewallHandler = async (url) => {
  const fullUrl = url.startsWith('http') ? url : `http://${url}`;

  try {
    const response = await axios.get(fullUrl);
    const headers = response.headers;

    const matched = WAF_CHECKS.find(({ match }) => match(headers));
    if (matched) {
      return hasWaf(matched.waf);
    }

    return {
      hasWaf: false,
    }
  } catch (error) {
    // Preserved from the original: errors are returned as a pre-formed
    // { statusCode, body } object for the middleware to pass through.
    return {
      statusCode: 500,
      body: JSON.stringify({ error: error.message }),
    };
  }
};
|
||||
|
||||
export const handler = middleware(firewallHandler);
|
||||
export default handler;
|
||||
@@ -1,35 +0,0 @@
|
||||
exports.handler = async (event) => {
|
||||
const { url } = event.queryStringParameters;
|
||||
const redirects = [url];
|
||||
|
||||
try {
|
||||
const got = await import('got');
|
||||
await got.default(url, {
|
||||
followRedirect: true,
|
||||
maxRedirects: 12,
|
||||
hooks: {
|
||||
beforeRedirect: [
|
||||
(options, response) => {
|
||||
redirects.push(response.headers.location);
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({
|
||||
redirects: redirects,
|
||||
}),
|
||||
};
|
||||
} catch (error) {
|
||||
return errorResponse(`Error: ${error.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
const errorResponse = (message, statusCode = 444) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ error: message }),
|
||||
};
|
||||
};
|
||||
@@ -1,26 +0,0 @@
|
||||
const axios = require('axios');
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
const { url } = event.queryStringParameters;
|
||||
|
||||
if (!url) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ message: 'url query string parameter is required' }),
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await axios.get(url, {withCredentials: true});
|
||||
const cookies = response.headers['set-cookie'];
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ cookies }),
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
};
|
||||
}
|
||||
};
|
||||
@@ -1,31 +0,0 @@
|
||||
const axios = require('axios');
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
const { url } = event.queryStringParameters;
|
||||
|
||||
if (!url) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'url query string parameter is required' }),
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await axios.get(url, {
|
||||
validateStatus: function (status) {
|
||||
return status >= 200 && status < 600; // Resolve only if the status code is less than 600
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(response.headers),
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
};
|
||||
}
|
||||
};
|
||||
23
api/get-ip.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import dns from 'dns';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Promise wrapper around the callback-based dns.lookup.
// Resolves to { ip, family } or rejects with the lookup error.
const lookupAsync = (address) =>
  new Promise((resolve, reject) => {
    dns.lookup(address, (err, ip, family) => {
      if (err) {
        reject(err);
        return;
      }
      resolve({ ip, family });
    });
  });
|
||||
|
||||
// Resolves the IP address of the host named in the given URL.
// Fixes: the original only stripped the scheme, so a URL with a path
// (e.g. https://example.com/about) passed "example.com/about" to
// dns.lookup, which always fails. Strip everything after the first '/'
// so the lookup receives a bare hostname.
const ipHandler = async (url) => {
  const address = url
    .replaceAll('https://', '')
    .replaceAll('http://', '')
    .split('/')[0];
  return await lookupAsync(address);
};
|
||||
|
||||
|
||||
export const handler = middleware(ipHandler);
|
||||
export default handler;
|
||||
19
api/headers.js
Normal file
@@ -0,0 +1,19 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Fetches the URL and returns its raw response headers.
// Fixes: the original wrapped the await in a try/catch only to rethrow
// `new Error(error.message)`, discarding the original error type, stack
// and properties for no benefit — the middleware reads error.message
// either way, so the error is now allowed to propagate untouched.
const headersHandler = async (url, event, context) => {
  const response = await axios.get(url, {
    // Accept any status below 600 so error pages still yield headers.
    validateStatus: function (status) {
      return status >= 200 && status < 600;
    },
  });

  return response.headers;
};
|
||||
|
||||
export const handler = middleware(headersHandler);
|
||||
export default handler;
|
||||
@@ -1,30 +1,20 @@
|
||||
const https = require('https');
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
const siteURL = event.queryStringParameters.url;
|
||||
import https from 'https';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
const hstsHandler = async (url, event, context) => {
|
||||
const errorResponse = (message, statusCode = 500) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ error: message }),
|
||||
};
|
||||
};
|
||||
const hstsIncompatible = (message, statusCode = 200) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ message, compatible: false }),
|
||||
};
|
||||
const hstsIncompatible = (message, compatible = false, hstsHeader = null ) => {
|
||||
return { message, compatible, hstsHeader };
|
||||
};
|
||||
|
||||
if (!siteURL) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'URL parameter is missing!' }),
|
||||
};
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = https.request(siteURL, res => {
|
||||
const req = https.request(url, res => {
|
||||
const headers = res.headers;
|
||||
const hstsHeader = headers['strict-transport-security'];
|
||||
|
||||
@@ -42,14 +32,7 @@ exports.handler = async function(event, context) {
|
||||
} else if (!preload) {
|
||||
resolve(hstsIncompatible(`HSTS header does not contain the preload directive.`));
|
||||
} else {
|
||||
resolve({
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({
|
||||
message: "Site is compatible with the HSTS preload list!",
|
||||
compatible: true,
|
||||
hstsHeader: hstsHeader,
|
||||
}),
|
||||
});
|
||||
resolve(hstsIncompatible(`Site is compatible with the HSTS preload list!`, true, hstsHeader));
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -61,3 +44,6 @@ exports.handler = async function(event, context) {
|
||||
req.end();
|
||||
});
|
||||
};
|
||||
|
||||
export const handler = middleware(hstsHandler);
|
||||
export default handler;
|
||||
26
api/http-security.js
Normal file
@@ -0,0 +1,26 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Fetches the URL and reports which standard HTTP security headers are
// present (as booleans).
// Fixes idiom: `x ? true : false` replaced with Boolean(x); behavior and
// the pre-formed error-object catch branch are preserved.
const httpsSecHandler = async (url) => {
  const fullUrl = url.startsWith('http') ? url : `http://${url}`;

  try {
    const response = await axios.get(fullUrl);
    const headers = response.headers;
    return {
      strictTransportPolicy: Boolean(headers['strict-transport-security']),
      xFrameOptions: Boolean(headers['x-frame-options']),
      xContentTypeOptions: Boolean(headers['x-content-type-options']),
      xXSSProtection: Boolean(headers['x-xss-protection']),
      contentSecurityPolicy: Boolean(headers['content-security-policy']),
    }
  } catch (error) {
    // Preserved: middleware passes { statusCode, body } objects through.
    return {
      statusCode: 500,
      body: JSON.stringify({ error: error.message }),
    };
  }
};
|
||||
|
||||
export const handler = middleware(httpsSecHandler);
|
||||
export default handler;
|
||||
70
api/legacy-rank.js
Normal file
@@ -0,0 +1,70 @@
|
||||
import axios from 'axios';
|
||||
import unzipper from 'unzipper';
|
||||
import csv from 'csv-parser';
|
||||
import fs from 'fs';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Should also work with the following sources:
|
||||
// https://www.domcop.com/files/top/top10milliondomains.csv.zip
|
||||
// https://tranco-list.eu/top-1m.csv.zip
|
||||
// https://www.domcop.com/files/top/top10milliondomains.csv.zip
|
||||
// https://radar.cloudflare.com/charts/LargerTopDomainsTable/attachment?id=525&top=1000000
|
||||
// https://statvoo.com/dl/top-1million-sites.csv.zip
|
||||
|
||||
const FILE_URL = 'https://s3-us-west-1.amazonaws.com/umbrella-static/top-1m.csv.zip';
|
||||
const TEMP_FILE_PATH = '/tmp/top-1m.csv';
|
||||
|
||||
// Looks up a domain's popularity rank in the Cisco Umbrella top-1M list.
// Downloads and extracts the list to /tmp on first use, then streams the CSV
// row-by-row until the domain is found (or the file ends).
const rankHandler = async (url) => {
  let domain = null;

  try {
    domain = new URL(url).hostname;
  } catch (e) {
    // Normalize any URL-parse failure into one consistent error message.
    throw new Error('Invalid URL');
  }

  // Download and unzip the file if not in cache.
  // NOTE(review): concurrent invocations can race here — two requests may both
  // see the file missing and download it twice, or one may read a partially
  // extracted file. Verify whether the runtime serializes requests before
  // relying on this cache.
  if (!fs.existsSync(TEMP_FILE_PATH)) {
    const response = await axios({
      method: 'GET',
      url: FILE_URL,
      responseType: 'stream' // stream to disk instead of buffering ~1M rows in memory
    });

    await new Promise((resolve, reject) => {
      response.data
        .pipe(unzipper.Extract({ path: '/tmp' }))
        .on('close', resolve)
        .on('error', reject);
    });
  }

  // Parse the CSV and find the rank
  return new Promise((resolve, reject) => {
    const csvStream = fs.createReadStream(TEMP_FILE_PATH)
      .pipe(csv({
        headers: ['rank', 'domain'], // the Umbrella CSV has no header row
      }))
      .on('data', (row) => {
        if (row.domain === domain) {
          // Stop reading as soon as we have a hit.
          // NOTE(review): destroy() may emit a late error after resolve();
          // promises only settle once so the extra reject is harmless, but
          // worth confirming no unhandled-error warning is produced.
          csvStream.destroy();
          resolve({
            domain: domain,
            rank: row.rank, // presumably the raw CSV string, not a number — verify consumers
            isFound: true,
          });
        }
      })
      .on('end', () => {
        // Reached EOF without a match — the domain is unranked.
        resolve({
          skipped: `Skipping, as ${domain} is not present in the Umbrella top 1M list.`,
          domain: domain,
          isFound: false,
        });
      })
      .on('error', reject);
  });
};
|
||||
|
||||
export const handler = middleware(rankHandler);
|
||||
export default handler;
|
||||
@@ -1,40 +0,0 @@
|
||||
const axios = require('axios');
|
||||
|
||||
exports.handler = function(event, context, callback) {
|
||||
const { url } = event.queryStringParameters;
|
||||
|
||||
if (!url) {
|
||||
callback(null, {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'URL param is required'}),
|
||||
});
|
||||
}
|
||||
|
||||
const apiKey = process.env.GOOGLE_CLOUD_API_KEY;
|
||||
|
||||
if (!apiKey) {
|
||||
callback(null, {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: 'API key (GOOGLE_CLOUD_API_KEY) not set'}),
|
||||
});
|
||||
}
|
||||
|
||||
const endpoint = `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url=${encodeURIComponent(url)}&category=PERFORMANCE&category=ACCESSIBILITY&category=BEST_PRACTICES&category=SEO&category=PWA&strategy=mobile&key=${apiKey}`;
|
||||
|
||||
axios.get(endpoint)
|
||||
.then(
|
||||
(response) => {
|
||||
callback(null, {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(response.data),
|
||||
});
|
||||
}
|
||||
).catch(
|
||||
() => {
|
||||
callback(null, {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: 'Error running Lighthouse'}),
|
||||
});
|
||||
}
|
||||
);
|
||||
};
|
||||
49
api/linked-pages.js
Normal file
@@ -0,0 +1,49 @@
|
||||
import axios from 'axios';
|
||||
import cheerio from 'cheerio';
|
||||
import urlLib from 'url';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Crawls the page's anchors and returns internal/external link lists, each
// sorted by how often the link appears on the page.
const linkedPagesHandler = async (url) => {
  const response = await axios.get(url);
  const html = response.data;
  const $ = cheerio.load(html);
  const internalLinksMap = new Map();
  const externalLinksMap = new Map();

  // Get all links on the page
  $('a[href]').each((i, link) => {
    const href = $(link).attr('href');
    // NOTE(review): url.resolve is a legacy Node API — new URL(href, url) is
    // the modern equivalent; confirm behavior parity before switching.
    const absoluteUrl = urlLib.resolve(url, href);

    // Check if absolute / relative, append to appropriate map or increment occurrence count.
    // "Internal" = resolved URL begins with the requested URL; only absolute
    // http(s) hrefs count as external (mailto:, tel:, #anchors are dropped).
    if (absoluteUrl.startsWith(url)) {
      const count = internalLinksMap.get(absoluteUrl) || 0;
      internalLinksMap.set(absoluteUrl, count + 1);
    } else if (href.startsWith('http://') || href.startsWith('https://')) {
      const count = externalLinksMap.get(absoluteUrl) || 0;
      externalLinksMap.set(absoluteUrl, count + 1);
    }
  });

  // Sort by most occurrences, remove duplicates, and convert to array
  const internalLinks = [...internalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
  const externalLinks = [...externalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);

  // If there were no links, then mark as skipped and show reasons
  if (internalLinks.length === 0 && externalLinks.length === 0) {
    return {
      statusCode: 400,
      body: {
        skipped: 'No internal or external links found. '
        + 'This may be due to the website being dynamically rendered, using a client-side framework (like React), and without SSR enabled. '
        + 'That would mean that the static HTML returned from the HTTP request doesn\'t contain any meaningful content for Web-Check to analyze. '
        + 'You can rectify this by using a headless browser to render the page instead.',
      },
    };
  }

  return { internal: internalLinks, external: externalLinks };
};
|
||||
|
||||
export const handler = middleware(linkedPagesHandler);
|
||||
export default handler;
|
||||
@@ -1,11 +1,12 @@
|
||||
const dns = require('dns').promises;
|
||||
const URL = require('url-parse');
|
||||
import dns from 'dns';
|
||||
import URL from 'url-parse';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async (event, context) => {
|
||||
// TODO: Fix.
|
||||
|
||||
const mailConfigHandler = async (url, event, context) => {
|
||||
try {
|
||||
let domain = event.queryStringParameters.url;
|
||||
const parsedUrl = new URL(domain);
|
||||
domain = parsedUrl.hostname || parsedUrl.pathname;
|
||||
const domain = new URL(url).hostname || new URL(url).pathname;
|
||||
|
||||
// Get MX records
|
||||
const mxRecords = await dns.resolveMx(domain);
|
||||
@@ -54,26 +55,28 @@ exports.handler = async (event, context) => {
|
||||
if (yahooMx.length > 0) {
|
||||
mailServices.push({ provider: 'Yahoo', value: yahooMx[0].exchange });
|
||||
}
|
||||
// Check MX records for Mimecast
|
||||
const mimecastMx = mxRecords.filter(record => record.exchange.includes('mimecast.com'));
|
||||
if (mimecastMx.length > 0) {
|
||||
mailServices.push({ provider: 'Mimecast', value: mimecastMx[0].exchange });
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({
|
||||
mxRecords,
|
||||
txtRecords: emailTxtRecords,
|
||||
mailServices,
|
||||
}),
|
||||
};
|
||||
};
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOTFOUND' || error.code === 'ENODATA') {
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ skipped: 'No mail server in use on this domain' }),
|
||||
};
|
||||
return { skipped: 'No mail server in use on this domain' };
|
||||
} else {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
body: { error: error.message },
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(mailConfigHandler);
|
||||
export default handler;
|
||||
|
||||
@@ -1,18 +1,24 @@
|
||||
const net = require('net');
|
||||
import net from 'net';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// A list of commonly used ports.
|
||||
const PORTS = [
|
||||
const DEFAULT_PORTS_TO_CHECK = [
|
||||
20, 21, 22, 23, 25, 53, 80, 67, 68, 69,
|
||||
110, 119, 123, 143, 156, 161, 162, 179, 194,
|
||||
389, 443, 587, 993, 995,
|
||||
3000, 3306, 3389, 5060, 5900, 8000, 8080, 8888
|
||||
];
|
||||
/*
|
||||
* Checks if the env PORTS_TO_CHECK is set, if so the string is split via "," to get an array of ports to check.
|
||||
* If the env is not set, return the default commonly used ports.
|
||||
*/
|
||||
const PORTS = process.env.PORTS_TO_CHECK ? process.env.PORTS_TO_CHECK.split(",") : DEFAULT_PORTS_TO_CHECK
|
||||
|
||||
async function checkPort(port, domain) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const socket = new net.Socket();
|
||||
|
||||
socket.setTimeout(1500); // you may want to adjust the timeout
|
||||
socket.setTimeout(1500);
|
||||
|
||||
socket.once('connect', () => {
|
||||
socket.destroy();
|
||||
@@ -33,13 +39,9 @@ async function checkPort(port, domain) {
|
||||
});
|
||||
}
|
||||
|
||||
exports.handler = async (event, context) => {
|
||||
const domain = event.queryStringParameters.url;
|
||||
const portsHandler = async (url, event, context) => {
|
||||
const domain = url.replace(/(^\w+:|^)\/\//, '');
|
||||
|
||||
if (!domain) {
|
||||
return errorResponse('Missing domain parameter.');
|
||||
}
|
||||
|
||||
const delay = ms => new Promise(res => setTimeout(res, ms));
|
||||
const timeout = delay(9000);
|
||||
|
||||
@@ -75,16 +77,17 @@ exports.handler = async (event, context) => {
|
||||
if(timeoutReached){
|
||||
return errorResponse('The function timed out before completing.');
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ openPorts, failedPorts }),
|
||||
};
|
||||
|
||||
// Sort openPorts and failedPorts before returning
|
||||
openPorts.sort((a, b) => a - b);
|
||||
failedPorts.sort((a, b) => a - b);
|
||||
|
||||
return { openPorts, failedPorts };
|
||||
};
|
||||
|
||||
const errorResponse = (message, statusCode = 444) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ error: message }),
|
||||
};
|
||||
return { error: message };
|
||||
};
|
||||
|
||||
export const handler = middleware(portsHandler);
|
||||
export default handler;
|
||||
22
api/quality.js
Normal file
@@ -0,0 +1,22 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Runs a Google PageSpeed (Lighthouse) audit for the given URL and returns
// the raw API payload. Requires the GOOGLE_CLOUD_API_KEY env variable.
const qualityHandler = async (url, event, context) => {
  const apiKey = process.env.GOOGLE_CLOUD_API_KEY;

  // Fail fast when the key is absent — the API rejects unkeyed requests anyway.
  if (!apiKey) {
    throw new Error(
      'Missing Google API. You need to set the `GOOGLE_CLOUD_API_KEY` environment variable'
    );
  }

  // All audit categories, mobile strategy.
  const endpoint = `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?`
    + `url=${encodeURIComponent(url)}&category=PERFORMANCE&category=ACCESSIBILITY`
    + `&category=BEST_PRACTICES&category=SEO&category=PWA&strategy=mobile`
    + `&key=${apiKey}`;

  const apiResponse = await axios.get(endpoint);
  return apiResponse.data;
};
|
||||
|
||||
export const handler = middleware(qualityHandler);
|
||||
export default handler;
|
||||
26
api/rank.js
Normal file
@@ -0,0 +1,26 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Fetches the domain's Tranco popularity rank.
// Returns the API payload, a { skipped } object for unranked domains, or an
// { error } object when the request fails.
const rankHandler = async (url) => {
  let domain = null;
  try {
    // Robustness fix: previously an unparseable (but truthy) url threw a raw
    // TypeError from `new URL` before the Invalid URL guard could run.
    domain = url ? new URL(url).hostname : null;
  } catch (e) { /* fall through to the uniform error below */ }
  if (!domain) throw new Error('Invalid URL');

  try {
    const auth = process.env.TRANCO_API_KEY ? // Auth is optional.
      { auth: { username: process.env.TRANCO_USERNAME, password: process.env.TRANCO_API_KEY } }
      : {};
    // Bug fix: axios.get(url, config) accepts a single config object — the
    // auth options were previously passed as an ignored third argument, so
    // credentials were never actually sent. Merge them into the config.
    const response = await axios.get(
      `https://tranco-list.eu/api/ranks/domain/${domain}`,
      { timeout: 5000, ...auth },
    );
    if (!response.data || !response.data.ranks || response.data.ranks.length === 0) {
      return { skipped: `Skipping, as ${domain} isn't ranked in the top 100 million sites yet.`};
    }
    return response.data;
  } catch (error) {
    return { error: `Unable to fetch rank, ${error.message}` };
  }
};
|
||||
|
||||
export const handler = middleware(rankHandler);
|
||||
export default handler;
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
const axios = require('axios');
|
||||
|
||||
// Legacy (pre-middleware) handler: fetches /robots.txt for the given ?url=
// and returns its raw contents as plain text.
exports.handler = async function(event, context) {
  const siteURL = event.queryStringParameters.url;

  if (!siteURL) {
    return {
      statusCode: 400,
      body: JSON.stringify({ error: 'Missing url query parameter' }),
    };
  }

  let parsedURL;
  try {
    parsedURL = new URL(siteURL);
  } catch (error) {
    return {
      statusCode: 400,
      body: JSON.stringify({ error: 'Invalid url query parameter' }),
    };
  }

  // robots.txt always lives at the origin root, regardless of the input path.
  const robotsURL = `${parsedURL.protocol}//${parsedURL.hostname}/robots.txt`;

  try {
    const response = await axios.get(robotsURL);

    if (response.status === 200) {
      // Return the file body as-is (not JSON-wrapped).
      return {
        statusCode: 200,
        body: response.data,
      };
    } else {
      // NOTE(review): axios rejects on non-2xx by default, so this branch is
      // likely unreachable unless a custom validateStatus is configured — verify.
      return {
        statusCode: response.status,
        body: JSON.stringify({ error: 'Failed to fetch robots.txt', statusCode: response.status }),
      };
    }
  } catch (error) {
    return {
      statusCode: 500,
      body: JSON.stringify({ error: `Error fetching robots.txt: ${error.message}` }),
    };
  }
};
|
||||
28
api/redirects.js
Normal file
@@ -0,0 +1,28 @@
|
||||
import got from 'got';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Follows the redirect chain for a URL (up to 12 hops) and returns every
// location visited, starting with the original URL itself.
const redirectsHandler = async (url) => {
  const visited = [url];

  // Record each hop's Location header as got follows it.
  const recordHop = (options, response) => {
    visited.push(response.headers.location);
  };

  try {
    await got(url, {
      followRedirect: true,
      maxRedirects: 12,
      hooks: { beforeRedirect: [recordHop] },
    });
    return { redirects: visited };
  } catch (error) {
    throw new Error(`Error: ${error.message}`);
  }
};
|
||||
|
||||
export const handler = middleware(redirectsHandler);
|
||||
export default handler;
|
||||
71
api/robots-txt.js
Normal file
@@ -0,0 +1,71 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Parses robots.txt content into a list of { lbl, val } directive rules.
// Recognizes User-agent, Allow and Disallow (case-insensitive); all other
// lines (comments, Sitemap, blanks) are ignored.
// Returns { robots: rules } in source order; lbl keeps the directive's
// original capitalization, val is the (whitespace-free) directive value.
const parseRobotsTxt = (content) => {
  // Refactor: the original built identical rule objects in two duplicated
  // branches (Allow|Disallow, then User-agent); one regex covers all three.
  const DIRECTIVE_RE = /^(Allow|Disallow|User-agent):\s*(\S*)$/i;

  const rules = [];
  content.split('\n').forEach((rawLine) => {
    const match = rawLine.trim().match(DIRECTIVE_RE);
    if (match) {
      rules.push({
        lbl: match[1],
        val: match[2],
      });
    }
  });
  return { robots: rules };
}
|
||||
|
||||
// Fetches and parses the target origin's robots.txt.
// Returns the parsed rules, a { skipped } object when no rules exist, or an
// error envelope on invalid input / fetch failure.
const robotsHandler = async function(url) {
  let parsedURL;
  try {
    parsedURL = new URL(url);
  } catch (error) {
    return {
      statusCode: 400,
      body: JSON.stringify({ error: 'Invalid url query parameter' }),
    };
  }

  // robots.txt always lives at the origin root.
  const robotsURL = `${parsedURL.protocol}//${parsedURL.hostname}/robots.txt`;

  try {
    const response = await axios.get(robotsURL);

    if (response.status !== 200) {
      return {
        statusCode: response.status,
        body: JSON.stringify({ error: 'Failed to fetch robots.txt', statusCode: response.status }),
      };
    }

    const parsedData = parseRobotsTxt(response.data);
    const rules = parsedData.robots;
    if (!rules || rules.length === 0) {
      return { skipped: 'No robots.txt file present, unable to continue' };
    }
    return parsedData;
  } catch (error) {
    return {
      statusCode: 500,
      body: JSON.stringify({ error: `Error fetching robots.txt: ${error.message}` }),
    };
  }
};
|
||||
|
||||
export const handler = middleware(robotsHandler);
|
||||
export default handler;
|
||||
@@ -1,50 +1,116 @@
|
||||
const puppeteer = require('puppeteer-core');
|
||||
const chromium = require('chrome-aws-lambda');
|
||||
import puppeteer from 'puppeteer-core';
|
||||
import chromium from 'chrome-aws-lambda';
|
||||
import middleware from './_common/middleware.js';
|
||||
import { execFile } from 'child_process';
|
||||
import { promises as fs } from 'fs';
|
||||
import path from 'path';
|
||||
import pkg from 'uuid';
|
||||
const { v4: uuidv4 } = pkg;
|
||||
|
||||
exports.handler = async (event, context, callback) => {
|
||||
let browser = null;
|
||||
let targetUrl = event.queryStringParameters.url;
|
||||
// Helper function for direct chromium screenshot as fallback
|
||||
const directChromiumScreenshot = async (url) => {
|
||||
console.log(`[DIRECT-SCREENSHOT] Starting direct screenshot process for URL: ${url}`);
|
||||
|
||||
// Create a tmp filename
|
||||
const tmpDir = '/tmp';
|
||||
const uuid = uuidv4();
|
||||
const screenshotPath = path.join(tmpDir, `screenshot-${uuid}.png`);
|
||||
|
||||
console.log(`[DIRECT-SCREENSHOT] Will save screenshot to: ${screenshotPath}`);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const chromePath = process.env.CHROME_PATH || '/usr/bin/chromium';
|
||||
const args = [
|
||||
'--headless',
|
||||
'--disable-gpu',
|
||||
'--no-sandbox',
|
||||
`--screenshot=${screenshotPath}`,
|
||||
url
|
||||
];
|
||||
|
||||
if (!targetUrl) {
|
||||
callback(null, {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'URL is missing from queryStringParameters' }),
|
||||
console.log(`[DIRECT-SCREENSHOT] Executing: ${chromePath} ${args.join(' ')}`);
|
||||
|
||||
execFile(chromePath, args, async (error, stdout, stderr) => {
|
||||
if (error) {
|
||||
console.error(`[DIRECT-SCREENSHOT] Chromium error: ${error.message}`);
|
||||
return reject(error);
|
||||
}
|
||||
|
||||
try {
|
||||
// Read the screenshot file
|
||||
const screenshotData = await fs.readFile(screenshotPath);
|
||||
console.log(`[DIRECT-SCREENSHOT] Screenshot read successfully`);
|
||||
|
||||
// Convert to base64
|
||||
const base64Data = screenshotData.toString('base64');
|
||||
|
||||
await fs.unlink(screenshotPath).catch(err =>
|
||||
console.warn(`[DIRECT-SCREENSHOT] Failed to delete temp file: ${err.message}`)
|
||||
);
|
||||
|
||||
resolve(base64Data);
|
||||
} catch (readError) {
|
||||
console.error(`[DIRECT-SCREENSHOT] Failed reading screenshot: ${readError.message}`);
|
||||
reject(readError);
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const screenshotHandler = async (targetUrl) => {
|
||||
console.log(`[SCREENSHOT] Request received for URL: ${targetUrl}`);
|
||||
|
||||
if (!targetUrl) {
|
||||
console.error('[SCREENSHOT] URL is missing from queryStringParameters');
|
||||
throw new Error('URL is missing from queryStringParameters');
|
||||
}
|
||||
|
||||
if (!targetUrl.startsWith('http://') && !targetUrl.startsWith('https://')) {
|
||||
targetUrl = 'http://' + targetUrl;
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
new URL(targetUrl);
|
||||
} catch (error) {
|
||||
callback(null, {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'URL provided is invalid' }),
|
||||
});
|
||||
return;
|
||||
console.error(`[SCREENSHOT] URL provided is invalid: ${targetUrl}`);
|
||||
throw new Error('URL provided is invalid');
|
||||
}
|
||||
|
||||
// First try direct Chromium
|
||||
try {
|
||||
browser = await puppeteer.launch({
|
||||
args: chromium.args,
|
||||
console.log(`[SCREENSHOT] Using direct Chromium method for URL: ${targetUrl}`);
|
||||
const base64Screenshot = await directChromiumScreenshot(targetUrl);
|
||||
console.log(`[SCREENSHOT] Direct screenshot successful`);
|
||||
return { image: base64Screenshot };
|
||||
} catch (directError) {
|
||||
console.error(`[SCREENSHOT] Direct screenshot method failed: ${directError.message}`);
|
||||
console.log(`[SCREENSHOT] Falling back to puppeteer method...`);
|
||||
}
|
||||
|
||||
// fall back puppeteer
|
||||
let browser = null;
|
||||
try {
|
||||
console.log(`[SCREENSHOT] Launching puppeteer browser`);
|
||||
browser = await puppeteer.launch({
|
||||
args: [...chromium.args, '--no-sandbox'], // Add --no-sandbox flag
|
||||
defaultViewport: { width: 800, height: 600 },
|
||||
executablePath: process.env.CHROME_PATH || await chromium.executablePath,
|
||||
headless: chromium.headless,
|
||||
executablePath: process.env.CHROME_PATH || '/usr/bin/chromium',
|
||||
headless: true,
|
||||
ignoreHTTPSErrors: true,
|
||||
ignoreDefaultArgs: ['--disable-extensions'],
|
||||
});
|
||||
|
||||
|
||||
console.log(`[SCREENSHOT] Creating new page`);
|
||||
let page = await browser.newPage();
|
||||
|
||||
|
||||
console.log(`[SCREENSHOT] Setting page preferences`);
|
||||
await page.emulateMediaFeatures([{ name: 'prefers-color-scheme', value: 'dark' }]);
|
||||
|
||||
page.setDefaultNavigationTimeout(8000);
|
||||
|
||||
|
||||
console.log(`[SCREENSHOT] Navigating to URL: ${targetUrl}`);
|
||||
await page.goto(targetUrl, { waitUntil: 'domcontentloaded' });
|
||||
|
||||
|
||||
console.log(`[SCREENSHOT] Checking if body element exists`);
|
||||
await page.evaluate(() => {
|
||||
const selector = 'body';
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -55,26 +121,25 @@ exports.handler = async (event, context, callback) => {
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
console.log(`[SCREENSHOT] Taking screenshot`);
|
||||
const screenshotBuffer = await page.screenshot();
|
||||
|
||||
|
||||
console.log(`[SCREENSHOT] Converting screenshot to base64`);
|
||||
const base64Screenshot = screenshotBuffer.toString('base64');
|
||||
|
||||
const response = {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ image: base64Screenshot }),
|
||||
};
|
||||
|
||||
callback(null, response);
|
||||
|
||||
console.log(`[SCREENSHOT] Screenshot complete, returning image`);
|
||||
return { image: base64Screenshot };
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
callback(null, {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: `An error occurred: ${error.message}` }),
|
||||
});
|
||||
console.error(`[SCREENSHOT] Puppeteer screenshot failed: ${error.message}`);
|
||||
throw error;
|
||||
} finally {
|
||||
if (browser !== null) {
|
||||
console.log(`[SCREENSHOT] Closing browser`);
|
||||
await browser.close();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(screenshotHandler);
|
||||
export default handler;
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
const { https } = require('follow-redirects');
|
||||
const { URL } = require('url');
|
||||
import { URL } from 'url';
|
||||
import followRedirects from 'follow-redirects';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
const { https } = followRedirects;
|
||||
|
||||
const SECURITY_TXT_PATHS = [
|
||||
'/security.txt',
|
||||
@@ -37,57 +40,35 @@ const isPgpSigned = (result) => {
|
||||
return false;
|
||||
};
|
||||
|
||||
exports.handler = async (event, context) => {
|
||||
const urlParam = event.queryStringParameters.url;
|
||||
if (!urlParam) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'Missing url parameter' })
|
||||
};
|
||||
}
|
||||
const securityTxtHandler = async (urlParam) => {
|
||||
|
||||
let url;
|
||||
try {
|
||||
url = new URL(urlParam.includes('://') ? urlParam : 'https://' + urlParam);
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: 'Invalid URL format' }),
|
||||
};
|
||||
throw new Error('Invalid URL format');
|
||||
}
|
||||
url.pathname = '';
|
||||
|
||||
for (let path of SECURITY_TXT_PATHS) {
|
||||
try {
|
||||
const result = await fetchSecurityTxt(url, path);
|
||||
if (result && result.includes('<html')) return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ isPresent: false }),
|
||||
};
|
||||
if (result && result.includes('<html')) return { isPresent: false };
|
||||
if (result) {
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({
|
||||
isPresent: true,
|
||||
foundIn: path,
|
||||
content: result,
|
||||
isPgpSigned: isPgpSigned(result),
|
||||
fields: parseResult(result),
|
||||
}),
|
||||
isPresent: true,
|
||||
foundIn: path,
|
||||
content: result,
|
||||
isPgpSigned: isPgpSigned(result),
|
||||
fields: parseResult(result),
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
};
|
||||
throw new Error(error.message);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: 404,
|
||||
body: JSON.stringify({ isPresent: false }),
|
||||
};
|
||||
return { isPresent: false };
|
||||
};
|
||||
|
||||
async function fetchSecurityTxt(baseURL, path) {
|
||||
@@ -110,3 +91,6 @@ async function fetchSecurityTxt(baseURL, path) {
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export const handler = middleware(securityTxtHandler);
|
||||
export default handler;
|
||||
|
||||
@@ -1,19 +1,21 @@
|
||||
const axios = require('axios');
|
||||
const xml2js = require('xml2js');
|
||||
import axios from 'axios';
|
||||
import xml2js from 'xml2js';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async (event) => {
|
||||
const url = event.queryStringParameters.url;
|
||||
const sitemapHandler = async (url) => {
|
||||
let sitemapUrl = `${url}/sitemap.xml`;
|
||||
|
||||
const hardTimeOut = 5000;
|
||||
|
||||
try {
|
||||
// Try to fetch sitemap directly
|
||||
let sitemapRes;
|
||||
try {
|
||||
sitemapRes = await axios.get(sitemapUrl, { timeout: 5000 });
|
||||
sitemapRes = await axios.get(sitemapUrl, { timeout: hardTimeOut });
|
||||
} catch (error) {
|
||||
if (error.response && error.response.status === 404) {
|
||||
// If sitemap not found, try to fetch it from robots.txt
|
||||
const robotsRes = await axios.get(`${url}/robots.txt`, { timeout: 5000 });
|
||||
const robotsRes = await axios.get(`${url}/robots.txt`, { timeout: hardTimeOut });
|
||||
const robotsTxt = robotsRes.data.split('\n');
|
||||
|
||||
for (let line of robotsTxt) {
|
||||
@@ -24,13 +26,10 @@ exports.handler = async (event) => {
|
||||
}
|
||||
|
||||
if (!sitemapUrl) {
|
||||
return {
|
||||
statusCode: 404,
|
||||
body: JSON.stringify({ skipped: 'No sitemap found' }),
|
||||
};
|
||||
return { skipped: 'No sitemap found' };
|
||||
}
|
||||
|
||||
sitemapRes = await axios.get(sitemapUrl, { timeout: 5000 });
|
||||
sitemapRes = await axios.get(sitemapUrl, { timeout: hardTimeOut });
|
||||
} else {
|
||||
throw error; // If other error, throw it
|
||||
}
|
||||
@@ -39,23 +38,16 @@ exports.handler = async (event) => {
|
||||
const parser = new xml2js.Parser();
|
||||
const sitemap = await parser.parseStringPromise(sitemapRes.data);
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(sitemap),
|
||||
};
|
||||
return sitemap;
|
||||
} catch (error) {
|
||||
// If error occurs
|
||||
console.log(error.message);
|
||||
if (error.code === 'ECONNABORTED') {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: 'Request timed out' }),
|
||||
};
|
||||
return { error: `Request timed-out after ${hardTimeOut}ms` };
|
||||
} else {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
};
|
||||
return { error: error.message };
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(sitemapHandler);
|
||||
export default handler;
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
const axios = require('axios');
|
||||
const cheerio = require('cheerio');
|
||||
import axios from 'axios';
|
||||
import cheerio from 'cheerio';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async (event, context) => {
|
||||
let url = event.queryStringParameters.url;
|
||||
const socialTagsHandler = async (url) => {
|
||||
|
||||
// Check if url includes protocol
|
||||
if (!url.startsWith('http://') && !url.startsWith('https://')) {
|
||||
@@ -49,16 +49,9 @@ exports.handler = async (event, context) => {
|
||||
};
|
||||
|
||||
if (Object.keys(metadata).length === 0) {
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ skipped: 'No metadata found' }),
|
||||
};
|
||||
return { skipped: 'No metadata found' };
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(metadata),
|
||||
};
|
||||
return metadata;
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
@@ -66,3 +59,6 @@ exports.handler = async (event, context) => {
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(socialTagsHandler);
|
||||
export default handler;
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
const https = require('https');
|
||||
|
||||
// Legacy (pre-middleware) handler: fetches the site's TLS certificate via an
// HTTPS request and returns the peer certificate details as JSON.
exports.handler = async function (event, context) {
  const { url } = event.queryStringParameters;

  // Uniform error envelope. Note failures below are *resolved* (not rejected),
  // so the outer try/catch mainly guards unexpected synchronous errors.
  const errorResponse = (message, statusCode = 500) => {
    return {
      statusCode: statusCode,
      body: JSON.stringify({ error: message }),
    };
  };

  if (!url) {
    return errorResponse('URL query parameter is required', 400);
  }

  try {
    const response = await new Promise((resolve, reject) => {
      const req = https.request(url, res => {

        // Check if the SSL handshake was authorized
        if (!res.socket.authorized) {
          resolve(errorResponse(`SSL handshake not authorized. Reason: ${res.socket.authorizationError}`));
        } else {
          let cert = res.socket.getPeerCertificate(true);
          if (!cert || Object.keys(cert).length === 0) {
            resolve(errorResponse("No certificate presented by the server."));
          } else {
            // omit the raw and issuerCertificate fields (bulky, and the
            // issuer chain can be circular for self-signed certs — verify)
            const { raw, issuerCertificate, ...certWithoutRaw } = cert;
            resolve({
              statusCode: 200,
              body: JSON.stringify(certWithoutRaw),
            });
          }
        }
      });

      req.on('error', error => {
        // Network-level failure (DNS, refused connection, TLS error, ...).
        resolve(errorResponse(`Error fetching site certificate: ${error.message}`));
      });

      req.end();
    });

    return response;
  } catch (error) {
    return errorResponse(`Unexpected error occurred: ${error.message}`);
  }
};
|
||||
44
api/ssl.js
Normal file
@@ -0,0 +1,44 @@
|
||||
import tls from 'tls';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Retrieves the TLS certificate presented by the target host (port 443 unless
// the URL specifies one) and resolves with the certificate details, minus the
// bulky raw fields. Rejects on handshake/connection failures.
const sslHandler = async (urlString) => {
  try {
    const parsedUrl = new URL(urlString);
    const options = {
      host: parsedUrl.hostname,
      port: parsedUrl.port || 443,
      servername: parsedUrl.hostname, // SNI, so virtual hosts present the right cert
      rejectUnauthorized: false, // we report authorization state ourselves below
    };

    return new Promise((resolve, reject) => {
      const socket = tls.connect(options, () => {
        if (!socket.authorized) {
          return reject(new Error(`SSL handshake not authorized. Reason: ${socket.authorizationError}`));
        }

        const cert = socket.getPeerCertificate();
        if (!cert || Object.keys(cert).length === 0) {
          return reject(new Error(`
      No certificate presented by the server.\n
      The server is possibly not using SNI (Server Name Indication) to identify itself, and you are connecting to a hostname-aliased IP address.
      Or it may be due to an invalid SSL certificate, or an incomplete SSL handshake at the time the cert is being read.`));
        }

        // Drop the raw/issuer fields before returning the cert details.
        const { raw, issuerCertificate, ...certWithoutRaw } = cert;
        resolve(certWithoutRaw);
        socket.end();
      });

      // NOTE(review): no connect/idle timeout is set — an unresponsive host
      // can hang this promise until the platform kills the request; consider
      // socket.setTimeout with a destroy + reject.
      socket.on('error', (error) => {
        reject(new Error(`Error fetching site certificate: ${error.message}`));
      });
    });

  } catch (error) {
    // Re-throw (e.g. an invalid URL) with just the message for the middleware.
    throw new Error(error.message);
  }
};
|
||||
|
||||
export const handler = middleware(sslHandler);
|
||||
export default handler;
|
||||
@@ -1,14 +1,10 @@
|
||||
const https = require('https');
|
||||
const { performance, PerformanceObserver } = require('perf_hooks');
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
const { url } = event.queryStringParameters;
|
||||
import https from 'https';
|
||||
import { performance, PerformanceObserver } from 'perf_hooks';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
const statusHandler = async (url) => {
|
||||
if (!url) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'You must provide a URL query parameter!' }),
|
||||
};
|
||||
throw new Error('You must provide a URL query parameter!');
|
||||
}
|
||||
|
||||
let dnsLookupTime;
|
||||
@@ -43,10 +39,7 @@ exports.handler = async function(event, context) {
|
||||
});
|
||||
|
||||
if (responseCode < 200 || responseCode >= 400) {
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ error: `Received non-success response code: ${responseCode}` }),
|
||||
};
|
||||
throw new Error(`Received non-success response code: ${responseCode}`);
|
||||
}
|
||||
|
||||
performance.mark('B');
|
||||
@@ -54,16 +47,13 @@ exports.handler = async function(event, context) {
|
||||
let responseTime = performance.now() - startTime;
|
||||
obs.disconnect();
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ isUp: true, dnsLookupTime, responseTime, responseCode }),
|
||||
};
|
||||
return { isUp: true, dnsLookupTime, responseTime, responseCode };
|
||||
|
||||
} catch (error) {
|
||||
obs.disconnect();
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ error: `Error during operation: ${error.message}` }),
|
||||
};
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(statusHandler);
|
||||
export default handler;
|
||||
@@ -1,69 +1,31 @@
|
||||
const Wappalyzer = require('wappalyzer');
|
||||
|
||||
const analyze = async (url) => {
|
||||
import Wappalyzer from 'wappalyzer';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
const techStackHandler = async (url) => {
|
||||
const options = {};
|
||||
|
||||
const wappalyzer = new Wappalyzer(options);
|
||||
return (async function() {
|
||||
try {
|
||||
await wappalyzer.init()
|
||||
const headers = {}
|
||||
const storage = {
|
||||
local: {},
|
||||
session: {},
|
||||
}
|
||||
const site = await wappalyzer.open(url, headers, storage)
|
||||
const results = await site.analyze()
|
||||
return results;
|
||||
} catch (error) {
|
||||
return error;
|
||||
} finally {
|
||||
await wappalyzer.destroy()
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
exports.handler = async (event, context, callback) => {
|
||||
// Validate URL parameter
|
||||
if (!event.queryStringParameters || !event.queryStringParameters.url) {
|
||||
return {
|
||||
statusCode: 400,
|
||||
body: JSON.stringify({ error: 'Missing url parameter' }),
|
||||
};
|
||||
}
|
||||
|
||||
// Get URL from param
|
||||
let url = event.queryStringParameters.url;
|
||||
if (!/^https?:\/\//i.test(url)) {
|
||||
url = 'http://' + url;
|
||||
}
|
||||
|
||||
try {
|
||||
return analyze(url).then(
|
||||
(results) => {
|
||||
if (!results.technologies || results.technologies.length === 0) {
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({ error: 'Unable to find any technologies for site' }),
|
||||
};
|
||||
}
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(results),
|
||||
}
|
||||
}
|
||||
)
|
||||
.catch((error) => {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
await wappalyzer.init();
|
||||
const headers = {};
|
||||
const storage = {
|
||||
local: {},
|
||||
session: {},
|
||||
};
|
||||
const site = await wappalyzer.open(url, headers, storage);
|
||||
const results = await site.analyze();
|
||||
|
||||
if (!results.technologies || results.technologies.length === 0) {
|
||||
throw new Error('Unable to find any technologies for site');
|
||||
}
|
||||
return results;
|
||||
} catch (error) {
|
||||
throw new Error(error.message);
|
||||
} finally {
|
||||
await wappalyzer.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(techStackHandler);
|
||||
export default handler;
|
||||
|
||||
103
api/threats.js
Normal file
@@ -0,0 +1,103 @@
|
||||
import axios from 'axios';
|
||||
import xml2js from 'xml2js';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
// Queries the Google Safe Browsing v4 lookup API for known threats on a URL.
// Resolves to { unsafe: true, details } when matches are found, { unsafe: false }
// when clean, or { error } when the API key is missing or the request fails
// (this function never throws, so callers can aggregate results safely).
const getGoogleSafeBrowsingResult = async (url) => {
  const apiKey = process.env.GOOGLE_CLOUD_API_KEY;
  if (!apiKey) {
    return { error: 'GOOGLE_CLOUD_API_KEY is required for the Google Safe Browsing check' };
  }
  try {
    const apiEndpoint = `https://safebrowsing.googleapis.com/v4/threatMatches:find?key=${apiKey}`;
    const requestBody = {
      threatInfo: {
        threatTypes: [
          'MALWARE', 'SOCIAL_ENGINEERING', 'UNWANTED_SOFTWARE', 'POTENTIALLY_HARMFUL_APPLICATION', 'API_ABUSE'
        ],
        platformTypes: ['ANY_PLATFORM'],
        threatEntryTypes: ['URL'],
        threatEntries: [{ url }],
      },
    };

    const { data } = await axios.post(apiEndpoint, requestBody);
    if (data && data.matches) {
      return { unsafe: true, details: data.matches };
    }
    return { unsafe: false };
  } catch (error) {
    return { error: `Request failed: ${error.message}` };
  }
};
|
||||
|
||||
// Queries URLHaus (abuse.ch) for malware reports against the URL's host.
// Network failures are converted into an { error } object instead of throwing;
// an unparsable URL, however, still rejects (same as before this rewrite).
const getUrlHausResult = async (url) => {
  const domain = new URL(url).hostname;
  try {
    const response = await axios({
      method: 'post',
      url: 'https://urlhaus-api.abuse.ch/v1/host/',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded'
      },
      data: `host=${domain}`,
    });
    return response.data;
  } catch (e) {
    return { error: `Request to URLHaus failed, ${e.message}` };
  }
};
|
||||
|
||||
|
||||
// Looks a URL up in the PhishTank phishing database.
// PhishTank takes the checked URL base64-encoded in the query string and
// replies with XML, which is parsed into a plain object here.
// Any failure (HTTP, timeout, XML parse) is returned as an { error } object.
const getPhishTankResult = async (url) => {
  try {
    const urlBase64 = Buffer.from(url).toString('base64');
    const lookupEndpoint = `https://checkurl.phishtank.com/checkurl/?url=${urlBase64}`;
    const response = await axios.post(lookupEndpoint, null, {
      headers: {
        'User-Agent': 'phishtank/web-check',
      },
      timeout: 3000, // PhishTank can be slow; don't hold up the whole threats check
    });
    const parsedXml = await xml2js.parseStringPromise(response.data, { explicitArray: false });
    return parsedXml.response.results;
  } catch (error) {
    return { error: `Request to PhishTank failed: ${error.message}` };
  }
};
|
||||
|
||||
// Scans a website for viruses/threats via the Cloudmersive API.
// Requires CLOUDMERSIVE_API_KEY; missing key or request failure is reported
// as an { error } object rather than thrown.
const getCloudmersiveResult = async (url) => {
  const apiKey = process.env.CLOUDMERSIVE_API_KEY;
  if (!apiKey) {
    return { error: 'CLOUDMERSIVE_API_KEY is required for the Cloudmersive check' };
  }
  try {
    const response = await axios.post(
      'https://api.cloudmersive.com/virus/scan/website',
      `Url=${encodeURIComponent(url)}`, // form-encoded body
      {
        headers: {
          'Content-Type': 'application/x-www-form-urlencoded',
          'Apikey': apiKey,
        },
      },
    );
    return response.data;
  } catch (error) {
    return { error: `Request to Cloudmersive failed: ${error.message}` };
  }
};
|
||||
|
||||
/**
 * Aggregates threat intelligence for a URL from URLHaus, PhishTank,
 * Cloudmersive and Google Safe Browsing.
 *
 * @param {string} url - The URL to check.
 * @returns {Promise<string>} JSON string with one key per provider.
 * @throws {Error} Only when every provider lookup failed (each helper
 *   catches its own failures and returns an { error } object).
 */
const threatsHandler = async (url) => {
  try {
    // The four lookups are independent network calls, so run them
    // concurrently instead of awaiting each one in turn.
    const [urlHaus, phishTank, cloudmersive, safeBrowsing] = await Promise.all([
      getUrlHausResult(url),
      getPhishTankResult(url),
      getCloudmersiveResult(url),
      getGoogleSafeBrowsingResult(url),
    ]);
    if (urlHaus.error && phishTank.error && cloudmersive.error && safeBrowsing.error) {
      throw new Error(`All requests failed - ${urlHaus.error} ${phishTank.error} ${cloudmersive.error} ${safeBrowsing.error}`);
    }
    return JSON.stringify({ urlHaus, phishTank, cloudmersive, safeBrowsing });
  } catch (error) {
    throw new Error(error.message);
  }
};
|
||||
|
||||
export const handler = middleware(threatsHandler);
|
||||
export default handler;
|
||||
29
api/tls.js
Normal file
@@ -0,0 +1,29 @@
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
const MOZILLA_TLS_OBSERVATORY_API = 'https://tls-observatory.services.mozilla.com/api/v1';
|
||||
|
||||
/**
 * Runs a TLS configuration scan via the Mozilla TLS Observatory.
 *
 * Like the other middleware-wrapped handlers in this changeset, this returns
 * the raw result data and throws on failure — it previously returned
 * Lambda-style `{ statusCode, body }` envelopes (and swallowed errors into a
 * plain `{ error }` object), which double-wrapped the middleware's response.
 *
 * @param {string} url - URL whose hostname will be scanned.
 * @returns {Promise<object>} The Observatory scan results.
 * @throws {Error} If the URL is unparsable, no scan_id is returned, or a request fails.
 */
const tlsHandler = async (url) => {
  try {
    const domain = new URL(url).hostname;
    // Kick off (or reuse) a scan for the target, then fetch its results by id.
    const scanResponse = await axios.post(`${MOZILLA_TLS_OBSERVATORY_API}/scan?target=${domain}`);
    const scanId = scanResponse.data.scan_id;

    if (typeof scanId !== 'number') {
      throw new Error('Failed to get scan_id from TLS Observatory');
    }
    const resultResponse = await axios.get(`${MOZILLA_TLS_OBSERVATORY_API}/results?id=${scanId}`);
    return resultResponse.data;
  } catch (error) {
    throw new Error(error.message);
  }
};
|
||||
|
||||
export const handler = middleware(tlsHandler);
|
||||
export default handler;
|
||||
@@ -1,55 +1,32 @@
|
||||
const traceroute = require('traceroute');
|
||||
const url = require('url');
|
||||
import url from 'url';
|
||||
import traceroute from 'traceroute';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
const urlString = event.queryStringParameters.url;
|
||||
const traceRouteHandler = async (urlString, context) => {
|
||||
// Parse the URL and get the hostname
|
||||
const urlObject = url.parse(urlString);
|
||||
const host = urlObject.hostname;
|
||||
|
||||
try {
|
||||
if (!urlString) {
|
||||
throw new Error('URL parameter is missing!');
|
||||
}
|
||||
if (!host) {
|
||||
throw new Error('Invalid URL provided');
|
||||
}
|
||||
|
||||
// Parse the URL and get the hostname
|
||||
const urlObject = url.parse(urlString);
|
||||
const host = urlObject.hostname;
|
||||
|
||||
if (!host) {
|
||||
throw new Error('Invalid URL provided');
|
||||
}
|
||||
|
||||
// Traceroute with callback
|
||||
const result = await new Promise((resolve, reject) => {
|
||||
traceroute.trace(host, (err, hops) => {
|
||||
if (err || !hops) {
|
||||
reject(err || new Error('No hops found'));
|
||||
} else {
|
||||
resolve(hops);
|
||||
}
|
||||
});
|
||||
|
||||
// Check if remaining time is less than 8.8 seconds, then reject promise
|
||||
if (context.getRemainingTimeInMillis() < 8800) {
|
||||
reject(new Error('Lambda is about to timeout'));
|
||||
// Traceroute with callback
|
||||
const result = await new Promise((resolve, reject) => {
|
||||
traceroute.trace(host, (err, hops) => {
|
||||
if (err || !hops) {
|
||||
reject(err || new Error('No hops found'));
|
||||
} else {
|
||||
resolve(hops);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify({
|
||||
message: "Traceroute completed!",
|
||||
result,
|
||||
}),
|
||||
};
|
||||
} catch (err) {
|
||||
const message = err.code === 'ENOENT'
|
||||
? 'Traceroute command is not installed on the host.'
|
||||
: err.message;
|
||||
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({
|
||||
error: message,
|
||||
}),
|
||||
};
|
||||
}
|
||||
return {
|
||||
message: "Traceroute completed!",
|
||||
result,
|
||||
};
|
||||
};
|
||||
|
||||
export const handler = middleware(traceRouteHandler);
|
||||
export default handler;
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
const dns = require('dns').promises;
|
||||
import dns from 'dns/promises';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
exports.handler = async (event) => {
|
||||
const url = new URL(event.queryStringParameters.url);
|
||||
const txtRecordHandler = async (url, event, context) => {
|
||||
try {
|
||||
const txtRecords = await dns.resolveTxt(url.hostname);
|
||||
const parsedUrl = new URL(url);
|
||||
|
||||
const txtRecords = await dns.resolveTxt(parsedUrl.hostname);
|
||||
|
||||
// Parsing and formatting TXT records into a single object
|
||||
const readableTxtRecords = txtRecords.reduce((acc, recordArray) => {
|
||||
@@ -16,15 +18,16 @@ exports.handler = async (event) => {
|
||||
return { ...acc, ...recordObject };
|
||||
}, {});
|
||||
|
||||
return {
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(readableTxtRecords),
|
||||
};
|
||||
return readableTxtRecords;
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error:', error);
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: JSON.stringify({ error: error.message }),
|
||||
};
|
||||
if (error.code === 'ERR_INVALID_URL') {
|
||||
throw new Error(`Invalid URL ${error}`);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const handler = middleware(txtRecordHandler);
|
||||
export default handler;
|
||||
@@ -1,14 +1,7 @@
|
||||
const net = require('net');
|
||||
const psl = require('psl');
|
||||
// const { URL } = require('url');
|
||||
|
||||
const errorResponse = (message, statusCode = 444) => {
|
||||
return {
|
||||
statusCode: statusCode,
|
||||
body: JSON.stringify({ error: message }),
|
||||
};
|
||||
};
|
||||
|
||||
import net from 'net';
|
||||
import psl from 'psl';
|
||||
import axios from 'axios';
|
||||
import middleware from './_common/middleware.js';
|
||||
|
||||
const getBaseDomain = (url) => {
|
||||
let protocol = '';
|
||||
@@ -22,55 +15,7 @@ const getBaseDomain = (url) => {
|
||||
return protocol + parsed.domain;
|
||||
};
|
||||
|
||||
|
||||
exports.handler = async function(event, context) {
|
||||
let url = event.queryStringParameters.url;
|
||||
|
||||
if (!url) {
|
||||
return errorResponse('URL query parameter is required.', 400);
|
||||
}
|
||||
|
||||
if (!url.startsWith('http://') && !url.startsWith('https://')) {
|
||||
url = 'http://' + url;
|
||||
}
|
||||
|
||||
let hostname;
|
||||
try {
|
||||
hostname = getBaseDomain(new URL(url).hostname);
|
||||
} catch (error) {
|
||||
return errorResponse(`Unable to parse URL: ${error}`, 400);
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const client = net.createConnection({ port: 43, host: 'whois.internic.net' }, () => {
|
||||
client.write(hostname + '\r\n');
|
||||
});
|
||||
|
||||
let data = '';
|
||||
client.on('data', (chunk) => {
|
||||
data += chunk;
|
||||
});
|
||||
|
||||
client.on('end', () => {
|
||||
try {
|
||||
const parsedData = parseWhoisData(data);
|
||||
resolve({
|
||||
statusCode: 200,
|
||||
body: JSON.stringify(parsedData),
|
||||
});
|
||||
} catch (error) {
|
||||
resolve(errorResponse(error.message));
|
||||
}
|
||||
});
|
||||
|
||||
client.on('error', (err) => {
|
||||
resolve(errorResponse(err.message, 500));
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const parseWhoisData = (data) => {
|
||||
|
||||
if (data.includes('No match for')) {
|
||||
return { error: 'No matches found for domain in internic database'};
|
||||
}
|
||||
@@ -100,3 +45,67 @@ const parseWhoisData = (data) => {
|
||||
return parsedData;
|
||||
};
|
||||
|
||||
// Performs a raw WHOIS lookup against the InterNIC whois server (TCP port 43)
// and resolves with the output of parseWhoisData. Rejects on socket errors or
// if parsing the server's response throws.
const fetchFromInternic = async (hostname) => {
  return new Promise((resolve, reject) => {
    let buffer = '';

    // The WHOIS protocol: open a TCP connection, send the query terminated
    // by CRLF, then read until the server closes the connection.
    const socket = net.createConnection({ port: 43, host: 'whois.internic.net' }, () => {
      socket.write(`${hostname}\r\n`);
    });

    socket.on('data', (chunk) => {
      buffer += chunk;
    });

    socket.on('end', () => {
      try {
        resolve(parseWhoisData(buffer));
      } catch (parseError) {
        reject(parseError);
      }
    });

    socket.on('error', reject);
  });
};
|
||||
|
||||
// Fetches WHOIS data from the hosted whois API as a secondary source.
// Best-effort: failures are logged and null is returned so the primary
// InterNIC lookup can still succeed on its own.
const fetchFromMyAPI = async (hostname) => {
  try {
    const { data } = await axios.post('https://whois-api-zeta.vercel.app/', {
      domain: hostname
    });
    return data;
  } catch (error) {
    console.error('Error fetching data from your API:', error.message);
    return null;
  }
};
|
||||
|
||||
// Resolves WHOIS information for a URL: normalizes the input to include a
// scheme, reduces it to its registrable base domain, then queries InterNIC
// and the hosted WHOIS API in parallel.
const whoisHandler = async (url) => {
  const normalizedUrl = url.startsWith('http://') || url.startsWith('https://')
    ? url
    : `http://${url}`; // bare hostnames need a scheme for `new URL` to accept them

  let hostname;
  try {
    hostname = getBaseDomain(new URL(normalizedUrl).hostname);
  } catch (error) {
    throw new Error(`Unable to parse URL: ${error}`);
  }

  const [internicData, whoisData] = await Promise.all([
    fetchFromInternic(hostname),
    fetchFromMyAPI(hostname),
  ]);

  return {
    internicData,
    whoisData,
  };
};
|
||||
|
||||
export const handler = middleware(whoisHandler);
|
||||
export default handler;
|
||||
|
||||
79
astro.config.mjs
Normal file
@@ -0,0 +1,79 @@
|
||||
import { defineConfig } from 'astro/config';
|
||||
|
||||
// Integrations
|
||||
import svelte from '@astrojs/svelte';
|
||||
import react from "@astrojs/react";
|
||||
import partytown from '@astrojs/partytown';
|
||||
import sitemap from '@astrojs/sitemap';
|
||||
|
||||
// Adapters
|
||||
import vercelAdapter from '@astrojs/vercel/serverless';
|
||||
import netlifyAdapter from '@astrojs/netlify';
|
||||
import nodeAdapter from '@astrojs/node';
|
||||
import cloudflareAdapter from '@astrojs/cloudflare';
|
||||
|
||||
// Helper function to unwrap both Vite and Node environment variables
|
||||
// Helper function to unwrap both Vite and Node environment variables.
// `import.meta.env` only exists when processed by Vite — under plain Node it
// is undefined, so the access is guarded with optional chaining to avoid a
// TypeError. Returns the first truthy value of: process.env, import.meta.env,
// then the provided fallback.
const unwrapEnvVar = (varName, fallbackValue) => {
  const classicEnvVar = process?.env && process.env[varName];
  const viteEnvVar = import.meta.env?.[varName];
  return classicEnvVar || viteEnvVar || fallbackValue;
}
|
||||
|
||||
// Determine the deploy target (vercel, netlify, cloudflare, node)
|
||||
const deployTarget = unwrapEnvVar('PLATFORM', 'node').toLowerCase();
|
||||
|
||||
// Determine the output mode (server, hybrid or static)
|
||||
const output = unwrapEnvVar('OUTPUT', 'hybrid');
|
||||
|
||||
// The FQDN of where the site is hosted (used for sitemaps & canonical URLs)
|
||||
const site = unwrapEnvVar('SITE_URL', 'https://web-check.xyz');
|
||||
|
||||
// The base URL of the site (if serving from a subdirectory)
|
||||
const base = unwrapEnvVar('BASE_URL', '/');
|
||||
|
||||
// Should run the app in boss-mode (requires extra configuration)
|
||||
const isBossServer = unwrapEnvVar('BOSS_SERVER', false);
|
||||
|
||||
// Initialize Astro integrations
|
||||
const integrations = [svelte(), react(), partytown(), sitemap()];
|
||||
|
||||
// Set the appropriate adapter, based on the deploy target
|
||||
// Resolve the Astro deployment adapter for the given target platform.
// Adapters are constructed lazily (only the matching factory is invoked),
// and an unrecognized target fails fast with a descriptive error.
function getAdapter(target) {
  if (target === 'vercel') return vercelAdapter();
  if (target === 'netlify') return netlifyAdapter();
  if (target === 'cloudflare') return cloudflareAdapter();
  if (target === 'node') return nodeAdapter({ mode: 'middleware' });
  throw new Error(`Unsupported deploy target: ${target}`);
}
|
||||
const adapter = getAdapter(deployTarget);
|
||||
|
||||
// Print build information to console
|
||||
console.log(
|
||||
`\n\x1b[1m\x1b[35m Preparing to start build of Web Check.... \x1b[0m\n`,
|
||||
`\x1b[35m\x1b[2mCompiling for "${deployTarget}" using "${output}" mode, `
|
||||
+ `to deploy to "${site}" at "${base}"\x1b[0m\n`,
|
||||
`\x1b[2m\x1b[36m🛟 For documentation and support, visit the GitHub repo: ` +
|
||||
`https://github.com/lissy93/web-check \n`,
|
||||
`💖 Found Web-Check useful? Consider sponsoring us on GitHub ` +
|
||||
`to help fund maintenance & development.\x1b[0m\n`,
|
||||
);
|
||||
|
||||
// Route redirects applied by Astro at build time.
const redirects = {
  '/about': '/check/about',
};

// Skip the marketing homepage for self-hosted users.
// (`!isBossServer` alone suffices — the old `isBossServer !== true` clause was
// dead code, always true whenever `!isBossServer` was.)
if (!isBossServer) {
  redirects['/'] = '/check';
}
|
||||
|
||||
// Export Astro configuration
|
||||
export default defineConfig({ output, base, integrations, site, adapter, redirects });
|
||||
|
||||
@@ -4,5 +4,5 @@ services:
|
||||
container_name: Web-Check
|
||||
image: lissy93/web-check
|
||||
ports:
|
||||
- 8888:8888
|
||||
- 3000:3000
|
||||
restart: unless-stopped
|
||||
|
||||
17
fly.toml
Normal file
@@ -0,0 +1,17 @@
|
||||
app = 'web-check'
|
||||
primary_region = 'lhr'
|
||||
|
||||
[build]
|
||||
|
||||
[http_service]
|
||||
internal_port = 3000
|
||||
force_https = true
|
||||
auto_stop_machines = true
|
||||
auto_start_machines = true
|
||||
min_machines_running = 0
|
||||
processes = ['app']
|
||||
|
||||
[[vm]]
|
||||
memory = '1gb'
|
||||
cpu_kind = 'shared'
|
||||
cpus = 1
|
||||