Compare commits
1 Commits
contributo ... WIP/extra-

| Author | SHA1 | Date |
|---|---|---|
|  | c1daa4799c |  |
31 .env
@@ -2,26 +2,11 @@
 # Be sure to uncomment any line you populate
 # Everything is optional, but some features won't work without external API access
 
-# API Keys for external services (backend)
-GOOGLE_CLOUD_API_KEY=''
-TORRENT_IP_API_KEY=''
-SECURITY_TRAILS_API_KEY=''
-BUILT_WITH_API_KEY=''
-URL_SCAN_API_KEY=''
-TRANCO_USERNAME=''
-TRANCO_API_KEY=''
-CLOUDMERSIVE_API_KEY=''
-
-# API Keys for external services (frontend)
-REACT_APP_SHODAN_API_KEY=''
-REACT_APP_WHO_API_KEY=''
-
-# Configuration settings
-# CHROME_PATH='/usr/bin/chromium' # The path the the Chromium executable
-# PORT='3000' # Port to serve the API, when running server.js
-# DISABLE_GUI='false' # Disable the GUI, and only serve the API
-# API_TIMEOUT_LIMIT='10000' # The timeout limit for API requests, in milliseconds
-# API_CORS_ORIGIN='*' # Enable CORS, by setting your allowed hostname(s) here
-# API_ENABLE_RATE_LIMIT='true' # Enable rate limiting for the API
-# REACT_APP_API_ENDPOINT='/api' # The endpoint for the API (can be local or remote)
-# ENABLE_ANALYTICS='false' # Enable Plausible hit counter for the frontend
+# GOOGLE_CLOUD_API_KEY=''
+# SHODAN_API_KEY=''
+# REACT_APP_SHODAN_API_KEY=''
+# WHO_API_KEY=''
+# REACT_APP_WHO_API_KEY=''
+# SECURITY_TRAILS_API_KEY=''
+# BUILT_WITH_API_KEY=''
+# CI=false
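Either version of this file is a plain dotenv file, so once it is loaded the keys are available on `process.env`. A minimal sketch of reading one of them, assuming the standard `dotenv` package is used for local runs (Web-Check itself may wire this up differently):

```javascript
// Load .env into process.env (only needed for local runs; hosting platforms
// usually inject environment variables themselves).
require('dotenv').config();

// Any key left commented out in .env is simply undefined here.
const shodanKey = process.env.SHODAN_API_KEY;
if (!shodanKey) {
  console.warn('SHODAN_API_KEY not set - Shodan-based checks will be skipped');
}
```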
1160 .github/README.md vendored
1 .github/screenshots/README.md vendored
@@ -1 +0,0 @@
Before: 2.0 MiB | After: 2.0 MiB
BIN .github/screenshots/tiles/archives.png vendored
Before: 40 KiB
BIN .github/screenshots/tiles/block-lists.png vendored
Before: 95 KiB
BIN .github/screenshots/tiles/carbon.png vendored
Before: 40 KiB
BIN .github/screenshots/tiles/dns-server.png vendored
Before: 40 KiB
BIN .github/screenshots/tiles/email-config.png vendored
Before: 67 KiB
BIN .github/screenshots/tiles/firewall.png vendored
Before: 12 KiB
BIN .github/screenshots/tiles/hsts.png vendored
Before: 27 KiB
BIN .github/screenshots/tiles/http-security.png vendored
Before: 37 KiB
BIN .github/screenshots/tiles/linked-pages.png vendored
Before: 64 KiB
BIN .github/screenshots/tiles/ranking.png vendored
Before: 26 KiB
BIN .github/screenshots/tiles/screenshot.png vendored
Before: 79 KiB
BIN .github/screenshots/tiles/security-txt.png vendored
Before: 63 KiB
BIN .github/screenshots/tiles/sitemap.png vendored
Before: 40 KiB
BIN .github/screenshots/tiles/social-tags.png vendored
Before: 207 KiB
BIN .github/screenshots/tiles/tech-stack.png vendored
Before: 146 KiB
BIN .github/screenshots/tiles/threats.png vendored
Before: 26 KiB
BIN .github/screenshots/tiles/tls-cipher-suites.png vendored
Before: 64 KiB
Before: 90 KiB
BIN .github/screenshots/tiles/tls-security-config.png vendored
Before: 127 KiB
BIN .github/screenshots/wc_carbon.png vendored Normal file
After: 31 KiB
Before: 35 KiB | After: 35 KiB
Before: 53 KiB | After: 53 KiB
BIN .github/screenshots/wc_dnssec-2.png vendored Normal file
After: 46 KiB
Before: 165 KiB | After: 165 KiB
Before: 44 KiB | After: 44 KiB
BIN .github/screenshots/wc_features-2.png vendored Normal file
After: 132 KiB
Before: 73 KiB | After: 73 KiB
Before: 105 KiB | After: 105 KiB
Before: 24 KiB | After: 24 KiB
Before: 94 KiB | After: 94 KiB
Before: 15 KiB | After: 15 KiB
Before: 158 KiB | After: 158 KiB
Before: 23 KiB | After: 23 KiB
Before: 114 KiB | After: 114 KiB
Before: 28 KiB | After: 28 KiB
Before: 46 KiB | After: 46 KiB
Before: 18 KiB | After: 18 KiB
Before: 54 KiB | After: 54 KiB
Before: 123 KiB | After: 123 KiB
BIN .github/screenshots/web-check-screenshot1.png vendored
Before: 3.0 MiB
BIN .github/screenshots/web-check-screenshot10.png vendored
Before: 1.4 MiB
BIN .github/screenshots/web-check-screenshot2.png vendored
Before: 1.7 MiB
BIN .github/screenshots/web-check-screenshot3.png vendored
Before: 2.6 MiB
BIN .github/screenshots/web-check-screenshot4.png vendored
Before: 810 KiB
37 .github/workflows/credits.yml vendored
@@ -1,37 +0,0 @@
-# Inserts list of community members into ./README.md
-name: 💓 Inserts Contributors & Sponsors
-on:
-  workflow_dispatch: # Manual dispatch
-  schedule:
-    - cron: '45 1 * * 0' # At 01:45 on Sunday.
-
-jobs:
-  # Job #1 - Fetches sponsors and inserts table into readme
-  insert-sponsors:
-    runs-on: ubuntu-latest
-    name: Inserts Sponsors 💓
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Updates readme with sponsors
-        uses: JamesIves/github-sponsors-readme-action@v1
-        with:
-          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
-          file: .github/README.md
-
-  # Job #2 - Fetches contributors and inserts table into readme
-  insert-contributors:
-    runs-on: ubuntu-latest
-    name: Inserts Contributors 💓
-    steps:
-      - name: Updates readme with contributors
-        uses: akhilmhdh/contributors-readme-action@v2.3.10
-        env:
-          GITHUB_TOKEN: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
-        with:
-          image_size: 80
-          readme_path: .github/README.md
-          columns_per_row: 6
-          commit_message: 'docs: Updates contributors list'
-          committer_username: liss-bot
-          committer_email: liss-bot@d0h.co
128 .github/workflows/deploy-aws.yml vendored
@@ -1,128 +0,0 @@
-name: 🚀 Deploy to AWS
-
-on:
-  workflow_dispatch:
-  push:
-    branches:
-      - master
-    tags:
-      - '*'
-    paths:
-      - api/**
-      - serverless.yml
-      - package.json
-      - .github/workflows/deploy-aws.yml
-
-jobs:
-  deploy-api:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: 16
-
-      - name: Cache node_modules
-        uses: actions/cache@v4
-        with:
-          path: node_modules
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-yarn-
-
-      - name: Create GitHub deployment for API
-        uses: chrnorm/deployment-action@releases/v2
-        id: deployment_api
-        with:
-          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
-          environment: AWS (Backend API)
-          ref: ${{ github.ref }}
-
-      - name: Install Serverless CLI and dependencies
-        run: |
-          npm i -g serverless
-          yarn
-
-      - name: Deploy to AWS
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
-        run: serverless deploy
-
-      - name: Update GitHub deployment status (API)
-        if: always()
-        uses: chrnorm/deployment-status@v2
-        with:
-          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
-          state: "${{ job.status }}"
-          deployment_id: ${{ steps.deployment_api.outputs.deployment_id }}
-          ref: ${{ github.ref }}
-
-  deploy-frontend:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: 16
-
-      - name: Cache node_modules
-        uses: actions/cache@v4
-        with:
-          path: node_modules
-          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-yarn-
-
-      - name: Create GitHub deployment for Frontend
-        uses: chrnorm/deployment-action@v2
-        id: deployment_frontend
-        with:
-          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
-          environment: AWS (Frontend Web UI)
-          ref: ${{ github.ref }}
-
-      - name: Install dependencies and build
-        run: |
-          yarn install
-          yarn build
-
-      - name: Setup AWS
-        uses: aws-actions/configure-aws-credentials@v4
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: us-east-1
-
-      - name: Upload to S3
-        env:
-          AWS_S3_BUCKET: 'web-check-frontend'
-        run: aws s3 sync ./build/ s3://$AWS_S3_BUCKET/ --delete
-
-      - name: Invalidate CloudFront cache
-        uses: chetan/invalidate-cloudfront-action@v2
-        env:
-          DISTRIBUTION: E30XKAM2TG9FD8
-          PATHS: '/*'
-          AWS_REGION: 'us-east-1'
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-
-      - name: Update GitHub deployment status (Frontend)
-        if: always()
-        uses: chrnorm/deployment-status@v2
-        with:
-          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
-          state: "${{ job.status }}"
-          deployment_id: ${{ steps.deployment_frontend.outputs.deployment_id }}
-          ref: ${{ github.ref }}
18 .github/workflows/docker.yml vendored
@@ -23,14 +23,14 @@ jobs:
   docker:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout 🛎️
+      - name: Checkout
        uses: actions/checkout@v2
 
-      - name: Extract tag name 🏷️
+      - name: Extract tag name
        shell: bash
        run: echo "GIT_TAG=$(echo ${GITHUB_REF#refs/tags/} | sed 's/\//_/g')" >> $GITHUB_ENV
 
-      - name: Compute tags 🔖
+      - name: Compute tags
        id: compute-tags
        run: |
          if [[ "${{ github.ref }}" == "refs/heads/master" ]]; then
@@ -41,33 +41,33 @@ jobs:
          echo "DOCKERHUB_TAG=${DOCKERHUB_REGISTRY}/${DOCKER_USER}/${IMAGE_NAME}:${GIT_TAG}" >> $GITHUB_ENV
          fi
 
-      - name: Set up QEMU 🐧
+      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1
 
-      - name: Set up Docker Buildx 🐳
+      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
 
-      - name: Login to GitHub Container Registry 🔑
+      - name: Login to GitHub Container Registry
        uses: docker/login-action@v1
        with:
          registry: ${{ env.GHCR_REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Login to DockerHub 🔑
+      - name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          registry: ${{ env.DOCKERHUB_REGISTRY }}
          username: ${{ env.DOCKER_USER }}
          password: ${{ secrets.DOCKERHUB_PASSWORD }}
 
-      - name: Build and push Docker images 🛠️
+      - name: Build and push Docker images
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          push: true
-          platforms: linux/amd64,linux/arm64/v8
+          platforms: linux/amd64
          tags: |
            ${{ env.GHCR_TAG }}
            ${{ env.DOCKERHUB_TAG }}
2 .github/workflows/mirror.yml vendored
@@ -8,7 +8,7 @@ jobs:
   codeberg:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v2
        with: { fetch-depth: 0 }
      - uses: pixta-dev/repository-mirroring-action@v1
        with:
72 .gitignore vendored
@@ -1,62 +1,28 @@
-# ------------------------
-# ENVIRONMENT SETTINGS
-# ------------------------
+# Keys
 .env
 
-# ------------------------
-# PRODUCTION
-# ------------------------
-/build/
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
 
-# ------------------------
-# BUILT FILES
-# ------------------------
-dist/
-.vercel/
-.netlify/
-.webpack/
-.serverless/
-.astro/
+# testing
+/coverage
 
-# ------------------------
-# DEPENDENCIES
-# ------------------------
-node_modules/
-.yarn/cache/
-.yarn/unplugged/
-.yarn/build-state.yml
-.yarn/install-state.gz
-.pnpm/
-.pnp.*
+# production
+/build
+
+# misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
 
-# ------------------------
-# LOGS
-# ------------------------
-logs/
-*.log
 npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
-lerna-debug.log*
-.pnpm-debug.log*
-
-# ------------------------
-# TESTING
-# ------------------------
-coverage/
-.nyc_output/
-
-# ------------------------
-# OS SPECIFIC
-# ------------------------
-.DS_Store
-Thumbs.db
-
-# ------------------------
-# EDITORS
-# ------------------------
-.idea/
-.vscode/
-*.swp
-*.swo
+
+# Local Netlify folder
+.netlify
64 Dockerfile
@@ -1,62 +1,12 @@
-# Specify the Node.js version to use
-ARG NODE_VERSION=21
-
-# Specify the Debian version to use, the default is "bullseye"
-ARG DEBIAN_VERSION=bullseye
-
-# Use Node.js Docker image as the base image, with specific Node and Debian versions
-FROM node:${NODE_VERSION}-${DEBIAN_VERSION} AS build
-
-# Set the container's default shell to Bash and enable some options
-SHELL ["/bin/bash", "-euo", "pipefail", "-c"]
-
-# Install Chromium browser and Download and verify Google Chrome’s signing key
-RUN apt-get update -qq --fix-missing && \
-  apt-get -qqy install --allow-unauthenticated gnupg wget && \
-  wget --quiet --output-document=- https://dl-ssl.google.com/linux/linux_signing_key.pub | gpg --dearmor > /etc/apt/trusted.gpg.d/google-archive.gpg && \
-  echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list && \
-  apt-get update -qq && \
-  apt-get -qqy --no-install-recommends install chromium traceroute python make g++ && \
-  rm -rf /var/lib/apt/lists/*
-
-# Run the Chromium browser's version command and redirect its output to the /etc/chromium-version file
-RUN /usr/bin/chromium --no-sandbox --version > /etc/chromium-version
-
-# Set the working directory to /app
+FROM node:16-buster-slim AS base
 WORKDIR /app
 
-# Copy package.json and yarn.lock to the working directory
-COPY package.json yarn.lock ./
-
-# Run yarn install to install dependencies and clear yarn cache
-RUN apt-get update && \
-  yarn install --frozen-lockfile --network-timeout 100000 && \
-  rm -rf /app/node_modules/.cache
-
-# Copy all files to working directory
+FROM base AS builder
 COPY . .
 
-# Run yarn build to build the application
-RUN yarn build --production
-
-# Final stage
-FROM node:${NODE_VERSION}-${DEBIAN_VERSION} AS final
-
-WORKDIR /app
-
-COPY package.json yarn.lock ./
-COPY --from=build /app .
-
 RUN apt-get update && \
-  apt-get install -y --no-install-recommends chromium traceroute && \
+  apt-get install -y chromium traceroute && \
   chmod 755 /usr/bin/chromium && \
-  rm -rf /var/lib/apt/lists/* /app/node_modules/.cache
+  rm -rf /var/lib/apt/lists/*
+
+RUN npm install --force
 
-# Exposed container port, the default is 3000, which can be modified through the environment variable PORT
-EXPOSE ${PORT:-3000}
+EXPOSE 8888
 
-# Set the environment variable CHROME_PATH to specify the path to the Chromium binaries
 ENV CHROME_PATH='/usr/bin/chromium'
 
-# Define the command executed when the container starts and start the server.js of the Node.js application
-CMD ["yarn", "start"]
+CMD ["npm", "run", "serve"]
@@ -1,51 +0,0 @@
-const path = require('path');
-const nodeExternals = require('webpack-node-externals');
-
-module.exports = {
-  target: 'node',
-  mode: 'production',
-  entry: {
-    'carbon': './api/carbon.js',
-    'cookies': './api/cookies.js',
-    'dns-server': './api/dns-server.js',
-    'dns': './api/dns.js',
-    'dnssec': './api/dnssec.js',
-    'features': './api/features.js',
-    'get-ip': './api/get-ip.js',
-    'headers': './api/headers.js',
-    'hsts': './api/hsts.js',
-    'linked-pages': './api/linked-pages.js',
-    'mail-config': './api/mail-config.js',
-    'ports': './api/ports.js',
-    'quality': './api/quality.js',
-    'redirects': './api/redirects.js',
-    'robots-txt': './api/robots-txt.js',
-    'screenshot': './api/screenshot.js',
-    'security-txt': './api/security-txt.js',
-    'sitemap': './api/sitemap.js',
-    'social-tags': './api/social-tags.js',
-    'ssl': './api/ssl.js',
-    'status': './api/status.js',
-    'tech-stack': './api/tech-stack.js',
-    'trace-route': './api/trace-route.js',
-    'txt-records': './api/txt-records.js',
-    'whois': './api/whois.js',
-  },
-  externals: [nodeExternals()],
-  output: {
-    filename: '[name].js',
-    path: path.resolve(__dirname, '.webpack'),
-    libraryTarget: 'commonjs2'
-  },
-  module: {
-    rules: [
-      {
-        test: /\.js$/,
-        use: {
-          loader: 'babel-loader'
-        },
-        exclude: /node_modules/,
-      }
-    ]
-  }
-};
@@ -1,155 +0,0 @@
-const normalizeUrl = (url) => {
-  return url.startsWith('http') ? url : `https://${url}`;
-};
-
-// If present, set a shorter timeout for API requests
-const TIMEOUT = process.env.API_TIMEOUT_LIMIT ? parseInt(process.env.API_TIMEOUT_LIMIT, 10) : 60000;
-
-// If present, set CORS allowed origins for responses
-const ALLOWED_ORIGINS = process.env.API_CORS_ORIGIN || '*';
-
-// Disable everything :( Setting this env var will turn off the instance, and show message
-const DISABLE_EVERYTHING = !!process.env.VITE_DISABLE_EVERYTHING;
-
-// Set the platform currently being used
-let PLATFORM = 'NETLIFY';
-if (process.env.PLATFORM) { PLATFORM = process.env.PLATFORM.toUpperCase(); }
-else if (process.env.VERCEL) { PLATFORM = 'VERCEL'; }
-else if (process.env.WC_SERVER) { PLATFORM = 'NODE'; }
-
-// Define the headers to be returned with each response
-const headers = {
-  'Access-Control-Allow-Origin': ALLOWED_ORIGINS,
-  'Access-Control-Allow-Credentials': true,
-  'Content-Type': 'application/json;charset=UTF-8',
-};
-
-const timeoutErrorMsg = 'You can re-trigger this request, by clicking "Retry"\n'
-  + 'If you\'re running your own instance of Web Check, then you can '
-  + 'resolve this issue, by increasing the timeout limit in the '
-  + '`API_TIMEOUT_LIMIT` environmental variable to a higher value (in milliseconds), '
-  + 'or if you\'re hosting on Vercel increase the maxDuration in vercel.json.\n\n'
-  + `The public instance currently has a lower timeout of ${TIMEOUT}ms `
-  + 'in order to keep running costs affordable, so that Web Check can '
-  + 'remain freely available for everyone.';
-
-const disabledErrorMsg = 'Error - WebCheck Temporarily Disabled.\n\n'
-  + 'We\'re sorry, but due to the increased cost of running Web Check '
-  + 'we\'ve had to temporatily disable the public instand. '
-  + 'We\'re activley looking for affordable ways to keep Web Check running, '
-  + 'while free to use for everybody.\n'
-  + 'In the meantime, since we\'ve made our code free and open source, '
-  + 'you can get Web Check running on your own system, by following the instructions in our GitHub repo';
-
-// A middleware function used by all API routes on all platforms
-const commonMiddleware = (handler) => {
-
-  // Create a timeout promise, to throw an error if a request takes too long
-  const createTimeoutPromise = (timeoutMs) => {
-    return new Promise((_, reject) => {
-      setTimeout(() => {
-        reject(new Error(`Request timed-out after ${timeoutMs} ms`));
-      }, timeoutMs);
-    });
-  };
-
-  // Vercel
-  const vercelHandler = async (request, response) => {
-
-    if (DISABLE_EVERYTHING) {
-      response.status(503).json({ error: disabledErrorMsg });
-    }
-
-    const queryParams = request.query || {};
-    const rawUrl = queryParams.url;
-
-    if (!rawUrl) {
-      return response.status(500).json({ error: 'No URL specified' });
-    }
-
-    const url = normalizeUrl(rawUrl);
-
-    try {
-      // Race the handler against the timeout
-      const handlerResponse = await Promise.race([
-        handler(url, request),
-        createTimeoutPromise(TIMEOUT)
-      ]);
-
-      if (handlerResponse.body && handlerResponse.statusCode) {
-        response.status(handlerResponse.statusCode).json(handlerResponse.body);
-      } else {
-        response.status(200).json(
-          typeof handlerResponse === 'object' ? handlerResponse : JSON.parse(handlerResponse)
-        );
-      }
-    } catch (error) {
-      let errorCode = 500;
-      if (error.message.includes('timed-out') || response.statusCode === 504) {
-        errorCode = 408;
-        error.message = `${error.message}\n\n${timeoutErrorMsg}`;
-      }
-      response.status(errorCode).json({ error: error.message });
-    }
-  };
-
-  // Netlify
-  const netlifyHandler = async (event, context, callback) => {
-    const queryParams = event.queryStringParameters || event.query || {};
-    const rawUrl = queryParams.url;
-
-    if (DISABLE_EVERYTHING) {
-      callback(null, {
-        statusCode: 503,
-        body: JSON.stringify({ error: 'Web-Check is temporarily disabled. Please try again later.' }),
-        headers,
-      });
-      return;
-    }
-
-    if (!rawUrl) {
-      callback(null, {
-        statusCode: 500,
-        body: JSON.stringify({ error: 'No URL specified' }),
-        headers,
-      });
-      return;
-    }
-
-    const url = normalizeUrl(rawUrl);
-
-    try {
-      // Race the handler against the timeout
-      const handlerResponse = await Promise.race([
-        handler(url, event, context),
-        createTimeoutPromise(TIMEOUT)
-      ]);
-
-      if (handlerResponse.body && handlerResponse.statusCode) {
-        callback(null, handlerResponse);
-      } else {
-        callback(null, {
-          statusCode: 200,
-          body: typeof handlerResponse === 'object' ? JSON.stringify(handlerResponse) : handlerResponse,
-          headers,
-        });
-      }
-    } catch (error) {
-      callback(null, {
-        statusCode: 500,
-        body: JSON.stringify({ error: error.message }),
-        headers,
-      });
-    }
-  };
-
-  // The format of the handlers varies between platforms
-  const nativeMode = (['VERCEL', 'NODE'].includes(PLATFORM));
-  return nativeMode ? vercelHandler : netlifyHandler;
-};
-
-if (PLATFORM === 'NETLIFY') {
-  module.exports = commonMiddleware;
-}
-
-export default commonMiddleware;
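For context, this removed `commonMiddleware` is what the other deleted API files plug into via `export const handler = middleware(someHandler);`. A minimal sketch of an endpoint written against it (the file name `api/example.js` and the returned payload are made up for illustration):

```javascript
// api/example.js - hypothetical endpoint using the (removed) common middleware
import middleware from './_common/middleware.js';

// The wrapped function only receives a normalised URL and returns plain data;
// the middleware supplies the timeout race, CORS headers and the
// platform-specific (Vercel/Netlify/Node) request and response plumbing.
const exampleHandler = async (url) => {
  return { checkedUrl: url, ok: true };
};

export const handler = middleware(exampleHandler);
export default handler;
```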
@@ -1,84 +0,0 @@
-import axios from 'axios';
-import middleware from './_common/middleware.js';
-
-const convertTimestampToDate = (timestamp) => {
-  const [year, month, day, hour, minute, second] = [
-    timestamp.slice(0, 4),
-    timestamp.slice(4, 6) - 1,
-    timestamp.slice(6, 8),
-    timestamp.slice(8, 10),
-    timestamp.slice(10, 12),
-    timestamp.slice(12, 14),
-  ].map(num => parseInt(num, 10));
-
-  return new Date(year, month, day, hour, minute, second);
-}
-
-const countPageChanges = (results) => {
-  let prevDigest = null;
-  return results.reduce((acc, curr) => {
-    if (curr[2] !== prevDigest) {
-      prevDigest = curr[2];
-      return acc + 1;
-    }
-    return acc;
-  }, -1);
-}
-
-const getAveragePageSize = (scans) => {
-  const totalSize = scans.map(scan => parseInt(scan[3], 10)).reduce((sum, size) => sum + size, 0);
-  return Math.round(totalSize / scans.length);
-};
-
-const getScanFrequency = (firstScan, lastScan, totalScans, changeCount) => {
-  const formatToTwoDecimal = num => parseFloat(num.toFixed(2));
-
-  const dayFactor = (lastScan - firstScan) / (1000 * 60 * 60 * 24);
-  const daysBetweenScans = formatToTwoDecimal(dayFactor / totalScans);
-  const daysBetweenChanges = formatToTwoDecimal(dayFactor / changeCount);
-  const scansPerDay = formatToTwoDecimal((totalScans - 1) / dayFactor);
-  const changesPerDay = formatToTwoDecimal(changeCount / dayFactor);
-  return {
-    daysBetweenScans,
-    daysBetweenChanges,
-    scansPerDay,
-    changesPerDay,
-  };
-};
-
-const wayBackHandler = async (url) => {
-  const cdxUrl = `https://web.archive.org/cdx/search/cdx?url=${url}&output=json&fl=timestamp,statuscode,digest,length,offset`;
-
-  try {
-    const { data } = await axios.get(cdxUrl);
-
-    // Check there's data
-    if (!data || !Array.isArray(data) || data.length <= 1) {
-      return { skipped: 'Site has never before been archived via the Wayback Machine' };
-    }
-
-    // Remove the header row
-    data.shift();
-
-    // Process and return the results
-    const firstScan = convertTimestampToDate(data[0][0]);
-    const lastScan = convertTimestampToDate(data[data.length - 1][0]);
-    const totalScans = data.length;
-    const changeCount = countPageChanges(data);
-    return {
-      firstScan,
-      lastScan,
-      totalScans,
-      changeCount,
-      averagePageSize: getAveragePageSize(data),
-      scanFrequency: getScanFrequency(firstScan, lastScan, totalScans, changeCount),
-      scans: data,
-      scanUrl: url,
-    };
-  } catch (err) {
-    return { error: `Error fetching Wayback data: ${err.message}` };
-  }
-};
-
-export const handler = middleware(wayBackHandler);
-export default handler;
@@ -1,105 +0,0 @@
-import dns from 'dns';
-import { URL } from 'url';
-import middleware from './_common/middleware.js';
-
-const DNS_SERVERS = [
-  { name: 'AdGuard', ip: '176.103.130.130' },
-  { name: 'AdGuard Family', ip: '176.103.130.132' },
-  { name: 'CleanBrowsing Adult', ip: '185.228.168.10' },
-  { name: 'CleanBrowsing Family', ip: '185.228.168.168' },
-  { name: 'CleanBrowsing Security', ip: '185.228.168.9' },
-  { name: 'CloudFlare', ip: '1.1.1.1' },
-  { name: 'CloudFlare Family', ip: '1.1.1.3' },
-  { name: 'Comodo Secure', ip: '8.26.56.26' },
-  { name: 'Google DNS', ip: '8.8.8.8' },
-  { name: 'Neustar Family', ip: '156.154.70.3' },
-  { name: 'Neustar Protection', ip: '156.154.70.2' },
-  { name: 'Norton Family', ip: '199.85.126.20' },
-  { name: 'OpenDNS', ip: '208.67.222.222' },
-  { name: 'OpenDNS Family', ip: '208.67.222.123' },
-  { name: 'Quad9', ip: '9.9.9.9' },
-  { name: 'Yandex Family', ip: '77.88.8.7' },
-  { name: 'Yandex Safe', ip: '77.88.8.88' },
-];
-const knownBlockIPs = [
-  '146.112.61.106', // OpenDNS
-  '185.228.168.10', // CleanBrowsing
-  '8.26.56.26', // Comodo
-  '9.9.9.9', // Quad9
-  '208.69.38.170', // Some OpenDNS IPs
-  '208.69.39.170', // Some OpenDNS IPs
-  '208.67.222.222', // OpenDNS
-  '208.67.222.123', // OpenDNS FamilyShield
-  '199.85.126.10', // Norton
-  '199.85.126.20', // Norton Family
-  '156.154.70.22', // Neustar
-  '77.88.8.7', // Yandex
-  '77.88.8.8', // Yandex
-  '::1', // Localhost IPv6
-  '2a02:6b8::feed:0ff', // Yandex DNS
-  '2a02:6b8::feed:bad', // Yandex Safe
-  '2a02:6b8::feed:a11', // Yandex Family
-  '2620:119:35::35', // OpenDNS
-  '2620:119:53::53', // OpenDNS FamilyShield
-  '2606:4700:4700::1111', // Cloudflare
-  '2606:4700:4700::1001', // Cloudflare
-  '2001:4860:4860::8888', // Google DNS
-  '2a0d:2a00:1::', // AdGuard
-  '2a0d:2a00:2::' // AdGuard Family
-];
-
-const isDomainBlocked = async (domain, serverIP) => {
-  return new Promise((resolve) => {
-    dns.resolve4(domain, { server: serverIP }, (err, addresses) => {
-      if (!err) {
-        if (addresses.some(addr => knownBlockIPs.includes(addr))) {
-          resolve(true);
-          return;
-        }
-        resolve(false);
-        return;
-      }
-
-      dns.resolve6(domain, { server: serverIP }, (err6, addresses6) => {
-        if (!err6) {
-          if (addresses6.some(addr => knownBlockIPs.includes(addr))) {
-            resolve(true);
-            return;
-          }
-          resolve(false);
-          return;
-        }
-        if (err6.code === 'ENOTFOUND' || err6.code === 'SERVFAIL') {
-          resolve(true);
-        } else {
-          resolve(false);
-        }
-      });
-    });
-  });
-};
-
-const checkDomainAgainstDnsServers = async (domain) => {
-  let results = [];
-
-  for (let server of DNS_SERVERS) {
-    const isBlocked = await isDomainBlocked(domain, server.ip);
-    results.push({
-      server: server.name,
-      serverIp: server.ip,
-      isBlocked,
-    });
-  }
-
-  return results;
-};
-
-export const blockListHandler = async (url) => {
-  const domain = new URL(url).hostname;
-  const results = await checkDomainAgainstDnsServers(domain);
-  return { blocklists: results };
-};
-
-export const handler = middleware(blockListHandler);
-export default handler;
@@ -1,20 +1,30 @@
-import https from 'https';
-import middleware from './_common/middleware.js';
+const https = require('https');
 
-const hstsHandler = async (url, event, context) => {
+exports.handler = async function(event, context) {
+  const siteURL = event.queryStringParameters.url;
+
   const errorResponse = (message, statusCode = 500) => {
     return {
       statusCode: statusCode,
       body: JSON.stringify({ error: message }),
     };
   };
-  const hstsIncompatible = (message, compatible = false, hstsHeader = null ) => {
-    return { message, compatible, hstsHeader };
+  const hstsIncompatible = (message, statusCode = 200) => {
+    return {
+      statusCode: statusCode,
+      body: JSON.stringify({ message, compatible: false }),
+    };
   };
 
+  if (!siteURL) {
+    return {
+      statusCode: 400,
+      body: JSON.stringify({ error: 'URL parameter is missing!' }),
+    };
+  }
+
   return new Promise((resolve, reject) => {
-    const req = https.request(url, res => {
+    const req = https.request(siteURL, res => {
       const headers = res.headers;
       const hstsHeader = headers['strict-transport-security'];
 
@@ -32,7 +42,14 @@ const hstsHandler = async (url, event, context) => {
       } else if (!preload) {
        resolve(hstsIncompatible(`HSTS header does not contain the preload directive.`));
      } else {
-        resolve(hstsIncompatible(`Site is compatible with the HSTS preload list!`, true, hstsHeader));
+        resolve({
+          statusCode: 200,
+          body: JSON.stringify({
+            message: "Site is compatible with the HSTS preload list!",
+            compatible: true,
+            hstsHeader: hstsHeader,
+          }),
+        });
      }
    }
  });
@@ -44,6 +61,3 @@ const hstsHandler = async (url, event, context) => {
     req.end();
   });
 };
-
-export const handler = middleware(hstsHandler);
-export default handler;
@@ -1,5 +1,4 @@
-import net from 'net';
-import middleware from './_common/middleware.js';
+const net = require('net');
 
 // A list of commonly used ports.
 const PORTS = [
@@ -13,7 +12,7 @@ async function checkPort(port, domain) {
   return new Promise((resolve, reject) => {
     const socket = new net.Socket();
 
-    socket.setTimeout(1500);
+    socket.setTimeout(1500); // you may want to adjust the timeout
 
     socket.once('connect', () => {
       socket.destroy();
@@ -34,9 +33,13 @@ async function checkPort(port, domain) {
   });
 }
 
-const portsHandler = async (url, event, context) => {
-  const domain = url.replace(/(^\w+:|^)\/\//, '');
+exports.handler = async (event, context) => {
+  const domain = event.queryStringParameters.url;
+
+  if (!domain) {
+    return errorResponse('Missing domain parameter.');
+  }
 
   const delay = ms => new Promise(res => setTimeout(res, ms));
   const timeout = delay(9000);
@@ -72,17 +75,16 @@ const portsHandler = async (url, event, context) => {
   if(timeoutReached){
     return errorResponse('The function timed out before completing.');
   }
 
-  // Sort openPorts and failedPorts before returning
-  openPorts.sort((a, b) => a - b);
-  failedPorts.sort((a, b) => a - b);
-
-  return { openPorts, failedPorts };
+  return {
+    statusCode: 200,
+    body: JSON.stringify({ openPorts, failedPorts }),
+  };
 };
 
 const errorResponse = (message, statusCode = 444) => {
-  return { error: message };
+  return {
+    statusCode: statusCode,
+    body: JSON.stringify({ error: message }),
+  };
 };
-
-export const handler = middleware(portsHandler);
-export default handler;
60 api/content-links.js Normal file
@@ -0,0 +1,60 @@
+const axios = require('axios');
+const cheerio = require('cheerio');
+const urlLib = require('url');
+
+exports.handler = async (event, context) => {
+  let url = event.queryStringParameters.url;
+
+  // Check if url includes protocol
+  if (!url.startsWith('http://') && !url.startsWith('https://')) {
+    url = 'http://' + url;
+  }
+
+  try {
+    const response = await axios.get(url);
+    const html = response.data;
+    const $ = cheerio.load(html);
+    const internalLinksMap = new Map();
+    const externalLinksMap = new Map();
+
+    $('a[href]').each((i, link) => {
+      const href = $(link).attr('href');
+      const absoluteUrl = urlLib.resolve(url, href);
+
+      if (absoluteUrl.startsWith(url)) {
+        const count = internalLinksMap.get(absoluteUrl) || 0;
+        internalLinksMap.set(absoluteUrl, count + 1);
+      } else if (href.startsWith('http://') || href.startsWith('https://')) {
+        const count = externalLinksMap.get(absoluteUrl) || 0;
+        externalLinksMap.set(absoluteUrl, count + 1);
+      }
+    });
+
+    // Convert maps to sorted arrays
+    const internalLinks = [...internalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
+    const externalLinks = [...externalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
+
+    if (internalLinks.length === 0 && externalLinks.length === 0) {
+      return {
+        statusCode: 400,
+        body: JSON.stringify({
+          skipped: 'No internal or external links found. '
+            + 'This may be due to the website being dynamically rendered, using a client-side framework (like React), and without SSR enabled. '
+            + 'That would mean that the static HTML returned from the HTTP request doesn\'t contain any meaningful content for Web-Check to analyze. '
+            + 'You can rectify this by using a headless browser to render the page instead.',
+        }),
+      };
+    }
+
+    return {
+      statusCode: 200,
+      body: JSON.stringify({ internal: internalLinks, external: externalLinks }),
+    };
+  } catch (error) {
+    console.log(error);
+    return {
+      statusCode: 500,
+      body: JSON.stringify({ error: 'Failed fetching data' }),
+    };
+  }
+};
@@ -1,58 +0,0 @@
-import axios from 'axios';
-import puppeteer from 'puppeteer';
-import middleware from './_common/middleware.js';
-
-const getPuppeteerCookies = async (url) => {
-  const browser = await puppeteer.launch({
-    headless: 'new',
-    args: ['--no-sandbox', '--disable-setuid-sandbox'],
-  });
-
-  try {
-    const page = await browser.newPage();
-    const navigationPromise = page.goto(url, { waitUntil: 'networkidle2' });
-    const timeoutPromise = new Promise((_, reject) =>
-      setTimeout(() => reject(new Error('Puppeteer took too long!')), 3000)
-    );
-    await Promise.race([navigationPromise, timeoutPromise]);
-    return await page.cookies();
-  } finally {
-    await browser.close();
-  }
-};
-
-const cookieHandler = async (url) => {
-  let headerCookies = null;
-  let clientCookies = null;
-
-  try {
-    const response = await axios.get(url, {
-      withCredentials: true,
-      maxRedirects: 5,
-    });
-    headerCookies = response.headers['set-cookie'];
-  } catch (error) {
-    if (error.response) {
-      return { error: `Request failed with status ${error.response.status}: ${error.message}` };
-    } else if (error.request) {
-      return { error: `No response received: ${error.message}` };
-    } else {
-      return { error: `Error setting up request: ${error.message}` };
-    }
-  }
-
-  try {
-    clientCookies = await getPuppeteerCookies(url);
-  } catch (_) {
-    clientCookies = null;
-  }
-
-  if (!headerCookies && (!clientCookies || clientCookies.length === 0)) {
-    return { skipped: 'No cookies' };
-  }
-
-  return { headerCookies, clientCookies };
-};
-
-export const handler = middleware(cookieHandler);
-export default handler;
@@ -1,7 +1,16 @@
-import https from 'https';
-import middleware from './_common/middleware.js';
+const https = require('https');
 
-const dnsSecHandler = async (domain) => {
+exports.handler = async function(event, context) {
+  let { url } = event.queryStringParameters;
+
+  if (!url) {
+    return errorResponse('URL query parameter is required.');
+  }
+
+  // Extract hostname from URL
+  const parsedUrl = new URL(url);
+  const domain = parsedUrl.hostname;
+
   const dnsTypes = ['DNSKEY', 'DS', 'RRSIG'];
   const records = {};
 
@@ -25,11 +34,7 @@ const dnsSecHandler = async (domain) => {
         });
 
         res.on('end', () => {
-          try {
-            resolve(JSON.parse(data));
-          } catch (error) {
-            reject(new Error('Invalid JSON response'));
-          }
+          resolve(JSON.parse(data));
         });
 
         res.on('error', error => {
@@ -43,15 +48,22 @@ const dnsSecHandler = async (domain) => {
       if (dnsResponse.Answer) {
         records[type] = { isFound: true, answer: dnsResponse.Answer, response: dnsResponse.Answer };
       } else {
-        records[type] = { isFound: false, answer: null, response: dnsResponse };
+        records[type] = { isFound: false, answer: null, response: dnsResponse};
       }
     } catch (error) {
-      throw new Error(`Error fetching ${type} record: ${error.message}`); // This will be caught and handled by the commonMiddleware
+      return errorResponse(`Error fetching ${type} record: ${error.message}`);
    }
  }
 
-  return records;
+  return {
+    statusCode: 200,
+    body: JSON.stringify(records),
+  };
 };
 
-export const handler = middleware(dnsSecHandler);
-export default handler;
+const errorResponse = (message, statusCode = 444) => {
+  return {
+    statusCode: statusCode,
+    body: JSON.stringify({ error: message }),
+  };
+};
@@ -1,10 +1,11 @@
-import { promises as dnsPromises, lookup } from 'dns';
-import axios from 'axios';
-import middleware from './_common/middleware.js';
+const dns = require('dns');
+const dnsPromises = dns.promises;
+// const https = require('https');
+const axios = require('axios');
 
-const dnsHandler = async (url) => {
+exports.handler = async (event) => {
+  const domain = event.queryStringParameters.url.replace(/^(?:https?:\/\/)?/i, "");
   try {
-    const domain = url.replace(/^(?:https?:\/\/)?/i, "");
     const addresses = await dnsPromises.resolve4(domain);
     const results = await Promise.all(addresses.map(async (address) => {
       const hostname = await dnsPromises.reverse(address).catch(() => null);
@@ -21,7 +22,6 @@ const dnsHandler = async (url) => {
         dohDirectSupports,
       };
     }));
-
     // let dohMozillaSupport = false;
     // try {
     //   const mozillaList = await axios.get('https://firefox.settings.services.mozilla.com/v1/buckets/security-state/collections/onecrl/records');
@@ -29,18 +29,20 @@ const dnsHandler = async (url) => {
     // } catch (error) {
     //   console.error(error);
     // }
 
     return {
-      domain,
-      dns: results,
-      // dohMozillaSupport,
+      statusCode: 200,
+      body: JSON.stringify({
+        domain,
+        dns: results,
+        // dohMozillaSupport,
+      }),
     };
   } catch (error) {
-    throw new Error(`An error occurred while resolving DNS. ${error.message}`); // This will be caught and handled by the commonMiddleware
+    return {
+      statusCode: 500,
+      body: JSON.stringify({
+        error: `An error occurred while resolving DNS. ${error.message}`,
+      }),
+    };
   }
 };
-
-export const handler = middleware(dnsHandler);
-export default handler;
33 api/find-url-ip.js Normal file
@@ -0,0 +1,33 @@
+const dns = require('dns');
+
+/* Lambda function to fetch the IP address of a given URL */
+exports.handler = function (event, context, callback) {
+  const addressParam = event.queryStringParameters.url;
+
+  if (!addressParam) {
+    callback(null, errorResponse('Address parameter is missing.'));
+    return;
+  }
+
+  const address = decodeURIComponent(addressParam)
+    .replaceAll('https://', '')
+    .replaceAll('http://', '');
+
+  dns.lookup(address, (err, ip, family) => {
+    if (err) {
+      callback(null, errorResponse(err.message));
+    } else {
+      callback(null, {
+        statusCode: 200,
+        body: JSON.stringify({ ip, family }),
+      });
+    }
+  });
+};
+
+const errorResponse = (message, statusCode = 444) => {
+  return {
+    statusCode: statusCode,
+    body: JSON.stringify({ error: message }),
+  };
+};
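Handlers written in this `exports.handler(event, context, callback)` style can be exercised locally without a serverless runtime by passing a fake event. A minimal sketch (the query value and the logged output are illustrative assumptions):

```javascript
// Hypothetical local smoke test for the callback-style handler above.
const { handler } = require('./find-url-ip.js');

// Minimal fake API Gateway / Netlify-style event carrying only the query string.
const fakeEvent = { queryStringParameters: { url: 'https://example.com' } };

handler(fakeEvent, {}, (err, response) => {
  if (err) throw err;
  console.log(response.statusCode, response.body); // e.g. 200 {"ip":"...","family":4}
});
```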
114 api/firewall.js
@@ -1,114 +0,0 @@
-import axios from 'axios';
-import middleware from './_common/middleware.js';
-
-const hasWaf = (waf) => {
-  return {
-    hasWaf: true, waf,
-  }
-};
-
-const firewallHandler = async (url) => {
-  const fullUrl = url.startsWith('http') ? url : `http://${url}`;
-
-  try {
-    const response = await axios.get(fullUrl);
-
-    const headers = response.headers;
-
-    if (headers['server'] && headers['server'].includes('cloudflare')) {
-      return hasWaf('Cloudflare');
-    }
-
-    if (headers['x-powered-by'] && headers['x-powered-by'].includes('AWS Lambda')) {
-      return hasWaf('AWS WAF');
-    }
-
-    if (headers['server'] && headers['server'].includes('AkamaiGHost')) {
-      return hasWaf('Akamai');
-    }
-
-    if (headers['server'] && headers['server'].includes('Sucuri')) {
-      return hasWaf('Sucuri');
-    }
-
-    if (headers['server'] && headers['server'].includes('BarracudaWAF')) {
-      return hasWaf('Barracuda WAF');
-    }
-
-    if (headers['server'] && (headers['server'].includes('F5 BIG-IP') || headers['server'].includes('BIG-IP'))) {
-      return hasWaf('F5 BIG-IP');
-    }
-
-    if (headers['x-sucuri-id'] || headers['x-sucuri-cache']) {
-      return hasWaf('Sucuri CloudProxy WAF');
-    }
-
-    if (headers['server'] && headers['server'].includes('FortiWeb')) {
-      return hasWaf('Fortinet FortiWeb WAF');
-    }
-
-    if (headers['server'] && headers['server'].includes('Imperva')) {
-      return hasWaf('Imperva SecureSphere WAF');
-    }
-
-    if (headers['x-protected-by'] && headers['x-protected-by'].includes('Sqreen')) {
-      return hasWaf('Sqreen');
-    }
-
-    if (headers['x-waf-event-info']) {
-      return hasWaf('Reblaze WAF');
-    }
-
-    if (headers['set-cookie'] && headers['set-cookie'].includes('_citrix_ns_id')) {
-      return hasWaf('Citrix NetScaler');
-    }
-
-    if (headers['x-denied-reason'] || headers['x-wzws-requested-method']) {
-      return hasWaf('WangZhanBao WAF');
-    }
-
-    if (headers['x-webcoment']) {
-      return hasWaf('Webcoment Firewall');
-    }
-
-    if (headers['server'] && headers['server'].includes('Yundun')) {
-      return hasWaf('Yundun WAF');
-    }
-
-    if (headers['x-yd-waf-info'] || headers['x-yd-info']) {
-      return hasWaf('Yundun WAF');
-    }
-
-    if (headers['server'] && headers['server'].includes('Safe3WAF')) {
-      return hasWaf('Safe3 Web Application Firewall');
-    }
-
-    if (headers['server'] && headers['server'].includes('NAXSI')) {
-      return hasWaf('NAXSI WAF');
-    }
-
-    if (headers['x-datapower-transactionid']) {
-      return hasWaf('IBM WebSphere DataPower');
-    }
-
-    if (headers['server'] && headers['server'].includes('QRATOR')) {
-      return hasWaf('QRATOR WAF');
-    }
-
-    if (headers['server'] && headers['server'].includes('ddos-guard')) {
-      return hasWaf('DDoS-Guard WAF');
-    }
-
-    return {
-      hasWaf: false,
-    }
-  } catch (error) {
-    return {
-      statusCode: 500,
-      body: JSON.stringify({ error: error.message }),
-    };
-  }
-};
-
-export const handler = middleware(firewallHandler);
-export default handler;
35
api/follow-redirects.js
Normal file
@@ -0,0 +1,35 @@
exports.handler = async (event) => {
  const { url } = event.queryStringParameters;
  const redirects = [url];

  try {
    const got = await import('got');
    await got.default(url, {
      followRedirect: true,
      maxRedirects: 12,
      hooks: {
        beforeRedirect: [
          (options, response) => {
            redirects.push(response.headers.location);
          },
        ],
      },
    });

    return {
      statusCode: 200,
      body: JSON.stringify({
        redirects: redirects,
      }),
    };
  } catch (error) {
    return errorResponse(`Error: ${error.message}`);
  }
};

const errorResponse = (message, statusCode = 444) => {
  return {
    statusCode: statusCode,
    body: JSON.stringify({ error: message }),
  };
};
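A handler written in this Lambda-style shape can be smoke-tested locally without deploying. A minimal sketch, assuming the file is saved as ./api/follow-redirects.js (the path is illustrative):

// Hypothetical local test: call the handler with a mocked event object.
const { handler } = require('./api/follow-redirects.js');

const mockEvent = {
  queryStringParameters: { url: 'https://example.com' },
};

handler(mockEvent)
  .then((res) => {
    // res.body is a JSON string, as in the other handlers in this change
    console.log(res.statusCode, JSON.parse(res.body));
  })
  .catch(console.error);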
54
api/generate-har.js
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
const puppeteer = require('puppeteer-core');
|
||||||
|
const chromium = require('chrome-aws-lambda');
|
||||||
|
|
||||||
|
exports.handler = async (event, context) => {
|
||||||
|
let browser = null;
|
||||||
|
let result = null;
|
||||||
|
let code = 200;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const url = event.queryStringParameters.url;
|
||||||
|
|
||||||
|
browser = await chromium.puppeteer.launch({
|
||||||
|
args: chromium.args,
|
||||||
|
defaultViewport: chromium.defaultViewport,
|
||||||
|
executablePath: await chromium.executablePath,
|
||||||
|
headless: chromium.headless,
|
||||||
|
});
|
||||||
|
|
||||||
|
const page = await browser.newPage();
|
||||||
|
|
||||||
|
const requests = [];
|
||||||
|
|
||||||
|
// Capture requests
|
||||||
|
page.on('request', request => {
|
||||||
|
requests.push({
|
||||||
|
url: request.url(),
|
||||||
|
method: request.method(),
|
||||||
|
headers: request.headers(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
await page.goto(url, {
|
||||||
|
waitUntil: 'networkidle0', // wait until all requests are finished
|
||||||
|
});
|
||||||
|
|
||||||
|
result = requests;
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
code = 500;
|
||||||
|
result = {
|
||||||
|
error: 'Failed to create HAR file',
|
||||||
|
details: error.toString(),
|
||||||
|
};
|
||||||
|
} finally {
|
||||||
|
if (browser !== null) {
|
||||||
|
await browser.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
statusCode: code,
|
||||||
|
body: JSON.stringify(result),
|
||||||
|
};
|
||||||
|
};
|
||||||
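Despite its name, generate-har.js above returns a flat array of captured requests rather than a HAR document. If a consumer wanted something HAR-shaped, it could wrap that array itself; a rough sketch under that assumption (field selection is illustrative, not a complete HAR 1.2 implementation):

// Illustrative only: wrap the flat request list into a minimal HAR-like object.
const toMinimalHar = (requests) => ({
  log: {
    version: '1.2',
    creator: { name: 'web-check', version: '0.0.0' }, // placeholder values
    entries: requests.map((req) => ({
      request: {
        method: req.method,
        url: req.url,
        headers: Object.entries(req.headers || {}).map(([name, value]) => ({ name, value })),
      },
    })),
  },
});

console.log(JSON.stringify(toMinimalHar([{ url: 'https://example.com', method: 'GET', headers: {} }]), null, 2));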
@@ -1,7 +1,14 @@
|
|||||||
import https from 'https';
|
const https = require('https');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const carbonHandler = async (url) => {
|
exports.handler = async (event, context) => {
|
||||||
|
const { url } = event.queryStringParameters;
|
||||||
|
|
||||||
|
if (!url) {
|
||||||
|
return {
|
||||||
|
statusCode: 400,
|
||||||
|
body: JSON.stringify({ error: 'url query parameter is required' }),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
// First, get the size of the website's HTML
|
// First, get the size of the website's HTML
|
||||||
const getHtmlSize = (url) => new Promise((resolve, reject) => {
|
const getHtmlSize = (url) => new Promise((resolve, reject) => {
|
||||||
@@ -42,11 +49,14 @@ const carbonHandler = async (url) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
carbonData.scanUrl = url;
|
carbonData.scanUrl = url;
|
||||||
return carbonData;
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify(carbonData),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error(`Error: ${error.message}`);
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: `Error: ${error.message}` }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(carbonHandler);
|
|
||||||
export default handler;
|
|
||||||
26
api/get-cookies.js
Normal file
@@ -0,0 +1,26 @@
const axios = require('axios');

exports.handler = async function(event, context) {
  const { url } = event.queryStringParameters;

  if (!url) {
    return {
      statusCode: 400,
      body: JSON.stringify({ message: 'url query string parameter is required' }),
    };
  }

  try {
    const response = await axios.get(url, { withCredentials: true });
    const cookies = response.headers['set-cookie'];
    return {
      statusCode: 200,
      body: JSON.stringify({ cookies }),
    };
  } catch (error) {
    return {
      statusCode: 500,
      body: JSON.stringify({ error: error.message }),
    };
  }
};
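The cookies value returned above is the raw array of Set-Cookie strings from the response headers. A client could break those strings into name/value/attribute objects; a simple sketch, assuming well-formed headers:

// Sketch: parse raw Set-Cookie strings into structured objects.
const parseSetCookie = (cookieStrings = []) =>
  cookieStrings.map((str) => {
    const [nameValue, ...attributes] = str.split(';').map((part) => part.trim());
    const [name, ...valueParts] = nameValue.split('=');
    return {
      name,
      value: valueParts.join('='), // values may themselves contain '='
      attributes,                  // e.g. ['Path=/', 'HttpOnly', 'Secure']
    };
  });

console.log(parseSetCookie(['session=abc123; Path=/; HttpOnly']));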
@@ -1,9 +1,8 @@
|
|||||||
import dns from 'dns';
|
const dns = require('dns');
|
||||||
import util from 'util';
|
const util = require('util');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const dnsHandler = async (url) => {
|
exports.handler = async function(event, context) {
|
||||||
let hostname = url;
|
let hostname = event.queryStringParameters.url;
|
||||||
|
|
||||||
// Handle URLs by extracting hostname
|
// Handle URLs by extracting hostname
|
||||||
if (hostname.startsWith('http://') || hostname.startsWith('https://')) {
|
if (hostname.startsWith('http://') || hostname.startsWith('https://')) {
|
||||||
@@ -36,20 +35,25 @@ const dnsHandler = async (url) => {
|
|||||||
]);
|
]);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
A: a,
|
statusCode: 200,
|
||||||
AAAA: aaaa,
|
body: JSON.stringify({
|
||||||
MX: mx,
|
A: a,
|
||||||
TXT: txt,
|
AAAA: aaaa,
|
||||||
NS: ns,
|
MX: mx,
|
||||||
CNAME: cname,
|
TXT: txt,
|
||||||
SOA: soa,
|
NS: ns,
|
||||||
SRV: srv,
|
CNAME: cname,
|
||||||
PTR: ptr
|
SOA: soa,
|
||||||
|
SRV: srv,
|
||||||
|
PTR: ptr
|
||||||
|
})
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error(error.message);
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({
|
||||||
|
error: error.message
|
||||||
|
})
|
||||||
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(dnsHandler);
|
|
||||||
export default handler;
|
|
||||||
31
api/get-headers.js
Normal file
@@ -0,0 +1,31 @@
const axios = require('axios');

exports.handler = async function(event, context) {
  const { url } = event.queryStringParameters;

  if (!url) {
    return {
      statusCode: 400,
      body: JSON.stringify({ error: 'url query string parameter is required' }),
    };
  }

  try {
    const response = await axios.get(url, {
      validateStatus: function (status) {
        return status >= 200 && status < 600; // Resolve only if the status code is less than 600
      },
    });

    return {
      statusCode: 200,
      body: JSON.stringify(response.headers),
    };
  } catch (error) {
    console.log(error);
    return {
      statusCode: 500,
      body: JSON.stringify({ error: error.message }),
    };
  }
};
@@ -1,23 +0,0 @@
|
|||||||
import dns from 'dns';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const lookupAsync = (address) => {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
dns.lookup(address, (err, ip, family) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
} else {
|
|
||||||
resolve({ ip, family });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const ipHandler = async (url) => {
|
|
||||||
const address = url.replaceAll('https://', '').replaceAll('http://', '');
|
|
||||||
return await lookupAsync(address);
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
export const handler = middleware(ipHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,11 +1,9 @@
|
|||||||
import dns from 'dns/promises';
|
const dns = require('dns').promises;
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const txtRecordHandler = async (url, event, context) => {
|
exports.handler = async (event) => {
|
||||||
|
const url = new URL(event.queryStringParameters.url);
|
||||||
try {
|
try {
|
||||||
const parsedUrl = new URL(url);
|
const txtRecords = await dns.resolveTxt(url.hostname);
|
||||||
|
|
||||||
const txtRecords = await dns.resolveTxt(parsedUrl.hostname);
|
|
||||||
|
|
||||||
// Parsing and formatting TXT records into a single object
|
// Parsing and formatting TXT records into a single object
|
||||||
const readableTxtRecords = txtRecords.reduce((acc, recordArray) => {
|
const readableTxtRecords = txtRecords.reduce((acc, recordArray) => {
|
||||||
@@ -18,16 +16,15 @@ const txtRecordHandler = async (url, event, context) => {
|
|||||||
return { ...acc, ...recordObject };
|
return { ...acc, ...recordObject };
|
||||||
}, {});
|
}, {});
|
||||||
|
|
||||||
return readableTxtRecords;
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify(readableTxtRecords),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error.code === 'ERR_INVALID_URL') {
|
console.error('Error:', error);
|
||||||
throw new Error(`Invalid URL ${error}`);
|
return {
|
||||||
} else {
|
statusCode: 500,
|
||||||
throw error;
|
body: JSON.stringify({ error: error.message }),
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(txtRecordHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const headersHandler = async (url, event, context) => {
|
|
||||||
try {
|
|
||||||
const response = await axios.get(url, {
|
|
||||||
validateStatus: function (status) {
|
|
||||||
return status >= 200 && status < 600; // Resolve only if the status code is less than 600
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return response.headers;
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error(error.message);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(headersHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const httpsSecHandler = async (url) => {
|
|
||||||
const fullUrl = url.startsWith('http') ? url : `http://${url}`;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await axios.get(fullUrl);
|
|
||||||
const headers = response.headers;
|
|
||||||
return {
|
|
||||||
strictTransportPolicy: headers['strict-transport-security'] ? true : false,
|
|
||||||
xFrameOptions: headers['x-frame-options'] ? true : false,
|
|
||||||
xContentTypeOptions: headers['x-content-type-options'] ? true : false,
|
|
||||||
xXSSProtection: headers['x-xss-protection'] ? true : false,
|
|
||||||
contentSecurityPolicy: headers['content-security-policy'] ? true : false,
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
return {
|
|
||||||
statusCode: 500,
|
|
||||||
body: JSON.stringify({ error: error.message }),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(httpsSecHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import unzipper from 'unzipper';
|
|
||||||
import csv from 'csv-parser';
|
|
||||||
import fs from 'fs';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
// Should also work with the following sources:
|
|
||||||
// https://www.domcop.com/files/top/top10milliondomains.csv.zip
|
|
||||||
// https://tranco-list.eu/top-1m.csv.zip
|
|
||||||
// https://www.domcop.com/files/top/top10milliondomains.csv.zip
|
|
||||||
// https://radar.cloudflare.com/charts/LargerTopDomainsTable/attachment?id=525&top=1000000
|
|
||||||
// https://statvoo.com/dl/top-1million-sites.csv.zip
|
|
||||||
|
|
||||||
const FILE_URL = 'https://s3-us-west-1.amazonaws.com/umbrella-static/top-1m.csv.zip';
|
|
||||||
const TEMP_FILE_PATH = '/tmp/top-1m.csv';
|
|
||||||
|
|
||||||
const rankHandler = async (url) => {
|
|
||||||
let domain = null;
|
|
||||||
|
|
||||||
try {
|
|
||||||
domain = new URL(url).hostname;
|
|
||||||
} catch (e) {
|
|
||||||
throw new Error('Invalid URL');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Download and unzip the file if not in cache
|
|
||||||
if (!fs.existsSync(TEMP_FILE_PATH)) {
|
|
||||||
const response = await axios({
|
|
||||||
method: 'GET',
|
|
||||||
url: FILE_URL,
|
|
||||||
responseType: 'stream'
|
|
||||||
});
|
|
||||||
|
|
||||||
await new Promise((resolve, reject) => {
|
|
||||||
response.data
|
|
||||||
.pipe(unzipper.Extract({ path: '/tmp' }))
|
|
||||||
.on('close', resolve)
|
|
||||||
.on('error', reject);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse the CSV and find the rank
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const csvStream = fs.createReadStream(TEMP_FILE_PATH)
|
|
||||||
.pipe(csv({
|
|
||||||
headers: ['rank', 'domain'],
|
|
||||||
}))
|
|
||||||
.on('data', (row) => {
|
|
||||||
if (row.domain === domain) {
|
|
||||||
csvStream.destroy();
|
|
||||||
resolve({
|
|
||||||
domain: domain,
|
|
||||||
rank: row.rank,
|
|
||||||
isFound: true,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.on('end', () => {
|
|
||||||
resolve({
|
|
||||||
skipped: `Skipping, as ${domain} is not present in the Umbrella top 1M list.`,
|
|
||||||
domain: domain,
|
|
||||||
isFound: false,
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.on('error', reject);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(rankHandler);
|
|
||||||
export default handler;
|
|
||||||
40
api/lighthouse-report.js
Normal file
@@ -0,0 +1,40 @@
const axios = require('axios');

exports.handler = function(event, context, callback) {
  const { url } = event.queryStringParameters;

  if (!url) {
    callback(null, {
      statusCode: 400,
      body: JSON.stringify({ error: 'URL param is required'}),
    });
  }

  const apiKey = process.env.GOOGLE_CLOUD_API_KEY;

  if (!apiKey) {
    callback(null, {
      statusCode: 500,
      body: JSON.stringify({ error: 'API key (GOOGLE_CLOUD_API_KEY) not set'}),
    });
  }

  const endpoint = `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url=${encodeURIComponent(url)}&category=PERFORMANCE&category=ACCESSIBILITY&category=BEST_PRACTICES&category=SEO&category=PWA&strategy=mobile&key=${apiKey}`;

  axios.get(endpoint)
    .then(
      (response) => {
        callback(null, {
          statusCode: 200,
          body: JSON.stringify(response.data),
        });
      }
    ).catch(
      () => {
        callback(null, {
          statusCode: 500,
          body: JSON.stringify({ error: 'Error running Lighthouse'}),
        });
      }
    );
};
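The handler above passes the full PageSpeed Insights v5 response through unchanged. A consumer that only wants the headline scores could reduce it like this; the lighthouseResult.categories field paths reflect the documented v5 response shape, but treat them as an assumption if the API changes:

// Sketch: extract category scores (0..1) from a PageSpeed Insights response body.
const summariseScores = (psiResponse) => {
  const categories = (psiResponse.lighthouseResult || {}).categories || {};
  return Object.fromEntries(
    Object.entries(categories).map(([id, category]) => [id, category.score])
  );
};

// Example: summariseScores(JSON.parse(res.body)) => { performance: 0.92, seo: 1, ... }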
@@ -1,49 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import cheerio from 'cheerio';
|
|
||||||
import urlLib from 'url';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const linkedPagesHandler = async (url) => {
|
|
||||||
const response = await axios.get(url);
|
|
||||||
const html = response.data;
|
|
||||||
const $ = cheerio.load(html);
|
|
||||||
const internalLinksMap = new Map();
|
|
||||||
const externalLinksMap = new Map();
|
|
||||||
|
|
||||||
// Get all links on the page
|
|
||||||
$('a[href]').each((i, link) => {
|
|
||||||
const href = $(link).attr('href');
|
|
||||||
const absoluteUrl = urlLib.resolve(url, href);
|
|
||||||
|
|
||||||
// Check if absolute / relative, append to appropriate map or increment occurrence count
|
|
||||||
if (absoluteUrl.startsWith(url)) {
|
|
||||||
const count = internalLinksMap.get(absoluteUrl) || 0;
|
|
||||||
internalLinksMap.set(absoluteUrl, count + 1);
|
|
||||||
} else if (href.startsWith('http://') || href.startsWith('https://')) {
|
|
||||||
const count = externalLinksMap.get(absoluteUrl) || 0;
|
|
||||||
externalLinksMap.set(absoluteUrl, count + 1);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Sort by most occurrences, remove duplicates, and convert to array
|
|
||||||
const internalLinks = [...internalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
|
|
||||||
const externalLinks = [...externalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
|
|
||||||
|
|
||||||
// If there were no links, then mark as skipped and show reasons
|
|
||||||
if (internalLinks.length === 0 && externalLinks.length === 0) {
|
|
||||||
return {
|
|
||||||
statusCode: 400,
|
|
||||||
body: {
|
|
||||||
skipped: 'No internal or external links found. '
|
|
||||||
+ 'This may be due to the website being dynamically rendered, using a client-side framework (like React), and without SSR enabled. '
|
|
||||||
+ 'That would mean that the static HTML returned from the HTTP request doesn\'t contain any meaningful content for Web-Check to analyze. '
|
|
||||||
+ 'You can rectify this by using a headless browser to render the page instead.',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return { internal: internalLinks, external: externalLinks };
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(linkedPagesHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,12 +1,11 @@
|
|||||||
import dns from 'dns';
|
const dns = require('dns').promises;
|
||||||
import URL from 'url-parse';
|
const URL = require('url-parse');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
// TODO: Fix.
|
exports.handler = async (event, context) => {
|
||||||
|
|
||||||
const mailConfigHandler = async (url, event, context) => {
|
|
||||||
try {
|
try {
|
||||||
const domain = new URL(url).hostname || new URL(url).pathname;
|
let domain = event.queryStringParameters.url;
|
||||||
|
const parsedUrl = new URL(domain);
|
||||||
|
domain = parsedUrl.hostname || parsedUrl.pathname;
|
||||||
|
|
||||||
// Get MX records
|
// Get MX records
|
||||||
const mxRecords = await dns.resolveMx(domain);
|
const mxRecords = await dns.resolveMx(domain);
|
||||||
@@ -55,28 +54,26 @@ const mailConfigHandler = async (url, event, context) => {
|
|||||||
if (yahooMx.length > 0) {
|
if (yahooMx.length > 0) {
|
||||||
mailServices.push({ provider: 'Yahoo', value: yahooMx[0].exchange });
|
mailServices.push({ provider: 'Yahoo', value: yahooMx[0].exchange });
|
||||||
}
|
}
|
||||||
// Check MX records for Mimecast
|
|
||||||
const mimecastMx = mxRecords.filter(record => record.exchange.includes('mimecast.com'));
|
|
||||||
if (mimecastMx.length > 0) {
|
|
||||||
mailServices.push({ provider: 'Mimecast', value: mimecastMx[0].exchange });
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({
|
||||||
mxRecords,
|
mxRecords,
|
||||||
txtRecords: emailTxtRecords,
|
txtRecords: emailTxtRecords,
|
||||||
mailServices,
|
mailServices,
|
||||||
};
|
}),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error.code === 'ENOTFOUND' || error.code === 'ENODATA') {
|
if (error.code === 'ENOTFOUND' || error.code === 'ENODATA') {
|
||||||
return { skipped: 'No mail server in use on this domain' };
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ skipped: 'No mail server in use on this domain' }),
|
||||||
|
};
|
||||||
} else {
|
} else {
|
||||||
return {
|
return {
|
||||||
statusCode: 500,
|
statusCode: 500,
|
||||||
body: { error: error.message },
|
body: JSON.stringify({ error: error.message }),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(mailConfigHandler);
|
|
||||||
export default handler;
|
|
||||||
|
|||||||
58
api/meta-tags.js
Normal file
@@ -0,0 +1,58 @@
const axios = require('axios');
const cheerio = require('cheerio');

exports.handler = async (event) => {
  let url;
  try {
    // Add https:// prefix if not present
    url = new URL(event.queryStringParameters.url);
    if (!url.protocol) {
      url = new URL('https://' + event.queryStringParameters.url);
    }
  } catch (error) {
    // Return error if URL is not valid
    return {
      statusCode: 400,
      body: JSON.stringify({
        error: 'Invalid URL provided.',
      }),
    };
  }

  try {
    // Fetch the page
    const response = await axios.get(url.toString());

    // Parse the page body with cheerio
    const $ = cheerio.load(response.data);

    // Extract meta tags
    const metaTags = {};

    $('head meta').each((index, element) => {
      const name = $(element).attr('name');
      const property = $(element).attr('property');
      const content = $(element).attr('content');

      if (name) {
        metaTags[name] = content;
      } else if (property) {
        metaTags[property] = content;
      }
    });

    // Return meta tags
    return {
      statusCode: 200,
      body: JSON.stringify(metaTags),
    };
  } catch (error) {
    // Return error if there's a problem fetching or parsing the page
    return {
      statusCode: 500,
      body: JSON.stringify({
        error: error.message,
      }),
    };
  }
};
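meta-tags.js returns one flat map of every name/property meta tag. A front-end could group that map by the standard og: and twitter: prefixes before rendering; a small sketch:

// Sketch: split the flat tag map into Open Graph, Twitter and "other" groups.
const groupMetaTags = (metaTags = {}) => {
  const groups = { openGraph: {}, twitter: {}, other: {} };
  for (const [key, value] of Object.entries(metaTags)) {
    if (key.startsWith('og:')) groups.openGraph[key] = value;
    else if (key.startsWith('twitter:')) groups.twitter[key] = value;
    else groups.other[key] = value;
  }
  return groups;
};

console.log(groupMetaTags({ 'og:title': 'Example', description: 'A page' }));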
58
api/network-requests.js
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
const puppeteer = require('puppeteer-core');
|
||||||
|
const chromium = require('chrome-aws-lambda');
|
||||||
|
|
||||||
|
exports.handler = async (event, context) => {
|
||||||
|
const urlParam = event.queryStringParameters.url;
|
||||||
|
if (!urlParam) {
|
||||||
|
return {
|
||||||
|
statusCode: 400,
|
||||||
|
body: JSON.stringify({ error: 'Missing url parameter' })
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let url;
|
||||||
|
try {
|
||||||
|
url = new URL(urlParam.includes('://') ? urlParam : 'https://' + urlParam);
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: 'Invalid URL format' }),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Launch the browser and open a new page
|
||||||
|
const browser = await puppeteer.launch({
|
||||||
|
args: chromium.args,
|
||||||
|
defaultViewport: { width: 800, height: 600 },
|
||||||
|
executablePath: process.env.CHROME_PATH || await chromium.executablePath,
|
||||||
|
headless: chromium.headless,
|
||||||
|
ignoreHTTPSErrors: true,
|
||||||
|
ignoreDefaultArgs: ['--disable-extensions'],
|
||||||
|
});
|
||||||
|
const page = await browser.newPage();
|
||||||
|
|
||||||
|
// To store network activity
|
||||||
|
let networkActivity = [];
|
||||||
|
|
||||||
|
// Register an event listener for network requests
|
||||||
|
page.on('request', (request) => {
|
||||||
|
networkActivity.push({
|
||||||
|
url: request.url(),
|
||||||
|
method: request.method(),
|
||||||
|
headers: request.headers(),
|
||||||
|
postData: request.postData(),
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Navigate to the page and wait for it to load
|
||||||
|
await page.goto(url, { waitUntil: 'networkidle2' });
|
||||||
|
|
||||||
|
// Close the browser
|
||||||
|
await browser.close();
|
||||||
|
|
||||||
|
// Return network activity
|
||||||
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify(networkActivity),
|
||||||
|
};
|
||||||
|
};
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const qualityHandler = async (url, event, context) => {
|
|
||||||
const apiKey = process.env.GOOGLE_CLOUD_API_KEY;
|
|
||||||
|
|
||||||
if (!apiKey) {
|
|
||||||
throw new Error(
|
|
||||||
'Missing Google API. You need to set the `GOOGLE_CLOUD_API_KEY` environment variable'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const endpoint = `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?`
|
|
||||||
+ `url=${encodeURIComponent(url)}&category=PERFORMANCE&category=ACCESSIBILITY`
|
|
||||||
+ `&category=BEST_PRACTICES&category=SEO&category=PWA&strategy=mobile`
|
|
||||||
+ `&key=${apiKey}`;
|
|
||||||
|
|
||||||
return (await axios.get(endpoint)).data;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(qualityHandler);
|
|
||||||
export default handler;
|
|
||||||
26
api/rank.js
@@ -1,26 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const rankHandler = async (url) => {
|
|
||||||
const domain = url ? new URL(url).hostname : null;
|
|
||||||
if (!domain) throw new Error('Invalid URL');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const auth = process.env.TRANCO_API_KEY ? // Auth is optional.
|
|
||||||
{ auth: { username: process.env.TRANCO_USERNAME, password: process.env.TRANCO_API_KEY } }
|
|
||||||
: {};
|
|
||||||
const response = await axios.get(
|
|
||||||
`https://tranco-list.eu/api/ranks/domain/${domain}`, { timeout: 5000 }, auth,
|
|
||||||
);
|
|
||||||
if (!response.data || !response.data.ranks || response.data.ranks.length === 0) {
|
|
||||||
return { skipped: `Skipping, as ${domain} isn't ranked in the top 100 million sites yet.`};
|
|
||||||
}
|
|
||||||
return response.data;
|
|
||||||
} catch (error) {
|
|
||||||
return { error: `Unable to fetch rank, ${error.message}` };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(rankHandler);
|
|
||||||
export default handler;
|
|
||||||
|
|
||||||
45
api/read-robots-txt.js
Normal file
@@ -0,0 +1,45 @@
const axios = require('axios');

exports.handler = async function(event, context) {
  const siteURL = event.queryStringParameters.url;

  if (!siteURL) {
    return {
      statusCode: 400,
      body: JSON.stringify({ error: 'Missing url query parameter' }),
    };
  }

  let parsedURL;
  try {
    parsedURL = new URL(siteURL);
  } catch (error) {
    return {
      statusCode: 400,
      body: JSON.stringify({ error: 'Invalid url query parameter' }),
    };
  }

  const robotsURL = `${parsedURL.protocol}//${parsedURL.hostname}/robots.txt`;

  try {
    const response = await axios.get(robotsURL);

    if (response.status === 200) {
      return {
        statusCode: 200,
        body: response.data,
      };
    } else {
      return {
        statusCode: response.status,
        body: JSON.stringify({ error: 'Failed to fetch robots.txt', statusCode: response.status }),
      };
    }
  } catch (error) {
    return {
      statusCode: 500,
      body: JSON.stringify({ error: `Error fetching robots.txt: ${error.message}` }),
    };
  }
};
@@ -1,28 +0,0 @@
|
|||||||
import got from 'got';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const redirectsHandler = async (url) => {
|
|
||||||
const redirects = [url];
|
|
||||||
try {
|
|
||||||
await got(url, {
|
|
||||||
followRedirect: true,
|
|
||||||
maxRedirects: 12,
|
|
||||||
hooks: {
|
|
||||||
beforeRedirect: [
|
|
||||||
(options, response) => {
|
|
||||||
redirects.push(response.headers.location);
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
redirects: redirects,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error(`Error: ${error.message}`);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(redirectsHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const parseRobotsTxt = (content) => {
|
|
||||||
const lines = content.split('\n');
|
|
||||||
const rules = [];
|
|
||||||
|
|
||||||
lines.forEach(line => {
|
|
||||||
line = line.trim(); // This removes trailing and leading whitespaces
|
|
||||||
|
|
||||||
let match = line.match(/^(Allow|Disallow):\s*(\S*)$/i);
|
|
||||||
if (match) {
|
|
||||||
const rule = {
|
|
||||||
lbl: match[1],
|
|
||||||
val: match[2],
|
|
||||||
};
|
|
||||||
|
|
||||||
rules.push(rule);
|
|
||||||
} else {
|
|
||||||
match = line.match(/^(User-agent):\s*(\S*)$/i);
|
|
||||||
if (match) {
|
|
||||||
const rule = {
|
|
||||||
lbl: match[1],
|
|
||||||
val: match[2],
|
|
||||||
};
|
|
||||||
|
|
||||||
rules.push(rule);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
return { robots: rules };
|
|
||||||
}
|
|
||||||
|
|
||||||
const robotsHandler = async function(url) {
|
|
||||||
let parsedURL;
|
|
||||||
try {
|
|
||||||
parsedURL = new URL(url);
|
|
||||||
} catch (error) {
|
|
||||||
return {
|
|
||||||
statusCode: 400,
|
|
||||||
body: JSON.stringify({ error: 'Invalid url query parameter' }),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const robotsURL = `${parsedURL.protocol}//${parsedURL.hostname}/robots.txt`;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await axios.get(robotsURL);
|
|
||||||
|
|
||||||
if (response.status === 200) {
|
|
||||||
const parsedData = parseRobotsTxt(response.data);
|
|
||||||
if (!parsedData.robots || parsedData.robots.length === 0) {
|
|
||||||
return { skipped: 'No robots.txt file present, unable to continue' };
|
|
||||||
}
|
|
||||||
return parsedData;
|
|
||||||
} else {
|
|
||||||
return {
|
|
||||||
statusCode: response.status,
|
|
||||||
body: JSON.stringify({ error: 'Failed to fetch robots.txt', statusCode: response.status }),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
return {
|
|
||||||
statusCode: 500,
|
|
||||||
body: JSON.stringify({ error: `Error fetching robots.txt: ${error.message}` }),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(robotsHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,11 +1,16 @@
|
|||||||
import puppeteer from 'puppeteer-core';
|
const puppeteer = require('puppeteer-core');
|
||||||
import chromium from 'chrome-aws-lambda';
|
const chromium = require('chrome-aws-lambda');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const screenshotHandler = async (targetUrl) => {
|
exports.handler = async (event, context, callback) => {
|
||||||
|
let browser = null;
|
||||||
|
let targetUrl = event.queryStringParameters.url;
|
||||||
|
|
||||||
if (!targetUrl) {
|
if (!targetUrl) {
|
||||||
throw new Error('URL is missing from queryStringParameters');
|
callback(null, {
|
||||||
|
statusCode: 400,
|
||||||
|
body: JSON.stringify({ error: 'URL is missing from queryStringParameters' }),
|
||||||
|
});
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!targetUrl.startsWith('http://') && !targetUrl.startsWith('https://')) {
|
if (!targetUrl.startsWith('http://') && !targetUrl.startsWith('https://')) {
|
||||||
@@ -15,13 +20,16 @@ const screenshotHandler = async (targetUrl) => {
|
|||||||
try {
|
try {
|
||||||
new URL(targetUrl);
|
new URL(targetUrl);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error('URL provided is invalid');
|
callback(null, {
|
||||||
|
statusCode: 400,
|
||||||
|
body: JSON.stringify({ error: 'URL provided is invalid' }),
|
||||||
|
});
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
let browser = null;
|
|
||||||
try {
|
try {
|
||||||
browser = await puppeteer.launch({
|
browser = await puppeteer.launch({
|
||||||
args: [...chromium.args, '--no-sandbox'], // Add --no-sandbox flag
|
args: chromium.args,
|
||||||
defaultViewport: { width: 800, height: 600 },
|
defaultViewport: { width: 800, height: 600 },
|
||||||
executablePath: process.env.CHROME_PATH || await chromium.executablePath,
|
executablePath: process.env.CHROME_PATH || await chromium.executablePath,
|
||||||
headless: chromium.headless,
|
headless: chromium.headless,
|
||||||
@@ -32,7 +40,9 @@ const screenshotHandler = async (targetUrl) => {
|
|||||||
let page = await browser.newPage();
|
let page = await browser.newPage();
|
||||||
|
|
||||||
await page.emulateMediaFeatures([{ name: 'prefers-color-scheme', value: 'dark' }]);
|
await page.emulateMediaFeatures([{ name: 'prefers-color-scheme', value: 'dark' }]);
|
||||||
|
|
||||||
page.setDefaultNavigationTimeout(8000);
|
page.setDefaultNavigationTimeout(8000);
|
||||||
|
|
||||||
await page.goto(targetUrl, { waitUntil: 'domcontentloaded' });
|
await page.goto(targetUrl, { waitUntil: 'domcontentloaded' });
|
||||||
|
|
||||||
await page.evaluate(() => {
|
await page.evaluate(() => {
|
||||||
@@ -47,16 +57,24 @@ const screenshotHandler = async (targetUrl) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const screenshotBuffer = await page.screenshot();
|
const screenshotBuffer = await page.screenshot();
|
||||||
|
|
||||||
const base64Screenshot = screenshotBuffer.toString('base64');
|
const base64Screenshot = screenshotBuffer.toString('base64');
|
||||||
|
|
||||||
return { image: base64Screenshot };
|
const response = {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ image: base64Screenshot }),
|
||||||
|
};
|
||||||
|
|
||||||
|
callback(null, response);
|
||||||
|
} catch (error) {
|
||||||
|
console.log(error);
|
||||||
|
callback(null, {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: `An error occurred: ${error.message}` }),
|
||||||
|
});
|
||||||
} finally {
|
} finally {
|
||||||
if (browser !== null) {
|
if (browser !== null) {
|
||||||
await browser.close();
|
await browser.close();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(screenshotHandler);
|
|
||||||
export default handler;
|
|
||||||
|
|||||||
@@ -1,8 +1,5 @@
|
|||||||
import { URL } from 'url';
|
const { https } = require('follow-redirects');
|
||||||
import followRedirects from 'follow-redirects';
|
const { URL } = require('url');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const { https } = followRedirects;
|
|
||||||
|
|
||||||
const SECURITY_TXT_PATHS = [
|
const SECURITY_TXT_PATHS = [
|
||||||
'/security.txt',
|
'/security.txt',
|
||||||
@@ -40,35 +37,57 @@ const isPgpSigned = (result) => {
|
|||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
|
|
||||||
const securityTxtHandler = async (urlParam) => {
|
exports.handler = async (event, context) => {
|
||||||
|
const urlParam = event.queryStringParameters.url;
|
||||||
|
if (!urlParam) {
|
||||||
|
return {
|
||||||
|
statusCode: 400,
|
||||||
|
body: JSON.stringify({ error: 'Missing url parameter' })
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
let url;
|
let url;
|
||||||
try {
|
try {
|
||||||
url = new URL(urlParam.includes('://') ? urlParam : 'https://' + urlParam);
|
url = new URL(urlParam.includes('://') ? urlParam : 'https://' + urlParam);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error('Invalid URL format');
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: 'Invalid URL format' }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
url.pathname = '';
|
url.pathname = '';
|
||||||
|
|
||||||
for (let path of SECURITY_TXT_PATHS) {
|
for (let path of SECURITY_TXT_PATHS) {
|
||||||
try {
|
try {
|
||||||
const result = await fetchSecurityTxt(url, path);
|
const result = await fetchSecurityTxt(url, path);
|
||||||
if (result && result.includes('<html')) return { isPresent: false };
|
if (result && result.includes('<html')) return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ isPresent: false }),
|
||||||
|
};
|
||||||
if (result) {
|
if (result) {
|
||||||
return {
|
return {
|
||||||
isPresent: true,
|
statusCode: 200,
|
||||||
foundIn: path,
|
body: JSON.stringify({
|
||||||
content: result,
|
isPresent: true,
|
||||||
isPgpSigned: isPgpSigned(result),
|
foundIn: path,
|
||||||
fields: parseResult(result),
|
content: result,
|
||||||
|
isPgpSigned: isPgpSigned(result),
|
||||||
|
fields: parseResult(result),
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error(error.message);
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: error.message }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return { isPresent: false };
|
return {
|
||||||
|
statusCode: 404,
|
||||||
|
body: JSON.stringify({ isPresent: false }),
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
async function fetchSecurityTxt(baseURL, path) {
|
async function fetchSecurityTxt(baseURL, path) {
|
||||||
@@ -91,6 +110,3 @@ async function fetchSecurityTxt(baseURL, path) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
export const handler = middleware(securityTxtHandler);
|
|
||||||
export default handler;
|
|
||||||
|
|||||||
@@ -1,10 +1,14 @@
|
|||||||
import https from 'https';
|
const https = require('https');
|
||||||
import { performance, PerformanceObserver } from 'perf_hooks';
|
const { performance, PerformanceObserver } = require('perf_hooks');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
exports.handler = async function(event, context) {
|
||||||
|
const { url } = event.queryStringParameters;
|
||||||
|
|
||||||
const statusHandler = async (url) => {
|
|
||||||
if (!url) {
|
if (!url) {
|
||||||
throw new Error('You must provide a URL query parameter!');
|
return {
|
||||||
|
statusCode: 400,
|
||||||
|
body: JSON.stringify({ error: 'You must provide a URL query parameter!' }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
let dnsLookupTime;
|
let dnsLookupTime;
|
||||||
@@ -39,7 +43,10 @@ const statusHandler = async (url) => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (responseCode < 200 || responseCode >= 400) {
|
if (responseCode < 200 || responseCode >= 400) {
|
||||||
throw new Error(`Received non-success response code: ${responseCode}`);
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ error: `Received non-success response code: ${responseCode}` }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
performance.mark('B');
|
performance.mark('B');
|
||||||
@@ -47,13 +54,16 @@ const statusHandler = async (url) => {
|
|||||||
let responseTime = performance.now() - startTime;
|
let responseTime = performance.now() - startTime;
|
||||||
obs.disconnect();
|
obs.disconnect();
|
||||||
|
|
||||||
return { isUp: true, dnsLookupTime, responseTime, responseCode };
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ isUp: true, dnsLookupTime, responseTime, responseCode }),
|
||||||
|
};
|
||||||
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
obs.disconnect();
|
obs.disconnect();
|
||||||
throw error;
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ error: `Error during operation: ${error.message}` }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(statusHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,15 +1,22 @@
|
|||||||
import https from 'https';
|
const https = require('https');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const featuresHandler = async (url) => {
|
exports.handler = async function (event, context) {
|
||||||
|
const { url } = event.queryStringParameters;
|
||||||
const apiKey = process.env.BUILT_WITH_API_KEY;
|
const apiKey = process.env.BUILT_WITH_API_KEY;
|
||||||
|
|
||||||
|
const errorResponse = (message, statusCode = 500) => {
|
||||||
|
return {
|
||||||
|
statusCode: statusCode,
|
||||||
|
body: JSON.stringify({ error: message }),
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
if (!url) {
|
if (!url) {
|
||||||
throw new Error('URL query parameter is required');
|
return errorResponse('URL query parameter is required', 400);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!apiKey) {
|
if (!apiKey) {
|
||||||
throw new Error('Missing BuiltWith API key in environment variables');
|
return errorResponse('Missing BuiltWith API key in environment variables', 500);
|
||||||
}
|
}
|
||||||
|
|
||||||
const apiUrl = `https://api.builtwith.com/free1/api.json?KEY=${apiKey}&LOOKUP=${encodeURIComponent(url)}`;
|
const apiUrl = `https://api.builtwith.com/free1/api.json?KEY=${apiKey}&LOOKUP=${encodeURIComponent(url)}`;
|
||||||
@@ -39,11 +46,11 @@ const featuresHandler = async (url) => {
|
|||||||
req.end();
|
req.end();
|
||||||
});
|
});
|
||||||
|
|
||||||
return response;
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: response,
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error(`Error making request: ${error.message}`);
|
return errorResponse(`Error making request: ${error.message}`);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(featuresHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,21 +1,19 @@
|
|||||||
import axios from 'axios';
|
const axios = require('axios');
|
||||||
import xml2js from 'xml2js';
|
const xml2js = require('xml2js');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const sitemapHandler = async (url) => {
|
exports.handler = async (event) => {
|
||||||
|
const url = event.queryStringParameters.url;
|
||||||
let sitemapUrl = `${url}/sitemap.xml`;
|
let sitemapUrl = `${url}/sitemap.xml`;
|
||||||
|
|
||||||
const hardTimeOut = 5000;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Try to fetch sitemap directly
|
// Try to fetch sitemap directly
|
||||||
let sitemapRes;
|
let sitemapRes;
|
||||||
try {
|
try {
|
||||||
sitemapRes = await axios.get(sitemapUrl, { timeout: hardTimeOut });
|
sitemapRes = await axios.get(sitemapUrl, { timeout: 5000 });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
if (error.response && error.response.status === 404) {
|
if (error.response && error.response.status === 404) {
|
||||||
// If sitemap not found, try to fetch it from robots.txt
|
// If sitemap not found, try to fetch it from robots.txt
|
||||||
const robotsRes = await axios.get(`${url}/robots.txt`, { timeout: hardTimeOut });
|
const robotsRes = await axios.get(`${url}/robots.txt`, { timeout: 5000 });
|
||||||
const robotsTxt = robotsRes.data.split('\n');
|
const robotsTxt = robotsRes.data.split('\n');
|
||||||
|
|
||||||
for (let line of robotsTxt) {
|
for (let line of robotsTxt) {
|
||||||
@@ -26,10 +24,13 @@ const sitemapHandler = async (url) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (!sitemapUrl) {
|
if (!sitemapUrl) {
|
||||||
return { skipped: 'No sitemap found' };
|
return {
|
||||||
|
statusCode: 404,
|
||||||
|
body: JSON.stringify({ skipped: 'No sitemap found' }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
sitemapRes = await axios.get(sitemapUrl, { timeout: hardTimeOut });
|
sitemapRes = await axios.get(sitemapUrl, { timeout: 5000 });
|
||||||
} else {
|
} else {
|
||||||
throw error; // If other error, throw it
|
throw error; // If other error, throw it
|
||||||
}
|
}
|
||||||
@@ -38,16 +39,23 @@ const sitemapHandler = async (url) => {
|
|||||||
const parser = new xml2js.Parser();
|
const parser = new xml2js.Parser();
|
||||||
const sitemap = await parser.parseStringPromise(sitemapRes.data);
|
const sitemap = await parser.parseStringPromise(sitemapRes.data);
|
||||||
|
|
||||||
return sitemap;
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify(sitemap),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
// If error occurs
|
||||||
|
console.log(error.message);
|
||||||
if (error.code === 'ECONNABORTED') {
|
if (error.code === 'ECONNABORTED') {
|
||||||
return { error: `Request timed-out after ${hardTimeOut}ms` };
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: 'Request timed out' }),
|
||||||
|
};
|
||||||
} else {
|
} else {
|
||||||
return { error: error.message };
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: error.message }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(sitemapHandler);
|
|
||||||
export default handler;
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
import axios from 'axios';
|
const axios = require('axios');
|
||||||
import cheerio from 'cheerio';
|
const cheerio = require('cheerio');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const socialTagsHandler = async (url) => {
|
exports.handler = async (event, context) => {
|
||||||
|
let url = event.queryStringParameters.url;
|
||||||
|
|
||||||
// Check if url includes protocol
|
// Check if url includes protocol
|
||||||
if (!url.startsWith('http://') && !url.startsWith('https://')) {
|
if (!url.startsWith('http://') && !url.startsWith('https://')) {
|
||||||
@@ -49,9 +49,16 @@ const socialTagsHandler = async (url) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
if (Object.keys(metadata).length === 0) {
|
if (Object.keys(metadata).length === 0) {
|
||||||
return { skipped: 'No metadata found' };
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ skipped: 'No metadata found' }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
return metadata;
|
|
||||||
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify(metadata),
|
||||||
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return {
|
return {
|
||||||
statusCode: 500,
|
statusCode: 500,
|
||||||
@@ -59,6 +66,3 @@ const socialTagsHandler = async (url) => {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(socialTagsHandler);
|
|
||||||
export default handler;
|
|
||||||
|
|||||||
50
api/ssl-check.js
Normal file
@@ -0,0 +1,50 @@
const https = require('https');

exports.handler = async function (event, context) {
  const { url } = event.queryStringParameters;

  const errorResponse = (message, statusCode = 500) => {
    return {
      statusCode: statusCode,
      body: JSON.stringify({ error: message }),
    };
  };

  if (!url) {
    return errorResponse('URL query parameter is required', 400);
  }

  try {
    const response = await new Promise((resolve, reject) => {
      const req = https.request(url, res => {
        // Check if the SSL handshake was authorized
        if (!res.socket.authorized) {
          resolve(errorResponse(`SSL handshake not authorized. Reason: ${res.socket.authorizationError}`));
        } else {
          let cert = res.socket.getPeerCertificate(true);
          if (!cert || Object.keys(cert).length === 0) {
            resolve(errorResponse("No certificate presented by the server."));
          } else {
            // omit the raw and issuerCertificate fields
            const { raw, issuerCertificate, ...certWithoutRaw } = cert;
            resolve({
              statusCode: 200,
              body: JSON.stringify(certWithoutRaw),
            });
          }
        }
      });

      req.on('error', error => {
        resolve(errorResponse(`Error fetching site certificate: ${error.message}`));
      });

      req.end();
    });

    return response;
  } catch (error) {
    return errorResponse(`Unexpected error occurred: ${error.message}`);
  }
};
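The certificate object returned by ssl-check.js keeps the standard fields of Node's getPeerCertificate() output, including valid_to. A consumer could use that to compute remaining validity; a short sketch, assuming valid_to is present and parseable by Date:

// Sketch: days remaining on the certificate returned by ssl-check.js.
const daysUntilExpiry = (cert) => {
  const expiry = new Date(cert.valid_to).getTime();
  return Math.floor((expiry - Date.now()) / (1000 * 60 * 60 * 24)); // ms -> days
};

// Example: daysUntilExpiry(JSON.parse(res.body)) => e.g. 42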
44
api/ssl.js
@@ -1,44 +0,0 @@
|
|||||||
import tls from 'tls';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const sslHandler = async (urlString) => {
|
|
||||||
try {
|
|
||||||
const parsedUrl = new URL(urlString);
|
|
||||||
const options = {
|
|
||||||
host: parsedUrl.hostname,
|
|
||||||
port: parsedUrl.port || 443,
|
|
||||||
servername: parsedUrl.hostname,
|
|
||||||
rejectUnauthorized: false,
|
|
||||||
};
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const socket = tls.connect(options, () => {
|
|
||||||
if (!socket.authorized) {
|
|
||||||
return reject(new Error(`SSL handshake not authorized. Reason: ${socket.authorizationError}`));
|
|
||||||
}
|
|
||||||
|
|
||||||
const cert = socket.getPeerCertificate();
|
|
||||||
if (!cert || Object.keys(cert).length === 0) {
|
|
||||||
return reject(new Error(`
|
|
||||||
No certificate presented by the server.\n
|
|
||||||
The server is possibly not using SNI (Server Name Indication) to identify itself, and you are connecting to a hostname-aliased IP address.
|
|
||||||
Or it may be due to an invalid SSL certificate, or an incomplete SSL handshake at the time the cert is being read.`));
|
|
||||||
}
|
|
||||||
|
|
||||||
const { raw, issuerCertificate, ...certWithoutRaw } = cert;
|
|
||||||
resolve(certWithoutRaw);
|
|
||||||
socket.end();
|
|
||||||
});
|
|
||||||
|
|
||||||
socket.on('error', (error) => {
|
|
||||||
reject(new Error(`Error fetching site certificate: ${error.message}`));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error(error.message);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(sslHandler);
|
|
||||||
export default handler;
|
|
||||||
@@ -1,31 +1,69 @@
|
|||||||
import Wappalyzer from 'wappalyzer';
|
const Wappalyzer = require('wappalyzer');
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
const analyze = async (url) => {
|
||||||
|
|
||||||
const techStackHandler = async (url) => {
|
|
||||||
const options = {};
|
const options = {};
|
||||||
|
|
||||||
const wappalyzer = new Wappalyzer(options);
|
const wappalyzer = new Wappalyzer(options);
|
||||||
|
return (async function() {
|
||||||
try {
|
try {
|
||||||
await wappalyzer.init();
|
await wappalyzer.init()
|
||||||
const headers = {};
|
const headers = {}
|
||||||
const storage = {
|
const storage = {
|
||||||
local: {},
|
local: {},
|
||||||
session: {},
|
session: {},
|
||||||
};
|
}
|
||||||
const site = await wappalyzer.open(url, headers, storage);
|
const site = await wappalyzer.open(url, headers, storage)
|
||||||
const results = await site.analyze();
|
const results = await site.analyze()
|
||||||
|
return results;
|
||||||
if (!results.technologies || results.technologies.length === 0) {
|
} catch (error) {
|
||||||
throw new Error('Unable to find any technologies for site');
|
return error;
|
||||||
|
} finally {
|
||||||
|
await wappalyzer.destroy()
|
||||||
}
|
}
|
||||||
return results;
|
})();
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.handler = async (event, context, callback) => {
|
||||||
|
// Validate URL parameter
|
||||||
|
if (!event.queryStringParameters || !event.queryStringParameters.url) {
|
||||||
|
return {
|
||||||
|
statusCode: 400,
|
||||||
|
body: JSON.stringify({ error: 'Missing url parameter' }),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get URL from param
|
||||||
|
let url = event.queryStringParameters.url;
|
||||||
|
if (!/^https?:\/\//i.test(url)) {
|
||||||
|
url = 'http://' + url;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return analyze(url).then(
|
||||||
|
(results) => {
|
||||||
|
if (!results.technologies || results.technologies.length === 0) {
|
||||||
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify({ error: 'Unable to find any technologies for site' }),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
statusCode: 200,
|
||||||
|
body: JSON.stringify(results),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
.catch((error) => {
|
||||||
|
return {
|
||||||
|
statusCode: 500,
|
||||||
|
body: JSON.stringify({ error: error.message }),
|
||||||
|
};
|
||||||
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw new Error(error.message);
|
return {
|
||||||
} finally {
|
statusCode: 500,
|
||||||
await wappalyzer.destroy();
|
body: JSON.stringify({ error: error.message }),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
export const handler = middleware(techStackHandler);
|
|
||||||
export default handler;
|
|
||||||
|
|||||||
103
api/threats.js
@@ -1,103 +0,0 @@
|
|||||||
import axios from 'axios';
|
|
||||||
import xml2js from 'xml2js';
|
|
||||||
import middleware from './_common/middleware.js';
|
|
||||||
|
|
||||||
const getGoogleSafeBrowsingResult = async (url) => {
|
|
||||||
try {
|
|
||||||
const apiKey = process.env.GOOGLE_CLOUD_API_KEY;
|
|
||||||
if (!apiKey) {
|
|
||||||
return { error: 'GOOGLE_CLOUD_API_KEY is required for the Google Safe Browsing check' };
|
|
||||||
}
|
|
||||||
const apiEndpoint = `https://safebrowsing.googleapis.com/v4/threatMatches:find?key=${apiKey}`;
|
|
||||||
|
|
||||||
const requestBody = {
|
|
||||||
threatInfo: {
|
|
||||||
threatTypes: [
|
|
||||||
'MALWARE', 'SOCIAL_ENGINEERING', 'UNWANTED_SOFTWARE', 'POTENTIALLY_HARMFUL_APPLICATION', 'API_ABUSE'
|
|
||||||
],
|
|
||||||
platformTypes: ["ANY_PLATFORM"],
|
|
||||||
threatEntryTypes: ["URL"],
|
|
||||||
threatEntries: [{ url }]
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const response = await axios.post(apiEndpoint, requestBody);
|
|
||||||
if (response.data && response.data.matches) {
|
|
||||||
return {
|
|
||||||
unsafe: true,
|
|
||||||
details: response.data.matches
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
return { unsafe: false };
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
return { error: `Request failed: ${error.message}` };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const getUrlHausResult = async (url) => {
|
|
||||||
let domain = new URL(url).hostname;
|
|
||||||
return await axios({
|
|
||||||
method: 'post',
|
|
||||||
url: 'https://urlhaus-api.abuse.ch/v1/host/',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/x-www-form-urlencoded'
|
|
||||||
},
|
|
||||||
data: `host=${domain}`
|
|
||||||
})
|
|
||||||
.then((x) => x.data)
|
|
||||||
.catch((e) => ({ error: `Request to URLHaus failed, ${e.message}`}));
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
const getPhishTankResult = async (url) => {
|
|
||||||
try {
|
|
||||||
const encodedUrl = Buffer.from(url).toString('base64');
|
|
||||||
const endpoint = `https://checkurl.phishtank.com/checkurl/?url=${encodedUrl}`;
|
|
||||||
const headers = {
|
|
||||||
'User-Agent': 'phishtank/web-check',
|
|
||||||
};
|
|
||||||
const response = await axios.post(endpoint, null, { headers, timeout: 3000 });
|
|
||||||
const parsed = await xml2js.parseStringPromise(response.data, { explicitArray: false });
|
|
||||||
return parsed.response.results;
|
|
||||||
} catch (error) {
|
|
||||||
return { error: `Request to PhishTank failed: ${error.message}` };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const getCloudmersiveResult = async (url) => {
|
|
||||||
const apiKey = process.env.CLOUDMERSIVE_API_KEY;
|
|
||||||
if (!apiKey) {
|
|
||||||
return { error: 'CLOUDMERSIVE_API_KEY is required for the Cloudmersive check' };
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const endpoint = 'https://api.cloudmersive.com/virus/scan/website';
|
|
||||||
const headers = {
|
|
||||||
'Content-Type': 'application/x-www-form-urlencoded',
|
|
||||||
'Apikey': apiKey,
|
|
||||||
};
|
|
||||||
const data = `Url=${encodeURIComponent(url)}`;
|
|
||||||
const response = await axios.post(endpoint, data, { headers });
|
|
||||||
return response.data;
|
|
||||||
} catch (error) {
|
|
||||||
return { error: `Request to Cloudmersive failed: ${error.message}` };
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const threatsHandler = async (url) => {
|
|
||||||
try {
|
|
||||||
const urlHaus = await getUrlHausResult(url);
|
|
||||||
const phishTank = await getPhishTankResult(url);
|
|
||||||
const cloudmersive = await getCloudmersiveResult(url);
|
|
||||||
const safeBrowsing = await getGoogleSafeBrowsingResult(url);
|
|
||||||
if (urlHaus.error && phishTank.error && cloudmersive.error && safeBrowsing.error) {
|
|
||||||
throw new Error(`All requests failed - ${urlHaus.error} ${phishTank.error} ${cloudmersive.error} ${safeBrowsing.error}`);
|
|
||||||
}
|
|
||||||
return JSON.stringify({ urlHaus, phishTank, cloudmersive, safeBrowsing });
|
|
||||||
} catch (error) {
|
|
||||||
throw new Error(error.message);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export const handler = middleware(threatsHandler);
|
|
||||||
export default handler;
|
|
||||||
29
api/tls.js
@@ -1,29 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';

const MOZILLA_TLS_OBSERVATORY_API = 'https://tls-observatory.services.mozilla.com/api/v1';

const tlsHandler = async (url) => {
  try {
    const domain = new URL(url).hostname;
    const scanResponse = await axios.post(`${MOZILLA_TLS_OBSERVATORY_API}/scan?target=${domain}`);
    const scanId = scanResponse.data.scan_id;

    if (typeof scanId !== 'number') {
      return {
        statusCode: 500,
        body: { error: 'Failed to get scan_id from TLS Observatory' },
      };
    }
    const resultResponse = await axios.get(`${MOZILLA_TLS_OBSERVATORY_API}/results?id=${scanId}`);
    return {
      statusCode: 200,
      body: resultResponse.data,
    };
  } catch (error) {
    return { error: error.message };
  }
};

export const handler = middleware(tlsHandler);
export default handler;
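Note: the Observatory scan is started and its results fetched in one pass above, but a fresh scan_id may not have complete results immediately. A hedged sketch of polling the same results endpoint follows; the completion_perc field, retry count, and helper name are assumptions about the API, not confirmed by this diff.

import axios from 'axios';

const API = 'https://tls-observatory.services.mozilla.com/api/v1';

// Poll the results endpoint a few times; completion_perc is an assumed field,
// so the check falls through gracefully if it is absent.
const fetchTlsResults = async (scanId, attempts = 5) => {
  for (let i = 0; i < attempts; i++) {
    const { data } = await axios.get(`${API}/results?id=${scanId}`);
    if (!data || data.completion_perc === undefined || data.completion_perc >= 100) {
      return data;
    }
    await new Promise((r) => setTimeout(r, 1000)); // wait before retrying
  }
  throw new Error('TLS Observatory scan did not complete in time');
};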
@@ -1,32 +1,55 @@
--- removed

import url from 'url';
import traceroute from 'traceroute';
import middleware from './_common/middleware.js';

const traceRouteHandler = async (urlString, context) => {
  // Parse the URL and get the hostname
  const urlObject = url.parse(urlString);
  const host = urlObject.hostname;

  if (!host) {
    throw new Error('Invalid URL provided');
  }

  // Traceroute with callback
  const result = await new Promise((resolve, reject) => {
    traceroute.trace(host, (err, hops) => {
      if (err || !hops) {
        reject(err || new Error('No hops found'));
      } else {
        resolve(hops);
      }
    });
  });

  return {
    message: "Traceroute completed!",
    result,
  };
};

export const handler = middleware(traceRouteHandler);
export default handler;

+++ added

const traceroute = require('traceroute');
const url = require('url');

exports.handler = async function(event, context) {
  const urlString = event.queryStringParameters.url;

  try {
    if (!urlString) {
      throw new Error('URL parameter is missing!');
    }

    // Parse the URL and get the hostname
    const urlObject = url.parse(urlString);
    const host = urlObject.hostname;

    if (!host) {
      throw new Error('Invalid URL provided');
    }

    // Traceroute with callback
    const result = await new Promise((resolve, reject) => {
      traceroute.trace(host, (err, hops) => {
        if (err || !hops) {
          reject(err || new Error('No hops found'));
        } else {
          resolve(hops);
        }
      });

      // Check if remaining time is less than 8.8 seconds, then reject promise
      if (context.getRemainingTimeInMillis() < 8800) {
        reject(new Error('Lambda is about to timeout'));
      }
    });

    return {
      statusCode: 200,
      body: JSON.stringify({
        message: "Traceroute completed!",
        result,
      }),
    };
  } catch (err) {
    const message = err.code === 'ENOENT'
      ? 'Traceroute command is not installed on the host.'
      : err.message;

    return {
      statusCode: 500,
      body: JSON.stringify({
        error: message,
      }),
    };
  }
};
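Note: the added lambda version guards against the function being killed mid-trace via getRemainingTimeInMillis. A minimal alternative sketch of the same idea using a hard deadline and Promise.race follows; the traceWithDeadline helper and the 9000 ms budget are illustrative assumptions, not part of either version in the diff.

import traceroute from 'traceroute';

// Race the trace against a fixed deadline so the caller gets a clean error
// instead of a platform timeout. Helper name and budget are illustrative.
const traceWithDeadline = (host, budgetMs = 9000) => {
  const trace = new Promise((resolve, reject) => {
    traceroute.trace(host, (err, hops) => {
      if (err || !hops) reject(err || new Error('No hops found'));
      else resolve(hops);
    });
  });
  const deadline = new Promise((_, reject) =>
    setTimeout(() => reject(new Error('Traceroute timed out')), budgetMs)
  );
  return Promise.race([trace, deadline]);
};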
@@ -1,7 +1,14 @@
--- removed

import net from 'net';
import psl from 'psl';
import axios from 'axios';
import middleware from './_common/middleware.js';

const getBaseDomain = (url) => {
  let protocol = '';

+++ added

const net = require('net');
const psl = require('psl');
// const { URL } = require('url');

const errorResponse = (message, statusCode = 444) => {
  return {
    statusCode: statusCode,
    body: JSON.stringify({ error: message }),
  };
};

const getBaseDomain = (url) => {
  let protocol = '';
@@ -15,7 +22,55 @@ const getBaseDomain = (url) => {
--- removed

  return protocol + parsed.domain;
};


const parseWhoisData = (data) => {

  if (data.includes('No match for')) {
    return { error: 'No matches found for domain in internic database'};
  }

+++ added

  return protocol + parsed.domain;
};


exports.handler = async function(event, context) {
  let url = event.queryStringParameters.url;

  if (!url) {
    return errorResponse('URL query parameter is required.', 400);
  }

  if (!url.startsWith('http://') && !url.startsWith('https://')) {
    url = 'http://' + url;
  }

  let hostname;
  try {
    hostname = getBaseDomain(new URL(url).hostname);
  } catch (error) {
    return errorResponse(`Unable to parse URL: ${error}`, 400);
  }

  return new Promise((resolve, reject) => {
    const client = net.createConnection({ port: 43, host: 'whois.internic.net' }, () => {
      client.write(hostname + '\r\n');
    });

    let data = '';
    client.on('data', (chunk) => {
      data += chunk;
    });

    client.on('end', () => {
      try {
        const parsedData = parseWhoisData(data);
        resolve({
          statusCode: 200,
          body: JSON.stringify(parsedData),
        });
      } catch (error) {
        resolve(errorResponse(error.message));
      }
    });

    client.on('error', (err) => {
      resolve(errorResponse(err.message, 500));
    });
  });
};

const parseWhoisData = (data) => {

  if (data.includes('No match for')) {
    return { error: 'No matches found for domain in internic database'};
  }
@@ -45,67 +100,3 @@ const parseWhoisData = (data) => {
--- removed

  return parsedData;
};

const fetchFromInternic = async (hostname) => {
  return new Promise((resolve, reject) => {
    const client = net.createConnection({ port: 43, host: 'whois.internic.net' }, () => {
      client.write(hostname + '\r\n');
    });

    let data = '';
    client.on('data', (chunk) => {
      data += chunk;
    });

    client.on('end', () => {
      try {
        const parsedData = parseWhoisData(data);
        resolve(parsedData);
      } catch (error) {
        reject(error);
      }
    });

    client.on('error', (err) => {
      reject(err);
    });
  });
};

const fetchFromMyAPI = async (hostname) => {
  try {
    const response = await axios.post('https://whois-api-zeta.vercel.app/', {
      domain: hostname
    });
    return response.data;
  } catch (error) {
    console.error('Error fetching data from your API:', error.message);
    return null;
  }
};

const whoisHandler = async (url) => {
  if (!url.startsWith('http://') && !url.startsWith('https://')) {
    url = 'http://' + url;
  }

  let hostname;
  try {
    hostname = getBaseDomain(new URL(url).hostname);
  } catch (error) {
    throw new Error(`Unable to parse URL: ${error}`);
  }

  const [internicData, whoisData] = await Promise.all([
    fetchFromInternic(hostname),
    fetchFromMyAPI(hostname)
  ]);

  return {
    internicData,
    whoisData
  };
};

export const handler = middleware(whoisHandler);
export default handler;

+++ added

  return parsedData;
};
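Note: parseWhoisData itself sits outside the changed hunks, but WHOIS responses are line-oriented "Key: Value" text, which is what both versions feed into it. A hedged sketch of that kind of parsing follows; the parseWhoisExample name, the splitting rule, and the sample fields are assumptions, not the project's actual implementation.

// Illustrative only: split a raw WHOIS response into key/value pairs.
// The real parseWhoisData in the repo may handle more cases.
const parseWhoisExample = (raw) => {
  const result = {};
  for (const line of raw.split('\n')) {
    const idx = line.indexOf(':');
    if (idx === -1) continue;
    const key = line.slice(0, idx).trim();
    const value = line.slice(idx + 1).trim();
    if (key && value) result[key] = value;
  }
  return result;
};

// e.g. parseWhoisExample('Domain Name: EXAMPLE.COM\nRegistrar: IANA\n')
//   -> { 'Domain Name': 'EXAMPLE.COM', Registrar: 'IANA' }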