Compare commits


1 Commit

Author: Alicia Sykes
SHA1: c1daa4799c
Message: Adds gen har, network requests, meta tags
Date: 2023-07-30 02:16:08 +01:00
406 changed files with 15,996 additions and 17,020 deletions

.env (31 lines changed)

@@ -2,26 +2,11 @@
# Be sure to uncomment any line you populate
# Everything is optional, but some features won't work without external API access
# API Keys for external services (backend)
GOOGLE_CLOUD_API_KEY=''
TORRENT_IP_API_KEY=''
SECURITY_TRAILS_API_KEY=''
BUILT_WITH_API_KEY=''
URL_SCAN_API_KEY=''
TRANCO_USERNAME=''
TRANCO_API_KEY=''
CLOUDMERSIVE_API_KEY=''
# API Keys for external services (frontend)
REACT_APP_SHODAN_API_KEY=''
REACT_APP_WHO_API_KEY=''
# Configuration settings
# CHROME_PATH='/usr/bin/chromium' # The path to the Chromium executable
# PORT='3000' # Port to serve the API, when running server.js
# DISABLE_GUI='false' # Disable the GUI, and only serve the API
# API_TIMEOUT_LIMIT='10000' # The timeout limit for API requests, in milliseconds
# API_CORS_ORIGIN='*' # Enable CORS, by setting your allowed hostname(s) here
# API_ENABLE_RATE_LIMIT='true' # Enable rate limiting for the API
# REACT_APP_API_ENDPOINT='/api' # The endpoint for the API (can be local or remote)
# ENABLE_ANALYTICS='false' # Enable Plausible hit counter for the frontend
# GOOGLE_CLOUD_API_KEY=''
# SHODAN_API_KEY=''
# REACT_APP_SHODAN_API_KEY=''
# WHO_API_KEY=''
# REACT_APP_WHO_API_KEY=''
# SECURITY_TRAILS_API_KEY=''
# BUILT_WITH_API_KEY=''
# CI=false
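
Since every key above is optional, a consumer typically just probes for presence at startup. A minimal sketch of such a check in Node (the dotenv loading and the key list are illustrative assumptions; web-check's actual loading code is not part of this diff):

import 'dotenv/config'; // assumes the dotenv package is used to load .env

const optionalKeys = ['GOOGLE_CLOUD_API_KEY', 'SHODAN_API_KEY', 'WHO_API_KEY'];
for (const key of optionalKeys) {
  if (!process.env[key]) {
    // Features backed by this key are skipped rather than failing hard
    console.warn(`${key} is not set; the related check will be unavailable`);
  }
}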

.github/README.md (vendored): 1146 lines changed. File diff suppressed because it is too large.

View File

@@ -1 +0,0 @@
![Screenshot](https://raw.githubusercontent.com/Lissy93/web-check/HEAD/.github/screenshots/web-check-screenshot3.png)

Binary image changes under .github/screenshots/ (the viewer captured size metadata only):
New files: wc_carbon.png (31 KiB), wc_dnssec-2.png (46 KiB), wc_features-2.png (132 KiB)
Modified (sizes unchanged): 18 images, ranging 15 KiB to 2.0 MiB
Deleted: 24 images, ranging 12 KiB to 3.0 MiB

View File

@@ -1,37 +0,0 @@
# Inserts list of community members into ./README.md
name: 💓 Inserts Contributors & Sponsors
on:
  workflow_dispatch: # Manual dispatch
  schedule:
    - cron: '45 1 * * 0' # At 01:45 on Sunday.
jobs:
  # Job #1 - Fetches sponsors and inserts table into readme
  insert-sponsors:
    runs-on: ubuntu-latest
    name: Inserts Sponsors 💓
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Updates readme with sponsors
        uses: JamesIves/github-sponsors-readme-action@v1
        with:
          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
          file: .github/README.md
  # Job #2 - Fetches contributors and inserts table into readme
  insert-contributors:
    runs-on: ubuntu-latest
    name: Inserts Contributors 💓
    steps:
      - name: Updates readme with contributors
        uses: akhilmhdh/contributors-readme-action@v2.3.10
        env:
          GITHUB_TOKEN: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
        with:
          image_size: 80
          readme_path: .github/README.md
          columns_per_row: 6
          commit_message: 'docs: Updates contributors list'
          committer_username: liss-bot
          committer_email: liss-bot@d0h.co

View File

@@ -1,128 +0,0 @@
name: 🚀 Deploy to AWS
on:
  workflow_dispatch:
  push:
    branches:
      - master
    tags:
      - '*'
    paths:
      - api/**
      - serverless.yml
      - package.json
      - .github/workflows/deploy-aws.yml
jobs:
  deploy-api:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 16
      - name: Cache node_modules
        uses: actions/cache@v4
        with:
          path: node_modules
          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-yarn-
      - name: Create GitHub deployment for API
        uses: chrnorm/deployment-action@releases/v2
        id: deployment_api
        with:
          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
          environment: AWS (Backend API)
          ref: ${{ github.ref }}
      - name: Install Serverless CLI and dependencies
        run: |
          npm i -g serverless
          yarn
      - name: Deploy to AWS
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
        run: serverless deploy
      - name: Update GitHub deployment status (API)
        if: always()
        uses: chrnorm/deployment-status@v2
        with:
          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
          state: "${{ job.status }}"
          deployment_id: ${{ steps.deployment_api.outputs.deployment_id }}
          ref: ${{ github.ref }}
  deploy-frontend:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 16
      - name: Cache node_modules
        uses: actions/cache@v4
        with:
          path: node_modules
          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-yarn-
      - name: Create GitHub deployment for Frontend
        uses: chrnorm/deployment-action@v2
        id: deployment_frontend
        with:
          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
          environment: AWS (Frontend Web UI)
          ref: ${{ github.ref }}
      - name: Install dependencies and build
        run: |
          yarn install
          yarn build
      - name: Setup AWS
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-east-1
      - name: Upload to S3
        env:
          AWS_S3_BUCKET: 'web-check-frontend'
        run: aws s3 sync ./build/ s3://$AWS_S3_BUCKET/ --delete
      - name: Invalidate CloudFront cache
        uses: chetan/invalidate-cloudfront-action@v2
        env:
          DISTRIBUTION: E30XKAM2TG9FD8
          PATHS: '/*'
          AWS_REGION: 'us-east-1'
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      - name: Update GitHub deployment status (Frontend)
        if: always()
        uses: chrnorm/deployment-status@v2
        with:
          token: ${{ secrets.BOT_TOKEN || secrets.GITHUB_TOKEN }}
          state: "${{ job.status }}"
          deployment_id: ${{ steps.deployment_frontend.outputs.deployment_id }}
          ref: ${{ github.ref }}

View File

@@ -23,14 +23,14 @@ jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout 🛎️
- name: Checkout
uses: actions/checkout@v2
- name: Extract tag name 🏷️
- name: Extract tag name
shell: bash
run: echo "GIT_TAG=$(echo ${GITHUB_REF#refs/tags/} | sed 's/\//_/g')" >> $GITHUB_ENV
- name: Compute tags 🔖
- name: Compute tags
id: compute-tags
run: |
if [[ "${{ github.ref }}" == "refs/heads/master" ]]; then
@@ -41,33 +41,33 @@ jobs:
echo "DOCKERHUB_TAG=${DOCKERHUB_REGISTRY}/${DOCKER_USER}/${IMAGE_NAME}:${GIT_TAG}" >> $GITHUB_ENV
fi
- name: Set up QEMU 🐧
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx 🐳
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to GitHub Container Registry 🔑
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub 🔑
- name: Login to DockerHub
uses: docker/login-action@v1
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ env.DOCKER_USER }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
- name: Build and push Docker images 🛠️
- name: Build and push Docker images
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
push: true
platforms: linux/amd64,linux/arm64/v8
platforms: linux/amd64
tags: |
${{ env.GHCR_TAG }}
${{ env.DOCKERHUB_TAG }}

View File

@@ -8,7 +8,7 @@ jobs:
codeberg:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
with: { fetch-depth: 0 }
- uses: pixta-dev/repository-mirroring-action@v1
with:

.gitignore (vendored): 72 lines changed

@@ -1,62 +1,28 @@
# ------------------------
# ENVIRONMENT SETTINGS
# ------------------------
# Keys
.env
# ------------------------
# PRODUCTION
# ------------------------
/build/
# dependencies
/node_modules
/.pnp
.pnp.js
# ------------------------
# BUILT FILES
# ------------------------
dist/
.vercel/
.netlify/
.webpack/
.serverless/
.astro/
# testing
/coverage
# ------------------------
# DEPENDENCIES
# ------------------------
node_modules/
.yarn/cache/
.yarn/unplugged/
.yarn/build-state.yml
.yarn/install-state.gz
.pnpm/
.pnp.*
# production
/build
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
# ------------------------
# LOGS
# ------------------------
logs/
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# ------------------------
# TESTING
# ------------------------
coverage/
.nyc_output/
# ------------------------
# OS SPECIFIC
# ------------------------
.DS_Store
Thumbs.db
# ------------------------
# EDITORS
# ------------------------
.idea/
.vscode/
*.swp
*.swo
# Local Netlify folder
.netlify

View File

@@ -1,62 +1,12 @@
# Specify the Node.js version to use
ARG NODE_VERSION=21
# Specify the Debian version to use, the default is "bullseye"
ARG DEBIAN_VERSION=bullseye
# Use Node.js Docker image as the base image, with specific Node and Debian versions
FROM node:${NODE_VERSION}-${DEBIAN_VERSION} AS build
# Set the container's default shell to Bash and enable some options
SHELL ["/bin/bash", "-euo", "pipefail", "-c"]
# Install the Chromium browser, and download and verify Google Chrome's signing key
RUN apt-get update -qq --fix-missing && \
apt-get -qqy install --allow-unauthenticated gnupg wget && \
wget --quiet --output-document=- https://dl-ssl.google.com/linux/linux_signing_key.pub | gpg --dearmor > /etc/apt/trusted.gpg.d/google-archive.gpg && \
echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list && \
apt-get update -qq && \
apt-get -qqy --no-install-recommends install chromium traceroute python make g++ && \
rm -rf /var/lib/apt/lists/*
# Run the Chromium browser's version command and redirect its output to the /etc/chromium-version file
RUN /usr/bin/chromium --no-sandbox --version > /etc/chromium-version
# Set the working directory to /app
FROM node:16-buster-slim AS base
WORKDIR /app
# Copy package.json and yarn.lock to the working directory
COPY package.json yarn.lock ./
# Run yarn install to install dependencies and clear yarn cache
RUN apt-get update && \
yarn install --frozen-lockfile --network-timeout 100000 && \
rm -rf /app/node_modules/.cache
# Copy all files to working directory
FROM base AS builder
COPY . .
# Run yarn build to build the application
RUN yarn build --production
# Final stage
FROM node:${NODE_VERSION}-${DEBIAN_VERSION} AS final
WORKDIR /app
COPY package.json yarn.lock ./
COPY --from=build /app .
RUN apt-get update && \
apt-get install -y --no-install-recommends chromium traceroute && \
apt-get install -y chromium traceroute && \
chmod 755 /usr/bin/chromium && \
rm -rf /var/lib/apt/lists/* /app/node_modules/.cache
# Exposed container port: defaults to 3000, and can be modified through the environment variable PORT
EXPOSE ${PORT:-3000}
# Set the environment variable CHROME_PATH to specify the path to the Chromium binaries
rm -rf /var/lib/apt/lists/*
RUN npm install --force
EXPOSE 8888
ENV CHROME_PATH='/usr/bin/chromium'
# Define the command executed when the container starts and start the server.js of the Node.js application
CMD ["yarn", "start"]
CMD ["npm", "run", "serve"]

View File

@@ -1,51 +0,0 @@
const path = require('path');
const nodeExternals = require('webpack-node-externals');

module.exports = {
  target: 'node',
  mode: 'production',
  entry: {
    'carbon': './api/carbon.js',
    'cookies': './api/cookies.js',
    'dns-server': './api/dns-server.js',
    'dns': './api/dns.js',
    'dnssec': './api/dnssec.js',
    'features': './api/features.js',
    'get-ip': './api/get-ip.js',
    'headers': './api/headers.js',
    'hsts': './api/hsts.js',
    'linked-pages': './api/linked-pages.js',
    'mail-config': './api/mail-config.js',
    'ports': './api/ports.js',
    'quality': './api/quality.js',
    'redirects': './api/redirects.js',
    'robots-txt': './api/robots-txt.js',
    'screenshot': './api/screenshot.js',
    'security-txt': './api/security-txt.js',
    'sitemap': './api/sitemap.js',
    'social-tags': './api/social-tags.js',
    'ssl': './api/ssl.js',
    'status': './api/status.js',
    'tech-stack': './api/tech-stack.js',
    'trace-route': './api/trace-route.js',
    'txt-records': './api/txt-records.js',
    'whois': './api/whois.js',
  },
  externals: [nodeExternals()],
  output: {
    filename: '[name].js',
    path: path.resolve(__dirname, '.webpack'),
    libraryTarget: 'commonjs2'
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        use: {
          loader: 'babel-loader'
        },
        exclude: /node_modules/,
      }
    ]
  }
};
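
For illustration, this config could be driven with webpack's Node API roughly as follows (the config filename is an assumption; the diff does not show what this deleted file was called):

// Hypothetical driver for the bundling config above
const webpack = require('webpack');
const config = require('./webpack.functions.js'); // filename assumed

webpack(config, (err, stats) => {
  if (err || stats.hasErrors()) {
    console.error(err || stats.toString('errors-only'));
    process.exit(1);
  }
  // One bundle per entry, emitted to .webpack/ as commonjs2 modules
  console.log(`Bundled ${Object.keys(config.entry).length} API functions`);
});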

View File

@@ -1,155 +0,0 @@
const normalizeUrl = (url) => {
  return url.startsWith('http') ? url : `https://${url}`;
};

// If present, set a shorter timeout for API requests
const TIMEOUT = process.env.API_TIMEOUT_LIMIT ? parseInt(process.env.API_TIMEOUT_LIMIT, 10) : 60000;

// If present, set CORS allowed origins for responses
const ALLOWED_ORIGINS = process.env.API_CORS_ORIGIN || '*';

// Disable everything :( Setting this env var will turn off the instance, and show a message
const DISABLE_EVERYTHING = !!process.env.VITE_DISABLE_EVERYTHING;

// Set the platform currently being used
let PLATFORM = 'NETLIFY';
if (process.env.PLATFORM) { PLATFORM = process.env.PLATFORM.toUpperCase(); }
else if (process.env.VERCEL) { PLATFORM = 'VERCEL'; }
else if (process.env.WC_SERVER) { PLATFORM = 'NODE'; }

// Define the headers to be returned with each response
const headers = {
  'Access-Control-Allow-Origin': ALLOWED_ORIGINS,
  'Access-Control-Allow-Credentials': true,
  'Content-Type': 'application/json;charset=UTF-8',
};

const timeoutErrorMsg = 'You can re-trigger this request, by clicking "Retry"\n'
  + 'If you\'re running your own instance of Web Check, then you can '
  + 'resolve this issue, by increasing the timeout limit in the '
  + '`API_TIMEOUT_LIMIT` environmental variable to a higher value (in milliseconds), '
  + 'or if you\'re hosting on Vercel increase the maxDuration in vercel.json.\n\n'
  + `The public instance currently has a lower timeout of ${TIMEOUT}ms `
  + 'in order to keep running costs affordable, so that Web Check can '
  + 'remain freely available for everyone.';

const disabledErrorMsg = 'Error - WebCheck Temporarily Disabled.\n\n'
  + 'We\'re sorry, but due to the increased cost of running Web Check '
  + 'we\'ve had to temporarily disable the public instance. '
  + 'We\'re actively looking for affordable ways to keep Web Check running, '
  + 'while free to use for everybody.\n'
  + 'In the meantime, since we\'ve made our code free and open source, '
  + 'you can get Web Check running on your own system, by following the instructions in our GitHub repo';

// A middleware function used by all API routes on all platforms
const commonMiddleware = (handler) => {

  // Create a timeout promise, to throw an error if a request takes too long
  const createTimeoutPromise = (timeoutMs) => {
    return new Promise((_, reject) => {
      setTimeout(() => {
        reject(new Error(`Request timed-out after ${timeoutMs} ms`));
      }, timeoutMs);
    });
  };

  // Vercel
  const vercelHandler = async (request, response) => {
    if (DISABLE_EVERYTHING) {
      response.status(503).json({ error: disabledErrorMsg });
    }
    const queryParams = request.query || {};
    const rawUrl = queryParams.url;

    if (!rawUrl) {
      return response.status(500).json({ error: 'No URL specified' });
    }

    const url = normalizeUrl(rawUrl);
    try {
      // Race the handler against the timeout
      const handlerResponse = await Promise.race([
        handler(url, request),
        createTimeoutPromise(TIMEOUT)
      ]);

      if (handlerResponse.body && handlerResponse.statusCode) {
        response.status(handlerResponse.statusCode).json(handlerResponse.body);
      } else {
        response.status(200).json(
          typeof handlerResponse === 'object' ? handlerResponse : JSON.parse(handlerResponse)
        );
      }
    } catch (error) {
      let errorCode = 500;
      if (error.message.includes('timed-out') || response.statusCode === 504) {
        errorCode = 408;
        error.message = `${error.message}\n\n${timeoutErrorMsg}`;
      }
      response.status(errorCode).json({ error: error.message });
    }
  };

  // Netlify
  const netlifyHandler = async (event, context, callback) => {
    const queryParams = event.queryStringParameters || event.query || {};
    const rawUrl = queryParams.url;

    if (DISABLE_EVERYTHING) {
      callback(null, {
        statusCode: 503,
        body: JSON.stringify({ error: 'Web-Check is temporarily disabled. Please try again later.' }),
        headers,
      });
      return;
    }

    if (!rawUrl) {
      callback(null, {
        statusCode: 500,
        body: JSON.stringify({ error: 'No URL specified' }),
        headers,
      });
      return;
    }

    const url = normalizeUrl(rawUrl);

    try {
      // Race the handler against the timeout
      const handlerResponse = await Promise.race([
        handler(url, event, context),
        createTimeoutPromise(TIMEOUT)
      ]);

      if (handlerResponse.body && handlerResponse.statusCode) {
        callback(null, handlerResponse);
      } else {
        callback(null, {
          statusCode: 200,
          body: typeof handlerResponse === 'object' ? JSON.stringify(handlerResponse) : handlerResponse,
          headers,
        });
      }
    } catch (error) {
      callback(null, {
        statusCode: 500,
        body: JSON.stringify({ error: error.message }),
        headers,
      });
    }
  };

  // The format of the handlers varies between platforms
  const nativeMode = (['VERCEL', 'NODE'].includes(PLATFORM));
  return nativeMode ? vercelHandler : netlifyHandler;
};

if (PLATFORM === 'NETLIFY') {
  module.exports = commonMiddleware;
}

export default commonMiddleware;
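
The rest of this diff consumes the middleware the same way throughout: write a plain async function that receives the normalized URL, then export the wrapped version. A minimal sketch (the handler name and response shape here are invented for illustration):

// api/example.js - illustrative consumer of the common middleware
import middleware from './_common/middleware.js';

// Receives the normalized URL first; platform extras (event/context or request) follow
const exampleHandler = async (url) => {
  return { checkedUrl: url, ok: true }; // plain objects get serialized by the middleware
};

export const handler = middleware(exampleHandler);
export default handler;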

View File

@@ -1,84 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const convertTimestampToDate = (timestamp) => {
const [year, month, day, hour, minute, second] = [
timestamp.slice(0, 4),
timestamp.slice(4, 6) - 1,
timestamp.slice(6, 8),
timestamp.slice(8, 10),
timestamp.slice(10, 12),
timestamp.slice(12, 14),
].map(num => parseInt(num, 10));
return new Date(year, month, day, hour, minute, second);
}
const countPageChanges = (results) => {
let prevDigest = null;
return results.reduce((acc, curr) => {
if (curr[2] !== prevDigest) {
prevDigest = curr[2];
return acc + 1;
}
return acc;
}, -1);
}
const getAveragePageSize = (scans) => {
const totalSize = scans.map(scan => parseInt(scan[3], 10)).reduce((sum, size) => sum + size, 0);
return Math.round(totalSize / scans.length);
};
const getScanFrequency = (firstScan, lastScan, totalScans, changeCount) => {
const formatToTwoDecimal = num => parseFloat(num.toFixed(2));
const dayFactor = (lastScan - firstScan) / (1000 * 60 * 60 * 24);
const daysBetweenScans = formatToTwoDecimal(dayFactor / totalScans);
const daysBetweenChanges = formatToTwoDecimal(dayFactor / changeCount);
const scansPerDay = formatToTwoDecimal((totalScans - 1) / dayFactor);
const changesPerDay = formatToTwoDecimal(changeCount / dayFactor);
return {
daysBetweenScans,
daysBetweenChanges,
scansPerDay,
changesPerDay,
};
};
const wayBackHandler = async (url) => {
const cdxUrl = `https://web.archive.org/cdx/search/cdx?url=${url}&output=json&fl=timestamp,statuscode,digest,length,offset`;
try {
const { data } = await axios.get(cdxUrl);
// Check there's data
if (!data || !Array.isArray(data) || data.length <= 1) {
return { skipped: 'Site has never before been archived via the Wayback Machine' };
}
// Remove the header row
data.shift();
// Process and return the results
const firstScan = convertTimestampToDate(data[0][0]);
const lastScan = convertTimestampToDate(data[data.length - 1][0]);
const totalScans = data.length;
const changeCount = countPageChanges(data);
return {
firstScan,
lastScan,
totalScans,
changeCount,
averagePageSize: getAveragePageSize(data),
scanFrequency: getScanFrequency(firstScan, lastScan, totalScans, changeCount),
scans: data,
scanUrl: url,
};
} catch (err) {
return { error: `Error fetching Wayback data: ${err.message}` };
}
};
export const handler = middleware(wayBackHandler);
export default handler;

View File

@@ -1,105 +0,0 @@
import dns from 'dns';
import { URL } from 'url';
import middleware from './_common/middleware.js';
const DNS_SERVERS = [
{ name: 'AdGuard', ip: '176.103.130.130' },
{ name: 'AdGuard Family', ip: '176.103.130.132' },
{ name: 'CleanBrowsing Adult', ip: '185.228.168.10' },
{ name: 'CleanBrowsing Family', ip: '185.228.168.168' },
{ name: 'CleanBrowsing Security', ip: '185.228.168.9' },
{ name: 'CloudFlare', ip: '1.1.1.1' },
{ name: 'CloudFlare Family', ip: '1.1.1.3' },
{ name: 'Comodo Secure', ip: '8.26.56.26' },
{ name: 'Google DNS', ip: '8.8.8.8' },
{ name: 'Neustar Family', ip: '156.154.70.3' },
{ name: 'Neustar Protection', ip: '156.154.70.2' },
{ name: 'Norton Family', ip: '199.85.126.20' },
{ name: 'OpenDNS', ip: '208.67.222.222' },
{ name: 'OpenDNS Family', ip: '208.67.222.123' },
{ name: 'Quad9', ip: '9.9.9.9' },
{ name: 'Yandex Family', ip: '77.88.8.7' },
{ name: 'Yandex Safe', ip: '77.88.8.88' },
];
const knownBlockIPs = [
'146.112.61.106', // OpenDNS
'185.228.168.10', // CleanBrowsing
'8.26.56.26', // Comodo
'9.9.9.9', // Quad9
'208.69.38.170', // Some OpenDNS IPs
'208.69.39.170', // Some OpenDNS IPs
'208.67.222.222', // OpenDNS
'208.67.222.123', // OpenDNS FamilyShield
'199.85.126.10', // Norton
'199.85.126.20', // Norton Family
'156.154.70.22', // Neustar
'77.88.8.7', // Yandex
'77.88.8.8', // Yandex
'::1', // Localhost IPv6
'2a02:6b8::feed:0ff', // Yandex DNS
'2a02:6b8::feed:bad', // Yandex Safe
'2a02:6b8::feed:a11', // Yandex Family
'2620:119:35::35', // OpenDNS
'2620:119:53::53', // OpenDNS FamilyShield
'2606:4700:4700::1111', // Cloudflare
'2606:4700:4700::1001', // Cloudflare
'2001:4860:4860::8888', // Google DNS
'2a0d:2a00:1::', // AdGuard
'2a0d:2a00:2::' // AdGuard Family
];
const isDomainBlocked = async (domain, serverIP) => {
return new Promise((resolve) => {
dns.resolve4(domain, { server: serverIP }, (err, addresses) => {
if (!err) {
if (addresses.some(addr => knownBlockIPs.includes(addr))) {
resolve(true);
return;
}
resolve(false);
return;
}
dns.resolve6(domain, { server: serverIP }, (err6, addresses6) => {
if (!err6) {
if (addresses6.some(addr => knownBlockIPs.includes(addr))) {
resolve(true);
return;
}
resolve(false);
return;
}
if (err6.code === 'ENOTFOUND' || err6.code === 'SERVFAIL') {
resolve(true);
} else {
resolve(false);
}
});
});
});
};
const checkDomainAgainstDnsServers = async (domain) => {
let results = [];
for (let server of DNS_SERVERS) {
const isBlocked = await isDomainBlocked(domain, server.ip);
results.push({
server: server.name,
serverIp: server.ip,
isBlocked,
});
}
return results;
};
export const blockListHandler = async (url) => {
const domain = new URL(url).hostname;
const results = await checkDomainAgainstDnsServers(domain);
return { blocklists: results };
};
export const handler = middleware(blockListHandler);
export default handler;
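
The unwrapped export also makes this check easy to exercise directly; a rough local invocation might look like the following (the import path and an ESM context with top-level await are assumptions):

// Hypothetical local run of the blocklist check
import { blockListHandler } from './dns-server.js'; // path assumed

const { blocklists } = await blockListHandler('https://example.com');
for (const { server, serverIp, isBlocked } of blocklists) {
  console.log(`${server} (${serverIp}): ${isBlocked ? 'blocked' : 'resolves'}`);
}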

View File

@@ -1,20 +1,30 @@
import https from 'https';
import middleware from './_common/middleware.js';
const https = require('https');
exports.handler = async function(event, context) {
const siteURL = event.queryStringParameters.url;
const hstsHandler = async (url, event, context) => {
const errorResponse = (message, statusCode = 500) => {
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};
const hstsIncompatible = (message, compatible = false, hstsHeader = null ) => {
return { message, compatible, hstsHeader };
const hstsIncompatible = (message, statusCode = 200) => {
return {
statusCode: statusCode,
body: JSON.stringify({ message, compatible: false }),
};
};
if (!siteURL) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'URL parameter is missing!' }),
};
}
return new Promise((resolve, reject) => {
const req = https.request(url, res => {
const req = https.request(siteURL, res => {
const headers = res.headers;
const hstsHeader = headers['strict-transport-security'];
@@ -32,7 +42,14 @@ const hstsHandler = async (url, event, context) => {
} else if (!preload) {
resolve(hstsIncompatible(`HSTS header does not contain the preload directive.`));
} else {
resolve(hstsIncompatible(`Site is compatible with the HSTS preload list!`, true, hstsHeader));
resolve({
statusCode: 200,
body: JSON.stringify({
message: "Site is compatible with the HSTS preload list!",
compatible: true,
hstsHeader: hstsHeader,
}),
});
}
}
});
@@ -44,6 +61,3 @@ const hstsHandler = async (url, event, context) => {
req.end();
});
};
export const handler = middleware(hstsHandler);
export default handler;

View File

@@ -1,5 +1,4 @@
import net from 'net';
import middleware from './_common/middleware.js';
const net = require('net');
// A list of commonly used ports.
const PORTS = [
@@ -13,7 +12,7 @@ async function checkPort(port, domain) {
return new Promise((resolve, reject) => {
const socket = new net.Socket();
socket.setTimeout(1500);
socket.setTimeout(1500); // you may want to adjust the timeout
socket.once('connect', () => {
socket.destroy();
@@ -34,8 +33,12 @@ async function checkPort(port, domain) {
});
}
const portsHandler = async (url, event, context) => {
const domain = url.replace(/(^\w+:|^)\/\//, '');
exports.handler = async (event, context) => {
const domain = event.queryStringParameters.url;
if (!domain) {
return errorResponse('Missing domain parameter.');
}
const delay = ms => new Promise(res => setTimeout(res, ms));
const timeout = delay(9000);
@@ -73,16 +76,15 @@ const portsHandler = async (url, event, context) => {
return errorResponse('The function timed out before completing.');
}
// Sort openPorts and failedPorts before returning
openPorts.sort((a, b) => a - b);
failedPorts.sort((a, b) => a - b);
return { openPorts, failedPorts };
return {
statusCode: 200,
body: JSON.stringify({ openPorts, failedPorts }),
};
};
const errorResponse = (message, statusCode = 444) => {
return { error: message };
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};
export const handler = middleware(portsHandler);
export default handler;

api/content-links.js (new file, 60 lines)

@@ -0,0 +1,60 @@
const axios = require('axios');
const cheerio = require('cheerio');
const urlLib = require('url');
exports.handler = async (event, context) => {
let url = event.queryStringParameters.url;
// Check if url includes protocol
if (!url.startsWith('http://') && !url.startsWith('https://')) {
url = 'http://' + url;
}
try {
const response = await axios.get(url);
const html = response.data;
const $ = cheerio.load(html);
const internalLinksMap = new Map();
const externalLinksMap = new Map();
$('a[href]').each((i, link) => {
const href = $(link).attr('href');
const absoluteUrl = urlLib.resolve(url, href);
if (absoluteUrl.startsWith(url)) {
const count = internalLinksMap.get(absoluteUrl) || 0;
internalLinksMap.set(absoluteUrl, count + 1);
} else if (href.startsWith('http://') || href.startsWith('https://')) {
const count = externalLinksMap.get(absoluteUrl) || 0;
externalLinksMap.set(absoluteUrl, count + 1);
}
});
// Convert maps to sorted arrays
const internalLinks = [...internalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
const externalLinks = [...externalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
if (internalLinks.length === 0 && externalLinks.length === 0) {
return {
statusCode: 400,
body: JSON.stringify({
skipped: 'No internal or external links found. '
+ 'This may be due to the website being dynamically rendered, using a client-side framework (like React), and without SSR enabled. '
+ 'That would mean that the static HTML returned from the HTTP request doesn\'t contain any meaningful content for Web-Check to analyze. '
+ 'You can rectify this by using a headless browser to render the page instead.',
}),
};
}
return {
statusCode: 200,
body: JSON.stringify({ internal: internalLinks, external: externalLinks }),
};
} catch (error) {
console.log(error);
return {
statusCode: 500,
body: JSON.stringify({ error: 'Failed fetching data' }),
};
}
};
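
These Lambda-style functions can be smoke-tested locally by handing them a fake event object; a sketch based on the parameter shape used above (the invocation itself is not part of the diff):

// Hypothetical local invocation of the content-links handler
const { handler } = require('./content-links.js');

handler({ queryStringParameters: { url: 'example.com' } }, {})
  .then(({ statusCode, body }) => {
    const { internal = [], external = [] } = JSON.parse(body);
    console.log(statusCode, `${internal.length} internal, ${external.length} external links`);
  });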

View File

@@ -1,58 +0,0 @@
import axios from 'axios';
import puppeteer from 'puppeteer';
import middleware from './_common/middleware.js';
const getPuppeteerCookies = async (url) => {
const browser = await puppeteer.launch({
headless: 'new',
args: ['--no-sandbox', '--disable-setuid-sandbox'],
});
try {
const page = await browser.newPage();
const navigationPromise = page.goto(url, { waitUntil: 'networkidle2' });
const timeoutPromise = new Promise((_, reject) =>
setTimeout(() => reject(new Error('Puppeteer took too long!')), 3000)
);
await Promise.race([navigationPromise, timeoutPromise]);
return await page.cookies();
} finally {
await browser.close();
}
};
const cookieHandler = async (url) => {
let headerCookies = null;
let clientCookies = null;
try {
const response = await axios.get(url, {
withCredentials: true,
maxRedirects: 5,
});
headerCookies = response.headers['set-cookie'];
} catch (error) {
if (error.response) {
return { error: `Request failed with status ${error.response.status}: ${error.message}` };
} else if (error.request) {
return { error: `No response received: ${error.message}` };
} else {
return { error: `Error setting up request: ${error.message}` };
}
}
try {
clientCookies = await getPuppeteerCookies(url);
} catch (_) {
clientCookies = null;
}
if (!headerCookies && (!clientCookies || clientCookies.length === 0)) {
return { skipped: 'No cookies' };
}
return { headerCookies, clientCookies };
};
export const handler = middleware(cookieHandler);
export default handler;

View File

@@ -1,7 +1,16 @@
import https from 'https';
import middleware from './_common/middleware.js';
const https = require('https');
exports.handler = async function(event, context) {
let { url } = event.queryStringParameters;
if (!url) {
return errorResponse('URL query parameter is required.');
}
// Extract hostname from URL
const parsedUrl = new URL(url);
const domain = parsedUrl.hostname;
const dnsSecHandler = async (domain) => {
const dnsTypes = ['DNSKEY', 'DS', 'RRSIG'];
const records = {};
@@ -25,11 +34,7 @@ const dnsSecHandler = async (domain) => {
});
res.on('end', () => {
try {
resolve(JSON.parse(data));
} catch (error) {
reject(new Error('Invalid JSON response'));
}
});
res.on('error', error => {
@@ -46,12 +51,19 @@ const dnsSecHandler = async (domain) => {
records[type] = { isFound: false, answer: null, response: dnsResponse};
}
} catch (error) {
throw new Error(`Error fetching ${type} record: ${error.message}`); // This will be caught and handled by the commonMiddleware
return errorResponse(`Error fetching ${type} record: ${error.message}`);
}
}
return records;
return {
statusCode: 200,
body: JSON.stringify(records),
};
};
export const handler = middleware(dnsSecHandler);
export default handler;
const errorResponse = (message, statusCode = 444) => {
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};

View File

@@ -1,10 +1,11 @@
import { promises as dnsPromises, lookup } from 'dns';
import axios from 'axios';
import middleware from './_common/middleware.js';
const dns = require('dns');
const dnsPromises = dns.promises;
// const https = require('https');
const axios = require('axios');
const dnsHandler = async (url) => {
exports.handler = async (event) => {
const domain = event.queryStringParameters.url.replace(/^(?:https?:\/\/)?/i, "");
try {
const domain = url.replace(/^(?:https?:\/\/)?/i, "");
const addresses = await dnsPromises.resolve4(domain);
const results = await Promise.all(addresses.map(async (address) => {
const hostname = await dnsPromises.reverse(address).catch(() => null);
@@ -21,7 +22,6 @@ const dnsHandler = async (url) => {
dohDirectSupports,
};
}));
// let dohMozillaSupport = false;
// try {
// const mozillaList = await axios.get('https://firefox.settings.services.mozilla.com/v1/buckets/security-state/collections/onecrl/records');
@@ -29,18 +29,20 @@ const dnsHandler = async (url) => {
// } catch (error) {
// console.error(error);
// }
return {
statusCode: 200,
body: JSON.stringify({
domain,
dns: results,
// dohMozillaSupport,
}),
};
} catch (error) {
throw new Error(`An error occurred while resolving DNS. ${error.message}`); // This will be caught and handled by the commonMiddleware
return {
statusCode: 500,
body: JSON.stringify({
error: `An error occurred while resolving DNS. ${error.message}`,
}),
};
}
};
export const handler = middleware(dnsHandler);
export default handler;

api/find-url-ip.js (new file, 33 lines)

@@ -0,0 +1,33 @@
const dns = require('dns');
/* Lambda function to fetch the IP address of a given URL */
exports.handler = function (event, context, callback) {
const addressParam = event.queryStringParameters.url;
if (!addressParam) {
callback(null, errorResponse('Address parameter is missing.'));
return;
}
const address = decodeURIComponent(addressParam)
.replaceAll('https://', '')
.replaceAll('http://', '');
dns.lookup(address, (err, ip, family) => {
if (err) {
callback(null, errorResponse(err.message));
} else {
callback(null, {
statusCode: 200,
body: JSON.stringify({ ip, family }),
});
}
});
};
const errorResponse = (message, statusCode = 444) => {
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};

View File

@@ -1,114 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const hasWaf = (waf) => {
return {
hasWaf: true, waf,
}
};
const firewallHandler = async (url) => {
const fullUrl = url.startsWith('http') ? url : `http://${url}`;
try {
const response = await axios.get(fullUrl);
const headers = response.headers;
if (headers['server'] && headers['server'].includes('cloudflare')) {
return hasWaf('Cloudflare');
}
if (headers['x-powered-by'] && headers['x-powered-by'].includes('AWS Lambda')) {
return hasWaf('AWS WAF');
}
if (headers['server'] && headers['server'].includes('AkamaiGHost')) {
return hasWaf('Akamai');
}
if (headers['server'] && headers['server'].includes('Sucuri')) {
return hasWaf('Sucuri');
}
if (headers['server'] && headers['server'].includes('BarracudaWAF')) {
return hasWaf('Barracuda WAF');
}
if (headers['server'] && (headers['server'].includes('F5 BIG-IP') || headers['server'].includes('BIG-IP'))) {
return hasWaf('F5 BIG-IP');
}
if (headers['x-sucuri-id'] || headers['x-sucuri-cache']) {
return hasWaf('Sucuri CloudProxy WAF');
}
if (headers['server'] && headers['server'].includes('FortiWeb')) {
return hasWaf('Fortinet FortiWeb WAF');
}
if (headers['server'] && headers['server'].includes('Imperva')) {
return hasWaf('Imperva SecureSphere WAF');
}
if (headers['x-protected-by'] && headers['x-protected-by'].includes('Sqreen')) {
return hasWaf('Sqreen');
}
if (headers['x-waf-event-info']) {
return hasWaf('Reblaze WAF');
}
if (headers['set-cookie'] && headers['set-cookie'].includes('_citrix_ns_id')) {
return hasWaf('Citrix NetScaler');
}
if (headers['x-denied-reason'] || headers['x-wzws-requested-method']) {
return hasWaf('WangZhanBao WAF');
}
if (headers['x-webcoment']) {
return hasWaf('Webcoment Firewall');
}
if (headers['server'] && headers['server'].includes('Yundun')) {
return hasWaf('Yundun WAF');
}
if (headers['x-yd-waf-info'] || headers['x-yd-info']) {
return hasWaf('Yundun WAF');
}
if (headers['server'] && headers['server'].includes('Safe3WAF')) {
return hasWaf('Safe3 Web Application Firewall');
}
if (headers['server'] && headers['server'].includes('NAXSI')) {
return hasWaf('NAXSI WAF');
}
if (headers['x-datapower-transactionid']) {
return hasWaf('IBM WebSphere DataPower');
}
if (headers['server'] && headers['server'].includes('QRATOR')) {
return hasWaf('QRATOR WAF');
}
if (headers['server'] && headers['server'].includes('ddos-guard')) {
return hasWaf('DDoS-Guard WAF');
}
return {
hasWaf: false,
}
} catch (error) {
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
};
export const handler = middleware(firewallHandler);
export default handler;

api/follow-redirects.js (new file, 35 lines)

@@ -0,0 +1,35 @@
exports.handler = async (event) => {
const { url } = event.queryStringParameters;
const redirects = [url];
try {
const got = await import('got');
await got.default(url, {
followRedirect: true,
maxRedirects: 12,
hooks: {
beforeRedirect: [
(options, response) => {
redirects.push(response.headers.location);
},
],
},
});
return {
statusCode: 200,
body: JSON.stringify({
redirects: redirects,
}),
};
} catch (error) {
return errorResponse(`Error: ${error.message}`);
}
};
const errorResponse = (message, statusCode = 444) => {
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};

api/generate-har.js (new file, 54 lines)

@@ -0,0 +1,54 @@
const puppeteer = require('puppeteer-core');
const chromium = require('chrome-aws-lambda');
exports.handler = async (event, context) => {
let browser = null;
let result = null;
let code = 200;
try {
const url = event.queryStringParameters.url;
browser = await chromium.puppeteer.launch({
args: chromium.args,
defaultViewport: chromium.defaultViewport,
executablePath: await chromium.executablePath,
headless: chromium.headless,
});
const page = await browser.newPage();
const requests = [];
// Capture requests
page.on('request', request => {
requests.push({
url: request.url(),
method: request.method(),
headers: request.headers(),
});
});
await page.goto(url, {
waitUntil: 'networkidle0', // wait until all requests are finished
});
result = requests;
} catch (error) {
code = 500;
result = {
error: 'Failed to create HAR file',
details: error.toString(),
};
} finally {
if (browser !== null) {
await browser.close();
}
}
return {
statusCode: code,
body: JSON.stringify(result),
};
};
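
Despite the filename, the handler returns a flat array of captured requests rather than a full HAR document. A minimal sketch of wrapping that output in a HAR 1.2 style envelope (only a small subset of the spec's fields is shown, so treat this as illustrative rather than spec-complete):

// Hypothetical post-processing of the captured request list
const toMinimalHar = (requests) => ({
  log: {
    version: '1.2',
    creator: { name: 'web-check', version: 'n/a' }, // values illustrative
    entries: requests.map((req) => ({
      request: {
        method: req.method,
        url: req.url,
        headers: Object.entries(req.headers).map(([name, value]) => ({ name, value })),
      },
    })),
  },
});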

View File

@@ -1,7 +1,14 @@
import https from 'https';
import middleware from './_common/middleware.js';
const https = require('https');
const carbonHandler = async (url) => {
exports.handler = async (event, context) => {
const { url } = event.queryStringParameters;
if (!url) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'url query parameter is required' }),
};
}
// First, get the size of the website's HTML
const getHtmlSize = (url) => new Promise((resolve, reject) => {
@@ -42,11 +49,14 @@ const carbonHandler = async (url) => {
}
carbonData.scanUrl = url;
return carbonData;
return {
statusCode: 200,
body: JSON.stringify(carbonData),
};
} catch (error) {
throw new Error(`Error: ${error.message}`);
return {
statusCode: 500,
body: JSON.stringify({ error: `Error: ${error.message}` }),
};
}
};
export const handler = middleware(carbonHandler);
export default handler;

api/get-cookies.js (new file, 26 lines)

@@ -0,0 +1,26 @@
const axios = require('axios');
exports.handler = async function(event, context) {
const { url } = event.queryStringParameters;
if (!url) {
return {
statusCode: 400,
body: JSON.stringify({ message: 'url query string parameter is required' }),
};
}
try {
const response = await axios.get(url, {withCredentials: true});
const cookies = response.headers['set-cookie'];
return {
statusCode: 200,
body: JSON.stringify({ cookies }),
};
} catch (error) {
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
};

View File

@@ -1,9 +1,8 @@
import dns from 'dns';
import util from 'util';
import middleware from './_common/middleware.js';
const dns = require('dns');
const util = require('util');
const dnsHandler = async (url) => {
let hostname = url;
exports.handler = async function(event, context) {
let hostname = event.queryStringParameters.url;
// Handle URLs by extracting hostname
if (hostname.startsWith('http://') || hostname.startsWith('https://')) {
@@ -36,6 +35,8 @@ const dnsHandler = async (url) => {
]);
return {
statusCode: 200,
body: JSON.stringify({
A: a,
AAAA: aaaa,
MX: mx,
@@ -45,11 +46,14 @@ const dnsHandler = async (url) => {
SOA: soa,
SRV: srv,
PTR: ptr
})
};
} catch (error) {
throw new Error(error.message);
return {
statusCode: 500,
body: JSON.stringify({
error: error.message
})
};
}
};
export const handler = middleware(dnsHandler);
export default handler;

api/get-headers.js (new file, 31 lines)

@@ -0,0 +1,31 @@
const axios = require('axios');
exports.handler = async function(event, context) {
const { url } = event.queryStringParameters;
if (!url) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'url query string parameter is required' }),
};
}
try {
const response = await axios.get(url, {
validateStatus: function (status) {
return status >= 200 && status < 600; // Resolve only if the status code is less than 600
},
});
return {
statusCode: 200,
body: JSON.stringify(response.headers),
};
} catch (error) {
console.log(error);
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
};

View File

@@ -1,23 +0,0 @@
import dns from 'dns';
import middleware from './_common/middleware.js';
const lookupAsync = (address) => {
return new Promise((resolve, reject) => {
dns.lookup(address, (err, ip, family) => {
if (err) {
reject(err);
} else {
resolve({ ip, family });
}
});
});
};
const ipHandler = async (url) => {
const address = url.replaceAll('https://', '').replaceAll('http://', '');
return await lookupAsync(address);
};
export const handler = middleware(ipHandler);
export default handler;

View File

@@ -1,11 +1,9 @@
import dns from 'dns/promises';
import middleware from './_common/middleware.js';
const dns = require('dns').promises;
const txtRecordHandler = async (url, event, context) => {
exports.handler = async (event) => {
const url = new URL(event.queryStringParameters.url);
try {
const parsedUrl = new URL(url);
const txtRecords = await dns.resolveTxt(parsedUrl.hostname);
const txtRecords = await dns.resolveTxt(url.hostname);
// Parsing and formatting TXT records into a single object
const readableTxtRecords = txtRecords.reduce((acc, recordArray) => {
@@ -18,16 +16,15 @@ const txtRecordHandler = async (url, event, context) => {
return { ...acc, ...recordObject };
}, {});
return readableTxtRecords;
return {
statusCode: 200,
body: JSON.stringify(readableTxtRecords),
};
} catch (error) {
if (error.code === 'ERR_INVALID_URL') {
throw new Error(`Invalid URL ${error}`);
} else {
throw error;
}
console.error('Error:', error);
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
};
export const handler = middleware(txtRecordHandler);
export default handler;

View File

@@ -1,19 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const headersHandler = async (url, event, context) => {
try {
const response = await axios.get(url, {
validateStatus: function (status) {
return status >= 200 && status < 600; // Resolve only if the status code is less than 600
},
});
return response.headers;
} catch (error) {
throw new Error(error.message);
}
};
export const handler = middleware(headersHandler);
export default handler;

View File

@@ -1,26 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const httpsSecHandler = async (url) => {
const fullUrl = url.startsWith('http') ? url : `http://${url}`;
try {
const response = await axios.get(fullUrl);
const headers = response.headers;
return {
strictTransportPolicy: headers['strict-transport-security'] ? true : false,
xFrameOptions: headers['x-frame-options'] ? true : false,
xContentTypeOptions: headers['x-content-type-options'] ? true : false,
xXSSProtection: headers['x-xss-protection'] ? true : false,
contentSecurityPolicy: headers['content-security-policy'] ? true : false,
}
} catch (error) {
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
};
export const handler = middleware(httpsSecHandler);
export default handler;

View File

@@ -1,70 +0,0 @@
import axios from 'axios';
import unzipper from 'unzipper';
import csv from 'csv-parser';
import fs from 'fs';
import middleware from './_common/middleware.js';
// Should also work with the following sources:
// https://www.domcop.com/files/top/top10milliondomains.csv.zip
// https://tranco-list.eu/top-1m.csv.zip
// https://www.domcop.com/files/top/top10milliondomains.csv.zip
// https://radar.cloudflare.com/charts/LargerTopDomainsTable/attachment?id=525&top=1000000
// https://statvoo.com/dl/top-1million-sites.csv.zip
const FILE_URL = 'https://s3-us-west-1.amazonaws.com/umbrella-static/top-1m.csv.zip';
const TEMP_FILE_PATH = '/tmp/top-1m.csv';
const rankHandler = async (url) => {
let domain = null;
try {
domain = new URL(url).hostname;
} catch (e) {
throw new Error('Invalid URL');
}
// Download and unzip the file if not in cache
if (!fs.existsSync(TEMP_FILE_PATH)) {
const response = await axios({
method: 'GET',
url: FILE_URL,
responseType: 'stream'
});
await new Promise((resolve, reject) => {
response.data
.pipe(unzipper.Extract({ path: '/tmp' }))
.on('close', resolve)
.on('error', reject);
});
}
// Parse the CSV and find the rank
return new Promise((resolve, reject) => {
const csvStream = fs.createReadStream(TEMP_FILE_PATH)
.pipe(csv({
headers: ['rank', 'domain'],
}))
.on('data', (row) => {
if (row.domain === domain) {
csvStream.destroy();
resolve({
domain: domain,
rank: row.rank,
isFound: true,
});
}
})
.on('end', () => {
resolve({
skipped: `Skipping, as ${domain} is not present in the Umbrella top 1M list.`,
domain: domain,
isFound: false,
});
})
.on('error', reject);
});
};
export const handler = middleware(rankHandler);
export default handler;

api/lighthouse-report.js (new file, 40 lines)

@@ -0,0 +1,40 @@
const axios = require('axios');
exports.handler = function(event, context, callback) {
const { url } = event.queryStringParameters;
if (!url) {
callback(null, {
statusCode: 400,
body: JSON.stringify({ error: 'URL param is required'}),
});
}
const apiKey = process.env.GOOGLE_CLOUD_API_KEY;
if (!apiKey) {
callback(null, {
statusCode: 500,
body: JSON.stringify({ error: 'API key (GOOGLE_CLOUD_API_KEY) not set'}),
});
}
const endpoint = `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?url=${encodeURIComponent(url)}&category=PERFORMANCE&category=ACCESSIBILITY&category=BEST_PRACTICES&category=SEO&category=PWA&strategy=mobile&key=${apiKey}`;
axios.get(endpoint)
.then(
(response) => {
callback(null, {
statusCode: 200,
body: JSON.stringify(response.data),
});
}
).catch(
() => {
callback(null, {
statusCode: 500,
body: JSON.stringify({ error: 'Error running Lighthouse'}),
});
}
);
};

View File

@@ -1,49 +0,0 @@
import axios from 'axios';
import cheerio from 'cheerio';
import urlLib from 'url';
import middleware from './_common/middleware.js';
const linkedPagesHandler = async (url) => {
const response = await axios.get(url);
const html = response.data;
const $ = cheerio.load(html);
const internalLinksMap = new Map();
const externalLinksMap = new Map();
// Get all links on the page
$('a[href]').each((i, link) => {
const href = $(link).attr('href');
const absoluteUrl = urlLib.resolve(url, href);
// Check if absolute / relative, append to appropriate map or increment occurrence count
if (absoluteUrl.startsWith(url)) {
const count = internalLinksMap.get(absoluteUrl) || 0;
internalLinksMap.set(absoluteUrl, count + 1);
} else if (href.startsWith('http://') || href.startsWith('https://')) {
const count = externalLinksMap.get(absoluteUrl) || 0;
externalLinksMap.set(absoluteUrl, count + 1);
}
});
// Sort by most occurrences, remove duplicates, and convert to array
const internalLinks = [...internalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
const externalLinks = [...externalLinksMap.entries()].sort((a, b) => b[1] - a[1]).map(entry => entry[0]);
// If there were no links, then mark as skipped and show reasons
if (internalLinks.length === 0 && externalLinks.length === 0) {
return {
statusCode: 400,
body: {
skipped: 'No internal or external links found. '
+ 'This may be due to the website being dynamically rendered, using a client-side framework (like React), and without SSR enabled. '
+ 'That would mean that the static HTML returned from the HTTP request doesn\'t contain any meaningful content for Web-Check to analyze. '
+ 'You can rectify this by using a headless browser to render the page instead.',
},
};
}
return { internal: internalLinks, external: externalLinks };
};
export const handler = middleware(linkedPagesHandler);
export default handler;

View File

@@ -1,12 +1,11 @@
import dns from 'dns';
import URL from 'url-parse';
import middleware from './_common/middleware.js';
const dns = require('dns').promises;
const URL = require('url-parse');
// TODO: Fix.
const mailConfigHandler = async (url, event, context) => {
exports.handler = async (event, context) => {
try {
const domain = new URL(url).hostname || new URL(url).pathname;
let domain = event.queryStringParameters.url;
const parsedUrl = new URL(domain);
domain = parsedUrl.hostname || parsedUrl.pathname;
// Get MX records
const mxRecords = await dns.resolveMx(domain);
@@ -55,28 +54,26 @@ const mailConfigHandler = async (url, event, context) => {
if (yahooMx.length > 0) {
mailServices.push({ provider: 'Yahoo', value: yahooMx[0].exchange });
}
// Check MX records for Mimecast
const mimecastMx = mxRecords.filter(record => record.exchange.includes('mimecast.com'));
if (mimecastMx.length > 0) {
mailServices.push({ provider: 'Mimecast', value: mimecastMx[0].exchange });
}
return {
statusCode: 200,
body: JSON.stringify({
mxRecords,
txtRecords: emailTxtRecords,
mailServices,
}),
};
} catch (error) {
if (error.code === 'ENOTFOUND' || error.code === 'ENODATA') {
return { skipped: 'No mail server in use on this domain' };
return {
statusCode: 200,
body: JSON.stringify({ skipped: 'No mail server in use on this domain' }),
};
} else {
return {
statusCode: 500,
body: { error: error.message },
body: JSON.stringify({ error: error.message }),
};
}
}
};
export const handler = middleware(mailConfigHandler);
export default handler;

api/meta-tags.js (new file, 58 lines)

@@ -0,0 +1,58 @@
const axios = require('axios');
const cheerio = require('cheerio');
exports.handler = async (event) => {
let url;
try {
// Add https:// prefix if not present
url = new URL(event.queryStringParameters.url);
if (!url.protocol) {
url = new URL('https://' + event.queryStringParameters.url);
}
} catch (error) {
// Return error if URL is not valid
return {
statusCode: 400,
body: JSON.stringify({
error: 'Invalid URL provided.',
}),
};
}
try {
// Fetch the page
const response = await axios.get(url.toString());
// Parse the page body with cheerio
const $ = cheerio.load(response.data);
// Extract meta tags
const metaTags = {};
$('head meta').each((index, element) => {
const name = $(element).attr('name');
const property = $(element).attr('property');
const content = $(element).attr('content');
if (name) {
metaTags[name] = content;
} else if (property) {
metaTags[property] = content;
}
});
// Return meta tags
return {
statusCode: 200,
body: JSON.stringify(metaTags),
};
} catch (error) {
// Return error if there's a problem fetching or parsing the page
return {
statusCode: 500,
body: JSON.stringify({
error: error.message,
}),
};
}
};
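
For a page whose head contains, say, a description meta tag and an Open Graph title, the returned body would be a flat name-to-content map along these lines (values invented):

// Example parsed response body (illustrative values only)
{
  "description": "An example page",
  "og:title": "Example Site"
}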

api/network-requests.js (new file, 58 lines)

@@ -0,0 +1,58 @@
const puppeteer = require('puppeteer-core');
const chromium = require('chrome-aws-lambda');
exports.handler = async (event, context) => {
const urlParam = event.queryStringParameters.url;
if (!urlParam) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'Missing url parameter' })
};
}
let url;
try {
url = new URL(urlParam.includes('://') ? urlParam : 'https://' + urlParam);
} catch (error) {
return {
statusCode: 500,
body: JSON.stringify({ error: 'Invalid URL format' }),
};
}
// Launch the browser and open a new page
const browser = await puppeteer.launch({
args: chromium.args,
defaultViewport: { width: 800, height: 600 },
executablePath: process.env.CHROME_PATH || await chromium.executablePath,
headless: chromium.headless,
ignoreHTTPSErrors: true,
ignoreDefaultArgs: ['--disable-extensions'],
});
const page = await browser.newPage();
// To store network activity
let networkActivity = [];
// Register an event listener for network requests
page.on('request', (request) => {
networkActivity.push({
url: request.url(),
method: request.method(),
headers: request.headers(),
postData: request.postData(),
});
});
// Navigate to the page and wait for it to load
await page.goto(url, { waitUntil: 'networkidle2' });
// Close the browser
await browser.close();
// Return network activity
return {
statusCode: 200,
body: JSON.stringify(networkActivity),
};
};

View File

@@ -1,22 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const qualityHandler = async (url, event, context) => {
const apiKey = process.env.GOOGLE_CLOUD_API_KEY;
if (!apiKey) {
throw new Error(
'Missing Google API. You need to set the `GOOGLE_CLOUD_API_KEY` environment variable'
);
}
const endpoint = `https://www.googleapis.com/pagespeedonline/v5/runPagespeed?`
+ `url=${encodeURIComponent(url)}&category=PERFORMANCE&category=ACCESSIBILITY`
+ `&category=BEST_PRACTICES&category=SEO&category=PWA&strategy=mobile`
+ `&key=${apiKey}`;
return (await axios.get(endpoint)).data;
};
export const handler = middleware(qualityHandler);
export default handler;

View File

@@ -1,26 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const rankHandler = async (url) => {
const domain = url ? new URL(url).hostname : null;
if (!domain) throw new Error('Invalid URL');
try {
const auth = process.env.TRANCO_API_KEY ? // Auth is optional.
{ auth: { username: process.env.TRANCO_USERNAME, password: process.env.TRANCO_API_KEY } }
: {};
const response = await axios.get(
`https://tranco-list.eu/api/ranks/domain/${domain}`, { timeout: 5000 }, auth,
);
if (!response.data || !response.data.ranks || response.data.ranks.length === 0) {
return { skipped: `Skipping, as ${domain} isn't ranked in the top 100 million sites yet.`};
}
return response.data;
} catch (error) {
return { error: `Unable to fetch rank, ${error.message}` };
}
};
export const handler = middleware(rankHandler);
export default handler;
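For reference, a successful Tranco lookup resolves to the domain plus a history of rank entries. The ranks array is what the length check above inspects; the per-entry field names and values below are an assumption, for illustration:

// { domain: 'example.com',
//   ranks: [ { date: '2023-07-29', rank: 172 }, ... ] }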

45
api/read-robots-txt.js Normal file
View File

@@ -0,0 +1,45 @@
const axios = require('axios');
exports.handler = async function(event, context) {
const siteURL = event.queryStringParameters.url;
if (!siteURL) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'Missing url query parameter' }),
};
}
let parsedURL;
try {
parsedURL = new URL(siteURL);
} catch (error) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'Invalid url query parameter' }),
};
}
const robotsURL = `${parsedURL.protocol}//${parsedURL.hostname}/robots.txt`;
try {
const response = await axios.get(robotsURL);
if (response.status === 200) {
return {
statusCode: 200,
body: response.data,
};
} else {
return {
statusCode: response.status,
body: JSON.stringify({ error: 'Failed to fetch robots.txt', statusCode: response.status }),
};
}
} catch (error) {
return {
statusCode: 500,
body: JSON.stringify({ error: `Error fetching robots.txt: ${error.message}` }),
};
}
};
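Note that axios rejects on non-2xx responses by default, so the else branch above is effectively dead code: a 404 for robots.txt lands in the catch block as a 500 instead. If per-status handling is wanted, the request has to opt out of that behaviour, e.g.:

// Sketch: let non-2xx responses through so response.status can actually be inspected
const response = await axios.get(robotsURL, { validateStatus: () => true });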

View File

@@ -1,28 +0,0 @@
import got from 'got';
import middleware from './_common/middleware.js';
const redirectsHandler = async (url) => {
const redirects = [url];
try {
await got(url, {
followRedirect: true,
maxRedirects: 12,
hooks: {
beforeRedirect: [
(options, response) => {
redirects.push(response.headers.location);
},
],
},
});
return {
redirects: redirects,
};
} catch (error) {
throw new Error(`Error: ${error.message}`);
}
};
export const handler = middleware(redirectsHandler);
export default handler;
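An illustrative result for a site that upgrades HTTP to HTTPS (values invented):

// { redirects: [ 'http://example.com', 'https://example.com/' ] }

One caveat: Location headers may be relative paths, so the entries pushed in beforeRedirect are not guaranteed to be absolute URLs.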

View File

@@ -1,71 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const parseRobotsTxt = (content) => {
const lines = content.split('\n');
const rules = [];
lines.forEach(line => {
line = line.trim(); // Remove leading and trailing whitespace
let match = line.match(/^(Allow|Disallow):\s*(\S*)$/i);
if (match) {
const rule = {
lbl: match[1],
val: match[2],
};
rules.push(rule);
} else {
match = line.match(/^(User-agent):\s*(\S*)$/i);
if (match) {
const rule = {
lbl: match[1],
val: match[2],
};
rules.push(rule);
}
}
});
return { robots: rules };
}
const robotsHandler = async function(url) {
let parsedURL;
try {
parsedURL = new URL(url);
} catch (error) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'Invalid url query parameter' }),
};
}
const robotsURL = `${parsedURL.protocol}//${parsedURL.hostname}/robots.txt`;
try {
const response = await axios.get(robotsURL);
if (response.status === 200) {
const parsedData = parseRobotsTxt(response.data);
if (!parsedData.robots || parsedData.robots.length === 0) {
return { skipped: 'No robots.txt file present, unable to continue' };
}
return parsedData;
} else {
return {
statusCode: response.status,
body: JSON.stringify({ error: 'Failed to fetch robots.txt', statusCode: response.status }),
};
}
} catch (error) {
return {
statusCode: 500,
body: JSON.stringify({ error: `Error fetching robots.txt: ${error.message}` }),
};
}
};
export const handler = middleware(robotsHandler);
export default handler;
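A worked example of the parser above (input invented):

// parseRobotsTxt('User-agent: *\nDisallow: /admin\nAllow: /public')
// → { robots: [ { lbl: 'User-agent', val: '*' },
//               { lbl: 'Disallow', val: '/admin' },
//               { lbl: 'Allow', val: '/public' } ] }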

View File

@@ -1,63 +1,16 @@
import puppeteer from 'puppeteer-core';
import chromium from 'chrome-aws-lambda';
import middleware from './_common/middleware.js';
import { exec } from 'child_process';
import { promises as fs } from 'fs';
import path from 'path';
import pkg from 'uuid';
const { v4: uuidv4 } = pkg;
const puppeteer = require('puppeteer-core');
const chromium = require('chrome-aws-lambda');
// Helper function for direct chromium screenshot as fallback
const directChromiumScreenshot = async (url) => {
console.log(`[DIRECT-SCREENSHOT] Starting direct screenshot process for URL: ${url}`);
// Create a tmp filename
const tmpDir = '/tmp';
const uuid = uuidv4();
const screenshotPath = path.join(tmpDir, `screenshot-${uuid}.png`);
console.log(`[DIRECT-SCREENSHOT] Will save screenshot to: ${screenshotPath}`);
return new Promise((resolve, reject) => {
const chromePath = process.env.CHROME_PATH || '/usr/bin/chromium';
const command = `${chromePath} --headless --disable-gpu --no-sandbox --screenshot=${screenshotPath} "${url}"`;
console.log(`[DIRECT-SCREENSHOT] Executing command: ${command}`);
exec(command, async (error, stdout, stderr) => {
if (error) {
console.error(`[DIRECT-SCREENSHOT] Error executing Chromium: ${error.message}`);
return reject(error);
}
try {
// Read screenshot
const screenshotData = await fs.readFile(screenshotPath);
console.log(`[DIRECT-SCREENSHOT] Read ${screenshotData.length} bytes from screenshot file`);
// Convert to a base64 string
const base64Data = screenshotData.toString('base64');
// Clean up the temporary screenshot file
await fs.unlink(screenshotPath).catch(err =>
console.warn(`[DIRECT-SCREENSHOT] Failed to delete temp file: ${err.message}`)
);
resolve(base64Data);
} catch (readError) {
console.error(`[DIRECT-SCREENSHOT] Error reading screenshot: ${readError.message}`);
reject(readError);
}
});
});
};
const screenshotHandler = async (targetUrl) => {
console.log(`[SCREENSHOT] Request received for URL: ${targetUrl}`);
exports.handler = async (event, context, callback) => {
let browser = null;
let targetUrl = event.queryStringParameters.url;
if (!targetUrl) {
console.error('[SCREENSHOT] URL is missing from queryStringParameters');
throw new Error('URL is missing from queryStringParameters');
callback(null, {
statusCode: 400,
body: JSON.stringify({ error: 'URL is missing from queryStringParameters' }),
});
return;
}
if (!targetUrl.startsWith('http://') && !targetUrl.startsWith('https://')) {
@@ -67,45 +20,31 @@ const screenshotHandler = async (targetUrl) => {
try {
new URL(targetUrl);
} catch (error) {
console.error(`[SCREENSHOT] URL provided is invalid: ${targetUrl}`);
throw new Error('URL provided is invalid');
callback(null, {
statusCode: 400,
body: JSON.stringify({ error: 'URL provided is invalid' }),
});
return;
}
// First try direct Chromium
try {
console.log(`[SCREENSHOT] Using direct Chromium method for URL: ${targetUrl}`);
const base64Screenshot = await directChromiumScreenshot(targetUrl);
console.log(`[SCREENSHOT] Direct screenshot successful`);
return { image: base64Screenshot };
} catch (directError) {
console.error(`[SCREENSHOT] Direct screenshot method failed: ${directError.message}`);
console.log(`[SCREENSHOT] Falling back to puppeteer method...`);
}
// Fall back to Puppeteer
let browser = null;
try {
console.log(`[SCREENSHOT] Launching puppeteer browser`);
browser = await puppeteer.launch({
args: [...chromium.args, '--no-sandbox'], // Add --no-sandbox flag
args: chromium.args,
defaultViewport: { width: 800, height: 600 },
executablePath: process.env.CHROME_PATH || '/usr/bin/chromium',
headless: true,
executablePath: process.env.CHROME_PATH || await chromium.executablePath,
headless: chromium.headless,
ignoreHTTPSErrors: true,
ignoreDefaultArgs: ['--disable-extensions'],
});
console.log(`[SCREENSHOT] Creating new page`);
let page = await browser.newPage();
console.log(`[SCREENSHOT] Setting page preferences`);
await page.emulateMediaFeatures([{ name: 'prefers-color-scheme', value: 'dark' }]);
page.setDefaultNavigationTimeout(8000);
console.log(`[SCREENSHOT] Navigating to URL: ${targetUrl}`);
await page.goto(targetUrl, { waitUntil: 'domcontentloaded' });
console.log(`[SCREENSHOT] Checking if body element exists`);
await page.evaluate(() => {
const selector = 'body';
return new Promise((resolve, reject) => {
@@ -117,24 +56,25 @@ const screenshotHandler = async (targetUrl) => {
});
});
console.log(`[SCREENSHOT] Taking screenshot`);
const screenshotBuffer = await page.screenshot();
console.log(`[SCREENSHOT] Converting screenshot to base64`);
const base64Screenshot = screenshotBuffer.toString('base64');
console.log(`[SCREENSHOT] Screenshot complete, returning image`);
return { image: base64Screenshot };
const response = {
statusCode: 200,
body: JSON.stringify({ image: base64Screenshot }),
};
callback(null, response);
} catch (error) {
console.error(`[SCREENSHOT] Puppeteer screenshot failed: ${error.message}`);
throw error;
console.log(error);
callback(null, {
statusCode: 500,
body: JSON.stringify({ error: `An error occurred: ${error.message}` }),
});
} finally {
if (browser !== null) {
console.log(`[SCREENSHOT] Closing browser`);
await browser.close();
}
}
};
export const handler = middleware(screenshotHandler);
export default handler;
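Both the direct-Chromium path and the Puppeteer fallback ultimately yield a base64-encoded PNG. A hypothetical client that renders it (the /api/screenshot path is an assumption):

// Hypothetical client-side usage
const { image } = await (await fetch('/api/screenshot?url=https://example.com')).json();
document.querySelector('img').src = `data:image/png;base64,${image}`;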

View File

@@ -1,8 +1,5 @@
import { URL } from 'url';
import followRedirects from 'follow-redirects';
import middleware from './_common/middleware.js';
const { https } = followRedirects;
const { https } = require('follow-redirects');
const { URL } = require('url');
const SECURITY_TXT_PATHS = [
'/security.txt',
@@ -40,35 +37,57 @@ const isPgpSigned = (result) => {
return false;
};
const securityTxtHandler = async (urlParam) => {
exports.handler = async (event, context) => {
const urlParam = event.queryStringParameters.url;
if (!urlParam) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'Missing url parameter' })
};
}
let url;
try {
url = new URL(urlParam.includes('://') ? urlParam : 'https://' + urlParam);
} catch (error) {
throw new Error('Invalid URL format');
return {
statusCode: 500,
body: JSON.stringify({ error: 'Invalid URL format' }),
};
}
url.pathname = '';
for (let path of SECURITY_TXT_PATHS) {
try {
const result = await fetchSecurityTxt(url, path);
if (result && result.includes('<html')) return { isPresent: false };
if (result && result.includes('<html')) return {
statusCode: 200,
body: JSON.stringify({ isPresent: false }),
};
if (result) {
return {
statusCode: 200,
body: JSON.stringify({
isPresent: true,
foundIn: path,
content: result,
isPgpSigned: isPgpSigned(result),
fields: parseResult(result),
}),
};
}
} catch (error) {
throw new Error(error.message);
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
}
return { isPresent: false };
return {
statusCode: 404,
body: JSON.stringify({ isPresent: false }),
};
};
async function fetchSecurityTxt(baseURL, path) {
@@ -91,6 +110,3 @@ async function fetchSecurityTxt(baseURL, path) {
});
});
}
export const handler = middleware(securityTxtHandler);
export default handler;
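For context, a typical security.txt body that the (elided) parseResult would split into fields looks like the following; the field names are from RFC 9116, the values are invented:

// Contact: mailto:security@example.com
// Expires: 2024-12-31T23:59:59.000Z
// Preferred-Languages: en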

View File

@@ -1,10 +1,14 @@
import https from 'https';
import { performance, PerformanceObserver } from 'perf_hooks';
import middleware from './_common/middleware.js';
const https = require('https');
const { performance, PerformanceObserver } = require('perf_hooks');
exports.handler = async function(event, context) {
const { url } = event.queryStringParameters;
const statusHandler = async (url) => {
if (!url) {
throw new Error('You must provide a URL query parameter!');
return {
statusCode: 400,
body: JSON.stringify({ error: 'You must provide a URL query parameter!' }),
};
}
let dnsLookupTime;
@@ -39,7 +43,10 @@ const statusHandler = async (url) => {
});
if (responseCode < 200 || responseCode >= 400) {
throw new Error(`Received non-success response code: ${responseCode}`);
return {
statusCode: 200,
body: JSON.stringify({ error: `Received non-success response code: ${responseCode}` }),
};
}
performance.mark('B');
@@ -47,13 +54,16 @@ const statusHandler = async (url) => {
let responseTime = performance.now() - startTime;
obs.disconnect();
return { isUp: true, dnsLookupTime, responseTime, responseCode };
return {
statusCode: 200,
body: JSON.stringify({ isUp: true, dnsLookupTime, responseTime, responseCode }),
};
} catch (error) {
obs.disconnect();
throw error;
return {
statusCode: 200,
body: JSON.stringify({ error: `Error during operation: ${error.message}` }),
};
}
};
export const handler = middleware(statusHandler);
export default handler;
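The elided hunks above presumably bracket the DNS lookup between performance marks 'A' and 'B'. A minimal, self-contained sketch of that pattern (the dns import and hostname variable are assumptions):

const dns = require('dns');
const obs = new PerformanceObserver((items) => {
  dnsLookupTime = items.getEntries()[0].duration; // milliseconds
});
obs.observe({ entryTypes: ['measure'] });
performance.mark('A');
dns.lookup(hostname, () => {
  performance.mark('B');
  performance.measure('DNS lookup', 'A', 'B');
});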

View File

@@ -1,15 +1,22 @@
import https from 'https';
import middleware from './_common/middleware.js';
const https = require('https');
const featuresHandler = async (url) => {
exports.handler = async function (event, context) {
const { url } = event.queryStringParameters;
const apiKey = process.env.BUILT_WITH_API_KEY;
const errorResponse = (message, statusCode = 500) => {
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};
if (!url) {
throw new Error('URL query parameter is required');
return errorResponse('URL query parameter is required', 400);
}
if (!apiKey) {
throw new Error('Missing BuiltWith API key in environment variables');
return errorResponse('Missing BuiltWith API key in environment variables', 500);
}
const apiUrl = `https://api.builtwith.com/free1/api.json?KEY=${apiKey}&LOOKUP=${encodeURIComponent(url)}`;
@@ -39,11 +46,11 @@ const featuresHandler = async (url) => {
req.end();
});
return response;
return {
statusCode: 200,
body: response,
};
} catch (error) {
throw new Error(`Error making request: ${error.message}`);
return errorResponse(`Error making request: ${error.message}`);
}
};
export const handler = middleware(featuresHandler);
export default handler;

View File

@@ -1,21 +1,19 @@
import axios from 'axios';
import xml2js from 'xml2js';
import middleware from './_common/middleware.js';
const axios = require('axios');
const xml2js = require('xml2js');
const sitemapHandler = async (url) => {
exports.handler = async (event) => {
const url = event.queryStringParameters.url;
let sitemapUrl = `${url}/sitemap.xml`;
const hardTimeOut = 5000;
try {
// Try to fetch sitemap directly
let sitemapRes;
try {
sitemapRes = await axios.get(sitemapUrl, { timeout: hardTimeOut });
sitemapRes = await axios.get(sitemapUrl, { timeout: 5000 });
} catch (error) {
if (error.response && error.response.status === 404) {
// If sitemap not found, try to fetch it from robots.txt
const robotsRes = await axios.get(`${url}/robots.txt`, { timeout: hardTimeOut });
const robotsRes = await axios.get(`${url}/robots.txt`, { timeout: 5000 });
const robotsTxt = robotsRes.data.split('\n');
for (let line of robotsTxt) {
@@ -26,10 +24,13 @@ const sitemapHandler = async (url) => {
}
if (!sitemapUrl) {
return { skipped: 'No sitemap found' };
return {
statusCode: 404,
body: JSON.stringify({ skipped: 'No sitemap found' }),
};
}
sitemapRes = await axios.get(sitemapUrl, { timeout: hardTimeOut });
sitemapRes = await axios.get(sitemapUrl, { timeout: 5000 });
} else {
throw error; // If other error, throw it
}
@@ -38,16 +39,23 @@ const sitemapHandler = async (url) => {
const parser = new xml2js.Parser();
const sitemap = await parser.parseStringPromise(sitemapRes.data);
return sitemap;
return {
statusCode: 200,
body: JSON.stringify(sitemap),
};
} catch (error) {
// Fetching or parsing the sitemap failed
console.log(error.message);
if (error.code === 'ECONNABORTED') {
return { error: `Request timed-out after ${hardTimeOut}ms` };
return {
statusCode: 500,
body: JSON.stringify({ error: 'Request timed out' }),
};
} else {
return { error: error.message };
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
}
};
export const handler = middleware(sitemapHandler);
export default handler;
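The elided loop above presumably scans robots.txt for a Sitemap: directive; a hypothetical body for it (not shown in this diff):

// Hypothetical: extract the sitemap location from a robots.txt line
if (line.toLowerCase().startsWith('sitemap:')) {
  sitemapUrl = line.slice('sitemap:'.length).trim();
  break;
}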

View File

@@ -1,8 +1,8 @@
import axios from 'axios';
import cheerio from 'cheerio';
import middleware from './_common/middleware.js';
const axios = require('axios');
const cheerio = require('cheerio');
const socialTagsHandler = async (url) => {
exports.handler = async (event, context) => {
let url = event.queryStringParameters.url;
// Check if url includes protocol
if (!url.startsWith('http://') && !url.startsWith('https://')) {
@@ -49,9 +49,16 @@ const socialTagsHandler = async (url) => {
};
if (Object.keys(metadata).length === 0) {
return { skipped: 'No metadata found' };
return {
statusCode: 200,
body: JSON.stringify({ skipped: 'No metadata found' }),
};
}
return metadata;
return {
statusCode: 200,
body: JSON.stringify(metadata),
};
} catch (error) {
return {
statusCode: 500,
@@ -59,6 +66,3 @@ const socialTagsHandler = async (url) => {
};
}
};
export const handler = middleware(socialTagsHandler);
export default handler;

50
api/ssl-check.js Normal file
View File

@@ -0,0 +1,50 @@
const https = require('https');
exports.handler = async function (event, context) {
const { url } = event.queryStringParameters;
const errorResponse = (message, statusCode = 500) => {
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};
if (!url) {
return errorResponse('URL query parameter is required', 400);
}
try {
const response = await new Promise((resolve, reject) => {
const req = https.request(url, res => {
// Check if the SSL handshake was authorized
if (!res.socket.authorized) {
resolve(errorResponse(`SSL handshake not authorized. Reason: ${res.socket.authorizationError}`));
} else {
let cert = res.socket.getPeerCertificate(true);
if (!cert || Object.keys(cert).length === 0) {
resolve(errorResponse("No certificate presented by the server."));
} else {
// omit the raw and issuerCertificate fields
const { raw, issuerCertificate, ...certWithoutRaw } = cert;
resolve({
statusCode: 200,
body: JSON.stringify(certWithoutRaw),
});
}
}
});
req.on('error', error => {
resolve(errorResponse(`Error fetching site certificate: ${error.message}`));
});
req.end();
});
return response;
} catch (error) {
return errorResponse(`Unexpected error occurred: ${error.message}`);
}
};
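On success the body is Node's getPeerCertificate output minus the raw and issuerCertificate fields. A client could, for example, derive days until expiry from the certificate's valid_to field:

// Illustrative post-processing of a successful response
const cert = JSON.parse(response.body);
const msLeft = new Date(cert.valid_to) - Date.now();
console.log(`Certificate expires in ${Math.round(msLeft / 86400000)} days`);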

View File

@@ -1,44 +0,0 @@
import tls from 'tls';
import middleware from './_common/middleware.js';
const sslHandler = async (urlString) => {
try {
const parsedUrl = new URL(urlString);
const options = {
host: parsedUrl.hostname,
port: parsedUrl.port || 443,
servername: parsedUrl.hostname,
rejectUnauthorized: false,
};
return new Promise((resolve, reject) => {
const socket = tls.connect(options, () => {
if (!socket.authorized) {
return reject(new Error(`SSL handshake not authorized. Reason: ${socket.authorizationError}`));
}
const cert = socket.getPeerCertificate();
if (!cert || Object.keys(cert).length === 0) {
return reject(new Error(`
No certificate presented by the server.\n
The server is possibly not using SNI (Server Name Indication) to identify itself, and you are connecting to a hostname-aliased IP address.
Or it may be due to an invalid SSL certificate, or an incomplete SSL handshake at the time the cert is being read.`));
}
const { raw, issuerCertificate, ...certWithoutRaw } = cert;
resolve(certWithoutRaw);
socket.end();
});
socket.on('error', (error) => {
reject(new Error(`Error fetching site certificate: ${error.message}`));
});
});
} catch (error) {
throw new Error(error.message);
}
};
export const handler = middleware(sslHandler);
export default handler;

View File

@@ -1,31 +1,69 @@
import Wappalyzer from 'wappalyzer';
import middleware from './_common/middleware.js';
const Wappalyzer = require('wappalyzer');
const analyze = async (url) => {
const techStackHandler = async (url) => {
const options = {};
const wappalyzer = new Wappalyzer(options);
return (async function() {
try {
await wappalyzer.init();
const headers = {};
await wappalyzer.init()
const headers = {}
const storage = {
local: {},
session: {},
};
const site = await wappalyzer.open(url, headers, storage);
const results = await site.analyze();
if (!results.technologies || results.technologies.length === 0) {
throw new Error('Unable to find any technologies for site');
}
const site = await wappalyzer.open(url, headers, storage)
const results = await site.analyze()
return results;
} catch (error) {
throw new Error(error.message);
return error;
} finally {
await wappalyzer.destroy();
await wappalyzer.destroy()
}
})();
}
exports.handler = async (event, context, callback) => {
// Validate URL parameter
if (!event.queryStringParameters || !event.queryStringParameters.url) {
return {
statusCode: 400,
body: JSON.stringify({ error: 'Missing url parameter' }),
};
}
// Get URL from param
let url = event.queryStringParameters.url;
if (!/^https?:\/\//i.test(url)) {
url = 'http://' + url;
}
try {
return analyze(url).then(
(results) => {
if (!results.technologies || results.technologies.length === 0) {
return {
statusCode: 200,
body: JSON.stringify({ error: 'Unable to find any technologies for site' }),
};
}
return {
statusCode: 200,
body: JSON.stringify(results),
}
}
)
.catch((error) => {
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
});
} catch (error) {
return {
statusCode: 500,
body: JSON.stringify({ error: error.message }),
};
}
};
export const handler = middleware(techStackHandler);
export default handler;

View File

@@ -1,103 +0,0 @@
import axios from 'axios';
import xml2js from 'xml2js';
import middleware from './_common/middleware.js';
const getGoogleSafeBrowsingResult = async (url) => {
try {
const apiKey = process.env.GOOGLE_CLOUD_API_KEY;
if (!apiKey) {
return { error: 'GOOGLE_CLOUD_API_KEY is required for the Google Safe Browsing check' };
}
const apiEndpoint = `https://safebrowsing.googleapis.com/v4/threatMatches:find?key=${apiKey}`;
const requestBody = {
threatInfo: {
threatTypes: [
'MALWARE', 'SOCIAL_ENGINEERING', 'UNWANTED_SOFTWARE', 'POTENTIALLY_HARMFUL_APPLICATION', 'API_ABUSE'
],
platformTypes: ["ANY_PLATFORM"],
threatEntryTypes: ["URL"],
threatEntries: [{ url }]
}
};
const response = await axios.post(apiEndpoint, requestBody);
if (response.data && response.data.matches) {
return {
unsafe: true,
details: response.data.matches
};
} else {
return { unsafe: false };
}
} catch (error) {
return { error: `Request failed: ${error.message}` };
}
};
const getUrlHausResult = async (url) => {
let domain = new URL(url).hostname;
return await axios({
method: 'post',
url: 'https://urlhaus-api.abuse.ch/v1/host/',
headers: {
'Content-Type': 'application/x-www-form-urlencoded'
},
data: `host=${domain}`
})
.then((x) => x.data)
.catch((e) => ({ error: `Request to URLHaus failed, ${e.message}`}));
};
const getPhishTankResult = async (url) => {
try {
const encodedUrl = Buffer.from(url).toString('base64');
const endpoint = `https://checkurl.phishtank.com/checkurl/?url=${encodedUrl}`;
const headers = {
'User-Agent': 'phishtank/web-check',
};
const response = await axios.post(endpoint, null, { headers, timeout: 3000 });
const parsed = await xml2js.parseStringPromise(response.data, { explicitArray: false });
return parsed.response.results;
} catch (error) {
return { error: `Request to PhishTank failed: ${error.message}` };
}
}
const getCloudmersiveResult = async (url) => {
const apiKey = process.env.CLOUDMERSIVE_API_KEY;
if (!apiKey) {
return { error: 'CLOUDMERSIVE_API_KEY is required for the Cloudmersive check' };
}
try {
const endpoint = 'https://api.cloudmersive.com/virus/scan/website';
const headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Apikey': apiKey,
};
const data = `Url=${encodeURIComponent(url)}`;
const response = await axios.post(endpoint, data, { headers });
return response.data;
} catch (error) {
return { error: `Request to Cloudmersive failed: ${error.message}` };
}
};
const threatsHandler = async (url) => {
try {
const urlHaus = await getUrlHausResult(url);
const phishTank = await getPhishTankResult(url);
const cloudmersive = await getCloudmersiveResult(url);
const safeBrowsing = await getGoogleSafeBrowsingResult(url);
if (urlHaus.error && phishTank.error && cloudmersive.error && safeBrowsing.error) {
throw new Error(`All requests failed - ${urlHaus.error} ${phishTank.error} ${cloudmersive.error} ${safeBrowsing.error}`);
}
return JSON.stringify({ urlHaus, phishTank, cloudmersive, safeBrowsing });
} catch (error) {
throw new Error(error.message);
}
};
export const handler = middleware(threatsHandler);
export default handler;
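The four lookups in threatsHandler run sequentially even though they are independent. A possible refactor (not in this diff) runs them concurrently:

// Concurrent variant of the same four checks
const [urlHaus, phishTank, cloudmersive, safeBrowsing] = await Promise.all([
  getUrlHausResult(url),
  getPhishTankResult(url),
  getCloudmersiveResult(url),
  getGoogleSafeBrowsingResult(url),
]);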

View File

@@ -1,29 +0,0 @@
import axios from 'axios';
import middleware from './_common/middleware.js';
const MOZILLA_TLS_OBSERVATORY_API = 'https://tls-observatory.services.mozilla.com/api/v1';
const tlsHandler = async (url) => {
try {
const domain = new URL(url).hostname;
const scanResponse = await axios.post(`${MOZILLA_TLS_OBSERVATORY_API}/scan?target=${domain}`);
const scanId = scanResponse.data.scan_id;
if (typeof scanId !== 'number') {
return {
statusCode: 500,
body: { error: 'Failed to get scan_id from TLS Observatory' },
};
}
const resultResponse = await axios.get(`${MOZILLA_TLS_OBSERVATORY_API}/results?id=${scanId}`);
return {
statusCode: 200,
body: resultResponse.data,
};
} catch (error) {
return { error: error.message };
}
};
export const handler = middleware(tlsHandler);
export default handler;

View File

@@ -1,8 +1,14 @@
import url from 'url';
import traceroute from 'traceroute';
import middleware from './_common/middleware.js';
const traceroute = require('traceroute');
const url = require('url');
exports.handler = async function(event, context) {
const urlString = event.queryStringParameters.url;
try {
if (!urlString) {
throw new Error('URL parameter is missing!');
}
const traceRouteHandler = async (urlString, context) => {
// Parse the URL and get the hostname
const urlObject = url.parse(urlString);
const host = urlObject.hostname;
@@ -20,13 +26,30 @@ const traceRouteHandler = async (urlString, context) => {
resolve(hops);
}
});
// If the Lambda's remaining execution time is below 8.8 seconds, reject early
if (context.getRemainingTimeInMillis() < 8800) {
reject(new Error('Lambda is about to timeout'));
}
});
return {
statusCode: 200,
body: JSON.stringify({
message: "Traceroute completed!",
result,
}),
};
};
} catch (err) {
const message = err.code === 'ENOENT'
? 'Traceroute command is not installed on the host.'
: err.message;
export const handler = middleware(traceRouteHandler);
export default handler;
return {
statusCode: 500,
body: JSON.stringify({
error: message,
}),
};
}
};
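Note that the remaining-time check above runs once, synchronously, when the promise is constructed, so it cannot pre-empt a traceroute that overruns later. An actual watchdog would need a timer inside the same executor, e.g. (sketch, not in this diff):

// Hypothetical watchdog: reject shortly before the Lambda deadline
const timer = setTimeout(
  () => reject(new Error('Lambda is about to timeout')),
  Math.max(context.getRemainingTimeInMillis() - 8800, 0)
);
// ...and clearTimeout(timer) once the traceroute resolves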

View File

@@ -1,7 +1,14 @@
import net from 'net';
import psl from 'psl';
import axios from 'axios';
import middleware from './_common/middleware.js';
const net = require('net');
const psl = require('psl');
// const { URL } = require('url');
const errorResponse = (message, statusCode = 444) => {
return {
statusCode: statusCode,
body: JSON.stringify({ error: message }),
};
};
const getBaseDomain = (url) => {
let protocol = '';
@@ -15,7 +22,55 @@ const getBaseDomain = (url) => {
return protocol + parsed.domain;
};
exports.handler = async function(event, context) {
let url = event.queryStringParameters.url;
if (!url) {
return errorResponse('URL query parameter is required.', 400);
}
if (!url.startsWith('http://') && !url.startsWith('https://')) {
url = 'http://' + url;
}
let hostname;
try {
hostname = getBaseDomain(new URL(url).hostname);
} catch (error) {
return errorResponse(`Unable to parse URL: ${error}`, 400);
}
return new Promise((resolve, reject) => {
const client = net.createConnection({ port: 43, host: 'whois.internic.net' }, () => {
client.write(hostname + '\r\n');
});
let data = '';
client.on('data', (chunk) => {
data += chunk;
});
client.on('end', () => {
try {
const parsedData = parseWhoisData(data);
resolve({
statusCode: 200,
body: JSON.stringify(parsedData),
});
} catch (error) {
resolve(errorResponse(error.message));
}
});
client.on('error', (err) => {
resolve(errorResponse(err.message, 500));
});
});
};
const parseWhoisData = (data) => {
if (data.includes('No match for')) {
return { error: 'No matches found for domain in internic database'};
}
@@ -45,67 +100,3 @@ const parseWhoisData = (data) => {
return parsedData;
};
const fetchFromInternic = async (hostname) => {
return new Promise((resolve, reject) => {
const client = net.createConnection({ port: 43, host: 'whois.internic.net' }, () => {
client.write(hostname + '\r\n');
});
let data = '';
client.on('data', (chunk) => {
data += chunk;
});
client.on('end', () => {
try {
const parsedData = parseWhoisData(data);
resolve(parsedData);
} catch (error) {
reject(error);
}
});
client.on('error', (err) => {
reject(err);
});
});
};
const fetchFromMyAPI = async (hostname) => {
try {
const response = await axios.post('https://whois-api-zeta.vercel.app/', {
domain: hostname
});
return response.data;
} catch (error) {
console.error('Error fetching data from your API:', error.message);
return null;
}
};
const whoisHandler = async (url) => {
if (!url.startsWith('http://') && !url.startsWith('https://')) {
url = 'http://' + url;
}
let hostname;
try {
hostname = getBaseDomain(new URL(url).hostname);
} catch (error) {
throw new Error(`Unable to parse URL: ${error}`);
}
const [internicData, whoisData] = await Promise.all([
fetchFromInternic(hostname),
fetchFromMyAPI(hostname)
]);
return {
internicData,
whoisData
};
};
export const handler = middleware(whoisHandler);
export default handler;

Some files were not shown because too many files have changed in this diff.