docs: remove proof of work branding (#763)

* docs(index): start cleanup, remove proof of work from core branding

Signed-off-by: Xe Iaso <me@xeiaso.net>

* docs(index): rewrite copy, add CELPHASE illustrations

Signed-off-by: Xe Iaso <me@xeiaso.net>

* chore: spelling

Signed-off-by: Xe Iaso <me@xeiaso.net>

---------

Signed-off-by: Xe Iaso <me@xeiaso.net>
This commit is contained in:
Xe Iaso 2025-07-06 02:34:52 +00:00 committed by GitHub
parent 6fa5b8e4e0
commit c2f46907a1
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 37 additions and 44 deletions

View file

@@ -5,49 +5,50 @@ import styles from "./styles.module.css";
type FeatureItem = {
title: string;
Svg: React.ComponentType<React.ComponentProps<"svg">>;
imageURL: string;
description: ReactNode;
};
const FeatureList: FeatureItem[] = [
{
title: "Easy to Use",
Svg: require("@site/static/img/undraw_docusaurus_mountain.svg").default,
imageURL: require("@site/static/img/anubis/happy.webp").default,
description: (
<>
Anubis is easy to set up, lightweight, and helps get rid of the lowest
hanging fruit so you can sleep at night.
Anubis sits in the background and weighs the risk of incoming requests.
If it asks a client to complete a challenge, no user interaction is
required.
</>
),
},
{
title: "Lightweight",
Svg: require("@site/static/img/undraw_docusaurus_tree.svg").default,
imageURL: require("@site/static/img/anubis/pensive.webp").default,
description: (
<>
Anubis is efficient and as lightweight as possible, blocking the worst
of the bots on the internet and makes it easy to protect what you host
online.
Anubis is so lightweight you'll forget it's there until you look at your
hosting bill. On average it uses less than 128 MB of ram.
</>
),
},
{
title: "Multi-threaded",
Svg: require("@site/static/img/undraw_docusaurus_react.svg").default,
title: "Block the scrapers",
imageURL: require("@site/static/img/anubis/reject.webp").default,
description: (
<>
Anubis uses a multi-threaded proof of work check to ensure that users
browsers are up to date and support modern standards.
Anubis uses a combination of heuristics to identify and block bots
before they take your website down. You can customize the rules with{" "}
<a href="/docs/admin/policies">your own policies</a>.
</>
),
},
];
function Feature({ title, Svg, description }: FeatureItem) {
function Feature({ title, description, imageURL }: FeatureItem) {
return (
<div className={clsx("col col--4")}>
<div className="text--center">
<Svg className={styles.featureSvg} role="img" />
<img src={imageURL} className={styles.featureSvg} role="img" />
</div>
<div className="text--center padding-horiz--md">
<Heading as="h3">{title}</Heading>

View file

@@ -31,19 +31,12 @@ export default function Home(): ReactNode {
const { siteConfig } = useDocusaurusContext();
return (
<Layout
title={`Anubis: self hostable scraper defense software`}
description="Weigh the soul of incoming HTTP requests using proof-of-work to stop AI crawlers"
title={`Anubis: Web AI Firewall Utility`}
description="Weigh the soul of incoming HTTP requests to protect your website!"
>
<HomepageHeader />
<main>
<HomepageFeatures />
<center>
<p>
This is all placeholder text. It will be fixed. Give me time. I am
one person and my project has unexpectedly gone viral.
</p>
</center>
</main>
</Layout>
);