# robots.txt — Noura Almaazmi (almaazmilawyers.com)

# Allow all standard search crawlers
User-agent: *
Allow: /
Disallow: /admin/
Disallow: /private/

# AI / LLM crawlers — explicitly allowed (GEO/AEO strategy)
# These are the user agents that the major AI engines use to index content for citation.
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: OAI-SearchBot
Allow: /

User-agent: anthropic-ai
Allow: /

User-agent: ClaudeBot
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: PerplexityBot
Allow: /

User-agent: Perplexity-User
Allow: /

User-agent: Google-Extended
Allow: /

User-agent: CCBot
Allow: /

User-agent: cohere-ai
Allow: /

User-agent: Bytespider
Allow: /

User-agent: Applebot-Extended
Allow: /

User-agent: DuckAssistBot
Allow: /

User-agent: MistralAI-User
Allow: /

User-agent: meta-externalagent
Allow: /

# Sitemaps
Sitemap: https://almaazmilawyers.com/sitemap.xml

# AI-friendly markdown index
# (Not a robots.txt directive, but kept here for discoverability:
# see https://almaazmilawyers.com/llms.txt and /llms-full.txt)