# If the Joomla site is installed within a folder such as at
# e.g. www.example.com/joomla/ the robots.txt file MUST be
# moved to the site root at e.g. www.example.com/robots.txt
# AND the joomla folder name MUST be prefixed to the disallowed
# path, e.g. the Disallow rule for the /administrator/ folder
# MUST be changed to read Disallow: /joomla/administrator/
#
# For more information about the robots.txt standard, see:
# http://www.robotstxt.org/orig.html
#
# For syntax checking, see:
# http://tool.motoricerca.info/robots-checker.phtml

Sitemap: https://vidilab.com/index.php?option=com_xmap&view=xml&tmpl=component&id=1
Sitemap: https://vidilab.com/index.php?option=com_xmap&view=xml

User-agent: Amazonbot
# Crawl-delay: 20
Allow: /

# ClaudeBot
User-agent: ClaudeBot
# Crawl-delay: 50
Allow: /

# Bingbot (Microsoft/Bing)
User-agent: Bingbot
# Crawl-delay: 5
Allow: /

# Yandex (Russia)
User-agent: Yandex
# Crawl-delay: 50
Allow: /

# Baidu (China)
User-agent: Baiduspider
# Crawl-delay: 50
Allow: /

# Sogou (China)
User-agent: Sogou
# Crawl-delay: 50
Allow: /

# 360Spider (China)
User-agent: 360Spider
# Crawl-delay: 50
Allow: /

# MJ12bot (web research)
User-agent: MJ12bot
# Crawl-delay: 50
Allow: /

# Googlebot
User-agent: Googlebot
# Crawl-delay: 2
Allow: /

# MoodleBot
User-agent: MoodleBot
# Crawl-delay: 20
Allow: /

# Mediatoolkitbot
User-agent: Mediatoolkitbot
# Crawl-delay: 20
Allow: /

# Applebot
User-agent: Applebot
# Crawl-delay: 10
Allow: /

# trendictionbot
User-agent: trendictionbot
# Crawl-delay: 20
Allow: /

# DuckDuckBot
User-agent: DuckDuckBot
# Crawl-delay: 5
Allow: /

# Neticle Crawler
User-agent: Neticle Crawler
# Crawl-delay: 20
Allow: /

# Owler
User-agent: Owler
# Crawl-delay: 30
Allow: /

# PressEngineBot
User-agent: PressEngineBot
# Crawl-delay: 20
Allow: /

# SentiBot
User-agent: SentiBot
# Crawl-delay: 20
Allow: /

# fiperbot
User-agent: fiperbot
# Crawl-delay: 30
Allow: /

# Bytespider
User-agent: Bytespider
# Crawl-delay: 20
Allow: /

# RSSingBot
User-agent: RSSingBot
# Crawl-delay: 30
Allow: /

# newspaper
User-agent: newspaper
# Crawl-delay: 20
Allow: /

# Twitterbot
User-agent: Twitterbot
# Crawl-delay: 10
Allow: /

# Barkrowler
User-agent: Barkrowler
# Crawl-delay: 20
Allow: /

# meta-externalagent
User-agent: meta-externalagent
# Crawl-delay: 20
Allow: /

User-agent: AhrefsBot
# Crawl-delay: 30
Allow: /

User-agent: ArchiveBot
# Crawl-delay: 30
Allow: /

User-agent: SemrushBot
# Crawl-delay: 30
Allow: /

User-agent: PetalBot
# Crawl-delay: 30
Allow: /

User-agent: BLEXBot
# Crawl-delay: 30
Allow: /

User-agent: DotBot
# Crawl-delay: 30
Allow: /

User-agent: GPTBot
# Crawl-delay: 20
Allow: /

User-agent: SiteAuditBot
# Crawl-delay: 20
Allow: /

User-agent: SemrushBot-BA
Disallow: /

User-agent: SemrushBot-SI
Disallow: /

User-agent: SemrushBot-SWA
Disallow: /

User-agent: SemrushBot-CT
Disallow: /

User-agent: SemrushBot-BM
Disallow: /

User-agent: SplitSignalBot
Disallow: /

User-agent: SemrushBot-COUB
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: *
Disallow: /administrator/
Disallow: /bin/
Disallow: /cache/
Disallow: /cli/
Disallow: /components/
Disallow: /includes/
Disallow: /installation/
Disallow: /language/
Disallow: /layouts/
Disallow: /libraries/
Disallow: /logs/
Disallow: /modules/
Disallow: /plugins/
Disallow: /tmp/
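
# Illustrative sketch only (kept commented out, not active rules):
# as noted in the header above, if this site were installed in a
# subfolder such as /joomla/, this file would move to the site root
# and the rules under "User-agent: *" would need the folder prefix,
# for example:
#
# User-agent: *
# Disallow: /joomla/administrator/
# Disallow: /joomla/cache/
# Disallow: /joomla/tmp/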