#include "settings/Settings.h"
#include "File.h"
-#include <map>
#include <vector>
#include <climits>
#include "SpecialProtocol.h"
#include "utils/CharsetConverter.h"
#include "utils/log.h"
-#include "utils/TimeUtils.h"
-using namespace std;
using namespace XFILE;
using namespace XCURL;
-using namespace XbmcThreads;
#define XMIN(a,b) ((a)<(b)?(a):(b))
#define FITS_INT(a) (((a) <= INT_MAX) && ((a) >= INT_MIN))
CURLPROXY_SOCKS5_HOSTNAME,
};
-static CCriticalSection s_hostMapLock;
-static map<string, EndTime> s_hostLastAccessTime; // used to rate-limit queries by host/domain
-
// curl calls this routine to debug
extern "C" int debug_callback(CURL_HANDLE *handle, curl_infotype info, char *output, size_t size, void *data)
{
CURL url2(url);
ParseAndCorrectUrl(url2);
- // Rate-limit queries per domain to 1 per 2s
- CSingleLock lock(s_hostMapLock);
- map<string, EndTime>::iterator it = s_hostLastAccessTime.find(url2.GetHostName());
- if (it != s_hostLastAccessTime.end())
- {
- if (!it->second.IsTimePast()) {
- CLog::Log(LOGDEBUG, "CurlFile::Open(%p) rate limiting queries to '%s' to avoid saturating, waiting %dmsec", (void*)this, it->first.c_str(), it->second.MillisLeft());
- Sleep(it->second.MillisLeft());
- }
- s_hostLastAccessTime.erase(it);
- }
- s_hostLastAccessTime.insert(make_pair(url2.GetHostName(), EndTime(1500)));
- lock.Leave();
-
CLog::Log(LOGDEBUG, "CurlFile::Open(%p) %s", (void*)this, m_url.c_str());
ASSERT(!(!m_state->m_easyHandle ^ !m_state->m_multiHandle));
}
if (!musicBrainzURL.m_url.empty())
{
+ Sleep(2000); // MusicBrainz rate-limits queries to 1 p.s - once we hit the rate-limiter
+ // they start serving up the 'you hit the rate-limiter' page fast - meaning
+ // we will never get below the rate-limit threshold again in a specific run.
+                          // This helps us to avoid the rate-limiter as far as possible.
CLog::Log(LOGDEBUG,"-- nfo-scraper: %s",vecScrapers[i]->Name().c_str());
CLog::Log(LOGDEBUG,"-- nfo url: %s", musicBrainzURL.m_url[0].m_url.c_str());
musicInfoScraper.SetScraperInfo(vecScrapers[i]);