author     horchi <vdr@jwendel.de>    2017-03-16 19:03:57 +0100
committer  horchi <vdr@jwendel.de>    2017-03-16 19:03:57 +0100
commit     62627661d3fc79cdf0a38fc4708a1e88c7285e7a (patch)
tree       36ecb8bda35d998f7d23ee13e87720b9dcb109be
parent     21057136ae42ea59022d87f3e5d83c55c4cca074 (diff)
download   vdr-epg-daemon-62627661d3fc79cdf0a38fc4708a1e88c7285e7a.tar.gz
           vdr-epg-daemon-62627661d3fc79cdf0a38fc4708a1e88c7285e7a.tar.bz2
2017-03-16: version 1.1.108 (horchi)
 - change: Improved error message on failed downloads
 - added: sleep 1 second every 50 scraper downloads due to server limit
-rw-r--r--  HISTORY.h   | 13
-rw-r--r--  lib/curl.c  | 95
-rw-r--r--  update.c    | 12

3 files changed, 75 insertions, 45 deletions
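The first item in the commit message is easiest to see in isolation. Below is a minimal standalone sketch of the error-reporting pattern this commit applies twice in lib/curl.c: on a failed curl_easy_perform(), query the HTTP status via CURLINFO_RESPONSE_CODE and log it together with curl_easy_strerror() and the offending URL. fetchUrl() and the fprintf() logging are illustrative stand-ins, not project code; the daemon uses its own tell() logger, and the handle is assumed to come from curl_easy_init().

#include <cstdio>
#include <curl/curl.h>

// Sketch only: fetch a URL and, on failure, report the curl error, its
// numeric code, the HTTP status and the URL, mirroring the message format
// this commit adds. Returns 1 on success, 0 on failure, following the
// convention of cCurl::GetUrl().
int fetchUrl(CURL* handle, const char* url)
{
   curl_easy_setopt(handle, CURLOPT_URL, url);
   curl_easy_setopt(handle, CURLOPT_HTTPGET, 1L);
   curl_easy_setopt(handle, CURLOPT_FAILONERROR, 1L);   // treat HTTP >= 400 as an error

   CURLcode res = curl_easy_perform(handle);

   if (res != CURLE_OK)
   {
      long httpCode = 0;

      // the response code is available even when CURLOPT_FAILONERROR fired
      curl_easy_getinfo(handle, CURLINFO_RESPONSE_CODE, &httpCode);
      fprintf(stderr, "Error: Getting URL failed; %s (%d); http code was (%ld) [%s]\n",
              curl_easy_strerror(res), (int)res, httpCode, url);

      return 0;
   }

   return 1;
}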
diff --git a/HISTORY.h b/HISTORY.h
index f3198de..4d77c45 100644
--- a/HISTORY.h
+++ b/HISTORY.h
@@ -4,8 +4,8 @@
* -----------------------------------
*/
-#define _VERSION "1.1.107"
-#define VERSION_DATE "09.03.2017"
+#define _VERSION "1.1.108"
+#define VERSION_DATE "16.03.2017"
#define DB_API 4
#ifdef GIT_REV
@@ -16,9 +16,14 @@
/*
* ------------------------------------
-2017-03-09: version 1.1.106 (rechner)
+
+2017-03-16: version 1.1.108 (horchi)
+ - change: Improved error message on failed downloads
+ - added: sleep 1 second every 50 scraper downloads due to server limit
+
+2017-03-09: version 1.1.107 (rechner)
- bugfix: default namingmode on searchtimer was shown if VDR was stored
-
+
2017-03-09: version 1.1.106 (horchi)
- bugfix: Fixed missing INIT_AFTER in Make.config
diff --git a/lib/curl.c b/lib/curl.c
index ab5cf82..c8dda08 100644
--- a/lib/curl.c
+++ b/lib/curl.c
@@ -137,58 +137,70 @@ int cCurl::exit()
// Get Url
//***************************************************************************
-int cCurl::GetUrl(const char *url, std::string *sOutput, const std::string &sReferer)
+int cCurl::GetUrl(const char* url, std::string* sOutput, const std::string& sReferer)
{
- CURLcode res;
+ CURLcode res;
- init();
+ init();
- curl_easy_setopt(handle, CURLOPT_URL, url); // Set the URL to get
+ curl_easy_setopt(handle, CURLOPT_URL, url); // Set the URL to get
- if (sReferer != "")
- curl_easy_setopt(handle, CURLOPT_REFERER, sReferer.c_str());
+ if (sReferer != "")
+ curl_easy_setopt(handle, CURLOPT_REFERER, sReferer.c_str());
- curl_easy_setopt(handle, CURLOPT_HTTPGET, yes);
- curl_easy_setopt(handle, CURLOPT_FAILONERROR, yes);
- curl_easy_setopt(handle, CURLOPT_WRITEDATA, 0); // Set option to write to string
- sBuf = "";
+ curl_easy_setopt(handle, CURLOPT_HTTPGET, yes);
+ curl_easy_setopt(handle, CURLOPT_FAILONERROR, yes);
+ curl_easy_setopt(handle, CURLOPT_WRITEDATA, 0); // Set option to write to string
+ sBuf = "";
- res = curl_easy_perform(handle);
+ if ((res = curl_easy_perform(handle)) != CURLE_OK)
+ {
+ long httpCode = 0;
- if (res != CURLE_OK)
- {
- *sOutput = "";
- return 0;
- }
+ curl_easy_getinfo(handle, CURLINFO_RESPONSE_CODE, &httpCode);
+ tell(1, "Error: Getting URL failed; %s (%d); http code was (%ld) [%s]",
+ curl_easy_strerror(res), res, httpCode, url);
- *sOutput = sBuf;
- return 1;
+ *sOutput = "";
+
+ return 0;
+ }
+
+ *sOutput = sBuf;
+
+ return 1;
}
-int cCurl::GetUrlFile(const char *url, const char *filename, const std::string &sReferer)
+int cCurl::GetUrlFile(const char* url, const char* filename, const std::string& sReferer)
{
- int nRet = 0;
- init();
+ int nRet = 0;
- // Point the output to a file
+ init();
- FILE *fp;
- if ((fp = fopen(filename, "w")) == NULL)
- return 0;
+ // Point the output to a file
- curl_easy_setopt(handle, CURLOPT_WRITEDATA, fp); // Set option to write to file
- curl_easy_setopt(handle, CURLOPT_URL, url); // Set the URL to get
- if (sReferer != "")
- curl_easy_setopt(handle, CURLOPT_REFERER, sReferer.c_str());
- curl_easy_setopt(handle, CURLOPT_HTTPGET, yes);
- if (curl_easy_perform(handle) == 0)
- nRet = 1;
- else
- nRet = 0;
-
- curl_easy_setopt(handle, CURLOPT_WRITEDATA, NULL); // Set option back to default (string)
- fclose(fp);
- return nRet;
+ FILE *fp;
+
+ if ((fp = fopen(filename, "w")) == NULL)
+ return 0;
+
+ curl_easy_setopt(handle, CURLOPT_WRITEDATA, fp); // Set option to write to file
+ curl_easy_setopt(handle, CURLOPT_URL, url); // Set the URL to get
+
+ if (sReferer != "")
+ curl_easy_setopt(handle, CURLOPT_REFERER, sReferer.c_str());
+
+ curl_easy_setopt(handle, CURLOPT_HTTPGET, yes);
+
+ if (curl_easy_perform(handle) == 0)
+ nRet = 1;
+ else
+ nRet = 0;
+
+ curl_easy_setopt(handle, CURLOPT_WRITEDATA, NULL); // Set option back to default (string)
+ fclose(fp);
+
+ return nRet;
}
int cCurl::PostUrl(const char *url, const std::string &sPost, std::string *sOutput, const std::string &sReferer)
@@ -433,8 +445,13 @@ int cCurl::downloadFile(const char* url, int& size, MemoryStruct* data, int time
if ((res = curl_easy_perform(handle)) != 0)
{
+ long httpCode = 0;
+
+ curl_easy_getinfo(handle, CURLINFO_RESPONSE_CODE, &httpCode);
+ tell(1, "Error: Download failed; %s (%d); http code was (%ld) [%s]",
+ curl_easy_strerror(res), res, httpCode, url);
+
data->clear();
- tell(1, "Error, download failed; %s (%d)", curl_easy_strerror(res), res);
return fail;
}
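For context on the GetUrlFile() rework above: it relies on libcurl's default write callback, which treats CURLOPT_WRITEDATA as a FILE* and fwrite()s the received body into it. A self-contained sketch of that mechanism follows; saveUrl() is a hypothetical helper, not the project's API, and the handle is again assumed to come from curl_easy_init().

#include <cstdio>
#include <curl/curl.h>

// Sketch only: with no CURLOPT_WRITEFUNCTION set, libcurl's default write
// callback fwrite()s the response body to whatever FILE* is passed as
// CURLOPT_WRITEDATA, which is the mechanism GetUrlFile() builds on.
int saveUrl(CURL* handle, const char* url, const char* filename)
{
   FILE* fp = fopen(filename, "w");

   if (!fp)
      return 0;

   curl_easy_setopt(handle, CURLOPT_URL, url);
   curl_easy_setopt(handle, CURLOPT_WRITEDATA, fp);
   curl_easy_setopt(handle, CURLOPT_HTTPGET, 1L);

   int ok = curl_easy_perform(handle) == CURLE_OK;

   curl_easy_setopt(handle, CURLOPT_WRITEDATA, (FILE*)0);  // reset before the handle is reused
   fclose(fp);

   return ok;
}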
diff --git a/update.c b/update.c
index 7c6535b..c3db428 100644
--- a/update.c
+++ b/update.c
@@ -1340,6 +1340,8 @@ void cEpgd::loop()
scheduleAutoUpdate(EpgdConfig.checkInitial ? 10 : 0);
+ scrapNewEvents(); // debug scraper at start
+
while (!doShutDown())
{
setState(Es::esStandby);
@@ -2307,9 +2309,12 @@ int cEpgd::scrapNewEvents()
cSystemNotification::check();
- if (seriesCur%10 == 0)
+ if (seriesCur % 10 == 0)
tell(0, "series episode %d / %d scraped...continuing scraping", seriesCur, seriesTotal);
+ if (seriesCur % 50 == 0)
+ sleep(1);
+
tvdbManager->ProcessSeries(*it);
if (doShutDown())
@@ -2351,9 +2356,12 @@ int cEpgd::scrapNewEvents()
{
movieCur++;
- if (movieCur%10 == 0)
+ if (movieCur % 10 == 0)
tell(0, "movie %d / %d scraped...continuing scraping", movieCur, moviesTotal);
+ if (movieCur % 50 == 0)
+ sleep(1);
+
movieDbManager->ProcessMovie(*it);
if (doShutDown())
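The two update.c hunks above implement the second item from the commit message with the same modulo pattern on both counters. The sketch below shows the pattern on its own: progress is logged every 10th item and one second of delay is inserted after every 50th, so the scraper stays under the remote server's request limit. processAll() and processItem() are placeholders for the ProcessSeries()/ProcessMovie() loops, not project code.

#include <cstdio>
#include <unistd.h>   // sleep()

// Sketch only: log progress every 10 items and pause one second after
// every 50th, adding one second of delay per 50 requests.
void processAll(int total)
{
   int cur = 0;

   for (int i = 0; i < total; i++)
   {
      cur++;

      if (cur % 10 == 0)
         printf("%d / %d scraped ...continuing\n", cur, total);

      if (cur % 50 == 0)
         sleep(1);

      // processItem(i);   // placeholder for ProcessSeries() / ProcessMovie()
   }
}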