1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
|
/*
* vdr-vodcatcher - A plugin for the Linux Video Disk Recorder
* Copyright (c) 2007 - 2009 Tobias Grimm <vdr@e-tobi.net>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
#include "FeedRepository.h"
#include "IFeedParser.h"
#include "IDownloader.h"
#include "IDownloadCache.h"
#include "Download.h"
#include "IFeedSources.h"
#include "IDownloadPool.h"
using namespace std;
// Constructs the repository with all collaborators supplied by reference
// (dependency injection). No ownership is taken: every referenced object
// must outlive this FeedRepository instance.
FeedRepository::FeedRepository(IDownloadCache& downloadCache, IDownloadPool& downloadPool, IDownloader& downloader,
IFeedParser& feedParser, IFeedSources& feedSources) :
_downloadCache(downloadCache), _downloadPool(downloadPool), _downloader(downloader), _feedParser(feedParser),
_feedSources(feedSources)
{
}
// Returns the feed for the given URL, downloading it first when it is not
// cached yet or when the cached copy has outlived the feed's time-to-live.
// A cached copy is parsed up front so its TTL can be consulted; after a
// (re-)download the feed is parsed again from the fresh data.
Feed FeedRepository::GetFeed(string url)
{
    Feed feed(url);
    if (_downloadCache.IsCached(url))
    {
        // Parse the cached copy — this also yields the TTL used below.
        _feedParser.Parse(feed);
    }
    // Fresh means: cached AND younger than the feed's TTL. The && keeps the
    // same short-circuit as the original !cached || age >= ttl check, so
    // GetAgeInMinutes() is never queried for an uncached URL.
    bool isFresh = _downloadCache.IsCached(url)
        && _downloadCache.GetAgeInMinutes(url) < feed.GetTimeToLive();
    if (!isFresh)
    {
        RefPtr<Download> pendingDownload(new Download(url));
        _downloader.PerformDownload(*pendingDownload);
        // Re-parse so the returned feed reflects the freshly downloaded data.
        _feedParser.Parse(feed);
    }
    return feed;
}
// Returns one Feed per configured feed-source URL. Each feed is parsed from
// whatever the cache currently holds; if a URL is not cached or its cached
// copy is older than the feed's TTL, the URL is queued on the download pool
// for asynchronous refresh (this method never downloads synchronously).
const FeedList FeedRepository::GetRootFeeds() const
{
    FeedList feeds;
    vector<string> urls = _feedSources.GetFeedUrls();
    // const_iterator + pre-increment: the loop never mutates the container,
    // and post-increment would create a throwaway iterator copy per step.
    for (vector<string>::const_iterator it = urls.begin(); it != urls.end(); ++it)
    {
        Feed feed(*it);
        _feedParser.Parse(feed);
        if (!_downloadCache.IsCached(*it) || _downloadCache.GetAgeInMinutes(*it) >= feed.GetTimeToLive())
        {
            // Stale or missing — schedule a background refresh.
            _downloadPool.AddDownload(*it);
        }
        feeds.push_back(feed);
    }
    return feeds;
}
|