-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathHttpManager.h
More file actions
76 lines (66 loc) · 2.16 KB
/
HttpManager.h
File metadata and controls
76 lines (66 loc) · 2.16 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
#ifndef _HTTP_MANAGER_
#define _HTTP_MANAGER_
#include "IDownloader.h"
#include "IValidator.h"
#include "IRegistry.h"
#include "IParser.h"
#include "ILocalResourceKey.h"
#include <string>
#include <vector>
using namespace std;
namespace ua { namespace kiev { namespace ukma { namespace downloader {
/**
Main class of the HTTP downloading process.
Starting from an initial link it explores pages up to a configured depth,
validates every discovered link, downloads the accepted resources into a
local directory and registers them on the PC via the IRegistry.
NOTE(review): declaration-only header — member semantics below that depend
on the .cpp (ownership of the raw pointers in particular) are marked as
assumptions to confirm.
*/
class HttpManager
{
private:
const string link; //initial link where the exploring starts (immutable for the object's lifetime)
int maxDeep; //maximum depth of the link exploring
string localDir; //local directory where downloaded resources are saved
IDownloader* downloader; //downloads resources; raw pointer — presumably owned by this class and released in the destructor, confirm in the .cpp
IRegistry* registry; //registers local resources on the PC; see the destructor note below about correct Registry shutdown
// Do not store it now - oldbam
//ILocalResourceKey* keyGen; //for primary key generation
IParser* parser; //parser used to retrieve links from downloaded HTML pages
vector<IValidator*> validators; //validators that decide whether a link is worth downloading; ownership of the pointed-to validators is not visible here — confirm against the caller
/**
Constructor's helper: shared initialization logic used by both
public constructors.
*/
void init();
/**
Copying is disabled: copy assignment and copy constructor are declared
private and (presumably) left unimplemented — the pre-C++11 idiom for
"= delete".
*/
void operator=(HttpManager);
HttpManager(const HttpManager&);
public:
/**
Constructor with parameters.
@param link URL where the exploring is started
@param deep maximum depth of the link exploring
@param localDir local place to save the downloaded files
*/
HttpManager(const string link, int deep, const string localDir);
/**
Constructor with parameters.
@param link URL where the exploring is started
@param deep maximum depth of the link exploring
@param localDir local place to save the downloaded files
@param validators validators that decide whether a link is worth downloading
*/
HttpManager(const string link, int deep, const string localDir, const vector<IValidator*>& validators);
/**
Destructor. It is very important that it runs: skipping it is not only
a memory leak but also leaves the Registry in an inconsistent state
(per the original author's warning).
*/
~HttpManager();
/**
Run the retrieving process:
1) while the queue is not empty
2) take a url from it
3) IDownloader#getWebResourceInfo(string url)
4) for every WebResourceInfo call isValid(WebResourceInfo) on each entry of the validators array
5) if valid, IDownloader#download(WebResourceInfo, string localpath)
6) if the WebResource is html and the depth limit is not reached, Parser#Parse and push back all the links to the queue
*/
void run();
};
}}}}
#endif