/*
 *  This file is part of nzbget. See <http://nzbget.net>.
 *
 *  Copyright (C) 2012-2019 Andrey Prygunkov <hugbug@users.sourceforge.net>
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */


#include "nzbget.h"
#include "UrlCoordinator.h"
#include "Options.h"
#include "WorkState.h"
#include "WebDownloader.h"
#include "Util.h"
#include "FileSystem.h"
#include "NzbFile.h"
#include "Scanner.h"
#include "DiskState.h"
#include "QueueScript.h"

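/*
 * Parses additional X-DNZB response headers: the X-DNZB-Category value is
 * stored in m_category, all other X-DNZB-* headers are stored as "*DNZB:"
 * post-processing parameters of the nzb.
 */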
void UrlDownloader::ProcessHeader(const char* line)
{
	WebDownloader::ProcessHeader(line);

	if (!strncmp(line, "X-DNZB-Category:", 16))
	{
		m_category = Util::Trim(CString(line + 16));

		debug("Category: %s", *m_category);
	}
	else if (!strncmp(line, "X-DNZB-", 7))
	{
		CString modLine = line;
		char* value = strchr(modLine, ':');
		if (value)
		{
			*value = '\0';
			value++;
			while (*value == ' ') value++;
			Util::Trim(value);

			debug("X-DNZB: %s", *modLine);
			debug("Value: %s", value);

			BString<100> paramName("*DNZB:%s", modLine + 7);
			CString paramValue = WebUtil::Latin1ToUtf8(value);
			m_nzbInfo->GetParameters()->SetParameter(paramName, paramValue);
		}
	}
}

UrlCoordinator::UrlCoordinator()
{
	m_downloadQueueObserver.m_owner = this;
	DownloadQueue::Guard()->Attach(&m_downloadQueueObserver);
}

UrlCoordinator::~UrlCoordinator()
{
	debug("Destroying UrlCoordinator");

	for (UrlDownloader* urlDownloader : m_activeDownloads)
	{
		delete urlDownloader;
	}
	m_activeDownloads.clear();

	debug("UrlCoordinator destroyed");
}

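/*
 * Thread routine: waits until the download queue is loaded, then repeatedly
 * picks the next queued URL and starts a download for it while free connection
 * slots are available; sleeps on the condition variable when there is no work.
 */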
void UrlCoordinator::Run()
{
	debug("Entering UrlCoordinator-loop");

	while (!DownloadQueue::IsLoaded())
	{
		Util::Sleep(20);
	}

	// declared outside of the loop so that ResetHangingDownloads()
	// is really called at most once per second
	time_t lastReset = 0;

	while (!IsStopped())
	{
		bool downloadStarted = false;

		{
			NzbInfo* nzbInfo = nullptr;
			GuardedDownloadQueue downloadQueue = DownloadQueue::Guard();
			if ((int)m_activeDownloads.size() < g_Options->GetUrlConnections())
			{
				nzbInfo = GetNextUrl(downloadQueue);
				if (nzbInfo && (!g_WorkState->GetPauseDownload() || g_Options->GetUrlForce()))
				{
					StartUrlDownload(nzbInfo);
					downloadStarted = true;
				}
			}
			m_hasMoreJobs = !m_activeDownloads.empty() || nzbInfo;
		}

		if (lastReset != Util::CurrentTime())
		{
			// this code should not be called too often, once per second is OK
			ResetHangingDownloads();
			lastReset = Util::CurrentTime();
		}

		if (!m_hasMoreJobs && !IsStopped())
		{
			Guard guard(m_waitMutex);
			m_waitCond.Wait(m_waitMutex, [&] { return m_hasMoreJobs || IsStopped(); });
		}
		else
		{
			int sleepInterval = downloadStarted ? 0 : 100;
			Util::Sleep(sleepInterval);
		}
	}

	WaitJobs();

	debug("Exiting UrlCoordinator-loop");
}

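/*
 * Blocks until all active URL downloads have completed.
 */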
void UrlCoordinator::WaitJobs()
{
	// waiting for downloads
	debug("UrlCoordinator: waiting for Downloads to complete");

	while (true)
	{
		{
			GuardedDownloadQueue guard = DownloadQueue::Guard();
			if (m_activeDownloads.empty())
			{
				break;
			}
		}
		Util::Sleep(100);
		ResetHangingDownloads();
	}

	debug("UrlCoordinator: Downloads are completed");
}

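/*
 * Stops the thread: asks all active downloaders to stop and wakes up Run()
 * so it can exit its loop.
 */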
void UrlCoordinator::Stop()
{
	Thread::Stop();

	debug("Stopping UrlDownloads");
	{
		GuardedDownloadQueue guard = DownloadQueue::Guard();
		for (UrlDownloader* urlDownloader : m_activeDownloads)
		{
			urlDownloader->Stop();
		}
	}
	debug("UrlDownloads are notified");

	// Resume Run() to exit it
	Guard guard(m_waitMutex);
	m_waitCond.NotifyAll();
}

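/*
 * Observer callback for the download queue: wakes up Run() when new URLs
 * are added to or returned to the queue.
 */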
void UrlCoordinator::DownloadQueueUpdate(Subject* caller, void* aspect)
{
	debug("Notification from download queue received");

	DownloadQueue::Aspect* queueAspect = (DownloadQueue::Aspect*)aspect;
	if (queueAspect->action == DownloadQueue::eaUrlAdded ||
		queueAspect->action == DownloadQueue::eaUrlReturned)
	{
		// Resume Run()
		Guard guard(m_waitMutex);
		m_hasMoreJobs = true;
		m_waitCond.NotifyAll();
	}
}

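/*
 * Cancels running downloads which have not received data for longer than
 * the configured UrlTimeout (plus a small grace period).
 */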
void UrlCoordinator::ResetHangingDownloads()
{
	if (g_Options->GetUrlTimeout() == 0)
	{
		return;
	}

	GuardedDownloadQueue guard = DownloadQueue::Guard();
	time_t tm = Util::CurrentTime();

	for (UrlDownloader* urlDownloader : m_activeDownloads)
	{
		if (tm - urlDownloader->GetLastUpdateTime() > g_Options->GetUrlTimeout() + 10 &&
			urlDownloader->GetStatus() == UrlDownloader::adRunning)
		{
			error("Cancelling hanging url download %s", urlDownloader->GetInfoName());
			urlDownloader->Stop();
		}
	}
}

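/*
 * Writes the list of active URL downloads to the log (for debugging).
 */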
void UrlCoordinator::LogDebugInfo()
{
	info("   ---------- UrlCoordinator");

	GuardedDownloadQueue guard = DownloadQueue::Guard();
	info("    Active Downloads: %i", (int)m_activeDownloads.size());
	for (UrlDownloader* urlDownloader : m_activeDownloads)
	{
		urlDownloader->LogDebugInfo();
	}
}

/*
 * Returns the next URL entry for download: the pending URL with the
 * highest priority, or nullptr if there is none.
 */
NzbInfo* UrlCoordinator::GetNextUrl(DownloadQueue* downloadQueue)
{
	NzbInfo* nzbInfo = nullptr;

	for (NzbInfo* nzbInfo1 : downloadQueue->GetQueue())
	{
		if (nzbInfo1->GetKind() == NzbInfo::nkUrl &&
			nzbInfo1->GetUrlStatus() == NzbInfo::lsNone &&
			nzbInfo1->GetDeleteStatus() == NzbInfo::dsNone &&
			(!nzbInfo || nzbInfo1->GetPriority() > nzbInfo->GetPriority()))
		{
			nzbInfo = nzbInfo1;
		}
	}

	return nzbInfo;
}

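/*
 * Creates a new UrlDownloader for the given nzb entry, marks the entry
 * as running and starts the download thread.
 */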
void UrlCoordinator::StartUrlDownload(NzbInfo* nzbInfo)
{
	debug("Starting new UrlDownloader");

	UrlDownloader* urlDownloader = new UrlDownloader();
	urlDownloader->SetAutoDestroy(true);
	urlDownloader->Attach(this);
	urlDownloader->SetNzbInfo(nzbInfo);
	urlDownloader->SetUrl(nzbInfo->GetUrl());
	urlDownloader->SetForce(g_Options->GetUrlForce());
	urlDownloader->SetInfoName(nzbInfo->MakeNiceUrlName(nzbInfo->GetUrl(), nzbInfo->GetFilename()));
	urlDownloader->SetOutputFilename(BString<1024>("%s%curl-%i.tmp",
		g_Options->GetTempDir(), PATH_SEPARATOR, nzbInfo->GetId()));

	nzbInfo->SetActiveDownloads(1);
	nzbInfo->SetUrlStatus(NzbInfo::lsRunning);

	m_activeDownloads.push_back(urlDownloader);
	urlDownloader->Start();
}

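/*
 * Observer callback from UrlDownloader: reacts to the finished, failed and
 * retry states by finalizing the download.
 */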
void UrlCoordinator::Update(Subject* caller, void* aspect)
{
	debug("Notification from UrlDownloader received");

	UrlDownloader* urlDownloader = (UrlDownloader*) caller;
	if ((urlDownloader->GetStatus() == WebDownloader::adFinished) ||
		(urlDownloader->GetStatus() == WebDownloader::adFailed) ||
		(urlDownloader->GetStatus() == WebDownloader::adRetry))
	{
		UrlCompleted(urlDownloader);
	}
}

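/*
 * Finalizes a completed URL download: updates the URL status and, on
 * success, hands the downloaded nzb-file over to the scanner; on failure
 * or scan error, fires the queue script and notifies the queue.
 */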
void UrlCoordinator::UrlCompleted(UrlDownloader* urlDownloader)
{
	debug("URL downloaded");

	NzbInfo* nzbInfo = urlDownloader->GetNzbInfo();

	const char* origname;
	if (urlDownloader->GetOriginalFilename())
	{
		origname = urlDownloader->GetOriginalFilename();
	}
	else
	{
		origname = FileSystem::BaseFileName(nzbInfo->GetUrl());

		// TODO: decode URL escaping
	}

	CString filename = FileSystem::MakeValidFilename(origname);

	debug("Filename: [%s]", *filename);

	bool retry;

	{
		GuardedDownloadQueue downloadQueue = DownloadQueue::Guard();

		// remove downloader from downloader list
		m_activeDownloads.erase(std::find(m_activeDownloads.begin(), m_activeDownloads.end(), urlDownloader));

		retry = urlDownloader->GetStatus() == WebDownloader::adRetry && !nzbInfo->GetDeleting();

		if (nzbInfo->GetDeleting())
		{
			nzbInfo->SetDeleteStatus(nzbInfo->GetDeleteStatus() == NzbInfo::dsNone ? NzbInfo::dsManual : nzbInfo->GetDeleteStatus());
			nzbInfo->SetUrlStatus(NzbInfo::lsNone);
			nzbInfo->SetDeleting(false);
		}
		else if (urlDownloader->GetStatus() == WebDownloader::adFinished)
		{
			nzbInfo->SetUrlStatus(NzbInfo::lsFinished);
		}
		else if (urlDownloader->GetStatus() == WebDownloader::adFailed)
		{
			nzbInfo->SetUrlStatus(NzbInfo::lsFailed);
		}
		else if (urlDownloader->GetStatus() == WebDownloader::adRetry)
		{
			nzbInfo->SetUrlStatus(NzbInfo::lsNone);
		}

		if (!retry)
		{
			DownloadQueue::Aspect aspect = {DownloadQueue::eaUrlCompleted, downloadQueue, nzbInfo, nullptr};
			downloadQueue->Notify(&aspect);
		}
	}

	if (retry)
	{
		nzbInfo->SetActiveDownloads(0);
		return;
	}

	if (nzbInfo->GetUrlStatus() == NzbInfo::lsFinished)
	{
		// add nzb-file to download queue
		Scanner::EAddStatus addStatus = g_Scanner->AddExternalFile(
			!Util::EmptyStr(nzbInfo->GetFilename()) ? nzbInfo->GetFilename() : *filename,
			!Util::EmptyStr(nzbInfo->GetCategory()) ? nzbInfo->GetCategory() : urlDownloader->GetCategory(),
			nzbInfo->GetPriority(), nzbInfo->GetDupeKey(), nzbInfo->GetDupeScore(), nzbInfo->GetDupeMode(),
			nzbInfo->GetParameters(), false, nzbInfo->GetAddUrlPaused(), nzbInfo,
			urlDownloader->GetOutputFilename(), nullptr, 0, nullptr);

		if (addStatus == Scanner::asSuccess)
		{
			// if scanner has successfully added nzb-file to queue, our nzbInfo is
			// already removed from queue and destroyed
			return;
		}

		nzbInfo->SetUrlStatus(addStatus == Scanner::asFailed ? NzbInfo::lsScanFailed : NzbInfo::lsScanSkipped);
	}

	// the rest of the function is only for failed URLs or for failed scans

	g_QueueScriptCoordinator->EnqueueScript(nzbInfo, QueueScriptCoordinator::qeUrlCompleted);

	{
		GuardedDownloadQueue downloadQueue = DownloadQueue::Guard();

		nzbInfo->SetActiveDownloads(0);

		DownloadQueue::Aspect aspect = {DownloadQueue::eaUrlFailed, downloadQueue, nzbInfo, nullptr};
		downloadQueue->Notify(&aspect);
	}
}

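/*
 * Deletes a URL entry from the queue. If the URL is currently being
 * downloaded, the downloader is stopped and the deletion is finished
 * later in UrlCompleted().
 */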
bool UrlCoordinator::DeleteQueueEntry(DownloadQueue* downloadQueue, NzbInfo* nzbInfo, bool avoidHistory)
{
	if (nzbInfo->GetActiveDownloads() > 0)
	{
		info("Deleting active URL %s", nzbInfo->GetName());
		nzbInfo->SetDeleting(true);
		nzbInfo->SetAvoidHistory(avoidHistory);

		for (UrlDownloader* urlDownloader : m_activeDownloads)
		{
			if (urlDownloader->GetNzbInfo() == nzbInfo)
			{
				urlDownloader->Stop();
				return true;
			}
		}

		return false;
	}

	info("Deleting URL %s", nzbInfo->GetName());

	nzbInfo->SetDeleteStatus(nzbInfo->GetDeleteStatus() == NzbInfo::dsNone ? NzbInfo::dsManual : nzbInfo->GetDeleteStatus());
	nzbInfo->SetUrlStatus(NzbInfo::lsNone);

	DownloadQueue::Aspect deletedAspect = {DownloadQueue::eaUrlDeleted, downloadQueue, nzbInfo, nullptr};
	downloadQueue->Notify(&deletedAspect);

	return true;
}

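/*
 * Adds a new URL entry to the download queue. Observers are notified before
 * (eaUrlFound) and after (eaUrlAdded) adding; if the eaUrlFound notification
 * results in a manual delete status, the entry is not added. The queue is
 * saved afterwards.
 */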
void UrlCoordinator::AddUrlToQueue(std::unique_ptr<NzbInfo> nzbInfo, bool addFirst)
{
	debug("Adding URL to queue");

	NzbInfo* addedNzb = nzbInfo.get();

	GuardedDownloadQueue downloadQueue = DownloadQueue::Guard();

	DownloadQueue::Aspect foundAspect = {DownloadQueue::eaUrlFound, downloadQueue, addedNzb, nullptr};
	downloadQueue->Notify(&foundAspect);

	if (addedNzb->GetDeleteStatus() != NzbInfo::dsManual)
	{
		downloadQueue->GetQueue()->Add(std::move(nzbInfo), addFirst);

		DownloadQueue::Aspect addedAspect = {DownloadQueue::eaUrlAdded, downloadQueue, addedNzb, nullptr};
		downloadQueue->Notify(&addedAspect);
	}

	downloadQueue->Save();
}