Avoid excessive remote requests on oEmbed lookups
I noticed that each time a notice was accessed, we'd do a remote HEAD lookup (and carry on despite 404s etc.) and then make a second attempt to download the resource. If that failed, the attempts were repeated on every subsequent load of the resource, which is extremely resource intensive. Once we can say "it's been n seconds since the last attempt" we could probably enable this again (see the sketch below) - or just reload remote thumbnails manually (as part of the StoreRemoteMedia plugin etc.).
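A minimal sketch of the "n seconds since the last attempt" throttle suggested above. The last_attempted timestamp, the helper name and the interval are all hypothetical here, not existing GNU social code:

    // Hypothetical throttle for remote thumbnail refetches. Assumes the
    // thumbnail record carries a last_attempted timestamp; that column,
    // this function and the interval are illustrative only.
    define('THUMB_RETRY_INTERVAL', 3600); // e.g. retry at most once per hour

    function shouldRetryRemoteFetch($thumbnail)
    {
        if (empty($thumbnail->last_attempted)) {
            return true;    // never tried before, go ahead
        }
        // Only hit the remote server again once the cool-down has passed,
        // instead of retrying on every page load.
        return (time() - strtotime($thumbnail->last_attempted)) >= THUMB_RETRY_INTERVAL;
    }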
commit 3bc2454e91
parent a7df79ac07
@@ -389,6 +389,13 @@ class OembedPlugin extends Plugin
         // First see if it's too large for us
         common_debug(__METHOD__ . ': '.sprintf('Performing HEAD request for remote file id==%u to avoid unnecessarily downloading too large files. URL: %s', $thumbnail->getFileId(), $remoteUrl));
         $head = $http->head($remoteUrl);
+        if (!$head->isOk()) {
+            common_log(LOG_WARN, 'HEAD request returned HTTP failure, so we will abort now and delete the thumbnail object.');
+            $thumbnail->delete();
+            return false;
+        } else {
+            common_debug('HEAD request returned HTTP success, so we will continue.');
+        }
         $remoteUrl = $head->getEffectiveUrl(); // to avoid going through redirects again
 
         $headers = $head->getHeader();
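For context, the "too large" check this hunk leads into presumably reads the Content-Length from the HEAD response headers collected above. A hedged sketch of that step; the header-name casing and the common_config('attachments', 'file_quota') limit are assumptions, not the plugin's verbatim code:

    // Sketch only: use the HEAD response headers to skip oversized files
    // before downloading them. Limit choice is this sketch's assumption.
    $filesize = isset($headers['content-length'])
        ? intval($headers['content-length'])
        : 0;

    if ($filesize > common_config('attachments', 'file_quota')) {
        // Larger than our upload quota: don't bother downloading it.
        return false;
    }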