Merge branch 'inbox-del' into testing
commit ba26ffeef1
@@ -32,6 +32,7 @@ require_once INSTALLDIR.'/classes/Memcached_DataObject.php';
 class Inbox extends Memcached_DataObject
 {
     const BOXCAR = 128;
+    const MAX_NOTICES = 1024;
 
     ###START_AUTOCODE
     /* the code below is auto generated do not remove the above tag */
@@ -81,7 +82,7 @@ class Inbox extends Memcached_DataObject
         $ni->selectAdd();
         $ni->selectAdd('notice_id');
         $ni->orderBy('notice_id DESC');
-        $ni->limit(0, 1024);
+        $ni->limit(0, self::MAX_NOTICES);
 
         if ($ni->find()) {
             while($ni->fetch()) {
@@ -115,9 +116,11 @@ class Inbox extends Memcached_DataObject
 
         $result = $inbox->query(sprintf('UPDATE inbox '.
                                         'set notice_ids = concat(cast(0x%08x as binary(4)), '.
-                                        'substr(notice_ids, 1, 4092)) '.
+                                        'substr(notice_ids, 1, %d)) '.
                                         'WHERE user_id = %d',
-                                        $notice_id, $user_id));
+                                        $notice_id,
+                                        4 * (self::MAX_NOTICES - 1),
+                                        $user_id));
 
         if ($result) {
             self::blow('inbox:user_id:%d', $user_id);
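
The arithmetic behind the new %d placeholder: each notice id occupies four big-endian bytes in the notice_ids blob (the SQL writes it with cast(0x%08x as binary(4))), so keeping the first 4 * (self::MAX_NOTICES - 1) = 4092 bytes of the old blob and prepending the new four-byte id caps the inbox at MAX_NOTICES ids, which is exactly the 4092 that used to be hard-coded. A minimal PHP sketch of that byte layout, not part of the commit; the helper names are hypothetical:

<?php
// Sketch only: models the packed notice_ids blob manipulated by the SQL above.
// Helper names are illustrative, not from the codebase.

function packInboxIds(array $ids)
{
    $blob = '';
    foreach ($ids as $id) {
        $blob .= pack('N', $id);    // 4-byte big-endian, like cast(0x%08x as binary(4))
    }
    return $blob;
}

function prependNoticeId($blob, $notice_id, $max_notices = 1024)
{
    // Keep the newest ($max_notices - 1) old entries: 4 * 1023 = 4092 bytes.
    // (SQL substr() is 1-based; PHP substr() is 0-based.)
    $kept = substr($blob, 0, 4 * ($max_notices - 1));
    return pack('N', $notice_id) . $kept;
}

$blob = prependNoticeId(packInboxIds(array(101, 100, 99)), 102);
print_r(array_values(unpack('N*', $blob)));   // 102, 101, 100, 99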
@@ -173,4 +176,61 @@ class Inbox extends Memcached_DataObject
 
         return $ids;
     }
+
+    /**
+     * Wrapper for Inbox::stream() and Notice::getStreamByIds() returning
+     * additional items up to the limit if we were short due to deleted
+     * notices still being listed in the inbox.
+     *
+     * The fast path (when no items are deleted) should be just as fast; the
+     * offset parameter is applied *before* lookups for maximum efficiency.
+     *
+     * This means offset-based paging may show duplicates, but similar behavior
+     * already exists when new notices are posted between page views, so we
+     * think people will be ok with this until id-based paging is introduced
+     * to the user interface.
+     *
+     * @param int $user_id
+     * @param int $offset skip past the most recent N notices (after since_id checks)
+     * @param int $limit
+     * @param mixed $since_id return only notices after but not including this id
+     * @param mixed $max_id return only notices up to and including this id
+     * @param mixed $since obsolete/ignored
+     * @param mixed $own ignored?
+     * @return array of Notice objects
+     *
+     * @todo consider repacking the inbox when this happens?
+     */
+    function streamNotices($user_id, $offset, $limit, $since_id, $max_id, $since, $own=false)
+    {
+        $ids = self::stream($user_id, $offset, self::MAX_NOTICES, $since_id, $max_id, $since, $own);
+
+        // Do a bulk lookup for the first $limit items
+        // Fast path when nothing's deleted.
+        $firstChunk = array_slice($ids, 0, $limit);
+        $notices = Notice::getStreamByIds($firstChunk);
+
+        $wanted = count($firstChunk); // raw entry count in the inbox up to our $limit
+        if ($notices->N >= $wanted) {
+            common_log(LOG_DEBUG, __METHOD__ . " ok got $wanted items");
+            return $notices;
+        }
+
+        common_log(LOG_DEBUG, __METHOD__ . " got $notices->N of $wanted items, getting more");
+        // There were deleted notices, we'll need to look for more.
+        assert($notices instanceof ArrayWrapper);
+        $items = $notices->_items;
+        $remainder = array_slice($ids, $limit);
+
+        while (count($items) < $wanted && count($remainder) > 0) {
+            $notice = Notice::staticGet(array_shift($remainder));
+            if ($notice) {
+                common_log(LOG_DEBUG, __METHOD__ . " got another one");
+                $items[] = $notice;
+            } else {
+                common_log(LOG_DEBUG, __METHOD__ . " skipping another deleted one");
+            }
+        }
+        return new ArrayWrapper($items);
+    }
 }
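
The backfill loop above in miniature: the first $limit ids get one bulk lookup (the fast path), and only when some of those notices turn out to be deleted does the wrapper walk one id at a time into the remainder until the page is full or the inbox runs out. A plain-array sketch of that control flow, not part of the commit and using stand-in data instead of Notice lookups:

<?php
// Sketch only: the streamNotices() top-up logic modeled with plain arrays.
// $existing stands in for "notice rows that have not been deleted".

function fetchPage(array $ids, $limit, array $existing)
{
    $firstChunk = array_slice($ids, 0, $limit);

    // Fast path: one bulk pass over the first $limit ids.
    $items = array();
    foreach ($firstChunk as $id) {
        if (in_array($id, $existing)) {
            $items[] = $id;
        }
    }

    // Slow path: top up one id at a time from deeper in the inbox.
    $wanted = count($firstChunk);
    $remainder = array_slice($ids, $limit);
    while (count($items) < $wanted && count($remainder) > 0) {
        $id = array_shift($remainder);
        if (in_array($id, $existing)) {
            $items[] = $id;
        }
    }
    return $items;
}

// Notices 7 and 5 were deleted, so the page of 4 is filled from further back.
print_r(fetchPage(array(9, 8, 7, 6, 5, 4, 3), 4, array(9, 8, 6, 4, 3)));
// 9, 8, 6, 4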
@@ -502,28 +502,22 @@ class User extends Memcached_DataObject
 
     function noticesWithFriends($offset=0, $limit=NOTICES_PER_PAGE, $since_id=0, $before_id=0, $since=null)
     {
-        $ids = Inbox::stream($this->id, $offset, $limit, $since_id, $before_id, $since, false);
-        return Notice::getStreamByIds($ids);
+        return Inbox::streamNotices($this->id, $offset, $limit, $since_id, $before_id, $since, false);
     }
 
     function noticeInbox($offset=0, $limit=NOTICES_PER_PAGE, $since_id=0, $before_id=0, $since=null)
     {
-        $ids = Inbox::stream($this->id, $offset, $limit, $since_id, $before_id, $since, true);
-        return Notice::getStreamByIds($ids);
+        return Inbox::streamNotices($this->id, $offset, $limit, $since_id, $before_id, $since, true);
     }
 
     function friendsTimeline($offset=0, $limit=NOTICES_PER_PAGE, $since_id=0, $before_id=0, $since=null)
     {
-        $ids = Inbox::stream($this->id, $offset, $limit, $since_id, $before_id, $since, false);
-
-        return Notice::getStreamByIds($ids);
+        return Inbox::streamNotices($this->id, $offset, $limit, $since_id, $before_id, $since, false);
     }
 
     function ownFriendsTimeline($offset=0, $limit=NOTICES_PER_PAGE, $since_id=0, $before_id=0, $since=null)
     {
-        $ids = Inbox::stream($this->id, $offset, $limit, $since_id, $before_id, $since, true);
-
-        return Notice::getStreamByIds($ids);
+        return Inbox::streamNotices($this->id, $offset, $limit, $since_id, $before_id, $since, true);
     }
 
     function blowFavesCache()
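
For callers, nothing changes with this refactor: the four timeline methods keep their signatures and still return a notice stream, they just route through Inbox::streamNotices() so pages no longer come up short when inbox entries point at deleted notices. A rough usage sketch, not part of the commit; the lookup and fetch()-style iteration are assumed from similar callers elsewhere in the codebase:

<?php
// Sketch only: consuming the unchanged User API after the refactor.
// The id value is illustrative.

$user = User::staticGet('id', 42);
$notices = $user->noticesWithFriends(0, NOTICES_PER_PAGE);

while ($notices->fetch()) {
    print $notices->id . ' ' . $notices->content . "\n";
}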