Merge pull request #148 from silverstripe-terraformers/bugfix/dirty-indexes-prop-default

Set SearchUpdateCommitJobProcessor::$dirty_indexes default to array, not bool. Fixes #151
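For context: the property is documented as @var array, so code that appends to it or passes it to array functions misbehaves when it defaults to a bool. A minimal sketch of the difference (illustrative only, not code from the module; 'MyIndex' is a made-up index name):

<?php
// Hypothetical sketch: why a bool default clashes with the @var array docblock.
$dirty_indexes = true;         // old default
// $dirty_indexes[] = 'MyIndex';            // warning: Cannot use a scalar value as an array
// in_array('MyIndex', $dirty_indexes);     // warning: in_array() expects parameter 2 to be array

$dirty_indexes = array();      // new default, consistent with the docblock
$dirty_indexes[] = 'MyIndex';                  // works as expected
var_dump(in_array('MyIndex', $dirty_indexes)); // bool(true)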
Daniel Hensby 2017-10-13 13:10:57 +01:00 committed by GitHub
commit 6eaea567fb
1 changed file with 4 additions and 4 deletions


@@ -8,7 +8,7 @@ class SearchUpdateCommitJobProcessor implements QueuedJob
 {
     /**
      * The QueuedJob queue to use when processing commits
      *
      * @config
      * @var int
      */
@@ -47,12 +47,12 @@ class SearchUpdateCommitJobProcessor implements QueuedJob
      *
      * @var array
      */
-    public static $dirty_indexes = true;
+    public static $dirty_indexes = array();
 
     /**
      * If solrindex::commit has already been performed, but additional commits are necessary,
      * how long do we wait before attempting to touch the index again?
      *
      * {@see http://stackoverflow.com/questions/7512945/how-to-fix-exceeded-limit-of-maxwarmingsearchers}
      *
      * @var int
@@ -154,7 +154,7 @@ class SearchUpdateCommitJobProcessor implements QueuedJob
             return;
         }
 
         // If any commit has run, but some (or all) indexes are un-comitted, we must re-schedule this task.
         // This could occur if we completed a searchupdate job in a prior request, as well as in
         // the current request