Namespaced filters

Changed default settings (disable lock_bypage and set lock_timeout to 5)
Updated docs
Damian Mooyman 2014-04-30 12:31:18 +12:00
parent a243a3510a
commit 241f0604b0
7 changed files with 39 additions and 37 deletions

View File

@@ -2,9 +2,9 @@
 Name: versionedfeedconfig
 ---
 Injector:
-  RateLimitFilter: RateLimitFilter
+  RateLimitFilter: \VersionFeed\Filters\RateLimitFilter
   ContentFilter:
-    class: CachedContentFilter
+    class: \VersionFeed\Filters\CachedContentFilter
     constructor:
       - %$RateLimitFilter
 SiteTree:

View File

@@ -10,16 +10,16 @@ class VersionFeed_Controller extends Extension {
     /**
      * Content handler
      *
-     * @var ContentFilter
+     * @var \VersionFeed\Filters\ContentFilter
      */
     protected $contentFilter;

     /**
      * Sets the content filter
      *
-     * @param ContentFilter $contentFilter
+     * @param \VersionFeed\Filters\ContentFilter $contentFilter
      */
-    public function setContentFilter(ContentFilter $contentFilter) {
+    public function setContentFilter(\VersionFeed\Filters\ContentFilter $contentFilter) {
         $this->contentFilter = $contentFilter;
     }
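Note: any replacement filter wired in through the Injector now has to satisfy the namespaced type hint above. A minimal config sketch, where `MyCustomContentFilter` is a hypothetical project class (not part of the module) extending `VersionFeed\Filters\ContentFilter`:

```yaml
# Sketch only: MyCustomContentFilter is a hypothetical example class.
Injector:
  ContentFilter:
    class: MyCustomContentFilter  # must extend VersionFeed\Filters\ContentFilter to satisfy setContentFilter()
```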

View File

@@ -1,5 +1,7 @@
 <?php
+namespace VersionFeed\Filters;
 /**
  * Caches results of a callback
  */

View File

@@ -1,5 +1,7 @@
 <?php
+namespace VersionFeed\Filters;
 /**
  * Conditionally executes a given callback, attempting to return the desired results
  * of its execution.
@@ -23,7 +25,7 @@ abstract class ContentFilter {
      * @return Zend_Cache_Frontend
      */
     protected function getCache() {
-        $cache = SS_Cache::factory('VersionFeed_Controller');
+        $cache = \SS_Cache::factory('VersionFeed_Controller');
         $cache->setOption('automatic_serialization', true);
         return $cache;
     }

View File

@@ -1,5 +1,7 @@
 <?php
+namespace VersionFeed\Filters;
 /**
  * Provides rate limiting of execution of a callback
  */
@@ -13,18 +15,16 @@ class RateLimitFilter extends ContentFilter {
      * @config
      * @var int
      */
-    private static $lock_timeout = 10;
+    private static $lock_timeout = 5;

     /**
      * Determine if the cache generation should be locked on a per-page basis. If true, concurrent page versions
      * may be generated without rate interference.
      *
-     * Suggested to turn this to false on small sites that will not have many concurrent views of page versions
-     *
      * @config
      * @var bool
      */
-    private static $lock_bypage = true;
+    private static $lock_bypage = false;

     /**
      * Determine if rate limiting should be applied independently to each IP address. This method is not
@@ -50,13 +50,13 @@ class RateLimitFilter extends ContentFilter {
         $key = self::CACHE_PREFIX;

         // Add global identifier
-        if(Config::inst()->get(get_class(), 'lock_bypage')) {
+        if(\Config::inst()->get(get_class(), 'lock_bypage')) {
             $key .= '_' . md5($itemkey);
         }

         // Add user-specific identifier
-        if(Config::inst()->get(get_class(), 'lock_byuserip') && Controller::has_curr()) {
-            $ip = Controller::curr()->getRequest()->getIP();
+        if(\Config::inst()->get(get_class(), 'lock_byuserip') && \Controller::has_curr()) {
+            $ip = \Controller::curr()->getRequest()->getIP();
             $key .= '_' . md5($ip);
         }
@@ -66,7 +66,7 @@ class RateLimitFilter extends ContentFilter {
     public function getContent($key, $callback) {
         // Bypass rate limiting if flushing, or timeout isn't set
-        $timeout = Config::inst()->get(get_class(), 'lock_timeout');
+        $timeout = \Config::inst()->get(get_class(), 'lock_timeout');
         if(isset($_GET['flush']) || !$timeout) {
             return parent::getContent($key, $callback);
         }
@@ -77,7 +77,7 @@ class RateLimitFilter extends ContentFilter {
         if($cacheBegin = $cache->load($limitKey)) {
             if(time() - $cacheBegin < $timeout) {
                 // Politely inform visitor of limit
-                $response = new SS_HTTPResponse_Exception('Too Many Requests.', 429);
+                $response = new \SS_HTTPResponse_Exception('Too Many Requests.', 429);
                 $response->getResponse()->addHeader('Retry-After', 1 + time() - $cacheBegin);
                 throw $response;
             }
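Note: sites that prefer the previous behaviour can override the new defaults from project configuration; the fully qualified class name is used as the config key, as in the updated tests further down. A minimal sketch, assuming an illustrative project config file such as `mysite/_config/versionfeed.yml`:

```yaml
# Illustrative values only; tune limits to the server's capacity.
VersionFeed\Filters\RateLimitFilter:
  lock_timeout: 10    # rate-limit window in seconds (the default before this commit)
  lock_bypage: true   # limit each page URL independently instead of one global lock (previous default)
  lock_byuserip: true # scope the limit to each visitor IP; see the docs' caveat about spoofed IPs
```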

View File

@@ -36,20 +36,18 @@ Two filters are applied on top of one another:
 Either one of these can be replaced, added to, or removed, by adjusting the `VersionFeed_Controller.dependencies`
 config to point to a replacement (or no) filter.

-For large servers with a high level of traffic (more than 1 visits every 10 seconds) the default settings should
-be sufficient.
+For smaller servers where it's reasonable to apply a strict approach to rate limiting the default
+settings should be sufficient. The `RateLimitFilter.lock_bypage` config defaults to false, meaning that a
+single limit will be applied to all URLs. If set to true, then each URL will have its own rate limit,
+and on smaller servers with lots of concurrent requests this can still overwhelm capacity. This will
+also leave smaller servers vulnerable to DDoS attacks which target many URLs simultaneously.
+This config will have no effect on the `allchanges` method.

-For smaller servers where it's reasonable to apply a more strict approach to rate limiting, consider setting the
-`RateLimitFilter.lock_bypage` config to false, meaning that a single limit will be applied to all URLs.
-If left on true, then each URL will have its own rate limit, and on smaller servers with lots of
-concurrent requests this can still overwhelm capacity. This will also leave smaller servers vulnerable to DDoS
-attacks which target many URLs simultaneously. This config will have no effect on the `allchanges` method.
-
-`RateLimitFilter.lock_byuserip` can also be set to true in order to prevent requests from different users
+`RateLimitFilter.lock_byuserip` can be set to true in order to prevent requests from different users
 interfering with one another. However, this can provide an ineffective safeguard against malicious DDoS attacks
 which use multiple IP addresses.

-Another important variable is the `RateLimitFilter.lock_timeout` config, which is set to 10 seconds by default.
+Another important variable is the `RateLimitFilter.lock_timeout` config, which is set to 5 seconds by default.
 This should be increased on sites which may be slow to generate page versions, whether due to lower
 server capacity or volume of content (number of page versions). Requests to this page after the timeout
 will not trigger any rate limit safeguard, so you should be sure that this is set to an appropriate level.
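As a concrete illustration of adjusting `VersionFeed_Controller.dependencies` as described above, a project could keep rate limiting but drop the caching layer. This is a sketch only; the `ContentFilter` dependency key and the `%$RateLimitFilter` service reference are assumed to match the service names in the module's `_config.yml` shown earlier:

```yaml
# Sketch: swap the injected filter (dependency key and service names assumed).
VersionFeed_Controller:
  dependencies:
    ContentFilter: '%$RateLimitFilter'  # rate limiting only, without result caching
```

Pointing the dependency at a different service swaps the filter chain without touching module code.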

View File

@@ -17,9 +17,9 @@ class VersionFeedFunctionalTest extends FunctionalTest {
         $this->userIP = isset($_SERVER['HTTP_CLIENT_IP']) ? $_SERVER['HTTP_CLIENT_IP'] : null;

         Config::nest();
-        Config::inst()->update('RateLimitFilter', 'lock_timeout', 20);
-        Config::inst()->update('RateLimitFilter', 'lock_bypage', false);
-        Config::inst()->update('RateLimitFilter', 'lock_byuserip', false);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_timeout', 20);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_bypage', false);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_byuserip', false);
     }

     public function tearDown() {
@@ -60,10 +60,10 @@ class VersionFeedFunctionalTest extends FunctionalTest {
         $page2 = $this->createPageWithChanges(array('PublicHistory' => true, 'Title' => 'Page2'));

         // Artifically set cache lock
-        Config::inst()->update('RateLimitFilter', 'lock_byuserip', false);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_byuserip', false);
         $cache = SS_Cache::factory('VersionFeed_Controller');
         $cache->setOption('automatic_serialization', true);
-        $cache->save(time() - 2, RateLimitFilter::CACHE_PREFIX);
+        $cache->save(time() - 2, \VersionFeed\Filters\RateLimitFilter::CACHE_PREFIX);

         // Test normal hit
         $response = $this->get($page1->RelativeLink('changes'));
@@ -74,37 +74,37 @@ class VersionFeedFunctionalTest extends FunctionalTest {
         $this->assertGreaterThan(0, $response->getHeader('Retry-After'));

         // Test page specific lock
-        Config::inst()->update('RateLimitFilter', 'lock_bypage', true);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_bypage', true);
         $key = implode('_', array(
             'changes',
             $page1->ID,
             Versioned::get_versionnumber_by_stage('SiteTree', 'Live', $page1->ID, false)
         ));
-        $key = RateLimitFilter::CACHE_PREFIX . '_' . md5($key);
+        $key = \VersionFeed\Filters\RateLimitFilter::CACHE_PREFIX . '_' . md5($key);
         $cache->save(time() - 2, $key);
         $response = $this->get($page1->RelativeLink('changes'));
         $this->assertEquals(429, $response->getStatusCode());
         $this->assertGreaterThan(0, $response->getHeader('Retry-After'));
         $response = $this->get($page2->RelativeLink('changes'));
         $this->assertEquals(200, $response->getStatusCode());
-        Config::inst()->update('RateLimitFilter', 'lock_bypage', false);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_bypage', false);

         // Test rate limit hit by IP
-        Config::inst()->update('RateLimitFilter', 'lock_byuserip', true);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_byuserip', true);
         $_SERVER['HTTP_CLIENT_IP'] = '127.0.0.1';
-        $cache->save(time() - 2, RateLimitFilter::CACHE_PREFIX . '_' . md5('127.0.0.1'));
+        $cache->save(time() - 2, \VersionFeed\Filters\RateLimitFilter::CACHE_PREFIX . '_' . md5('127.0.0.1'));
         $response = $this->get($page1->RelativeLink('changes'));
         $this->assertEquals(429, $response->getStatusCode());
         $this->assertGreaterThan(0, $response->getHeader('Retry-After'));

         // Test rate limit doesn't hit other IP
         $_SERVER['HTTP_CLIENT_IP'] = '127.0.0.20';
-        $cache->save(time() - 2, RateLimitFilter::CACHE_PREFIX . '_' . md5('127.0.0.1'));
+        $cache->save(time() - 2, \VersionFeed\Filters\RateLimitFilter::CACHE_PREFIX . '_' . md5('127.0.0.1'));
         $response = $this->get($page1->RelativeLink('changes'));
         $this->assertEquals(200, $response->getStatusCode());

         // Restore setting
-        Config::inst()->update('RateLimitFilter', 'lock_byuserip', false);
+        Config::inst()->update('VersionFeed\Filters\RateLimitFilter', 'lock_byuserip', false);
     }

     public function testContainsChangesForPageOnly() {