Added a location parameter to the GPareto distribution. Validated the value-size distribution against Atikoglu's paper by comparing the CDF and PDF.
parent 1417bd4b01
commit 46015b29ac
@@ -7,7 +7,7 @@
 Generator* createFacebookKey() { return new GEV(30.7984, 8.20449, 0.078688); }
 
 Generator* createFacebookValue() {
-  Generator* g = new GPareto(214.476, 0.348238);
+  Generator* g = new GPareto(15.0, 214.476, 0.348238);
 
   Discrete* d = new Discrete(g);
   d->add(0.00536, 0.0);
@@ -29,7 +29,7 @@ Generator* createFacebookValue() {
   return d;
 }
 
-Generator* createFacebookIA() { return new GPareto(16.0292, 0.154971); }
+Generator* createFacebookIA() { return new GPareto(0, 16.0292, 0.154971); }
 
 Generator* createGenerator(std::string str) {
   if (!strcmp(str.c_str(), "fb_key")) return createFacebookKey();
@@ -68,7 +68,7 @@ Generator* createGenerator(std::string str) {
   if (strcasestr(str.c_str(), "fixed")) return new Fixed(a1);
   else if (strcasestr(str.c_str(), "normal")) return new Normal(a1, a2);
   else if (strcasestr(str.c_str(), "exponential")) return new Exponential(a1);
-  else if (strcasestr(str.c_str(), "pareto")) return new GPareto(a1, a2);
+  else if (strcasestr(str.c_str(), "pareto")) return new GPareto(a1, a2, a3);
   else if (strcasestr(str.c_str(), "gev")) return new GEV(a1, a2, a3);
   else if (strcasestr(str.c_str(), "uniform")) return new Uniform(a1);
 
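Note: the new value-size parameters (loc=15, scale=214.476, shape=0.348238) can be checked against the CDF and PDF figures in Atikoglu's paper directly in closed form. The sketch below is an illustration, not part of the commit; it evaluates the standard generalized Pareto CDF and PDF at a few probe sizes, and those probe sizes are made up rather than taken from the paper.

// Sketch: closed-form generalized Pareto CDF/PDF for the new ETC value-size
// parameters. Probe sizes are arbitrary illustration values.
#include <cmath>
#include <cstdio>

static double gpareto_cdf(double x, double mu, double sigma, double k) {
  if (x < mu) return 0.0;
  return 1.0 - pow(1.0 + k * (x - mu) / sigma, -1.0 / k);
}

static double gpareto_pdf(double x, double mu, double sigma, double k) {
  if (x < mu) return 0.0;
  return (1.0 / sigma) * pow(1.0 + k * (x - mu) / sigma, -1.0 / k - 1.0);
}

int main() {
  const double mu = 15.0, sigma = 214.476, k = 0.348238;
  const double sizes[] = { 50, 100, 500, 1000, 4096, 100000 };
  for (double x : sizes)
    printf("%8.0f bytes: CDF=%.4f PDF=%.6f\n",
           x, gpareto_cdf(x, mu, sigma, k), gpareto_pdf(x, mu, sigma, k));
  return 0;
}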
Generator.h (12 lines changed)
@@ -121,23 +121,24 @@ private:
 
 class GPareto : public Generator {
 public:
-  GPareto(double _scale = 1.0, double _shape = 1.0) :
-    scale(_scale), shape(_shape) {
+  GPareto(double _loc = 0.0, double _scale = 1.0, double _shape = 1.0) :
+    loc(_loc), scale(_scale), shape(_shape) {
     assert(shape != 0.0);
-    D("GPareto(scale=%f, shape=%f)", scale, shape);
+    D("GPareto(loc=%f, scale=%f, shape=%f)", loc, scale, shape);
   }
 
   virtual double generate(double U = -1.0) {
     if (U < 0.0) U = drand48();
-    return scale * (pow(U, -shape) - 1) / shape;
+    return loc + scale * (pow(U, -shape) - 1) / shape;
   }
 
   virtual void set_lambda(double lambda) {
     if (lambda <= 0.0) scale = 0.0;
-    else scale = (1 - shape) / lambda;
+    else scale = (1 - shape) / lambda - (1 - shape) * loc;
   }
 
 private:
+  double loc /* mu */;
   double scale /* sigma */, shape /* k */;
 };
 
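Note: two quick sanity checks on the new loc handling; this is a minimal sketch under the standard generalized Pareto parameterization (mu=loc, sigma=scale, k=shape), not code from the commit. generate() is the GPD quantile function evaluated at 1-U (the code draws pow(U, -shape) where the textbook inverse CDF uses (1-U)^(-shape), which is equivalent in distribution since U is uniform on (0,1)). set_lambda() appears to choose scale so that the GPD mean, loc + scale/(1-shape), comes out to 1/lambda (valid for shape < 1). The loc/lambda values in the second check are made-up illustration values, not from the commit.

#include <cassert>
#include <cmath>
#include <cstdio>

int main() {
  // ETC value-size parameters from createFacebookValue().
  const double mu = 15.0, sigma = 214.476, k = 0.348238;

  // Inverse-CDF check: applying the GPD CDF to generate(u) should give 1 - u.
  for (double u = 0.1; u < 1.0; u += 0.2) {
    double x = mu + sigma * (pow(u, -k) - 1.0) / k;              // generate(u)
    double F = 1.0 - pow(1.0 + k * (x - mu) / sigma, -1.0 / k);  // GPD CDF
    assert(fabs(F - (1.0 - u)) < 1e-9);
  }

  // set_lambda() check with hypothetical loc/lambda: the new scale formula
  // should make the mean, loc + scale/(1-shape), equal to 1/lambda.
  double loc = 0.0002, shape = 0.154971, lambda = 1000.0;
  double scale = (1.0 - shape) / lambda - (1.0 - shape) * loc;
  printf("mean = %g (expect %g)\n", loc + scale / (1.0 - shape), 1.0 / lambda);
  return 0;
}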
@@ -177,7 +178,6 @@ public:
     }
 
     return def->generate(Uc);
-    // return 0.0;
   }
 
   void add(double p, double v) {
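Note: the fall-through to def->generate(Uc) above, together with add(p, v) and the d->add(0.00536, 0.0) calls in createFacebookValue(), suggests a mixture generator: each added (probability, value) pair is emitted with its probability, and the remaining mass is delegated to the wrapped continuous generator. The sketch below only illustrates that pattern; the names and structure are guesses for exposition, not the project's actual Discrete class.

#include <cstdlib>
#include <utility>
#include <vector>

// Illustration of the discrete-points-plus-continuous-tail pattern.
struct DiscreteSketch {
  std::vector<std::pair<double, double> > points;  // (probability, value)
  double (*def)(double);                           // wrapped tail generator

  void add(double p, double v) { points.push_back(std::make_pair(p, v)); }

  double generate() {
    double U = drand48();
    double remaining = 1.0;
    for (size_t i = 0; i < points.size(); i++) {
      if (U < points[i].first) return points[i].second;  // emit fixed value
      U -= points[i].first;             // otherwise consume this mass...
      remaining -= points[i].first;
    }
    // ...and hand the leftover uniform, rescaled back onto (0,1),
    // to the tail generator (assumes the discrete masses sum to < 1).
    return def(U / remaining);
  }
};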
@@ -55,6 +55,12 @@ int main(int argc, char **argv) {
   KeyGenerator kg(g);
   */
+
+  Generator *g = createFacebookValue();
+  //  Generator *g = createGenerator("pareto:15,214.476,0.348238");
+  for (int i = 0; i < 1000000; i++)
+    printf("%f\n", g->generate());
+
   /*
   Generator *p2 = createGenerator("p:214.476,0.348238");
   //  for (int i = 0; i < 1000; i++)
   //    printf("%f\n", p2->generate());
@@ -62,6 +68,7 @@ int main(int argc, char **argv) {
   p2->set_lambda(1000);
   for (int i = 0; i < 1000; i++)
     printf("%f\n", p2->generate());
   */
+
   //  for (int i = 0; i < 10000; i++)
   //    printf("%s\n", kg.generate(i).c_str());
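Note: the new block in main() dumps 1,000,000 samples from the hard-coded ETC value-size generator, presumably the data behind the CDF/PDF comparison mentioned in the commit message. One possible way to reduce that dump to an empirical CDF for comparison is sketched below; this is an illustration only, and the probe sizes are arbitrary.

#include <cstdio>
#include <vector>

// Reads the "%f\n" sample dump on stdin and prints the empirical CDF
// at a handful of probe sizes.
int main() {
  std::vector<double> samples;
  double x;
  while (scanf("%lf", &x) == 1) samples.push_back(x);
  if (samples.empty()) return 1;

  const double probes[] = { 50, 100, 500, 1000, 4096, 100000 };
  for (double p : probes) {
    size_t n = 0;
    for (size_t i = 0; i < samples.size(); i++)
      if (samples[i] <= p) n++;
    printf("P(value <= %8.0f) = %.4f\n", p, (double) n / samples.size());
  }
  return 0;
}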
cmdline.ggo (28 lines changed)
@@ -12,7 +12,7 @@ text "\nBasic options:"
 
 option "server" s "Memcached server hostname[:port]. \
 Repeat to specify multiple servers." string multiple
-option "qps" q "Target aggregate QPS." int default="0"
+option "qps" q "Target aggregate QPS. 0 = peak QPS." int default="0"
 option "time" t "Maximum time to run (seconds)." int default="5"
 
 option "keysize" K "Length of memcached keys (distribution)."
@@ -34,8 +34,9 @@ option "depth" d "Maximum depth to pipeline requests." int default="1"
 option "roundrobin" R "Assign threads to servers in round-robin fashion. \
 By default, each thread connects to every server."
 
-option "iadist" i "Inter-arrival distribution (distribution)."
-  string default="exponential"
+option "iadist" i "Inter-arrival distribution (distribution). Note: \
+The distribution will automatically be adjusted to match the QPS given \
+by --qps." string default="exponential"
 
 option "noload" - "Skip database loading."
 option "loadonly" - "Load database and then exit."
@@ -62,16 +63,19 @@ Some options take a 'distribution' as an argument.
 Distributions are specified by <distribution>[:<param1>[,...]].
 Parameters are not required. The following distributions are supported:
 
-  [fixed:]<value>               Always generates <value>.
-  uniform:<max>                 Uniform distribution between 0 and <max>.
-  normal:<mean>,<sd>            Normal distribution.
-  exponential:<lambda>          Exponential distribution.
-  pareto:<scale>,<shape>        Generalized Pareto distribution.
-  gev:<loc>,<scale>,<shape>     Generalized Extreme Value distribution.
+  [fixed:]<value>               Always generates <value>.
+  uniform:<max>                 Uniform distribution between 0 and <max>.
+  normal:<mean>,<sd>            Normal distribution.
+  exponential:<lambda>          Exponential distribution.
+  pareto:<loc>,<scale>,<shape>  Generalized Pareto distribution.
+  gev:<loc>,<scale>,<shape>     Generalized Extreme Value distribution.
 
-  fb_key     ETC key-size distribution from [1].
-  fb_value   ETC value-size distribution from [1].
-  fb_ia      ETC inter-arrival distribution from [1].
+  To recreate the Facebook \"ETC\" request stream from [1], the
+  following hard-coded distributions are also provided:
+
+  fb_value   = a hard-coded discrete and GPareto PDF of value sizes
+  fb_key     = \"gev:30.7984,8.20449,0.078688\", key-size distribution
+  fb_ia      = \"pareto:0.0,16.0292,0.154971\", inter-arrival time dist.
 
 [1] Berk Atikoglu et al., Workload Analysis of a Large-Scale Key-Value Store,
 SIGMETRICS 2012