Commit 2e28bc46 authored by Björn Richerzhagen's avatar Björn Richerzhagen
Browse files

Merge branch 'master' into tm/sumo-integration

Conflicts:
	src/de/tud/kom/p2psim/impl/topology/DefaultTopologyComponent.java
parents 8aeec4ee 686eca40
......@@ -65,5 +65,10 @@ public class TransmissionControlProtocolDummy extends AbstractTransProtocol {
// Header size in bytes for this transport protocol, delegated to the
// TransProtocol.TCP enum constant so the value is defined in one place.
public int getHeaderSize() {
return TransProtocol.TCP.getHeaderSize();
}
@Override
public TransProtocol getProtocol() {
// Identifies this dummy implementation as TCP.
return TransProtocol.TCP;
}
}
......@@ -24,6 +24,7 @@ import de.tud.kom.p2psim.api.analyzer.MessageAnalyzer.Reason;
import de.tud.kom.p2psim.api.common.SimHost;
import de.tud.kom.p2psim.api.network.NetProtocol;
import de.tud.kom.p2psim.api.network.SimNetInterface;
import de.tud.kom.p2psim.api.transport.TransProtocol;
import de.tud.kom.p2psim.impl.transport.UDPMessage;
import de.tud.kom.p2psim.impl.transport.modular.AbstractTransProtocol;
import de.tudarmstadt.maki.simonstrator.api.Message;
......@@ -65,4 +66,9 @@ public class UserDatagramProtocol extends AbstractTransProtocol {
return 8;
}
@Override
public TransProtocol getProtocol() {
// Identifies this transport implementation as UDP.
return TransProtocol.UDP;
}
}
package de.tud.kom.p2psim.impl.util;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
/**
 * Reader for CSV files. To use this reader you need to implement
 * {@link #parse(String[])}, which turns the separated entries of one line
 * into an object of type {@code T}.
 *
 * @author Clemens Krug
 */
public abstract class CSVReader<T>
{
	/** Path of the CSV file to read. */
	private final String filename;

	/**
	 * Column separator. Note: it is passed to {@link String#split(String)}
	 * and is therefore interpreted as a regular expression.
	 */
	private final String SEP;

	public CSVReader(String filename, String SEP)
	{
		this.filename = filename;
		this.SEP = SEP;
	}

	/**
	 * Reads the whole file and parses every line containing the separator.
	 * Lines without the separator (e.g. headers or comments) are skipped, as
	 * are lines for which {@link #parse(String[])} returns {@code null}.
	 *
	 * @return A list of the generated objects; empty if the file could not
	 *         be read.
	 */
	public List<T> readData()
	{
		List<T> data = new LinkedList<>();
		// try-with-resources guarantees the reader is closed on all paths,
		// replacing the manual finally/close of the previous version.
		try (BufferedReader csv = new BufferedReader(new FileReader(filename))) {
			String line;
			// readLine() == null is the reliable end-of-file check;
			// BufferedReader.ready() only tells whether a read would block,
			// not whether the stream is exhausted.
			while ((line = csv.readLine()) != null) {
				if (line.contains(SEP)) {
					String[] parts = line.split(SEP);
					T entry = parse(parts);
					if (entry != null) data.add(entry);
				}
			}
		} catch (IOException e) {
			// Best-effort behavior kept from the original: log and return
			// whatever was read so far.
			e.printStackTrace();
		}
		return data;
	}

	/**
	 * Parses one line of csv entries into the desired type of object.
	 *
	 * @param parts The csv entries of a line.
	 * @return Object of desired type, or {@code null} to skip this line.
	 */
	public abstract T parse(String[] parts);
}
......@@ -168,6 +168,41 @@ public class MeasurementDAO extends DAO {
groupMetric, observationDuration, describesWholeSimulation);
addToPersistQueue(measurement);
}
/**
* Stores a statistical description of a series of values for group of
* hosts and a given spatial coordinate.
*
* @param metricDesc
* The {@link MetricDescription} which describes the metric.
* @param groupName
* The host group
* @param time
* A time for the measurement in simulation time
* @param stats
* the {@link DescriptiveStatistics} object used as input
* @param observationDuration
* duration of this observation in simulation time
* @param describesWholeSimulation
* true, if this measurement is a description of the WHOLE
* simulation
* @param locationX
* x coordinate for spatial sampling
* @param locationY
* y coordinate for spatial sampling
*/
public static void storeSpatialGroupStatisticsMeasurement(
MetricDescription metricDesc, String groupName, long time,
DescriptiveStatistics stats, long observationDuration,
boolean describesWholeSimulation, int locationX, int locationY) {
// Resolve the statistics metric and its per-group mapping via the DAOs.
Metric metric = MetricDAO.lookupStatisticsMetric(metricDesc);
GroupMetric groupMetric = GroupMetricDAO.lookupGroupMetric(metric,
groupName);
// The (locationX, locationY) constructor variant flags the measurement
// as spatial.
MeasurementStatistic measurement = new MeasurementStatistic(time, stats,
groupMetric, observationDuration, describesWholeSimulation, locationX, locationY);
// Hand the measurement to the DAO persist queue.
addToPersistQueue(measurement);
}
/**
* Store a list-based measurement with a key (i.e., as a
......
......@@ -118,12 +118,15 @@ public class MeasurementStatistic implements GroupMetricBound {
@Column(nullable = true, name = "[perc5]")
private Double perc5; // 5
@Column(nullable = true, name = "[skewness]")
private Double skewness;
@Column(nullable = true, name = "[kurtosis]")
private Double kurtosis;
@Column(nullable = true, name = "[locationX]")
private Integer locationX;
@Column(nullable = true, name = "[locationY]")
private Integer locationY;
@Column(nullable = true, name = "[isSpatial]")
private boolean isSpatial;
/**
* Mapping to group metric
......@@ -155,6 +158,36 @@ public class MeasurementStatistic implements GroupMetricBound {
this.groupMetric = groupMetric;
}
/**
* Creates a {@link Measurement}-Object using the provided
* {@link DescriptiveStatistics} object, with spatial data attached.
*
* @param time
* The simulation time for this measurement
* @param stats
* the {@link DescriptiveStatistics} object
* @param groupMetric
* The reference to the {@link GroupMetric}-Object, which
* describes this metric. Is used for the mapping.
* @param observationDuration
* duration of the observation
* @param describesWholeSimulation
* true, if this measurement describes the whole simulation
* @param locationX
* x coordinate for spatial sampling
* @param locationY
* y coordinate for spatial sampling
*/
public MeasurementStatistic(long time, DescriptiveStatistics stats,
GroupMetric groupMetric, long observationDuration,
boolean describesWholeSimulation, int locationX, int locationY) {
// Delegate to the non-spatial constructor first (it resets isSpatial to
// false while writing the statistics), then attach the spatial data.
this(time, stats, observationDuration, describesWholeSimulation);
this.groupMetric = groupMetric;
this.locationX = locationX;
this.locationY = locationY;
this.isSpatial = true;
}
/**
* Internal - write statistics
*
......@@ -182,9 +215,8 @@ public class MeasurementStatistic implements GroupMetricBound {
this.perc97 = checkForSpecialNumbers(stats.getPercentile(97.7));
this.perc5 = checkForSpecialNumbers(stats.getPercentile(5));
this.perc95 = checkForSpecialNumbers(stats.getPercentile(95));
this.skewness = checkForSpecialNumbers(stats.getSkewness());
this.kurtosis = checkForSpecialNumbers(stats.getKurtosis());
this.std = checkForSpecialNumbers(stats.getStandardDeviation());
this.isSpatial = false;
}
/**
......
package de.tud.kom.p2psim.impl.util.stat.distributions;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.TreeMap;
import de.tudarmstadt.maki.simonstrator.api.Monitor;
import de.tudarmstadt.maki.simonstrator.api.Monitor.Level;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
/**
 * This distribution loads CDF data from a given CSV file and provides random
 * values out of the loaded data set.
 *
 * TODO: better scaling! Scaled with the highest value! But it gives many more
 * cases for scaling in CDF.
 *
 * @author Fabio Zöllner
 *
 */
public class CustomDistribution implements Distribution {

	// Per-instance random source from the simulator framework.
	private Random rand = Randoms.getRandom(this);

	// NOTE(review): never assigned - the CSV path is only passed to
	// readCDFData() and not stored here; verify before relying on it.
	private String csvFile = "";

	// If true, loaded values are divided by the largest loaded value so the
	// value range becomes ]0,1] (see readCDFData / scaleCDF).
	private boolean scale = false;

	private double scaleFactor = 1;

	// Maps cumulative probability -> value; sampled via ceilingEntry().
	private TreeMap<Double, Double> cdfData = new TreeMap<Double, Double>();

	@XMLConfigurableConstructor({ "csvFile" })
	public CustomDistribution(String csvFile) {
		readCDFData(csvFile);
	}

	public CustomDistribution(TreeMap<Double, Double> cdfData) {
		this.cdfData = cdfData;
	}

	public CustomDistribution(TreeMap<Double, Double> cdfData, double scaleFactor) {
		this(cdfData);
		this.scaleFactor = scaleFactor;
		this.scale = true;
		scaleCDF(scaleFactor);
	}

	/** Number of unique CDF entries currently loaded. */
	public int getSize() {
		return cdfData.size();
	}

	/**
	 * Inverse transform sampling: draws a uniform random number and returns
	 * the value stored under the smallest cumulative probability greater or
	 * equal to it; 0 (with a warning) if no such entry exists.
	 */
	@Override
	public double returnValue() {
		double randomDouble = rand.nextDouble();
		Map.Entry<Double, Double> greaterOrEqualEntry = cdfData
				.ceilingEntry(randomDouble);

		if (greaterOrEqualEntry == null) {
			Monitor.log(CustomDistribution.class, Level.WARN,
					"No entry with a key greater or equal to " + randomDouble
							+ " has been found. (Has the data been loaded?)");
			return 0;
		} else {
			return greaterOrEqualEntry.getValue();
		}
	}

	/**
	 * Reads a simple two column comma separated list of doubles and stores
	 * them in the internal TreeMap (cumulative probability -> value).
	 *
	 * @param csvFilename
	 *            The path to the CSV file
	 */
	private void readCDFData(String csvFilename) {
		Monitor.log(CustomDistribution.class, Level.INFO,
				"Reading CDF data from CSV file %s", csvFilename);
		cdfData.clear();
		boolean entrySuccessfullyRead = false;
		// NOTE(review): Double.MIN_VALUE is the smallest POSITIVE double, so
		// this running-maximum seed only works for non-negative data.
		double scaleFactor = Double.MIN_VALUE;
		long counter = 0;
		BufferedReader csv = null;
		try {
			csv = new BufferedReader(new FileReader(csvFilename));
			while (csv.ready()) {
				counter++;
				String line = csv.readLine();
				if (line.indexOf(",") > -1) {
					String[] parts = line.split(",");
					if (parts.length == 2) {
						try {
							Double x = Double.parseDouble(parts[0]);
							Double cf = Double.parseDouble(parts[1]);
							scaleFactor = Math.max(scaleFactor, x);
							cdfData.put(cf, x);
							entrySuccessfullyRead = true;
						} catch (NumberFormatException e) {
							// Ignore leading comments
							if (entrySuccessfullyRead) {
								Monitor.log(CustomDistribution.class,
										Level.WARN,
										"Couldn't parse cdf entry %s", line);
							}
						}
					} else {
						throw new AssertionError("To many columns in CSV.");
					}
				}
			}
		} catch (FileNotFoundException e) {
			throw new RuntimeException(
					"Could not open CSV file with CDF data (\"" + csvFilename
							+ "\")");
		} catch (IOException e) {
			throw new RuntimeException("Failed to read the CDF data (\""
					+ csvFilename + "\")");
		} finally {
			if (csv != null)
				try {
					csv.close();
				} catch (IOException e) {
					//
				}
		}
		Monitor.log(CustomDistribution.class, Level.INFO,
				"Read " + cdfData.size() + " unique entries from "
						+ csvFilename + "with " + counter
						+ " and got a scaling factor of " + scaleFactor);
		/*
		 * Scale entries to a value range of ]0,1] if scale == true. This has to
		 * be done only once this way.
		 */
		if (scale == true) {
			this.scaleFactor = scaleFactor;
			scaleCDF(scaleFactor);
		}
	}

	/** Divides every stored value by scaleFactor; the keys stay unchanged. */
	private void scaleCDF(double scaleFactor) {
		TreeMap<Double, Double> scaledCdfData = new TreeMap<Double, Double>();
		for (Entry<Double, Double> actEntry : cdfData.entrySet()) {
			scaledCdfData.put(actEntry.getKey(), actEntry.getValue()
					/ scaleFactor);
		}
		// replace original data by scaled data
		cdfData = scaledCdfData;
	}

	public void setScale(boolean scale) {
		this.scale = scale;
	}

	public double getScaleFactor() {
		return this.scaleFactor;
	}

	/** Returns the live internal map, not a defensive copy. */
	public Map<Double, Double> getMap() {
		return cdfData;
	}
}
package de.tud.kom.p2psim.impl.util.stat.distributions;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.TreeMap;
import de.tudarmstadt.maki.simonstrator.api.Monitor;
import de.tudarmstadt.maki.simonstrator.api.Monitor.Level;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
/**
 * This distribution loads CDF data from a given CSV file and provides random
 * values out of the loaded data set (inverse transform sampling).
 *
 * TODO: better scaling! Scaled with the highest value! But it gives many more
 * cases for scaling in CDF.
 *
 * @author Fabio Zöllner
 *
 */
public class CustomDistribution implements Distribution {

	// Shared random source registered under this class.
	private Random rand = Randoms.getRandom(CustomDistribution.class);

	/** Path of the CSV file the data was loaded from (empty if map-based). */
	private String csvFile = "";

	// If true, loaded values are divided by the largest loaded value so the
	// value range becomes ]0,1] (see readCDFData / scaleCDF).
	private boolean scale = false;

	private double scaleFactor = 1;

	// Maps cumulative probability -> value; sampled via ceilingEntry().
	private TreeMap<Double, Double> cdfData = new TreeMap<Double, Double>();

	@XMLConfigurableConstructor({ "csvFile" })
	public CustomDistribution(String csvFile) {
		// FIX: remember the source file - the field was never assigned before.
		this.csvFile = csvFile;
		readCDFData(csvFile);
	}

	public CustomDistribution(TreeMap<Double, Double> cdfData) {
		this.cdfData = cdfData;
	}

	public CustomDistribution(TreeMap<Double, Double> cdfData, double scaleFactor) {
		this(cdfData);
		this.scaleFactor = scaleFactor;
		this.scale = true;
		scaleCDF(scaleFactor);
	}

	/** Number of unique CDF entries currently loaded. */
	public int getSize() {
		return cdfData.size();
	}

	/**
	 * Inverse transform sampling: draws a uniform random number and returns
	 * the value stored under the smallest cumulative probability greater or
	 * equal to it; 0 (with a warning) if no such entry exists.
	 */
	@Override
	public double returnValue() {
		double randomDouble = rand.nextDouble();
		Map.Entry<Double, Double> greaterOrEqualEntry = cdfData
				.ceilingEntry(randomDouble);

		if (greaterOrEqualEntry == null) {
			Monitor.log(CustomDistribution.class, Level.WARN,
					"No entry with a key greater or equal to " + randomDouble
							+ " has been found. (Has the data been loaded?)");
			return 0;
		} else {
			return greaterOrEqualEntry.getValue();
		}
	}

	/**
	 * Reads a simple two column comma separated list of doubles and stores
	 * them in the internal TreeMap (cumulative probability -> value).
	 * Non-numeric leading lines (comments/headers) are ignored.
	 *
	 * @param csvFilename
	 *            The path to the CSV file
	 */
	private void readCDFData(String csvFilename) {
		Monitor.log(CustomDistribution.class, Level.INFO,
				"Reading CDF data from CSV file %s", csvFilename);
		cdfData.clear();
		boolean entrySuccessfullyRead = false;
		// FIX: Double.MIN_VALUE is the smallest POSITIVE double and would
		// silently win against negative data; use the true lower bound as
		// the running-maximum seed.
		double scaleFactor = Double.NEGATIVE_INFINITY;
		long counter = 0;
		BufferedReader csv = null;
		try {
			csv = new BufferedReader(new FileReader(csvFilename));
			String line;
			// FIX: readLine() == null is the reliable EOF check; ready()
			// only reports whether a read would block.
			while ((line = csv.readLine()) != null) {
				counter++;
				if (line.indexOf(",") > -1) {
					String[] parts = line.split(",");
					if (parts.length == 2) {
						try {
							Double x = Double.parseDouble(parts[0]);
							Double cf = Double.parseDouble(parts[1]);
							scaleFactor = Math.max(scaleFactor, x);
							cdfData.put(cf, x);
							entrySuccessfullyRead = true;
						} catch (NumberFormatException e) {
							// Ignore leading comments
							if (entrySuccessfullyRead) {
								Monitor.log(CustomDistribution.class,
										Level.WARN,
										"Couldn't parse cdf entry %s", line);
							}
						}
					} else {
						throw new AssertionError("Too many columns in CSV.");
					}
				}
			}
		} catch (FileNotFoundException e) {
			throw new RuntimeException(
					"Could not open CSV file with CDF data (\"" + csvFilename
							+ "\")");
		} catch (IOException e) {
			throw new RuntimeException("Failed to read the CDF data (\""
					+ csvFilename + "\")");
		} finally {
			if (csv != null)
				try {
					csv.close();
				} catch (IOException e) {
					//
				}
		}
		Monitor.log(CustomDistribution.class, Level.INFO,
				"Read " + cdfData.size() + " unique entries from "
						+ csvFilename + " with " + counter
						+ " lines and got a scaling factor of " + scaleFactor);
		/*
		 * Scale entries to a value range of ]0,1] if scale == true. This has to
		 * be done only once this way.
		 */
		if (scale == true) {
			this.scaleFactor = scaleFactor;
			scaleCDF(scaleFactor);
		}
	}

	/** Divides every stored value by scaleFactor; the keys stay unchanged. */
	private void scaleCDF(double scaleFactor) {
		TreeMap<Double, Double> scaledCdfData = new TreeMap<Double, Double>();
		for (Entry<Double, Double> actEntry : cdfData.entrySet()) {
			scaledCdfData.put(actEntry.getKey(), actEntry.getValue()
					/ scaleFactor);
		}
		// replace original data by scaled data
		cdfData = scaledCdfData;
	}

	public void setScale(boolean scale) {
		this.scale = scale;
	}

	public double getScaleFactor() {
		return this.scaleFactor;
	}

	/** Returns the live internal map, not a defensive copy. */
	public Map<Double, Double> getMap() {
		return cdfData;
	}
}
......@@ -19,9 +19,9 @@
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
package de.tud.kom.p2psim.impl.util.stat.distributions;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.ExponentialDistributionImpl;
......@@ -29,63 +29,64 @@ import de.tud.kom.p2psim.api.scenario.ConfigurationException;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class ExponentialDistribution implements Distribution {
private ExponentialDistributionImpl distr = null;
private double mu;
@XMLConfigurableConstructor({"mu"})
public ExponentialDistribution(double mu) {
this.mu = mu;
this.distr = new ExponentialDistributionImpl(mu);
}
/**
* returns a random value that is distributed as the configured
* distribution.
*/
@Override
public double returnValue() {
if (distr == null) throw new ConfigurationException("Mu was not set for exponential distribution " + this);
double random = Randoms.getRandom(this).nextDouble();
double result;
try {
result = distr.inverseCumulativeProbability(random);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
result = 0;
}
return result;
}
/**
* returns a random value exponentially distributed with mu = _mu.
*
* @param _mu
* @return as double
*/
public static double returnValue(double _mu) {
try {
ExponentialDistributionImpl d = new ExponentialDistributionImpl(_mu);
public class ExponentialDistribution implements Distribution {

	// Commons-Math exponential distribution configured with mean mu.
	private ExponentialDistributionImpl distr = null;

	// Mean of the distribution, kept for toString().
	private double mu;

	@XMLConfigurableConstructor({"mu"})
	public ExponentialDistribution(double mu) {
		this.mu = mu;
		this.distr = new ExponentialDistributionImpl(mu);
	}

	/**
	 * returns a random value that is distributed as the configured
	 * distribution.
	 */
	@Override
	public double returnValue() {
		if (distr == null) throw new ConfigurationException("Mu was not set for exponential distribution " + this);
		// Inverse transform sampling: map a uniform draw through the
		// inverse CDF of the configured distribution.
		double random = Randoms.getRandom(ExponentialDistribution.class)
				.nextDouble();
		double result;
		try {
			result = distr.inverseCumulativeProbability(random);
		} catch (MathException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			result = 0;
		}
		return result;
	}

	/**
	 * returns a random value exponentially distributed with mu = _mu.
	 *
	 * @param _mu
	 *            mean of the exponential distribution
	 * @return as double, 0 on a math error
	 */
	public static double returnValue(double _mu) {
		try {
			ExponentialDistributionImpl d = new ExponentialDistributionImpl(_mu);
			return d.inverseCumulativeProbability(Randoms.getRandom(
					ExponentialDistribution.class)
					.nextDouble());
		} catch (MathException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			return 0;
		}
	}

	@Override
	public String toString() {
		return "ExponentialDistribution [distr=" + distr + ", mu=" + mu + "]";
	}
}
ExponentialDistribution.class)
.nextDouble());
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
@Override
public String toString() {
return "ExponentialDistribution [distr=" + distr + ", mu=" + mu + "]";
}
}
......@@ -19,309 +19,311 @@
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
package de.tud.kom.p2psim.impl.util.stat.distributions;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.NormalDistributionImpl;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class LimitedNormalDistribution implements Distribution {
private NormalDistributionImpl limitedNormal;
private double mu;
private double sigma;
private boolean limitedMin;
private boolean limitedMax;
private double min;
private double max;
private double pmin = 0;
private double pmax = 1;
// pfactor and pmin are used to determine the range in which the random
// values are allowed.
private double pfactor;
private int limitType;
private LimitedNormalConfigurer conf;
private final static int LIMIT_NORMAL_DIST_NONE = 0;
private final static int LIMIT_NORMAL_DIST_MIN = 1;
private final static int LIMIT_NORMAL_DIST_MAX = 2;
private final static int LIMIT_NORMAL_DIST_BOTH = 3;
@XMLConfigurableConstructor({"mu", "sigma", "min", "max", "limitedMin", "limitedMax"})
public LimitedNormalDistribution(double mu, double sigma, double min,
double max, boolean limitedMin, boolean limitedMax) {
conf = new LimitedNormalConfigurer(mu, sigma, min, max, limitedMin, limitedMax);
config(conf);
}
public void config(LimitedNormalConfigurer dc) {
mu = dc.getMu();
sigma = dc.getSigma();
limitedMin = dc.isLimitedMin();
limitedMax = dc.isLimitedMax();
limitedNormal = new NormalDistributionImpl(mu, sigma);
if (limitedMin == false) {
if (limitedMax == false) {
limitType = LIMIT_NORMAL_DIST_NONE;
} else {
// only max is limted
limitType = LIMIT_NORMAL_DIST_MAX;
max = dc.getMax();
try {
pmax = limitedNormal.cumulativeProbability(max);
} catch (MathException e) {
e.printStackTrace();
}
}
} else {
if (limitedMax == false) {
// only min is limited.
limitType = LIMIT_NORMAL_DIST_MIN;
min = dc.getMin();
try {
pmin = limitedNormal.cumulativeProbability(min);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
// both sides limited.
limitType = LIMIT_NORMAL_DIST_BOTH;
// make sure min is really smaller than max.
if (max > min) {
min = dc.getMin();
max = dc.getMax();
} else {
max = dc.getMin();
min = dc.getMax();
}
// get min and max probabilites that are possible
try {
pmin = limitedNormal.cumulativeProbability(min);
pmax = limitedNormal.cumulativeProbability(max);
pfactor = pmax - pmin;
} catch (MathException e) {
e.printStackTrace();
}
}
}
pfactor = pmax - pmin;
}
public double returnValue() {
double random = pmin + Randoms.getRandom(this).nextDouble() * pfactor;
double result;
try {
result = limitedNormal.inverseCumulativeProbability(random);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
result = 0;
}
return result;
}
/**
* @return Returns the limitType.
*/
public int getLimitType() {
return limitType;
}
/**
* Returns a random value that is distributed as a Normal Distribution with
* an upper and lower limit.
*
* @param _mu
* average
* @param _sigma
* standard deviation
* @param _min
* lower limit, set to "null", if no limit
* @param _max
* upper limit, set to "null", if no limit
* @return as double
*/
public static double returnValue(double _mu, double _sigma, Double _min,
Double _max) {
int llimitType;
double lmax;
double lmin;
double lpmax = 1;
double lpmin = 0;
double lpfactor;
NormalDistributionImpl llimitedNormal = new NormalDistributionImpl(_mu,
_sigma);
if (_min == null) {
if (_max == null) {
llimitType = LIMIT_NORMAL_DIST_NONE;
} else {
// only max is limted
llimitType = LIMIT_NORMAL_DIST_MAX;
lmax = _max.doubleValue();
try {
lpmax = llimitedNormal.cumulativeProbability(lmax);
} catch (MathException e) {
e.printStackTrace();
}
}
} else {
if (_max == null) {
// only min is limited.
llimitType = LIMIT_NORMAL_DIST_MIN;
lmin = _min.doubleValue();
try {
lpmin = llimitedNormal.cumulativeProbability(lmin);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
// both sides limited.
llimitType = LIMIT_NORMAL_DIST_BOTH;
// make sure min is really smaller than max.
if (_max.doubleValue() > _min.doubleValue()) {
lmin = _min.doubleValue();
lmax = _max.doubleValue();
} else {
lmax = _min.doubleValue();
lmin = _max.doubleValue();
}
// get min and max probabilites that are possible
try {
lpmin = llimitedNormal.cumulativeProbability(lmin);
lpmax = llimitedNormal.cumulativeProbability(lmax);
lpfactor = lpmax - lpmin;
} catch (MathException e) {
e.printStackTrace();
}
}
}
lpfactor = lpmax - lpmin;
/**
 * Normal distribution whose samples can be restricted by a lower and/or
 * upper limit: sampling draws a uniform value from the admissible
 * probability range [pmin, pmax] and maps it through the inverse CDF.
 */
public class LimitedNormalDistribution implements Distribution {

	private NormalDistributionImpl limitedNormal;

	private double mu;

	private double sigma;

	private boolean limitedMin;

	private boolean limitedMax;

	private double min;

	private double max;

	/** Cumulative probability at the lower limit (0 if unlimited). */
	private double pmin = 0;

	/** Cumulative probability at the upper limit (1 if unlimited). */
	private double pmax = 1;

	// pfactor and pmin are used to determine the range in which the random
	// values are allowed.
	private double pfactor;

	private int limitType;

	private LimitedNormalConfigurer conf;

	private final static int LIMIT_NORMAL_DIST_NONE = 0;

	private final static int LIMIT_NORMAL_DIST_MIN = 1;

	private final static int LIMIT_NORMAL_DIST_MAX = 2;

	private final static int LIMIT_NORMAL_DIST_BOTH = 3;

	@XMLConfigurableConstructor({"mu", "sigma", "min", "max", "limitedMin", "limitedMax"})
	public LimitedNormalDistribution(double mu, double sigma, double min,
			double max, boolean limitedMin, boolean limitedMax) {
		conf = new LimitedNormalConfigurer(mu, sigma, min, max, limitedMin, limitedMax);
		config(conf);
	}

	/**
	 * Initializes the distribution from the given configuration: determines
	 * the limit type and precomputes the admissible probability range
	 * [pmin, pmax] used by {@link #returnValue()}.
	 */
	public void config(LimitedNormalConfigurer dc) {
		mu = dc.getMu();
		sigma = dc.getSigma();
		limitedMin = dc.isLimitedMin();
		limitedMax = dc.isLimitedMax();
		limitedNormal = new NormalDistributionImpl(mu, sigma);

		if (limitedMin == false) {
			if (limitedMax == false) {
				limitType = LIMIT_NORMAL_DIST_NONE;
			} else {
				// only max is limted
				limitType = LIMIT_NORMAL_DIST_MAX;
				max = dc.getMax();
				try {
					pmax = limitedNormal.cumulativeProbability(max);
				} catch (MathException e) {
					e.printStackTrace();
				}
			}
		} else {
			if (limitedMax == false) {
				// only min is limited.
				limitType = LIMIT_NORMAL_DIST_MIN;
				min = dc.getMin();
				try {
					pmin = limitedNormal.cumulativeProbability(min);
				} catch (MathException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}
			} else {
				// both sides limited.
				limitType = LIMIT_NORMAL_DIST_BOTH;
				// make sure min is really smaller than max.
				// FIX: compare the configured values instead of the fields
				// 'max' and 'min' - the old check "max > min" read both
				// fields before they were assigned (0.0 each) and therefore
				// always swapped the configured bounds. The static variant
				// below already did this correctly.
				if (dc.getMax() > dc.getMin()) {
					min = dc.getMin();
					max = dc.getMax();
				} else {
					max = dc.getMin();
					min = dc.getMax();
				}
				// get min and max probabilites that are possible
				try {
					pmin = limitedNormal.cumulativeProbability(min);
					pmax = limitedNormal.cumulativeProbability(max);
					pfactor = pmax - pmin;
				} catch (MathException e) {
					e.printStackTrace();
				}
			}
		}
		pfactor = pmax - pmin;
	}

	/**
	 * Draws a uniform value from [pmin, pmin + pfactor] and maps it through
	 * the inverse CDF; returns 0 on a math error.
	 */
	public double returnValue() {
		double random = pmin
				+ Randoms.getRandom(LimitedNormalDistribution.class)
						.nextDouble() * pfactor;
		double result;
		try {
			result = limitedNormal.inverseCumulativeProbability(random);
		} catch (MathException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			result = 0;
		}
		return result;
	}

	/**
	 * @return Returns the limitType.
	 */
	public int getLimitType() {
		return limitType;
	}

	/**
	 * Returns a random value that is distributed as a Normal Distribution with
	 * an upper and lower limit.
	 *
	 * @param _mu
	 *            average
	 * @param _sigma
	 *            standard deviation
	 * @param _min
	 *            lower limit, set to "null", if no limit
	 * @param _max
	 *            upper limit, set to "null", if no limit
	 * @return as double
	 */
	public static double returnValue(double _mu, double _sigma, Double _min,
			Double _max) {
		int llimitType;
		double lmax;
		double lmin;
		double lpmax = 1;
		double lpmin = 0;
		double lpfactor;
		NormalDistributionImpl llimitedNormal = new NormalDistributionImpl(_mu,
				_sigma);

		if (_min == null) {
			if (_max == null) {
				llimitType = LIMIT_NORMAL_DIST_NONE;
			} else {
				// only max is limted
				llimitType = LIMIT_NORMAL_DIST_MAX;
				lmax = _max.doubleValue();
				try {
					lpmax = llimitedNormal.cumulativeProbability(lmax);
				} catch (MathException e) {
					e.printStackTrace();
				}
			}
		} else {
			if (_max == null) {
				// only min is limited.
				llimitType = LIMIT_NORMAL_DIST_MIN;
				lmin = _min.doubleValue();
				try {
					lpmin = llimitedNormal.cumulativeProbability(lmin);
				} catch (MathException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}
			} else {
				// both sides limited.
				llimitType = LIMIT_NORMAL_DIST_BOTH;
				// make sure min is really smaller than max.
				if (_max.doubleValue() > _min.doubleValue()) {
					lmin = _min.doubleValue();
					lmax = _max.doubleValue();
				} else {
					lmax = _min.doubleValue();
					lmin = _max.doubleValue();
				}
				// get min and max probabilites that are possible
				try {
					lpmin = llimitedNormal.cumulativeProbability(lmin);
					lpmax = llimitedNormal.cumulativeProbability(lmax);
					lpfactor = lpmax - lpmin;
				} catch (MathException e) {
					e.printStackTrace();
				}
			}
		}
		lpfactor = lpmax - lpmin;
		double lrandom = lpmin
				+ Randoms.getRandom(LimitedNormalDistribution.class)
						.nextDouble()
				* lpfactor;
		double lresult;
		try {
			lresult = llimitedNormal.inverseCumulativeProbability(lrandom);
		} catch (MathException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			lresult = 0;
		}
		return lresult;
	}

	/** Immutable parameter holder for {@link #config(LimitedNormalConfigurer)}. */
	private class LimitedNormalConfigurer {

		private double mu;

		private double sigma;

		private double min;

		private double max;

		private boolean limitedMin;

		private boolean limitedMax;

		public LimitedNormalConfigurer(double mu, double sigma, double min,
				double max, boolean limitedMin, boolean limitedMax) {
			super();
			this.mu = mu;
			this.sigma = sigma;
			this.min = min;
			this.max = max;
			this.limitedMin = limitedMin;
			this.limitedMax = limitedMax;
		}

		/**
		 * @return Returns the mu.
		 */
		public double getMu() {
			return mu;
		}

		/**
		 * @return Returns the sigma.
		 */
		public double getSigma() {
			return sigma;
		}

		/**
		 * @return Returns the max.
		 */
		public double getMax() {
			return max;
		}

		/**
		 * @return Returns the min.
		 */
		public double getMin() {
			return min;
		}

		/**
		 * @return Returns the limitedMax.
		 */
		public boolean isLimitedMax() {
			return limitedMax;
		}

		/**
		 * @return Returns the limitedMin.
		 */
		public boolean isLimitedMin() {
			return limitedMin;
		}

		@Override
		public String toString() {
			return "LimitedNormalDistribution [mu=" + mu + ", sigma=" + sigma
					+ ", min=" + min + ", max=" + max + ", limitedMin="
					+ limitedMin + ", limitedMax=" + limitedMax + "]";
		}
	}

	public String toString(){
		return conf.toString();
	}
}
* lpfactor;
double lresult;
try {
lresult = llimitedNormal.inverseCumulativeProbability(lrandom);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
lresult = 0;
}
return lresult;
}
private class LimitedNormalConfigurer {
private double mu;
private double sigma;
private double min;
private double max;
private boolean limitedMin;
private boolean limitedMax;
public LimitedNormalConfigurer(double mu, double sigma, double min,
double max, boolean limitedMin, boolean limitedMax) {
super();
this.mu = mu;
this.sigma = sigma;
this.min = min;
this.max = max;
this.limitedMin = limitedMin;
this.limitedMax = limitedMax;
}
/**
* @return Returns the mu.
*/
public double getMu() {
return mu;
}
/**
* @return Returns the sigma.
*/
public double getSigma() {
return sigma;
}
/**
* @return Returns the max.
*/
public double getMax() {
return max;
}
/**
* @return Returns the min.
*/
public double getMin() {
return min;
}
/**
* @return Returns the limitedMax.
*/
public boolean isLimitedMax() {
return limitedMax;
}
/**
* @return Returns the limitedMin.
*/
public boolean isLimitedMin() {
return limitedMin;
}
@Override
public String toString() {
return "LimitedNormalDistribution [mu=" + mu + ", sigma=" + sigma
+ ", min=" + min + ", max=" + max + ", limitedMin="
+ limitedMin + ", limitedMax=" + limitedMax + "]";
}
}
public String toString(){
return conf.toString();
}
}
......@@ -19,67 +19,68 @@
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
import umontreal.iro.lecuyer.probdist.LognormalDist;
package de.tud.kom.p2psim.impl.util.stat.distributions;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class LognormalDistribution implements Distribution {
private double mu;
private double sigma;
private LognormalDist distr;
@Override
public String toString() {
return "LognormalDistribution [mu=" + mu + ", sigma=" + sigma + "]";
}
@XMLConfigurableConstructor({"mu", "sigma"})
public LognormalDistribution(double mu, double sigma) {
this.mu = mu;
this.sigma = sigma;
distr = new LognormalDist(mu, sigma);
}
public double returnValue() {
double random = Randoms.getRandom(this).nextDouble();
double result = 0;
try {
result = distr.inverseF(random);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return result;
}
/**
* returns a random value lognormally distributed with mu = _mu and sigma =
* _sigma.
*
* @param _mu
* @param _sigma
* @return as double
*/
public static double returnValue(double _mu, double _sigma) {
try {
LognormalDist d = new LognormalDist(_mu, _sigma);
import umontreal.iro.lecuyer.probdist.LognormalDist;
public class LognormalDistribution implements Distribution {

	// Parameters of the lognormal distribution.
	private double mu;

	private double sigma;

	// SSJ distribution object used for inverse-CDF sampling.
	private LognormalDist distr;

	@Override
	public String toString() {
		return "LognormalDistribution [mu=" + mu + ", sigma=" + sigma + "]";
	}

	@XMLConfigurableConstructor({"mu", "sigma"})
	public LognormalDistribution(double mu, double sigma) {
		this.mu = mu;
		this.sigma = sigma;
		distr = new LognormalDist(mu, sigma);
	}

	/**
	 * Returns a random value distributed as the configured lognormal
	 * distribution (inverse transform sampling); 0 on error.
	 */
	public double returnValue() {
		double random = Randoms.getRandom(LognormalDistribution.class)
				.nextDouble();
		double result = 0;
		try {
			result = distr.inverseF(random);
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		return result;
	}

	/**
	 * returns a random value lognormally distributed with mu = _mu and sigma =
	 * _sigma.
	 *
	 * @param _mu
	 *            scale parameter of the lognormal distribution
	 * @param _sigma
	 *            shape parameter of the lognormal distribution
	 * @return as double, 0 on error
	 */
	public static double returnValue(double _mu, double _sigma) {
		try {
			LognormalDist d = new LognormalDist(_mu, _sigma);
			return d.inverseF(Randoms.getRandom(LognormalDistribution.class)
					.nextDouble());
		} catch (Exception e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
			return 0;
		}
	}
}
.nextDouble());
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
}
......@@ -19,93 +19,94 @@
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
import umontreal.iro.lecuyer.probdist.LognormalDist;
package de.tud.kom.p2psim.impl.util.stat.distributions;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class MixedLogNormalDistribution implements Distribution {
@Override
public String toString() {
return "MixedLogNormalDistribution [weight1=" + weight1 + ", mu1="
+ mu1 + ", sigma1=" + sigma1 + ", weight2=" + weight2
+ ", mu2=" + mu2 + ", sigma2=" + sigma2 + "]";
}
private double weight1;
private double mu1;
private double sigma1;
private double weight2;
private double mu2;
private double sigma2;
private LognormalDist distr1;
private LognormalDist distr2;
@XMLConfigurableConstructor({"mu1", "mu2", "sigma1", "sigma2", "weight1", "weight2"})
public MixedLogNormalDistribution(double mu1, double mu2, double sigma1, double sigma2, double weight1, double weight2) {
this.mu1 = mu1;
this.mu2 = mu2;
this.sigma1 = sigma1;
this.sigma2 = sigma2;
this.weight1 = weight1;
this.weight2 = weight2;
distr1 = new LognormalDist(mu1, sigma1);
distr2 = new LognormalDist(mu2, sigma2);
}
public double returnValue() {
double random = Randoms.getRandom(this).nextDouble();
double result = 0;
try {
result = weight1 * distr1.inverseF(random) + weight2
* distr2.inverseF(random);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return result;
}
/**
* returns a random value distributed after a mixed lognormal distribution:
* _weight1 * lognormal1 + _weight2 * lognormal2 with lorgnormal1(_mu1,
* _sigma1) and with lorgnormal1(_mu2, _sigma2).
*
* @param _mu1
* @param _sigma1
* @param _weight1
* @param _mu2
* @param _sigma2
* @param _weight2
* @return
*/
public static double returnValue(double _mu1, double _sigma1,
double _weight1, double _mu2, double _sigma2, double _weight2) {
try {
LognormalDist d1 = new LognormalDist(_mu1, _sigma1);
LognormalDist d2 = new LognormalDist(_mu2, _sigma2);
import umontreal.iro.lecuyer.probdist.LognormalDist;
public class MixedLogNormalDistribution implements Distribution {
@Override
public String toString() {
return "MixedLogNormalDistribution [weight1=" + weight1 + ", mu1="
+ mu1 + ", sigma1=" + sigma1 + ", weight2=" + weight2
+ ", mu2=" + mu2 + ", sigma2=" + sigma2 + "]";
}
private double weight1;
private double mu1;
private double sigma1;
private double weight2;
private double mu2;
private double sigma2;
private LognormalDist distr1;
private LognormalDist distr2;
@XMLConfigurableConstructor({"mu1", "mu2", "sigma1", "sigma2", "weight1", "weight2"})
public MixedLogNormalDistribution(double mu1, double mu2, double sigma1, double sigma2, double weight1, double weight2) {
this.mu1 = mu1;
this.mu2 = mu2;
this.sigma1 = sigma1;
this.sigma2 = sigma2;
this.weight1 = weight1;
this.weight2 = weight2;
distr1 = new LognormalDist(mu1, sigma1);
distr2 = new LognormalDist(mu2, sigma2);
}
public double returnValue() {
double random = Randoms.getRandom(MixedLogNormalDistribution.class)
.nextDouble();
double result = 0;
try {
result = weight1 * distr1.inverseF(random) + weight2
* distr2.inverseF(random);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return result;
}
/**
* returns a random value distributed after a mixed lognormal distribution:
* _weight1 * lognormal1 + _weight2 * lognormal2 with lorgnormal1(_mu1,
* _sigma1) and with lorgnormal1(_mu2, _sigma2).
*
* @param _mu1
* @param _sigma1
* @param _weight1
* @param _mu2
* @param _sigma2
* @param _weight2
* @return
*/
public static double returnValue(double _mu1, double _sigma1,
double _weight1, double _mu2, double _sigma2, double _weight2) {
try {
LognormalDist d1 = new LognormalDist(_mu1, _sigma1);
LognormalDist d2 = new LognormalDist(_mu2, _sigma2);
double random = Randoms.getRandom(MixedLogNormalDistribution.class)
.nextDouble();
return _weight1 * d1.inverseF(random) + _weight2
* d2.inverseF(random);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
}
.nextDouble();
return _weight1 * d1.inverseF(random) + _weight2
* d2.inverseF(random);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
}
/*
* Copyright (c) 2005-2011 KOM - Multimedia Communications Lab
*
* This file is part of PeerfactSim.KOM.
*
* PeerfactSim.KOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* PeerfactSim.KOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with PeerfactSim.KOM. If not, see <http://www.gnu.org/licenses/>.
*
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
import java.util.Random;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.NormalDistributionImpl;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class NormalDistribution implements Distribution {
private NormalDistributionImpl normal;
private Random randomGen = Randoms.getRandom(this);
private double mu;
private double sigma;
@XMLConfigurableConstructor({"mu", "sigma"})
public NormalDistribution(double mu, double sigma) {
this.mu = mu;
this.sigma = sigma;
normal = new NormalDistributionImpl(mu, sigma);
}
@Override
public double returnValue() {
double random = randomGen.nextDouble();
double result;
try {
result = normal.inverseCumulativeProbability(random);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
result = 0;
}
return result;
}
@Override
public String toString() {
return "NormalDistribution [mu=" + mu + ", sigma=" + sigma + "]";
}
/**
* returns a random value normally distributed with mu = _mu and sigma =
* _sigma.
*
* @param _mu
* @param _sigma
* @return as double
*/
public static double returnValue(double _mu, double _sigma) {
try {
NormalDistributionImpl d = new NormalDistributionImpl(_mu, _sigma);
return d.inverseCumulativeProbability(Randoms.getRandom(
NormalDistribution.class)
.nextDouble());
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
}
/*
* Copyright (c) 2005-2011 KOM - Multimedia Communications Lab
*
* This file is part of PeerfactSim.KOM.
*
* PeerfactSim.KOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* PeerfactSim.KOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with PeerfactSim.KOM. If not, see <http://www.gnu.org/licenses/>.
*
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
import java.util.Random;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.NormalDistributionImpl;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class NormalDistribution implements Distribution {
private NormalDistributionImpl normal;
private Random randomGen = Randoms.getRandom(NormalDistribution.class);
private double mu;
private double sigma;
@XMLConfigurableConstructor({"mu", "sigma"})
public NormalDistribution(double mu, double sigma) {
this.mu = mu;
this.sigma = sigma;
normal = new NormalDistributionImpl(mu, sigma);
}
@Override
public double returnValue() {
double random = randomGen.nextDouble();
double result;
try {
result = normal.inverseCumulativeProbability(random);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
result = 0;
}
return result;
}
@Override
public String toString() {
return "NormalDistribution [mu=" + mu + ", sigma=" + sigma + "]";
}
/**
* returns a random value normally distributed with mu = _mu and sigma =
* _sigma.
*
* @param _mu
* @param _sigma
* @return as double
*/
public static double returnValue(double _mu, double _sigma) {
try {
NormalDistributionImpl d = new NormalDistributionImpl(_mu, _sigma);
return d.inverseCumulativeProbability(Randoms.getRandom(
NormalDistribution.class)
.nextDouble());
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
}
......@@ -19,64 +19,64 @@
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
package de.tud.kom.p2psim.impl.util.stat.distributions;
import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.PoissonDistributionImpl;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class PoissonDistribution implements Distribution {
private double lambda;
private PoissonDistributionImpl poisson;
@XMLConfigurableConstructor({"lambda"})
public PoissonDistribution(double lambda){
this.lambda = lambda;
this.poisson = new PoissonDistributionImpl(lambda);
}
// returns the x-value for a random value in the cdf
public double returnValue() {
double random = Randoms.getRandom(this)
.nextDouble();
int result;
try {
result = poisson.inverseCumulativeProbability(random);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
result = 0;
}
return result;
}
/**
* returns a random value Poisson distributed with lamda = _lamda.
* @param _lamda
* @return as double
*/
public static double returnValue(double _lamda) {
try {
PoissonDistributionImpl d = new PoissonDistributionImpl(_lamda);
public class PoissonDistribution implements Distribution {
private double lambda;
private PoissonDistributionImpl poisson;
@XMLConfigurableConstructor({"lambda"})
public PoissonDistribution(double lambda){
this.lambda = lambda;
this.poisson = new PoissonDistributionImpl(lambda);
}
// returns the x-value for a random value in the cdf
public double returnValue() {
double random = Randoms.getRandom(PoissonDistribution.class)
.nextDouble();
int result;
try {
result = poisson.inverseCumulativeProbability(random);
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
result = 0;
}
return result;
}
/**
* returns a random value Poisson distributed with lamda = _lamda.
* @param _lamda
* @return as double
*/
public static double returnValue(double _lamda) {
try {
PoissonDistributionImpl d = new PoissonDistributionImpl(_lamda);
return d.inverseCumulativeProbability(Randoms.getRandom(
PoissonDistribution.class).nextDouble());
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
@Override
public String toString() {
return "PoissonDistribution [lambda=" + lambda + "]";
}
}
PoissonDistribution.class).nextDouble());
} catch (MathException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return 0;
}
}
@Override
public String toString() {
return "PoissonDistribution [lambda=" + lambda + "]";
}
}
......@@ -19,61 +19,62 @@
*/
package de.tud.kom.p2psim.impl.util.stat.distributions;
package de.tud.kom.p2psim.impl.util.stat.distributions;
import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class UniformDistribution implements Distribution {
@Override
public String toString() {
return "UniformDistribution [min=" + min + ", max=" + max + "]";
}
private double min;
private double max;
private double factor;
@XMLConfigurableConstructor({"min", "max"})
public UniformDistribution(double min, double max) {
this.min = Math.min(min, max);
this.max = Math.max(min, max);
factor = Math.abs(max - min);
}
/**
* Delivers a random value distributed as the configured distribution.
*/
public double returnValue() {
return min + factor * Randoms.getRandom(this).nextDouble();
}
/**
* delivers a random value that is uniformly distributed between the _min
* and the _max value.
*
* @param _min
* @param _max
* @return random value as double
*/
public static double returnValue(double _min, double _max) {
double lmin, lmax, lfactor;
if (_min < _max) {
lmin = _min;
lmax = _max;
} else {
lmin = _max;
lmax = _min;
}
lfactor = Math.abs(lmax - lmin);
public class UniformDistribution implements Distribution {
@Override
public String toString() {
return "UniformDistribution [min=" + min + ", max=" + max + "]";
}
private double min;
private double max;
private double factor;
@XMLConfigurableConstructor({"min", "max"})
public UniformDistribution(double min, double max) {
this.min = Math.min(min, max);
this.max = Math.max(min, max);
factor = Math.abs(max - min);
}
/**
* Delivers a random value distributed as the configured distribution.
*/
public double returnValue() {
return min + factor
* Randoms.getRandom(UniformDistribution.class).nextDouble();
}
/**
* delivers a random value that is uniformly distributed between the _min
* and the _max value.
*
* @param _min
* @param _max
* @return random value as double
*/
public static double returnValue(double _min, double _max) {
double lmin, lmax, lfactor;
if (_min < _max) {
lmin = _min;
lmax = _max;
} else {
lmin = _max;
lmax = _min;
}
lfactor = Math.abs(lmax - lmin);
return lmin + lfactor
* Randoms.getRandom(UniformDistribution.class).nextDouble();
}
}
* Randoms.getRandom(UniformDistribution.class).nextDouble();
}
}
......@@ -66,7 +66,8 @@ public class ZipfDistribution implements Distribution {
// rank = 1 ... maximum_Number_Of_Ranks => 1/rank = 0..1
return 1 / (Math
.pow(1 / (Randoms.getRandom(this).nextDouble() * harmonicNormFactor),
.pow(1 / (Randoms.getRandom(ZipfDistribution.class).nextDouble()
* harmonicNormFactor),
1 / zipfExponent));
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment