Commit 2e28bc46 authored by Björn Richerzhagen's avatar Björn Richerzhagen
Browse files

Merge branch 'master' into tm/sumo-integration

Conflicts:
	src/de/tud/kom/p2psim/impl/topology/DefaultTopologyComponent.java
parents 8aeec4ee 686eca40
...@@ -65,5 +65,10 @@ public class TransmissionControlProtocolDummy extends AbstractTransProtocol { ...@@ -65,5 +65,10 @@ public class TransmissionControlProtocolDummy extends AbstractTransProtocol {
public int getHeaderSize() { public int getHeaderSize() {
return TransProtocol.TCP.getHeaderSize(); return TransProtocol.TCP.getHeaderSize();
} }
@Override
public TransProtocol getProtocol() {
return TransProtocol.TCP;
}
} }
...@@ -24,6 +24,7 @@ import de.tud.kom.p2psim.api.analyzer.MessageAnalyzer.Reason; ...@@ -24,6 +24,7 @@ import de.tud.kom.p2psim.api.analyzer.MessageAnalyzer.Reason;
import de.tud.kom.p2psim.api.common.SimHost; import de.tud.kom.p2psim.api.common.SimHost;
import de.tud.kom.p2psim.api.network.NetProtocol; import de.tud.kom.p2psim.api.network.NetProtocol;
import de.tud.kom.p2psim.api.network.SimNetInterface; import de.tud.kom.p2psim.api.network.SimNetInterface;
import de.tud.kom.p2psim.api.transport.TransProtocol;
import de.tud.kom.p2psim.impl.transport.UDPMessage; import de.tud.kom.p2psim.impl.transport.UDPMessage;
import de.tud.kom.p2psim.impl.transport.modular.AbstractTransProtocol; import de.tud.kom.p2psim.impl.transport.modular.AbstractTransProtocol;
import de.tudarmstadt.maki.simonstrator.api.Message; import de.tudarmstadt.maki.simonstrator.api.Message;
...@@ -65,4 +66,9 @@ public class UserDatagramProtocol extends AbstractTransProtocol { ...@@ -65,4 +66,9 @@ public class UserDatagramProtocol extends AbstractTransProtocol {
return 8; return 8;
} }
@Override
public TransProtocol getProtocol() {
return TransProtocol.UDP;
}
} }
package de.tud.kom.p2psim.impl.util;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
/**
 * Reader for separator-delimited (CSV-style) files. To use this reader you
 * need to implement {@link #parse(String[])}, which turns the entries of one
 * split line into an object.
 *
 * Note: the separator is passed verbatim to {@link String#split(String)} and
 * is therefore interpreted as a regular expression, while the line filter
 * uses a literal {@link String#contains(CharSequence)} check. Plain
 * separators such as "," or ";" behave identically in both.
 *
 * @author Clemens Krug
 */
public abstract class CSVReader<T>
{
	// Path of the file to read.
	private final String filename;

	// Column separator; used literally for filtering and as a regex for
	// splitting (see class comment).
	private final String SEP;

	/**
	 * @param filename path of the CSV file to read
	 * @param SEP column separator (treated as a regex when splitting)
	 */
	public CSVReader(String filename, String SEP)
	{
		this.filename = filename;
		this.SEP = SEP;
	}

	/**
	 * Reads the data into a list. Every line containing the separator is
	 * split and handed to {@link #parse(String[])}; lines for which parse
	 * returns null are skipped. On an I/O error the stack trace is printed
	 * and the entries read so far are returned (best effort).
	 *
	 * @return A list of the generated objects, possibly empty.
	 */
	public List<T> readData()
	{
		List<T> data = new LinkedList<>();
		// try-with-resources guarantees the reader is closed. FileReader
		// uses the platform default charset (kept for compatibility).
		try (BufferedReader csv = new BufferedReader(new FileReader(filename))) {
			String line;
			// readLine() == null is the reliable end-of-stream signal;
			// BufferedReader.ready() only reports buffered availability.
			while ((line = csv.readLine()) != null) {
				if (line.contains(SEP)) {
					T entry = parse(line.split(SEP));
					if (entry != null) {
						data.add(entry);
					}
				}
			}
		} catch (IOException e) {
			// Best effort: keep whatever was read before the failure.
			e.printStackTrace();
		}
		return data;
	}

	/**
	 * Parses one line of csv entries into the desired type of object.
	 *
	 * @param parts The csv entries of a line.
	 * @return Object of desired type, or null to skip this line.
	 */
	public abstract T parse(String[] parts);
}
...@@ -168,6 +168,41 @@ public class MeasurementDAO extends DAO { ...@@ -168,6 +168,41 @@ public class MeasurementDAO extends DAO {
groupMetric, observationDuration, describesWholeSimulation); groupMetric, observationDuration, describesWholeSimulation);
addToPersistQueue(measurement); addToPersistQueue(measurement);
} }
/**
* Stores a statistical description of a series of values for group of
* hosts and a given spatial coordinate.
*
* @param metricDesc
* The {@link MetricDescription} which describes the metric.
* @param groupName
* The host group
* @param time
* A time for the measurement in simulation time
* @param stats
* the {@link DescriptiveStatistics} object used as input
* @param observationDuration
* duration of this observation in simulation time
* @param describesWholeSimulation
* true, if this measurement is a description of the WHOLE
* simulation
* @param locationX
* x coordinate for spatial sampling
* @param locationY
* y coordinate for spatial sampling
*/
public static void storeSpatialGroupStatisticsMeasurement(
MetricDescription metricDesc, String groupName, long time,
DescriptiveStatistics stats, long observationDuration,
boolean describesWholeSimulation, int locationX, int locationY) {
Metric metric = MetricDAO.lookupStatisticsMetric(metricDesc);
GroupMetric groupMetric = GroupMetricDAO.lookupGroupMetric(metric,
groupName);
MeasurementStatistic measurement = new MeasurementStatistic(time, stats,
groupMetric, observationDuration, describesWholeSimulation, locationX, locationY);
addToPersistQueue(measurement);
}
/** /**
* Store a list-based measurement with a key (i.e., as a * Store a list-based measurement with a key (i.e., as a
......
...@@ -118,12 +118,15 @@ public class MeasurementStatistic implements GroupMetricBound { ...@@ -118,12 +118,15 @@ public class MeasurementStatistic implements GroupMetricBound {
@Column(nullable = true, name = "[perc5]") @Column(nullable = true, name = "[perc5]")
private Double perc5; // 5 private Double perc5; // 5
@Column(nullable = true, name = "[skewness]") @Column(nullable = true, name = "[locationX]")
private Double skewness; private Integer locationX;
@Column(nullable = true, name = "[kurtosis]") @Column(nullable = true, name = "[locationY]")
private Double kurtosis; private Integer locationY;
@Column(nullable = true, name = "[isSpatial]")
private boolean isSpatial;
/** /**
* Mapping to group metric * Mapping to group metric
...@@ -155,6 +158,36 @@ public class MeasurementStatistic implements GroupMetricBound { ...@@ -155,6 +158,36 @@ public class MeasurementStatistic implements GroupMetricBound {
this.groupMetric = groupMetric; this.groupMetric = groupMetric;
} }
/**
* Creates a {@link Measurement}-Object using the provided
* {@link DescriptiveStatistics} object, with spatial data attached.
*
* @param time
* The simulation time for to this measurement as Date
* @param stats
* the {@link DescriptiveStatistics} object
* @param hostMetric
* The reference to the {@link HostMetric}-Object, which
* describes this metric. Is used for the mapping.
* @param observationDuration
* duration of the observation
* @param describesWholeSimulation
* true, if this measurement describes the whole simulation
* @param locationX
* x coordinate for spatial sampling
* @param locationY
* y coordinate for spatial sampling
*/
public MeasurementStatistic(long time, DescriptiveStatistics stats,
GroupMetric groupMetric, long observationDuration,
boolean describesWholeSimulation, int locationX, int locationY) {
this(time, stats, observationDuration, describesWholeSimulation);
this.groupMetric = groupMetric;
this.locationX = locationX;
this.locationY = locationY;
this.isSpatial = true;
}
/** /**
* Internal - write statistics * Internal - write statistics
* *
...@@ -182,9 +215,8 @@ public class MeasurementStatistic implements GroupMetricBound { ...@@ -182,9 +215,8 @@ public class MeasurementStatistic implements GroupMetricBound {
this.perc97 = checkForSpecialNumbers(stats.getPercentile(97.7)); this.perc97 = checkForSpecialNumbers(stats.getPercentile(97.7));
this.perc5 = checkForSpecialNumbers(stats.getPercentile(5)); this.perc5 = checkForSpecialNumbers(stats.getPercentile(5));
this.perc95 = checkForSpecialNumbers(stats.getPercentile(95)); this.perc95 = checkForSpecialNumbers(stats.getPercentile(95));
this.skewness = checkForSpecialNumbers(stats.getSkewness());
this.kurtosis = checkForSpecialNumbers(stats.getKurtosis());
this.std = checkForSpecialNumbers(stats.getStandardDeviation()); this.std = checkForSpecialNumbers(stats.getStandardDeviation());
this.isSpatial = false;
} }
/** /**
......
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.FileReader; import java.io.FileReader;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Random; import java.util.Random;
import java.util.TreeMap; import java.util.TreeMap;
import de.tudarmstadt.maki.simonstrator.api.Monitor; import de.tudarmstadt.maki.simonstrator.api.Monitor;
import de.tudarmstadt.maki.simonstrator.api.Monitor.Level; import de.tudarmstadt.maki.simonstrator.api.Monitor.Level;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
/** /**
* This distribution loads CDF data from a given CSV file and provides random * This distribution loads CDF data from a given CSV file and provides random
* values out of the loaded data set. * values out of the loaded data set.
* *
* TODO: better scaling! Scaled with the highest value! But it gives many more * TODO: better scaling! Scaled with the highest value! But it gives many more
* cases for scaling in CDF. * cases for scaling in CDF.
* *
* @author Fabio Zöllner * @author Fabio Zöllner
* *
*/ */
public class CustomDistribution implements Distribution { public class CustomDistribution implements Distribution {
private Random rand = Randoms.getRandom(this); private Random rand = Randoms.getRandom(CustomDistribution.class);
private String csvFile = ""; private String csvFile = "";
private boolean scale = false; private boolean scale = false;
private double scaleFactor = 1; private double scaleFactor = 1;
private TreeMap<Double, Double> cdfData = new TreeMap<Double, Double>(); private TreeMap<Double, Double> cdfData = new TreeMap<Double, Double>();
@XMLConfigurableConstructor({ "csvFile" }) @XMLConfigurableConstructor({ "csvFile" })
public CustomDistribution(String csvFile) { public CustomDistribution(String csvFile) {
readCDFData(csvFile); readCDFData(csvFile);
} }
public CustomDistribution(TreeMap<Double, Double> cdfData) { public CustomDistribution(TreeMap<Double, Double> cdfData) {
this.cdfData = cdfData; this.cdfData = cdfData;
} }
public CustomDistribution(TreeMap<Double, Double> cdfData, double scaleFactor) { public CustomDistribution(TreeMap<Double, Double> cdfData, double scaleFactor) {
this(cdfData); this(cdfData);
this.scaleFactor = scaleFactor; this.scaleFactor = scaleFactor;
this.scale = true; this.scale = true;
scaleCDF(scaleFactor); scaleCDF(scaleFactor);
} }
public int getSize() { public int getSize() {
return cdfData.size(); return cdfData.size();
} }
@Override @Override
public double returnValue() { public double returnValue() {
double randomDouble = rand.nextDouble(); double randomDouble = rand.nextDouble();
Map.Entry<Double, Double> greaterOrEqualEntry = cdfData Map.Entry<Double, Double> greaterOrEqualEntry = cdfData
.ceilingEntry(randomDouble); .ceilingEntry(randomDouble);
if (greaterOrEqualEntry == null) { if (greaterOrEqualEntry == null) {
Monitor.log(CustomDistribution.class, Level.WARN, Monitor.log(CustomDistribution.class, Level.WARN,
"No entry with a key greater or equal to " + randomDouble "No entry with a key greater or equal to " + randomDouble
+ " has been found. (Has the data been loaded?)"); + " has been found. (Has the data been loaded?)");
return 0; return 0;
} else { } else {
return greaterOrEqualEntry.getValue(); return greaterOrEqualEntry.getValue();
} }
} }
/** /**
* Reads a simple two column comma separated list of doubles and returns * Reads a simple two column comma separated list of doubles and returns
* them in a TreeMap. * them in a TreeMap.
* *
* @param csvFilename * @param csvFilename
* The path to the CSV file * The path to the CSV file
* @return The read double values as a TreeMap * @return The read double values as a TreeMap
*/ */
private void readCDFData(String csvFilename) { private void readCDFData(String csvFilename) {
Monitor.log(CustomDistribution.class, Level.INFO, Monitor.log(CustomDistribution.class, Level.INFO,
"Reading CDF data from CSV file %s", csvFilename); "Reading CDF data from CSV file %s", csvFilename);
cdfData.clear(); cdfData.clear();
boolean entrySuccessfullyRead = false; boolean entrySuccessfullyRead = false;
double scaleFactor = Double.MIN_VALUE; double scaleFactor = Double.MIN_VALUE;
long counter = 0; long counter = 0;
BufferedReader csv = null; BufferedReader csv = null;
try { try {
csv = new BufferedReader(new FileReader(csvFilename)); csv = new BufferedReader(new FileReader(csvFilename));
while (csv.ready()) { while (csv.ready()) {
counter++; counter++;
String line = csv.readLine(); String line = csv.readLine();
if (line.indexOf(",") > -1) { if (line.indexOf(",") > -1) {
String[] parts = line.split(","); String[] parts = line.split(",");
if (parts.length == 2) { if (parts.length == 2) {
try { try {
Double x = Double.parseDouble(parts[0]); Double x = Double.parseDouble(parts[0]);
Double cf = Double.parseDouble(parts[1]); Double cf = Double.parseDouble(parts[1]);
scaleFactor = Math.max(scaleFactor, x); scaleFactor = Math.max(scaleFactor, x);
cdfData.put(cf, x); cdfData.put(cf, x);
entrySuccessfullyRead = true; entrySuccessfullyRead = true;
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
// Ignore leading comments // Ignore leading comments
if (entrySuccessfullyRead) { if (entrySuccessfullyRead) {
Monitor.log(CustomDistribution.class, Monitor.log(CustomDistribution.class,
Level.WARN, Level.WARN,
"Couldn't parse cdf entry %s", line); "Couldn't parse cdf entry %s", line);
} }
} }
} else { } else {
throw new AssertionError("To many columns in CSV."); throw new AssertionError("To many columns in CSV.");
} }
} }
} }
} catch (FileNotFoundException e) { } catch (FileNotFoundException e) {
throw new RuntimeException( throw new RuntimeException(
"Could not open CSV file with CDF data (\"" + csvFilename "Could not open CSV file with CDF data (\"" + csvFilename
+ "\")"); + "\")");
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException("Failed to read the CDF data (\"" throw new RuntimeException("Failed to read the CDF data (\""
+ csvFilename + "\")"); + csvFilename + "\")");
} finally { } finally {
if (csv != null) if (csv != null)
try { try {
csv.close(); csv.close();
} catch (IOException e) { } catch (IOException e) {
// //
} }
} }
Monitor.log(CustomDistribution.class, Level.INFO, Monitor.log(CustomDistribution.class, Level.INFO,
"Read " + cdfData.size() + " unique entries from " "Read " + cdfData.size() + " unique entries from "
+ csvFilename + "with " + counter + csvFilename + "with " + counter
+ " and got a scaling factor of " + scaleFactor); + " and got a scaling factor of " + scaleFactor);
/* /*
* Scale entries to a value range of ]0,1] if scale == true. This has to * Scale entries to a value range of ]0,1] if scale == true. This has to
* be done only once this way. * be done only once this way.
*/ */
if (scale == true) { if (scale == true) {
this.scaleFactor = scaleFactor; this.scaleFactor = scaleFactor;
scaleCDF(scaleFactor); scaleCDF(scaleFactor);
} }
} }
private void scaleCDF(double scaleFactor) { private void scaleCDF(double scaleFactor) {
TreeMap<Double, Double> scaledCdfData = new TreeMap<Double, Double>(); TreeMap<Double, Double> scaledCdfData = new TreeMap<Double, Double>();
for (Entry<Double, Double> actEntry : cdfData.entrySet()) { for (Entry<Double, Double> actEntry : cdfData.entrySet()) {
scaledCdfData.put(actEntry.getKey(), actEntry.getValue() scaledCdfData.put(actEntry.getKey(), actEntry.getValue()
/ scaleFactor); / scaleFactor);
} }
// replace original data by scaled data // replace original data by scaled data
cdfData = scaledCdfData; cdfData = scaledCdfData;
} }
public void setScale(boolean scale) { public void setScale(boolean scale) {
this.scale = scale; this.scale = scale;
} }
public double getScaleFactor() { public double getScaleFactor() {
return this.scaleFactor; return this.scaleFactor;
} }
public Map<Double, Double> getMap() { public Map<Double, Double> getMap() {
return cdfData; return cdfData;
} }
} }
...@@ -19,9 +19,9 @@ ...@@ -19,9 +19,9 @@
*/ */
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import org.apache.commons.math.MathException; import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.ExponentialDistributionImpl; import org.apache.commons.math.distribution.ExponentialDistributionImpl;
...@@ -29,63 +29,64 @@ import de.tud.kom.p2psim.api.scenario.ConfigurationException; ...@@ -29,63 +29,64 @@ import de.tud.kom.p2psim.api.scenario.ConfigurationException;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class ExponentialDistribution implements Distribution { public class ExponentialDistribution implements Distribution {
private ExponentialDistributionImpl distr = null; private ExponentialDistributionImpl distr = null;
private double mu; private double mu;
@XMLConfigurableConstructor({"mu"}) @XMLConfigurableConstructor({"mu"})
public ExponentialDistribution(double mu) { public ExponentialDistribution(double mu) {
this.mu = mu; this.mu = mu;
this.distr = new ExponentialDistributionImpl(mu); this.distr = new ExponentialDistributionImpl(mu);
} }
/** /**
* returns a random value that is distributed as the configured * returns a random value that is distributed as the configured
* distribution. * distribution.
*/ */
@Override @Override
public double returnValue() { public double returnValue() {
if (distr == null) throw new ConfigurationException("Mu was not set for exponential distribution " + this); if (distr == null) throw new ConfigurationException("Mu was not set for exponential distribution " + this);
double random = Randoms.getRandom(this).nextDouble(); double random = Randoms.getRandom(ExponentialDistribution.class)
double result; .nextDouble();
double result;
try {
result = distr.inverseCumulativeProbability(random); try {
} catch (MathException e) { result = distr.inverseCumulativeProbability(random);
// TODO Auto-generated catch block } catch (MathException e) {
e.printStackTrace(); // TODO Auto-generated catch block
result = 0; e.printStackTrace();
} result = 0;
}
return result;
} return result;
}
/**
* returns a random value exponentially distributed with mu = _mu. /**
* * returns a random value exponentially distributed with mu = _mu.
* @param _mu *
* @return as double * @param _mu
*/ * @return as double
public static double returnValue(double _mu) { */
try { public static double returnValue(double _mu) {
ExponentialDistributionImpl d = new ExponentialDistributionImpl(_mu); try {
ExponentialDistributionImpl d = new ExponentialDistributionImpl(_mu);
return d.inverseCumulativeProbability(Randoms.getRandom( return d.inverseCumulativeProbability(Randoms.getRandom(
ExponentialDistribution.class) ExponentialDistribution.class)
.nextDouble()); .nextDouble());
} catch (MathException e) { } catch (MathException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
return 0; return 0;
} }
} }
@Override @Override
public String toString() { public String toString() {
return "ExponentialDistribution [distr=" + distr + ", mu=" + mu + "]"; return "ExponentialDistribution [distr=" + distr + ", mu=" + mu + "]";
} }
} }
...@@ -19,309 +19,311 @@ ...@@ -19,309 +19,311 @@
*/ */
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import org.apache.commons.math.MathException; import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.NormalDistributionImpl; import org.apache.commons.math.distribution.NormalDistributionImpl;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class LimitedNormalDistribution implements Distribution { public class LimitedNormalDistribution implements Distribution {
private NormalDistributionImpl limitedNormal; private NormalDistributionImpl limitedNormal;
private double mu; private double mu;
private double sigma; private double sigma;
private boolean limitedMin; private boolean limitedMin;
private boolean limitedMax; private boolean limitedMax;
private double min; private double min;
private double max; private double max;
private double pmin = 0; private double pmin = 0;
private double pmax = 1; private double pmax = 1;
// pfactor and pmin are used to determine the range in which the random // pfactor and pmin are used to determine the range in which the random
// values are allowed. // values are allowed.
private double pfactor; private double pfactor;
private int limitType; private int limitType;
private LimitedNormalConfigurer conf; private LimitedNormalConfigurer conf;
private final static int LIMIT_NORMAL_DIST_NONE = 0; private final static int LIMIT_NORMAL_DIST_NONE = 0;
private final static int LIMIT_NORMAL_DIST_MIN = 1; private final static int LIMIT_NORMAL_DIST_MIN = 1;
private final static int LIMIT_NORMAL_DIST_MAX = 2; private final static int LIMIT_NORMAL_DIST_MAX = 2;
private final static int LIMIT_NORMAL_DIST_BOTH = 3; private final static int LIMIT_NORMAL_DIST_BOTH = 3;
@XMLConfigurableConstructor({"mu", "sigma", "min", "max", "limitedMin", "limitedMax"}) @XMLConfigurableConstructor({"mu", "sigma", "min", "max", "limitedMin", "limitedMax"})
public LimitedNormalDistribution(double mu, double sigma, double min, public LimitedNormalDistribution(double mu, double sigma, double min,
double max, boolean limitedMin, boolean limitedMax) { double max, boolean limitedMin, boolean limitedMax) {
conf = new LimitedNormalConfigurer(mu, sigma, min, max, limitedMin, limitedMax); conf = new LimitedNormalConfigurer(mu, sigma, min, max, limitedMin, limitedMax);
config(conf); config(conf);
} }
public void config(LimitedNormalConfigurer dc) { public void config(LimitedNormalConfigurer dc) {
mu = dc.getMu(); mu = dc.getMu();
sigma = dc.getSigma(); sigma = dc.getSigma();
limitedMin = dc.isLimitedMin(); limitedMin = dc.isLimitedMin();
limitedMax = dc.isLimitedMax(); limitedMax = dc.isLimitedMax();
limitedNormal = new NormalDistributionImpl(mu, sigma); limitedNormal = new NormalDistributionImpl(mu, sigma);
if (limitedMin == false) { if (limitedMin == false) {
if (limitedMax == false) { if (limitedMax == false) {
limitType = LIMIT_NORMAL_DIST_NONE; limitType = LIMIT_NORMAL_DIST_NONE;
} else { } else {
// only max is limted // only max is limted
limitType = LIMIT_NORMAL_DIST_MAX; limitType = LIMIT_NORMAL_DIST_MAX;
max = dc.getMax(); max = dc.getMax();
try { try {
pmax = limitedNormal.cumulativeProbability(max); pmax = limitedNormal.cumulativeProbability(max);
} catch (MathException e) { } catch (MathException e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
} else { } else {
if (limitedMax == false) { if (limitedMax == false) {
// only min is limited. // only min is limited.
limitType = LIMIT_NORMAL_DIST_MIN; limitType = LIMIT_NORMAL_DIST_MIN;
min = dc.getMin(); min = dc.getMin();
try { try {
pmin = limitedNormal.cumulativeProbability(min); pmin = limitedNormal.cumulativeProbability(min);
} catch (MathException e) { } catch (MathException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
} }
} else { } else {
// both sides limited. // both sides limited.
limitType = LIMIT_NORMAL_DIST_BOTH; limitType = LIMIT_NORMAL_DIST_BOTH;
// make sure min is really smaller than max. // make sure min is really smaller than max.
if (max > min) { if (max > min) {
min = dc.getMin(); min = dc.getMin();
max = dc.getMax(); max = dc.getMax();
} else { } else {
max = dc.getMin(); max = dc.getMin();
min = dc.getMax(); min = dc.getMax();
} }
// get min and max probabilites that are possible // get min and max probabilites that are possible
try { try {
pmin = limitedNormal.cumulativeProbability(min); pmin = limitedNormal.cumulativeProbability(min);
pmax = limitedNormal.cumulativeProbability(max); pmax = limitedNormal.cumulativeProbability(max);
pfactor = pmax - pmin; pfactor = pmax - pmin;
} catch (MathException e) { } catch (MathException e) {
e.printStackTrace(); e.printStackTrace();
} }
} }
} }
pfactor = pmax - pmin; pfactor = pmax - pmin;
} }
public double returnValue() { public double returnValue() {
double random = pmin + Randoms.getRandom(this).nextDouble() * pfactor; double random = pmin
double result; + Randoms.getRandom(LimitedNormalDistribution.class)
.nextDouble() * pfactor;
try { double result;
result = limitedNormal.inverseCumulativeProbability(random);
} catch (MathException e) { try {
// TODO Auto-generated catch block result = limitedNormal.inverseCumulativeProbability(random);
e.printStackTrace(); } catch (MathException e) {
result = 0; // TODO Auto-generated catch block
} e.printStackTrace();
result = 0;
return result; }
}
return result;
/** }
* @return Returns the limitType.
*/ /**
public int getLimitType() { * @return Returns the limitType.
return limitType; */
} public int getLimitType() {
return limitType;
/** }
* Returns a random value that is distributed as a Normal Distribution with
* an upper and lower limit. /**
* * Returns a random value that is distributed as a Normal Distribution with
* @param _mu * an upper and lower limit.
* average *
* @param _sigma * @param _mu
* standard deviation * average
* @param _min * @param _sigma
* lower limit, set to "null", if no limit * standard deviation
* @param _max * @param _min
* upper limit, set to "null", if no limit * lower limit, set to "null", if no limit
* @return as double * @param _max
*/ * upper limit, set to "null", if no limit
public static double returnValue(double _mu, double _sigma, Double _min, * @return as double
Double _max) { */
int llimitType; public static double returnValue(double _mu, double _sigma, Double _min,
double lmax; Double _max) {
double lmin; int llimitType;
double lpmax = 1; double lmax;
double lpmin = 0; double lmin;
double lpfactor; double lpmax = 1;
double lpmin = 0;
NormalDistributionImpl llimitedNormal = new NormalDistributionImpl(_mu, double lpfactor;
_sigma);
if (_min == null) { NormalDistributionImpl llimitedNormal = new NormalDistributionImpl(_mu,
if (_max == null) { _sigma);
llimitType = LIMIT_NORMAL_DIST_NONE; if (_min == null) {
} else { if (_max == null) {
// only max is limted llimitType = LIMIT_NORMAL_DIST_NONE;
llimitType = LIMIT_NORMAL_DIST_MAX; } else {
lmax = _max.doubleValue(); // only max is limted
try { llimitType = LIMIT_NORMAL_DIST_MAX;
lpmax = llimitedNormal.cumulativeProbability(lmax); lmax = _max.doubleValue();
} catch (MathException e) { try {
e.printStackTrace(); lpmax = llimitedNormal.cumulativeProbability(lmax);
} } catch (MathException e) {
} e.printStackTrace();
} else { }
if (_max == null) { }
// only min is limited. } else {
llimitType = LIMIT_NORMAL_DIST_MIN; if (_max == null) {
lmin = _min.doubleValue(); // only min is limited.
try { llimitType = LIMIT_NORMAL_DIST_MIN;
lpmin = llimitedNormal.cumulativeProbability(lmin); lmin = _min.doubleValue();
} catch (MathException e) { try {
// TODO Auto-generated catch block lpmin = llimitedNormal.cumulativeProbability(lmin);
e.printStackTrace(); } catch (MathException e) {
} // TODO Auto-generated catch block
} else { e.printStackTrace();
// both sides limited. }
llimitType = LIMIT_NORMAL_DIST_BOTH; } else {
// both sides limited.
// make sure min is really smaller than max. llimitType = LIMIT_NORMAL_DIST_BOTH;
if (_max.doubleValue() > _min.doubleValue()) {
lmin = _min.doubleValue(); // make sure min is really smaller than max.
lmax = _max.doubleValue(); if (_max.doubleValue() > _min.doubleValue()) {
} else { lmin = _min.doubleValue();
lmax = _min.doubleValue(); lmax = _max.doubleValue();
lmin = _max.doubleValue(); } else {
} lmax = _min.doubleValue();
lmin = _max.doubleValue();
// get min and max probabilites that are possible }
try {
lpmin = llimitedNormal.cumulativeProbability(lmin); // get min and max probabilites that are possible
lpmax = llimitedNormal.cumulativeProbability(lmax); try {
lpmin = llimitedNormal.cumulativeProbability(lmin);
lpfactor = lpmax - lpmin; lpmax = llimitedNormal.cumulativeProbability(lmax);
} catch (MathException e) { lpfactor = lpmax - lpmin;
e.printStackTrace();
} } catch (MathException e) {
} e.printStackTrace();
} }
lpfactor = lpmax - lpmin; }
}
lpfactor = lpmax - lpmin;
double lrandom = lpmin double lrandom = lpmin
+ Randoms.getRandom(LimitedNormalDistribution.class) + Randoms.getRandom(LimitedNormalDistribution.class)
.nextDouble() .nextDouble()
* lpfactor; * lpfactor;
double lresult; double lresult;
try { try {
lresult = llimitedNormal.inverseCumulativeProbability(lrandom); lresult = llimitedNormal.inverseCumulativeProbability(lrandom);
} catch (MathException e) { } catch (MathException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
lresult = 0; lresult = 0;
} }
return lresult; return lresult;
} }
private class LimitedNormalConfigurer { private class LimitedNormalConfigurer {
private double mu; private double mu;
private double sigma; private double sigma;
private double min; private double min;
private double max; private double max;
private boolean limitedMin; private boolean limitedMin;
private boolean limitedMax; private boolean limitedMax;
public LimitedNormalConfigurer(double mu, double sigma, double min, public LimitedNormalConfigurer(double mu, double sigma, double min,
double max, boolean limitedMin, boolean limitedMax) { double max, boolean limitedMin, boolean limitedMax) {
super(); super();
this.mu = mu; this.mu = mu;
this.sigma = sigma; this.sigma = sigma;
this.min = min; this.min = min;
this.max = max; this.max = max;
this.limitedMin = limitedMin; this.limitedMin = limitedMin;
this.limitedMax = limitedMax; this.limitedMax = limitedMax;
} }
/** /**
* @return Returns the mu. * @return Returns the mu.
*/ */
public double getMu() { public double getMu() {
return mu; return mu;
} }
/** /**
* @return Returns the sigma. * @return Returns the sigma.
*/ */
public double getSigma() { public double getSigma() {
return sigma; return sigma;
} }
/** /**
* @return Returns the max. * @return Returns the max.
*/ */
public double getMax() { public double getMax() {
return max; return max;
} }
/** /**
* @return Returns the min. * @return Returns the min.
*/ */
public double getMin() { public double getMin() {
return min; return min;
} }
/** /**
* @return Returns the limitedMax. * @return Returns the limitedMax.
*/ */
public boolean isLimitedMax() { public boolean isLimitedMax() {
return limitedMax; return limitedMax;
} }
/** /**
* @return Returns the limitedMin. * @return Returns the limitedMin.
*/ */
public boolean isLimitedMin() { public boolean isLimitedMin() {
return limitedMin; return limitedMin;
} }
@Override @Override
public String toString() { public String toString() {
return "LimitedNormalDistribution [mu=" + mu + ", sigma=" + sigma return "LimitedNormalDistribution [mu=" + mu + ", sigma=" + sigma
+ ", min=" + min + ", max=" + max + ", limitedMin=" + ", min=" + min + ", max=" + max + ", limitedMin="
+ limitedMin + ", limitedMax=" + limitedMax + "]"; + limitedMin + ", limitedMax=" + limitedMax + "]";
} }
} }
public String toString(){ public String toString(){
return conf.toString(); return conf.toString();
} }
} }
...@@ -19,67 +19,68 @@ ...@@ -19,67 +19,68 @@
*/ */
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import umontreal.iro.lecuyer.probdist.LognormalDist;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
import umontreal.iro.lecuyer.probdist.LognormalDist;
public class LognormalDistribution implements Distribution {
public class LognormalDistribution implements Distribution {
private double mu;
private double mu;
private double sigma;
private double sigma;
private LognormalDist distr;
private LognormalDist distr;
@Override
public String toString() { @Override
return "LognormalDistribution [mu=" + mu + ", sigma=" + sigma + "]"; public String toString() {
} return "LognormalDistribution [mu=" + mu + ", sigma=" + sigma + "]";
}
@XMLConfigurableConstructor({"mu", "sigma"})
public LognormalDistribution(double mu, double sigma) { @XMLConfigurableConstructor({"mu", "sigma"})
this.mu = mu; public LognormalDistribution(double mu, double sigma) {
this.sigma = sigma; this.mu = mu;
distr = new LognormalDist(mu, sigma); this.sigma = sigma;
} distr = new LognormalDist(mu, sigma);
}
public double returnValue() {
double random = Randoms.getRandom(this).nextDouble(); public double returnValue() {
double result = 0; double random = Randoms.getRandom(LognormalDistribution.class)
.nextDouble();
try { double result = 0;
result = distr.inverseF(random);
} catch (Exception e) { try {
// TODO Auto-generated catch block result = distr.inverseF(random);
e.printStackTrace(); } catch (Exception e) {
} // TODO Auto-generated catch block
e.printStackTrace();
return result; }
}
return result;
/** }
* returns a random value lognormally distributed with mu = _mu and sigma =
* _sigma. /**
* * returns a random value lognormally distributed with mu = _mu and sigma =
* @param _mu * _sigma.
* @param _sigma *
* @return as double * @param _mu
*/ * @param _sigma
public static double returnValue(double _mu, double _sigma) { * @return as double
try { */
LognormalDist d = new LognormalDist(_mu, _sigma); public static double returnValue(double _mu, double _sigma) {
try {
LognormalDist d = new LognormalDist(_mu, _sigma);
return d.inverseF(Randoms.getRandom(LognormalDistribution.class) return d.inverseF(Randoms.getRandom(LognormalDistribution.class)
.nextDouble()); .nextDouble());
} catch (Exception e) { } catch (Exception e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
return 0; return 0;
} }
} }
} }
...@@ -19,93 +19,94 @@ ...@@ -19,93 +19,94 @@
*/ */
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import umontreal.iro.lecuyer.probdist.LognormalDist;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
import umontreal.iro.lecuyer.probdist.LognormalDist;
public class MixedLogNormalDistribution implements Distribution {
public class MixedLogNormalDistribution implements Distribution {
@Override
public String toString() { @Override
return "MixedLogNormalDistribution [weight1=" + weight1 + ", mu1=" public String toString() {
+ mu1 + ", sigma1=" + sigma1 + ", weight2=" + weight2 return "MixedLogNormalDistribution [weight1=" + weight1 + ", mu1="
+ ", mu2=" + mu2 + ", sigma2=" + sigma2 + "]"; + mu1 + ", sigma1=" + sigma1 + ", weight2=" + weight2
} + ", mu2=" + mu2 + ", sigma2=" + sigma2 + "]";
}
private double weight1;
private double weight1;
private double mu1;
private double mu1;
private double sigma1;
private double sigma1;
private double weight2;
private double weight2;
private double mu2;
private double mu2;
private double sigma2;
private double sigma2;
private LognormalDist distr1;
private LognormalDist distr1;
private LognormalDist distr2;
private LognormalDist distr2;
@XMLConfigurableConstructor({"mu1", "mu2", "sigma1", "sigma2", "weight1", "weight2"})
public MixedLogNormalDistribution(double mu1, double mu2, double sigma1, double sigma2, double weight1, double weight2) { @XMLConfigurableConstructor({"mu1", "mu2", "sigma1", "sigma2", "weight1", "weight2"})
this.mu1 = mu1; public MixedLogNormalDistribution(double mu1, double mu2, double sigma1, double sigma2, double weight1, double weight2) {
this.mu2 = mu2; this.mu1 = mu1;
this.sigma1 = sigma1; this.mu2 = mu2;
this.sigma2 = sigma2; this.sigma1 = sigma1;
this.weight1 = weight1; this.sigma2 = sigma2;
this.weight2 = weight2; this.weight1 = weight1;
distr1 = new LognormalDist(mu1, sigma1); this.weight2 = weight2;
distr2 = new LognormalDist(mu2, sigma2); distr1 = new LognormalDist(mu1, sigma1);
} distr2 = new LognormalDist(mu2, sigma2);
}
public double returnValue() {
double random = Randoms.getRandom(this).nextDouble(); public double returnValue() {
double result = 0; double random = Randoms.getRandom(MixedLogNormalDistribution.class)
try { .nextDouble();
result = weight1 * distr1.inverseF(random) + weight2 double result = 0;
* distr2.inverseF(random); try {
} catch (Exception e) { result = weight1 * distr1.inverseF(random) + weight2
// TODO Auto-generated catch block * distr2.inverseF(random);
e.printStackTrace(); } catch (Exception e) {
} // TODO Auto-generated catch block
return result; e.printStackTrace();
} }
return result;
/** }
* returns a random value distributed after a mixed lognormal distribution:
* _weight1 * lognormal1 + _weight2 * lognormal2 with lorgnormal1(_mu1, /**
* _sigma1) and with lorgnormal1(_mu2, _sigma2). * returns a random value distributed after a mixed lognormal distribution:
* * _weight1 * lognormal1 + _weight2 * lognormal2 with lorgnormal1(_mu1,
* @param _mu1 * _sigma1) and with lorgnormal1(_mu2, _sigma2).
* @param _sigma1 *
* @param _weight1 * @param _mu1
* @param _mu2 * @param _sigma1
* @param _sigma2 * @param _weight1
* @param _weight2 * @param _mu2
* @return * @param _sigma2
*/ * @param _weight2
public static double returnValue(double _mu1, double _sigma1, * @return
double _weight1, double _mu2, double _sigma2, double _weight2) { */
try { public static double returnValue(double _mu1, double _sigma1,
LognormalDist d1 = new LognormalDist(_mu1, _sigma1); double _weight1, double _mu2, double _sigma2, double _weight2) {
LognormalDist d2 = new LognormalDist(_mu2, _sigma2); try {
LognormalDist d1 = new LognormalDist(_mu1, _sigma1);
LognormalDist d2 = new LognormalDist(_mu2, _sigma2);
double random = Randoms.getRandom(MixedLogNormalDistribution.class) double random = Randoms.getRandom(MixedLogNormalDistribution.class)
.nextDouble(); .nextDouble();
return _weight1 * d1.inverseF(random) + _weight2 return _weight1 * d1.inverseF(random) + _weight2
* d2.inverseF(random); * d2.inverseF(random);
} catch (Exception e) { } catch (Exception e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
return 0; return 0;
} }
} }
} }
/* /*
* Copyright (c) 2005-2011 KOM - Multimedia Communications Lab * Copyright (c) 2005-2011 KOM - Multimedia Communications Lab
* *
* This file is part of PeerfactSim.KOM. * This file is part of PeerfactSim.KOM.
* *
* PeerfactSim.KOM is free software: you can redistribute it and/or modify * PeerfactSim.KOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by * it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or * the Free Software Foundation, either version 3 of the License, or
* any later version. * any later version.
* *
* PeerfactSim.KOM is distributed in the hope that it will be useful, * PeerfactSim.KOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details. * GNU General Public License for more details.
* *
* You should have received a copy of the GNU General Public License * You should have received a copy of the GNU General Public License
* along with PeerfactSim.KOM. If not, see <http://www.gnu.org/licenses/>. * along with PeerfactSim.KOM. If not, see <http://www.gnu.org/licenses/>.
* *
*/ */
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import java.util.Random; import java.util.Random;
import org.apache.commons.math.MathException; import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.NormalDistributionImpl; import org.apache.commons.math.distribution.NormalDistributionImpl;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class NormalDistribution implements Distribution { public class NormalDistribution implements Distribution {
private NormalDistributionImpl normal; private NormalDistributionImpl normal;
private Random randomGen = Randoms.getRandom(this); private Random randomGen = Randoms.getRandom(NormalDistribution.class);
private double mu; private double mu;
private double sigma; private double sigma;
@XMLConfigurableConstructor({"mu", "sigma"}) @XMLConfigurableConstructor({"mu", "sigma"})
public NormalDistribution(double mu, double sigma) { public NormalDistribution(double mu, double sigma) {
this.mu = mu; this.mu = mu;
this.sigma = sigma; this.sigma = sigma;
normal = new NormalDistributionImpl(mu, sigma); normal = new NormalDistributionImpl(mu, sigma);
} }
@Override @Override
public double returnValue() { public double returnValue() {
double random = randomGen.nextDouble(); double random = randomGen.nextDouble();
double result; double result;
try { try {
result = normal.inverseCumulativeProbability(random); result = normal.inverseCumulativeProbability(random);
} catch (MathException e) { } catch (MathException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
result = 0; result = 0;
} }
return result; return result;
} }
@Override @Override
public String toString() { public String toString() {
return "NormalDistribution [mu=" + mu + ", sigma=" + sigma + "]"; return "NormalDistribution [mu=" + mu + ", sigma=" + sigma + "]";
} }
/** /**
* returns a random value normally distributed with mu = _mu and sigma = * returns a random value normally distributed with mu = _mu and sigma =
* _sigma. * _sigma.
* *
* @param _mu * @param _mu
* @param _sigma * @param _sigma
* @return as double * @return as double
*/ */
public static double returnValue(double _mu, double _sigma) { public static double returnValue(double _mu, double _sigma) {
try { try {
NormalDistributionImpl d = new NormalDistributionImpl(_mu, _sigma); NormalDistributionImpl d = new NormalDistributionImpl(_mu, _sigma);
return d.inverseCumulativeProbability(Randoms.getRandom( return d.inverseCumulativeProbability(Randoms.getRandom(
NormalDistribution.class) NormalDistribution.class)
.nextDouble()); .nextDouble());
} catch (Exception e) { } catch (Exception e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
return 0; return 0;
} }
} }
} }
...@@ -19,64 +19,64 @@ ...@@ -19,64 +19,64 @@
*/ */
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import org.apache.commons.math.MathException; import org.apache.commons.math.MathException;
import org.apache.commons.math.distribution.PoissonDistributionImpl; import org.apache.commons.math.distribution.PoissonDistributionImpl;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class PoissonDistribution implements Distribution { public class PoissonDistribution implements Distribution {
private double lambda; private double lambda;
private PoissonDistributionImpl poisson; private PoissonDistributionImpl poisson;
@XMLConfigurableConstructor({"lambda"}) @XMLConfigurableConstructor({"lambda"})
public PoissonDistribution(double lambda){ public PoissonDistribution(double lambda){
this.lambda = lambda; this.lambda = lambda;
this.poisson = new PoissonDistributionImpl(lambda); this.poisson = new PoissonDistributionImpl(lambda);
} }
// returns the x-value for a random value in the cdf // returns the x-value for a random value in the cdf
public double returnValue() { public double returnValue() {
double random = Randoms.getRandom(this) double random = Randoms.getRandom(PoissonDistribution.class)
.nextDouble(); .nextDouble();
int result; int result;
try { try {
result = poisson.inverseCumulativeProbability(random); result = poisson.inverseCumulativeProbability(random);
} catch (MathException e) { } catch (MathException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
result = 0; result = 0;
} }
return result; return result;
} }
/** /**
* returns a random value Poisson distributed with lamda = _lamda. * returns a random value Poisson distributed with lamda = _lamda.
* @param _lamda * @param _lamda
* @return as double * @return as double
*/ */
public static double returnValue(double _lamda) { public static double returnValue(double _lamda) {
try { try {
PoissonDistributionImpl d = new PoissonDistributionImpl(_lamda); PoissonDistributionImpl d = new PoissonDistributionImpl(_lamda);
return d.inverseCumulativeProbability(Randoms.getRandom( return d.inverseCumulativeProbability(Randoms.getRandom(
PoissonDistribution.class).nextDouble()); PoissonDistribution.class).nextDouble());
} catch (MathException e) { } catch (MathException e) {
// TODO Auto-generated catch block // TODO Auto-generated catch block
e.printStackTrace(); e.printStackTrace();
return 0; return 0;
} }
} }
@Override @Override
public String toString() { public String toString() {
return "PoissonDistribution [lambda=" + lambda + "]"; return "PoissonDistribution [lambda=" + lambda + "]";
} }
} }
...@@ -19,61 +19,62 @@ ...@@ -19,61 +19,62 @@
*/ */
package de.tud.kom.p2psim.impl.util.stat.distributions; package de.tud.kom.p2psim.impl.util.stat.distributions;
import de.tudarmstadt.maki.simonstrator.api.Randoms; import de.tudarmstadt.maki.simonstrator.api.Randoms;
import de.tudarmstadt.maki.simonstrator.api.util.Distribution; import de.tudarmstadt.maki.simonstrator.api.util.Distribution;
import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor; import de.tudarmstadt.maki.simonstrator.api.util.XMLConfigurableConstructor;
public class UniformDistribution implements Distribution { public class UniformDistribution implements Distribution {
@Override @Override
public String toString() { public String toString() {
return "UniformDistribution [min=" + min + ", max=" + max + "]"; return "UniformDistribution [min=" + min + ", max=" + max + "]";
} }
private double min; private double min;
private double max; private double max;
private double factor; private double factor;
@XMLConfigurableConstructor({"min", "max"}) @XMLConfigurableConstructor({"min", "max"})
public UniformDistribution(double min, double max) { public UniformDistribution(double min, double max) {
this.min = Math.min(min, max); this.min = Math.min(min, max);
this.max = Math.max(min, max); this.max = Math.max(min, max);
factor = Math.abs(max - min); factor = Math.abs(max - min);
} }
/** /**
* Delivers a random value distributed as the configured distribution. * Delivers a random value distributed as the configured distribution.
*/ */
public double returnValue() { public double returnValue() {
return min + factor * Randoms.getRandom(this).nextDouble(); return min + factor
} * Randoms.getRandom(UniformDistribution.class).nextDouble();
}
/**
* delivers a random value that is uniformly distributed between the _min /**
* and the _max value. * delivers a random value that is uniformly distributed between the _min
* * and the _max value.
* @param _min *
* @param _max * @param _min
* @return random value as double * @param _max
*/ * @return random value as double
public static double returnValue(double _min, double _max) { */
double lmin, lmax, lfactor; public static double returnValue(double _min, double _max) {
if (_min < _max) { double lmin, lmax, lfactor;
lmin = _min; if (_min < _max) {
lmax = _max; lmin = _min;
} else { lmax = _max;
lmin = _max; } else {
lmax = _min; lmin = _max;
} lmax = _min;
lfactor = Math.abs(lmax - lmin); }
lfactor = Math.abs(lmax - lmin);
return lmin + lfactor return lmin + lfactor
* Randoms.getRandom(UniformDistribution.class).nextDouble(); * Randoms.getRandom(UniformDistribution.class).nextDouble();
} }
} }
...@@ -66,7 +66,8 @@ public class ZipfDistribution implements Distribution { ...@@ -66,7 +66,8 @@ public class ZipfDistribution implements Distribution {
// rank = 1 ... maximum_Number_Of_Ranks => 1/rank = 0..1 // rank = 1 ... maximum_Number_Of_Ranks => 1/rank = 0..1
return 1 / (Math return 1 / (Math
.pow(1 / (Randoms.getRandom(this).nextDouble() * harmonicNormFactor), .pow(1 / (Randoms.getRandom(ZipfDistribution.class).nextDouble()
* harmonicNormFactor),
1 / zipfExponent)); 1 / zipfExponent));
} }
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment