
#
# Copyright (c) 2009 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved.
# This component and the accompanying materials are made available
# under the terms of "Eclipse Public License v1.0"
# which accompanies this distribution, and is available
# at the URL "http://www.eclipse.org/legal/epl-v10.html".
#
# Initial Contributors:
# Nokia Corporation - initial contribution.
#
# Contributors:
#
# Description: 
#
# NETPERF test suite example configuration file
# =============================================
#
# This is a comprehensively commented reference ini file for Netperf.
#
# It should be used as the basis for creating your own customised ini files.
#  It's best if you work with a modified copy (e.g. in \netperfwork\netperf_new.ini)
#   so that netperf_example.ini itself can be kept as a master reference copy.
#
# So, before you start modifying this file, delete these comments up to this point
#   and save it as a local working copy (e.g. \netperfwork\netperf_new.ini)


# The #'ed out options shown below will be taken at their default values.
#  Uncomment them if you want a non-default value.
#  Otherwise you can remove them from the file to keep it simple and small.

# After you've configured this ini file, use it as follows:
#
#  netperf \netperfwork\netperf_yours.ini emulator [interactive] [run]
#  netperf \netperfwork\netperf_yours.ini devboard [interactive] [run]
#  netperf \netperfwork\netperf_yours.ini device [interactive] [run]
#
#  The optional keyword 'interactive' causes the user to be prompted when manual intervention
#   may be required, e.g. when the setup files have been created and the user needs to
#    run them on the device before the tests proceed. So omit this keyword
#     if you're running the tests in some kind of batch job environment
#      (e.g. overnight builds).
#
#  The optional keyword 'run' causes the same tests to run as the last invocation
#    of netperf. This speeds things up when the device has already been set up,
#     and the list of tests to run hasn't changed since last time. If you've
#      changed your config file then you should omit this keyword so the
#       test run is reconfigured correctly.
#
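#  For example (reusing the working-copy name suggested above):
#
#    netperf \netperfwork\netperf_new.ini device interactive
#      - first, fully configured run against a device, prompting for manual steps
#    netperf \netperfwork\netperf_new.ini device interactive run
#      - repeat the same set of tests without reconfiguring the device
#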



#############################################################################
# The following section specifies information about the TestControllerPC,   #
#  which will set up / drive the performance tests.                         #
#   The TestControllerPC is probably the PC you're reading this on.         #
#############################################################################
[TestControllerPC]

# Option: [TestControllerPC].Method
#[Method=Testdriver or CopyTestScripts]
# For fully-automatic mode (tests invoked from PC), use value Testdriver
# For semi-automatic mode (tests invoked from Device), use value CopyTestScripts
#
# No default. Must be specified.
Method=Testdriver
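
# For semi-automatic mode you would instead use (shown commented out here so the
#  Testdriver setting above stays in effect):
#Method=CopyTestScripts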

# Option: [TestControllerPC].DestinationForSetupData
# The destination, according to the TestControllerPC, of the setup data which
#  will be generated by Netperf.
#
#  The setup data comprises
#   - commsdat (if specified below)
#   - stat.ini file (if Testdriver mode)
#   - script to install the above
#
# So for a devboard you probably want to set this to a card reader
#  (e.g. F:\)
#  In this case ensure a card is inserted when you run the script!
#
# For a production device you might want to copy to a local folder
#   (e.g. the default, 'netperf_device_setup') and somehow copy
#   the files manually to the phone when prompted (in "interactive" mode).
#
# Defaults to local folder "netperf_devboard_<target>"
#                             (where <target> is emulator/devboard/device)
#DestinationForSetupData=F:\

# Option: [TestControllerPC].TestBearerIP
# The TestControllerPC IP address (on its network interface of
#    the bearer for which we're measuring performance, the "test bearer").
#   This is the address that the DeviceUnderTest-side performance library will
#    send data to / receive data from.
#
# No default. Must be specified.
TestBearerIP=10.16.83.1

# Option: [TestControllerPC].ControlIP
# The TestControllerPC IP address (on its network interface used for control).
#  If you're happy for control traffic to be "in-band" (i.e. sharing the bearer
#  with the "test bearer" mentioned above), the value of this should match
#  that of TestBearerIP above.
#
# No default. Must be specified.
ControlIP=192.168.0.1

# Option: [TestControllerPC].ControlPort
# The TCP port of the UCC service running on the TestControllerPC.
#
# Defaults to 1683. Only change this if you have something else using the port.
#ControlPort=1683

# Option: [TestControllerPC].RunServices
# Whether the script should try to run the PC-side services UCC and exeservice
#  during its test run, and shut them down at the end.
# If you have a reason to launch them independently before the script starts,
#  you need to set this to 'no'
#
# Defaults to yes
#RunServices=yes
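# e.g. if you prefer to start UCC and exeservice yourself before invoking netperf:
#RunServices=no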


############################################################################
# The following section specifies information about the Symbian OS device  #
#  on which we want to measure networking performance ("DeviceUnderTest")  #
############################################################################
[DeviceUnderTest]

# Option: [DeviceUnderTest].LocationForSetupData
#  This is where the device will expect to see the setup files.
#   e.g. if you've copied them to a memory card which mounts as "E:\" on
#         the target device, put "E:\" here.
#   e.g. if you've copied them manually to the built-in system drive
#         called "C:\", put "C:\" here.
#
#  N.B. In fully-automatic (Testdriver) mode this needs to be the root
#   of a drive (e.g. C:\, E:\) because Stat will expect to find the ini file
#   generated by Netperf in a specific place on that drive (\system\data).
#  
# Defaults to C:\
#LocationForSetupData=E:\

# Option: [DeviceUnderTest].TestBearerIAP/SNAP
# The connection of the test bearer on the DeviceUnderTest.
#  This is the connection that the DeviceUnderTest-side performance library
#   will attach to before sending data to / receiving data from
#    [TestControllerPC].TestBearerIP
#
#  Specified by *either* IAP number *or* SNAP number.
#
# Defaults to 0 - this means "default connection".
#TestBearerIAP=0
#TestBearerSNAP=0
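# e.g. to pin the test traffic to a particular access point (the IAP number 3
#  here is purely illustrative - use whichever IAP your commsdat actually defines):
#TestBearerIAP=3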

# Option: [DeviceUnderTest].TestBearerIP
# The address on the test bearer of the DeviceUnderTest
# This is the address that the PC-side performance tool (e.g. iperf)
#  will send data to / receive data from.
#  Also goes into the generated commsdat for configuring the
#   network interface statically.
# Subnet mask can be specified in CIDR format (e.g. 24 for 255.255.255.0).
#  Subnet mask defaults to 24 bits if omitted.
#
# No default. Must be specified.
TestBearerIP=10.16.83.2/24

# Option: [DeviceUnderTest].TestBearerGateway
# The address of the gateway (if any) on the test bearer.
#  Goes into the generated commsdat for configuring the
#   network interface statically.
#
# Defaults to x.x.x.1 in the subnet scope specified above.
TestBearerGateway=10.16.83.254

# Option: [DeviceUnderTest].ControlIAP/SNAP
# The connection of the control link on the DeviceUnderTest.
#  UCC will attach to this connection before sending control calls
#   to [TestControllerPC].ControlIP
#  (in Testdriver mode, Stat will run on this connection too)
#
#  Specified by *either* IAP number *or* SNAP number.
#
# Defaults to 0 - this means "default connection".
#ControlIAP=0
#ControlSNAP=0
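# e.g. to run the control traffic over a destination network/SNAP instead of an
#  IAP (the SNAP number 2 here is purely illustrative):
#ControlSNAP=2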

# Option: [DeviceUnderTest].ControlIP
# The IP address of the DeviceUnderTest, which the TestControllerPC
#  will contact to run tests using testdriver/stat.
# As before, if control is "in-band", use the same value for this
#  as for TestBearerIP.
#
# No default. Only needed in Testdriver (fully-auto) mode.
#   Can be omitted in CopyTestScripts (semi-auto) mode.
ControlIP=192.168.0.2

# Option: [DeviceUnderTest].ControlPort
# The TCP/IP port on the DeviceUnderTest where Stat is listening.
#
# Defaults to 3000. Only change this if you have something else using the port.
#ControlPort=3000

# Option: [DeviceUnderTest].CpuMeterThreadPriority
# This sets the thread priority of the idle loop which is used
#  to measure available CPU cycles during the performance tests
#  (by counting how many it manages to eat over time).
#
# If no CPU measurements are recorded it's a sign this is set too low.
# If the test system runs sluggishly or comes to a halt,
#    it's a sign that this is set too high.
# Depending on the system scheduler or the exact test environment
#  it may be necessary to tweak this.
#
# See TThreadPriority in %EPOCROOT%epoc32/include/e32const.h for possible values.
# Defaults to 100 (EPriorityAbsoluteVeryLow)
#CpuMeterThreadPriority=100
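# e.g. if no CPU measurements appear in your results, you could try stepping up
#  to the next absolute priority (200 should correspond to EPriorityAbsoluteLow,
#  but verify against TThreadPriority in your SDK's e32const.h):
#CpuMeterThreadPriority=200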

# Option: [DeviceUnderTest].RunSamplingProfiler
# Whether to run the sampling profiler, and retrieve its results
#  for each test. The profiler must be present on the device for this
#   to work (e.g. by including profiler.iby when building ROM).
#
# Defaults to false   (because running it applies a small processing overhead)
#RunSamplingProfiler=false
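# e.g. if your ROM was built with profiler.iby and you can accept the small
#  overhead, switch it on:
#RunSamplingProfiler=true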

# Option: [DeviceUnderTest].Platform,BuildVariant
# Platform and build variant of the software sent to the DeviceUnderTest.
# For accurate performance measurement on target devices, leave these options
#  alone (defaulting to armv5/urel).
# Be wary: udeb is a lot slower due to all the logging calls, so it should
#  not be trusted for reliable performance measurements; use it only
#  for debugging.
# Also, winscw with testdriver isn't possible as testdriver tries to start the
#  emulator locally (i.e. on the TestControllerPC). This is a bad thing,
#  as the software running on the TestControllerPC (specifically iperf) doesn't
#  like to run on the same box as the DeviceUnderTest.
# Unfortunately Testdriver with a remote emulator isn't possible either, because
#  Testdriver itself tries to start the emulator on the TestControllerPC
#  and communicate with it over a TAP device.
# So only use winscw if your [TestControllerPC].Method is "CopyTestScripts".
#
# Defaults to armv5/urel
#Platform=armv5
#BuildVariant=urel
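# e.g. a debugging-only combination (remember: udeb figures are not reliable
#  performance numbers, and winscw requires Method=CopyTestScripts):
#Platform=winscw
#BuildVariant=udeb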

# Option: [DeviceUnderTest].CommsdatTemplate
# The template file from which Netperf generates the commsdat installed on the
#  DeviceUnderTest as part of the setup data.
#
# Defaults to "" (i.e. no commsdat generation - useful when your commsdat
#  is all set up already and you don't want Netperf to interfere with it)
#CommsdatTemplate=netperf_eth_ntras_template.xml
#CommsdatTemplate=netperf_eth_ntras_delays_template.cfg


# Option: [DeviceUnderTest].Cert,Key
#
# Certificate and Private Key files used for signing .sis files.
#
# Defaults to "" (i.e. TestDriver uses its own certificates)
#Cert=c:\test.cert
#Key=c:\test.key

# Option: [DeviceUnderTest].PassOpts
#
# Password options if Private Key requires it. This argument is passed to
#  TestDriver command line. At present it supports these parameters:
#  --sigalgorithm RSA/DSA
#  --certpass [password]
#
# Defaults to "" (i.e. no password)
#PassOpts=--sigalgorithm DSA --certpass [password]


###########################################################################
# The following section specifies information about the monitoring PC,    #
#  which will run wireshark for you, to catch all the data sent or        #
#   received during the running of the performance tests (for further     #
#    analysis after the tests finish). The benefit of this is that each   #
#     test will have a packet dump file corresponding to the name of      #
#      the test. This allows long test runs with extensive post-analysis  #
###########################################################################
[PacketCapturePC]

# Option: [PacketCapturePC].PacketCapture
# Whether to run packet capture (wireshark) on the monitoring PC, producing one
#  packet dump file per test.
#
# Defaults to false
#PacketCapture=false

# Option: [PacketCapturePC].IPAddress
#  This is the IP address of the monitoring PC. Obviously it needs an IP
#   address on the test bearer network. It also needs wireshark installed,
#   and the PcapService installed and configured (see the HowTo document).
#  e.g. IPAddress=10.16.83.10

# Option: [PacketCapturePC].FTPUser,FTPPassword
#  These are the login details of the FTP server running on the monitoring PC.
#   Netperf will expect to find the wireshark logs in the initial working
#    directory on logging in. It will pull each one down and then delete it.
#  e.g. FTPUser=anonymous
#  e.g. FTPPassword=aa
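
# Putting the section together, an enabled monitoring setup might look like this
#  (the address and credentials are just the illustrative values from the
#   comments above - substitute your own):
#PacketCapture=true
#IPAddress=10.16.83.10
#FTPUser=anonymous
#FTPPassword=aa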



########################################################################
# Simply specified tests follow (speed/proto/direction)                #
########################################################################
[Test]

# Option: [Test].DurationInSeconds
# The duration (in seconds) that data is sent/received per test
#
# Defaults to 60
#DurationInSeconds=60

# Option: [Test].BasePortNumber
# The base port number used by the send/receive tests.
#  This increments by 1 per reader/writer used throughout the test run.
#   So use a number at the beginning of a large unused range of ports.
#
# Defaults to 5001
#BasePortNumber=5001

# Option: [Test].Rates
# Concise way to specify the tests you want to run.
# e.g. Rates=1000				for 1000kbps udp send, then 1000kbps tcp send, then 1000kbps udp receive, then 1000kbps tcp receive (4 test cases)
#   or Rates=00256ts,0512u		for 256kbps tcp send then 512kbps udp send then 512kbps udp receive (3 test cases)
#   or Rates=10(+10)80us		for udp send at 10,20,30,40,50,60,70,80 kbps (8 test cases)
#   or Rates=16(*1.5)182ur		for udp receive at 16,24,36,54,81,121,182kbps (7 test cases). Note the integers round downwards.
#   or Rates=00256ts,0512u,10(+10)80us,16(*1.5)182ur    for all the above in turn (4+3+8+7=22 test cases in total)
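# Reading the examples above: the trailing letters select protocol and direction
#  (t=tcp, u=udp, s=send, r=receive), and leaving a letter out appears to run
#  both choices for it - so a bare rate with no letters expands to all four
#  protocol/direction combinations.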
#
# Defaults to ''  (i.e. no simply specified tests, just run any
#   fully specified tests in the following sections)
Rates=1000


####################################################################################
# Individual fully specified tests follow. More wordy to set up but more flexible. #
# These will run after the tests specified in Rates= above.                        #
####################################################################################

[Test_00FileTransferUseCase]
# run this particular test for 900 sec (15 min)
DurationInSeconds=900
# we have a sender (device->PC), called Sender_01. it will be sending udp.
Sender_01.Protocol=udp
# it will be sending at a rate of 500kbps
Sender_01.Rate=500
# it will be sending in a packet size of 1024 bytes.
Sender_01.PacketSize=1024
# we have another udp sender (to be running at the same time). It's called Sender_bob
Sender_bob.Protocol=udp
# also sending at 500kbps
Sender_bob.Rate=500
Sender_bob.PacketSize=1024
# a third simultaneous sender called Sender_foo
Sender_foo.Protocol=udp
Sender_foo.Rate=500
Sender_foo.PacketSize=1024
# at the same time as the 3 senders we will have a udp receiver (PC->device)
Receiver_01.Protocol=udp
# ... at 500kbps
Receiver_01.Rate=500
Receiver_01.PacketSize=1470


[Test_01BigPackets1024]
# The next test. 1 TCP receiver at 1024 byte packet size
Receiver_01.Protocol=tcp
Receiver_01.PacketSize=1024

[Test_02BigPackets2048]
Receiver_01.Protocol=tcp
Receiver_01.PacketSize=2048

[Test_03BigPackets4096]
Receiver_01.Protocol=tcp
Receiver_01.PacketSize=4096

[Test_04BigPackets8192]
Receiver_01.Protocol=tcp
Receiver_01.PacketSize=8192


[Test_10SimpleTestCase]
DurationInSeconds=10
Sender_01.Protocol=udp
Sender_01.Rate=500
Sender_01.PacketSize=512 
Sender_02.Protocol=tcp
Sender_02.Rate=50
Sender_02.PacketSize=512
Receiver_01.Protocol=udp
Receiver_01.Rate=50
Receiver_01.PacketSize=512

[Test_11FileTransferUseCase]
Sender_01.Protocol=tcp
Sender_01.Rate=50000
Sender_01.PacketSize=16384
Receiver_01.Protocol=tcp
Receiver_01.PacketSize=16384

[Test_12StreamingMediaDownloadUseCase]
Receiver_01.Protocol=tcp
Receiver_01.PacketSize=16384
Receiver_02.Protocol=udp
Receiver_02.Rate=500
Receiver_02.PacketSize=1470

[Test_13StreamingMediaUploadUseCase]
Sender_01.Protocol=tcp
Sender_01.Rate=50000
Sender_01.PacketSize=16384
Sender_02.Protocol=udp
Sender_02.Rate=500
Sender_02.PacketSize=1470

[Test_14ConversationalVoipUseCase]
DurationInSeconds=240
Sender_01.Protocol=tcp
Sender_01.Rate=50000
Sender_01.PacketSize=1470
Sender_02.Protocol=udp
Sender_02.Rate=100
Sender_02.PacketSize=1470
Receiver_01.Protocol=tcp
Receiver_01.PacketSize=16384
Receiver_02.Protocol=udp
Receiver_02.Rate=100
Receiver_02.PacketSize=1470