Unpack thesis sources.

parent 9ca84861b3
commit 38c93a2cb1

thesis/.gitignore (vendored, new file)
@@ -0,0 +1,33 @@
*.acn
*.acr
*.alg
*.aux
*.bbl
*.blg
*.dvi
*.fdb_latexmk
*.glg
*.glo
*.gls
*.idx
*.ilg
*.ind
*.ist
*.lof
*.log
*.lot
*.maf
*.mtc
*.mtc0
*.nav
*.nlo
*.out
*.pdfsync
*.ps
*.snm
*.synctex.gz
*.toc
*.vrb
*.xdy
*.tdo
*.texpadtmp
thesis/BScThesisUnibas_main-4.pdf (new binary file, not shown)

thesis/Back/AppendixA.tex (new file)
@@ -0,0 +1,162 @@
% !TEX root = ../Thesis.tex
\chapter{Appendix A}

\section{Command Line Examples}\label{example:pre-post}
\subsection{Pre and post scripts}
In this example, the \verb|--unsafe| option allows the capture to run without specifying an IP or MAC address.
\verb|default| is the device name used, and \verb|-c 10| tells \iottb that we only want to capture 10 packets.
\begin{minted}{bash}
# Command:
$ iottb sniff --pre='/usr/bin/echo "pre"' --post='/usr/bin/echo "post"' \
default --unsafe -c 10
# Stdout:
Testbed [Info]
Running pre command /usr/bin/echo "pre"
pre
Using canonical device name default
Found device at path /home/seb/iottb.db/default
Using filter None
Files will be placed in /home/seb/iottb.db/default/sniffs/2024-06-30/cap0002-2101
Capture has id dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe
Capture setup complete!
Capture complete. Saved to default_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.pcap
tcpdump took 2.12 seconds.
Ensuring correct ownership of created files.
Saving metadata.
END SNIFF SUBCOMMAND
Running post script /usr/bin/echo "post"
post
\end{minted}

The contents of the \verb|sniffs| directory for the default device after this capture has completed:
\begin{minted}{bash}
sniffs/2024-06-30/cap0002-2101
$ tree
.
|-- capture_metadata.json
|-- default_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.pcap
|-- stderr_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.log
`-- stdout_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.log
\end{minted}
and the metadata file contains (\verb|\| only used for fitting into this document):\\
\verb|# capture_metadata.json|\\
\begin{minted}{json}
{
  "device": "default",
  "device_id": "default",
  "capture_id": "dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe",
  "capture_date_iso": "2024-06-30T21:01:31.496870",
  "invoked_command": "sudo tcpdump -# -n -c 10 -w \
    /home/seb/iottb.db \
    /default/sniffs/2024-06-30 \
    /cap0002-2101/default_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.pcap",
  "capture_duration": 2.117154359817505,
  "generic_parameters": {
    "flags": "-# -n",
    "kwargs": "-c 10",
    "filter": null
  },
  "non_generic_parameters": {
    "kwargs": "-w \
      /home/seb/iottb.db/default/sniffs/2024-06-30 \
      /cap0002-2101 \
      /default_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.pcap",
    "filter": null
  },
  "features": {
    "interface": null,
    "address": null
  },
  "resources": {
    "pcap_file": "default_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.pcap",
    "stdout_log": "stdout_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.log",
    "stderr_log": "stderr_dcdf1e0b-6c4d-4f01-ba16-f42a04131fbe.log",
    "pre": "/usr/bin/echo \"pre\"",
    "post": "/usr/bin/echo \"post\""
  },
  "environment": {
    "capture_dir": "cap0002-2101",
    "database": "iottb.db",
    "capture_base_dir": "/home/seb/iottb.db/default/sniffs/2024-06-30",
    "capture_dir_abs_path": \
      "/home/seb/iottb.db/default/sniffs/2024-06-30/cap0002-2101"
  }
}
\end{minted}

\section{Canonical Name}
\begin{listing}[!ht]
\inputminted[firstline=12, lastline=40]{python}{string_processing.py}
\caption{Shows how the canonical name is created.}
\label{lst:dev-canonical}
\end{listing}
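
The normalization performed by \verb|make_canonical_name()| can be pictured with the following sketch. The exact rules live in \verb|string_processing.py| (reproduced in the listing above); the function below is an illustrative assumption, not the actual implementation.
\begin{minted}{python}
import re

def make_canonical_name_sketch(raw_name: str) -> tuple[str, list[str]]:
    """Illustrative sketch only: lowercase the name and replace runs of
    whitespace or special characters with single underscores, keeping the
    original string as an alias. The real make_canonical_name() may differ."""
    aliases = [raw_name]
    lowered = raw_name.strip().lower()
    canonical = re.sub(r'[^a-z0-9]+', '_', lowered).strip('_')
    return canonical, aliases

# e.g. 'iPhone 13 (year 2043)' -> ('iphone_13_year_2043', ['iPhone 13 (year 2043)'])
\end{minted}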

\section{Add Device Example}
\subsection{Configuration File}\label{appendixA:add-dev-cfg}
\begin{listing}[!ht]
\inputminted[linenos, breaklines]{python}{appendixa-after-add-device-dir.txt}
\caption{Directory and file contents after adding two devices.}
\label{lst:appendix:appendixa:config-file}
\end{listing}

\section{Debug Flag Standard Output}

\begin{figure}
\centering
\begin{minted}{bash}
❯ iottb -vvv --debug sniff roomba --unsafe -c 10
<_io.TextIOWrapper name='<stdout>' mode='w' encoding='utf-8'>
INFO - main - cli - 48 - Starting execution.
INFO - iottb_config - __init__ - 24 - Initializing Config object
WARNING - iottb_config - warn - 21 - DatabaseLocations are DatabaseLocationMap in the class iottb.models.iottb_config
INFO - iottb_config - load_config - 57 - Loading configuration file
INFO - iottb_config - load_config - 62 - Config file exists, opening.
DEBUG - main - cli - 52 - Verbosity: 3
DEBUG - main - cli - 54 - Debug: True
INFO - sniff - validate_sniff - 37 - Validating sniff...
INFO - sniff - sniff - 91 - sniff command invoked
DEBUG - sniff - sniff - 98 - Config loaded: <iottb.models.iottb_config.IottbConfig object at 0x7f16197d5e50>
DEBUG - sniff - sniff - 104 - Full db path is /home/seb/showcase
INFO - string_processing - make_canonical_name - 20 - Normalizing name roomba
DEBUG - string_processing - make_canonical_name - 38 - Canonical name: roomba
DEBUG - string_processing - make_canonical_name - 39 - Aliases: ['roomba']
Testbed [I]
Using canonical device name roomba
Found device at path /home/seb/showcase/roomba
INFO - sniff - sniff - 152 - Generic filter None
Using filter None
DEBUG - sniff - sniff - 160 - Previous captures <generator object Path.glob at 0x7f16194ec590>
DEBUG - sniff - sniff - 162 - Capture count is 4
DEBUG - sniff - sniff - 165 - capture_dir: cap0004-0310
Files will be placed in /home/seb/showcase/roomba/sniffs/2024-07-01/cap0004-0310
DEBUG - sniff - sniff - 172 - successfully created capture directory
Capture has id 59153b53-c49d-44de-99d2-b5a3490df29a
DEBUG - sniff - sniff - 185 - Full pcap file path is /home/seb/showcase/roomba/sniffs/2024-07-01/cap0004-0310/roomba_59153b53-c49d-44de-99d2-b5a3490df29a.pcap
INFO - sniff - sniff - 186 - pcap file name is roomba_59153b53-c49d-44de-99d2-b5a3490df29a.pcap
INFO - sniff - sniff - 187 - stdout log file is stdout_59153b53-c49d-44de-99d2-b5a3490df29a.log
INFO - sniff - sniff - 188 - stderr log file is stderr_59153b53-c49d-44de-99d2-b5a3490df29a.log
DEBUG - sniff - sniff - 191 - pgid 260696
DEBUG - sniff - sniff - 192 - ppid 12862
DEBUG - sniff - sniff - 193 - (real, effective, saved) user id: (1000, 1000, 1000)
DEBUG - sniff - sniff - 194 - (real, effective, saved) group id: (1000, 1000, 1000)
DEBUG - sniff - sniff - 209 - Flags: -# -n
DEBUG - sniff - sniff - 217 - verbosity string to pass to tcpdump: -vvv
DEBUG - sniff - sniff - 228 - KW args: -c 10
DEBUG - sniff - sniff - 237 - Non transferable (special) kw args: -w /home/seb/showcase/roomba/sniffs/2024-07-01/cap0004-0310/roomba_59153b53-c49d-44de-99d2-b5a3490df29a.pcap
INFO - sniff - sniff - 246 - tcpdump command: sudo tcpdump -# -n -vvv -c 10 -w /home/seb/showcase/roomba/sniffs/2024-07-01/cap0004-0310/roomba_59153b53-c49d-44de-99d2-b5a3490df29a.pcap
Capture setup complete!
DEBUG - sniff - sniff - 259 -
stdout: <_io.TextIOWrapper name='/home/seb/showcase/roomba/sniffs/2024-07-01/cap0004-0310/stdout_59153b53-c49d-44de-99d2-b5a3490df29a.log' mode='w' encoding='UTF-8'>.
stderr: <_io.TextIOWrapper name='/home/seb/showcase/roomba/sniffs/2024-07-01/cap0004-0310/stderr_59153b53-c49d-44de-99d2-b5a3490df29a.log' mode='w' encoding='UTF-8'>.

Capture complete. Saved to roomba_59153b53-c49d-44de-99d2-b5a3490df29a.pcap
tcpdump took 1.11 seconds.
Ensuring correct ownership of created files.
Saving metadata.
END SNIFF SUBCOMMAND

\end{minted}
\caption{Output with max verbosity and debug flag set.}
\label{fig:example-debug-output}
\end{figure}
thesis/Back/AppendixB.tex (new file)
@@ -0,0 +1,16 @@
\chapter{Appendix B}
\section{Software Requirements}\label{sec:software-req}
\iottbsc was developed on the \textit{Linux}\footnote{\url{kernel.org}} distribution \textit{Fedora 40}\footnote{\url{https://fedoraproject.org/workstation/}}. It has not been tested on any other platform.
\iottbsc is implemented as a Python\footnote{\url{python.org}} package, \iottb, which has been developed with Python version 3.12.

\subsection{Runtime Dependencies}
\begin{itemize}
\item Poetry\footnote{\url{https://python-poetry.org/}}, version 1.8.3, used for packaging and dependency management.
\item Click\footnote{\url{https://click.palletsprojects.com/en/8.1.x/}}, version 8.1, a library which enables command-line parameter handling through decorated functions.
\end{itemize}

\subsection{Testing Dependencies}
\begin{itemize}
\item Pytest\footnote{\url{https://docs.pytest.org/en/8.2.x/}}, version 8.2, used to run the test suite, although not many tests exist yet.
\end{itemize}
thesis/Back/CommandRef.tex (new file)
@@ -0,0 +1,145 @@
\chapter{Appendix D}\label{appendix:cmdref}

\section{\iottb}\label{cmdref:iottb}
\begin{verbatim}
Usage: iottb [OPTIONS] COMMAND [ARGS]...

Options:
  -v, --verbosity  Set verbosity  [default: 0; 0<=x<=3]
  -d, --debug      Enable debug mode
  --dry-run        [default: True]
  --cfg-file PATH  Path to iottb config file  [default:
                   $HOME/.config/iottb/iottb.cfg]
  --help           Show this message and exit.

Commands:
  add-device           Add a device to a database
  init-db
  rm-cfg               Removes the cfg file from the filesystem.
  rm-dbs               Removes ALL(!) databases from the filesystem if...
  set-key-in-table-to  Edit config or metadata files.
  show-all             Show everything: configuration, databases, and...
  show-cfg             Show the current configuration context
  sniff                Sniff packets with tcpdump
\end{verbatim}

\subsection{Initialize Database}\label{cmdref:init-db}
\begin{verbatim}
Usage: iottb init-db [OPTIONS]

Options:
  -d, --dest PATH  Location to put (new) iottb database
  -n, --name TEXT  Name of new database.  [default: iottb.db]
  --update-default / --no-update-default
                   If new db should be set as the new default
                   [default: update-default]
  --help           Show this message and exit.
\end{verbatim}

\subsection{Add device}\label{cmdref:add-device}
\begin{verbatim}
Usage: iottb add-device [OPTIONS]

  Add a device to a database

Options:
  --dev, --device-name TEXT    The name of the device to be added. If this
                               string contains spaces or other special
                               characters normalization is
                               performed to derive a canonical name  [required]
  --db, --database DIRECTORY   Database in which to add this device. If not
                               specified use default from config.  [env var:
                               IOTTB_DB]
  --guided                     Add device interactively  [env var:
                               IOTTB_GUIDED_ADD]
  --help                       Show this message and exit.
\end{verbatim}

\subsection{Capture traffic with \textit{tcpdump}}\label{cmdref:sniff}
\begin{verbatim}
Usage: iottb sniff [OPTIONS] [TCPDUMP-ARGS] [DEVICE]

  Sniff packets with tcpdump

Options:
  Testbed sources:
    --db, --database TEXT  Database of device. Only needed if not current
                           default.  [env var: IOTTB_DB]
    --app TEXT             Companion app being used during capture
  Runtime behaviour:
    --unsafe               Disable checks for otherwise required options.
                           [env var: IOTTB_UNSAFE]
    --guided               [env var: IOTTB_GUIDED]
    --pre TEXT             Script to be executed before main command is
                           started.
    --post TEXT            Script to be executed upon completion of main
                           command.
  Tcpdump options:
    -i, --interface TEXT   Network interface to capture on.If not specified
                           tcpdump tries to find and appropriate one.
                           [env var: IOTTB_CAPTURE_INTERFACE]
    -a, --address TEXT     IP or MAC address to filter packets by.
                           [env var: IOTTB_CAPTURE_ADDRESS]
    -I, --monitor-mode     Put interface into monitor mode.
    --ff TEXT              tcpdump filter as string or file path.
                           [env var: IOTTB_CAPTURE_FILTER]
    -#, --print-pacno      Print packet number at beginning of line. True by
                           default.  [default: True]
    -e, --print-ll         Print link layer headers. True by default.
    -c, --count INTEGER    Number of packets to capture.  [default: 1000]
  --help                   Show this message and exit.
\end{verbatim}

\section{Utility commands}\label{cmdref:sec:utils}
Utility commands are mostly for development and have not yet been integrated into the standard workflow.
\subsection{Remove Configuration}\label{cmdref:rm-cfg}
\begin{verbatim}
Usage: iottb rm-cfg [OPTIONS]

  Removes the cfg file from the filesystem.

  This is mostly a utility during development. Once non-standard database
  locations are implemented, deleting this would lead to iottb not being able
  to find them anymore.

Options:
  --yes   Confirm the action without prompting.
  --help  Show this message and exit.
\end{verbatim}

\subsection{Remove Database}\label{cmdref:rm-dbs}
\begin{verbatim}
Usage: iottb rm-dbs [OPTIONS]

  Removes ALL(!) databases from the filesystem if they're empty.

  Development utility currently unfit for use.

Options:
  --yes   Confirm the action without prompting.
  --help  Show this message and exit.
\end{verbatim}

\subsection{Display Configuration File}\label{cmdref:show-cfg}
\begin{verbatim}
Usage: iottb show-cfg [OPTIONS]

  Show the current configuration context

Options:
  --cfg-file PATH  Path to the config file  [default:
                   /home/seb/.config/iottb/iottb.cfg]
  -pp              Pretty Print
  --help           Show this message and exit
\end{verbatim}

\subsection{"Show All"}\label{cmdref:show-all}
\begin{verbatim}
Usage: iottb show-all [OPTIONS]

  Show everything: configuration, databases, and device metadata

Options:
  --help  Show this message and exit.
\end{verbatim}

thesis/Back/wissensch_Redlichkeit_D_09-2023.pdf (new binary file, not shown)
thesis/Back/wissensch_Redlichkeit_E_09-2023.pdf (new binary file, not shown)

thesis/Chapters/ch1-introduction.tex (new file)
@@ -0,0 +1,44 @@
% !TEX root = ../Thesis.tex
\chapter{Introduction}\label{introduction}
\iot devices are becoming increasingly prevalent in modern homes, offering a range of benefits such as controlling home lighting, remote video monitoring, and automated cleaning \citep{iothome2019}.
These conveniences are made possible by the sensors and networked communication capabilities embedded within these devices.
However, these features also pose significant privacy and security risks \citep{islamiot2023}.
IoT devices are often integrated into home networks and communicate over the internet with external servers, potentially enabling surveillance or unauthorized data sharing without the user's knowledge or consent \citep{infoexpiot}. Moreover, even in the absence of malicious intent by the manufacturer, these devices are still vulnerable to programming bugs and other security failures \citep{peekaboo2020}.
\medskip

Researchers focused on the security and privacy of such \iot devices rely on various utilities and tools to conduct their research.
These tools are often glued together in scripts with arbitrary decisions about file naming and data structuring.
Such impromptu scripts typically have a narrow range of application, making them difficult to reuse across different projects. Consequently, useful parts are manually extracted and incorporated into new scripts for each project, exacerbating the problem.
\medskip

This approach leads to scattered data, highly tailored scripts, and a lack of standardized methods for sharing or reproducing experiments. The absence of standardized tools and practices results in inconsistencies in data collection and storage, making it difficult to maintain compatibility across projects.
Furthermore, the lack of conventions about file naming and data structuring leads to issues in finding and accessing the data.
For research groups, these issues are further compounded during the onboarding of new members, who must navigate this fragmented landscape and often create their own ad-hoc solutions, perpetuating the cycle of inefficiency and inconsistency.
\medskip

To systematically and reproducibly study the privacy and security of IoT devices, an easy-to-use testbed that automates and standardizes various aspects of experimenting with IoT devices is needed.

\section{Motivation}\label{sec:motivation}
The primary motivation behind this project is to address the challenges faced by security researchers in the field of IoT device security and privacy.
The scattered nature of data, the lack of standardized tools, and the ad-hoc methods used for data collection or processing are an obstacle for researchers who want to produce valid and reproducible results \citep{fursinckorg2021}.
A standardized testbed, enabling a more systematic approach to collecting and analyzing network data from \iot devices, can help make tedious and error-prone aspects of conducting experiments on \iot devices more bearable, while at the same time enhancing the quality of the data by establishing data collection and storage standards that support interoperability.
This bachelor project is specifically informed by the needs of the PET research group at the University of Basel, which will use it to run IoT device experiments and as a foundation to build more extensive tooling.

\section{Goal}\label{sec:goal}
The goal of this project is to design and implement a testbed for IoT device experiments. To aid reproducibility, there are two main objectives:

First, the testbed should automate key aspects of running experiments with IoT devices, particularly the setup and initialization of data collection processes as well as some basic post-collection data processing.

Secondly, the testbed should standardize how data from experiments is stored. This includes standardizing data and metadata organization, establishing a naming scheme, and defining necessary data formats.
A more detailed description of how this is adapted for this project follows in \cref{ch:adaptation}.


\section{Outline}
This report documents the design and implementation of an \iot testbed.
In the remainder of the text, the typographically formatted string ``\iottbsc'' refers to this project's conception of a testbed, whereas ``\iottb'' specifically denotes the Python package which is the implementation artifact from this project.

This report outlines the general goals of a testbed, details the specific functionalities of \iottbsc, and explains how the principles of automation and standardization are implemented.
We begin by giving some background on the most immediately useful concepts.
\cref{ch:adaptation} derives requirements for \iottbsc starting from first principles and concludes by delineating the scope considered for implementation, which is described in \cref{ch4}.
In \cref{ch:5-eval} we evaluate \iottbsc, and more specifically the \iottb software package, against the requirements stated in \cref{ch:adaptation}.
We conclude in \cref{ch:conclusion} with an outlook on further development for \iottbsc.
thesis/Chapters/ch2-background.tex (new file)
@@ -0,0 +1,48 @@
% !TEX root = ../Thesis.tex
\chapter{Background}
This chapter provides the necessary background to understand the foundational concepts related to IoT devices, testbeds, and data principles that inform the design and implementation of \iottbsc.

\section{Internet of Things}
The \iot refers to the connection of “things” other than traditional computers to the internet. The decreasing size of microprocessors has enabled their integration into smaller and smaller objects. Today, objects like security cameras, home lighting, or children's toys may contain a processor and embedded software that enables them to interact with the internet. The Internet of Things encompasses objects whose purpose has a physical dimension, such as using sensors to measure the physical world or functioning as simple controllers. When these devices can connect to the internet, they are considered part of the Internet of Things and are referred to as \textbf{IoT devices} (see \citet{whatissmartdevice2018} and \citet{iotfundamentals}).

\section{Testbed}
A testbed is a controlled environment set up to perform experiments and tests on new technologies. The concept is used across various fields such as aviation, science, and industry. Despite the varying contexts, all testbeds share the common goal of providing a stable, controlled environment to evaluate the performance and characteristics of the object of interest.

Examples of testbeds include:
\begin{enumerate}
\item \textbf{Industry and Engineering}: In industry and engineering, the term \emph{platform} is often used to describe a starting point for product development. A platform in this context can be considered a testbed where various components and technologies are integrated and tested together before final deployment.
\item \textbf{Natural Sciences}: In the natural sciences, laboratories serve as testbeds by providing controlled environments for scientific experiments. For example, climate chambers are used to study the effects of different environmental conditions on biological samples (e.g., in \citet{vaughan2005use}). Another example is the use of wind tunnels in aerodynamics research to simulate and study the effects of airflow over models of aircraft or other structures.
\item \textbf{Computing}: In computing, specifically within software testing, a suite of unit tests, integrated development environments (IDEs), and other tools could be considered as a testbed. This setup helps in identifying and resolving potential issues before deployment. By controlling parameters of the environment, a testbed can ensure that the software behaves as expected under specified conditions, which is essential for reliable and consistent testing.
\item \textbf{Interdisciplinary}: Testbeds can reach considerable scale. For instance, \citet{tbsmartgrid2013} provides insight into the aspects of a testbed for a smart electric grid.
This testbed is composed of multiple systems (an electrical grid, internet, and communication provision) which in their own right are already complex environments.
The testbed must, via simulation or prototyping, provide control mechanisms, communication, and physical system components.

\end{enumerate}


\section{FAIR Data Principles}
\label{concept:fair}
The \emph{FAIR Data Principles} were first introduced by \citet{wilkinson_fair_2016} with the intention to improve the reusability of scientific data. The principles address \textbf{F}indability, \textbf{A}ccessibility, \textbf{I}nteroperability, and \textbf{R}eusability. Data storage designers may use these principles as a guide when designing data storage systems intended to hold data for easy reuse.
For a more detailed description, see \citep{go-fair}.

\section{Network Traffic}\label{sec:network-traffic}
Studying \iot devices fundamentally involves understanding their network traffic behavior.
This is because network traffic contains (either explicitly or implicitly embedded in it) essential information of interest.
Here are key reasons why network traffic is essential in the context of \iot device security:
\begin{enumerate}
\item \textbf{Communication Patterns}: Network traffic captures the communication patterns between IoT devices and external servers or other devices within the network. By analyzing these patterns, researchers can understand how data flows in and out of the device, which is critical for evaluating performance and identifying any unauthorized communications or unintended leaking of sensitive information.
\item \textbf{Protocol Analysis:} Examining the protocols used by IoT devices helps in understanding how they operate. Different devices might use various communication protocols, and analyzing these can reveal insights into their compatibility, efficiency, and security. Protocol analysis can also uncover potential misconfigurations or deviations from expected behavior.
\item \textbf{Flow Monitoring:} Network traffic analysis is a cornerstone of security research. It allows researchers to identify potential security threats such as data breaches, unauthorized access, and malware infections. By monitoring traffic, one can detect anomalies that may indicate security incidents or vulnerabilities within the device.
\item \textbf{Information Leakage}: \iot devices are often deployed in a home environment and connect to the network through wireless technologies \citep{iothome2019}. This allows an adversary to passively observe traffic. While often this traffic is encrypted, the network flow can leak sensitive information, which is extracted through more complex analysis of communication traffic and Wi-Fi packets \citep{friesssniffing2018, infoexpiot}. In some cases, the adversary can determine the state of the smart environment and their users \citep{peekaboo2020}.
\end{enumerate}


\section{(Network) Packet Capture}
Network \textit{packet capture}\footnote{also known as \emph{packet sniffing}, \emph{network traffic capture}, or just \emph{sniffing}. The latter is often used when referring to nefarious practices.} fundamentally describes the act or process of intercepting and storing data packets traversing a network. It is the principal technique used for studying the behavior and communication patterns of devices on a network. For the reasons mentioned in \cref{sec:network-traffic}, packet capturing is the main data collection mechanism used in \iot device security research, and also the one considered for this project.

\section{Automation Recipes}
\todoRevise()
Automation recipes can be understood as a way of defining a sequence of steps needed for a process.
In the field of machine learning, \textit{Collective Mind}\footnote{\url{https://github.com/mlcommons/ck}} provides a small framework to define reusable recipes for building, running, benchmarking and optimizing machine learning applications.
A key aspect of these recipes is that some are platform-independent, which has enabled wider testing and benchmarking of machine learning models. Even if a given recipe is not yet platform-independent, it can be supplemented with user-specific scripts which handle the platform specifics. Furthermore, it is possible to create a new recipe from the old recipe and the new script, which, when made accessible, essentially extends the applicability of the recipe \citep{friesssniffing2018}.
Automation recipes express the fact that some workflow is automated irrespective of the underlying tooling. A simple script or application can be considered a recipe (or part of one).
thesis/Chapters/ch3-adaptation.tex (new file)
@@ -0,0 +1,141 @@
\chapter{Adaptation}\label{ch:adaptation}

In this chapter, we outline the considerations made during the development of the IoT testbed, \iottbsc.
Starting from first principles, we derive the requirements for our testbed and finally establish the scope for \iottbsc.
The implemented testbed which results from this analysis, the software package \iottb, is discussed in \cref{ch4}.\\

\section{Principal Objectives}\label{sec:principles-and-objectives}
The stated goal for this bachelor project (see \cref{sec:goal}) is to create a testbed for \iot devices, which automates aspects of the involved workflow, with the aim of increasing reproducibility, standardization, and compatibility of tools and data across project boundaries.
We specify two key objectives supporting this goal:
\begin{enumerate}[label=\textit{Objective \arabic*}]
\item \textbf{Automation Recipes:}\label{obj:recipies} The testbed should support specification and repeated execution of important aspects of experiments with IoT devices, such as data collection and analysis (see \citep{fursinckorg2021}).
\item \textbf{FAIR Data Storage:}\label{obj:fair} The testbed should store data in accordance with the FAIR \citep{go-fair} principles.
\end{enumerate}

\section{Requirements Analysis}\label{sec:requirements}
In this section, we present the results of the requirements analysis based on the principal objectives.
The requirements derived for \ref{obj:recipies} are presented in \cref{table:auto_recipe_requirements}.
In \cref{table:fair_data_storage_requirements} we present the requirements based on \ref{obj:fair}.

\begin{table}[H]
\centering
\caption{Automation Recipes Requirements}
\label{table:auto_recipe_requirements}
\begin{minipage}{\textwidth}
\begin{enumerate}[label=\textit{R1.\arabic*}]
\item \label{req:auto_install_tools} \textbf{Installation of Tools}: Support installation of necessary tools like \textit{mitmproxy} \cite{mitmproxy}, \textit{Wireshark} \cite{wiresharkorg} or \textit{tcpdump} \cite{tcpdump}.

\textit{Reasoning:}
There are various tools used for data collection and specifically packet capture.
Automating the installation of necessary tools ensures that all required software is available and configured correctly without manual intervention. This reduces the risk of human error during setup and guarantees that the testbed environment is consistently prepared for use. Many platforms, notably most common Linux distributions, come with package managers which provide a simple command-line interface for installing software while automatically handling dependencies. This allows tools to be quickly installed, making it a \textit{lower priority} requirement for \iottbsc.

\item \label{req:auto_config_start} \textbf{Configuration and Start of Data Collection}: Automate the configuration and start of data collection processes. Specific subtasks include:
\begin{enumerate}
\item Automate wireless hotspot management on the capture device.
\item Automatic handling of network capture, including the collection of relevant metadata.
\end{enumerate}

\textit{Reasoning:}
Data collection is a central step in the experimentation workflow. Configuration is time-consuming and prone to error, suggesting that automating this process is useful. As mentioned in \cref{sec:motivation}, current practices lead to incompatible data and difficult to reuse scripts.
Automating the configuration and start of data collection processes ensures a standardized approach, reducing the potential for user error
and thereby increasing data compatibility and efficient use of tools. Automating this process must be a central aspect of \iottbsc.

\item \label{req:auto_data_processing} \textbf{Data Processing}: Automate data processing tasks.

\textit{Reasoning:} Some network capture tools produce output in a binary format. To make the data available to other processes, often the data must be transformed in some way.
Data processing automation ensures that the collected data is processed uniformly and efficiently, enhancing its reusability and interoperability. Processing steps may include cleaning, transforming, and analyzing the data, which are essential steps to derive meaningful insights. Automated data processing saves time and reduces the potential for human error. It ensures that data handling procedures are consistent, which is crucial for comparing results across different experiments and ensuring the validity of findings.


\item \label{req:auto_reproducibility} \textbf{Reproducibility}: Ensure that experiments can be repeated with the same setup and configuration.

\textit{Reasoning:} A precondition to reproducible scientific results is the ability to run experiments repeatedly with all relevant aspects set up and configured identically.
\item \label{req:auto_execution_control} \textbf{Execution Control}: Provide mechanisms for controlling the execution of automation recipes (e.g., start, stop, status checks).

\textit{Reasoning:} Control mechanisms are essential for managing the execution of automated tasks. This includes starting, stopping, and monitoring the status of these tasks to ensure they are completed successfully.

\item \label{req:auto_error_logging} \textbf{Error Handling and Logging}: Include robust error handling and logging to facilitate debugging and enhance reusability.

\textit{Reasoning:} Effective error handling and logging improve the robustness and reliability of the testbed. Automation recipes may contain software with incompatible logging mechanisms.
To facilitate development and troubleshooting, a unified and principled logging approach is important for \iottbsc.
\item \label{req:auto_documentation} \textbf{Documentation}: Provide clear documentation and examples for creating and running automation recipes.
\end{enumerate}
\end{minipage}
\end{table}

\begin{table}[H]
\centering
\caption{FAIR Data Storage Requirements}
\label{table:fair_data_storage_requirements}
\begin{minipage}{\textwidth}
\begin{enumerate}[label=\textit{R2.\arabic*}]
\item \label{req:fair_data_meta_inventory} \textbf{Data and Metadata Inventory}: \iottbsc should provide an inventory of data and metadata that typically need to be recorded (e.g., raw traffic, timestamps, device identifiers).

\textit{Reasoning:} Providing a comprehensive inventory of data and metadata ensures that data remains findable after collection. Including metadata increases interpretability and gives context necessary for extracting reproducible results.

\item \label{req:fair_data_formats} \textbf{Data Formats and Schemas}: Define standardized data formats and schemas.

\textit{Reasoning:} Standardized data formats and schemas ensure consistency and interoperability.

\item \label{req:fair_file_naming} \textbf{File Naming and Directory Hierarchy}: Establish clear file naming conventions and directory hierarchies for organized data storage.

\textit{Reasoning:} This enhances findability and accessibility.
\item \label{req:fair_preservation} \textbf{Data Preservation Practices}: Implement best practices for data preservation, including recommendations from authoritative sources like the Library of Congress \citep{recommendedformatrsLOC}.

\textit{Reasoning:} Implementing best practices for data preservation can mitigate data degradation and ensure the integrity of data over time. This ensures long-term accessibility and reusability.
\item \label{req:fair_accessibility} \textbf{Accessibility Controls}: Ensure data accessibility with appropriate permissions and access controls.
\item \label{req:fair_interoperability} \textbf{Interoperability Standards}: Use widely supported formats and protocols to facilitate data exchange and interoperability.
\item \label{req:fair_reusability} \textbf{Reusability Documentation}: Provide detailed metadata to support data reuse by other researchers.
\end{enumerate}
\end{minipage}
\end{table}

We return to these when we evaluate \iottbsc in \cref{ch:5-eval}.

\section{Scope}\label{sec:scope}
This section defines the scope of the testbed \iottbsc.
To guide the implementation of the software component of this bachelor project, \iottb,
we focus on a specific set of requirements that align with the scope of a bachelor project.
While the identified requirements encompass a broad range of considerations, we have prioritized those that are most critical to achieving the primary objectives of the project.

For this project, we delineate our scope regarding the principal objectives as follows:
\begin{itemize}
\item \ref{obj:recipies}: \iottb focuses on complying with \ref{req:auto_config_start} and \ref{req:auto_reproducibility}.
\item \ref{obj:fair}: \iottb ensures FAIR data storage implicitly, with the main focus lying on \ref{req:fair_data_formats}, \ref{req:fair_data_meta_inventory}, and \ref{req:fair_file_naming}.
\end{itemize}


\subsection{Model Environment}\label{sec:assumed-setup}
In this section, we describe the environment model assumed as the basis for conducting \iot device experiments.
This mainly involves delineating the network topology. Considerations are taken to make this environment, over which the \iottb testbed software has no control, easily reproducible \citep{vacuumpie2023}.\\

We assume that the \iot device generally requires a Wi-Fi connection.
This implies that the environment is configured to reliably capture network traffic without disrupting the \iot device's connectivity. This involves setting up a machine with internet access (wired or wireless) and possibly one Wi-Fi card supporting AP mode to act as the \ap for the \iot device under test \citep{surveytestingmethods2022}.
Additionally, the setup must enable bridging the IoT-AP network to the internet to ensure \iot device connectivity.\\

Specifically, the assumed setup for network traffic capture includes the following components:
\begin{enumerate}
\item \textbf{IoT Device:} The device under investigation, connected to a network.
\item \textbf{Capture Device:} A computer or dedicated hardware device configured to intercept and record network traffic. This is where \iottb runs.
\item \textbf{Wi-Fi \ap:} The \ap through which the \iot device gets network access.
\item \textbf{Router/Internet gateway:} The network must provide internet access.
\item \textbf{Switch or software bridge:} At least either a switch or an \os with software bridge support must be available to be able to implement one of the setups described in \cref{fig:cap-setup1} and \cref{fig:cap-setup2}.
\item \textbf{Software:} tcpdump is needed for network capture.
\end{enumerate}
\newpage
\begin{figure}[!ht]
\centering
\includegraphics[width=0.75\linewidth]{Figures/network-setup1.png}
\caption{Capture setup with separate Capture Device and AP}
\label{fig:cap-setup1}
\end{figure}

\begin{figure}[!ht]
\centering
\includegraphics[width=0.75\linewidth]{Figures/setup2.png}
\caption{Capture setup where the capture device doubles as the \ap for the \iot device.}
\label{fig:cap-setup2}
\end{figure}
\newpage

thesis/Chapters/ch4-iottb.tex (new file)
@@ -0,0 +1,153 @@
\chapter{Implementation}\label{ch4}
This chapter discusses the implementation of the IoT device testbed, \iottbsc, which is developed using the Python programming language. This choice is motivated by Python's wide availability and the familiarity many users have with it, thus lowering the barrier for extending and modifying the testbed in the future. The testbed is delivered as a Python package and provides the \iottb command with various subcommands. A full command reference can be found at \cref{appendix:cmdref}.\\
Conceptually, the software implements two separate aspects: data collection and data storage.
The \iottbsc database schema is implicitly implemented by \iottb. Users use \iottb mainly to operate on the database or initiate data collection. Since the database schema is transparent to the user during operation, we begin with a brief description of the database layout as a directory hierarchy, before we get into the \iottb \cli.

\section{Database Schema}
The storage for \iottbsc is implemented on top of the file system of the user.
Since user folder structures provide little standardization, we require a configuration file, which gives \iottb some basic information about the execution environment.
The testbed is configured in a configuration file in JSON format, following the schema in \cref{lst:cfg-shema}.
\verb|DefaultDatabase| is a string which represents the name of the database, which is a directory in \\
\verb|DefaultDatabasePath| once initialized.
\iottb assumes these values during execution, unless the user specifies otherwise.
If the user specifies a different database location as an option in a subcommand, \verb|DatabaseLocations| is consulted.
\verb|DatabaseLocations| is a mapping from every known database name to the full path of its parent directory in the file system.
The configuration file is loaded for every invocation of \iottb.
It provides the minimal operating information.
Now that we understand the configuration file, we can turn to how \iottb is used.
\begin{listing}[!ht]
\inputminted[]{json}{cfg-shema.json}
\caption{Schema of the testbed configuration file.}
\label{lst:cfg-shema}
\end{listing}
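
For illustration, a populated configuration might look as follows. The key names come from the schema in \cref{lst:cfg-shema}; the database names and paths are hypothetical examples, not values produced by \iottb.
\begin{minted}{python}
import json

# Hypothetical example values; only the key names follow the schema above.
example_config = {
    "DefaultDatabase": "iottb.db",
    "DefaultDatabasePath": "/home/seb",
    "DatabaseLocations": {
        "iottb.db": "/home/seb",
        "showcase": "/home/seb"
    }
}
print(json.dumps(example_config, indent=2))
\end{minted}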
\newpage
\section{High Level Description}
\iottb is used from the command line and is invoked following the schema below; in all cases, a subcommand must be specified for anything to happen:
\begin{minted}[fontsize=\small]{bash}
iottb [<global options>] <subcommand> [<subcommand options>] [<argument(s)>]
\end{minted}
\todoRevise{Better listing}
When \iottb is invoked, it first checks to see if it can find the database directory in the \os user's home directory\footnote{Default can be changed}.

\section{Database Initialization}\label{sec:db-init}
The IoT testbed database is defined to be a directory named \db. Currently, \iottb creates this directory in the user's home directory (commonly located at the path \texttt{/home/<username>} on Linux systems) the first time any subcommand is used. All data and metadata are placed under this directory. Invoking \verb|iottb init-db| without arguments causes defaults to be loaded from the configuration file. If the file does not exist, it is created with default values following \cref{lst:cfg-shema}. Otherwise, the database is created with the default name or the user-supplied name as a directory in the file system, unless a database under that name is already registered in the \verb|DatabaseLocations| map. The commands described in the later sections all depend on the existence of a \iottbsc database.
It is neither possible to add a device nor initiate data collection without an existing database.
The full command line specification can be found in \cref{cmdref:init-db}.
Once a database is initialized, devices may be added to that database.

\section{Adding Devices}\label{sec:add-dev}
Before we capture the traffic of an \iot device, \iottb demands that there exists a dedicated directory for it.
We add a device to the database by passing a string representing the name of the device to the \addev subcommand.
This does three things:
\begin{enumerate}
\item A Python object is initialized from the class shown in \cref{lst:dev-meta-python}.
\item A directory for the device is created as \verb|<db-path>/<device_canonical_name>|.
\item A metadata file \verb|device_metadata.json| is created and placed in the newly created directory. This file is in the JSON format, and follows the schema seen in \cref{lst:dev-meta-python}.
\end{enumerate}

\begin{listing}[!ht]
\inputminted[firstline=12, lastline=29, linenos]{python}{device_metadata.py}
\caption{Device Metadata}
\label{lst:dev-meta-python}
\end{listing}

The Device ID is automatically generated using a UUID to be FAIR compliant. \verb|canonical_name| is generated by the \verb|make_canonical_name()| function provided in \cref{lst:dev-canonical}.
Fields not supplied to \verb|__init__| in \cref{lst:dev-meta-python} are kept empty. The other fields are currently not used by \iottb itself, but provide metadata
which can be used during a processing step. Optionally, one can manually create such a file with pre-set values and pass it to the setup.
For example, say the testbed contains a configuration as can be seen in \cref{lst:appendix:appendixa:config-file}.

\begin{listing}[!ht]
\inputminted[firstline=1, lastline=8, linenos]{json}{appendixa-after-add-device-dir.txt}
\caption{Configuration file of the testbed before adding the devices.}
\label{lst:cfg-file-post-add}
\end{listing}

If we then add two devices \verb|'iPhone 13 (year 2043)'| and \verb|roomba|, the layout of the database resembles \cref{lst:cfg-db-layout-post-add} and, for instance, the \verb|roomba| device's directory will contain the metadata listed in \cref{lst:meta-roomba-post-add}. See \cref{appendixA:add-dev-cfg} for a complete overview.

\begin{listing}[!ht]
\lstinputlisting[firstline=11, lastline=16]{appendixa-after-add-device-dir.txt}
\caption{Database directory layout after adding the two devices.}
\label{lst:cfg-db-layout-post-add}
\end{listing}

\begin{listing}[!ht]
\lstinputlisting[firstline=39, lastline=55]{appendixa-after-add-device-dir.txt}
\caption{Metadata of the \texttt{roomba} device after adding it.}
\label{lst:meta-roomba-post-add}
\end{listing}

\newpage
\section{Traffic Sniffing}\label{sec:sniff}
Automated network capture is a key component of \iottb. The standard network capture is provided by the \texttt{sniff} subcommand, which wraps the common traffic capture utility \emph{tcpdump} \citep{tcpdump}. \cref{cmdref:sniff} shows usage of the command.

Unless explicitly allowed by specifying that the command should run in \texttt{unsafe} mode, an IPv4 or MAC address \emph{must} be provided. IP addresses are only accepted in dot-decimal notation\footnote{e.g., 172.168.1.1}, and MAC addresses must be specified as six groups of two hexadecimal digits\footnote{e.g., 12:34:56:78:AA:BB}. Failing to provide either results in the capture being aborted. The rationale behind this is simple: they are the only way to identify the traffic of interest. Of course, it is possible to retrieve the IP or MAC after a capture. Still, the merits outweigh the annoyance. The hope is that this makes \iottb easier to use \emph{correctly}. For example, consider the situation where a student is tasked with performing multiple captures across multiple devices. If the student is not aware of the need for an address for the captured data to be usable, then this policy avoids the headache and frustration of wasted time and unusable data.

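As an illustration of the accepted formats, a check along the following lines could be used; the regular expressions and the function name are assumptions made for this sketch and are not taken from the \iottb sources.
\begin{minted}{python}
import re

# Illustrative patterns: dot-decimal IPv4 and colon-separated MAC addresses.
IPV4_RE = re.compile(r'^(\d{1,3}\.){3}\d{1,3}$')
MAC_RE = re.compile(r'^([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$')

def address_is_acceptable(address, unsafe=False):
    """Sketch of the policy described above: without --unsafe, a capture
    is only allowed when a plausible IPv4 or MAC address is supplied."""
    if unsafe:
        return True
    if not address:
        return False
    return bool(IPV4_RE.match(address) or MAC_RE.match(address))
\end{minted}
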
To comply with \ref{req:auto_config_start} and \ref{req:fair_data_meta_inventory}, each capture also stores some metadata in \texttt{capture\_metadata.json}. \cref{lst:cap-meta} shows the metadata file's schema.


\begin{listing}[!ht]
\inputminted[firstline=288, lastline=319]{python}{sniff.py}
\caption{Metadata stored for the sniff command.}
\label{lst:cap-meta}
\end{listing}

The \texttt{device\_id} is the \uuid \ of the device for which the capture was performed. This ensures the capture metadata remains associated even if files are moved. Furthermore, each capture also gets a \uuid. This \uuid \ is used as the suffix for the PCAP file and the log files. The exact naming scheme is given in \cref{lst:cap-naming}.

\begin{listing}
\inputminted[firstline=179, lastline=181]{python}{sniff.py}
\caption{Naming scheme for files created during capture.}
\label{lst:cap-naming}
\end{listing}


\section{Working with Metadata}
The \texttt{meta} subcommand provides a facility for manipulating metadata files. It allows users to get the value of any key in a metadata file as well as introduce new key-value pairs. However, it is not possible to change the value of any key already present in the metadata. This restriction is in place to prevent metadata corruption.

The most crucial value in any metadata file is the \texttt{uuid} of the device or capture the metadata belongs to. Changing the \texttt{uuid} would cause \iottb to mishandle the data, as all references to data associated with that \texttt{uuid} would become invalid. Changing any other value might not cause mishandling by \iottb, but the existing values nonetheless represent essential information about the data. Therefore, \iottb does not allow changes to existing keys once they are set.

Future improvements might relax this restriction by implementing stricter checks on which keys can be modified. This would involve defining a strict set of keys that are write-once and then read-only.

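The write-once behaviour can be summarised with a small sketch; the function below is a placeholder illustration, not the actual \texttt{meta} implementation:
\begin{minted}{python}
import json
from pathlib import Path

def set_key_once(metadata_file: Path, key, value):
    """Add a new key-value pair, but refuse to overwrite existing keys
    (sketch of the policy described above, not the real code)."""
    metadata = json.loads(metadata_file.read_text())
    if key in metadata:
        return False  # existing keys, e.g. the uuid, stay untouched
    metadata[key] = value
    metadata_file.write_text(json.dumps(metadata, indent=2))
    return True
\end{minted}
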
\section{Raw Captures}
The \texttt{raw} subcommand offers a flexible way to run virtually any command wrapped in \iottb. Of course, the intended use is with other capture tools, like \textit{mitmproxy} \citep{mitmproxy}, and not arbitrary shell commands.
While some benefits, particularly those related to standardized capture, are diminished, users still retain the advantages of the database.


The syntax of the \texttt{raw} subcommand is as follows:
\begin{minted}{bash}
iottb raw <device> <command-name> "<command-options-string>" # or
iottb raw <device> "<string-executable-by-a-shell>" #
\end{minted}

\iottb does not provide error checking for user-supplied arguments or strings.
Users benefit from the fact that captures will be registered in the database, assigned a \texttt{uuid}, and associated with the device.
The metadata file of the capture can then be edited manually if needed.

However, each incorrect or unintended invocation that adheres to the database syntax (i.e., the specified device exists) will create a new capture directory with a metadata file and \texttt{uuid}. Therefore, users are advised to thoroughly test commands beforehand to avoid creating unnecessary clutter.

\section{Integrating user scripts}\label{sec:integrating-user-scripts}
The \texttt{--pre} and \texttt{--post} options allow users to run any executable before and after any subcommand, respectively.
Both options take a string as their argument, which is passed as input to a shell and launched as a subprocess.
The rationale for running the process in a shell is that Python's Standard Library process management module, \texttt{subprocess}\footnote{\url{https://docs.python.org/3/library/subprocess.html}}, does not accept arguments for the target subprocess when a single string is passed for execution.

Execution is synchronous: the subcommand does not begin execution until the \texttt{--pre} script finishes, and the \texttt{--post} script only starts executing after the subcommand has completed its execution. \iottb always runs in that order.

There may be cases where a script provides some type of relevant interaction intended to run in parallel with the capture. Currently, the recommended way to achieve this is to wrap the target executable in a script that forks a process to execute the target script, detaches from it, and returns.
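A minimal wrapper of this kind could look as follows; the paths are placeholders and the sketch assumes a POSIX shell.
\begin{minted}{bash}
#!/usr/bin/env bash
# Hypothetical wrapper: start the interaction script in the background,
# detach it from the shell, and return immediately so that the iottb
# subcommand can run in parallel with it.
nohup /path/to/interaction-script.sh > /tmp/interaction.log 2>&1 &
disown
exit 0
\end{minted}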
|
||||||
|
|
||||||
|
These options are a gateway for more complex environment setups and, in particular, allow users to reuse their scripts, thus lowering the barrier to adopting \iottb.
|
||||||
|
|
||||||
|
\section{Extending and Modifying the Testbed}
|
||||||
|
One of the key design goals of \iottb is easy extensibility. \iottb uses the Click library \citep{click} to handle argument parsing. Adding a new command amounts to no more than writing a function and decorating it according to the Click specification.
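As a rough sketch, a new subcommand could look as follows; the group object and option names are illustrative assumptions and do not mirror the actual \iottb sources.
\begin{minted}{python}
# Illustrative sketch of adding a subcommand with Click.
import click

@click.group()
def cli():
    """Entry point of the testbed CLI (stand-in for the real group)."""

@cli.command(name='show-device')
@click.argument('device')
@click.option('--verbose', is_flag=True, help='Print extra information.')
def show_device(device, verbose):
    """Example subcommand: print information about a device."""
    click.echo(f'Device: {device}')
    if verbose:
        click.echo('(verbose output would go here)')

if __name__ == '__main__':
    cli()
\end{minted}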
|
||||||
|
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
%%%%%%%%%%%% Figures
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
|
||||||
|
|
||||||
202
thesis/Chapters/ch5-evaluation.tex
Normal file
202
thesis/Chapters/ch5-evaluation.tex
Normal file
@ -0,0 +1,202 @@
|
|||||||
|
\chapter{Evaluation}\label{ch:5-eval}
|
||||||
|
In this chapter, we evaluate \iottb, paying particular attention to the requirements defined in \cref{sec:requirements}.
|
||||||
|
|
||||||
|
\begin{table}[h!]
|
||||||
|
\centering
|
||||||
|
\begin{tabular}{|c|l|c|}
|
||||||
|
\hline
|
||||||
|
\textbf{Requirement ID} & \textbf{Description} & \textbf{Status} \\ \hline
|
||||||
|
\ref{req:auto_install_tools} & Installation of Tools & Not Met \\ \hline
|
||||||
|
\ref{req:auto_config_start} & Configuration and Start of Data Collection & $\downarrow$ \\ \hline
|
||||||
|
\ref{req:auto_config_start}a) & Automate Wi-Fi Setup & Not Met \\ \hline
|
||||||
|
\ref{req:auto_config_start}b) & Automate Data Capture & Met \\ \hline
|
||||||
|
\ref{req:auto_data_processing} & Data Processing & Partially Met \\ \hline
|
||||||
|
\ref{req:auto_reproducibility} & Reproducibility & Partially Met \\ \hline
|
||||||
|
\ref{req:auto_execution_control} & Execution Control & Not Met \\ \hline
|
||||||
|
\ref{req:auto_error_logging} & Error Handling and Logging & Partially Met \\ \hline
|
||||||
|
\ref{req:auto_documentation} & Documentation & $\downarrow$ \\ \hline
|
||||||
|
\ref{req:auto_documentation}a) & User Manual & Met \\ \hline
|
||||||
|
\ref{req:auto_documentation}b) & Developer Docs & Not Met \\ \hline
|
||||||
|
\ref{req:fair_data_meta_inventory} & Data and Metadata Inventory & Met \\ \hline
|
||||||
|
\ref{req:fair_data_formats} & Data Formats and Schemas & Met \\ \hline
|
||||||
|
\ref{req:fair_file_naming} & File Naming and Directory Hierarchy & Met \\ \hline
|
||||||
|
\ref{req:fair_preservation} & Data Preservation Practices & Partially Met \\ \hline
|
||||||
|
\ref{req:fair_accessibility} & Accessibility Controls & Not Met \\ \hline
|
||||||
|
\ref{req:fair_interoperability} & Interoperability Standards & Fully Met \\ \hline
|
||||||
|
\ref{req:fair_reusability} & Reusability Documentation & Met \\ \hline
|
||||||
|
\end{tabular}
|
||||||
|
\caption{Summary of Requirements Evaluation}
|
||||||
|
\label{tab:requirements-evaluation}
|
||||||
|
\end{table}
|
||||||
|
|
||||||
|
\cref{tab:requirements-evaluation} gives an overview of the requirements introduced in \cref{sec:requirements} and our assessment of their status.
|
||||||
|
It is important to note that the status “Met” does not imply that the requirement is implemented to the highest possible standard.
|
||||||
|
Furthermore, this set of requirements itself can (and should) be made more specific and expanded in both detail and scope as the project evolves.
|
||||||
|
|
||||||
|
Additionally, \cref{tab:requirements-evaluation} does not provide granularity regarding the status of individual components, which might meet the requirements to varying degrees.
|
||||||
|
For example, while the requirement for data collection automation may be fully met in terms of basic functionality, advanced features such as handling edge cases or optimizing performance might still need improvement.
|
||||||
|
Similarly, the requirement for data storage might be met in terms of basic file organization but could benefit from enhanced data preservation practices.
|
||||||
|
|
||||||
|
Thus, the statuses presented in \cref{tab:requirements-evaluation} should be viewed as a general assessment rather than ground truth.
|
||||||
|
Future work should aim to refine these requirements and their implementation to ensure that \iottbsc continues to evolve and improve.
|
||||||
|
|
||||||
|
To provide a more comprehensive understanding, the following sections offer a detailed evaluation of each requirement. This detailed analysis will discuss how each requirement was addressed, the degree to which it was met, and any specific aspects that may still need improvement. By examining each requirement individually, we can better understand the strengths and limitations of the current implementation and identify areas for future enhancement.
|
||||||
|
|
||||||
|
\section{\ref{req:auto_install_tools}: Installation of Tools}
|
||||||
|
\textbf{Status: Not Met} \\
|
||||||
|
\iottbsc does not install any software or tools by itself. Dependency management for Python packages is handled by installers like pip, since the Python package declares its dependencies.
|
||||||
|
Tcpdump is the only external dependency, and \iottbsc checks if Tcpdump is available on the capture device. If it is not, the user is asked to install it.
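The kind of availability check described above can be done with the standard library; the following is a sketch, not the actual \iottbsc code.
\begin{minted}{python}
# Sketch: check whether tcpdump is on the PATH of the capture device.
import shutil

def tcpdump_available() -> bool:
    return shutil.which('tcpdump') is not None
\end{minted}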
|
||||||
|
Our position is that it is generally better not to force the installation of software and to leave users the freedom to choose. The added benefit of a built-in installer seems low and does not promise a great enough improvement in ease of use to justify the maintenance cost of such a module.
|
||||||
|
For future work, this requirement could be removed.
|
||||||
|
|
||||||
|
\section{\ref{req:auto_config_start}: Configuration and Start of Data Collection}
|
||||||
|
\textbf{Status: Partially Met} \\
|
||||||
|
The testbed automates some aspects of configuring and initializing the data collection process, which reduces setup time and minimizes errors. This project focused on packet capture and adjacent tasks. \ref{req:auto_config_start}b can be considered \textit{complete} in that packet capture is fully supported through tcpdump and important metadata is saved. Depending on the setup (see \cref{fig:cap-setup1} and \cref{fig:cap-setup2}), a Wi-Fi hotspot needs to be set up before packet capture is initiated. \iottbsc does not currently implement automated setup and takedown of a hotspot on any platform, so \ref{req:auto_config_start}a is not currently met. There are scripts for Linux systems bundled with the Python package which can be used with the \texttt{--pre} and \texttt{--post} options mentioned in \cref{sec:integrating-user-scripts}. But to consider this task fully automated and supported, this functionality should be built into \iottbsc itself.
|
||||||
|
Furthermore, there are other data collection tools, like \textit{mitmproxy} \citep{mitmproxy}, and more complicated setup tasks, like setting up a routing table to allow for more capture scenarios; these are tedious tasks that lend themselves to automation. Future work should include continuously extending the set of available automation recipes.
|
||||||
|
New task groups/recipe domains should be added as sub-requirements of \ref{req:auto_config_start}.
|
||||||
|
We propose the following new sub-requirements:
|
||||||
|
\begin{itemize}
|
||||||
|
\item \ref{req:auto_config_start}c: The testbed should implement automatic setup of NAT routing for situations where the \ap is connected to the capture device and a bridged setup is not supported (see the sketch after this list).
\item \ref{req:auto_config_start}d: The testbed should dynamically determine which type of hotspot setup is possible and choose the appropriate automation recipe.
|
||||||
|
\end{itemize}
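The NAT setup in \ref{req:auto_config_start}c would roughly automate steps like the following; the interface names and uplink are assumptions, and the commands are meant as a sketch, not a finished recipe.
\begin{minted}{bash}
# Sketch: forward traffic from the hotspot interface (wlan0) to the uplink (eth0).
sudo sysctl -w net.ipv4.ip_forward=1
sudo iptables -t nat -A POSTROUTING -o eth0 -j MASQUERADE
sudo iptables -A FORWARD -i wlan0 -o eth0 -j ACCEPT
sudo iptables -A FORWARD -i eth0 -o wlan0 \
     -m state --state RELATED,ESTABLISHED -j ACCEPT
\end{minted}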
|
||||||
|
Extending \ref{req:auto_config_start} means stating which data collection and adjacent recipes are wanted.
|
||||||
|
|
||||||
|
\section{\ref{req:auto_data_processing}: Data Processing}
|
||||||
|
\textbf{Status: Partially Met} \\
|
||||||
|
While the testbed includes some basic data processing capabilities, there is room for improvement.
|
||||||
|
Currently, only one recipe exists for processing raw data: \iottbsc can extract a CSV file from a PCAP file. There are many possibilities for automation recipes which support data processing.
|
||||||
|
Having the data in a more standardized format allows for the creation of more sophisticated feature extraction recipes with applications in machine learning.
|
||||||
|
Before they are available, users can still use the \texttt{--post} option with their feature extraction scripts.
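Such a conversion can also be performed manually with standard tools; the following tshark invocation is a sketch and the chosen fields are assumptions; the built-in \iottbsc recipe may select different ones.
\begin{minted}{bash}
# Sketch: extract a few per-packet fields from a PCAP file into CSV.
tshark -r capture.pcap -T fields \
       -e frame.time_epoch -e ip.src -e ip.dst -e frame.len \
       -E header=y -E separator=, > capture.csv
\end{minted}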
|
||||||
|
|
||||||
|
\section{\ref{req:auto_reproducibility}: Reproducibility}
|
||||||
|
\textbf{Status: Met} \\
|
||||||
|
Supported automation can be run repeatedly, and the options used are documented in the capture metadata. This allows others to repeat the process with the same options.
|
||||||
|
In this respect, the requirement is met. However, the current state could be significantly improved by automating the process of repeating a capture task with the same configuration as previous captures.
|
||||||
|
To support this, we propose the following new sub-requirements, which aid the automated reproduction of past capture workflows:
|
||||||
|
\begin{itemize}
|
||||||
|
\item \ref{req:auto_reproducibility}a: The testbed should be able to read command options from a file (see the sketch after this list).
\item \ref{req:auto_reproducibility}b: The testbed should be able to perform a capture based on the metadata files of completed captures.
|
||||||
|
\end{itemize}
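Sub-requirement \ref{req:auto_reproducibility}a could, for instance, build on Click's \texttt{default\_map} mechanism; the following is a sketch under that assumption and not part of the current implementation.
\begin{minted}{python}
# Sketch: load option defaults for subcommands from a JSON options file.
import json
from pathlib import Path

import click

@click.group()
@click.option('--options-file', type=click.Path(exists=True), default=None)
@click.pass_context
def cli(ctx, options_file):
    if options_file:
        # Values from the file become the defaults of subcommand options.
        ctx.default_map = json.loads(Path(options_file).read_text())
\end{minted}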
|
||||||
|
Adopting these requirements promises to significantly increase reproducibility.
|
||||||
|
|
||||||
|
\section{\ref{req:auto_execution_control}: Execution Control}
|
||||||
|
\textbf{Status: Not Met} \\
|
||||||
|
The testbed currently provides no controlled method to interfere with a running recipe. In most cases, \iottb will end gracefully if the user sends the process a SIGINT, but there are no explicit protections against data corruption in this case. Furthermore, during execution, \iottb writes to log files and prints basic information to the user's terminal. Extending this with some kind of monitoring mechanism would be a good step toward complying with this requirement in the future.
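A small step in this direction would be an explicit SIGINT handler that persists metadata before exiting; the sketch below is illustrative and not part of the current code base.
\begin{minted}{python}
# Sketch: flush capture metadata before terminating on Ctrl-C.
import signal
import sys

def install_sigint_handler(save_metadata):
    def handler(signum, frame):
        save_metadata()   # persist whatever has been collected so far
        sys.exit(130)     # conventional exit status for SIGINT
    signal.signal(signal.SIGINT, handler)
\end{minted}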
|
||||||
|
|
||||||
|
\section{\ref{req:auto_error_logging}: Error Handling and Logging}
|
||||||
|
\textbf{Status: Met} \\
|
||||||
|
Robust error handling and logging are implemented, ensuring that issues can be diagnosed and resolved effectively. Detailed logs help maintain the integrity of experiments. It is also possible for the user to control how much output is printed to the terminal. Below are examples of the same command with increasing degrees of verbosity specified by the user:
|
||||||
|
|
||||||
|
\subsection{Logging Example}
|
||||||
|
\textbf{Command: } \verb|iottb sniff roomba --unsafe -c 10 <verbosity>|
|
||||||
|
Verbosity can be unspecified, \verb|-v|, \verb|-vv|, or \verb|-vvv|.
|
||||||
|
|
||||||
|
\begin{figure}
\centering
\begin{minted}[breaklines]{bash}
$ iottb sniff roomba --unsafe -c 10
Testbed [I]
Using canonical device name roomba
Found device at path /home/seb/showcase/roomba
Using filter None
Files will be placed in /home/seb/showcase/roomba/sniffs/2024-07-01/cap0000-0214
Capture has id 62de82ad-3aa2-460e-acd0-546e46377987
Capture setup complete!
Capture complete. Saved to roomba_62de82ad-3aa2-460e-acd0-546e46377987.pcap
tcpdump took 2.16 seconds.
Ensuring correct ownership of created files.
Saving metadata.
END SNIFF SUBCOMMAND
\end{minted}
\caption{No verbosity.}
\label{fig:example-no-verb}
\end{figure}
|
||||||
|
|
||||||
|
On the first verbosity level, only logger warnings are now printed to the standard output. During normal execution we do not expect significantly more output. This is also true for the second verbosity level.
|
||||||
|
\begin{figure}
\centering
\begin{minted}[breaklines]{bash}
$ iottb -v|-vv sniff roomba --unsafe -c 10
<_io.TextIOWrapper name='<stdout>' mode='w' encoding='utf-8'>
WARNING - iottb_config - DatabaseLocations are DatabaseLocationMap in the class iottb.models.iottb_config
\end{minted}
\caption{Only \textit{additional} output for \texttt{-v} or \texttt{-vv}.}
\label{fig:example-one-verb}
\end{figure}
|
||||||
|
|
||||||
|
This changes once we reach the third verbosity level, because now the logger level is additionally set to ``INFO''.
|
||||||
|
Clearly, \cref{fig:example-lvl-three} contains far more output than \cref{fig:example-one-verb}.
|
||||||
|
It is possible to get even more output printed to standard output by also passing the \verb|--debug| flag.
|
||||||
|
This produces significantly more output as can be seen in \cref{fig:example-debug-output}.
|
||||||
|
\begin{figure}
\centering
\begin{minted}[breaklines]{bash}
$ iottb -vvv sniff roomba --unsafe -c 10
<_io.TextIOWrapper name='<stdout>' mode='w' encoding='utf-8'>
INFO - main - cli - 48 - Starting execution.
INFO - iottb_config - __init__ - 24 - Initializing Config object
WARNING - iottb_config - warn - 21 - DatabaseLocations are DatabaseLocationMap in the class iottb.models.iottb_config
INFO - iottb_config - load_config - 57 - Loading configuration file
INFO - iottb_config - load_config - 62 - Config file exists, opening.
INFO - sniff - validate_sniff - 37 - Validating sniff...
INFO - sniff - sniff - 91 - sniff command invoked
INFO - string_processing - make_canonical_name - 20 - Normalizing name roomba
Testbed [I]
Using canonical device name roomba
Found device at path /home/seb/showcase/roomba
INFO - sniff - sniff - 152 - Generic filter None
Using filter None
Files will be placed in /home/seb/showcase/roomba/sniffs/2024-07-01/cap0003-0309
Capture has id f1e92062-4a82-4429-996c-97bd7fa57bec
INFO - sniff - sniff - 186 - pcap file name is roomba_f1e92062-4a82-4429-996c-97bd7fa57bec.pcap
INFO - sniff - sniff - 187 - stdout log file is stdout_f1e92062-4a82-4429-996c-97bd7fa57bec.log
INFO - sniff - sniff - 188 - stderr log file is stderr_f1e92062-4a82-4429-996c-97bd7fa57bec.log
INFO - sniff - sniff - 246 - tcpdump command: sudo tcpdump -# -n -vvv -c 10 -w /home/seb/showcase/roomba/sniffs/2024-07-01/cap0003-0309/roomba_f1e92062-4a82-4429-996c-97bd7fa57bec.pcap
Capture setup complete!
Capture complete. Saved to roomba_f1e92062-4a82-4429-996c-97bd7fa57bec.pcap
tcpdump took 2.12 seconds.
Ensuring correct ownership of created files.
Saving metadata.
END SNIFF SUBCOMMAND
\end{minted}
\caption{Output of the same command at the third verbosity level.}
\label{fig:example-lvl-three}
\end{figure}
|
||||||
|
|
||||||
|
\section{\ref{req:auto_documentation}: Documentation}
|
||||||
|
\textbf{Status: Partially Met} \\
|
||||||
|
For users, there is a ``Command Line Reference'' (see \cref{appendix:cmdref}) which details all important aspects of operating the \iottb \cli. Furthermore, helpful messages regarding the correct syntax of the commands are displayed if an input is malformed. So user documentation does exist and, while it can certainly be improved upon, is already helpful.
|
||||||
|
Unfortunately, documentation for developers is currently poor. The codebase is not systematically documented and there is currently no developer's manual.
|
||||||
|
Thoroughly documenting the existing codebase should be considered the most pressing issue and tackled first to improve developer documentation.
|
||||||
|
|
||||||
|
\section{\ref{req:fair_data_meta_inventory}: Data and Metadata Inventory}
|
||||||
|
\textbf{Status: Fully Met} \\
|
||||||
|
The testbed organizes data and metadata in a standardized and principled way. The database is complete with respect to the primary and secondary artifacts which currently stem from operating \iottb itself.
While complete now, extending \iottb carries the risk of breaking this requirement if careful attention is not given.
Since the database is a central part of the system as a whole, extensions must ensure that they comply with this requirement before they are built in.
|
||||||
|
|
||||||
|
\section{\ref{req:fair_data_formats}: Data Formats and Schemas}
|
||||||
|
\textbf{Status: Met} \\
|
||||||
|
The testbed standardizes directory and file naming. All metadata is in plain text in the JSON format, which makes it very accessible to both humans and machines. Currently, the only binary format which \iottbsc creates is PCAP. Luckily, the PCAP format is widely known and not proprietary, and widely available tools (e.g., Wireshark \citep{wiresharkorg}) exist to inspect such files. Furthermore, the data in the PCAP files can be extracted into the plaintext CSV format, which further improves interoperability. Consistency is currently handled implicitly; that is, there are no strict schemas\footnote{Strict schemas for metadata files were briefly introduced, but then abandoned due to a lack of familiarity with the Pydantic library \citep{pydantic}.}. \iottb should generally not corrupt data during operation, but plaintext files are manually editable and can inadvertently be corrupted or made invalid (e.g., by accidentally deleting a few digits from a UUID).
|
||||||
|
It is important to keep this in mind when extending \iottbsc, as the types of files residing in the database become more heterogeneous.
|
||||||
|
|
||||||
|
|
||||||
|
\section{\ref{req:fair_file_naming}: File Naming and Directory Hierarchy}
|
||||||
|
\textbf{Status: Met} \\
|
||||||
|
\iottb currently names all files which it creates according to a well-defined schema. In all cases, the file name is easily legible (e.g., metadata files like \cref{lst:cap-meta}) or the context of where the file resides provides easy orientation to a human reviewer. For instance, raw data files, which currently are only PCAP files, are all named with a \uuid. This is not helpful to a human on its own, but the metadata file, which resides in the same directory, provides all the information needed to understand what is contained within it. Furthermore, these files reside in a directory hierarchy which identifies which device the traffic belongs to and the date the capture file was created. Finally, capture files reside in a directory which identifies where in the sequence of captures of a given day they were created.
|
||||||
|
Automation recipes expanding the range of collected data types can simply follow this convention. This ensures interoperability and findability across capture methods.
|
||||||
|
|
||||||
|
Examples of the naming convention when adding devices were already shown in \cref{sec:add-dev} of \cref{ch4}.
|
||||||
|
|
||||||
|
|
||||||
|
\section{\ref{req:fair_preservation}: Data Preservation Practices}
|
||||||
|
\textbf{Status: Partially Met} \\
|
||||||
|
No specific data preservation measures are currently taken. However, \iottb already follows the Library of Congress recommendations on data formats (see \citet{recommendedformatrsLOC}). Most data is stored in plain text, and the binary formats used are widely known within the field, so there is no access barrier.
|
||||||
|
To enhance the testbed's compliance with this requirement, automation recipes which periodically back up the data to secure locations could be developed. The need for built-in preservation should be balanced against the goal of not introducing dependencies unrelated to the core aim of automated collection and FAIR storage. One option is simply to have a repository of scripts which are not built into the \iottb executable, but which users can use and adapt to their needs\footnote{For instance, rsync scripts with predefined filters appropriate for the database.}.
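A hypothetical backup recipe of this kind might look as follows; the remote host, paths, and filters are assumptions.
\begin{minted}{bash}
# Sketch: mirror the testbed database to a backup location, skipping log files.
rsync -av --exclude='*.log' "$HOME/iottb.db/" backup-host:/srv/iottb-backups/iottb.db/
\end{minted}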
|
||||||
|
|
||||||
|
\section{\cref{req:fair_accessibility}: Accessibility Controls}
|
||||||
|
\textbf{Status: Not Met} \\
|
||||||
|
While the \iottb executable is aware of what data it can and cannot access or change, there are currently no wider access controls implemented.
|
||||||
|
|
||||||
|
|
||||||
7
thesis/Chapters/ch6-conclusion.tex
Normal file
7
thesis/Chapters/ch6-conclusion.tex
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
\chapter{Conclusion}\label{ch:conclusion}
|
||||||
|
\iottbsc is an attempt at an automation testbed for \iot devices.
|
||||||
|
The \iottb package can be considered somewhat feature-limited and incomplete for a proper testbed, but it provides a foundation on which to build a more fully fledged system.
|
||||||
|
\iottb currently automates the setup and configuration of network packet capture and saves the relevant data and metadata to the database.
|
||||||
|
The testbed uses the file system as a database such that it is also navigable by humans, not just machines.
|
||||||
|
Data is stored in a predictably named hierarchy, and files which are produced as a result of operating \iottb are both uniquely identifiable and interpretable by humans. This is achieved by using file system paths to provide context, so that file names need only contain minimal information to be meaningful to humans. Additionally, all created resources are identified by a \uuid, which ensures that even if data is accidentally moved, it remains linked at least in principle.
|
||||||
|
In summary, \iottbsc is a testbed which takes the first step toward a future where data is FAIR and experiments are reproducible.
|
||||||
BIN
thesis/Figures/network-setup1.png
Normal file
BIN
thesis/Figures/network-setup1.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 96 KiB |
BIN
thesis/Figures/setup1.jpeg
Normal file
BIN
thesis/Figures/setup1.jpeg
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 92 KiB |
BIN
thesis/Figures/setup2.png
Normal file
BIN
thesis/Figures/setup2.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 106 KiB |
5
thesis/Front/Abstract.tex
Normal file
5
thesis/Front/Abstract.tex
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
% !TEX root = ../Thesis.tex
|
||||||
|
\chapter{Abstract}
|
||||||
|
To systematically study and assess the privacy and security implications of IoT devices, it is crucial to have a reliable method for conducting experiments and extracting meaningful data in a reproducible manner. This necessitates the development of a system, referred to as a ``testbed'', that includes all the necessary tools, definitions, and automated environment setup required for conducting reproducible experiments on IoT devices.
|
||||||
|
|
||||||
|
In this project, I aim to design and implement a testbed that automates and standardizes the collection and processing of network data from IoT devices. The outcome of this project is a Python package that facilitates these tasks, providing a foundation for reproducible IoT device experiments.
|
||||||
3
thesis/Front/Acknowledgment.tex
Normal file
3
thesis/Front/Acknowledgment.tex
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
% !TEX root = ../Thesis.tex
|
||||||
|
\chapter{Acknowledgments}
|
||||||
|
So Long, and Thanks for All the Fish. And the template.
|
||||||
21
thesis/LICENSE.md
Normal file
21
thesis/LICENSE.md
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2014 Ivan Giangreco
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
27
thesis/Makefile
Normal file
27
thesis/Makefile
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
#
|
||||||
|
# Thanks to Kevin Urban for providing this makefile.
|
||||||
|
# Jul 2012
|
||||||
|
#
|
||||||
|
|
||||||
|
# choose which latex compiler you want to use (xelatex or pdflatex)
|
||||||
|
TEX = pdflatex
|
||||||
|
|
||||||
|
|
||||||
|
################
|
||||||
|
# stop editing #
|
||||||
|
################
|
||||||
|
latexfile = Thesis
|
||||||
|
|
||||||
|
# rerun pdf generation until it doesn't say rerun anymore
|
||||||
|
$(latexfile).pdf: $(latexfile).bbl
|
||||||
|
while ($(TEX) $(latexfile); grep -q "Rerun to get cross" $(latexfile).log); do true; done
|
||||||
|
|
||||||
|
$(latexfile).bbl: $(latexfile).aux
|
||||||
|
bibtex $(latexfile)
|
||||||
|
|
||||||
|
$(latexfile).aux: $(latexfile).tex
|
||||||
|
$(TEX) $(latexfile)
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm *.aux
|
||||||
|
rm *.bbl
|
||||||
1
thesis/README.md
Normal file
1
thesis/README.md
Normal file
@ -0,0 +1 @@
|
|||||||
|
Latex Template for Bachelor and Master thesis (University of Basel, Department of Mathematics and Computer Science, DBIS)
|
||||||
1061
thesis/Template/logo-de.pdf
Normal file
1061
thesis/Template/logo-de.pdf
Normal file
File diff suppressed because one or more lines are too long
1056
thesis/Template/logo-en.pdf
Normal file
1056
thesis/Template/logo-en.pdf
Normal file
File diff suppressed because one or more lines are too long
107
thesis/Thesis.tex
Normal file
107
thesis/Thesis.tex
Normal file
@ -0,0 +1,107 @@
|
|||||||
|
%% ----------------------------------------------------------------
|
||||||
|
%% Thesis.tex -- main
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
|
||||||
|
\documentclass[a4paper, 10pt, oneside]{memoir}
|
||||||
|
%% Use the option citeauthor to be able to use citet. The default cite will still work.
|
||||||
|
\usepackage[citeauthor]{basilea}
|
||||||
|
\usepackage{minted}
|
||||||
|
\usepackage{enumitem}
|
||||||
|
\usepackage{caption}
|
||||||
|
\usepackage{float}
|
||||||
|
\usepackage{listings}
|
||||||
|
|
||||||
|
\usepackage[toc, acronym]{glossaries}
|
||||||
|
\makeglossaries
|
||||||
|
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
|
||||||
|
\title {IOTTB: An Automation Testbed for IOT Devices}
|
||||||
|
\thesistype {Bachelor Project}
|
||||||
|
|
||||||
|
\department {Department of Mathematics and Computer Science}
|
||||||
|
\faculty {Natural Science Faculty of the University of Basel}
|
||||||
|
\research {Privacy Enhancing Technologies \\ https://pet.dmi.unibas.ch}
|
||||||
|
|
||||||
|
\examiner {Prof. Dr. Isabel Wagner}
|
||||||
|
\supervisor {Valentyna Pavliv}
|
||||||
|
|
||||||
|
\authors {Sebastian Lenzlinger}
|
||||||
|
\email {sebastian.lenzlinger@unibas.ch}
|
||||||
|
\immatriculnr {2018-775-494}
|
||||||
|
|
||||||
|
\date {30. June 2024}
|
||||||
|
|
||||||
|
% switch here for the german logo to logo-de
|
||||||
|
\ulogo {Template/logo-en}
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
%%%% Glossary and Acronyms
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
\setacronymstyle{long-short}
|
||||||
|
\newacronym{iot}{IoT}{Internet of Things}
|
||||||
|
\newacronym{os}{OS}{Operating System}
|
||||||
|
\newacronym{cli}{CLI}{Command Line Interface}
|
||||||
|
\newacronym{ap}{AP}{Access Point}
|
||||||
|
\newacronym{uuid}{UUID}{Universally Unique Identifier}
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
%% Personal commands specific to my report
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
\newcommand{\iot}{\gls{iot} }
|
||||||
|
\newcommand{\iottbsc}{\textsc{iottb} }
|
||||||
|
\newcommand{\iottb}{\texttt{iottb} }
|
||||||
|
\newcommand{\os}{\gls{os} }
|
||||||
|
\newcommand{\ap}{\gls{ap} }
|
||||||
|
\newcommand{\db}{\texttt{iottb.db} }
|
||||||
|
\newcommand{\sniff}{\texttt{sniff} }
|
||||||
|
\newcommand{\addev}{\texttt{add-device} }
|
||||||
|
\newcommand{\dsn}{\texttt{device\_short\_name} }
|
||||||
|
\newcommand{\dname}{\texttt{device\_name} }
|
||||||
|
\newcommand{\did}{\texttt{device\_id} }
|
||||||
|
\newcommand{\cid}{\texttt{capture\_id} }
|
||||||
|
\newcommand{\uuid}{\gls{uuid}}
|
||||||
|
\newcommand{\cli}{\gls{cli} }
|
||||||
|
\newcommand{\mytodo}[1][0]{{\color{red}\textsc{TODO}: #1 \label{todo:#1}}}
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
\begin{document}
|
||||||
|
|
||||||
|
% for english use \selectlanguage{english}, for german use \selectlanguage{ngerman}
|
||||||
|
\selectlanguage{english}
|
||||||
|
|
||||||
|
\thesisfront
|
||||||
|
\maketitle
|
||||||
|
\pagestyle{thesis}
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
% \input{./Front/Acknowledgment}
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
\input{./Front/Abstract}
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
\thesistoc
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
%\thesisnomencl
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
\thesismain
|
||||||
|
|
||||||
|
\input{./Chapters/ch1-introduction}
|
||||||
|
\input{Chapters/ch2-background}
|
||||||
|
\input{Chapters/ch3-adaptation}
|
||||||
|
\input{Chapters/ch4-iottb}
|
||||||
|
\input{Chapters/ch5-evaluation}
|
||||||
|
\input{Chapters/ch6-conclusion}
|
||||||
|
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
\thesisappendix
|
||||||
|
\printglossary[type=\acronymtype]
|
||||||
|
\thesisbib
|
||||||
|
\begin{appendices}
|
||||||
|
\input{./Back/AppendixA}
|
||||||
|
\input{./Back/AppendixB}
|
||||||
|
\input{Back/CommandRef}
|
||||||
|
\end{appendices}
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
\thesisback
|
||||||
|
\iflanguage{english}
|
||||||
|
{\includepdf{./Back/wissensch_Redlichkeit_E_09-2023.pdf}}
|
||||||
|
{\includepdf{./Back/wissensch_Redlichkeit_D_09-2023.pdf}}
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
|
\end{document}
|
||||||
|
%% ----------------------------------------------------------------
|
||||||
55
thesis/appendixa-after-add-device-dir.txt
Normal file
55
thesis/appendixa-after-add-device-dir.txt
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
{
|
||||||
|
"DefaultDatabase": "showcase",
|
||||||
|
"DefaultDatabasePath": "/home/seb",
|
||||||
|
"DatabaseLocations": {
|
||||||
|
"iottb.db": "/home/seb",
|
||||||
|
"showcase": "/home/seb"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Database: showcase
|
||||||
|
Database Location: /home/seb/showcase
|
||||||
|
$ tree
|
||||||
|
iphone-13/
|
||||||
|
|--device_metadata.json
|
||||||
|
roomba/
|
||||||
|
|-- device_metadata.json
|
||||||
|
|
||||||
|
{
|
||||||
|
"device_id": "a2158407-2b73-428d-9f94-cc8f3a497478",
|
||||||
|
"device_name": "iPhone 13 (year 2043)",
|
||||||
|
"aliases": [
|
||||||
|
"iphone-13--year-2043-",
|
||||||
|
"iPhone-13--year-2043-",
|
||||||
|
"iPhone 13 (year 2043)",
|
||||||
|
"iphone-13"
|
||||||
|
],
|
||||||
|
"canonical_name": "iphone-13",
|
||||||
|
"date_added": "2024-07-01T00:54:56.655710",
|
||||||
|
"description": "",
|
||||||
|
"model": "",
|
||||||
|
"manufacturer": "",
|
||||||
|
"current_firmware_version": "",
|
||||||
|
"device_type": "",
|
||||||
|
"supported_interfaces": "",
|
||||||
|
"companion_applications": "",
|
||||||
|
"last_metadata_update": "2024-07-01T00:54:56.655719"
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
"device_id": "339de2af-c3ef-4c5d-a9c8-a03f7a65cc0a",
|
||||||
|
"device_name": "roomba",
|
||||||
|
"aliases": [
|
||||||
|
"roomba"
|
||||||
|
],
|
||||||
|
"canonical_name": "roomba",
|
||||||
|
"date_added": "2024-07-01T00:54:34.715850",
|
||||||
|
"description": "",
|
||||||
|
"model": "",
|
||||||
|
"manufacturer": "",
|
||||||
|
"current_firmware_version": "",
|
||||||
|
"device_type": "",
|
||||||
|
"supported_interfaces": "",
|
||||||
|
"companion_applications": "",
|
||||||
|
"last_metadata_update": "2024-07-01T00:54:34.715859"
|
||||||
|
}
|
||||||
252
thesis/basilea.sty
Normal file
252
thesis/basilea.sty
Normal file
@ -0,0 +1,252 @@
|
|||||||
|
%%
|
||||||
|
%% This file is originally based on 'ECSthesis.cls' by Steve R. Gunn
|
||||||
|
%% and 'phdthesis.sty' by Jamie Stevens. In this new version many parts
|
||||||
|
%% have been replaced by calls to the memoir package. Many existing
|
||||||
|
%% templates using the memoir served as model.
|
||||||
|
%%
|
||||||
|
%% v.2.0
|
||||||
|
%%
|
||||||
|
%% 2014, Ivan Giangreco
|
||||||
|
%%
|
||||||
|
|
||||||
|
\NeedsTeXFormat{LaTeX2e}
|
||||||
|
\ProvidesPackage{basilea}[2014]
|
||||||
|
|
||||||
|
\newcommand*{\basileaBibStyle}{thesis}
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Provides the option to use \citet to get the authors name
|
||||||
|
%% Internally, this changes the bibliographystyle from thesis to plainnat
|
||||||
|
%%
|
||||||
|
\DeclareOption{citeauthor}{
|
||||||
|
\renewcommand*{\basileaBibStyle}{plainnat}
|
||||||
|
}
|
||||||
|
%%
|
||||||
|
%% Provides the default option to not use \citet to get the authors name
|
||||||
|
%% Internally, this sets the bibliographystyle to thesis
|
||||||
|
%%
|
||||||
|
\DeclareOption{citenumber}{
|
||||||
|
\renewcommand*{\basileaBibStyle}{thesis}
|
||||||
|
}
|
||||||
|
|
||||||
|
\ExecuteOptions{citenumber}
|
||||||
|
|
||||||
|
\ProcessOptions\relax
|
||||||
|
|
||||||
|
\usepackage[ngerman, english]{babel}
|
||||||
|
\usepackage[utf8]{inputenc}
|
||||||
|
|
||||||
|
% text to be set for frontpage
|
||||||
|
\newcommand*{\supervisor}[1]{\def\supervisorint{#1}}
|
||||||
|
\newcommand*{\examiner}[1]{\def\examinerint{#1}}
|
||||||
|
\newcommand*{\authors}[1]{\def\authorsint{#1}}
|
||||||
|
\newcommand*{\email}[1]{\def\emailint{#1}}
|
||||||
|
\newcommand*{\ulogo}[1]{\def\universitylogoint{#1}}
|
||||||
|
\newcommand*{\department}[1]{\def\departmentint{#1}}
|
||||||
|
\newcommand*{\faculty}[1]{\def\facultyint{#1}}
|
||||||
|
\newcommand*{\research}[1]{\def\researchgroupint{#1}}
|
||||||
|
\newcommand*{\thesistype}[1]{\def\thesistypeint{#1}}
|
||||||
|
\newcommand*{\immatriculnr}[1]{\def\immatriculnrint{#1}}
|
||||||
|
|
||||||
|
\def\titleint{\@title}
|
||||||
|
\def\dateint{\@date}
|
||||||
|
|
||||||
|
% settings for nomenclature (i.e. the abbreviations)
|
||||||
|
% - for using in text use e.g. \nomenclature{LED}{Light-Emitting Diode}
|
||||||
|
% this will print the text directly as Light-Emitting Diode (LED)
|
||||||
|
% - the abbreviations for the whole document can be printed with the command \thesisnomencl
|
||||||
|
% - invoke MakeIndex to create a *.nlo file (see also documentation of nomenclature package)
|
||||||
|
\usepackage{nomencl}
|
||||||
|
\newcommand*{\nomenclint}{}
|
||||||
|
\let\nomenclint\nomenclature
|
||||||
|
\renewcommand*{\nomenclature}[2]{\textit{#2} (#1) \nomenclint{#1}{#2}}
|
||||||
|
\makenomenclature
|
||||||
|
\newcommand*{\thesisnomencl}{\renewcommand{\nomname}{\iflanguage{english}{Abbreviations}{Abkürzungsverzeichnis}}\printnomenclature\addcontentsline{toc}{chapter}{\iflanguage{english}{Abbreviations}{Abkürzungsverzeichnis}}}
|
||||||
|
|
||||||
|
% graphics
|
||||||
|
% (see also floats section in this document for more options)
|
||||||
|
\usepackage{eso-pic}
|
||||||
|
\usepackage{everyshi}
|
||||||
|
\usepackage{ifthen}
|
||||||
|
\usepackage{calc}
|
||||||
|
\usepackage{pdfpages}
|
||||||
|
\graphicspath{{Figures/}}
|
||||||
|
\newsubfloat{figure}
|
||||||
|
|
||||||
|
% definition of fonts (phv is Helvetica, pcr is Courrier)
|
||||||
|
\renewcommand{\sfdefault}{phv}
|
||||||
|
\renewcommand{\ttdefault}{pcr}
|
||||||
|
|
||||||
|
% definition of colors
|
||||||
|
\usepackage{color,graphicx}
|
||||||
|
\definecolor{chaptercolor}{rgb}{.7,.7,.7}
|
||||||
|
\definecolor{emphasizecolor}{rgb}{.7,.7,.7}
|
||||||
|
|
||||||
|
% margins
|
||||||
|
\setlrmarginsandblock{3.5cm}{3.5cm}{*}
|
||||||
|
\setulmarginsandblock{3.5cm}{3.5cm}{*}
|
||||||
|
\checkandfixthelayout
|
||||||
|
|
||||||
|
% document organization
|
||||||
|
\setsecnumdepth{subsubsection}
|
||||||
|
\setcounter{tocdepth}{3}
|
||||||
|
\newcounter{dummy}
|
||||||
|
\newcommand\addtotoc[1]{
|
||||||
|
\refstepcounter{dummy}
|
||||||
|
\addcontentsline{toc}{chapter}{#1}}
|
||||||
|
|
||||||
|
% definition of chapter style (i.e. large number and text)
|
||||||
|
\setlength{\beforechapskip}{50pt}
|
||||||
|
\setlength{\afterchapskip}{1pt}
|
||||||
|
\newcommand*{\colorchapnumber}{\color{chaptercolor}}
|
||||||
|
\newcommand*{\fontchapnumber}{\usefont{T1}{phv}{b}{n}\fontsize{100}{130}\selectfont}
|
||||||
|
\newcommand*{\fontchaptitle}{\sffamily\bfseries\huge}
|
||||||
|
\renewcommand*{\afterchapternum}{}
|
||||||
|
\renewcommand*{\printchaptername}{}
|
||||||
|
\setlength{\midchapskip}{20mm}
|
||||||
|
\renewcommand*{\chapternamenum}{}
|
||||||
|
\renewcommand*{\printchapternum}{\raggedleft{\colorchapnumber\fontchapnumber\thechapter}\par}
|
||||||
|
\renewcommand*{\printchaptertitle}[1]{\raggedleft\fontchaptitle{#1}\par \vspace{30pt} \nobreak}
|
||||||
|
|
||||||
|
% definition of (subsub)section styles
|
||||||
|
\setsecheadstyle{\sffamily\Large}
|
||||||
|
\setbeforesecskip{25pt}
|
||||||
|
\setaftersecskip{1pt}
|
||||||
|
|
||||||
|
\setsubsecheadstyle{\sffamily\large}
|
||||||
|
\setbeforesubsecskip{20pt}
|
||||||
|
\setaftersubsecskip{1pt}
|
||||||
|
|
||||||
|
\setsubsubsecheadstyle{\sffamily\normalsize}
|
||||||
|
\setbeforesubsubsecskip{20pt}
|
||||||
|
\setaftersubsubsecskip{1pt}
|
||||||
|
|
||||||
|
% definition of line spacing, indent of paragraph, etc.
|
||||||
|
\OnehalfSpacing
|
||||||
|
\setlength{\parindent}{0pt}
|
||||||
|
\setlength{\parskip}{0pt}
|
||||||
|
\raggedbottom
|
||||||
|
|
||||||
|
% header, footer
|
||||||
|
\nouppercaseheads
|
||||||
|
\renewcommand{\chaptermark}[1]{\markboth{#1}{}}
|
||||||
|
\renewcommand{\sectionmark}[1]{\markright{#1}{}}
|
||||||
|
|
||||||
|
\makepagestyle{thesis}
|
||||||
|
\makeevenhead{thesis}{\leftmark}{}{\thepage}
|
||||||
|
\makeoddhead{thesis}{\leftmark}{}{\thepage}
|
||||||
|
\makeheadrule{thesis}{\textwidth}{0.2pt}
|
||||||
|
\makeevenfoot{thesis}{}{}{}
|
||||||
|
\makeoddfoot{thesis}{}{}{}
|
||||||
|
\copypagestyle{chapter}{empty}
|
||||||
|
|
||||||
|
% footnotes
|
||||||
|
\setlength{\skip\footins}{20pt}
|
||||||
|
\usepackage[hang]{footmisc}
|
||||||
|
\setlength{\footnotemargin}{10pt}
|
||||||
|
\usepackage{chngcntr}
|
||||||
|
\counterwithout{footnote}{chapter}
|
||||||
|
|
||||||
|
% add highlighting possibilities
|
||||||
|
% use \hlt to highlight parts of text, e.g. \hlt{this is important}
|
||||||
|
\usepackage{soul}
|
||||||
|
\newcommand{\hlt}[1]{\hl{#1}}
|
||||||
|
|
||||||
|
% various imports
|
||||||
|
\usepackage{amsmath,amsfonts,amssymb,amscd,amsthm}
|
||||||
|
|
||||||
|
% clearing pages
|
||||||
|
\def\cleardoublepage{\clearpage\if@twoside \ifodd\c@page\else\hbox{}\thispagestyle{empty}\newpage\if@twocolumn\hbox{}\newpage\fi\fi\fi}
|
||||||
|
\newcommand*{\tmpcleardoublepage}{}
|
||||||
|
\let\tmpcleardoublepage\cleardoublepage
|
||||||
|
|
||||||
|
% floats
|
||||||
|
\captionstyle{\raggedright}
|
||||||
|
\setfloatlocations{figure}{ht!}
|
||||||
|
\setlength{\intextsep}{10pt}
|
||||||
|
\setlength{\textfloatsep}{10pt}
|
||||||
|
|
||||||
|
% definition of table of contents (i.e. title, header text, appearance in pdf-TOC, etc.)
|
||||||
|
\newcommand*{\thesistoc}{\cleardoublepage\renewcommand*{\contentsname}{\iflanguage{english}{Table of Contents}{Inhaltsverzeichnis}} \createplainmark{toc}{both}{\iflanguage{english}{Table of Contents}{Inhaltsverzeichnis}} \currentpdfbookmark{\iflanguage{english}{Table of Contents}{Inhaltsverzeichnis}}{toc} \tableofcontents*}
|
||||||
|
% adds the word "Appendix" in front of chapters in the appendix section
|
||||||
|
\renewcommand*{\cftappendixname}{\iflanguage{english}{Appendix}{Anhang}\space}
|
||||||
|
|
||||||
|
% definition of bibliography
|
||||||
|
\newcommand*{\thesisbib}{\cleardoublepage\renewcommand*{\contentsname}{\iflanguage{english}{Bibliography}{Literaturverzeichnis}} \createplainmark{bib}{both}{\iflanguage{english}{Bibliography}{Literaturverzeichnis}}\bibliographystyle{\basileaBibStyle}\bibliography{references}}
|
||||||
|
\usepackage[square, numbers, comma, sort&compress]{natbib}
|
||||||
|
|
||||||
|
% document structure
|
||||||
|
\newcommand*{\thesisfront}{\frontmatter \let\cleardoublepage\clearpage }
|
||||||
|
\newcommand*{\thesismain}{ \addtocontents{toc}{\vspace{2em}} \mainmatter \let\cleardoublepage\tmpcleardoublepage }
|
||||||
|
\newcommand*{\thesisappendix}{ \addtocontents{toc}{\vspace{2em}} \appendix \let\cleardoublepage\clearpage}
|
||||||
|
|
||||||
|
\newcommand*{\thesisback}{ \addtocontents{toc}{\vspace{2em}} \backmatter \let\cleardoublepage\clearpage}
|
||||||
|
|
||||||
|
% title page
|
||||||
|
\renewcommand\maketitle{
|
||||||
|
\hypersetup{pdftitle={\@title}}
|
||||||
|
\hypersetup{pdfauthor=\authorsint}
|
||||||
|
\thispagestyle{empty}
|
||||||
|
{\raggedright\includegraphics{\universitylogoint}}\par
|
||||||
|
\begin{center}
|
||||||
|
\vspace{3cm}
|
||||||
|
{\Huge \sffamily \bfseries \@title \par}
|
||||||
|
\vspace{0.5cm}
|
||||||
|
{\thesistypeint \par}
|
||||||
|
\vspace{3cm}
|
||||||
|
{\facultyint \par}
|
||||||
|
{\departmentint \par}
|
||||||
|
{\researchgroupint \par}
|
||||||
|
\vspace{1.5cm}
|
||||||
|
{\iflanguage{english}{Examiner}{Beurteiler}: \examinerint \par}
|
||||||
|
{\iflanguage{english}{Supervisor}{Zweitbeurteiler}: \supervisorint \par}
|
||||||
|
\vspace{1.5cm}
|
||||||
|
{\authorsint \par}
|
||||||
|
{\emailint \par}
|
||||||
|
{\immatriculnrint \par}
|
||||||
|
\vfill
|
||||||
|
{\@date \par}
|
||||||
|
\end{center}
|
||||||
|
}
|
||||||
|
|
||||||
|
% declaration
|
||||||
|
\newcommand{\formlabel}[2]{\vspace{0.25cm}\textbf{\sffamily \footnotesize #1 --- #2}\\}
|
||||||
|
|
||||||
|
% urls
|
||||||
|
\usepackage{url}
|
||||||
|
\urlstyle{same}
|
||||||
|
|
||||||
|
% footnotes
|
||||||
|
\usepackage{chngcntr}
|
||||||
|
\counterwithout{footnote}{chapter}
|
||||||
|
|
||||||
|
% math
|
||||||
|
\usepackage{amsmath}
|
||||||
|
\usepackage{amssymb}
|
||||||
|
\usepackage{amsfonts}
|
||||||
|
|
||||||
|
% reference settings
|
||||||
|
\usepackage[pdfpagemode={UseOutlines},bookmarks=true,bookmarksopen=true,
|
||||||
|
bookmarksopenlevel=0,bookmarksnumbered=true,hypertexnames=false,
|
||||||
|
colorlinks,linkcolor={black},citecolor={black},urlcolor={black}, filecolor={black},
|
||||||
|
pdfstartview={FitV}, breaklinks=true, unicode]{hyperref}
|
||||||
|
\usepackage[capitalize]{cleveref}
|
||||||
|
% rename here, e.g. \crefname{listing}{Algorithm}{Algorithm}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
% todos
|
||||||
|
\usepackage[colorinlistoftodos, textwidth=2cm, disable]{todonotes} % add ",disable" in [] to remove all todos, missing figures and the todo list
|
||||||
|
\newcommand{\todoEmpty}[2][]{\todo[fancyline, #1]{#2}}
|
||||||
|
\newcommand{\todoMissing}[2][]{\todoEmpty[color=magenta!80, linecolor=magenta!80, #1]{Missing: #2}}
|
||||||
|
\newcommand{\todoCheck}[2][]{\todoEmpty[color=red!80, linecolor=red!80, #1]{Check: #2}}
|
||||||
|
\newcommand{\todoRevise}[2][]{\todoEmpty[color=orange!80, linecolor=orange!80, #1]{Revise: #2}}
|
||||||
|
\newcommand{\todoCitation}[2][]{\todoEmpty[color=yellow!80, linecolor=yellow!80, #1]{Citation: #2}}
|
||||||
|
\newcommand{\todoLanguage}[2][]{\todoEmpty[color=blue!40!white, linecolor=blue!40!white, #1]{Language: #2}}
|
||||||
|
\newcommand{\todoQuestion}[2][]{\todoEmpty[color=green!80!white, linecolor=green!80!white, #1]{Question: #2}}
|
||||||
|
\newcommand{\todoNote}[2][]{\todoEmpty[color=black!20!white, linecolor=black!20!white, #1]{Note: #2}}
|
||||||
|
\newcommand{\todoFigure}[5]{\begin{figure}[#1]\centering\missingfigure[figwidth=#2]{#3}\caption{#4}\label{#5}\end{figure}}
|
||||||
|
|
||||||
|
\endinput
|
||||||
|
|
||||||
|
%% End of file 'basilea.sty'
|
||||||
15
thesis/capture_metadata_template.json
Normal file
15
thesis/capture_metadata_template.json
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"device_id": "",
|
||||||
|
"capture_id": "",
|
||||||
|
"capture_date": "",
|
||||||
|
"capture_file": "",
|
||||||
|
"start_time": "",
|
||||||
|
"stop_time": "",
|
||||||
|
"capture_duration": "",
|
||||||
|
"interfaces": "",
|
||||||
|
"device_ip_address": "",
|
||||||
|
"device_mac_address": "",
|
||||||
|
"contacted_ip_address": [],
|
||||||
|
"device_firmware_version": "",
|
||||||
|
"campanion_app": ""
|
||||||
|
}
|
||||||
7
thesis/cfg-shema.json
Normal file
7
thesis/cfg-shema.json
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"DefaultDatabase": "iottb.db",
|
||||||
|
"DefaultDatabasePath": $HOME,
|
||||||
|
"DatabaseLocations": {
|
||||||
|
<str>: <path>
|
||||||
|
}
|
||||||
|
}
|
||||||
110
thesis/command_reference.txt
Normal file
110
thesis/command_reference.txt
Normal file
@ -0,0 +1,110 @@
|
|||||||
|
Usage: iottb [OPTIONS] COMMAND [ARGS]...
|
||||||
|
|
||||||
|
Options:
|
||||||
|
-v, --verbosity Set verbosity [default: 0; 0<=x<=3]
|
||||||
|
-d, --debug Enable debug mode
|
||||||
|
--dry-run [default: True]
|
||||||
|
--cfg-file PATH Path to iottb config file [default:
|
||||||
|
/home/seb/.config/iottb/iottb.cfg]
|
||||||
|
--help Show this message and exit.
|
||||||
|
|
||||||
|
Commands:
|
||||||
|
add-device Add a device to a database
|
||||||
|
init-db
|
||||||
|
rm-cfg Removes the cfg file from the filesystem.
|
||||||
|
rm-dbs Removes ALL(!) databases from the filesystem if...
|
||||||
|
set-key-in-table-to Edit config or metadata files.
|
||||||
|
show-all Show everything: configuration, databases, and...
|
||||||
|
show-cfg Show the current configuration context
|
||||||
|
sniff Sniff packets with tcpdump
|
||||||
|
Usage: iottb init-db [OPTIONS]
|
||||||
|
|
||||||
|
Options:
|
||||||
|
-d, --dest PATH Location to put (new) iottb database
|
||||||
|
-n, --name TEXT Name of new database. [default: iottb.db]
|
||||||
|
--update-default / --no-update-default
|
||||||
|
If new db should be set as the new default
|
||||||
|
[default: update-default]
|
||||||
|
--help Show this message and exit.
|
||||||
|
Usage: iottb add-device [OPTIONS]
|
||||||
|
|
||||||
|
Add a device to a database
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--dev, --device-name TEXT The name of the device to be added. If this
|
||||||
|
string contains spaces or other special
|
||||||
|
characters normalization is
|
||||||
|
performed to derive a canonical name [required]
|
||||||
|
--db, --database DIRECTORY Database in which to add this device. If not
|
||||||
|
specified use default from config. [env var:
|
||||||
|
IOTTB_DB]
|
||||||
|
--guided Add device interactively [env var:
|
||||||
|
IOTTB_GUIDED_ADD]
|
||||||
|
--help Show this message and exit.
|
||||||
|
Usage: iottb sniff [OPTIONS] [TCPDUMP-ARGS] [DEVICE]
|
||||||
|
|
||||||
|
Sniff packets with tcpdump
|
||||||
|
|
||||||
|
Options:
|
||||||
|
Testbed sources:
|
||||||
|
--db, --database TEXT Database of device. Only needed if not current
|
||||||
|
default. [env var: IOTTB_DB]
|
||||||
|
--app TEXT Companion app being used during capture
|
||||||
|
Runtime behaviour:
|
||||||
|
--unsafe Disable checks for otherwise required options.
|
||||||
|
[env var: IOTTB_UNSAFE]
|
||||||
|
--guided [env var: IOTTB_GUIDED]
|
||||||
|
--pre TEXT Script to be executed before main command is
|
||||||
|
started.
|
||||||
|
--post TEXT Script to be executed upon completion of main
|
||||||
|
command.
|
||||||
|
Tcpdump options:
|
||||||
|
-i, --interface TEXT Network interface to capture on.If not specified
|
||||||
|
tcpdump tries to find and appropriate one. [env
|
||||||
|
var: IOTTB_CAPTURE_INTERFACE]
|
||||||
|
-a, --address TEXT IP or MAC address to filter packets by. [env var:
|
||||||
|
IOTTB_CAPTURE_ADDRESS]
|
||||||
|
-I, --monitor-mode Put interface into monitor mode.
|
||||||
|
--ff TEXT tcpdump filter as string or file path. [env var:
|
||||||
|
IOTTB_CAPTURE_FILTER]
|
||||||
|
-#, --print-pacno Print packet number at beginning of line. True by
|
||||||
|
default. [default: True]
|
||||||
|
-e, --print-ll Print link layer headers. True by default.
|
||||||
|
-c, --count INTEGER Number of packets to capture. [default: 1000]
|
||||||
|
--help Show this message and exit.
|
||||||
|
Utility Commands mostly for development
|
||||||
|
Usage: iottb rm-cfg [OPTIONS]
|
||||||
|
|
||||||
|
Removes the cfg file from the filesystem.
|
||||||
|
|
||||||
|
This is mostly a utility during development. Once non-standard database
|
||||||
|
locations are implemented, deleting this would lead to iottb not being able
|
||||||
|
to find them anymore.
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--yes Confirm the action without prompting.
|
||||||
|
--help Show this message and exit.
|
||||||
|
Usage: iottb rm-dbs [OPTIONS]
|
||||||
|
|
||||||
|
Removes ALL(!) databases from the filesystem if they're empty.
|
||||||
|
|
||||||
|
Development utility currently unfit for use.
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--yes Confirm the action without prompting.
|
||||||
|
--help Show this message and exit.
|
||||||
|
Usage: iottb show-cfg [OPTIONS]
|
||||||
|
|
||||||
|
Show the current configuration context
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--cfg-file PATH Path to the config file [default:
|
||||||
|
/home/seb/.config/iottb/iottb.cfg]
|
||||||
|
-pp Pretty Print
|
||||||
|
--help Show this message and exit.
|
||||||
|
Usage: iottb show-all [OPTIONS]
|
||||||
|
|
||||||
|
Show everything: configuration, databases, and device metadata
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--help Show this message and exit.
|
||||||
4
thesis/config_template.json
Normal file
4
thesis/config_template.json
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"database_path": "~/.iottb.db",
|
||||||
|
"log_level": "INFO"
|
||||||
|
}
|
||||||
45
thesis/device_metadata.py
Normal file
45
thesis/device_metadata.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
import click
|
||||||
|
|
||||||
|
from iottb.utils.string_processing import make_canonical_name
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceMetadata:
|
||||||
|
def __init__(self, device_name, description="", model="",
|
||||||
|
manufacturer="", firmware_version="", device_type="",
|
||||||
|
supported_interfaces="", companion_applications="",
|
||||||
|
save_to_file=None):
|
||||||
|
self.device_id = str(uuid.uuid4())
|
||||||
|
self.device_name = device_name
|
||||||
|
cn, aliases = make_canonical_name(device_name)
|
||||||
|
self.aliases = aliases
|
||||||
|
self.canonical_name = cn
|
||||||
|
self.date_added = datetime.now().isoformat()
|
||||||
|
self.description = description
|
||||||
|
self.model = model
|
||||||
|
self.manufacturer = manufacturer
|
||||||
|
self.current_firmware_version = firmware_version
|
||||||
|
self.device_type = device_type
|
||||||
|
self.supported_interfaces = supported_interfaces
|
||||||
|
self.companion_applications = companion_applications
|
||||||
|
self.last_metadata_update = datetime.now().isoformat()
|
||||||
|
if save_to_file is not None:
|
||||||
|
click.echo('TODO: Implement saving config to file after creation!')
|
||||||
|
|
||||||
|
def add_alias(self, alias: str = ""):
|
||||||
|
if alias == "":
|
||||||
|
return
|
||||||
|
self.aliases.append(alias)
|
||||||
|
|
||||||
|
def get_canonical_name(self):
|
||||||
|
return self.canonical_name
|
||||||
|
|
||||||
|
def print_attributes(self):
|
||||||
|
print(f'Printing attribute value pairs in {__name__}')
|
||||||
|
for attr, value in self.__dict__.items():
|
||||||
|
print(f'{attr}: {value}')
|
||||||
14
thesis/device_metadata_template.json
Normal file
14
thesis/device_metadata_template.json
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"device_id": "",
|
||||||
|
"device_name": "",
|
||||||
|
"device_short_name": "",
|
||||||
|
"date_created": "",
|
||||||
|
"description": "",
|
||||||
|
"model": "",
|
||||||
|
"manufacturer": "",
|
||||||
|
"firmware_version": "",
|
||||||
|
"device_type": "",
|
||||||
|
"supported_interfaces": "",
|
||||||
|
"companion_applications": "",
|
||||||
|
"last_metadata_update": ""
|
||||||
|
}
|
||||||
393
thesis/lstpatch.sty
Normal file
393
thesis/lstpatch.sty
Normal file
@ -0,0 +1,393 @@
%%
%% This is file `lstpatch.sty', generated manually.
%%
%% (w)(c) 2004 Carsten Heinz
%%
%% This file may be distributed under the terms of the LaTeX Project Public
%% License from CTAN archives in directory macros/latex/base/lppl.txt.
%% Either version 1.0 or, at your option, any later version.
%%
%% Send comments and ideas on the package, error reports and additional
%% programming languages to <cheinz@gmx.de>.
%%
%% This patch file will remove the following bugs from the listings package.
%% Each item contains the bug finder with date of report and first bug fix
%% version, a short description of the problem, and the reason for the bug
%% in parenthesis.
%%
%% 1) Frank Atanassow, 2004/10/07, 1.3b
%%
%% space after mathescape is not preserved
%% (\lst@newlines>0)
%%
%% 2) Benjamin Lings, 2004/10/15, 1.3b (2004/10/17)
%%
%% \usepackage{xy,listings} yields:
%% "Forbidden control sequence found while scanning use of \lst@lExtend"
%% (xy-pic correctly resets catcode of ^^L (to active), which is \outer)
%%
%%
%% The following features are added to the base package.
%%
%% 1.3a (2004/09/07)
%%
%% a) H I G H L Y E X P E R I M E N T A L
%%
%% Use the options
%% rangeprefix=<prefix for begin and end of range>
%% rangesuffix=<suffix for begin and end of range>
%%
%% rangebeginprefix=<prefix for begin of range>
%% rangebeginsuffix=<suffix for begin of range>
%%
%% rangeendprefix=<prefix for end of range>
%% rangeendsuffix=<suffix for end of range>
%%
%% includerangemarker=true|false
%% together with
%% firstline=<begin marker>
%% lastline=<end marker>
%% or
%% linerange={<begin marker 1>-<end marker 1>,
%% <begin marker 2>-<end marker 2>, ...}
%% The according markers in the source code are
%% <prefix><marker><suffix>
%% for begin respectively end of range. Moreover, one can use
%% includerangemarker=true|false
%% to show or hide the range markers in the output.
%%
%% 1.3b (2004/10/17)
%%
%% b) multicols=<number> (requires loaded multicol package)
%%
%%
\lst@CheckVersion{1.3}
{\typeout{^^J%
***^^J%
*** This is a patch for listings 1.3, but you're using^^J%
*** version \lst@version.^^J%
***^^J
*** Patch file not loaded.^^J%
***^^J}%
\endinput
}
\def\fileversion{1.3b}
\def\filedate{2004/10/17}
\ProvidesFile{lstpatch.sty}[\filedate\space\fileversion\space (Carsten Heinz)]
%
% 0) Insert % after #1.
\def\@@xbitor #1{\@tempcntb \count#1%
\ifnum \@tempcnta =\z@
\else
\divide\@tempcntb\@tempcnta
\ifodd\@tempcntb \@testtrue\fi
\fi}
%
% 1) Reset \lst@newlines at end of escape.
\def\lstpatch@escape{%
\gdef\lst@Escape##1##2##3##4{%
\lst@CArgX ##1\relax\lst@CDefX
{}%
{\lst@ifdropinput\else
\lst@TrackNewLines\lst@OutputLostSpace \lst@XPrintToken
\lst@InterruptModes
\lst@EnterMode{\lst@TeXmode}{\lst@modetrue}%
\ifx\^^M##2%
\lst@CArg ##2\relax\lst@ActiveCDefX
{}%
{\lst@escapeend ##4\lst@LeaveAllModes\lst@ReenterModes}%
{\lst@MProcessListing}%
\else
\lst@CArg ##2\relax\lst@ActiveCDefX
{}%
{\lst@escapeend ##4\lst@LeaveAllModes\lst@ReenterModes
\lst@newlines\z@ \lst@whitespacefalse}%
{}%
\fi
##3\lst@escapebegin
\fi}%
{}}%
}
%
% 2) Deactivate \outer definition of ^^L temporarily (inside and outside
% of \lst@ScanChars) and restore \catcode at end of package.
\begingroup \catcode12=\active\let^^L\@empty
\gdef\lst@ScanChars{%
\let\lsts@ssL^^L%
\def^^L{\par}%
\lst@GetChars\lst@RestoreOrigCatcodes\@ne {128}%
\let^^L\lsts@ssL
\lst@GetChars\lst@RestoreOrigExtendedCatcodes{128}{256}}
\endgroup
\lst@lAddTo\lst@RestoreCatcodes{\catcode12\active}
%
% a) Let's start with the options:
\lst@Key{rangeprefix}\relax{\def\lst@rangebeginprefix{#1}%
\def\lst@rangeendprefix{#1}}
\lst@Key{rangesuffix}\relax{\def\lst@rangebeginsuffix{#1}%
\def\lst@rangeendsuffix{#1}}
\lst@Key{rangebeginprefix}{}{\def\lst@rangebeginprefix{#1}}
\lst@Key{rangebeginsuffix}{}{\def\lst@rangebeginsuffix{#1}}
\lst@Key{rangeendprefix}{}{\def\lst@rangeendprefix{#1}}
\lst@Key{rangeendsuffix}{}{\def\lst@rangeendsuffix{#1}}
\lst@Key{includerangemarker}{true}[t]{\lstKV@SetIf{#1}\lst@ifincluderangemarker}
%
% The key is a redefinition of \lst@GLI@ checking for numbers.
\def\lst@GLI@#1-#2-#3\@nil{%
\lst@IfNumber{#1}%
{\ifx\@empty#1\@empty
\let\lst@firstline\@ne
\else
\def\lst@firstline{#1\relax}%
\fi
\ifx\@empty#3\@empty
\def\lst@lastline{9999999\relax}%
\else
\ifx\@empty#2\@empty
\let\lst@lastline\lst@firstline
\else
\def\lst@lastline{#2\relax}%
\fi
\fi}%
%
% If we've found a general marker, we set firstline and lastline to 9999999.
% This prevents (almost) anything to be printed for now.
{\def\lst@firstline{9999999\relax}%
\let\lst@lastline\lst@firstline
%
% We add the prefixes and suffixes to the markers.
\let\lst@rangebegin\lst@rangebeginprefix
\lst@AddTo\lst@rangebegin{#1}\lst@Extend\lst@rangebegin\lst@rangebeginsuffix
\ifx\@empty#3\@empty
\let\lst@rangeend\lst@rangeendprefix
\lst@AddTo\lst@rangeend{#1}\lst@Extend\lst@rangeend\lst@rangeendsuffix
\else
\ifx\@empty#2\@empty
\let\lst@rangeend\@empty
\else
\let\lst@rangeend\lst@rangeendprefix
\lst@AddTo\lst@rangeend{#2}\lst@Extend\lst@rangeend\lst@rangeendsuffix
\fi
\fi
% The following definition will be executed in the SelectCharTable hook
% and here right now if we are already processing a listing.
\global\def\lst@DefRange{\expandafter\lst@CArgX\lst@rangebegin\relax\lst@DefRangeB}%
\ifnum\lst@mode=\lst@Pmode \expandafter\lst@DefRange \fi}}
% \lst@DefRange is not inserted via a hook anymore. Instead it is now called
% directly from \lst@SelectCharTable. This was necessary to get rid of an
% interference with the escape-to-LaTeX-feature. The bug was reported by
% \lsthelper{Michael~Bachmann}{2004/07/21}{Keine label-Referenzierung
% m\"oglich...}. Another chance is due to the same bug: \lst@DefRange is
% redefined globally when the begin of code is found, see below. The bug was
% reported by \lsthelper{Tobias~Rapp}{2004/04/06}{undetected end of range if
% listing crosses page break} \lsthelper{Markus~Luisser}{2004/08/13}{Bug mit
% 'linerangemarker' in umgebrochenen listings}
%\lst@AddToHook{SelectCharTable}{\lst@DefRange}
\lst@AddToHookExe{DeInit}{\global\let\lst@DefRange\@empty}
%
% Actually defining the marker (via \lst@GLI@, \lst@DefRange, \lst@CArgX as
% seen above) is similar to \lst@DefDelimB---except that we unfold the first
% parameter and use different <execute>, <pre>, and <post> statements.
\def\lst@DefRangeB#1#2{\lst@DefRangeB@#1#2}
\def\lst@DefRangeB@#1#2#3#4{%
\lst@CDef{#1{#2}{#3}}#4{}%
{\lst@ifincluderangemarker
\lst@LeaveMode
\let#1#4%
\lst@DefRangeEnd
\lst@InitLstNumber
\else
\@tempcnta\lst@lineno \advance\@tempcnta\@ne
\edef\lst@firstline{\the\@tempcnta\relax}%
\gdef\lst@OnceAtEOL{\let#1#4\lst@DefRangeEnd}%
\lst@InitLstNumber
\fi
\global\let\lst@DefRange\lst@DefRangeEnd
\lst@CArgEmpty}%
\@empty}
%
% Modify labels and define |\lst@InitLstNumber| used above.
% \lsthelper{Omair-Inam~Abdul-Matin}{2004/05/10}{experimental linerange
% feature does not work with firstnumber}
\def\lstpatch@labels{%
\gdef\lst@SetFirstNumber{%
\ifx\lst@firstnumber\@undefined
\@tempcnta 0\csname\@lst no@\lst@intname\endcsname\relax
\ifnum\@tempcnta=\z@ \else
\lst@nololtrue
\advance\@tempcnta\lst@advancenumber
\edef\lst@firstnumber{\the\@tempcnta\relax}%
\fi
\fi}%
}
\lst@AddToAtTop\lsthk@PreInit
{\ifx\lst@firstnumber\@undefined
\def\lst@firstnumber{\lst@lineno}%
\fi}
\def\lst@InitLstNumber{%
\global\c@lstnumber\lst@firstnumber
\global\advance\c@lstnumber\lst@advancenumber
\global\advance\c@lstnumber-\lst@advancelstnum
\ifx \lst@firstnumber\c@lstnumber
\global\advance\c@lstnumber-\lst@advancelstnum
\fi}
%
% The end-marker is defined if and only if it's not empty. The definition is
% similar to \lst@DefDelimE---with the above exceptions and except that we
% define the re-entry point \lst@DefRangeE@@ as it is defined in the new
% version of \lst@MProcessListing above.
\def\lst@DefRangeEnd{%
\ifx\lst@rangeend\@empty\else
\expandafter\lst@CArgX\lst@rangeend\relax\lst@DefRangeE
\fi}
\def\lst@DefRangeE#1#2{\lst@DefRangeE@#1#2}
\def\lst@DefRangeE@#1#2#3#4{%
\lst@CDef{#1#2{#3}}#4{}%
{\let#1#4%
\edef\lst@lastline{\the\lst@lineno\relax}%
\lst@DefRangeE@@}%
\@empty}
\def\lst@DefRangeE@@#1\@empty{%
\lst@ifincluderangemarker
#1\lst@XPrintToken
\fi
\lst@LeaveModeToPmode
\lst@BeginDropInput{\lst@Pmode}}
%
\def\lst@LeaveModeToPmode{%
\ifnum\lst@mode=\lst@Pmode
\expandafter\lsthk@EndGroup
\else
\expandafter\egroup\expandafter\lst@LeaveModeToPmode
\fi}
%
% Eventually we shouldn't forget to install \lst@OnceAtEOL, which must
% also be called in \lst@MSkipToFirst.
\lst@AddToHook{EOL}{\lst@OnceAtEOL\global\let\lst@OnceAtEOL\@empty}
\gdef\lst@OnceAtEOL{}% Init
\def\lst@MSkipToFirst{%
\global\advance\lst@lineno\@ne
\ifnum \lst@lineno=\lst@firstline
\def\lst@next{\lst@LeaveMode \global\lst@newlines\z@
\lst@OnceAtEOL \global\let\lst@OnceAtEOL\@empty
\lst@InitLstNumber % Added to work with modified \lsthk@PreInit.
\lsthk@InitVarsBOL
\lst@BOLGobble}%
\expandafter\lst@next
\fi}
\def\lst@SkipToFirst{%
\ifnum \lst@lineno<\lst@firstline
\def\lst@next{\lst@BeginDropInput\lst@Pmode
\lst@Let{13}\lst@MSkipToFirst
\lst@Let{10}\lst@MSkipToFirst}%
\expandafter\lst@next
\else
\expandafter\lst@BOLGobble
\fi}
%
% Finally the service macro \lst@IfNumber:
\def\lst@IfNumber#1{%
\ifx\@empty#1\@empty
\let\lst@next\@firstoftwo
\else
\lst@IfNumber@#1\@nil
\fi
\lst@next}
\def\lst@IfNumber@#1#2\@nil{%
\let\lst@next\@secondoftwo
\ifnum`#1>47\relax \ifnum`#1>57\relax\else
\let\lst@next\@firstoftwo
\fi\fi}
%
% b) The following is known to fail with some keys.
\lst@Key{multicols}{}{\@tempcnta=0#1\relax\def\lst@multicols{#1}}
\def\lst@Init#1{%
\begingroup
\ifx\lst@float\relax\else
\edef\@tempa{\noexpand\lst@beginfloat{lstlisting}[\lst@float]}%
\expandafter\@tempa
\fi
% chmod begin
\ifx\lst@multicols\@empty\else
\edef\lst@next{\noexpand\multicols{\lst@multicols}}
\expandafter\lst@next
\fi
% chmod end
\ifhmode\ifinner \lst@boxtrue \fi\fi
\lst@ifbox
\lsthk@BoxUnsafe
\hbox to\z@\bgroup
$\if t\lst@boxpos \vtop
\else \if b\lst@boxpos \vbox
\else \vcenter \fi\fi
\bgroup \par\noindent
\else
\lst@ifdisplaystyle
\lst@EveryDisplay
\par\penalty-50\relax
\vspace\lst@aboveskip
\fi
\fi
\normalbaselines
\abovecaptionskip\lst@abovecaption\relax
\belowcaptionskip\lst@belowcaption\relax
\lst@MakeCaption t%
\lsthk@PreInit \lsthk@Init
\lst@ifdisplaystyle
\global\let\lst@ltxlabel\@empty
\if@inlabel
\lst@ifresetmargins
\leavevmode
\else
\xdef\lst@ltxlabel{\the\everypar}%
\lst@AddTo\lst@ltxlabel{%
\global\let\lst@ltxlabel\@empty
\everypar{\lsthk@EveryLine\lsthk@EveryPar}}%
\fi
\fi
\everypar\expandafter{\lst@ltxlabel
\lsthk@EveryLine\lsthk@EveryPar}%
\else
\everypar{}\let\lst@NewLine\@empty
\fi
\lsthk@InitVars \lsthk@InitVarsBOL
\lst@Let{13}\lst@MProcessListing
\let\lst@Backslash#1%
\lst@EnterMode{\lst@Pmode}{\lst@SelectCharTable}%
\lst@InitFinalize}
\def\lst@DeInit{%
\lst@XPrintToken \lst@EOLUpdate
\global\advance\lst@newlines\m@ne
\lst@ifshowlines
\lst@DoNewLines
\else
\setbox\@tempboxa\vbox{\lst@DoNewLines}%
\fi
\lst@ifdisplaystyle \par\removelastskip \fi
\lsthk@ExitVars\everypar{}\lsthk@DeInit\normalbaselines\normalcolor
\lst@MakeCaption b%
\lst@ifbox
\egroup $\hss \egroup
\vrule\@width\lst@maxwidth\@height\z@\@depth\z@
\else
\lst@ifdisplaystyle
\par\penalty-50\vspace\lst@belowskip
\fi
\fi
% chmod begin
\ifx\lst@multicols\@empty\else
\def\lst@next{\global\let\@checkend\@gobble
\endmulticols
\global\let\@checkend\lst@@checkend}
\expandafter\lst@next
\fi
% chmod end
\ifx\lst@float\relax\else
\expandafter\lst@endfloat
\fi
\endgroup}
\let\lst@@checkend\@checkend
%%
\endinput
%%
%% End of file `lstpatch.sty'.
594
thesis/references.bib
Normal file
594
thesis/references.bib
Normal file
@ -0,0 +1,594 @@
|
||||||
|
pages = {207--218},
|
||||||
|
booktitle = {Proceedings of the 13th {ACM} Conference on Security and Privacy in Wireless and Mobile Networks},
|
||||||
|
author = {Acar, Abbas and Fereidooni, Hossein and Abera, Tigist and Sikder, Amit Kumar and Miettinen, Markus and Aksu, Hidayet and Conti, Mauro and Sadeghi, Ahmad-Reza and Uluagac, Selcuk},
|
||||||
|
urldate = {2024-02-25},
|
||||||
|
date = {2020-07-08},
|
||||||
|
eprinttype = {arxiv},
|
||||||
|
eprint = {1808.02741 [cs]},
|
||||||
|
keywords = {{BLE}, Computer Science - Cryptography and Security, {ZigBee}, network traffic, privacy, smart-home, wifi},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{abuwaragaTestbed2020,
|
||||||
|
title = {Design and implementation of automated {IoT} security testbed},
|
||||||
|
volume = {88},
|
||||||
|
issn = {0167-4048},
|
||||||
|
url = {https://www.sciencedirect.com/science/article/pii/S0167404819301920},
|
||||||
|
doi = {10.1016/j.cose.2019.101648},
|
||||||
|
abstract = {The emergence of technology associated with the Internet of Things ({IoT}) is reshaping our lives, while simultaneously raising many issues due to their low level of security, which attackers can exploit for malicious purposes. This research paper conducts a comprehensive analysis of previous studies on {IoT} device security with a focus on the various tools used to test {IoT} devices and the vulnerabilities that were found. Additionally, the paper contains a survey of {IoT}-based security testbeds in the research literature. In this research study, we introduce an open source platform for identifying weaknesses in {IoT} networks and communications. The platform is easily modifiable and extendible to enable the addition of new security assessment tests and functionalities. It automates security evaluation, allowing for testing without human intervention. The testbed reports the security problems of the tested devices and can detect all attacks made against the devices. It is also designed to monitor communications within the testbed and with connected devices, enabling the system to abort if malicious activity is detected. To demonstrate the capabilities of the proposed {IoT} security testbed, it is used to examine the vulnerabilities of two {IoT} devices: a wireless camera and a smart bulb.},
|
||||||
|
pages = {101648},
|
||||||
|
journaltitle = {Computers \& Security},
|
||||||
|
shortjournal = {Computers \& Security},
|
||||||
|
author = {Abu Waraga, Omnia and Bettayeb, Meriem and Nasir, Qassim and Abu Talib, Manar},
|
||||||
|
urldate = {2024-06-30},
|
||||||
|
date = {2020-01-01},
|
||||||
|
keywords = {Automated testbed architecture, Internet of Things, {IoT} testbed, Vulnerability assessment},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{vasserman_vampire_2013,
|
||||||
|
title = {Vampire Attacks: Draining Life from Wireless Ad Hoc Sensor Networks},
|
||||||
|
volume = {12},
|
||||||
|
issn = {1558-0660},
|
||||||
|
url = {https://ieeexplore.ieee.org/document/6112758},
|
||||||
|
doi = {10.1109/TMC.2011.274},
|
||||||
|
shorttitle = {Vampire Attacks},
|
||||||
|
abstract = {Ad hoc low-power wireless networks are an exciting research direction in sensing and pervasive computing. Prior security work in this area has focused primarily on denial of communication at the routing or medium access control levels. This paper explores resource depletion attacks at the routing protocol layer, which permanently disable networks by quickly draining nodes' battery power. These "Vampire” attacks are not specific to any specific protocol, but rather rely on the properties of many popular classes of routing protocols. We find that all examined protocols are susceptible to Vampire attacks, which are devastating, difficult to detect, and are easy to carry out using as few as one malicious insider sending only protocol-compliant messages. In the worst case, a single Vampire can increase network-wide energy usage by a factor of O(N), where N in the number of network nodes. We discuss methods to mitigate these types of attacks, including a new proof-of-concept protocol that provably bounds the damage caused by Vampires during the packet forwarding phase.},
|
||||||
|
pages = {318--332},
|
||||||
|
number = {2},
|
||||||
|
journaltitle = {{IEEE} Transactions on Mobile Computing},
|
||||||
|
author = {Vasserman, Eugene Y. and Hopper, Nicholas},
|
||||||
|
urldate = {2024-06-22},
|
||||||
|
date = {2013-02},
|
||||||
|
note = {Conference Name: {IEEE} Transactions on Mobile Computing},
|
||||||
|
keywords = {Ad hoc networks, Denial of service, Energy consumption, Network topology, Routing, Routing protocols, Topology, ad hoc networks, routing, security, sensor networks, wireless networks},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{bashir2017internet,
|
||||||
|
title = {The Internet of Things testbed: a survey and evaluation},
|
||||||
|
volume = {78},
|
||||||
|
pages = {409--421},
|
||||||
|
journaltitle = {Future Generation Computer Systems},
|
||||||
|
author = {Bashir, Abid H and Gill, Khurram},
|
||||||
|
date = {2017},
|
||||||
|
note = {Publisher: Elsevier},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{vaughan2005use,
|
||||||
|
title = {The use of climate chambers in biological research},
|
||||||
|
volume = {39},
|
||||||
|
pages = {5121--5127},
|
||||||
|
number = {14},
|
||||||
|
journaltitle = {Environmental Science \& Technology},
|
||||||
|
author = {Vaughan, {TL} and Battle, {SC} and Walker, {KL}},
|
||||||
|
date = {2005},
|
||||||
|
note = {Publisher: {ACS} Publications},
|
||||||
|
}
|
||||||
|
|
||||||
|
@article{huang2011testbed,
|
||||||
|
title = {Testbed for evaluating performance of health monitoring systems},
|
||||||
|
volume = {60},
|
||||||
|
pages = {114--123},
|
||||||
|
number = {1},
|
||||||
|
journaltitle = {{IEEE} Transactions on Instrumentation and Measurement},
|
||||||
|
author = {Huang, Qinfen and Liu, Min and Garcia, Alfredo and Reynolds, Matthew},
|
||||||
|
date = {2011},
|
||||||
|
note = {Publisher: {IEEE}},
|
||||||
|
}
|
||||||
|
|
||||||
|
@online{noauthor_fhs_nodate,
|
||||||
|
title = {{FHS} Referenced Specifications},
|
||||||
|
url = {https://refspecs.linuxfoundation.org/fhs.shtml},
|
||||||
|
urldate = {2024-06-22},
|
||||||
|
}
|
||||||
|
|
||||||
|
@misc{fsh-home,
|
||||||
|
title = {3.8. /home : User home directories (optional)},
|
||||||
|
url = {https://refspecs.linuxfoundation.org/FHS_3.0/fhs/ch03s08.html},
|
||||||
|
urldate = {2024-06-22},
|
||||||
|
}
|
||||||
|
|
||||||
|
@online{go-fair,
|
||||||
|
title = {{FAIR} Principles},
|
||||||
|
url = {https://www.go-fair.org/fair-principles/},
|
||||||
|
abstract = {In 2016, the ‘{FAIR} Guiding Principles for scientific data management and stewardship’ were published in Scientific Data. The authors intended to provide guidelines to improve the Findability, Accessibility, Interoperability, and Reuse of digital assets. The principles emphasise machine-actionability (i.e., the capacity of… Continue reading →},
|
||||||
|
titleaddon = {{GO} {FAIR}},
|
||||||
|
urldate = {2024-06-22},
|
||||||
|
langid = {american},
|
||||||
|
}
|
||||||
|
|
||||||
|
@online{coryefelle_correcting_2016,
|
||||||
|
title = {Correcting the {IoT} History},
|
||||||
|
url = {http://www.chetansharma.com/correcting-the-iot-history/},
|
||||||
|
abstract = {In the last 5 years, {IoT} has entered the industry consciousness. There are varying forecasts calling for tremendous growth and … Continued},
|
||||||
|
titleaddon = {Chetan Sharma},
|
||||||
|
author = {{CoryEfelle}},
|
||||||
|
urldate = {2024-06-20},
|
||||||
|
date = {2016-03-14},
|
||||||
|
langid = {american},
|
||||||
|
}
|
||||||
|
|
||||||
|
@misc{noauthor_overview_2012,
|
||||||
|
title = {Overview of the Internet of things},
|
||||||
|
url = {https://handle.itu.int/11.1002/1000/11559},
|
||||||
|
shorttitle = {Y.{IoT}-overview},
|
||||||
|
abstract = {Recommendation {ITU}-T Y.2060 provides an overview of the Internet of things ({IoT}). It clarifies the concept and scope of the {IoT}, identifies the fundamental characteristics and high-level requirements of the {IoT} and describes the {IoT} reference model. The ecosystem and business models are also provided in an informative appendix.
|
||||||
|
|
||||||
|
Former {ITU}-T Y.2060 renumbered as {ITU}-T Y.4000 on 2016-02-05 without further modification and without being republished.},
|
||||||
|
number = {{ITU}-T Y.4000},
|
||||||
|
date = {2012-06-15},
|
||||||
|
}
|
||||||
|
|
||||||
|
@online{testbedOxford,
|
||||||
|
title = {test bed noun - Definition, pictures, pronunciation and usage notes {\textbar} Oxford Advanced Learner's Dictionary at {OxfordLearnersDictionaries}.com},
|
||||||
|
url = {https://www.oxfordlearnersdictionaries.com/definition/english/test-bed},
|
||||||
|
urldate = {2024-06-20},
|
||||||
|
}
|
||||||
|
|
||||||
|
@inproceedings{infoexpiot,
|
||||||
|
location = {New York, {NY}, {USA}},
|
||||||
|
title = {Information Exposure From Consumer {IoT} Devices: A Multidimensional, Network-Informed Measurement Approach},
|
||||||
|
isbn = {978-1-4503-6948-0},
|
||||||
|
url = {https://dl.acm.org/doi/10.1145/3355369.3355577},
|
||||||
|
doi = {10.1145/3355369.3355577},
|
||||||
|
series = {{IMC} '19},
|
||||||
|
shorttitle = {Information Exposure From Consumer {IoT} Devices},
|
||||||
|
abstract = {Internet of Things ({IoT}) devices are increasingly found in everyday homes, providing useful functionality for devices such as {TVs}, smart speakers, and video doorbells. Along with their benefits come potential privacy risks, since these devices can communicate information about their users to other parties over the Internet. However, understanding these risks in depth and at scale is difficult due to heterogeneity in devices' user interfaces, protocols, and functionality. In this work, we conduct a multidimensional analysis of information exposure from 81 devices located in labs in the {US} and {UK}. Through a total of 34,586 rigorous automated and manual controlled experiments, we characterize information exposure in terms of destinations of Internet traffic, whether the contents of communication are protected by encryption, what are the {IoT}-device interactions that can be inferred from such content, and whether there are unexpected exposures of private and/or sensitive information (e.g., video surreptitiously transmitted by a recording device). We highlight regional differences between these results, potentially due to different privacy regulations in the {US} and {UK}. Last, we compare our controlled experiments with data gathered from an in situ user study comprising 36 participants.},
|
||||||
|
pages = {267--279},
|
||||||
|
booktitle = {Proceedings of the Internet Measurement Conference},
|
||||||
|
publisher = {Association for Computing Machinery},
|
||||||
|
author = {Ren, Jingjing and Dubois, Daniel J. and Choffnes, David and Mandalari, Anna Maria and Kolcun, Roman and Haddadi, Hamed},
|
||||||
|
urldate = {2024-02-25},
|
||||||
|
date = {2019-10-21},
|
||||||
|
}
|
||||||
|
|
||||||
|
@inproceedings{aysom23,
|
||||||
|
title = {Are You Spying on Me? \{Large-Scale\} Analysis on \{{IoT}\} Data Exposure through Companion Apps},
|
||||||
|
isbn = {978-1-939133-37-3},
|
||||||
|
url = {https://www.usenix.org/conference/usenixsecurity23/presentation/nan},
|
||||||
|
shorttitle = {Are You Spying on Me?},
|
||||||
|
eventtitle = {32nd {USENIX} Security Symposium ({USENIX} Security 23)},
|
||||||
|
pages = {6665--6682},
|
||||||
|
author = {Nan, Yuhong and Wang, Xueqiang and Xing, Luyi and Liao, Xiaojing and Wu, Ruoyu and Wu, Jianliang and Zhang, Yifan and Wang, {XiaoFeng}},
|
||||||
|
urldate = {2024-02-25},
|
||||||
|
date = {2023},
|
||||||
|
langid = {english},
|
||||||
|
}
|
||||||
|
|
||||||
|
@unpublished{noauthor_toward_2023,
|
||||||
|
title = {Toward a common language to facilitate reproducible research and technology transfer: challenges and solutions},
|
||||||
|
url = {https://zenodo.org/records/8105339},
|
||||||
|
shorttitle = {Toward a common language to facilitate reproducible research and technology transfer},
|
||||||
|
abstract = {The keynote presentation from the 1st {ACM} conference on reproducibility and replicability ({ACM} {REP}'23).The video of this presentation is available at the {ACM} {YouTube} channel.Please don't hesitate to provide your feedback via the public Discord server from the {MLCommons} Task Force on Automation and Reproducibility and {GitHub} issues.[ {GitHub} project ] [ Public Collective Knowledge repository ][ Related reproducibility initiatives ] [ {cTuning}.org ] [ {cKnowledge}.org ]During the past 10 years, we have considerably improved the reproducibility of experimental results from published papers by introducing the artifact evaluation process with a unified artifact appendix and reproducibility checklists, Jupyter notebooks, containers, and Git repositories. On the other hand, our experience reproducing more than 200 papers shows that it can take weeks and months of painful and repetitive interactions between teams to reproduce artifacts. This effort includes decrypting numerous {README} files, examining ad-hoc artifacts and containers, and figuring out how to reproduce computational results. Furthermore, snapshot containers pose a challenge to optimize algorithms' performance, accuracy, power consumption and operational costs across diverse and rapidly evolving software, hardware, and data used in the real world.In this talk, I explain how our practical artifact evaluation experience and the feedback from researchers and evaluators motivated us to develop a simple, intuitive, technology agnostic, and English-like scripting language called Collective Mind ({CM}). It helps to automatically adapt any given experiment to any software, hardware, and data while automatically generating unified {README} files and synthesizing modular containers with a unified {API}. It is being developed by {MLCommons} to facilitate reproducible {AI}/{ML} Systems research and minimizing manual and repetitive benchmarking and optimization efforts, reduce time and costs for reproducible research, and simplify technology transfer to production. I also present several recent use cases of how {CM} helps {MLCommons}, the Student Cluster Competition, and artifact evaluation at {ACM}/{IEEE} conferences. I conclude with our development plans, new challenges, possible solutions, and upcoming reproducibility and optimization challenges powered by the {MLCommons} Collective Knowledge platform and {CM}: access.{cKnowledge}.org.},
|
||||||
|
urldate = {2024-02-25},
|
||||||
|
date = {2023-06-28},
|
||||||
|
doi = {10.5281/zenodo.8105339},
|
||||||
|
keywords = {artifact evaluation, artificial intelligence, automation, {cTuning}, chatgpt, cknowledge, collective knowledge, collective mind, competitions, llm, llm automation, machine learning, mlcommons, mlperf, optimization challenges, performance, replicability, reproducibility, reusability, systems},
|
||||||
|
}
|
||||||
24
thesis/setup_argparse.py
Normal file
24
thesis/setup_argparse.py
Normal file
@ -0,0 +1,24 @@
import argparse
from pathlib import Path

# Note: setup_init_device_root_parser, setup_sniff_parser and list_interfaces are
# assumed to be provided elsewhere in the iottb package.


def setup_argparse():
    # create top level parser
    root_parser = argparse.ArgumentParser(prog='iottb')
    # shared options
    root_parser.add_argument('--verbose', '-v', action='count', default=0)
    root_parser.add_argument('--script-mode', action='store_true', help='Run in script mode (non-interactive)')
    # Group of args w.r.t. iottb.db creation
    group = root_parser.add_argument_group('database options')
    group.add_argument('--db-home', default=Path.home() / 'IoTtb.db')
    group.add_argument('--config-home', default=Path.home() / '.config' / 'iottb.conf', type=Path)
    group.add_argument('--user', default=Path.home().stem, type=Path)

    # configure subcommands
    subparsers = root_parser.add_subparsers(title='subcommands', required=True, dest='command')
    # setup_capture_parser(subparsers)
    setup_init_device_root_parser(subparsers)
    setup_sniff_parser(subparsers)
    # Utility to list interfaces directly with iottb instead of relying on external tooling

    interfaces_parser = subparsers.add_parser('list-interfaces', aliases=['li', 'if'],
                                              help='List available network interfaces.')
    interfaces_parser.set_defaults(func=list_interfaces)

    return root_parser
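# Illustrative usage (not part of the committed file): assuming each subcommand
# registers its handler via set_defaults(func=...), the entry point would look
# roughly like this:
#
#     parser = setup_argparse()
#     args = parser.parse_args()
#     args.func(args)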
339
thesis/sniff.py
Normal file
339
thesis/sniff.py
Normal file
@ -0,0 +1,339 @@
import json
import logging
import os
import re
import subprocess
import uuid
from datetime import datetime
from pathlib import Path
from time import time

import click
from click_option_group import optgroup

from iottb.utils.string_processing import make_canonical_name

# Setup logger
logger = logging.getLogger('iottb.sniff')


def is_ip_address(address):
    ip_pattern = re.compile(r"^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$")
    return ip_pattern.match(address) is not None


def is_mac_address(address):
    mac_pattern = re.compile(r"^([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$")
    return mac_pattern.match(address) is not None


def load_config(cfg_file):
    """Loads configuration from the given file path."""
    with open(cfg_file, 'r') as config_file:
        return json.load(config_file)


def validate_sniff(ctx, param, value):
    logger.info('Validating sniff...')
    if ctx.params.get('unsafe') and not value:
        return None
    if not ctx.params.get('unsafe') and not value:
        raise click.BadParameter('Address is required unless --unsafe is set.')
    if not is_ip_address(value) and not is_mac_address(value):
        raise click.BadParameter('Address must be a valid IP address or MAC address.')
    return value


def run_pre(pre):
    pass


def run_post(post):
    pass

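# The pre/post hooks above are stubs in this listing. A minimal sketch of what they
# could do (an assumption, not the committed implementation) is to run the given
# script through the shell and let its output reach the terminal, e.g.:
#
#     import shlex
#
#     def run_pre(pre):
#         subprocess.run(shlex.split(pre), check=False)
#
#     def run_post(post):
#         subprocess.run(shlex.split(post), check=False)
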
@click.command('sniff', help='Sniff packets with tcpdump')
@optgroup.group('Testbed sources')
@optgroup.option('--db', '--database', type=str, envvar='IOTTB_DB', show_envvar=True,
                 help='Database of device. Only needed if not current default.')
@optgroup.option('--app', type=str, help='Companion app being used during capture', required=False)
@optgroup.group('Runtime behaviour')
@optgroup.option('--unsafe', is_flag=True, default=False, envvar='IOTTB_UNSAFE', is_eager=True,
                 help='Disable checks for otherwise required options.', show_envvar=True)
@optgroup.option('--guided', is_flag=True, default=False, envvar='IOTTB_GUIDED', show_envvar=True)
@optgroup.option('--pre', type=click.Path(exists=True, executable=True),
                 help='Script to be executed before the main command is started.')
@optgroup.option('--post', type=click.Path(exists=True, executable=True),
                 help='Script to be executed after the capture has completed.')
@optgroup.group('Tcpdump options')
@optgroup.option('-i', '--interface', envvar='IOTTB_CAPTURE_INTERFACE', show_envvar=True,
                 help='Network interface to capture on. '
                      'If not specified, tcpdump tries to find an appropriate one.')
@optgroup.option('-a', '--address', callback=validate_sniff, envvar='IOTTB_CAPTURE_ADDRESS', show_envvar=True,
                 help='IP or MAC address to filter packets by.')
@optgroup.option('-I', '--monitor-mode', help='Put interface into monitor mode.', is_flag=True)
@optgroup.option('--ff', type=str, envvar='IOTTB_CAPTURE_FILTER', show_envvar=True,
                 help='tcpdump filter as string or file path.')
@optgroup.option('-#', '--print-pacno', is_flag=True, default=True,
                 help='Print packet number at beginning of line. True by default.')
@optgroup.option('-e', '--print-ll', is_flag=True, default=False,
                 help='Print link layer headers. Off by default.')
@optgroup.option('-c', '--count', type=int, help='Number of packets to capture.', default=1000)
# @optgroup.option('--mins', type=int, help='Time in minutes to capture.', default=1)
@click.argument('tcpdump-args', nargs=-1, required=False, metavar='[TCPDUMP-ARGS]')
@click.argument('device', required=False)
@click.pass_context
def sniff(ctx, device, interface, print_pacno, ff, count, monitor_mode, print_ll, address, db, unsafe, guided,
          app, tcpdump_args, pre, post, **params):
    """Sniff packets from a device."""
    logger.info('sniff command invoked')
    # Step 0: run pre script
    if pre:
        click.echo(f'Running pre command {pre}')
        run_pre(pre)
    # Step 1: Load config
    config = ctx.obj['CONFIG']
    logger.debug(f'Config loaded: {config}')

    # Step 2: determine relevant database
    database = db if db else config.default_database
    path = config.db_path_dict[database]
    full_db_path = Path(path) / database
    logger.debug(f'Full db path is {str(full_db_path)}')

    # 2.2: Check if it exists
    if not full_db_path.is_dir():
        logger.error('DB unexpectedly missing')
        click.echo('DB unexpectedly missing')
        return

    canonical_name, aliases = make_canonical_name(device)
    click.echo(f'Using canonical device name {canonical_name}')
    device_path = full_db_path / canonical_name

    # Step 3: now the device
    if not device_path.exists():
        if not unsafe:
            logger.error(f'Device path {device_path} does not exist')
            click.echo(f'Device path {device_path} does not exist')
            return
        else:
            device_path.mkdir(parents=True, exist_ok=True)
            logger.info(f'Device path {device_path} created')

    click.echo(f'Found device at path {device_path}')
    # Step 4: Generate filter
    generic_filter = None
    cap_filter = None
    if ff:
        logger.debug(f'ff: {ff}')
        if Path(ff).is_file():
            logger.info('Given filter option is a file')
            with open(ff, 'r') as f:
                cap_filter = f.read().strip()
        else:
            logger.info('Given filter option is an expression')
            cap_filter = ff
    else:
        if address is not None:
            if is_ip_address(address):
                generic_filter = 'net'
                cap_filter = f'{generic_filter} {address}'
            elif is_mac_address(address):
                # tcpdump matches MAC addresses with the 'ether host' primitive
                generic_filter = 'ether host'
                cap_filter = f'{generic_filter} {address}'
            elif not unsafe:
                logger.error('Invalid address format')
                click.echo('Invalid address format')
                return

    logger.info(f'Generic filter {generic_filter}')
    click.echo(f'Using filter {cap_filter}')

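    # Illustrative examples of the resulting capture filter (hypothetical addresses,
    # assuming they were passed via --address):
    #   --address 192.168.1.42       ->  cap_filter == 'net 192.168.1.42'
    #   --address a0:b1:c2:d3:e4:f5  ->  cap_filter == 'ether host a0:b1:c2:d3:e4:f5'
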
    # Step 5: prep capture directory
    capture_date = datetime.now().strftime('%Y-%m-%d')
    capture_base_dir = device_path / f'sniffs/{capture_date}'
    capture_base_dir.mkdir(parents=True, exist_ok=True)

    logger.debug(f"Previous captures {list(capture_base_dir.glob('cap*'))}")
    capture_count = sum(1 for _ in capture_base_dir.glob('cap*'))
    logger.debug(f'Capture count is {capture_count}')

    capture_dir = f"cap{capture_count:04d}-{datetime.now().strftime('%H%M')}"
    logger.debug(f'capture_dir: {capture_dir}')

    # Full path
    capture_dir_full_path = capture_base_dir / capture_dir
    capture_dir_full_path.mkdir(parents=True, exist_ok=True)

    click.echo(f'Files will be placed in {str(capture_dir_full_path)}')
    logger.debug('Successfully created capture directory')

    # Step 6: Prepare capture file names
    # Generate UUID for filenames
    capture_uuid = str(uuid.uuid4())
    click.echo(f'Capture has id {capture_uuid}')

    pcap_file = f"{canonical_name}_{capture_uuid}.pcap"
    pcap_file_full_path = capture_dir_full_path / pcap_file
    stdout_log_file = f'stdout_{capture_uuid}.log'
    stderr_log_file = f'stderr_{capture_uuid}.log'

    logger.debug(f'Full pcap file path is {pcap_file_full_path}')
    logger.info(f'pcap file name is {pcap_file}')
    logger.info(f'stdout log file is {stdout_log_file}')
    logger.info(f'stderr log file is {stderr_log_file}')

    # Step 7: Build tcpdump command
    logger.debug(f'pgid {os.getpgrp()}')
    logger.debug(f'ppid {os.getppid()}')
    logger.debug(f'(real, effective, saved) user id: {os.getresuid()}')
    logger.debug(f'(real, effective, saved) group id: {os.getresgid()}')

    cmd = ['sudo', 'tcpdump']

    # 7.1 process flags
    flags = []
    if print_pacno:
        flags.append('-#')
    if print_ll:
        flags.append('-e')
    if monitor_mode:
        flags.append('-I')
    flags.append('-n')  # TODO: make configurable, in case name resolution is wanted
    cmd.extend(flags)
    flags_string = " ".join(flags)
    logger.debug(f'Flags: {flags_string}')

    # debug interlude
    verbosity = ctx.obj['VERBOSITY']
    if verbosity > 0:
        verbosity_flag = '-'
        for i in range(0, verbosity):
            verbosity_flag = verbosity_flag + 'v'
        logger.debug(f'verbosity string to pass to tcpdump: {verbosity_flag}')
        cmd.append(verbosity_flag)

    # 7.2 generic (i.e. reusable) kw args
    generic_kw_args = []
    if count:
        generic_kw_args.extend(['-c', str(count)])
    # if mins:
    #     generic_kw_args.extend(['-G', str(mins * 60)])  # TODO: currently leads to errors with sudo
    cmd.extend(generic_kw_args)
    generic_kw_args_string = " ".join(generic_kw_args)
    logger.debug(f'KW args: {generic_kw_args_string}')

    # 7.3 special kw args (not a priori reusable)
    non_generic_kw_args = []
    if interface:
        non_generic_kw_args.extend(['-i', interface])
    non_generic_kw_args.extend(['-w', str(pcap_file_full_path)])
    cmd.extend(non_generic_kw_args)
    non_generic_kw_args_string = " ".join(non_generic_kw_args)
    logger.debug(f'Non transferable (special) kw args: {non_generic_kw_args_string}')

    # 7.4 add filter expression
    if cap_filter:
        logger.debug(f'cap_filter (not generic): {cap_filter}')
        cmd.append(cap_filter)

    full_cmd_string = " ".join(cmd)

    logger.info(f'tcpdump command: {full_cmd_string}')
    click.echo('Capture setup complete!')

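    # For illustration (hypothetical interface and address): with the defaults and an
    # IP address filter, full_cmd_string would look roughly like
    #   sudo tcpdump -# -n -c 1000 -i wlan0 \
    #       -w <capture_dir>/<device>_<uuid>.pcap net 192.168.1.42
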
    # Step 8: Execute tcpdump command
    start_time = datetime.now().strftime("%H:%M:%S")
    start = time()
    try:
        if guided:
            click.confirm(f'Execute following command: {full_cmd_string}', abort=True)
        stdout_log_file_abs_path = capture_dir_full_path / stdout_log_file
        stderr_log_file_abs_path = capture_dir_full_path / stderr_log_file
        stdout_log_file_abs_path.touch(mode=0o777)
        stderr_log_file_abs_path.touch(mode=0o777)
        with open(stdout_log_file_abs_path, 'w') as out, open(stderr_log_file_abs_path, 'w') as err:
            logger.debug(f'\nstdout: {out}.\nstderr: {err}.\n')

            tcp_complete = subprocess.run(cmd, check=True, capture_output=True, text=True)

            out.write(tcp_complete.stdout)
            err.write(tcp_complete.stderr)

        # click.echo(f'Mock sniff execution')
        click.echo(f"Capture complete. Saved to {pcap_file}")
    except subprocess.CalledProcessError as e:
        logger.error(f'Failed to capture packets: {e}')
        click.echo(f'Failed to capture packets: {e}')
        click.echo(f'Check {stderr_log_file} for more info.')
        if ctx.obj['DEBUG']:
            msg = [f'STDERR log {stderr_log_file} contents:\n']
            with open(capture_dir_full_path / stderr_log_file) as log:
                for line in log:
                    msg.append(line)

            click.echo("\t".join(msg), err=True)
        # print('DEBUG ACTIVE')
        if guided:
            click.confirm('Create metadata anyway?', abort=True)
        else:
            click.echo('Aborting capture...')
            ctx.exit(1)
    end_time = datetime.now().strftime("%H:%M:%S")
    end = time()
    delta = end - start
    click.echo(f'tcpdump took {delta:.2f} seconds.')
    # Step 9: Register metadata
    metadata = {
        'device': canonical_name,
        'device_id': device,
        'capture_id': capture_uuid,
        'capture_date_iso': datetime.now().isoformat(),
        'invoked_command': " ".join(map(str, cmd)),
        'capture_duration': delta,
        'generic_parameters': {
            'flags': flags_string,
            'kwargs': generic_kw_args_string,
            'filter': generic_filter
        },
        'non_generic_parameters': {
            'kwargs': non_generic_kw_args_string,
            'filter': cap_filter
        },
        'features': {
            'interface': interface,
            'address': address
        },
        'resources': {
            'pcap_file': str(pcap_file),
            'stdout_log': str(stdout_log_file),
            'stderr_log': str(stderr_log_file)
        },
        'environment': {
            'capture_dir': capture_dir,
            'database': database,
            'capture_base_dir': str(capture_base_dir),
            'capture_dir_abs_path': str(capture_dir_full_path)
        }
    }

    click.echo('Ensuring correct ownership of created files.')
    username = os.getlogin()
    gid = os.getgid()

    # Else there are issues when running with sudo:
    try:
        subprocess.run(f'sudo chown -R {username}:{username} {device_path}', shell=True)
    except OSError as e:
        click.echo(f'Some error {e}')

    click.echo('Saving metadata.')
    metadata_abs_path = capture_dir_full_path / 'capture_metadata.json'
    with open(metadata_abs_path, 'w') as f:
        json.dump(metadata, f, indent=4)

    click.echo('END SNIFF SUBCOMMAND')
    if post:
        click.echo(f'Running post command {post}')
        run_post(post)
37
thesis/string_processing.py
Normal file
37
thesis/string_processing.py
Normal file
@ -0,0 +1,37 @@
import re
from iottb import definitions
import logging

logger = logging.getLogger(__name__)


def normalize_string(s, chars_to_replace=None, replacement=None, allow_unicode=False):
    pass


def make_canonical_name(name):
    """
    Normalize the device name to a canonical form:
    - Replace characters from the replacement set (e.g. spaces) with dashes.
    - Remove non-ASCII characters.
    - Convert to lowercase.
    - Keep only the first two dash-separated tokens.
    Returns the canonical name together with the aliases produced along the way.
    """
    aliases = [name]
    # We first normalize
    chars_to_replace = definitions.REPLACEMENT_SET_CANONICAL_DEVICE_NAMES
    pattern = re.compile('|'.join(re.escape(char) for char in chars_to_replace))
    norm_name = pattern.sub('-', name)
    # Remove non-ASCII chars
    norm_name = re.sub(r'[^\x00-\x7F]+', '', norm_name)

    aliases.append(norm_name)
    # Lower case
    norm_name = norm_name.lower()
    aliases.append(norm_name)
    # canonical name is only the first two tokens
    parts = norm_name.split('-')
    canonical_name = '-'.join(parts[:2])
    aliases.append(canonical_name)
    aliases = list(set(aliases))
    return canonical_name, aliases
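# Illustrative example (assuming the replacement set contains the space character):
#   make_canonical_name('Philips Hue Bridge v2') returns the canonical name
#   'philips-hue' plus aliases such as 'Philips-Hue-Bridge-v2' and
#   'philips-hue-bridge-v2' (order not guaranteed, since a set is used).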
1241
thesis/thesis.bst
Normal file
1241
thesis/thesis.bst
Normal file
File diff suppressed because it is too large