Compare commits
No commits in common. "main" and "sync" have entirely different histories.
39
.gitignore
vendored
39
.gitignore
vendored
@ -1,38 +1,3 @@
|
|||||||
__pycache__
|
|
||||||
.venv
|
|
||||||
iottb.egg-info
|
|
||||||
.idea/
|
|
||||||
*.log
|
|
||||||
logs/
|
|
||||||
*.pyc
|
|
||||||
.obsidian
|
.obsidian
|
||||||
dist/
|
venv
|
||||||
build/
|
__pycache__
|
||||||
|
|
||||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
|
||||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
|
||||||
|
|
||||||
# User-specific stuff
|
|
||||||
.idea/**/workspace.xml
|
|
||||||
.idea/**/tasks.xml
|
|
||||||
.idea/**/usage.statistics.xml
|
|
||||||
.idea/**/dictionaries
|
|
||||||
.idea/**/shelf
|
|
||||||
|
|
||||||
# AWS User-specific
|
|
||||||
.idea/**/aws.xml
|
|
||||||
|
|
||||||
# Generated files
|
|
||||||
.idea/**/contentModel.xml
|
|
||||||
|
|
||||||
# Sensitive or high-churn files
|
|
||||||
.idea/**/dataSources/
|
|
||||||
.idea/**/dataSources.ids
|
|
||||||
.idea/**/dataSources.local.xml
|
|
||||||
.idea/**/sqlDataSources.xml
|
|
||||||
.idea/**/dynamic.xml
|
|
||||||
.idea/**/uiDesigner.xml
|
|
||||||
.idea/**/dbnavigator.xml
|
|
||||||
|
|
||||||
.private/
|
|
||||||
*.pcap
|
|
||||||
|
|||||||
7
.idea/2024-bsc-sebastian-lenzlinger.iml
generated
Normal file
7
.idea/2024-bsc-sebastian-lenzlinger.iml
generated
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<module version="4">
|
||||||
|
<component name="PyDocumentationSettings">
|
||||||
|
<option name="format" value="PLAIN" />
|
||||||
|
<option name="myDocStringFormat" value="Plain" />
|
||||||
|
</component>
|
||||||
|
</module>
|
||||||
17
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
17
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
<component name="InspectionProjectProfileManager">
|
||||||
|
<profile version="1.0">
|
||||||
|
<option name="myName" value="Project Default" />
|
||||||
|
<inspection_tool class="DuplicatedCode" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
||||||
|
<Languages>
|
||||||
|
<language minSize="67" name="Python" />
|
||||||
|
</Languages>
|
||||||
|
</inspection_tool>
|
||||||
|
<inspection_tool class="PyPep8NamingInspection" enabled="true" level="WEAK WARNING" enabled_by_default="true">
|
||||||
|
<option name="ignoredErrors">
|
||||||
|
<list>
|
||||||
|
<option value="N806" />
|
||||||
|
</list>
|
||||||
|
</option>
|
||||||
|
</inspection_tool>
|
||||||
|
</profile>
|
||||||
|
</component>
|
||||||
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
<component name="InspectionProjectProfileManager">
|
||||||
|
<settings>
|
||||||
|
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||||
|
<version value="1.0" />
|
||||||
|
</settings>
|
||||||
|
</component>
|
||||||
7
.idea/misc.xml
generated
Normal file
7
.idea/misc.xml
generated
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="Black">
|
||||||
|
<option name="sdkName" value="Python 3.12 (pythonProject)" />
|
||||||
|
</component>
|
||||||
|
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (2024-bsc-sebastian-lenzlinger)" project-jdk-type="Python SDK" />
|
||||||
|
</project>
|
||||||
6
.idea/vcs.xml
generated
Normal file
6
.idea/vcs.xml
generated
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="VcsDirectoryMappings">
|
||||||
|
<mapping directory="" vcs="Git" />
|
||||||
|
</component>
|
||||||
|
</project>
|
||||||
14
.idea/webResources.xml
generated
Normal file
14
.idea/webResources.xml
generated
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="WebResourcesPaths">
|
||||||
|
<contentEntries>
|
||||||
|
<entry url="file://$PROJECT_DIR$">
|
||||||
|
<entryData>
|
||||||
|
<resourceRoots>
|
||||||
|
<path value="file://$PROJECT_DIR$/data" />
|
||||||
|
</resourceRoots>
|
||||||
|
</entryData>
|
||||||
|
</entry>
|
||||||
|
</contentEntries>
|
||||||
|
</component>
|
||||||
|
</project>
|
||||||
261
.idea/workspace.xml
generated
Normal file
261
.idea/workspace.xml
generated
Normal file
@ -0,0 +1,261 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="AutoImportSettings">
|
||||||
|
<option name="autoReloadType" value="SELECTIVE" />
|
||||||
|
</component>
|
||||||
|
<component name="ChangeListManager">
|
||||||
|
<list default="true" id="7a3ac8e1-7fbf-4aa7-9cf9-a51d7ade8503" name="Changes" comment="Start tracking development config files.">
|
||||||
|
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
|
||||||
|
<change beforePath="$PROJECT_DIR$/code/kydcap/subcommands/sniff.py" beforeDir="false" afterPath="$PROJECT_DIR$/code/kydcap/subcommands/sniff.py" afterDir="false" />
|
||||||
|
</list>
|
||||||
|
<option name="SHOW_DIALOG" value="false" />
|
||||||
|
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
||||||
|
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
|
||||||
|
<option name="LAST_RESOLUTION" value="IGNORE" />
|
||||||
|
</component>
|
||||||
|
<component name="FileTemplateManagerImpl">
|
||||||
|
<option name="RECENT_TEMPLATES">
|
||||||
|
<list>
|
||||||
|
<option value="Python Script" />
|
||||||
|
</list>
|
||||||
|
</option>
|
||||||
|
</component>
|
||||||
|
<component name="Git.Settings">
|
||||||
|
<option name="PUSH_AUTO_UPDATE" value="true" />
|
||||||
|
<option name="RECENT_BRANCH_BY_REPOSITORY">
|
||||||
|
<map>
|
||||||
|
<entry key="$PROJECT_DIR$" value="main" />
|
||||||
|
</map>
|
||||||
|
</option>
|
||||||
|
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
|
||||||
|
</component>
|
||||||
|
<component name="ProblemsViewState">
|
||||||
|
<option name="selectedTabId" value="CurrentFile" />
|
||||||
|
</component>
|
||||||
|
<component name="ProjectColorInfo">{
|
||||||
|
"associatedIndex": 3
|
||||||
|
}</component>
|
||||||
|
<component name="ProjectId" id="2fYAAba0AnH9jx9D0JkB8Xbuv0r" />
|
||||||
|
<component name="ProjectViewState">
|
||||||
|
<option name="hideEmptyMiddlePackages" value="true" />
|
||||||
|
<option name="showLibraryContents" value="true" />
|
||||||
|
</component>
|
||||||
|
<component name="PropertiesComponent">{
|
||||||
|
"keyToString": {
|
||||||
|
"ASKED_ADD_EXTERNAL_FILES": "true",
|
||||||
|
"ASKED_MARK_IGNORED_FILES_AS_EXCLUDED": "true",
|
||||||
|
"ASKED_SHARE_PROJECT_CONFIGURATION_FILES": "true",
|
||||||
|
"Python.__init__.executor": "Run",
|
||||||
|
"Python.__main__.executor": "Run",
|
||||||
|
"Python.main.executor": "Run",
|
||||||
|
"RunOnceActivity.ShowReadmeOnStart": "true",
|
||||||
|
"SHARE_PROJECT_CONFIGURATION_FILES": "true",
|
||||||
|
"git-widget-placeholder": "main",
|
||||||
|
"last_opened_file_path": "/home/slnopriv/projects/2024-bsc-sebastian-lenzlinger/code/kydcap/utils/device_metadata_utils.py",
|
||||||
|
"node.js.detected.package.eslint": "true",
|
||||||
|
"node.js.detected.package.tslint": "true",
|
||||||
|
"node.js.selected.package.eslint": "(autodetect)",
|
||||||
|
"node.js.selected.package.tslint": "(autodetect)",
|
||||||
|
"nodejs_package_manager_path": "npm",
|
||||||
|
"settings.editor.selected.configurable": "com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable",
|
||||||
|
"vue.rearranger.settings.migration": "true"
|
||||||
|
}
|
||||||
|
}</component>
|
||||||
|
<component name="RecentsManager">
|
||||||
|
<key name="MoveFile.RECENT_KEYS">
|
||||||
|
<recent name="$PROJECT_DIR$/archive" />
|
||||||
|
<recent name="$PROJECT_DIR$" />
|
||||||
|
<recent name="$PROJECT_DIR$/code/misc/archive" />
|
||||||
|
<recent name="$PROJECT_DIR$/code/misc" />
|
||||||
|
<recent name="$PROJECT_DIR$/code/kydcap/utils" />
|
||||||
|
</key>
|
||||||
|
</component>
|
||||||
|
<component name="RunManager" selected="Python.__main__">
|
||||||
|
<configuration name="__init__" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
|
||||||
|
<module name="2024-bsc-sebastian-lenzlinger" />
|
||||||
|
<option name="ENV_FILES" value="" />
|
||||||
|
<option name="INTERPRETER_OPTIONS" value="" />
|
||||||
|
<option name="PARENT_ENVS" value="true" />
|
||||||
|
<envs>
|
||||||
|
<env name="PYTHONUNBUFFERED" value="1" />
|
||||||
|
</envs>
|
||||||
|
<option name="SDK_HOME" value="" />
|
||||||
|
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/code/kydcap" />
|
||||||
|
<option name="IS_MODULE_SDK" value="true" />
|
||||||
|
<option name="ADD_CONTENT_ROOTS" value="true" />
|
||||||
|
<option name="ADD_SOURCE_ROOTS" value="true" />
|
||||||
|
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
|
||||||
|
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/code/kydcap/__init__.py" />
|
||||||
|
<option name="PARAMETERS" value="" />
|
||||||
|
<option name="SHOW_COMMAND_LINE" value="false" />
|
||||||
|
<option name="EMULATE_TERMINAL" value="false" />
|
||||||
|
<option name="MODULE_MODE" value="false" />
|
||||||
|
<option name="REDIRECT_INPUT" value="false" />
|
||||||
|
<option name="INPUT_FILE" value="" />
|
||||||
|
<method v="2" />
|
||||||
|
</configuration>
|
||||||
|
<configuration name="__main__" type="PythonConfigurationType" factoryName="Python" nameIsGenerated="true">
|
||||||
|
<module name="2024-bsc-sebastian-lenzlinger" />
|
||||||
|
<option name="ENV_FILES" value="" />
|
||||||
|
<option name="INTERPRETER_OPTIONS" value="" />
|
||||||
|
<option name="PARENT_ENVS" value="true" />
|
||||||
|
<envs>
|
||||||
|
<env name="PYTHONUNBUFFERED" value="1" />
|
||||||
|
</envs>
|
||||||
|
<option name="SDK_HOME" value="" />
|
||||||
|
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/code/kydcap" />
|
||||||
|
<option name="IS_MODULE_SDK" value="true" />
|
||||||
|
<option name="ADD_CONTENT_ROOTS" value="true" />
|
||||||
|
<option name="ADD_SOURCE_ROOTS" value="true" />
|
||||||
|
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
|
||||||
|
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/code/kydcap/__main__.py" />
|
||||||
|
<option name="PARAMETERS" value="init-device-root --dynamic" />
|
||||||
|
<option name="SHOW_COMMAND_LINE" value="false" />
|
||||||
|
<option name="EMULATE_TERMINAL" value="false" />
|
||||||
|
<option name="MODULE_MODE" value="false" />
|
||||||
|
<option name="REDIRECT_INPUT" value="false" />
|
||||||
|
<option name="INPUT_FILE" value="" />
|
||||||
|
<method v="2" />
|
||||||
|
</configuration>
|
||||||
|
<list>
|
||||||
|
<item itemvalue="Python.__main__" />
|
||||||
|
<item itemvalue="Python.__init__" />
|
||||||
|
</list>
|
||||||
|
<recent_temporary>
|
||||||
|
<list>
|
||||||
|
<item itemvalue="Python.__init__" />
|
||||||
|
</list>
|
||||||
|
</recent_temporary>
|
||||||
|
</component>
|
||||||
|
<component name="SharedIndexes">
|
||||||
|
<attachedChunks>
|
||||||
|
<set>
|
||||||
|
<option value="bundled-js-predefined-1d06a55b98c1-74d2a5396914-JavaScript-PY-241.14494.241" />
|
||||||
|
<option value="bundled-python-sdk-0509580d9d50-28c9f5db9ffe-com.jetbrains.pycharm.pro.sharedIndexes.bundled-PY-241.14494.241" />
|
||||||
|
</set>
|
||||||
|
</attachedChunks>
|
||||||
|
</component>
|
||||||
|
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
|
||||||
|
<component name="TaskManager">
|
||||||
|
<task active="true" id="Default" summary="Default task">
|
||||||
|
<changelist id="7a3ac8e1-7fbf-4aa7-9cf9-a51d7ade8503" name="Changes" comment="" />
|
||||||
|
<created>1713967494544</created>
|
||||||
|
<option name="number" value="Default" />
|
||||||
|
<option name="presentableId" value="Default" />
|
||||||
|
<updated>1713967494544</updated>
|
||||||
|
<workItem from="1713967495566" duration="6927000" />
|
||||||
|
<workItem from="1714554228183" duration="34000" />
|
||||||
|
<workItem from="1714554269789" duration="56478000" />
|
||||||
|
<workItem from="1714616237168" duration="6135000" />
|
||||||
|
</task>
|
||||||
|
<task id="LOCAL-00001" summary="Add code for capture testbed. This is a huge commit. End of day sync...">
|
||||||
|
<option name="closed" value="true" />
|
||||||
|
<created>1714615532115</created>
|
||||||
|
<option name="number" value="00001" />
|
||||||
|
<option name="presentableId" value="LOCAL-00001" />
|
||||||
|
<option name="project" value="LOCAL" />
|
||||||
|
<updated>1714615532115</updated>
|
||||||
|
</task>
|
||||||
|
<task id="LOCAL-00002" summary="Add some notes.">
|
||||||
|
<option name="closed" value="true" />
|
||||||
|
<created>1714615608142</created>
|
||||||
|
<option name="number" value="00002" />
|
||||||
|
<option name="presentableId" value="LOCAL-00002" />
|
||||||
|
<option name="project" value="LOCAL" />
|
||||||
|
<updated>1714615608142</updated>
|
||||||
|
</task>
|
||||||
|
<task id="LOCAL-00003" summary="Update gitignore">
|
||||||
|
<option name="closed" value="true" />
|
||||||
|
<created>1714616343905</created>
|
||||||
|
<option name="number" value="00003" />
|
||||||
|
<option name="presentableId" value="LOCAL-00003" />
|
||||||
|
<option name="project" value="LOCAL" />
|
||||||
|
<updated>1714616343905</updated>
|
||||||
|
</task>
|
||||||
|
<task id="LOCAL-00004" summary="Add test module.">
|
||||||
|
<option name="closed" value="true" />
|
||||||
|
<created>1714617162903</created>
|
||||||
|
<option name="number" value="00004" />
|
||||||
|
<option name="presentableId" value="LOCAL-00004" />
|
||||||
|
<option name="project" value="LOCAL" />
|
||||||
|
<updated>1714617162903</updated>
|
||||||
|
</task>
|
||||||
|
<task id="LOCAL-00005" summary="Update gitignore again.">
|
||||||
|
<option name="closed" value="true" />
|
||||||
|
<created>1714617231842</created>
|
||||||
|
<option name="number" value="00005" />
|
||||||
|
<option name="presentableId" value="LOCAL-00005" />
|
||||||
|
<option name="project" value="LOCAL" />
|
||||||
|
<updated>1714617231842</updated>
|
||||||
|
</task>
|
||||||
|
<task id="LOCAL-00006" summary="Start tracking development config files.">
|
||||||
|
<option name="closed" value="true" />
|
||||||
|
<created>1714617266799</created>
|
||||||
|
<option name="number" value="00006" />
|
||||||
|
<option name="presentableId" value="LOCAL-00006" />
|
||||||
|
<option name="project" value="LOCAL" />
|
||||||
|
<updated>1714617266799</updated>
|
||||||
|
</task>
|
||||||
|
<option name="localTasksCounter" value="7" />
|
||||||
|
<servers />
|
||||||
|
</component>
|
||||||
|
<component name="TypeScriptGeneratedFilesManager">
|
||||||
|
<option name="version" value="3" />
|
||||||
|
</component>
|
||||||
|
<component name="Vcs.Log.Tabs.Properties">
|
||||||
|
<option name="RECENT_FILTERS">
|
||||||
|
<map>
|
||||||
|
<entry key="Branch">
|
||||||
|
<value>
|
||||||
|
<list>
|
||||||
|
<RecentGroup>
|
||||||
|
<option name="FILTER_VALUES">
|
||||||
|
<option value="HEAD" />
|
||||||
|
</option>
|
||||||
|
</RecentGroup>
|
||||||
|
<RecentGroup>
|
||||||
|
<option name="FILTER_VALUES">
|
||||||
|
<option value="devel" />
|
||||||
|
</option>
|
||||||
|
</RecentGroup>
|
||||||
|
</list>
|
||||||
|
</value>
|
||||||
|
</entry>
|
||||||
|
</map>
|
||||||
|
</option>
|
||||||
|
<option name="TAB_STATES">
|
||||||
|
<map>
|
||||||
|
<entry key="MAIN">
|
||||||
|
<value>
|
||||||
|
<State>
|
||||||
|
<option name="FILTERS">
|
||||||
|
<map>
|
||||||
|
<entry key="branch">
|
||||||
|
<value>
|
||||||
|
<list>
|
||||||
|
<option value="HEAD" />
|
||||||
|
</list>
|
||||||
|
</value>
|
||||||
|
</entry>
|
||||||
|
</map>
|
||||||
|
</option>
|
||||||
|
</State>
|
||||||
|
</value>
|
||||||
|
</entry>
|
||||||
|
</map>
|
||||||
|
</option>
|
||||||
|
</component>
|
||||||
|
<component name="VcsManagerConfiguration">
|
||||||
|
<MESSAGE value="Add code for capture testbed. This is a huge commit. End of day sync..." />
|
||||||
|
<MESSAGE value="Add some notes." />
|
||||||
|
<MESSAGE value="Update gitignore" />
|
||||||
|
<MESSAGE value="Add test module." />
|
||||||
|
<MESSAGE value="Update gitignore again." />
|
||||||
|
<MESSAGE value="Start tracking development config files." />
|
||||||
|
<option name="LAST_COMMIT_MESSAGE" value="Start tracking development config files." />
|
||||||
|
</component>
|
||||||
|
<component name="com.intellij.coverage.CoverageDataManagerImpl">
|
||||||
|
<SUITE FILE_PATH="coverage/2024_bsc_sebastian_lenzlinger$__init__.coverage" NAME="__init__ Coverage Results" MODIFIED="1714619300966" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/code/kydcap" />
|
||||||
|
<SUITE FILE_PATH="coverage/2024_bsc_sebastian_lenzlinger$main.coverage" NAME="__main__ Coverage Results" MODIFIED="1714619560177" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="false" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/code/kydcap" />
|
||||||
|
</component>
|
||||||
|
</project>
|
||||||
39
README.md
39
README.md
@ -1,4 +1,4 @@
|
|||||||
# IOTTB
|
# Your Project Name
|
||||||
|
|
||||||
Hello! This is the README file that accompanies the Gitlab repository for your Bachelor or Master thesis. You'll need to update this README as you work on your thesis to reflect relevant information about your thesis.
|
Hello! This is the README file that accompanies the Gitlab repository for your Bachelor or Master thesis. You'll need to update this README as you work on your thesis to reflect relevant information about your thesis.
|
||||||
|
|
||||||
@ -6,26 +6,29 @@ Hello! This is the README file that accompanies the Gitlab repository for your B
|
|||||||
|
|
||||||
## Organization of the repository
|
## Organization of the repository
|
||||||
- **code** folder: holds source code
|
- **code** folder: holds source code
|
||||||
- **data** folder: Holds no relevant data for this thesis. Files in here where used for debugging and testing.
|
- **data** folder: holds (input) data required for the project. If your input data files are larger than 100MB, create a sample data file smaller than 100MB and commit the sample instead of the full data file. Include a note explaining how the full data can be retrieved.
|
||||||
- **thesis** folder: contains the latex sources + PDF of the final thesis.
|
- **results** folder: holds results files generated as part of the project
|
||||||
- **presentation** folder: contains PDF and sources of the presentation.
|
- **thesis** folder: contains the latex sources + PDF of the final thesis. You can use the [basilea-latex template](https://github.com/ivangiangreco/basilea-latex) as a starting point.
|
||||||
- **literature** used can be found in the **thesis** folder .bib or in the **presentation** folders .bib file.
|
- **presentation** folder: contains the sources of the presentation (e.g., latex or PPT)
|
||||||
- **notes** folder: Various notes and the beginnings of a wiki.
|
- **literature** folder: contains any research paper that the student needs to read or finds interesting
|
||||||
- `iottb` is the python testbed as a single executable (including python interpreter) which should be able to run on Linux machines.
|
- **notes** folder: holds minutes of meetings
|
||||||
|
|
||||||
|
## Useful resources
|
||||||
|
- [Efficient Reading of Papers in Science and Technology](https://www.cs.columbia.edu/~hgs/netbib/efficientReading.pdf)
|
||||||
|
- [Heilmeier's catechism](https://en.wikipedia.org/wiki/George_H._Heilmeier#Heilmeier%27s_Catechism)
|
||||||
|
|
||||||
## Description
|
## Description
|
||||||
In this thesis I design a automation testbed for IoT devices.
|
Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors.
|
||||||
The main result is the software `iottb` which automates some aspects of experimenting with IoT devices.
|
|
||||||
Currently, it implements a database guided by the FAIR principles of open data as well as wraps tcpdump such that metadata is stored.
|
## Visuals
|
||||||
|
Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people to using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection.
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
For more info see `code/iottb-project/README.md`.
|
Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README.
|
||||||
As well as examples in the thesis writeup at `thesis/BScThesisUnibas_main-5.pdf`. <br>
|
|
||||||
In general:
|
|
||||||
```bash
|
|
||||||
iottb --help # Most general overview
|
|
||||||
iottb <subcommand> --help
|
|
||||||
```
|
|
||||||
## License
|
## License
|
||||||
The code is licensed under a BSD 3-clause license, a copy of which is provided in the file `code/iottb-project/LICENSE`.
|
To allow further development and use during public events of the implemented system through the University of Basel, the system is expected to be well documented and provided to the university under a license that allows such reuse, e.g., the [BSD 3-clause license](https://opensource.org/license/bsd-3-clause/). The student agrees that all code produced during the project may be released open-source in the context of the PET group's projects.
|
||||||
|
|
||||||
|
|||||||
@ -2,58 +2,31 @@ def setup_sniff_tcpdump_parser(parser_sniff):
|
|||||||
# arguments which will be passed to tcpdump
|
# arguments which will be passed to tcpdump
|
||||||
parser_sniff_tcpdump = parser_sniff.add_argument_group('tcpdump arguments')
|
parser_sniff_tcpdump = parser_sniff.add_argument_group('tcpdump arguments')
|
||||||
# TODO: tcpdump_parser.add_argument('-c', '--count', re)
|
# TODO: tcpdump_parser.add_argument('-c', '--count', re)
|
||||||
parser_sniff_tcpdump.add_argument('-a', '--ip-address=', help='IP address of the device to sniff', dest='device_ip')
|
parser_sniff_tcpdump.add_argument("-a", "--ip-address=", help="IP address of the device to sniff", dest="device_ip")
|
||||||
parser_sniff_tcpdump.add_argument('-i', '--interface=', help='Interface of the capture device.', dest='capture_interface',default='')
|
parser_sniff_tcpdump.add_argument("-i", "--interface=", help="Interface of the capture device.", dest="capture_interface",default="")
|
||||||
parser_sniff_tcpdump.add_argument('-I', '--monitor-mode', help='Put interface into monitor mode',
|
parser_sniff_tcpdump.add_argument("-I", "--monitor-mode", help="Put interface into monitor mode",
|
||||||
action='store_true')
|
action="store_true")
|
||||||
parser_sniff_tcpdump.add_argument('-n', help='Deactivate name resolution. Option is set by default.',
|
parser_sniff_tcpdump.add_argument("-n", help="Deactivate name resolution. Option is set by default.",
|
||||||
action='store_true')
|
action="store_true")
|
||||||
parser_sniff_tcpdump.add_argument('-#', '--number',
|
parser_sniff_tcpdump.add_argument("-#", "--number",
|
||||||
help='Print packet number at beginning of line. Set by default.',
|
help="Print packet number at beginning of line. Set by default.",
|
||||||
action='store_true')
|
action="store_true")
|
||||||
parser_sniff_tcpdump.add_argument('-e', help='Print link layer headers. Option is set by default.',
|
parser_sniff_tcpdump.add_argument("-e", help="Print link layer headers. Option is set by default.",
|
||||||
action='store_true')
|
action="store_true")
|
||||||
parser_sniff_tcpdump.add_argument('-t', action='count', default=0,
|
parser_sniff_tcpdump.add_argument("-t", action="count", default=0,
|
||||||
help='Please see tcpdump manual for details. Unused by default.')
|
help="Please see tcpdump manual for details. Unused by default.")
|
||||||
|
|
||||||
|
|
||||||
def setup_sniff_parser(subparsers):
|
def setup_sniff_parser(subparsers):
|
||||||
# create parser for 'sniff' command
|
# create parser for "sniff" command
|
||||||
parser_sniff = subparsers.add_parser('sniff', help='Start tcpdump capture.')
|
parser_sniff = subparsers.add_parser("sniff", help="Start tcpdump capture.")
|
||||||
setup_sniff_tcpdump_parser(parser_sniff)
|
setup_sniff_tcpdump_parser(parser_sniff)
|
||||||
setup_pcap_filter_parser(parser_sniff)
|
setup_pcap_filter_parser(parser_sniff)
|
||||||
cap_size_group = parser_sniff.add_mutually_exclusive_group(required=True)
|
cap_size_group = parser_sniff.add_mutually_exclusive_group(required=True)
|
||||||
cap_size_group.add_argument('-c', '--count', type=int, help='Number of packets to capture.', default=0)
|
cap_size_group.add_argument("-c", "--count", type=int, help="Number of packets to capture.", default=0)
|
||||||
cap_size_group.add_argument('--mins', type=int, help='Time in minutes to capture.', default=60)
|
cap_size_group.add_argument("--mins", type=int, help="Time in minutes to capture.", default=60)
|
||||||
|
|
||||||
|
|
||||||
def setup_pcap_filter_parser(parser_sniff):
|
def setup_pcap_filter_parser(parser_sniff):
|
||||||
parser_pcap_filter = parser_sniff.add_argument_parser('pcap-filter expression')
|
parser_pcap_filter = parser_sniff.add_argument_parser("pcap-filter expression")
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def check_iottb_env():
|
|
||||||
# This makes the option '--root-dir' obsolescent # TODO How to streamline this?\
|
|
||||||
try:
|
|
||||||
iottb_home = environ['IOTTB_HOME'] # TODO WARN implicit declaration of env var name!
|
|
||||||
except KeyError:
|
|
||||||
logger.error(f"Environment variable 'IOTTB_HOME' is not set."
|
|
||||||
f"Setting environment variable 'IOTTB_HOME' to '~/{IOTTB_HOME_ABS}'")
|
|
||||||
environ['IOTTB_HOME'] = IOTTB_HOME_ABS
|
|
||||||
finally:
|
|
||||||
if not Path(IOTTB_HOME_ABS).exists():
|
|
||||||
print(f'"{IOTTB_HOME_ABS}" does not exist.')
|
|
||||||
response = input('Do you want to create it now? [y/N]')
|
|
||||||
logger.debug(f'response: {response}')
|
|
||||||
if response.lower() != 'y':
|
|
||||||
logger.debug(f'Not setting "IOTTB_HOME"')
|
|
||||||
print('TODO')
|
|
||||||
print("Aborting execution...")
|
|
||||||
return ReturnCodes.ABORTED
|
|
||||||
else:
|
|
||||||
print(f'Setting environment variable IOTTB_HOME""')
|
|
||||||
Path(IOTTB_HOME_ABS).mkdir(parents=True,
|
|
||||||
exist_ok=False) # Should always work since in 'not exist' code path
|
|
||||||
return ReturnCodes.SUCCESS
|
|
||||||
logger.info(f'"{IOTTB_HOME_ABS}" exists.')
|
|
||||||
# TODO: Check that it is a valid iottb dir or can we say it is valid by definition if?
|
|
||||||
return ReturnCodes.SUCCESS
|
|
||||||
|
|||||||
@ -1,107 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import argparse
|
|
||||||
from os import environ
|
|
||||||
from pathlib import Path
|
|
||||||
import logging
|
|
||||||
from archive.iottb.subcommands.add_device import setup_init_device_root_parser
|
|
||||||
# from iottb.subcommands.capture import setup_capture_parser
|
|
||||||
from iottb.subcommands.sniff import setup_sniff_parser
|
|
||||||
from iottb.utils.tcpdump_utils import list_interfaces
|
|
||||||
from iottb.logger import setup_logging
|
|
||||||
|
|
||||||
logger = logging.getLogger('iottbLogger.__main__')
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
|
|
||||||
######################
|
|
||||||
# Argparse setup
|
|
||||||
######################
|
|
||||||
def setup_argparse():
|
|
||||||
# create top level parser
|
|
||||||
root_parser = argparse.ArgumentParser(prog='iottb')
|
|
||||||
# shared options
|
|
||||||
root_parser.add_argument('--verbose', '-v', action='count', default=0)
|
|
||||||
root_parser.add_argument('--script-mode', action='store_true', help='Run in script mode (non-interactive)')
|
|
||||||
# Group of args w.r.t iottb.db creation
|
|
||||||
group = root_parser.add_argument_group('database options')
|
|
||||||
group.add_argument('--db-home', default=Path.home() / 'IoTtb.db')
|
|
||||||
group.add_argument('--config-home', default=Path.home() / '.config' / 'iottb.conf', type=Path, )
|
|
||||||
group.add_argument('--user', default=Path.home().stem, type=Path, )
|
|
||||||
|
|
||||||
# configure subcommands
|
|
||||||
subparsers = root_parser.add_subparsers(title='subcommands', required=True, dest='command')
|
|
||||||
# setup_capture_parser(subparsers)
|
|
||||||
setup_init_device_root_parser(subparsers)
|
|
||||||
setup_sniff_parser(subparsers)
|
|
||||||
# Utility to list interfaces directly with iottb instead of relying on external tooling
|
|
||||||
|
|
||||||
interfaces_parser = subparsers.add_parser('list-interfaces', aliases=['li', 'if'],
|
|
||||||
help='List available network interfaces.')
|
|
||||||
interfaces_parser.set_defaults(func=list_interfaces)
|
|
||||||
|
|
||||||
return root_parser
|
|
||||||
|
|
||||||
|
|
||||||
###
|
|
||||||
# Where put ?!
|
|
||||||
###
|
|
||||||
class IoTdb:
|
|
||||||
def __init__(self, db_home=Path.home() / 'IoTtb.db', iottb_config=Path.home() / '.conf' / 'iottb.conf',
|
|
||||||
user=Path.home().stem):
|
|
||||||
self.db_home = db_home
|
|
||||||
self.config_home = iottb_config
|
|
||||||
self.default_filters_home = db_home / 'default_filters'
|
|
||||||
self.user = user
|
|
||||||
|
|
||||||
def create_db(self, mode=0o777, parents=False, exist_ok=False):
|
|
||||||
logger.info(f'Creating db at {self.db_home}')
|
|
||||||
try:
|
|
||||||
self.db_home.mkdir(mode=mode, parents=parents, exist_ok=exist_ok)
|
|
||||||
except FileExistsError:
|
|
||||||
logger.error(f'Database path already at {self.db_home} exists and is not a directory')
|
|
||||||
finally:
|
|
||||||
logger.debug(f'Leaving finally clause in create_db')
|
|
||||||
|
|
||||||
def create_device_tree(self, mode=0o777, parents=False, exist_ok=False):
|
|
||||||
logger.info(f'Creating device tree at {self.db_home / 'devices'}')
|
|
||||||
#TODO
|
|
||||||
|
|
||||||
def parse_db_config(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def parse_iottb_config(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def get_known_devices(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def iottb_db_exists(db_home=Path.home() / 'IoTtb.db'):
|
|
||||||
res = db_home.is_dir()
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
logger.debug(f'Pre setup_argparse()')
|
|
||||||
parser = setup_argparse()
|
|
||||||
logger.debug('Post setup_argparse().')
|
|
||||||
args = parser.parse_args()
|
|
||||||
logger.debug(f'Args parsed: {args}')
|
|
||||||
if args.command:
|
|
||||||
try:
|
|
||||||
args.func(args)
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
print('Received keyboard interrupt. Exiting...')
|
|
||||||
exit(1)
|
|
||||||
except Exception as e:
|
|
||||||
logger.debug(f'Error in main: {e}')
|
|
||||||
print(f'Error: {e}')
|
|
||||||
# create_capture_directory(args.device_name)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
setup_logging()
|
|
||||||
logger.debug("Debug level is working")
|
|
||||||
logger.info("Info level is working")
|
|
||||||
logger.warning("Warning level is working")
|
|
||||||
|
|
||||||
main()
|
|
||||||
@ -1,41 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
from enum import Flag, unique, global_enum
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
|
|
||||||
'''
|
|
||||||
Defining IOTTB_HOME_ABS here implies that it be immutable.
|
|
||||||
It is used here so that one could configure it.
|
|
||||||
But after its used in __man__ this cannot be relied upon.
|
|
||||||
'''
|
|
||||||
IOTTB_HOME_ABS = Path().home() / 'IOTTB.db'
|
|
||||||
|
|
||||||
# TODO maybe wrap this into class to make it easier to pass along to different objects
|
|
||||||
# But will need more refactoring
|
|
||||||
DEVICE_METADATA_FILE = 'device_metadata.json'
|
|
||||||
CAPTURE_METADATA_FILE = 'capture_metadata.json'
|
|
||||||
TODAY_DATE_STRING = datetime.now().strftime('%d%b%Y').lower() # TODO convert to function in utils or so
|
|
||||||
|
|
||||||
CAPTURE_FOLDER_BASENAME = 'capture_###'
|
|
||||||
|
|
||||||
AFFIRMATIVE_USER_RESPONSE = {'yes', 'y', 'true', 'Y', 'Yes', 'YES'}
|
|
||||||
NEGATIVE_USER_RESPONSE = {'no', 'n', 'N', 'No'}
|
|
||||||
YES_DEFAULT = AFFIRMATIVE_USER_RESPONSE.union({'', ' '})
|
|
||||||
NO_DEFAULT = NEGATIVE_USER_RESPONSE.union({'', ' '})
|
|
||||||
|
|
||||||
|
|
||||||
@unique
|
|
||||||
@global_enum
|
|
||||||
class ReturnCodes(Flag):
|
|
||||||
SUCCESS = 0
|
|
||||||
ABORTED = 1
|
|
||||||
FAILURE = 2
|
|
||||||
UNKNOWN = 3
|
|
||||||
FILE_NOT_FOUND = 4
|
|
||||||
FILE_ALREADY_EXISTS = 5
|
|
||||||
INVALID_ARGUMENT = 6
|
|
||||||
INVALID_ARGUMENT_VALUE = 7
|
|
||||||
|
|
||||||
|
|
||||||
def iottb_home_abs():
|
|
||||||
return None
|
|
||||||
@ -1,35 +0,0 @@
|
|||||||
import logging
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from logging.handlers import RotatingFileHandler
|
|
||||||
|
|
||||||
|
|
||||||
def setup_logging():
|
|
||||||
# Ensure the logs directory exists
|
|
||||||
log_directory = 'logs'
|
|
||||||
if not os.path.exists(log_directory):
|
|
||||||
os.makedirs(log_directory)
|
|
||||||
|
|
||||||
# Create handlers
|
|
||||||
file_handler = RotatingFileHandler(os.path.join(log_directory, 'iottb.log'), maxBytes=1048576, backupCount=5)
|
|
||||||
console_handler = logging.StreamHandler(sys.stdout)
|
|
||||||
|
|
||||||
# Create formatters and add it to handlers
|
|
||||||
file_fmt = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
|
|
||||||
console_fmt = logging.Formatter(
|
|
||||||
'%(asctime)s - %(levelname)s - %(filename)s:%(lineno)d - %(funcName)s - %(message)s')
|
|
||||||
|
|
||||||
file_handler.setFormatter(file_fmt)
|
|
||||||
console_handler.setFormatter(console_fmt)
|
|
||||||
|
|
||||||
# Get the root logger and add handlers
|
|
||||||
root_logger = logging.getLogger()
|
|
||||||
root_logger.setLevel(logging.DEBUG)
|
|
||||||
root_logger.addHandler(file_handler)
|
|
||||||
root_logger.addHandler(console_handler)
|
|
||||||
|
|
||||||
# Prevent propagation to the root logger to avoid duplicate logs
|
|
||||||
root_logger.propagate = False
|
|
||||||
|
|
||||||
|
|
||||||
setup_logging()
|
|
||||||
@ -1,106 +0,0 @@
|
|||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from iottb.definitions import ReturnCodes, CAPTURE_METADATA_FILE
|
|
||||||
from iottb.models.device_metadata_model import DeviceMetadata
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger('iottbLogger.capture_metadata_model')
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
|
|
||||||
class CaptureMetadata:
|
|
||||||
# Required Fields
|
|
||||||
device_metadata: DeviceMetadata
|
|
||||||
|
|
||||||
device_id: str
|
|
||||||
capture_dir: Path
|
|
||||||
capture_file: str
|
|
||||||
|
|
||||||
|
|
||||||
# Statistics
|
|
||||||
start_time: str
|
|
||||||
stop_time: str
|
|
||||||
|
|
||||||
# tcpdump
|
|
||||||
packet_count: Optional[int]
|
|
||||||
pcap_filter: str = ''
|
|
||||||
tcpdump_command: str = ''
|
|
||||||
interface: str = ''
|
|
||||||
|
|
||||||
# Optional Fields
|
|
||||||
device_ip_address: str = 'No IP Address set'
|
|
||||||
device_mac_address: Optional[str] = None
|
|
||||||
|
|
||||||
app: Optional[str] = None
|
|
||||||
app_version: Optional[str] = None
|
|
||||||
firmware_version: Optional[str] = None
|
|
||||||
|
|
||||||
def __init__(self, device_metadata: DeviceMetadata, capture_dir: Path):
|
|
||||||
logger.info(f'Creating CaptureMetadata model from DeviceMetadata: {device_metadata}')
|
|
||||||
self.device_metadata = device_metadata
|
|
||||||
self.capture_id = str(uuid.uuid4())
|
|
||||||
self.capture_date = datetime.now().strftime('%d-%m-%YT%H:%M:%S').lower()
|
|
||||||
self.capture_dir = capture_dir
|
|
||||||
assert capture_dir.is_dir(), f'Capture directory {capture_dir} does not exist'
|
|
||||||
|
|
||||||
def build_capture_file_name(self):
|
|
||||||
logger.info(f'Building capture file name')
|
|
||||||
if self.app is None:
|
|
||||||
logger.debug(f'No app specified')
|
|
||||||
prefix = "iphone-14" #self.device_metadata.device_short_name
|
|
||||||
else:
|
|
||||||
logger.debug(f'App specified: {self.app}')
|
|
||||||
assert str(self.app).strip() not in {'', ' '}, f'app is not a valid name: {self.app}'
|
|
||||||
prefix = self.app.lower().replace(' ', '_')
|
|
||||||
# assert self.capture_dir is not None, f'{self.capture_dir} does not exist'
|
|
||||||
filename = f'{prefix}_{str(self.capture_id)}.pcap'
|
|
||||||
logger.debug(f'Capture file name: {filename}')
|
|
||||||
self.capture_file = filename
|
|
||||||
|
|
||||||
def save_capture_metadata_to_json(self, file_path: Path = Path(CAPTURE_METADATA_FILE)):
|
|
||||||
assert self.capture_dir.is_dir(), f'capture_dir is not a directory: {self.capture_dir}'
|
|
||||||
if file_path.is_file():
|
|
||||||
print(f'File {file_path} already exists, update instead.')
|
|
||||||
return ReturnCodes.FILE_ALREADY_EXISTS
|
|
||||||
metadata = self.to_json(indent=2)
|
|
||||||
with file_path.open('w') as file:
|
|
||||||
json.dump(metadata, file)
|
|
||||||
return ReturnCodes.SUCCESS
|
|
||||||
|
|
||||||
def to_json(self, indent=2):
|
|
||||||
# TODO: Where to validate data?
|
|
||||||
logger.info(f'Converting CaptureMetadata to JSON')
|
|
||||||
data = {}
|
|
||||||
|
|
||||||
# List of fields from CaptureData class, if fields[key]==True, then it is a required field
|
|
||||||
fields = {
|
|
||||||
'capture_id': True, #
|
|
||||||
'device_id': True,
|
|
||||||
'capture_dir': True,
|
|
||||||
'capture_file': False,
|
|
||||||
'capture_date': False,
|
|
||||||
'start_time': True,
|
|
||||||
'stop_time': True,
|
|
||||||
'packet_count': False,
|
|
||||||
'pcap_filter': False,
|
|
||||||
'tcpdump_command': False,
|
|
||||||
'interface': False,
|
|
||||||
'device_ip_address': False,
|
|
||||||
'device_mac_address': False,
|
|
||||||
'app': False,
|
|
||||||
'app_version': False,
|
|
||||||
'firmware_version': False
|
|
||||||
}
|
|
||||||
|
|
||||||
for field, is_mandatory in fields.items():
|
|
||||||
value = getattr(self, field, None)
|
|
||||||
if value not in [None, ''] or is_mandatory:
|
|
||||||
if value in [None, ''] and is_mandatory:
|
|
||||||
raise ValueError(f'Field {field} is required and cannot be empty.')
|
|
||||||
data[field] = str(value) if not isinstance(value, str) else value
|
|
||||||
logger.debug(f'Capture metadata: {data}')
|
|
||||||
return json.dumps(data, indent=indent)
|
|
||||||
@ -1,114 +0,0 @@
|
|||||||
import json
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional, List
|
|
||||||
|
|
||||||
# iottb modules
|
|
||||||
from iottb.definitions import ReturnCodes, DEVICE_METADATA_FILE
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger('iottbLogger.device_metadata_model')
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
# 3rd party libs
|
|
||||||
|
|
||||||
IMMUTABLE_FIELDS = {'device_name', 'device_short_name', 'device_id', 'date_created'}
|
|
||||||
|
|
||||||
|
|
||||||
class DeviceMetadata:
|
|
||||||
# Required fields
|
|
||||||
device_name: str
|
|
||||||
device_short_name: str
|
|
||||||
device_id: str
|
|
||||||
date_created: str
|
|
||||||
|
|
||||||
device_root_path: Path
|
|
||||||
# Optional Fields
|
|
||||||
aliases: Optional[List[str]] = None
|
|
||||||
device_type: Optional[str] = None
|
|
||||||
device_serial_number: Optional[str] = None
|
|
||||||
device_firmware_version: Optional[str] = None
|
|
||||||
date_updated: Optional[str] = None
|
|
||||||
|
|
||||||
capture_files: Optional[List[str]] = []
|
|
||||||
|
|
||||||
def __init__(self, device_name: str, device_root_path: Path):
|
|
||||||
self.device_name = device_name
|
|
||||||
self.device_short_name = device_name.lower().replace(' ', '_')
|
|
||||||
self.device_id = str(uuid.uuid4())
|
|
||||||
self.date_created = datetime.now().strftime('%d-%m-%YT%H:%M:%S').lower()
|
|
||||||
self.device_root_path = device_root_path
|
|
||||||
if not self.device_root_path or not self.device_root_path.is_dir():
|
|
||||||
logger.error(f'Invalid device root path: {device_root_path}')
|
|
||||||
raise ValueError(f'Invalid device root path: {device_root_path}')
|
|
||||||
logger.debug(f'Device name: {device_name}')
|
|
||||||
logger.debug(f'Device short_name: {self.device_short_name}')
|
|
||||||
logger.debug(f'Device root dir: {device_root_path}')
|
|
||||||
logger.info(f'Initialized DeviceMetadata model: {device_name}')
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def load_from_json(cls, device_file_path: Path):
|
|
||||||
logger.info(f'Loading DeviceMetadata from JSON file: {device_file_path}')
|
|
||||||
assert device_file_path.is_file(), f'{device_file_path} is not a file'
|
|
||||||
assert device_file_path.name == DEVICE_METADATA_FILE, f'{device_file_path} is not a {DEVICE_METADATA_FILE}'
|
|
||||||
device_meta_filename = device_file_path
|
|
||||||
|
|
||||||
with device_meta_filename.open('r') as file:
|
|
||||||
metadata_json = json.load(file)
|
|
||||||
metadata_model_obj = cls.from_json(metadata_json)
|
|
||||||
return metadata_model_obj
|
|
||||||
|
|
||||||
def save_to_json(self, file_path: Path):
|
|
||||||
logger.info(f'Saving DeviceMetadata to JSON file: {file_path}')
|
|
||||||
if file_path.is_file():
|
|
||||||
print(f'File {file_path} already exists, update instead.')
|
|
||||||
return ReturnCodes.FILE_ALREADY_EXISTS
|
|
||||||
metadata = self.to_json(indent=2)
|
|
||||||
with file_path.open('w') as file:
|
|
||||||
json.dump(metadata, file)
|
|
||||||
return ReturnCodes.SUCCESS
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_json(cls, metadata_json):
|
|
||||||
if isinstance(metadata_json, dict):
|
|
||||||
return DeviceMetadata(**metadata_json)
|
|
||||||
|
|
||||||
def to_json(self, indent=2):
|
|
||||||
# TODO: atm almost exact copy as in CaptureMetadata
|
|
||||||
data = {}
|
|
||||||
|
|
||||||
fields = {
|
|
||||||
'device_name': True,
|
|
||||||
'device_short_name': True,
|
|
||||||
'device_id': True,
|
|
||||||
'date_created': True,
|
|
||||||
'device_root_path': True,
|
|
||||||
'aliases': False,
|
|
||||||
'device_type': False,
|
|
||||||
'device_serial_number': False,
|
|
||||||
'device_firmware_version': False,
|
|
||||||
'date_updated': False,
|
|
||||||
'capture_files': False,
|
|
||||||
}
|
|
||||||
|
|
||||||
for field, is_mandatory in fields.items():
|
|
||||||
value = getattr(self, field, None)
|
|
||||||
if value not in [None, ''] or is_mandatory:
|
|
||||||
if value in [None, ''] and is_mandatory:
|
|
||||||
logger.debug(f'Mandatory field {field}: {value}')
|
|
||||||
raise ValueError(f'Field {field} is required and cannot be empty.')
|
|
||||||
data[field] = str(value) if not isinstance(value, str) else value
|
|
||||||
logger.debug(f'Device metadata: {data}')
|
|
||||||
return json.dumps(data, indent=indent)
|
|
||||||
|
|
||||||
|
|
||||||
def dir_contains_device_metadata(dir_path: Path):
|
|
||||||
if not dir_path.is_dir():
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
meta_file_path = dir_path / DEVICE_METADATA_FILE
|
|
||||||
print(f'Device metadata file path {str(meta_file_path)}')
|
|
||||||
if not meta_file_path.is_file():
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
@ -1,77 +0,0 @@
|
|||||||
import logging
|
|
||||||
import os
|
|
||||||
import pathlib
|
|
||||||
|
|
||||||
from iottb import definitions
|
|
||||||
from iottb.definitions import DEVICE_METADATA_FILE, ReturnCodes
|
|
||||||
from iottb.models.device_metadata_model import DeviceMetadata
|
|
||||||
|
|
||||||
# logger.setLevel(logging.INFO) # Since module currently passes all tests
|
|
||||||
logger = logging.getLogger('iottbLogger.add_device')
|
|
||||||
logger.setLevel(logging.INFO)
|
|
||||||
|
|
||||||
def setup_init_device_root_parser(subparsers):
|
|
||||||
#assert os.environ['IOTTB_HOME'] is not None, "IOTTB_HOME environment variable is not set"
|
|
||||||
parser = subparsers.add_parser('add-device', aliases=['add-device-root', 'add'],
|
|
||||||
help='Initialize a folder for a device.')
|
|
||||||
parser.add_argument('--root_dir', type=pathlib.Path,
|
|
||||||
default=definitions.IOTTB_HOME_ABS) # TODO: Refactor code to not use this or handle iottb here
|
|
||||||
group = parser.add_mutually_exclusive_group()
|
|
||||||
group.add_argument('--guided', action='store_true', help='Guided setup', default=False)
|
|
||||||
group.add_argument('--name', action='store', type=str, help='name of device')
|
|
||||||
parser.set_defaults(func=handle_add)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_add(args):
|
|
||||||
# TODO: This whole function should be refactored into using the fact that IOTTB_HOME is set, and the dir exists
|
|
||||||
logger.info(f'Add device handler called with args {args}')
|
|
||||||
|
|
||||||
if args.guided:
|
|
||||||
logger.debug('begin guided setup')
|
|
||||||
metadata = guided_setup(args.root_dir) # TODO refactor to use IOTTB_HOME
|
|
||||||
logger.debug('guided setup complete')
|
|
||||||
else:
|
|
||||||
logger.debug('Setup through passed args: setup')
|
|
||||||
if not args.name:
|
|
||||||
logger.error('No device name specified with unguided setup.')
|
|
||||||
return ReturnCodes.ERROR
|
|
||||||
metadata = DeviceMetadata(args.name, args.root_dir)
|
|
||||||
|
|
||||||
file_path = args.root_dir / DEVICE_METADATA_FILE # TODO IOTTB_HOME REFACTOR
|
|
||||||
if file_path.exists():
|
|
||||||
print('Directory already contains a metadata file. Aborting.')
|
|
||||||
return ReturnCodes.ABORTED
|
|
||||||
serialized_metadata = metadata.to_json()
|
|
||||||
response = input(f'Confirm device metadata: {serialized_metadata} [y/N]')
|
|
||||||
logger.debug(f'response: {response}')
|
|
||||||
if response not in definitions.AFFIRMATIVE_USER_RESPONSE:
|
|
||||||
print('Adding device aborted by user.')
|
|
||||||
return ReturnCodes.ABORTED
|
|
||||||
|
|
||||||
logger.debug(f'Device metadata file {file_path}')
|
|
||||||
if metadata.save_to_json(file_path) == ReturnCodes.FILE_ALREADY_EXISTS:
|
|
||||||
logger.error('File exists after checking, which should not happen.')
|
|
||||||
return ReturnCodes.ABORTED
|
|
||||||
|
|
||||||
print('Device metadata successfully created.')
|
|
||||||
return ReturnCodes.SUCCESS
|
|
||||||
|
|
||||||
|
|
||||||
def configure_metadata():
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def guided_setup(device_root) -> DeviceMetadata:
|
|
||||||
logger.info('Guided setup')
|
|
||||||
response = 'N'
|
|
||||||
device_name = ''
|
|
||||||
while response.upper() == 'N':
|
|
||||||
device_name = input('Please enter name of device: ')
|
|
||||||
response = input(f'Confirm device name: {device_name} [y/N] ')
|
|
||||||
if device_name == '' or device_name is None:
|
|
||||||
print('Name cannot be empty')
|
|
||||||
logger.warning('Name cannot be empty')
|
|
||||||
logger.debug(f'Response is {response}')
|
|
||||||
logger.debug(f'Device name is {device_name}')
|
|
||||||
|
|
||||||
return DeviceMetadata(device_name, device_root)
|
|
||||||
@ -1,179 +0,0 @@
|
|||||||
import subprocess
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from iottb.definitions import *
|
|
||||||
import logging
|
|
||||||
from iottb.models.capture_metadata_model import CaptureMetadata
|
|
||||||
from iottb.models.device_metadata_model import DeviceMetadata, dir_contains_device_metadata
|
|
||||||
from iottb.utils.capture_utils import get_capture_src_folder, make_capture_src_folder
|
|
||||||
from iottb.utils.tcpdump_utils import check_installed
|
|
||||||
|
|
||||||
logger = logging.getLogger('iottbLogger.capture')
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
def setup_capture_parser(subparsers):
|
|
||||||
parser = subparsers.add_parser('sniff', help='Sniff packets with tcpdump')
|
|
||||||
# metadata args
|
|
||||||
parser.add_argument('-a', '--ip-address', help='IP address of the device to sniff', dest='device_ip')
|
|
||||||
# tcpdump args
|
|
||||||
parser.add_argument('device_root', help='Root folder for device to sniff',
|
|
||||||
type=Path, default=Path.cwd())
|
|
||||||
parser.add_argument('-s', '--safe', help='Ensure correct device root folder before sniffing', action='store_true')
|
|
||||||
parser.add_argument('--app', help='Application name to sniff', dest='app_name', default=None)
|
|
||||||
|
|
||||||
parser_sniff_tcpdump = parser.add_argument_group('tcpdump arguments')
|
|
||||||
parser_sniff_tcpdump.add_argument('-i', '--interface', help='Interface to capture on.', dest='capture_interface',
|
|
||||||
required=True)
|
|
||||||
parser_sniff_tcpdump.add_argument('-I', '--monitor-mode', help='Put interface into monitor mode',
|
|
||||||
action='store_true')
|
|
||||||
parser_sniff_tcpdump.add_argument('-n', help='Deactivate name resolution. True by default.',
|
|
||||||
action='store_true', dest='no_name_resolution')
|
|
||||||
parser_sniff_tcpdump.add_argument('-#', '--number',
|
|
||||||
help='Print packet number at beginning of line. True by default.',
|
|
||||||
action='store_true')
|
|
||||||
parser_sniff_tcpdump.add_argument('-e', help='Print link layer headers. True by default.',
|
|
||||||
action='store_true', dest='print_link_layer')
|
|
||||||
parser_sniff_tcpdump.add_argument('-t', action='count', default=0,
|
|
||||||
help='Please see tcpdump manual for details. Unused by default.')
|
|
||||||
|
|
||||||
cap_size_group = parser.add_mutually_exclusive_group(required=False)
|
|
||||||
cap_size_group.add_argument('-c', '--count', type=int, help='Number of packets to capture.', default=10)
|
|
||||||
cap_size_group.add_argument('--mins', type=int, help='Time in minutes to capture.', default=1)
|
|
||||||
|
|
||||||
parser.set_defaults(func=handle_capture)
|
|
||||||
|
|
||||||
|
|
||||||
def cwd_is_device_root_dir() -> bool:
|
|
||||||
device_metadata_file = Path.cwd() / DEVICE_METADATA_FILE
|
|
||||||
return device_metadata_file.is_file()
|
|
||||||
|
|
||||||
|
|
||||||
def start_guided_device_root_dir_setup():
|
|
||||||
assert False, 'Not implemented'
|
|
||||||
|
|
||||||
|
|
||||||
def handle_metadata():
|
|
||||||
assert not cwd_is_device_root_dir()
|
|
||||||
print(f'Unable to find {DEVICE_METADATA_FILE} in current working directory')
|
|
||||||
print('You need to setup a device root directory before using this command')
|
|
||||||
response = input('Would you like to be guided through the setup? [y/n]')
|
|
||||||
if response.lower() == 'y':
|
|
||||||
start_guided_device_root_dir_setup()
|
|
||||||
else:
|
|
||||||
print('\'iottb init-device-root --help\' for more information.')
|
|
||||||
exit(ReturnCodes.ABORTED)
|
|
||||||
# device_id = handle_capture_metadata()
|
|
||||||
return ReturnCodes.SUCCESS
|
|
||||||
|
|
||||||
|
|
||||||
def get_device_metadata_from_file(device_metadata_filename: Path) -> str:
|
|
||||||
assert device_metadata_filename.is_file(), f'Device metadata file f"{device_metadata_filename}" does not exist'
|
|
||||||
device_metadata = DeviceMetadata.load_from_json(device_metadata_filename)
|
|
||||||
return device_metadata
|
|
||||||
|
|
||||||
|
|
||||||
def run_tcpdump(cmd):
|
|
||||||
# TODO: Maybe specify files for stout and stderr
|
|
||||||
try:
|
|
||||||
p = subprocess.run(cmd, capture_output=True, text=True, check=True)
|
|
||||||
if p.returncode != 0:
|
|
||||||
print(f'Error running tcpdump {p.stderr}')
|
|
||||||
# TODO add logging
|
|
||||||
else:
|
|
||||||
print(f'tcpdump run successfully\n: {p.stdout}')
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def handle_capture(args):
|
|
||||||
if not check_installed():
|
|
||||||
print('Please install tcpdump first')
|
|
||||||
exit(ReturnCodes.ABORTED)
|
|
||||||
assert args.device_root is not None, f'Device root directory is required'
|
|
||||||
    assert dir_contains_device_metadata(args.device_root), f'Device metadata file \'{args.device_root}\' does not exist'
    # get device metadata
    logger.info(f'Device root directory: {args.device_root}')
    if args.safe and not dir_contains_device_metadata(args.device_root):
        print(f'Supplied folder contains no device metadata. '
              f'Please setup a device root directory before using this command')
        exit(ReturnCodes.ABORTED)
    elif dir_contains_device_metadata(args.device_root):
        device_metadata_filename = args.device_root / DEVICE_METADATA_FILE
        device_data = DeviceMetadata.load_from_json(device_metadata_filename)
    else:
        name = input('Please enter a device name: ')
        args.device_root.mkdir(parents=True, exist_ok=True)

        device_data = DeviceMetadata(name, args.device_root)
    # start constructing environment for capture
    capture_dir = get_capture_src_folder(args.device_root)
    make_capture_src_folder(capture_dir)
    capture_metadata = CaptureMetadata(device_data, capture_dir)

    capture_metadata.interface = args.capture_interface
    cmd = ['sudo', 'tcpdump', '-i', args.capture_interface]
    cmd = build_tcpdump_args(args, cmd, capture_metadata)
    capture_metadata.tcpdump_command = cmd

    print('Executing: ' + ' '.join(cmd))

    # run capture
    try:
        start_time = datetime.now().strftime('%H:%M:%S')
        run_tcpdump(cmd)
        stop_time = datetime.now().strftime('%H:%M:%S')
        capture_metadata.start_time = start_time
        capture_metadata.stop_time = stop_time
    except KeyboardInterrupt:
        print('Received keyboard interrupt.')
        exit(ReturnCodes.ABORTED)
    except subprocess.CalledProcessError as e:
        print(f'Failed to capture packet: {e}')
        exit(ReturnCodes.FAILURE)
    except Exception as e:
        print(f'Failed to capture packet: {e}')
        exit(ReturnCodes.FAILURE)

    return ReturnCodes.SUCCESS


def build_tcpdump_args(args, cmd, capture_metadata: CaptureMetadata):
    if args.monitor_mode:
        cmd.append('-I')
    if args.no_name_resolution:
        cmd.append('-n')
    if args.number:
        cmd.append('-#')
    if args.print_link_layer:
        cmd.append('-e')

    if args.count:
        cmd.append('-c')
        cmd.append(str(args.count))
    elif args.mins:
        assert False, 'Unimplemented option'

    if args.app_name is not None:
        capture_metadata.app = args.app_name

    capture_metadata.build_capture_file_name()
    cmd.append('-w')
    cmd.append(str(capture_metadata.capture_dir) + "/" + capture_metadata.capture_file)

    if args.safe:
        cmd.append(f'host {args.device_ip}')  # if not specified, filter 'any' implied by tcpdump
        capture_metadata.device_id = args.device_ip

    return cmd

# def capture_file_cmd(args, cmd, capture_dir, capture_metadata: CaptureMetadata):
#     capture_file_prefix = capture_metadata.get_device_metadata().get_device_short_name()
#     if args.app_name is not None:
#         capture_file_prefix = args.app_name
#         capture_metadata.set_app(args.app_name)
#     capfile_name = capture_file_prefix + '_' + str(capture_metadata.get_capture_id()) + '.pcap'
#     capture_metadata.set_capture_file(capfile_name)
#     capfile_abs_path = capture_dir / capfile_name
#     capture_metadata.set_capture_file(capfile_name)
#     cmd.append('-w')
#     cmd.append(str(capfile_abs_path))
@@ -1,63 +0,0 @@
import subprocess
import logging


logger = logging.getLogger('iottbLogger.capture')
logger.setLevel(logging.DEBUG)
class Sniffer:
    def __init__(self):
        pass


def setup_sniff_parser(subparsers):
    parser = subparsers.add_parser('sniff', help='Sniff packets with tcpdump')
    # metadata args
    parser.add_argument('-a', '--addr', help='IP or MAC address of IoT device')
    # tcpdump args
    parser.add_argument('--app', help='Application name to sniff', default=None)

    parser_sniff_tcpdump = parser.add_argument_group('tcpdump arguments')

    parser_sniff_tcpdump.add_argument('-i', '--interface', help='Interface to capture on.', dest='capture_interface',
                                      required=True)
    parser_sniff_tcpdump.add_argument('-I', '--monitor-mode', help='Put interface into monitor mode',
                                      action='store_true')
    parser_sniff_tcpdump.add_argument('-n', help='Deactivate name resolution. True by default.',
                                      action='store_true', dest='no_name_resolution')
    parser_sniff_tcpdump.add_argument('-#', '--number',
                                      help='Print packet number at beginning of line. True by default.',
                                      action='store_true')
    parser_sniff_tcpdump.add_argument('-e', help='Print link layer headers. True by default.',
                                      action='store_true', dest='print_link_layer')
    parser_sniff_tcpdump.add_argument('-t', action='count', default=0,
                                      help='Please see tcpdump manual for details. Unused by default.')

    cap_size_group = parser.add_mutually_exclusive_group(required=False)
    cap_size_group.add_argument('-c', '--count', type=int, help='Number of packets to capture.', default=10)
    cap_size_group.add_argument('--mins', type=int, help='Time in minutes to capture.', default=1)

    parser.set_defaults(func=sniff)


def parse_addr(addr):
    # TODO Implement
    pass


def sniff(args):
    if args.addr is None:
        print('You must supply either a MAC or IP(v4) address to use this tool!')
        logger.info("Exiting on account of missing MAC/IP.")
        exit(1)
    else:
        (type, value) = parse_addr(args.addr)
        # TODO Get this party started

def sniff_tcpdump(args, filter):
    pass

def sniff_mitmproxy(args, filter):
    pass

def sniff_raw(cmd, args):
    pass
@@ -1,44 +0,0 @@
import uuid
from pathlib import Path
from iottb.models.device_metadata_model import dir_contains_device_metadata
from iottb.utils.utils import get_iso_date


def get_capture_uuid():
    return str(uuid.uuid4())


def get_capture_date_folder(device_root: Path):
    today_iso = get_iso_date()
    today_folder = device_root / today_iso
    if dir_contains_device_metadata(device_root):
        if not today_folder.is_dir():
            try:
                today_folder.mkdir()
            except FileExistsError:
                print(f'Folder {today_folder} already exists')
        return today_folder
    raise FileNotFoundError(f'Given path {device_root} is not a device root directory')


def get_capture_src_folder(device_folder: Path):
    assert device_folder.is_dir(), f'Given path {device_folder} is not a folder'
    today_iso = get_iso_date()
    max_sequence_number = 1
    for d in device_folder.iterdir():
        if d.is_dir() and d.name.startswith(f'{today_iso}_capture_'):
            name = d.name
            num = int(name.split('_')[2])
            max_sequence_number = max(max_sequence_number, num)

    next_sequence_number = max_sequence_number + 1
    return device_folder.joinpath(f'{today_iso}_capture_{next_sequence_number:03}')


def make_capture_src_folder(capture_src_folder: Path):
    try:
        capture_src_folder.mkdir()
    except FileExistsError:
        print(f'Folder {capture_src_folder} already exists')
    finally:
        return capture_src_folder
@@ -1,41 +0,0 @@
import ipaddress
import shutil
import subprocess
from typing import Optional


def check_installed() -> bool:
    """Check if tcpdump is installed and available on the system path."""
    return shutil.which('tcpdump') is not None


def ensure_installed():
    """Ensure that tcpdump is installed, raise an error if not."""
    if not check_installed():
        raise RuntimeError('tcpdump is not installed. Please install it to continue.')


def list_interfaces(args) -> str:
    """List available network interfaces using tcpdump."""
    ensure_installed()
    try:
        result = subprocess.run(['tcpdump', '--list-interfaces'], capture_output=True, text=True, check=True)
        print(result.stdout)
        return result.stdout
    except subprocess.CalledProcessError as e:
        print(f'Failed to list interfaces: {e}')
        return ''


def is_valid_ipv4(ip: str) -> bool:
    try:
        ipaddress.IPv4Address(ip)
        return True
    except ValueError:
        return False

def str_to_ipv4(ip: str) -> tuple[bool, Optional[ipaddress.IPv4Address]]:
    try:
        address = ipaddress.IPv4Address(ip)
        return True, address  # construction succeeded, so the string is a valid IPv4 address
    except ipaddress.AddressValueError:
        return False, None
@@ -1,18 +0,0 @@
import uuid
from datetime import datetime
from iottb.definitions import TODAY_DATE_STRING, DEVICE_METADATA_FILE, CAPTURE_METADATA_FILE
from pathlib import Path


def get_iso_date():
    return datetime.now().strftime('%Y-%m-%d')


def subfolder_exists(parent: Path, child: str):
    return parent.joinpath(child).exists()


def generate_unique_string_with_prefix(prefix: str):
    return prefix + '_' + str(uuid.uuid4())
@ -15,5 +15,5 @@ class Metadata:
|
|||||||
|
|
||||||
|
|
||||||
def create_metadata(filename, unique_id, device_details):
|
def create_metadata(filename, unique_id, device_details):
|
||||||
date_string = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
|
date_string = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
|
||||||
meta_filename = f'meta_{date_string}_{unique_id}.json'
|
meta_filename = f"meta_{date_string}_{unique_id}.json"
|
||||||
|
|||||||
@ -3,38 +3,38 @@ from pathlib import Path
|
|||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from iottb.models.device_metadata_model import DeviceMetadata
|
from kydcap.models.device_metadata_model import DeviceMetadata
|
||||||
from iottb.definitions import DEVICE_METADATA_FILE
|
from kydcap.config import DEVICE_METADATA_FILE
|
||||||
|
|
||||||
|
|
||||||
def write_device_metadata_to_file(metadata: DeviceMetadata, device_path: Path):
|
def write_device_metadata_to_file(metadata: DeviceMetadata, device_path: Path):
|
||||||
'''Write the device metadata to a JSON file in the specified directory.'''
|
"""Write the device metadata to a JSON file in the specified directory."""
|
||||||
meta_file_path = device_path / 'meta.json'
|
meta_file_path = device_path / "meta.json"
|
||||||
meta_file_path.write_text(metadata.json(indent=2))
|
meta_file_path.write_text(metadata.json(indent=2))
|
||||||
|
|
||||||
|
|
||||||
def confirm_device_metadata(metadata: DeviceMetadata) -> bool:
|
def confirm_device_metadata(metadata: DeviceMetadata) -> bool:
|
||||||
'''Display device metadata for user confirmation.'''
|
"""Display device metadata for user confirmation."""
|
||||||
print(metadata.json(indent=2))
|
print(metadata.json(indent=2))
|
||||||
return input('Confirm device metadata? (y/n): ').strip().lower() == 'y'
|
return input("Confirm device metadata? (y/n): ").strip().lower() == 'y'
|
||||||
|
|
||||||
|
|
||||||
def get_device_metadata_from_user() -> DeviceMetadata:
|
def get_device_metadata_from_user() -> DeviceMetadata:
|
||||||
'''Prompt the user to enter device details and return a populated DeviceMetadata object.'''
|
"""Prompt the user to enter device details and return a populated DeviceMetadata object."""
|
||||||
device_name = input('Device name: ')
|
device_name = input("Device name: ")
|
||||||
device_short_name = device_name.lower().replace(' ', '-')
|
device_short_name = device_name.lower().replace(" ", "-")
|
||||||
return DeviceMetadata(device_name=device_name, device_short_name=device_short_name)
|
return DeviceMetadata(device_name=device_name, device_short_name=device_short_name)
|
||||||
|
|
||||||
|
|
||||||
def initialize_device_root_dir(device_name: str) -> Path:
|
def initialize_device_root_dir(device_name: str) -> Path:
|
||||||
'''Create and return the path for the device directory.'''
|
"""Create and return the path for the device directory."""
|
||||||
device_path = Path.cwd() / device_name
|
device_path = Path.cwd() / device_name
|
||||||
device_path.mkdir(exist_ok=True)
|
device_path.mkdir(exist_ok=True)
|
||||||
return device_path
|
return device_path
|
||||||
|
|
||||||
|
|
||||||
def write_metadata(metadata: BaseModel, device_name: str):
|
def write_metadata(metadata: BaseModel, device_name: str):
|
||||||
'''Write device metadata to a JSON file.'''
|
"""Write device metadata to a JSON file."""
|
||||||
meta_path = Path.cwd() / device_name / DEVICE_METADATA_FILE
|
meta_path = Path.cwd() / device_name / DEVICE_METADATA_FILE
|
||||||
meta_path.parent.mkdir(parents=True, exist_ok=True)
|
meta_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
with meta_path.open('w') as f:
|
with meta_path.open('w') as f:
|
||||||
@ -42,19 +42,19 @@ def write_metadata(metadata: BaseModel, device_name: str):
|
|||||||
|
|
||||||
|
|
||||||
def get_device_metadata(file_path: Path) -> DeviceMetadata | None:
|
def get_device_metadata(file_path: Path) -> DeviceMetadata | None:
|
||||||
'''Fetch device metadata from a JSON file.'''
|
"""Fetch device metadata from a JSON file."""
|
||||||
|
|
||||||
if dev_metadata_exists(file_path):
|
if dev_metadata_exists(file_path):
|
||||||
with file_path.open('r') as f:
|
with file_path.open('r') as f:
|
||||||
device_metadata_json = json.load(f)
|
device_metadata_json = json.load(f)
|
||||||
try:
|
try:
|
||||||
device_metadata = DeviceMetadata.from_json(device_metadata_json)
|
device_metadata = DeviceMetadata.model_validate_json(device_metadata_json)
|
||||||
return device_metadata
|
return device_metadata
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
print(f'Validation error for device metadata: {e}')
|
print(f"Validation error for device metadata: {e}")
|
||||||
else:
|
else:
|
||||||
# TODO Decide what to do (e.g. search for file etc)
|
# TODO Decide what to do (e.g. search for file etc)
|
||||||
print(f'No device metadata at {file_path}')
|
print(f"No device metadata at {file_path}")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = 'iottb'
version = '0.1.0'
authors = [{name = "Sebastian Lenzlinger", email = "sebastian.lenzlinger@unibas.ch"}]
description = "Automation Tool for Capturing Network packets of IoT devices."
requires-python = ">=3.8"

[tool.setuptools]
packages = ["iottb"]

[project.scripts]
iottb = "iottb.__main__:main"
@ -1,47 +0,0 @@
|
|||||||
import sys
|
|
||||||
import unittest
|
|
||||||
from io import StringIO
|
|
||||||
from unittest.mock import patch, MagicMock
|
|
||||||
from pathlib import Path
|
|
||||||
from iottb.definitions import DEVICE_METADATA_FILE
|
|
||||||
import shutil
|
|
||||||
from iottb.__main__ import main
|
|
||||||
|
|
||||||
|
|
||||||
class TestDeviceMetadataFileCreation(unittest.TestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.test_dir = Path('/tmp/iottbtest/test_add_device')
|
|
||||||
self.test_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
# self.captured_output = StringIO()
|
|
||||||
# sys.stdout = self.captured_output
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
# shutil.rmtree(str(self.test_dir))
|
|
||||||
for item in self.test_dir.iterdir():
|
|
||||||
if item.is_dir():
|
|
||||||
item.rmdir()
|
|
||||||
else:
|
|
||||||
item.unlink()
|
|
||||||
self.test_dir.rmdir()
|
|
||||||
# sys.stdout = sys.__stdout__
|
|
||||||
|
|
||||||
@patch('builtins.input', side_effect=['iPhone 14', 'y', 'y'])
|
|
||||||
def test_guided_device_setup(self, mock_input):
|
|
||||||
sys.argv = ['__main__.py', 'add', '--root_dir', str(self.test_dir), '--guided']
|
|
||||||
main()
|
|
||||||
expected_file = self.test_dir / DEVICE_METADATA_FILE
|
|
||||||
self.assertTrue(expected_file.exists(), f'Expected file not created: {expected_file}')
|
|
||||||
|
|
||||||
@patch('builtins.input', side_effect=['y']) # need mock_input else wont work
|
|
||||||
def test_device_setup(self, mock_input):
|
|
||||||
sys.argv = ['__main__.py', 'add', '--root_dir', str(self.test_dir), '--name', 'iPhone 14']
|
|
||||||
main()
|
|
||||||
expected_file = self.test_dir / DEVICE_METADATA_FILE
|
|
||||||
self.assertTrue(expected_file.exists(), f'Expected file not created: {expected_file}')
|
|
||||||
|
|
||||||
def test_add_when_file_exists(self):
|
|
||||||
# TODO
|
|
||||||
pass
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
def test_save_to_json():
|
|
||||||
assert False
|
|
||||||
@ -1,2 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
@ -1,28 +0,0 @@
|
|||||||
BSD 3-Clause License
|
|
||||||
|
|
||||||
Copyright (c) 2024, Sebastian Lenzlinger
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright notice, this
|
|
||||||
list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
3. Neither the name of the copyright holder nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from
|
|
||||||
this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
||||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
||||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
||||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
||||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
@ -1,82 +0,0 @@
|
|||||||
# Iottb

## Installation
There are a few different ways to install `iottb`.

On Linux, to install into a user's local bin directory using poetry or pip:
- Move into the project root (`cd path/to/iottb-project`) so that you are in the directory which contains the `pyproject.toml` file.
```bash
poetry install --editable
# or with pip
pip install -e .
```
Currently, this is the recommended method.
Alternatively, install with pip into any activated environment:
```bash
pip install -r requirements.txt
```

It is also possible to build a single executable for your machine with pyinstaller and put it on your PATH.
1. Install pyinstaller
```bash
pip install pyinstaller
```
2. Make the executable
```bash
pyinstaller --onefile --name iottb --distpath ~/opt iottb/main.py
```
You can then run it as `iottb`, provided `~/opt` is a directory on your PATH.
An executable which should run on Linux is included in the repo.
## Basic Invocation
```bash
Usage: iottb [OPTIONS] COMMAND [ARGS]...

Options:
  -v, --verbosity    Set verbosity [default: 0; 0<=x<=3]
  -d, --debug        Enable debug mode
  --cfg-file PATH    Path to iottb config file [default:
                     /home/seb/.config/iottb/iottb.cfg]
  --help             Show this message and exit.

  --dry-run BOOLEAN  currently NOT USED! [default: True]

Commands:
  add-device  Add a device to a database
  init-db
  sniff       Sniff packets with tcpdump

Debugging Commands:
  show-all  Show everything: configuration, databases, and...
  show-cfg  Show the current configuration context
```

## Usage Examples
### Initializing a database
Before devices can be added and packet captures performed, there must be a database.
Initialize a database with default values at the default location:
```bash
iottb init-db
```
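If the database should live somewhere other than the default location, `init-db` also accepts a destination and a name (see `iottb init-db --help`). A minimal sketch, assuming `~/iottb-data` is a directory you want to use:
```bash
# --dest, --name and --update-default are the documented options; the values are placeholders.
iottb init-db --dest ~/iottb-data --name mydb.db --update-default
```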

### Adding a device
Typically, captures are performed for devices. To add a device (to the current default database):
```bash
iottb add-device 'Echo Dot 2'
```
if the device is named 'Echo Dot 2'. It will be given the canonical name 'echo-dot'. This name should be used when performing
captures with `iottb`.
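If you prefer to enter the device metadata interactively, the `--guided` flag starts a prompt-based setup instead of taking everything on the command line:
```bash
# Interactive variant; prompts for name, model, manufacturer, aliases, etc.
iottb add-device --guided
```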

### Performing captures/sniffing traffic
```bash
iottb sniff -a <ipv4-addr or mac-addr> 'echo-dot'
```
to sniff traffic on the previously added device 'Echo Dot 2', which received the canonical name 'echo-dot'.
You can get subcommand-specific help text by adding the `--help` option.
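A more explicit invocation can pin the capture interface, packet count and companion app; the interface name, address and app below are only example values for your own setup:
```bash
# wlan0, 192.168.1.23 and 'Alexa' are placeholders; adjust them to your environment.
iottb sniff -i wlan0 -c 500 -a 192.168.1.23 --app 'Alexa' 'echo-dot'
```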

## Configuration
### Env Vars
- IOTTB_CONF_HOME

  Setting this variable controls where iottb looks for its base application configuration.
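For example, a minimal setup might export it from your shell profile (the path is only illustrative):
```bash
export IOTTB_CONF_HOME="$HOME/.config/iottb"
```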

## License
This project is licensed under a BSD 3-clause License, a copy of which is provided in the file `code/iottb-project/LICENSE`.
@ -1,110 +0,0 @@
|
|||||||
Usage: iottb [OPTIONS] COMMAND [ARGS]...
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-v, --verbosity Set verbosity [default: 0; 0<=x<=3]
|
|
||||||
-d, --debug Enable debug mode
|
|
||||||
--dry-run [default: True]
|
|
||||||
--cfg-file PATH Path to iottb config file [default:
|
|
||||||
/home/seb/.config/iottb/iottb.cfg]
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
add-device Add a device to a database
|
|
||||||
init-db
|
|
||||||
rm-cfg Removes the cfg file from the filesystem.
|
|
||||||
rm-dbs Removes ALL(!) databases from the filesystem if...
|
|
||||||
set-key-in-table-to Edit config or metadata files.
|
|
||||||
show-all Show everything: configuration, databases, and...
|
|
||||||
show-cfg Show the current configuration context
|
|
||||||
sniff Sniff packets with tcpdump
|
|
||||||
Usage: iottb init-db [OPTIONS]
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-d, --dest PATH Location to put (new) iottb database
|
|
||||||
-n, --name TEXT Name of new database. [default: iottb.db]
|
|
||||||
--update-default / --no-update-default
|
|
||||||
If new db should be set as the new default
|
|
||||||
[default: update-default]
|
|
||||||
--help Show this message and exit.
|
|
||||||
Usage: iottb add-device [OPTIONS]
|
|
||||||
|
|
||||||
Add a device to a database
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--dev, --device-name TEXT The name of the device to be added. If this
|
|
||||||
string contains spaces or other special
|
|
||||||
characters normalization is
|
|
||||||
performed to derive a canonical name [required]
|
|
||||||
--db, --database DIRECTORY Database in which to add this device. If not
|
|
||||||
specified use default from config. [env var:
|
|
||||||
IOTTB_DB]
|
|
||||||
--guided Add device interactively [env var:
|
|
||||||
IOTTB_GUIDED_ADD]
|
|
||||||
--help Show this message and exit.
|
|
||||||
Usage: iottb sniff [OPTIONS] [TCPDUMP-ARGS] [DEVICE]
|
|
||||||
|
|
||||||
Sniff packets with tcpdump
|
|
||||||
|
|
||||||
Options:
|
|
||||||
Testbed sources:
|
|
||||||
--db, --database TEXT Database of device. Only needed if not current
|
|
||||||
default. [env var: IOTTB_DB]
|
|
||||||
--app TEXT Companion app being used during capture
|
|
||||||
Runtime behaviour:
|
|
||||||
--unsafe Disable checks for otherwise required options.
|
|
||||||
[env var: IOTTB_UNSAFE]
|
|
||||||
--guided [env var: IOTTB_GUIDED]
|
|
||||||
--pre TEXT Script to be executed before main command is
|
|
||||||
started.
|
|
||||||
--post TEXT Script to be executed upon completion of main
|
|
||||||
command.
|
|
||||||
Tcpdump options:
|
|
||||||
-i, --interface TEXT Network interface to capture on.If not specified
|
|
||||||
tcpdump tries to find and appropriate one. [env
|
|
||||||
var: IOTTB_CAPTURE_INTERFACE]
|
|
||||||
-a, --address TEXT IP or MAC address to filter packets by. [env var:
|
|
||||||
IOTTB_CAPTURE_ADDRESS]
|
|
||||||
-I, --monitor-mode Put interface into monitor mode.
|
|
||||||
--ff TEXT tcpdump filter as string or file path. [env var:
|
|
||||||
IOTTB_CAPTURE_FILTER]
|
|
||||||
-#, --print-pacno Print packet number at beginning of line. True by
|
|
||||||
default. [default: True]
|
|
||||||
-e, --print-ll Print link layer headers. True by default.
|
|
||||||
-c, --count INTEGER Number of packets to capture. [default: 1000]
|
|
||||||
--help Show this message and exit.
|
|
||||||
Utility Commands mostly for development
|
|
||||||
Usage: iottb rm-cfg [OPTIONS]
|
|
||||||
|
|
||||||
Removes the cfg file from the filesystem.
|
|
||||||
|
|
||||||
This is mostly a utility during development. Once non-standard database
|
|
||||||
locations are implemented, deleting this would lead to iottb not being able
|
|
||||||
to find them anymore.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--yes Confirm the action without prompting.
|
|
||||||
--help Show this message and exit.
|
|
||||||
Usage: iottb rm-dbs [OPTIONS]
|
|
||||||
|
|
||||||
Removes ALL(!) databases from the filesystem if they're empty.
|
|
||||||
|
|
||||||
Development utility currently unfit for use.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--yes Confirm the action without prompting.
|
|
||||||
--help Show this message and exit.
|
|
||||||
Usage: iottb show-cfg [OPTIONS]
|
|
||||||
|
|
||||||
Show the current configuration context
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--cfg-file PATH Path to the config file [default:
|
|
||||||
/home/seb/.config/iottb/iottb.cfg]
|
|
||||||
-pp Pretty Print
|
|
||||||
--help Show this message and exit.
|
|
||||||
Usage: iottb show-all [OPTIONS]
|
|
||||||
|
|
||||||
Show everything: configuration, databases, and device metadata
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--help Show this message and exit.
|
|
||||||
@ -1,38 +0,0 @@
|
|||||||
Usage: iottb [OPTIONS] COMMAND [ARGS]...
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-v, --verbosity Set verbosity [default: 0; 0<=x<=3]
|
|
||||||
-d, --debug Enable debug mode
|
|
||||||
--dry-run [default: True]
|
|
||||||
--cfg-file PATH Path to iottb config file [default:
|
|
||||||
/home/seb/.config/iottb/iottb.cfg]
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
add-device Add a device to a database
|
|
||||||
init-db
|
|
||||||
rm-cfg Removes the cfg file from the filesystem.
|
|
||||||
rm-dbs Removes ALL(!) databases from the filesystem if...
|
|
||||||
set-key-in-table-to Edit config or metadata files.
|
|
||||||
show-all Show everything: configuration, databases, and...
|
|
||||||
show-cfg Show the current configuration context
|
|
||||||
sniff Sniff packets with tcpdump
|
|
||||||
Usage: iottb [OPTIONS] COMMAND [ARGS]...
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-v, --verbosity Set verbosity [default: 0; 0<=x<=3]
|
|
||||||
-d, --debug Enable debug mode
|
|
||||||
--dry-run [default: True]
|
|
||||||
--cfg-file PATH Path to iottb config file [default:
|
|
||||||
/home/seb/.config/iottb/iottb.cfg]
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
add-device Add a device to a database
|
|
||||||
init-db
|
|
||||||
rm-cfg Removes the cfg file from the filesystem.
|
|
||||||
rm-dbs Removes ALL(!) databases from the filesystem if...
|
|
||||||
set-key-in-table-to Edit config or metadata files.
|
|
||||||
show-all Show everything: configuration, databases, and...
|
|
||||||
show-cfg Show the current configuration context
|
|
||||||
sniff Sniff packets with tcpdump
|
|
||||||
@ -1,142 +0,0 @@
|
|||||||
# Main Command: `iottb`
|
|
||||||
|
|
||||||
Usage: `iottb [OPTIONS] COMMAND [ARGS]...`
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-v, --verbosity Set verbosity [0<=x<=3] \n
|
|
||||||
-d, --debug Enable debug mode
|
|
||||||
--dry-run
|
|
||||||
--cfg-file PATH Path to iottb config file
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
add-device Add a device to a database
|
|
||||||
init-db
|
|
||||||
rm-cfg Removes the cfg file from the filesystem.
|
|
||||||
rm-dbs Removes ALL(!) databases from the filesystem if...
|
|
||||||
set-key-in-table-to Edit config or metadata files.
|
|
||||||
show-all Show everything: configuration, databases, and...
|
|
||||||
show-cfg Show the current configuration context
|
|
||||||
sniff Sniff packets with tcpdump
|
|
||||||
|
|
||||||
|
|
||||||
Command: init-db
|
|
||||||
Usage: [OPTIONS]
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-d, --dest PATH Location to put (new) iottb database
|
|
||||||
-n, --name TEXT Name of new database.
|
|
||||||
--update-default / --no-update-default
|
|
||||||
If new db should be set as the new default
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
Command: rm-cfg
|
|
||||||
Usage: [OPTIONS]
|
|
||||||
|
|
||||||
Removes the cfg file from the filesystem.
|
|
||||||
|
|
||||||
This is mostly a utility during development. Once non-standard database
|
|
||||||
locations are implemented, deleting this would lead to iottb not being able
|
|
||||||
to find them anymore.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--yes Confirm the action without prompting.
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
Command: set-key-in-table-to
|
|
||||||
Usage: [OPTIONS]
|
|
||||||
|
|
||||||
Edit config or metadata files. TODO: Implement
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--file TEXT
|
|
||||||
--table TEXT
|
|
||||||
--key TEXT
|
|
||||||
--value TEXT
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
Command: rm-dbs
|
|
||||||
Usage: [OPTIONS]
|
|
||||||
|
|
||||||
Removes ALL(!) databases from the filesystem if they're empty.
|
|
||||||
|
|
||||||
Development utility currently unfit for use.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--yes Confirm the action without prompting.
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
Command: add-device
|
|
||||||
Usage: [OPTIONS]
|
|
||||||
|
|
||||||
Add a device to a database
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--dev, --device-name TEXT The name of the device to be added. If this
|
|
||||||
string contains spaces or other special
|
|
||||||
characters normalization is
|
|
||||||
performed to derive a canonical name [required]
|
|
||||||
--db, --database DIRECTORY Database in which to add this device. If not
|
|
||||||
specified use default from config. [env var:
|
|
||||||
IOTTB_DB]
|
|
||||||
--guided Add device interactively [env var:
|
|
||||||
IOTTB_GUIDED_ADD]
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
Command: show-cfg
|
|
||||||
Usage: [OPTIONS]
|
|
||||||
|
|
||||||
Show the current configuration context
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--cfg-file PATH Path to the config file
|
|
||||||
-pp Pretty Print
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
Command: sniff
|
|
||||||
Usage: [OPTIONS] [TCPDUMP-ARGS] [DEVICE]
|
|
||||||
|
|
||||||
Sniff packets with tcpdump
|
|
||||||
|
|
||||||
Options:
|
|
||||||
Testbed sources:
|
|
||||||
--db, --database TEXT Database of device. Only needed if not current
|
|
||||||
default. [env var: IOTTB_DB]
|
|
||||||
--app TEXT Companion app being used during capture
|
|
||||||
Runtime behaviour:
|
|
||||||
--unsafe Disable checks for otherwise required options.
|
|
||||||
[env var: IOTTB_UNSAFE]
|
|
||||||
--guided [env var: IOTTB_GUIDED]
|
|
||||||
--pre PATH Script to be executed before main commandis
|
|
||||||
started.
|
|
||||||
Tcpdump options:
|
|
||||||
-i, --interface TEXT Network interface to capture on.If not specified
|
|
||||||
tcpdump tries to find and appropriate one. [env
|
|
||||||
var: IOTTB_CAPTURE_INTERFACE]
|
|
||||||
-a, --address TEXT IP or MAC address to filter packets by. [env var:
|
|
||||||
IOTTB_CAPTURE_ADDRESS]
|
|
||||||
-I, --monitor-mode Put interface into monitor mode.
|
|
||||||
--ff TEXT tcpdump filter as string or file path. [env var:
|
|
||||||
IOTTB_CAPTURE_FILTER]
|
|
||||||
-#, --print-pacno Print packet number at beginning of line. True by
|
|
||||||
default.
|
|
||||||
-e, --print-ll Print link layer headers. True by default.
|
|
||||||
-c, --count INTEGER Number of packets to capture.
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
Command: show-all
|
|
||||||
Usage: [OPTIONS]
|
|
||||||
|
|
||||||
Show everything: configuration, databases, and device metadata
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--help Show this message and exit.
|
|
||||||
|
|
||||||
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from iottb import definitions
|
|
||||||
import logging
|
|
||||||
from iottb.utils.user_interaction import tb_echo
|
|
||||||
import click
|
|
||||||
|
|
||||||
click.echo = tb_echo # This is very hacky
|
|
||||||
logging.basicConfig(level=definitions.LOGLEVEL)
|
|
||||||
log_dir = definitions.LOGDIR
|
|
||||||
# Ensure logs dir exists before new handlers are registered in main.py
|
|
||||||
if not log_dir.is_dir():
|
|
||||||
log_dir.mkdir()
|
|
||||||
|
|
||||||
DOCS_FOLDER = Path.cwd() / 'docs'
|
|
||||||
|
|
||||||
@ -1,199 +0,0 @@
|
|||||||
import json
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import click
|
|
||||||
from pathlib import Path
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
|
|
||||||
from iottb import definitions
|
|
||||||
from iottb.models.device_metadata import DeviceMetadata
|
|
||||||
from iottb.models.iottb_config import IottbConfig
|
|
||||||
from iottb.definitions import CFG_FILE_PATH, TB_ECHO_STYLES
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def prompt_for_device_details():
|
|
||||||
device_details = {}
|
|
||||||
aliases = []
|
|
||||||
while True:
|
|
||||||
click.echo("\nEnter the details for the new device:")
|
|
||||||
click.echo("1. Device Name")
|
|
||||||
click.echo("2. Description")
|
|
||||||
click.echo("3. Model")
|
|
||||||
click.echo("4. Manufacturer")
|
|
||||||
click.echo("5. Current Firmware Version")
|
|
||||||
click.echo("6. Device Type")
|
|
||||||
click.echo("7. Supported Interfaces")
|
|
||||||
click.echo("8. Companion Applications")
|
|
||||||
click.echo("9. Add Alias")
|
|
||||||
click.echo("10. Finish and Save")
|
|
||||||
|
|
||||||
choice = click.prompt("Choose an option", type=int)
|
|
||||||
|
|
||||||
if choice == 1:
|
|
||||||
device_details['device_name'] = click.prompt("Enter the device name")
|
|
||||||
elif choice == 2:
|
|
||||||
device_details['description'] = click.prompt("Enter the description")
|
|
||||||
elif choice == 3:
|
|
||||||
device_details['model'] = click.prompt("Enter the model")
|
|
||||||
elif choice == 4:
|
|
||||||
device_details['manufacturer'] = click.prompt("Enter the manufacturer")
|
|
||||||
elif choice == 5:
|
|
||||||
device_details['firmware_version'] = click.prompt("Enter the current firmware version")
|
|
||||||
elif choice == 6:
|
|
||||||
device_details['device_type'] = click.prompt("Enter the device type")
|
|
||||||
elif choice == 7:
|
|
||||||
device_details['supported_interfaces'] = click.prompt("Enter the supported interfaces")
|
|
||||||
elif choice == 8:
|
|
||||||
device_details['companion_applications'] = click.prompt("Enter the companion applications")
|
|
||||||
elif choice == 9:
|
|
||||||
alias = click.prompt("Enter an alias")
|
|
||||||
aliases.append(alias)
|
|
||||||
elif choice == 10:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
click.echo("Invalid choice. Please try again.")
|
|
||||||
|
|
||||||
device_details['aliases'] = aliases
|
|
||||||
return device_details
|
|
||||||
|
|
||||||
|
|
||||||
def confirm_and_add_device(device_details, db_path):
|
|
||||||
click.echo("\nDevice metadata:")
|
|
||||||
for key, value in device_details.items():
|
|
||||||
click.echo(f"{key.replace('_', ' ').title()}: {value}")
|
|
||||||
|
|
||||||
confirm = click.confirm("Do you want to add this device with above metadata?")
|
|
||||||
if confirm:
|
|
||||||
device_name = device_details.get('device_name')
|
|
||||||
if not device_name:
|
|
||||||
click.echo("Device name is required. Exiting...")
|
|
||||||
return
|
|
||||||
|
|
||||||
device_metadata = DeviceMetadata(**device_details)
|
|
||||||
device_dir = db_path / device_metadata.canonical_name
|
|
||||||
|
|
||||||
if device_dir.exists():
|
|
||||||
click.echo(f"Device {device_name} already exists in the database.")
|
|
||||||
click.echo("Exiting...")
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
device_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
metadata_path = device_dir / definitions.DEVICE_METADATA_FILE_NAME
|
|
||||||
device_metadata.save_metadata_to_file(metadata_path)
|
|
||||||
click.echo(f"Successfully added device {device_name} to database.")
|
|
||||||
except OSError as e:
|
|
||||||
click.echo(f"Error trying to create device directory: {e}")
|
|
||||||
click.echo("Exiting...")
|
|
||||||
else:
|
|
||||||
click.echo("Operation cancelled. Exiting...")
|
|
||||||
|
|
||||||
|
|
||||||
def add_device_guided(cfg, db):
|
|
||||||
logger.info('Adding device interactively')
|
|
||||||
# logger.debug(f'Parameters: {params}. value: {value}')
|
|
||||||
databases = cfg.db_path_dict
|
|
||||||
if not databases:
|
|
||||||
click.echo('No databases found in config file.')
|
|
||||||
return
|
|
||||||
click.echo('Available Databases:')
|
|
||||||
last = 0
|
|
||||||
for i, db_name in enumerate(databases.keys(), start=1):
|
|
||||||
click.echo(f'[{i}] {db_name}')
|
|
||||||
last = i if last < i else last
|
|
||||||
db_choice = click.prompt(f'Select the database to add the new device to (1 - {last}, 0 to quit)',
|
|
||||||
type=int, default=1)
|
|
||||||
if 1 <= db_choice <= last:
|
|
||||||
selected_db = list(databases.keys())[db_choice - 1]
|
|
||||||
click.confirm(f'Use {selected_db}?', abort=True)
|
|
||||||
db_path = Path(databases[selected_db]) / selected_db
|
|
||||||
logger.debug(f'DB Path {str(db_path)}')
|
|
||||||
device_details = prompt_for_device_details()
|
|
||||||
confirm_and_add_device(device_details, db_path)
|
|
||||||
elif db_choice == 0:
|
|
||||||
click.echo(f'Quitting...')
|
|
||||||
else:
|
|
||||||
click.echo(f'{db_choice} is not a valid choice. Please rerun command and select a valid database.')
|
|
||||||
|
|
||||||
|
|
||||||
@click.command('add-device', help='Add a device to a database')
|
|
||||||
@click.argument('device', type=str, default="")
|
|
||||||
@click.option('--db', '--database', type=str,
|
|
||||||
envvar='IOTTB_DB', show_envvar=True, default="",
|
|
||||||
help='Database in which to add this device. If not specified use default from config.')
|
|
||||||
@click.option('--guided', is_flag=True,
|
|
||||||
help='Add device interactively')
|
|
||||||
def add_device(device, db, guided):
|
|
||||||
"""Add a new device to a database
|
|
||||||
|
|
||||||
Device name must be supplied unless in an interactive setup.
|
|
||||||
Database is taken from config by default.
|
|
||||||
If this device name contains spaces or other special characters normalization is performed to derive a canonical name.
|
|
||||||
"""
|
|
||||||
logger.info('add-device invoked')
|
|
||||||
|
|
||||||
# Step 1: Load Config
|
|
||||||
# Dependency: Config file must exist
|
|
||||||
config = IottbConfig(Path(CFG_FILE_PATH))
|
|
||||||
logger.debug(f'Config loaded: {config}')
|
|
||||||
# If guided flag set, continue with guided add and leave
|
|
||||||
if guided:
|
|
||||||
click.echo('Guided option set. Continuing with guided add.')
|
|
||||||
add_device_guided(config, db)  # the guided flow prompts for the device details itself
|
|
||||||
logger.info('Finished guided device add.')
|
|
||||||
return
|
|
||||||
|
|
||||||
# Step 2: Load database
|
|
||||||
# dependency: Database folder must exist
|
|
||||||
if db != "":
|
|
||||||
database = db
|
|
||||||
path = config.db_path_dict[database]
|
|
||||||
logger.debug(f'Resolved (path, db) {path}, {database}')
|
|
||||||
else:
|
|
||||||
path = config.default_db_location
|
|
||||||
database = config.default_database
|
|
||||||
logger.debug(f'Default (path, db) {path}, {database}')
|
|
||||||
click.secho(f'Using database {database}')
|
|
||||||
full_db_path = Path(path) / database
|
|
||||||
if not full_db_path.is_dir():
|
|
||||||
logger.warning(f'No database at {database}')
|
|
||||||
click.echo(f'No database found at {full_db_path}', lvl='w')
|
|
||||||
click.echo(
|
|
||||||
f'You need to initialize the testbed before you add devices!')
|
|
||||||
click.echo(
|
|
||||||
f'To initialize the testbed in the default location run "iottb init-db"')
|
|
||||||
click.echo('Exiting...')
|
|
||||||
sys.exit()
|
|
||||||
# Ensure a device name was passed as argument
|
|
||||||
if device == "":
|
|
||||||
click.echo("Device name cannot be an empty string. Exiting...", lvl='w')
|
|
||||||
return
|
|
||||||
|
|
||||||
# Step 3: Check if device already exists in database
|
|
||||||
# dependency: DeviceMetadata object
|
|
||||||
device_metadata = DeviceMetadata(device_name=device)
|
|
||||||
device_dir = full_db_path / device_metadata.canonical_name
|
|
||||||
|
|
||||||
# Check if device is already registered
|
|
||||||
if device_dir.exists():
|
|
||||||
logger.warning(f'Device directory {device_dir} already exists.')
|
|
||||||
click.echo(f'Device {device} already exists in the database.')
|
|
||||||
click.echo('Exiting...')
|
|
||||||
sys.exit()
|
|
||||||
try:
|
|
||||||
device_dir.mkdir()
|
|
||||||
except OSError as e:
|
|
||||||
logger.error(f'Error trying to create device {e}')
|
|
||||||
click.echo('Exiting...')
|
|
||||||
sys.exit()
|
|
||||||
|
|
||||||
# Step 4: Save metadata into device_dir
|
|
||||||
metadata_path = device_dir / definitions.DEVICE_METADATA_FILE_NAME
|
|
||||||
with metadata_path.open('w') as metadata_file:
|
|
||||||
json.dump(device_metadata.__dict__, metadata_file, indent=4)
|
|
||||||
click.echo(f'Successfully added device {device} to database')
|
|
||||||
logger.debug(f'Added device {device} to database {database}. Full path of metadata {metadata_path}')
|
|
||||||
logger.info(f'Metadata for {device} {device_metadata.print_attributes()}')
|
|
||||||
@ -1,130 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
import logging
|
|
||||||
import click
|
|
||||||
|
|
||||||
from iottb import tb_echo
|
|
||||||
from iottb.definitions import DB_NAME, CFG_FILE_PATH
|
|
||||||
from iottb.models.iottb_config import IottbConfig
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@click.group('util')
|
|
||||||
def tb():
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
|
||||||
@click.option('--file', default=DB_NAME)
|
|
||||||
@click.option('--table', type=str, default='DefaultDatabase')
|
|
||||||
@click.option('--key')
|
|
||||||
@click.option('--value')
|
|
||||||
@click.pass_context
|
|
||||||
def set_key_in_table_to(ctx, file, table, key, value):
|
|
||||||
"""Edit config or metadata files. TODO: Implement"""
|
|
||||||
click.echo(f'set_key_in_table_to invoked')
|
|
||||||
logger.warning("Unimplemented subcommand invoked.")
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
|
||||||
@click.confirmation_option(prompt="Are you certain that you want to delete the cfg file?")
|
|
||||||
def rm_cfg():
|
|
||||||
""" Removes the cfg file from the filesystem.
|
|
||||||
|
|
||||||
This is mostly a utility during development. Once non-standard database locations are implemented,
|
|
||||||
deleting this would lead to iottb not being able to find them anymore.
|
|
||||||
"""
|
|
||||||
Path(CFG_FILE_PATH).unlink()
|
|
||||||
click.echo(f'Iottb configuration removed at {CFG_FILE_PATH}')
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
|
||||||
@click.confirmation_option(prompt="Are you certain that you want to delete the databases file?")
|
|
||||||
def rm_dbs():
|
|
||||||
""" Removes ALL(!) databases from the filesystem if they're empty.
|
|
||||||
|
|
||||||
Development utility currently unfit for use.
|
|
||||||
"""
|
|
||||||
config = IottbConfig()
|
|
||||||
paths = config.get_know_database_paths()
|
|
||||||
logger.debug(f'Known db paths: {str(paths)}')
|
|
||||||
for dbs in paths:
|
|
||||||
try:
|
|
||||||
Path(dbs).rmdir()
|
|
||||||
click.echo(f'{dbs} deleted')
|
|
||||||
except Exception as e:
|
|
||||||
logger.debug(f'Failed unlinking db {dbs} with error {e}')
|
|
||||||
logger.info(f'All databases deleted')
|
|
||||||
|
|
||||||
|
|
||||||
@click.command('show-cfg', help='Show the current configuration context')
|
|
||||||
@click.option('--cfg-file', type=click.Path(), default=CFG_FILE_PATH, help='Path to the config file')
|
|
||||||
@click.option('-pp', is_flag=True, default=False, help='Pretty Print')
|
|
||||||
@click.pass_context
|
|
||||||
def show_cfg(ctx, cfg_file, pp):
|
|
||||||
logger.debug(f'Pretty print option set to {pp}')
|
|
||||||
if pp:
|
|
||||||
try:
|
|
||||||
config = IottbConfig(Path(cfg_file))
|
|
||||||
click.echo("Configuration Context:")
|
|
||||||
click.echo(f"Default Database: {config.default_database}")
|
|
||||||
click.echo(f"Default Database Path: {config.default_db_location}")
|
|
||||||
click.echo("Database Locations:")
|
|
||||||
for db_name, db_path in config.db_path_dict.items():
|
|
||||||
click.echo(f" - {db_name}: {db_path}")
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error loading configuration: {e}")
|
|
||||||
click.echo(f"Failed to load configuration from {cfg_file}")
|
|
||||||
else:
|
|
||||||
path = Path(cfg_file)
|
|
||||||
|
|
||||||
if path.is_file():
|
|
||||||
with path.open('r') as file:
|
|
||||||
content = file.read()
|
|
||||||
click.echo(content)
|
|
||||||
else:
|
|
||||||
click.echo(f"Configuration file not found at {cfg_file}")
|
|
||||||
|
|
||||||
|
|
||||||
@click.command('show-all', help='Show everything: configuration, databases, and device metadata')
|
|
||||||
@click.pass_context
|
|
||||||
def show_everything(ctx):
|
|
||||||
"""Show everything that can be recursively found based on config except file contents."""
|
|
||||||
config = ctx.obj['CONFIG']
|
|
||||||
click.echo("Configuration Context:")
|
|
||||||
click.echo(f"Default Database: {config.default_database}")
|
|
||||||
click.echo(f"Default Database Path: {config.default_db_location}")
|
|
||||||
click.echo("Database Locations:")
|
|
||||||
everything_dict = {}
|
|
||||||
for db_name, db_path in config.db_path_dict.items():
|
|
||||||
|
|
||||||
click.echo(f" - {db_name}: {db_path}")
|
|
||||||
for db_name, db_path in config.db_path_dict.items():
|
|
||||||
full_db_path = Path(db_path) / db_name
|
|
||||||
if full_db_path.is_dir():
|
|
||||||
click.echo(f"\nContents of {full_db_path}:")
|
|
||||||
flag = True
|
|
||||||
for item in full_db_path.iterdir():
|
|
||||||
flag = False
|
|
||||||
if item.is_file():
|
|
||||||
click.echo(f" - {item.name}")
|
|
||||||
try:
|
|
||||||
with item.open('r', encoding='utf-8') as file:
|
|
||||||
content = file.read()
|
|
||||||
click.echo(f" Content:\n{content}")
|
|
||||||
except UnicodeDecodeError:
|
|
||||||
click.echo(" Content is not readable as text")
|
|
||||||
elif item.is_dir():
|
|
||||||
click.echo(f" - {item.name}/")
|
|
||||||
for subitem in item.iterdir():
|
|
||||||
if subitem.is_file():
|
|
||||||
click.echo(f" - {subitem.name}")
|
|
||||||
elif subitem.is_dir():
|
|
||||||
click.echo(f" - {subitem.name}/")
|
|
||||||
if flag:
|
|
||||||
tb_echo(f'\t EMPTY')
|
|
||||||
else:
|
|
||||||
click.echo(f"{full_db_path} is not a directory")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@ -1,347 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from time import time
|
|
||||||
|
|
||||||
import click
|
|
||||||
from click_option_group import optgroup
|
|
||||||
|
|
||||||
from iottb.utils.string_processing import make_canonical_name
|
|
||||||
|
|
||||||
# Setup logger
|
|
||||||
logger = logging.getLogger('iottb.sniff')
|
|
||||||
|
|
||||||
|
|
||||||
def is_ip_address(address):
|
|
||||||
ip_pattern = re.compile(r"^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$")
|
|
||||||
return ip_pattern.match(address) is not None
|
|
||||||
|
|
||||||
|
|
||||||
def is_mac_address(address):
|
|
||||||
mac_pattern = re.compile(r"^([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$")
|
|
||||||
return mac_pattern.match(address) is not None
|
|
||||||
|
|
||||||
|
|
||||||
def load_config(cfg_file):
|
|
||||||
"""Loads configuration from the given file path."""
|
|
||||||
with open(cfg_file, 'r') as config_file:
|
|
||||||
return json.load(config_file)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_sniff(ctx, param, value):
|
|
||||||
logger.info('Validating sniff...')
|
|
||||||
if ctx.params.get('unsafe') and not value:
|
|
||||||
return None
|
|
||||||
if not ctx.params.get('unsafe') and not value:
|
|
||||||
raise click.BadParameter('Address is required unless --unsafe is set.')
|
|
||||||
if not is_ip_address(value) and not is_mac_address(value):
|
|
||||||
raise click.BadParameter('Address must be a valid IP address or MAC address.')
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
def run_pre(pre):
|
|
||||||
subprocess.run(pre, shell=True)
|
|
||||||
logger.debug(f'finished {pre}')
|
|
||||||
|
|
||||||
|
|
||||||
def run_post(post):
|
|
||||||
subprocess.run(post, shell=True)
|
|
||||||
logger.debug(f'finished {post}')
|
|
||||||
|
|
||||||
|
|
||||||
@click.command('sniff', help='Sniff packets with tcpdump')
|
|
||||||
@optgroup.group('Testbed sources')
|
|
||||||
@optgroup.option('--db', '--database', type=str, envvar='IOTTB_DB', show_envvar=True,
|
|
||||||
help='Database of device. Only needed if not current default.')
|
|
||||||
@optgroup.option('--app', type=str, help='Companion app being used during capture', required=False)
|
|
||||||
@optgroup.group('Runtime behaviour')
|
|
||||||
@optgroup.option('--unsafe', is_flag=True, default=False, envvar='IOTTB_UNSAFE', is_eager=True,
|
|
||||||
help='Disable checks for otherwise required options.\n', show_envvar=True)
|
|
||||||
@optgroup.option('--guided', is_flag=True, default=False, envvar='IOTTB_GUIDED', show_envvar=True)
|
|
||||||
@optgroup.option('--pre', help='Script to be executed before main command is started.')
|
|
||||||
@optgroup.option('--post', help='Script to be executed upon completion of main command.')
|
|
||||||
@optgroup.group('Tcpdump options')
|
|
||||||
@optgroup.option('-i', '--interface',
|
|
||||||
help='Network interface to capture on.' +
|
|
||||||
' If not specified, tcpdump tries to find an appropriate one.\n', show_envvar=True,
|
|
||||||
envvar='IOTTB_CAPTURE_INTERFACE')
|
|
||||||
@optgroup.option('-a', '--address', callback=validate_sniff,
|
|
||||||
help='IP or MAC address to filter packets by.\n', show_envvar=True,
|
|
||||||
envvar='IOTTB_CAPTURE_ADDRESS')
|
|
||||||
@optgroup.option('-I', '--monitor-mode', help='Put interface into monitor mode.\n', is_flag=True)
|
|
||||||
@optgroup.option('--ff', type=str, envvar='IOTTB_CAPTURE_FILTER', show_envvar=True,
|
|
||||||
help='tcpdump filter as string or file path.')
|
|
||||||
@optgroup.option('-#', '--print-pacno', is_flag=True, default=True,
|
|
||||||
help='Print packet number at beginning of line. True by default.\n')
|
|
||||||
@optgroup.option('-e', '--print-ll', is_flag=True, default=False,
|
|
||||||
help='Print link layer headers. True by default.')
|
|
||||||
@optgroup.option('-c', '--count', type=int, help='Number of packets to capture.', default=1000)
|
|
||||||
# @optgroup.option('--mins', type=int, help='Time in minutes to capture.', default=1)
|
|
||||||
@click.argument('tcpdump-args', nargs=-1, required=False, metavar='[TCPDUMP-ARGS]')
|
|
||||||
@click.argument('device', required=False)
|
|
||||||
@click.pass_context
|
|
||||||
def sniff(ctx, device, interface, print_pacno, ff, count, monitor_mode, print_ll, address, db, unsafe, guided,
|
|
||||||
app, tcpdump_args, pre, post, **params):
|
|
||||||
""" Sniff packets from a device """
|
|
||||||
logger.info('sniff command invoked')
|
|
||||||
# Step 0: run pre script:
|
|
||||||
if pre:
|
|
||||||
click.echo(f'Running pre command {pre}')
|
|
||||||
run_pre(pre)
|
|
||||||
# Step1: Load Config
|
|
||||||
config = ctx.obj['CONFIG']
|
|
||||||
logger.debug(f'Config loaded: {config}')
|
|
||||||
|
|
||||||
# Step2: determine relevant database
|
|
||||||
database = db if db else config.default_database
|
|
||||||
path = config.db_path_dict[database]
|
|
||||||
full_db_path = Path(path) / database
|
|
||||||
logger.debug(f'Full db path is {str(full_db_path)}')
|
|
||||||
|
|
||||||
# 2.2: Check if it exists
|
|
||||||
if not full_db_path.is_dir():
|
|
||||||
logger.error('DB unexpectedly missing')
|
|
||||||
click.echo('DB unexpectedly missing')
|
|
||||||
return
|
|
||||||
|
|
||||||
canonical_name, aliases = make_canonical_name(device)
|
|
||||||
click.echo(f'Using canonical device name {canonical_name}')
|
|
||||||
device_path = full_db_path / canonical_name
|
|
||||||
|
|
||||||
# Step 3: now the device
|
|
||||||
if not device_path.exists():
|
|
||||||
if not unsafe:
|
|
||||||
logger.error(f'Device path {device_path} does not exist')
|
|
||||||
click.echo(f'Device path {device_path} does not exist')
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
device_path.mkdir(parents=True, exist_ok=True)
|
|
||||||
logger.info(f'Device path {device_path} created')
|
|
||||||
|
|
||||||
click.echo(f'Found device at path {device_path}')
|
|
||||||
# Step 4: Generate filter
|
|
||||||
generic_filter = None
|
|
||||||
cap_filter = None
|
|
||||||
if ff:
|
|
||||||
logger.debug(f'ff: {ff}')
|
|
||||||
if Path(ff).is_file():
|
|
||||||
logger.info('Given filter option is a file')
|
|
||||||
with open(ff, 'r') as f:
|
|
||||||
cap_filter = f.read().strip()
|
|
||||||
else:
|
|
||||||
logger.info('Given filter option is an expression')
|
|
||||||
cap_filter = ff
|
|
||||||
else:
|
|
||||||
if address is not None:
|
|
||||||
if is_ip_address(address):
|
|
||||||
generic_filter = 'net'
|
|
||||||
cap_filter = f'{generic_filter} {address}'
|
|
||||||
elif is_mac_address(address):
|
|
||||||
generic_filter = 'ether host'  # BPF filters on MAC addresses with 'ether host'
|
|
||||||
cap_filter = f'{generic_filter} {address}'
|
|
||||||
elif not unsafe:
|
|
||||||
logger.error('Invalid address format')
|
|
||||||
click.echo('Invalid address format')
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info(f'Generic filter {generic_filter}')
|
|
||||||
click.echo(f'Using filter {cap_filter}')
|
|
||||||
|
|
||||||
# Step 5: prep capture directory
|
|
||||||
capture_date = datetime.now().strftime('%Y-%m-%d')
|
|
||||||
capture_base_dir = device_path / f'sniffs/{capture_date}'
|
|
||||||
capture_base_dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
logger.debug(f"Previous captures {list(capture_base_dir.glob('cap*'))}")
|
|
||||||
capture_count = sum(1 for _ in capture_base_dir.glob('cap*'))
|
|
||||||
logger.debug(f'Capture count is {capture_count}')
|
|
||||||
|
|
||||||
capture_dir = f"cap{capture_count:04d}-{datetime.now().strftime('%H%M')}"
|
|
||||||
logger.debug(f'capture_dir: {capture_dir}')
|
|
||||||
|
|
||||||
# Full path
|
|
||||||
capture_dir_full_path = capture_base_dir / capture_dir
|
|
||||||
capture_dir_full_path.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
click.echo(f'Files will be placed in {str(capture_dir_full_path)}')
|
|
||||||
logger.debug(f'successfully created capture directory')
|
|
||||||
|
|
||||||
# Step 6: Prepare capture file names
|
|
||||||
# Generate UUID for filenames
|
|
||||||
capture_uuid = str(uuid.uuid4())
|
|
||||||
click.echo(f'Capture has id {capture_uuid}')
|
|
||||||
|
|
||||||
pcap_file = f"{canonical_name}_{capture_uuid}.pcap"
|
|
||||||
pcap_file_full_path = capture_dir_full_path / pcap_file
|
|
||||||
stdout_log_file = f'stdout_{capture_uuid}.log'
|
|
||||||
stderr_log_file = f'stderr_{capture_uuid}.log'
|
|
||||||
|
|
||||||
logger.debug(f'Full pcap file path is {pcap_file_full_path}')
|
|
||||||
logger.info(f'pcap file name is {pcap_file}')
|
|
||||||
logger.info(f'stdout log file is {stdout_log_file}')
|
|
||||||
logger.info(f'stderr log file is {stderr_log_file}')
|
|
||||||
|
|
||||||
# Step 7: Build tcpdump command
|
|
||||||
logger.debug(f'pgid {os.getpgrp()}')
|
|
||||||
logger.debug(f'ppid {os.getppid()}')
|
|
||||||
logger.debug(f'(real, effective, saved) user id: {os.getresuid()}')
|
|
||||||
logger.debug(f'(real, effective, saved) group id: {os.getresgid()}')
|
|
||||||
|
|
||||||
cmd = ['sudo', 'tcpdump']
|
|
||||||
|
|
||||||
# 7.1 process flags
|
|
||||||
flags = []
|
|
||||||
if print_pacno:
|
|
||||||
flags.append('-#')
|
|
||||||
if print_ll:
|
|
||||||
flags.append('-e')
|
|
||||||
if monitor_mode:
|
|
||||||
flags.append('-I')
|
|
||||||
flags.append('-n') # TODO: Integrate, in case name resolution is wanted!
|
|
||||||
cmd.extend(flags)
|
|
||||||
flags_string = " ".join(flags)
|
|
||||||
logger.debug(f'Flags: {flags_string}')
|
|
||||||
|
|
||||||
# debug interlude
|
|
||||||
verbosity = ctx.obj['VERBOSITY']
|
|
||||||
if verbosity > 0:
|
|
||||||
verbosity_flag = '-'
|
|
||||||
for i in range(0, verbosity):
|
|
||||||
verbosity_flag = verbosity_flag + 'v'
|
|
||||||
logger.debug(f'verbosity string to pass to tcpdump: {verbosity_flag}')
|
|
||||||
cmd.append(verbosity_flag)
|
|
||||||
|
|
||||||
# 7.2 generic (i.e. reusable) kw args
|
|
||||||
generic_kw_args = []
|
|
||||||
if count:
|
|
||||||
generic_kw_args.extend(['-c', str(count)])
|
|
||||||
# if mins:
|
|
||||||
# generic_kw_args.extend(['-G', str(mins * 60)]) TODO: this currently loads to errors with sudo
|
|
||||||
cmd.extend(generic_kw_args)
|
|
||||||
generic_kw_args_string = " ".join(generic_kw_args)
|
|
||||||
logger.debug(f'KW args: {generic_kw_args_string}')
|
|
||||||
|
|
||||||
# 7.3 special kw args (not a priori reusable)
|
|
||||||
non_generic_kw_args = []
|
|
||||||
if interface:
|
|
||||||
non_generic_kw_args.extend(['-i', interface])
|
|
||||||
non_generic_kw_args.extend(['-w', str(pcap_file_full_path)])
|
|
||||||
cmd.extend(non_generic_kw_args)
|
|
||||||
non_generic_kw_args_string = " ".join(non_generic_kw_args)
|
|
||||||
logger.debug(f'Non transferable (special) kw args: {non_generic_kw_args_string}')
|
|
||||||
|
|
||||||
# 7.4 add filter expression
|
|
||||||
if cap_filter:
|
|
||||||
logger.debug(f'cap_filter (not generic): {cap_filter}')
|
|
||||||
cmd.append(cap_filter)
|
|
||||||
|
|
||||||
full_cmd_string = " ".join(cmd)
|
|
||||||
|
|
||||||
logger.info(f'tcpdump command: {"".join(full_cmd_string)}')
|
|
||||||
click.echo('Capture setup complete!')
|
|
||||||
# Step 8: Execute tcpdump command
|
|
||||||
start_time = datetime.now().strftime("%H:%M:%S")
|
|
||||||
start = time()
|
|
||||||
try:
|
|
||||||
if guided:
|
|
||||||
click.confirm(f'Execute following command: {full_cmd_string}')
|
|
||||||
stdout_log_file_abs_path = capture_dir_full_path / stdout_log_file
|
|
||||||
stderr_log_file_abs_path = capture_dir_full_path / stderr_log_file
|
|
||||||
stdout_log_file_abs_path.touch(mode=0o777)
|
|
||||||
stderr_log_file_abs_path.touch(mode=0o777)
|
|
||||||
with open(stdout_log_file_abs_path, 'w') as out, open(stderr_log_file_abs_path, 'w') as err:
|
|
||||||
logger.debug(f'\nstdout: {out}.\nstderr: {err}.\n')
|
|
||||||
|
|
||||||
tcp_complete = subprocess.run(cmd, check=True, capture_output=True, text=True)
|
|
||||||
|
|
||||||
out.write(tcp_complete.stdout)
|
|
||||||
err.write(tcp_complete.stderr)
|
|
||||||
|
|
||||||
# click.echo(f'Mock sniff execution')
|
|
||||||
click.echo(f"Capture complete. Saved to {pcap_file}")
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
logger.error(f'Failed to capture packets: {e}')
|
|
||||||
click.echo(f'Failed to capture packets: {e}')
|
|
||||||
click.echo(f'Check {stderr_log_file} for more info.')
|
|
||||||
if ctx.obj['DEBUG']:
|
|
||||||
msg = [f'STDERR log {stderr_log_file} contents:\n']
|
|
||||||
with open(capture_dir_full_path / stderr_log_file) as log:
|
|
||||||
for line in log:
|
|
||||||
msg.append(line)
|
|
||||||
|
|
||||||
click.echo("\t".join(msg), lvl='e')
|
|
||||||
# print('DEBUG ACTIVE')
|
|
||||||
if guided:
|
|
||||||
click.prompt('Create metadata anyway?')
|
|
||||||
else:
|
|
||||||
click.echo('Aborting capture...')
|
|
||||||
sys.exit()
|
|
||||||
end_time = datetime.now().strftime("%H:%M:%S")
|
|
||||||
end = time()
|
|
||||||
delta = end - start
|
|
||||||
|
|
||||||
|
|
||||||
click.echo(f'tcpdump took {delta:.2f} seconds.')
|
|
||||||
# Step 9: Register metadata
|
|
||||||
metadata = {
|
|
||||||
'device': canonical_name,
|
|
||||||
'device_id': device,
|
|
||||||
'capture_id': capture_uuid,
|
|
||||||
'capture_date_iso': datetime.now().isoformat(),
|
|
||||||
'invoked_command': " ".join(map(str, cmd)),
|
|
||||||
'capture_duration': delta,
|
|
||||||
'generic_parameters': {
|
|
||||||
'flags': flags_string,
|
|
||||||
'kwargs': generic_kw_args_string,
|
|
||||||
'filter': generic_filter
|
|
||||||
},
|
|
||||||
'non_generic_parameters': {
|
|
||||||
'kwargs': non_generic_kw_args_string,
|
|
||||||
'filter': cap_filter
|
|
||||||
},
|
|
||||||
'features': {
|
|
||||||
'interface': interface,
|
|
||||||
'address': address
|
|
||||||
},
|
|
||||||
'resources': {
|
|
||||||
'pcap_file': str(pcap_file),
|
|
||||||
'stdout_log': str(stdout_log_file),
|
|
||||||
'stderr_log': str(stderr_log_file),
|
|
||||||
'pre': str(pre),
|
|
||||||
'post': str(post)
|
|
||||||
},
|
|
||||||
'environment': {
|
|
||||||
'capture_dir': capture_dir,
|
|
||||||
'database': database,
|
|
||||||
'capture_base_dir': str(capture_base_dir),
|
|
||||||
'capture_dir_abs_path': str(capture_dir_full_path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
click.echo('Ensuring correct ownership of created files.')
|
|
||||||
username = os.getlogin()
|
|
||||||
gid = os.getgid()
|
|
||||||
|
|
||||||
# Else there are issues when running with sudo:
|
|
||||||
try:
|
|
||||||
subprocess.run(f'sudo chown -R {username}:{username} {device_path}', shell=True)
|
|
||||||
except OSError as e:
|
|
||||||
click.echo(f'Some error {e}')
|
|
||||||
|
|
||||||
click.echo(f'Saving metadata.')
|
|
||||||
metadata_abs_path = capture_dir_full_path / 'capture_metadata.json'
|
|
||||||
with open(metadata_abs_path, 'w') as f:
|
|
||||||
json.dump(metadata, f, indent=4)
|
|
||||||
|
|
||||||
click.echo(f'END SNIFF SUBCOMMAND')
|
|
||||||
if post:
|
|
||||||
click.echo(f'Running post script {post}')
|
|
||||||
run_post(post)
|
|
||||||
|
|
||||||
|
|
||||||
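# Added note: illustrative end-to-end example of the tcpdump invocation assembled in step 7.
# The interface, count, MAC address and flag selection below are assumptions, not values from a real run:
#   sudo tcpdump -# -e -n -c 1000 -i wlan0 -w <capture_dir>/<device>_<uuid>.pcap ether host aa:bb:cc:dd:ee:ff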
@ -1,70 +0,0 @@
import click
from pathlib import Path
import logging
from logging.handlers import RotatingFileHandler
import sys
from iottb.models.iottb_config import IottbConfig
from iottb.definitions import DB_NAME, CFG_FILE_PATH

logger = logging.getLogger(__name__)


@click.command()
@click.option('-d', '--dest', type=click.Path(exists=True, file_okay=False, dir_okay=True),
              help='Location to put (new) iottb database')
@click.option('-n', '--name', default=DB_NAME, type=str,
              help='Name of new database.')
@click.option('--update-default/--no-update-default', default=True,
              help='If new db should be set as the new default')
@click.pass_context
def init_db(ctx, dest, name, update_default):
    logger.info('init-db invoked')
    config = ctx.obj['CONFIG']
    logger.debug(str(config))  # log the loaded config; the original f-string logged the literal text 'str(config)'
    # Use the default path from config if dest is not provided
    known_dbs = config.get_known_databases()
    logger.debug(f'Known databases: {known_dbs}')
    if name in known_dbs:
        dest = config.get_database_location(name)
        if Path(dest).joinpath(name).is_dir():
            click.echo(f'A database {name} already exists.')
            logger.debug(f'DB {name} exists in {dest}')
            click.echo('Exiting...')
            sys.exit()
        logger.debug(f'DB name {name} registered but does not exist.')
    if not dest:
        logger.info('No dest set, choosing default destination.')
        dest = Path(config.default_db_location)

    db_path = Path(dest).joinpath(name)
    logger.debug(f'Full path for db {str(db_path)}')
    # Create the directory if it doesn't exist
    db_path.mkdir(parents=True, exist_ok=True)
    logger.info(f"mkdir {db_path} successful")
    click.echo(f'Created {db_path}')

    # Update configuration
    config.set_database_location(name, str(dest))
    if update_default:
        config.set_default_database(name, str(dest))
    config.save_config()
    logger.info(f"Updated configuration with database {name} at {db_path}")


# @click.group('config')
# @click.pass_context
# def cfg(ctx):
#     pass
#
# @click.command('set', help='Set the location of a database.')
# @click.argument('database', help='Name of database')
# @click.argument('location', help='Where the database is located (i.e. its parent directory)')
# @click.pass_context
# def set(ctx, key, value):
#     click.echo(f'Setting {key} to {value} in config')
#     config = ctx.obj['CONFIG']
#     logger.warning('No checks performed!')
#     config.set_database_location(key, value)
#     config.save_config()
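# Example invocation (illustrative; the destination path is an assumption, 'iottb.db' is the default DB_NAME):
#   iottb init-db --dest ~/iottb-data --name iottb.db --update-default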
@ -1,48 +0,0 @@
|
|||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import click
|
|
||||||
|
|
||||||
APP_NAME = 'iottb'
|
|
||||||
DB_NAME = 'iottb.db'
|
|
||||||
CFG_FILE_PATH = str(Path(click.get_app_dir(APP_NAME)).joinpath('iottb.cfg'))
|
|
||||||
CONSOLE_LOG_FORMATS = {
|
|
||||||
0: '%(levelname)s - %(message)s',
|
|
||||||
1: '%(levelname)s - %(module)s - %(message)s',
|
|
||||||
2: '%(levelname)s - %(module)s - %(funcName)s - %(lineno)d - %(message)s'
|
|
||||||
}
|
|
||||||
|
|
||||||
LOGFILE_LOG_FORMAT = {
|
|
||||||
0: '%(levelname)s - %(asctime)s - %(module)s - %(message)s',
|
|
||||||
1: '%(levelname)s - %(asctime)s - %(module)s - %(funcName)s - %(message)s',
|
|
||||||
2: '%(levelname)s - %(asctime)s - %(module)s - %(funcName)s - %(lineno)d - %(message)s'
|
|
||||||
}
|
|
||||||
MAX_VERBOSITY = len(CONSOLE_LOG_FORMATS) - 1
|
|
||||||
assert len(LOGFILE_LOG_FORMAT) == len(CONSOLE_LOG_FORMATS), 'Log formats must be same size'
|
|
||||||
|
|
||||||
LOGLEVEL = logging.DEBUG
|
|
||||||
LOGDIR = Path.cwd() / 'logs'
|
|
||||||
|
|
||||||
# Characters to just replace
|
|
||||||
REPLACEMENT_SET_CANONICAL_DEVICE_NAMES = {' ', '_', ',', '!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '+', '=',
|
|
||||||
'{', '}', '[', ']',
|
|
||||||
'|',
|
|
||||||
'\\', ':', ';', '"', "'", '<', '>', '?', '/', '`', '~'}
|
|
||||||
# Characters to possibly error on
|
|
||||||
ERROR_SET_CANONICAL_DEVICE_NAMES = {',', '!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '+', '=', '{', '}', '[', ']',
|
|
||||||
'|',
|
|
||||||
'\\', ':', ';', '"', "'", '<', '>', '?', '/', '`', '~'}
|
|
||||||
|
|
||||||
DEVICE_METADATA_FILE_NAME = 'device_metadata.json'
|
|
||||||
|
|
||||||
TB_ECHO_STYLES = {
|
|
||||||
'w': {'fg': 'yellow', 'bold': True},
|
|
||||||
'i': {'fg': 'blue', 'italic': True},
|
|
||||||
's': {'fg': 'green', 'bold': True},
|
|
||||||
'e': {'fg': 'red', 'bold': True},
|
|
||||||
'header': {'fg': 'bright_cyan', 'bold': True, 'italic': True}
|
|
||||||
}
|
|
||||||
|
|
||||||
NAME_OF_CAPTURE_DIR = 'sniffs'
|
|
||||||
|
|
||||||
|
|
||||||
@ -1,77 +0,0 @@
|
|||||||
import sys
|
|
||||||
|
|
||||||
import click
|
|
||||||
from pathlib import Path
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from iottb.commands.sniff import sniff
|
|
||||||
from iottb.commands.developer import set_key_in_table_to, rm_cfg, rm_dbs, show_cfg, show_everything
|
|
||||||
|
|
||||||
##################################################
|
|
||||||
# Import package modules
|
|
||||||
#################################################
|
|
||||||
from iottb.utils.logger_config import setup_logging
|
|
||||||
from iottb import definitions
|
|
||||||
from iottb.models.iottb_config import IottbConfig
|
|
||||||
from iottb.commands.testbed import init_db
|
|
||||||
from iottb.commands.add_device import add_device
|
|
||||||
|
|
||||||
############################################################################
|
|
||||||
# Module shortcuts for global definitions
|
|
||||||
###########################################################################
|
|
||||||
APP_NAME = definitions.APP_NAME
|
|
||||||
DB_NAME = definitions.DB_NAME
|
|
||||||
CFG_FILE_PATH = definitions.CFG_FILE_PATH
|
|
||||||
# These are (possibly) redundant when defined in definitions.py
|
|
||||||
# keeping them here until refactored and tested
|
|
||||||
MAX_VERBOSITY = definitions.MAX_VERBOSITY
|
|
||||||
|
|
||||||
# Logger stuff
|
|
||||||
loglevel = definitions.LOGLEVEL
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@click.group(context_settings=dict(auto_envvar_prefix='IOTTB', show_default=True))
|
|
||||||
@click.option('-v', '--verbosity', count=True, type=click.IntRange(0, 3), default=0, is_eager=True,
|
|
||||||
help='Set verbosity')
|
|
||||||
@click.option('-d', '--debug', is_flag=True, default=False, is_eager=True,
|
|
||||||
help='Enable debug mode')
|
|
||||||
@click.option('--dry-run', is_flag=False, default=True, is_eager=True, help='NOT USED!')
|
|
||||||
@click.option('--cfg-file', type=click.Path(),
|
|
||||||
default=Path(click.get_app_dir(APP_NAME)).joinpath('iottb.cfg'),
|
|
||||||
envvar='IOTTB_CONF_HOME', help='Path to iottb config file')
|
|
||||||
@click.pass_context
|
|
||||||
def cli(ctx, verbosity, debug, dry_run, cfg_file):
|
|
||||||
# Setup logging based on the loaded configuration and other options
|
|
||||||
setup_logging(verbosity, debug)
|
|
||||||
ctx.ensure_object(dict) # Make sure context is ready for use
|
|
||||||
logger.info("Starting execution.")
|
|
||||||
ctx.obj['CONFIG'] = IottbConfig(cfg_file) # Load configuration directly
|
|
||||||
ctx.meta['FULL_PATH_CONFIG_FILE'] = str(cfg_file)
|
|
||||||
ctx.meta['DRY_RUN'] = dry_run
|
|
||||||
logger.debug(f'Verbosity: {verbosity}')
|
|
||||||
ctx.obj['VERBOSITY'] = verbosity
|
|
||||||
logger.debug(f'Debug: {debug}')
|
|
||||||
ctx.obj['DEBUG'] = debug
|
|
||||||
|
|
||||||
|
|
||||||
##################################################################################
|
|
||||||
# Add all subcommands to group here
|
|
||||||
#################################################################################
|
|
||||||
# TODO: Is there a way to do this without pylint freaking out?
|
|
||||||
# noinspection PyTypeChecker
|
|
||||||
cli.add_command(init_db)
|
|
||||||
cli.add_command(rm_cfg)
|
|
||||||
cli.add_command(set_key_in_table_to)
|
|
||||||
cli.add_command(rm_dbs)
|
|
||||||
# noinspection PyTypeChecker
|
|
||||||
cli.add_command(add_device)
|
|
||||||
cli.add_command(show_cfg)
|
|
||||||
cli.add_command(sniff)
|
|
||||||
cli.add_command(show_everything)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
cli()
|
|
||||||
for log in Path.cwd().iterdir():
|
|
||||||
log.chmod(0o777)
|
|
||||||
@ -1,6 +0,0 @@
|
|||||||
class Database:
|
|
||||||
|
|
||||||
def __init__(self, name, path):
|
|
||||||
self.name = name
|
|
||||||
self.path = path
|
|
||||||
self.device_list = [] # List of the canonical names of devices registered in this database
|
|
||||||
@ -1,50 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
import click
|
|
||||||
|
|
||||||
from iottb.utils.string_processing import make_canonical_name
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class DeviceMetadata:
|
|
||||||
def __init__(self, device_name, description="", model="", manufacturer="", firmware_version="", device_type="",
|
|
||||||
supported_interfaces="", companion_applications="", save_to_file=None, aliases=None):
|
|
||||||
self.device_id = str(uuid.uuid4())
|
|
||||||
self.device_name = device_name
|
|
||||||
cn, default_aliases = make_canonical_name(device_name)
|
|
||||||
logger.debug(f'cn, default aliases = {cn}, {str(default_aliases)}')
|
|
||||||
self.aliases = default_aliases if aliases is None else default_aliases + aliases
|
|
||||||
self.canonical_name = cn
|
|
||||||
self.date_added = datetime.now().isoformat()
|
|
||||||
self.description = description
|
|
||||||
self.model = model
|
|
||||||
self.manufacturer = manufacturer
|
|
||||||
self.current_firmware_version = firmware_version
|
|
||||||
self.device_type = device_type
|
|
||||||
self.supported_interfaces = supported_interfaces
|
|
||||||
self.companion_applications = companion_applications
|
|
||||||
self.last_metadata_update = datetime.now().isoformat()
|
|
||||||
if save_to_file is not None:
|
|
||||||
click.echo('TODO: Implement saving config to file after creation!')
|
|
||||||
|
|
||||||
def add_alias(self, alias: str = ""):
|
|
||||||
if alias == "":
|
|
||||||
return
|
|
||||||
self.aliases.append(alias)
|
|
||||||
|
|
||||||
def get_canonical_name(self):
|
|
||||||
return self.canonical_name
|
|
||||||
|
|
||||||
def print_attributes(self):
|
|
||||||
print(f'Printing attribute value pairs in {__name__}')
|
|
||||||
for attr, value in self.__dict__.items():
|
|
||||||
print(f'{attr}: {value}')
|
|
||||||
|
|
||||||
def save_metadata_to_file(self, metadata_path):
|
|
||||||
with open(metadata_path, 'w') as metadata_file:
|
|
||||||
json.dump(self.__dict__, metadata_file, indent=4)
|
|
||||||
click.echo(f'Metadata saved to {metadata_path}')
|
|
||||||
@ -1,124 +0,0 @@
|
|||||||
import json
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from iottb import definitions
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
DB_NAME = definitions.DB_NAME
|
|
||||||
|
|
||||||
|
|
||||||
class IottbConfig:
|
|
||||||
""" Class to handle testbed configuration.
|
|
||||||
|
|
||||||
TODO: Add instead of overwrite Database locations when initializing if a location with valid db
|
|
||||||
exists.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def warn():
|
|
||||||
logger.warning(f'DatabaseLocations are DatabaseLocationMap in the class {__name__}')
|
|
||||||
|
|
||||||
def __init__(self, cfg_file=definitions.CFG_FILE_PATH):
|
|
||||||
logger.info('Initializing Config object')
|
|
||||||
IottbConfig.warn()
|
|
||||||
self.cfg_file = Path(cfg_file)
|
|
||||||
self.default_database = None
|
|
||||||
self.default_db_location = None
|
|
||||||
self.db_path_dict = dict()
|
|
||||||
self.load_config()
|
|
||||||
|
|
||||||
def create_default_config(self):
|
|
||||||
"""Create default iottb config file."""
|
|
||||||
logger.info(f'Creating default config file at {self.cfg_file}')
|
|
||||||
self.default_database = DB_NAME
|
|
||||||
self.default_db_location = str(Path.home())
|
|
||||||
self.db_path_dict = {
|
|
||||||
DB_NAME: self.default_db_location
|
|
||||||
}
|
|
||||||
|
|
||||||
defaults = {
|
|
||||||
'DefaultDatabase': self.default_database,
|
|
||||||
'DefaultDatabasePath': self.default_db_location,
|
|
||||||
'DatabaseLocations': self.db_path_dict
|
|
||||||
}
|
|
||||||
|
|
||||||
try:
|
|
||||||
self.cfg_file.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
with self.cfg_file.open('w') as config_file:
|
|
||||||
json.dump(defaults, config_file, indent=4)
|
|
||||||
except IOError as e:
|
|
||||||
logger.error(f"Failed to create default configuration file at {self.cfg_file}: {e}")
|
|
||||||
raise RuntimeError(f"Failed to create configuration file: {e}") from e
|
|
||||||
|
|
||||||
def load_config(self):
|
|
||||||
"""Loads or creates default configuration from given file path."""
|
|
||||||
logger.info('Loading configuration file')
|
|
||||||
if not self.cfg_file.is_file():
|
|
||||||
logger.info('Config file does not exist.')
|
|
||||||
self.create_default_config()
|
|
||||||
else:
|
|
||||||
logger.info('Config file exists, opening.')
|
|
||||||
with self.cfg_file.open('r') as config_file:
|
|
||||||
data = json.load(config_file)
|
|
||||||
self.default_database = data.get('DefaultDatabase')
|
|
||||||
self.default_db_location = data.get('DefaultDatabasePath')
|
|
||||||
self.db_path_dict = data.get('DatabaseLocations', {})
|
|
||||||
|
|
||||||
def save_config(self):
|
|
||||||
"""Save the current configuration to the config file."""
|
|
||||||
data = {
|
|
||||||
'DefaultDatabase': self.default_database,
|
|
||||||
'DefaultDatabasePath': self.default_db_location,
|
|
||||||
'DatabaseLocations': self.db_path_dict
|
|
||||||
}
|
|
||||||
try:
|
|
||||||
with self.cfg_file.open('w') as config_file:
|
|
||||||
json.dump(data, config_file, indent=4)
|
|
||||||
except IOError as e:
|
|
||||||
logger.error(f"Failed to save configuration file at {self.cfg_file}: {e}")
|
|
||||||
raise RuntimeError(f"Failed to save configuration file: {e}") from e
|
|
||||||
|
|
||||||
def set_default_database(self, name, path):
|
|
||||||
"""Set the default database and its path."""
|
|
||||||
self.default_database = name
|
|
||||||
self.default_db_location = path
|
|
||||||
self.db_path_dict[name] = path
|
|
||||||
|
|
||||||
def get_default_database_location(self):
|
|
||||||
return self.default_db_location
|
|
||||||
|
|
||||||
def get_default_database(self):
|
|
||||||
return self.default_database
|
|
||||||
|
|
||||||
def get_database_location(self, name):
|
|
||||||
"""Get the location of a specific database."""
|
|
||||||
return self.db_path_dict.get(name)
|
|
||||||
|
|
||||||
def set_database_location(self, name, path):
|
|
||||||
"""Set the location for a database."""
|
|
||||||
logger.debug(f'Type of "path" parameter {type(path)}')
|
|
||||||
logger.debug(f'String value of "path" parameter {str(path)}')
|
|
||||||
logger.debug(f'Type of "name" parameter {type(name)}')
|
|
||||||
logger.debug(f'String value of "name" parameter {str(name)}')
|
|
||||||
path = Path(path)
|
|
||||||
name = Path(name)
|
|
||||||
logger.debug(f'path:name = {path}:{name}')
|
|
||||||
if path.name == name:
|
|
||||||
path = path.parent
|
|
||||||
self.db_path_dict[str(name)] = str(path)
|
|
||||||
|
|
||||||
def get_known_databases(self):
|
|
||||||
"""Get the set of known databases"""
|
|
||||||
logger.info(f'Getting known databases.')
|
|
||||||
|
|
||||||
return self.db_path_dict.keys()
|
|
||||||
|
|
||||||
def get_know_database_paths(self):
|
|
||||||
"""Get the paths of all known databases"""
|
|
||||||
logger.info(f'Getting known database paths.')
|
|
||||||
return self.db_path_dict.values()
|
|
||||||
|
|
||||||
def get_full_default_path(self):
|
|
||||||
return Path(self.default_db_location) / self.default_database
|
|
||||||
@ -1,39 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
logger = logging.getLogger('iottb.sniff') # Log with sniff subcommand
|
|
||||||
|
|
||||||
class CaptureMetadata:
|
|
||||||
def __init__(self, device_id, capture_dir, interface, address, capture_file, tcpdump_command, tcpdump_stdout, tcpdump_stderr, packet_filter, alias):
|
|
||||||
self.base_data = {
|
|
||||||
'device_id': device_id,
|
|
||||||
'capture_id': str(uuid.uuid4()),
|
|
||||||
'capture_date': datetime.now().isoformat(),
|
|
||||||
'capture_dir': str(capture_dir),
|
|
||||||
'capture_file': capture_file,
|
|
||||||
'start_time': "",
|
|
||||||
'stop_time': "",
|
|
||||||
'alias': alias
|
|
||||||
}
|
|
||||||
self.features = {
|
|
||||||
'interface': interface,
|
|
||||||
'device_ip_address': address if address else "No IP Address set",
|
|
||||||
'tcpdump_stdout': str(tcpdump_stdout),
|
|
||||||
'tcpdump_stderr': str(tcpdump_stderr),
|
|
||||||
'packet_filter': packet_filter
|
|
||||||
}
|
|
||||||
self.command = tcpdump_command
|
|
||||||
|
|
||||||
def save_to_file(self):
|
|
||||||
metadata = {
|
|
||||||
'base_data': self.base_data,
|
|
||||||
'features': self.features,
|
|
||||||
'command': self.command
|
|
||||||
}
|
|
||||||
metadata_file_path = Path(self.base_data['capture_dir']) / 'metadata.json'
|
|
||||||
with open(metadata_file_path, 'w') as f:
|
|
||||||
json.dump(metadata, f, indent=4)
|
|
||||||
logger.info(f'Metadata saved to {metadata_file_path}')
|
|
||||||
@ -1,74 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import click
|
|
||||||
from io import StringIO
|
|
||||||
import sys
|
|
||||||
from iottb import DOCS_FOLDER
|
|
||||||
# Import your CLI app here
|
|
||||||
from iottb.main import cli
|
|
||||||
|
|
||||||
"""Script to generate the help text and write to file.
|
|
||||||
|
|
||||||
Definitely needs better formatting.
|
|
||||||
Script is also not very flexible.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def get_help_text(command):
|
|
||||||
"""Get the help text for a given command."""
|
|
||||||
help_text = StringIO()
|
|
||||||
with click.Context(command) as ctx:
|
|
||||||
# chatgpt says this helps: was right
|
|
||||||
sys_stdout = sys.stdout
|
|
||||||
sys.stdout = help_text
|
|
||||||
try:
|
|
||||||
click.echo(command.get_help(ctx))
|
|
||||||
finally:
|
|
||||||
sys.stdout = sys_stdout
|
|
||||||
return help_text.getvalue()
|
|
||||||
|
|
||||||
|
|
||||||
def write_help_to_file(cli, filename):
|
|
||||||
"""Write help messages of all commands and subcommands to a file."""
|
|
||||||
with open(filename, 'w+') as f:
|
|
||||||
# main
|
|
||||||
f.write(f"Main Command: iottb\n")
|
|
||||||
f.write(get_help_text(cli))
|
|
||||||
f.write("\n\n")
|
|
||||||
|
|
||||||
# go through subcommands
|
|
||||||
for cmd_name, cmd in cli.commands.items():
|
|
||||||
f.write(f"Command: {cmd_name}\n")
|
|
||||||
f.write(get_help_text(cmd))
|
|
||||||
f.write("\n\n")
|
|
||||||
|
|
||||||
# subcommands of subcommands
|
|
||||||
if isinstance(cmd, click.Group):
|
|
||||||
for sub_cmd_name, sub_cmd in cmd.commands.items():
|
|
||||||
f.write(f"Subcommand: {cmd_name} {sub_cmd_name}\n")
|
|
||||||
f.write(get_help_text(sub_cmd))
|
|
||||||
f.write("\n\n")
|
|
||||||
|
|
||||||
|
|
||||||
def manual():
|
|
||||||
commands = [
|
|
||||||
'init-db',
|
|
||||||
'add-device',
|
|
||||||
'sniff'
|
|
||||||
]
|
|
||||||
dev_commands = [
|
|
||||||
'show-all',
|
|
||||||
'rm-dbs',
|
|
||||||
'show-cfg',
|
|
||||||
'show-all'
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
from iottb import DOCS_FOLDER
|
|
||||||
|
|
||||||
print('Must be in project root for this to work properly!')
|
|
||||||
print(f'CWD is {str(Path.cwd())}')
|
|
||||||
DOCS_FOLDER.mkdir(exist_ok=True)
|
|
||||||
write_help_to_file(cli, str(DOCS_FOLDER / "help_messages.md"))
|
|
||||||
print(f'Wrote help_messages.md to {str(DOCS_FOLDER / "help_messages.md")}')
|
|
||||||
@ -1,4 +0,0 @@
#!/bin/sh
# Thin wrapper to run the iottb CLI with root privileges.
echo 'Running iottb as sudo'
sudo "$(which python)" iottb "$@"
echo 'Finished executing iottb with sudo'
@ -1,41 +0,0 @@
|
|||||||
import logging
|
|
||||||
import sys
|
|
||||||
from logging.handlers import RotatingFileHandler
|
|
||||||
|
|
||||||
from iottb import definitions
|
|
||||||
from iottb.definitions import MAX_VERBOSITY, CONSOLE_LOG_FORMATS, APP_NAME, LOGFILE_LOG_FORMAT
|
|
||||||
|
|
||||||
loglevel = definitions.LOGLEVEL
|
|
||||||
|
|
||||||
|
|
||||||
def setup_logging(verbosity, debug=loglevel):
|
|
||||||
""" Setup root logger for iottb """
|
|
||||||
log_level = loglevel
|
|
||||||
handlers = []
|
|
||||||
date_format = '%Y-%m-%d %H:%M:%S'
|
|
||||||
if verbosity > 0:
|
|
||||||
log_level = logging.WARNING
|
|
||||||
if verbosity > MAX_VERBOSITY:
|
|
||||||
verbosity = MAX_VERBOSITY
|
|
||||||
log_level = logging.INFO
|
|
||||||
assert verbosity <= MAX_VERBOSITY, f'Verbosity must be <= {MAX_VERBOSITY}'
|
|
||||||
console_handler = logging.StreamHandler(sys.stdout)
|
|
||||||
print(str(sys.stdout))
|
|
||||||
console_handler.setFormatter(logging.Formatter(CONSOLE_LOG_FORMATS[verbosity], datefmt=date_format))
|
|
||||||
console_handler.setLevel(logging.DEBUG) # can keep at debug since it depends on global level?
|
|
||||||
handlers.append(console_handler)
|
|
||||||
|
|
||||||
if debug:
|
|
||||||
log_level = logging.DEBUG
|
|
||||||
|
|
||||||
# Logfile logs INFO+, no debugs though
|
|
||||||
file_handler = RotatingFileHandler(f'{str(definitions.LOGDIR / APP_NAME)}.log', maxBytes=10240, backupCount=5)
|
|
||||||
file_handler.setFormatter(logging.Formatter(LOGFILE_LOG_FORMAT[verbosity], datefmt=date_format))
|
|
||||||
file_handler.setLevel(logging.INFO)
|
|
||||||
|
|
||||||
# finish root logger setup
|
|
||||||
handlers.append(file_handler)
|
|
||||||
# Force this config to be applied to root logger
|
|
||||||
logging.basicConfig(level=log_level, handlers=handlers, force=True)
|
|
||||||
|
|
||||||
|
|
||||||
@ -1,40 +0,0 @@
import re
from iottb import definitions
import logging

logger = logging.getLogger(__name__)


def normalize_string(s, chars_to_replace=None, replacement=None, allow_unicode=False):
    pass


def make_canonical_name(name):
    """
    Normalize the device name to a canonical form:
    - Replace spaces and special characters with dashes.
    - Remove any remaining non-ASCII characters.
    - Convert to lowercase.
    - Keep only the first two dash-separated parts as the canonical name.
    """
    aliases = [name]
    logger.info(f'Normalizing name {name}')

    # We first normalize
    chars_to_replace = definitions.REPLACEMENT_SET_CANONICAL_DEVICE_NAMES
    pattern = re.compile('|'.join(re.escape(char) for char in chars_to_replace))
    norm_name = pattern.sub('-', name)
    norm_name = re.sub(r'[^\x00-\x7F]+', '', norm_name)  # removes non ascii chars

    aliases.append(norm_name)
    # Lower case
    norm_name = norm_name.lower()
    aliases.append(norm_name)

    # canonical name is only first two parts of resulting string
    parts = norm_name.split('-')
    canonical_name = '-'.join(parts[:2])
    aliases.append(canonical_name)
    aliases = list(set(aliases))
    logger.debug(f'Canonical name: {canonical_name}')
    logger.debug(f'Aliases: {aliases}')
    return canonical_name, aliases
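# Usage sketch (added note; mirrors the behaviour exercised in the test suite below):
#   make_canonical_name('Device Name With Spaces')
#   -> canonical name 'device-name', with aliases including 'Device Name With Spaces',
#      'device-name-with-spaces' and 'device-name'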
@ -1,42 +0,0 @@
|
|||||||
# iottb/utils/user_interaction.py
|
|
||||||
|
|
||||||
import click
|
|
||||||
from iottb.definitions import TB_ECHO_STYLES
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
|
|
||||||
def tb_echo2(msg: str, lvl='i', log=True):
|
|
||||||
style = TB_ECHO_STYLES.get(lvl, {})
|
|
||||||
click.secho(f'[IOTTB]', **style)
|
|
||||||
click.secho(f'[IOTTB] \t {msg}', **style)
|
|
||||||
|
|
||||||
|
|
||||||
last_prefix = None
|
|
||||||
|
|
||||||
|
|
||||||
def tb_echo(msg: str, lvl='i', log=True):
|
|
||||||
global last_prefix
|
|
||||||
prefix = f'Testbed [{lvl.upper()}]\n'
|
|
||||||
|
|
||||||
if last_prefix != prefix:
|
|
||||||
click.secho(prefix, nl=False, **TB_ECHO_STYLES['header'])
|
|
||||||
last_prefix = prefix
|
|
||||||
|
|
||||||
click.secho(f' {msg}', **TB_ECHO_STYLES[lvl])
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
tb_echo('Info message', 'i')
|
|
||||||
tb_echo('Warning message', 'w')
|
|
||||||
tb_echo('Error message', 'e')
|
|
||||||
tb_echo('Success message', 's')
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
# arrrgggg hacky
|
|
||||||
current_dir = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
project_root = os.path.abspath(os.path.join(current_dir, '../../'))
|
|
||||||
sys.path.insert(0, project_root)
|
|
||||||
|
|
||||||
main()
|
|
||||||
code/iottb-project/poetry.lock (generated, 107 lines)
@ -1,107 +0,0 @@
|
|||||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "click"
|
|
||||||
version = "8.1.7"
|
|
||||||
description = "Composable command line interface toolkit"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
|
|
||||||
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "click-option-group"
|
|
||||||
version = "0.5.6"
|
|
||||||
description = "Option groups missing in Click"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6,<4"
|
|
||||||
files = [
|
|
||||||
{file = "click-option-group-0.5.6.tar.gz", hash = "sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777"},
|
|
||||||
{file = "click_option_group-0.5.6-py3-none-any.whl", hash = "sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
Click = ">=7.0,<9"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
docs = ["Pallets-Sphinx-Themes", "m2r2", "sphinx"]
|
|
||||||
tests = ["pytest"]
|
|
||||||
tests-cov = ["coverage", "coveralls", "pytest", "pytest-cov"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "colorama"
|
|
||||||
version = "0.4.6"
|
|
||||||
description = "Cross-platform colored terminal text."
|
|
||||||
optional = false
|
|
||||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
|
||||||
files = [
|
|
||||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
|
||||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "iniconfig"
|
|
||||||
version = "2.0.0"
|
|
||||||
description = "brain-dead simple config-ini parsing"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
|
|
||||||
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "packaging"
|
|
||||||
version = "24.1"
|
|
||||||
description = "Core utilities for Python packages"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
|
|
||||||
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pluggy"
|
|
||||||
version = "1.5.0"
|
|
||||||
description = "plugin and hook calling mechanisms for python"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
|
|
||||||
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["pre-commit", "tox"]
|
|
||||||
testing = ["pytest", "pytest-benchmark"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pytest"
|
|
||||||
version = "8.2.2"
|
|
||||||
description = "pytest: simple powerful testing with Python"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
|
|
||||||
{file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
|
||||||
iniconfig = "*"
|
|
||||||
packaging = "*"
|
|
||||||
pluggy = ">=1.5,<2.0"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
|
|
||||||
|
|
||||||
[metadata]
|
|
||||||
lock-version = "2.0"
|
|
||||||
python-versions = "^3.12"
|
|
||||||
content-hash = "05aa11a74b8a6411a4413684f1a4cb0e5bcd271e16b4de9ae5205d52232c91a3"
|
|
||||||
@ -1,23 +0,0 @@
|
|||||||
[tool.poetry]
|
|
||||||
name = "iottb"
|
|
||||||
version = "0.1.0"
|
|
||||||
description = "IoT Testbed"
|
|
||||||
authors = ["Sebastian Lenzlinger <sebastian.lenzlinger@unibas.ch>"]
|
|
||||||
readme = "README.md"
|
|
||||||
license = "LICENSE"
|
|
||||||
|
|
||||||
[tool.poetry.dependencies]
|
|
||||||
python = "^3.12"
|
|
||||||
click = "^8.1"
|
|
||||||
# scapy = "^2.5"
|
|
||||||
click-option-group = "^0.5.6"
|
|
||||||
|
|
||||||
[tool.poetry.scripts]
|
|
||||||
iottb = "iottb.main:cli"
|
|
||||||
|
|
||||||
[tool.poetry.group.test.dependencies]
|
|
||||||
pytest = "^8.2.2"
|
|
||||||
|
|
||||||
[build-system]
|
|
||||||
requires = ["poetry-core"]
|
|
||||||
build-backend = "poetry.core.masonry.api"
|
|
||||||
@ -1,9 +0,0 @@
|
|||||||
click-option-group==0.5.6 ; python_version >= "3.12" and python_version < "4" \
|
|
||||||
--hash=sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7 \
|
|
||||||
--hash=sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777
|
|
||||||
click==8.1.7 ; python_version >= "3.12" and python_version < "4.0" \
|
|
||||||
--hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
|
|
||||||
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
|
|
||||||
colorama==0.4.6 ; python_version >= "3.12" and python_version < "4.0" and platform_system == "Windows" \
|
|
||||||
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
|
|
||||||
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
|
|
||||||
@ -1,23 +0,0 @@
|
|||||||
from iottb.utils.string_processing import make_canonical_name
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class TestMakeCanonicalName:
|
|
||||||
|
|
||||||
def test_normalizes_name_with_spaces_to_dashes(self):
|
|
||||||
name = "Device Name With Spaces"
|
|
||||||
expected_canonical_name = "device-name"
|
|
||||||
canonical_name, aliases = make_canonical_name(name)
|
|
||||||
assert canonical_name == expected_canonical_name
|
|
||||||
assert "device-name-with-spaces" in aliases
|
|
||||||
assert "device-name" in aliases
|
|
||||||
assert "Device Name With Spaces" in aliases
|
|
||||||
|
|
||||||
def test_name_with_no_spaces_or_special_characters(self):
|
|
||||||
name = "DeviceName123"
|
|
||||||
expected_canonical_name = "devicename123"
|
|
||||||
canonical_name, aliases = make_canonical_name(name)
|
|
||||||
assert canonical_name == expected_canonical_name
|
|
||||||
assert "DeviceName123" in aliases
|
|
||||||
assert "devicename123" in aliases
|
|
||||||
code/kydcap/__main__.py (new file, 40 lines)
@ -0,0 +1,40 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from kydcap.subcommands.sniff import setup_sniff_parser
|
||||||
|
from kydcap.subcommands.initialize_device_root_dir import setup_init_root_dir_parser
|
||||||
|
|
||||||
|
CAP_DIR_PREFIX = ...
|
||||||
|
|
||||||
|
|
||||||
|
######################
|
||||||
|
# Argparse setup
|
||||||
|
######################
|
||||||
|
def setup_argparse():
|
||||||
|
# create top level parser
|
||||||
|
root_parser = argparse.ArgumentParser(prog="kydcap")
|
||||||
|
subparsers = root_parser.add_subparsers(title="subcommands", required=True, dest="command")
|
||||||
|
|
||||||
|
setup_sniff_parser(subparsers)
|
||||||
|
setup_init_root_dir_parser(subparsers)
|
||||||
|
|
||||||
|
return root_parser
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = setup_argparse()
|
||||||
|
args = parser.parse_args()
|
||||||
|
print(args)
|
||||||
|
if args.command:
|
||||||
|
try:
|
||||||
|
args.func(args)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
print("Received keyboard interrupt. Exiting...")
|
||||||
|
exit(1)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error: {e}")
|
||||||
|
# create_capture_directory(args.device_name)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
code/kydcap/config.py (new file, 20 lines)
@ -0,0 +1,20 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from enum import Flag, unique, global_enum
|
||||||
|
|
||||||
|
|
||||||
|
DEVICE_METADATA_FILE = "device-metadata.json"
|
||||||
|
CAPTURE_METADATA_FILE = "capture-metadata.json"
|
||||||
|
TODAY_DATE_STRING = datetime.now().strftime("%d%b%Y").lower()
|
||||||
|
|
||||||
|
|
||||||
|
@unique
|
||||||
|
@global_enum
|
||||||
|
class ReturnCodes(Flag):
|
||||||
|
SUCCESS = 0
|
||||||
|
ABORTED = 1
|
||||||
|
FAILURE = 2
|
||||||
|
UNKNOWN = 3
|
||||||
|
FILE_NOT_FOUND = 4
|
||||||
|
FILE_ALREADY_EXISTS = 5
|
||||||
|
INVALID_ARGUMENT = 6
|
||||||
|
INVALID_ARGUMENT_VALUE = 7
|
||||||
code/kydcap/models/capture_metadata_model.py (new file, 47 lines)
@ -0,0 +1,47 @@
|
|||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from kydcap.config import ReturnCodes
|
||||||
|
|
||||||
|
|
||||||
|
class KydcapCaptureMetadata(BaseModel):
|
||||||
|
# Required Fields
|
||||||
|
device_id: str
|
||||||
|
capture_id: uuid.UUID = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||||
|
capture_date: str = Field(default_factory=lambda: datetime.now().strftime('%d-%m-%YT%H:%M:%S').lower())
|
||||||
|
|
||||||
|
# Statistics
|
||||||
|
start_time: str
|
||||||
|
stop_time: str
|
||||||
|
packet_count: Optional[int]
|
||||||
|
|
||||||
|
# Optional Fields
|
||||||
|
device_ip_address: Optional[str] = None
|
||||||
|
device_mac_address: Optional[str] = None
|
||||||
|
|
||||||
|
app: Optional[str] = None
|
||||||
|
app_version: Optional[str] = None
|
||||||
|
firmware_version: Optional[str] = None
|
||||||
|
|
||||||
|
def __init__(self, device_id: str, start_time: str, stop_time: str, /, **data: Any):
|
||||||
|
super().__init__(**data) # Pycharms orders
|
||||||
|
assert isinstance(device_id, str)
|
||||||
|
assert isinstance(start_time, str)
|
||||||
|
assert isinstance(stop_time, str)
|
||||||
|
self.device_id = device_id
|
||||||
|
self.start_time = start_time
|
||||||
|
self.stop_time = stop_time
|
||||||
|
|
||||||
|
def save_to_json(self, file_path: Path):
|
||||||
|
if file_path.is_file():
|
||||||
|
print(f"File {file_path} already exists, update instead.")
|
||||||
|
return ReturnCodes.FILE_ALREADY_EXISTS
|
||||||
|
metadata = self.model_dump_json(indent=2)
|
||||||
|
with file_path.open('w') as file:
|
||||||
|
json.dump(metadata, file)
|
||||||
|
return ReturnCodes.SUCCESS
|
||||||
code/kydcap/models/device_metadata_model.py (new file, 66 lines)
@ -0,0 +1,66 @@
|
|||||||
|
import json
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional, List, Any
|
||||||
|
|
||||||
|
# kydcap modules
|
||||||
|
from kydcap.config import ReturnCodes
|
||||||
|
|
||||||
|
# 3rd party libs
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
IMMUTABLE_FIELDS = {"device_name", "device_short_name", "device_id", "date_created"}
|
||||||
|
|
||||||
|
|
||||||
|
class DeviceMetadata(BaseModel):
|
||||||
|
# Required fields
|
||||||
|
device_name: str
|
||||||
|
device_short_name: str
|
||||||
|
device_id: str = Field(default_factory=lambda: str(uuid.uuid4()))
|
||||||
|
date_created: str = Field(default_factory=lambda: datetime.now().strftime('%d-%m-%YT%H:%M:%S').lower())
|
||||||
|
|
||||||
|
# Optional Fields
|
||||||
|
device_type: Optional[str] = None
|
||||||
|
device_serial_number: Optional[str] = None
|
||||||
|
device_firmware_version: Optional[str] = None
|
||||||
|
date_updated: Optional[str] = None
|
||||||
|
|
||||||
|
capture_files: Optional[List[str]] = []
|
||||||
|
|
||||||
|
def __init__(self, device_name: str, /, **data: Any):
|
||||||
|
super().__init__(**data)
|
||||||
|
self.device_name = device_name
|
||||||
|
self.device_short_name = device_name.lower().replace(" ", "_")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def load_from_json(cls, file_path: Path):
|
||||||
|
assert file_path.is_file()
|
||||||
|
with file_path.open('r') as file:
|
||||||
|
metadata_json = json.load(file)
|
||||||
|
metadata_model_obj = cls.model_validate_json(metadata_json)
|
||||||
|
return metadata_model_obj
|
||||||
|
|
||||||
|
def save_to_json(self, file_path: Path):
|
||||||
|
if file_path.is_file():
|
||||||
|
print(f"File {file_path} already exists, update instead.")
|
||||||
|
return ReturnCodes.FILE_ALREADY_EXISTS
|
||||||
|
metadata = self.model_dump_json(indent=2)
|
||||||
|
with file_path.open('w') as file:
|
||||||
|
json.dump(metadata, file)
|
||||||
|
return ReturnCodes.SUCCESS
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def update_metadata_in_json(cls, file_path: Path, **kwargs):
|
||||||
|
# TODO Maybe not needed at all.
|
||||||
|
assert file_path.is_file()
|
||||||
|
for field in IMMUTABLE_FIELDS:
|
||||||
|
if field in kwargs:
|
||||||
|
print(f"Field {field} is immutable")
|
||||||
|
return ReturnCodes.IMMUTABLE
|
||||||
|
metadata = cls.load_from_json(file_path)
|
||||||
|
for field, value in kwargs.items():
|
||||||
|
if field in metadata.model_fields_set:
|
||||||
|
setattr(metadata, field, value)
|
||||||
|
metadata.date_updated = datetime.now().strftime('%d-%m-%YT%H:%M:%S').lower()
|
||||||
|
pass
|
||||||
code/kydcap/subcommands/initialize_device_root_dir.py (new file, 40 lines)
@ -0,0 +1,40 @@
import os
import pathlib

from kydcap.config import DEVICE_METADATA_FILE
from kydcap.models.device_metadata_model import DeviceMetadata


def setup_init_root_dir_parser(subparsers):
    parser = subparsers.add_parser("init-device-root", aliases=["idr"])
    parser.add_argument("--root_dir", type=pathlib.Path, default=pathlib.Path.cwd())
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--dynamic", action="store_true", help="enable guided setup", default=False)
    group.add_argument("-n", "--name", action="store", type=str, help="name of device")
    parser.set_defaults(func=handle_idr)


def handle_idr(args):
    print("Entered kydcap initialize-device-root")
    root_dir = args.root_dir
    device_name = None
    if args.dynamic:
        response = "N"
        while response == "N":
            name = input("Please enter name of device: ")
            # TODO extended config for other fields like apps, firmware etc.
            response = input(f"Confirm device name: {name} [y/N]")
        device_name = name
    else:
        device_name = args.name
    root_dir.mkdir(parents=True, exist_ok=True)
    os.chdir(root_dir)  # pathlib.Path has no chdir(); change the working directory via os
    dev_metadata_model = DeviceMetadata(device_name)
    file_path = root_dir / device_name / DEVICE_METADATA_FILE
    assert not file_path.exists(), f"{file_path} already exists"
    if args.dynamic:
        response = input(f"Confirm device metadata: {dev_metadata_model.model_dump()} [y/N]")
        if response.lower() != "y":
            assert False, "TODO implement dynamic setup"
    file_path.parent.mkdir(parents=True, exist_ok=True)  # ensure the device directory exists before saving
    code = dev_metadata_model.save_to_json(file_path)
    print(f"Device metadata saved to {file_path}")
    return code
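# Example invocation (illustrative; assumes the package is importable as 'kydcap' and the device name is made up):
#   python -m kydcap init-device-root --root_dir ./devices -n "Smart Plug"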
code/kydcap/subcommands/sniff.py (new file, 111 lines)
@ -0,0 +1,111 @@
import subprocess
from pathlib import Path

from kydcap.config import *
from kydcap.models.device_metadata_model import DeviceMetadata


def setup_sniff_parser(subparsers):
    parser = subparsers.add_parser('sniff', help='Sniff packets with tcpdump')
    # metadata args
    parser.add_argument("-a", "--ip-address", help="IP address of the device to sniff", dest="device_ip")
    # tcpdump args
    parser_sniff_tcpdump = parser.add_argument_group('tcpdump arguments')
    parser_sniff_tcpdump.add_argument("-i", "--interface", help="Interface to capture on.", dest="capture_interface",
                                      default="any")
    parser_sniff_tcpdump.add_argument("-I", "--monitor-mode", help="Put interface into monitor mode",
                                      action="store_true")
    parser_sniff_tcpdump.add_argument("-n", help="Deactivate name resolution. Option is set by default.",
                                      action="store_true", dest="no_name_resolution")
    parser_sniff_tcpdump.add_argument("-#", "--number",
                                      help="Print packet number at beginning of line. Set by default.",
                                      action="store_true")
    parser_sniff_tcpdump.add_argument("-e", help="Print link layer headers. Option is set by default.",
                                      action="store_true", dest="print_link_layer")
    parser_sniff_tcpdump.add_argument("-t", action="count", default=0,
                                      help="Please see tcpdump manual for details. Unused by default.")
    # parser_sniff_tcpdump.add_argument("--filter", type=str, default="ip",
    #                                   help="pcap filter expression. Default is 'ip'")
    # shared args
    cap_size_group = parser.add_mutually_exclusive_group(required=False)
    cap_size_group.add_argument("-c", "--count", type=int, help="Number of packets to capture.", default=0)
    cap_size_group.add_argument("--mins", type=int, help="Time in minutes to capture.", default=60)
    parser.set_defaults(func=handle_sniff)
    # return parser
    # parser.add_default(func=handle_sniff(args=sniff_args))


def cwd_is_device_root_dir() -> bool:
    device_metadata_file = Path.cwd() / DEVICE_METADATA_FILE
    return device_metadata_file.exists()


def start_guided_device_root_dir_setup():
    assert False, "Not implemented"


def handle_metadata():
    assert not cwd_is_device_root_dir()
    print(f"Unable to find {DEVICE_METADATA_FILE} in current working directory")
    print("You need to setup a device root directory before using this command")
    response = input("Would you like to be guided through the setup? [y/n]")
    if response.lower() == "y":
        start_guided_device_root_dir_setup()
    else:
        print("'kydcap init-device-root --help' for more information.")
        exit(ReturnCodes.ABORTED)
    # device_id = handle_capture_metadata()
    return ReturnCodes.SUCCESS


def handle_capture_metadata():
    device_metadata_json = Path.cwd() / DEVICE_METADATA_FILE
    device_metadata = DeviceMetadata.load_from_json(device_metadata_json)
    device_id = device_metadata.device_id
    return device_id


def handle_date_dir():
    pass


def run_tcpdump(cmd):
    # check=True so a non-zero exit status raises CalledProcessError (handled by the caller)
    subprocess.run(cmd, check=True)


def handle_sniff(args):
    if cwd_is_device_root_dir():
        handle_date_dir()
        cmd = ['sudo', 'tcpdump', '-i', args.capture_interface]  # 'sudo tcpdump' as one list element would not be found
        if args.monitor_mode:
            cmd.append('-I')
        if args.no_name_resolution:
            cmd.append('-n')
        if args.number:
            cmd.append('-#')
        if args.print_link_layer:
            cmd.append('-e')
        if args.count:
            cmd.append('-c')
            cmd.append(str(args.count))
        elif args.mins:
            pass
        print('Executing: ' + ' '.join(cmd))
        # TODO maybe dump this into file -> put into device metadata
        try:
            start_time = datetime.now().strftime('%H:%M:%S')
            run_tcpdump(cmd)
            stop_time = datetime.now().strftime('%H:%M:%S')
        except KeyboardInterrupt:
            print("Received keyboard interrupt.")
            exit(ReturnCodes.ABORTED)
        except subprocess.CalledProcessError as e:
            print(f"Failed to capture packet: {e}")
            exit(ReturnCodes.FAILURE)
        except Exception as e:
            print(f"Failed to capture packet: {e}")
            exit(ReturnCodes.FAILURE)

        return ReturnCodes.SUCCESS
    else:
        handle_metadata()
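# Example invocation (illustrative; assumes a device root directory was initialised in the current working directory):
#   python -m kydcap sniff -i wlan0 -c 500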
@ -1,7 +1,7 @@
|
|||||||
import json
|
import json
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from iottb.definitions import ReturnCodes
|
from kydcap.config import ReturnCodes
|
||||||
|
|
||||||
|
|
||||||
def set_device_ip_address(ip_addr: str, file_path: Path):
|
def set_device_ip_address(ip_addr: str, file_path: Path):
|
||||||
@ -9,12 +9,12 @@ def set_device_ip_address(ip_addr: str, file_path: Path):
|
|||||||
assert file_path.is_file()
|
assert file_path.is_file()
|
||||||
with file_path.open('r') as f:
|
with file_path.open('r') as f:
|
||||||
data = json.load(f)
|
data = json.load(f)
|
||||||
current_ip = data['device_ip_address']
|
current_ip = data["device_ip_address"]
|
||||||
if current_ip is not None:
|
if current_ip is not None:
|
||||||
print(f'Device IP Address is set to {current_ip}')
|
print(f"Device IP Address is set to {current_ip}")
|
||||||
response = input(f'Do you want to change the recorded IP address to {ip_addr}? [Y/N] ')
|
response = input(f"Do you want to change the recorded IP address to {ip_addr}? [Y/N] ")
|
||||||
if response.upper() == 'N':
|
if response.upper() == "N":
|
||||||
print('Aborting change to device IP address')
|
print("Aborting change to device IP address")
|
||||||
return ReturnCodes.ABORTED
|
return ReturnCodes.ABORTED
|
||||||
with file_path.open('w') as f:
|
with file_path.open('w') as f:
|
||||||
json.dump(data, f)
|
json.dump(data, f)
|
||||||
@ -26,13 +26,15 @@ def set_device_mac_address(mac_addr: str, file_path: Path):
|
|||||||
assert file_path.is_file()
|
assert file_path.is_file()
|
||||||
with file_path.open('r') as f:
|
with file_path.open('r') as f:
|
||||||
data = json.load(f)
|
data = json.load(f)
|
||||||
current_mac = data['device_mac_address']
|
current_mac = data["device_mac_address"]
|
||||||
if current_mac is not None:
|
if current_mac is not None:
|
||||||
print(f'Device MAC Address is set to {current_mac}')
|
print(f"Device MAC Address is set to {current_mac}")
|
||||||
response = input(f'Do you want to change the recorded MAC address to {mac_addr}? [Y/N] ')
|
response = input(f"Do you want to change the recorded MAC address to {mac_addr}? [Y/N] ")
|
||||||
if response.upper() == 'N':
|
if response.upper() == "N":
|
||||||
print('Aborting change to device MAC address')
|
print("Aborting change to device MAC address")
|
||||||
return ReturnCodes.ABORTED
|
return ReturnCodes.ABORTED
|
||||||
with file_path.open('w') as f:
|
with file_path.open('w') as f:
|
||||||
json.dump(data, f)
|
json.dump(data, f)
|
||||||
return ReturnCodes.SUCCESS
|
return ReturnCodes.SUCCESS
|
||||||
|
|
||||||
|
# TODO finnish for other fields in capture metadata
|
||||||
@ -2,7 +2,7 @@ import json
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from iottb.definitions import ReturnCodes
|
from kydcap.config import ReturnCodes
|
||||||
|
|
||||||
|
|
||||||
def update_firmware_version(version: str, file_path: Path):
|
def update_firmware_version(version: str, file_path: Path):
|
||||||
@ -47,5 +47,3 @@ def update_device_type(device_type: str, file_path: Path):
|
|||||||
with file_path.open('w') as file:
|
with file_path.open('w') as file:
|
||||||
json.dump(metadata, file)
|
json.dump(metadata, file)
|
||||||
return ReturnCodes.SUCCESS
|
return ReturnCodes.SUCCESS
|
||||||
|
|
||||||
|
|
||||||
code/kydcap/utils/tcpdump_utils.py (new file, 28 lines)
@ -0,0 +1,28 @@
import shutil
import subprocess


def check_installed() -> bool:
    """Check if tcpdump is installed and available on the system path."""
    return shutil.which('tcpdump') is not None


def ensure_installed():
    """Ensure that tcpdump is installed, raise an error if not."""
    if not check_installed():
        raise RuntimeError("tcpdump is not installed. Please install it to continue.")


def list_interfaces() -> str:
    """List available network interfaces using tcpdump."""
    ensure_installed()
    try:
        result = subprocess.run(['tcpdump', '--list-interfaces'], capture_output=True, text=True, check=True)
        return result.stdout
    except subprocess.CalledProcessError as e:
        print(f"Failed to list interfaces: {e}")
        return ""


def start_tcpdump():
    return None
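# Usage sketch of the helpers above (added note, illustrative):
#   ensure_installed()
#   print(list_interfaces())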
code/tests/utils/test_capture_metadata_utils.py (new file, 6 lines)
@ -0,0 +1,6 @@
|
|||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import mock_open, patch
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from kydcap.utils.capture_metadata_utils import set_device_ip_address
|
||||||
@ -1,177 +0,0 @@
|
|||||||
# Commands to remember + sample output
|
|
||||||
Used commands: [[nmcli]], [[iw]], [[grep]], [[sed]]
|
|
||||||
Resources: [Capturing Wireless LAN Packets in Monitor Mode with iw](https://sandilands.info/sgordon/capturing-wifi-in-monitor-mode-with-iw)
|
|
||||||
Foreign BSSIDs have been made anonymous by replacing with `XX:XX:XX:XX:XX:XX`.
|
|
||||||
## [[nmcli]]
|
|
||||||
Useful for getting channel needed to setup monitor mode properly.
|
|
||||||
### `nmcli dev wifi`
|
|
||||||
```
|
|
||||||
IN-USE BSSID SSID MODE CHAN RATE SIGNAL BARS SECURITY
|
|
||||||
XX:XX:XX:XX:XX:XX FRITZ!Box 5490 PB Infra 6 195 Mbit/s 75 ▂▄▆_ WPA2
|
|
||||||
* 4C:1B:86:D1:06:7B LenbrO Infra 100 540 Mbit/s 67 ▂▄▆_ WPA2
|
|
||||||
4C:1B:86:D1:06:7C LenbrO Infra 6 260 Mbit/s 64 ▂▄▆_ WPA2
|
|
||||||
B8:BE:F4:4D:48:17 LenbrO Infra 1 130 Mbit/s 62 ▂▄▆_ WPA
|
|
||||||
XX:XX:XX:XX:XX:XX -- Infra 6 260 Mbit/s 60 ▂▄▆_ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX FRITZ!Box 5490 PB Infra 60 405 Mbit/s 37 ▂▄__ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX FRITZ!Box Fon WLAN 7360 BP Infra 1 130 Mbit/s 34 ▂▄__ WPA1 WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX FRITZ!Box 5490 PB Infra 6 195 Mbit/s 34 ▂▄__ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX Sunrise_Wi-Fi_09FB29 Infra 7 540 Mbit/s 34 ▂▄__ WPA2 WPA3
|
|
||||||
XX:XX:XX:XX:XX:XX Madchenband Infra 11 260 Mbit/s 34 ▂▄__ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX LenbrO Infra 36 270 Mbit/s 34 ▂▄__ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX FibreBox_X6-01EF47 Infra 1 260 Mbit/s 32 ▂▄__ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX -- Infra 11 260 Mbit/s 32 ▂▄__ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX EEG-04666 Infra 1 405 Mbit/s 30 ▂___ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX Salt_2GHz_8A9170 Infra 11 260 Mbit/s 29 ▂___ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX -- Infra 11 260 Mbit/s 24 ▂___ WPA2
|
|
||||||
XX:XX:XX:XX:XX:XX FRITZ!Box 5490 PB Infra 60 405 Mbit/s 19 ▂___ WPA2
|
|
||||||
```
|
|
||||||
### `nmcli -t dev wifi`
|
|
||||||
```
|
|
||||||
XX\:XX\:XX\:XX\:XX\:XX:FRITZ!Box 5490 PB:Infra:6:195 Mbit/s:79:▂▄▆_:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX::Infra:6:260 Mbit/s:75:▂▄▆_:WPA2
|
|
||||||
:4C\:1B\:86\:D1\:06\:7C:LenbrO:Infra:6:260 Mbit/s:74:▂▄▆_:WPA2
|
|
||||||
*:4C\:1B\:86\:D1\:06\:7B:LenbrO:Infra:100:540 Mbit/s:72:▂▄▆_:WPA2
|
|
||||||
:B8\:BE\:F4\:4D\:48\:17:LenbrO:Infra:1:130 Mbit/s:65:▂▄▆_:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:Sunrise_Wi-Fi_09FB29:Infra:7:540 Mbit/s:52:▂▄__:WPA2 WPA3
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:FRITZ!Box 5490 PB:Infra:60:405 Mbit/s:50:▂▄__:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:FRITZ!Box Fon WLAN 7360 BP:Infra:1:130 Mbit/s:47:▂▄__:WPA1 WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:FRITZ!Box 5490 PB:Infra:6:195 Mbit/s:45:▂▄__:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:Zentrum der Macht:Infra:1:195 Mbit/s:44:▂▄__:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:FibreBox_X6-01EF47:Infra:1:260 Mbit/s:42:▂▄__:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:Madchenband:Infra:11:260 Mbit/s:40:▂▄__:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:LenbrO:Infra:36:270 Mbit/s:37:▂▄__:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX::Infra:11:260 Mbit/s:34:▂▄__:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:EEG-04666:Infra:1:405 Mbit/s:30:▂___:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:Salt_2GHz_8A9170:Infra:11:260 Mbit/s:29:▂___:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:FRITZ!Box 5490 PB:Infra:60:405 Mbit/s:27:▂___:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:Madchenband2.0:Infra:100:540 Mbit/s:25:▂___:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX::Infra:11:260 Mbit/s:24:▂___:WPA2
|
|
||||||
:XX\:XX\:XX\:XX\:XX\:XX:FibreBox_X6-01EF47:Infra:44:540 Mbit/s:20:▂___:WPA2
|
|
||||||
```
|
|
||||||
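The terse `-t` output is handy for scripting. As an illustration only (not existing toolbox code), a small parser could pull the channel for a given SSID, assuming the default column order shown above and keeping in mind that colons inside the BSSID are escaped:

```
import re
import subprocess


def channel_for_ssid(ssid: str) -> int | None:
    """Sketch: return the channel of the first `nmcli -t dev wifi` entry matching `ssid`."""
    out = subprocess.run(['nmcli', '-t', 'dev', 'wifi'],
                         capture_output=True, text=True, check=True).stdout
    for line in out.splitlines():
        # Split only on unescaped colons; the BSSID contains escaped '\:' separators.
        fields = re.split(r'(?<!\\):', line)
        if len(fields) >= 5 and fields[2] == ssid:
            return int(fields[4])  # fields: IN-USE, BSSID, SSID, MODE, CHAN, ...
    return None
```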
## [[iw]]

### `iw dev`

Useful to list interfaces and see which hardware they correspond to. We can use that to create a monitor interface with an easier-to-remember name.

```
phy#1
    Unnamed/non-netdev interface
        wdev 0x100000002
        addr 3c:21:9c:f2:e4:00
        type P2P-device
    Interface wlp44s0
        ifindex 5
        wdev 0x100000001
        addr e6:bf:0c:3c:47:ba
        ssid LenbrO
        type managed
        channel 100 (5500 MHz), width: 80 MHz, center1: 5530 MHz
        txpower 22.00 dBm
        multicast TXQ:
            qsz-byt qsz-pkt flows drops marks overlmt hashcol tx-bytes tx-packets
            0       0       0     0     0     0       0       0        0
phy#0
    Interface mon0
        ifindex 7
        wdev 0x2
        addr a8:42:a1:8b:f4:e3
        type monitor
        channel 6 (2437 MHz), width: 20 MHz (no HT), center1: 2437 MHz
        txpower 20.00 dBm
    Interface wlp0s20f0u6
        ifindex 4
        wdev 0x1
        addr a8:42:a1:8b:f4:e3
        type monitor
        channel 6 (2437 MHz), width: 20 MHz (no HT), center1: 2437 MHz
        txpower 20.00 dBm
        multicast TXQ:
            qsz-byt qsz-pkt flows drops marks overlmt hashcol tx-bytes tx-packets
            0       0       0     0     0     0       0       0        0
```

Here, `phy#1` is my laptop's built-in WiFi card, and `phy#0` is a WiFi USB adapter.
### `iw [phy phy<index> | phy#<index>] info | fgrep monitor -B 10`

```
➜ iw phy phy0 info | fgrep monitor -B 10
        * CMAC-256 (00-0f-ac:13)
        * GMAC-128 (00-0f-ac:11)
        * GMAC-256 (00-0f-ac:12)
    Available Antennas: TX 0x3 RX 0x3
    Configured Antennas: TX 0x3 RX 0x3
    Supported interface modes:
        * IBSS
        * managed
        * AP
        * AP/VLAN
        * monitor
--
        * register_beacons
        * start_p2p_device
        * set_mcast_rate
        * connect
        * disconnect
        * set_qos_map
        * set_multicast_to_unicast
        * set_sar_specs
    software interface modes (can always be added):
        * AP/VLAN
        * monitor
```

We can do better.

### `iw phy#0 info | grep monitor`

```
        * monitor
        * monitor
```

Concise, but we possibly need more context to be sure.

### `iw phy phy0 info | sed -n '/software interface modes/,/monitor/p'`

More concise but with good context, assuming only the software interface modes need to support monitor mode.

```
    software interface modes (can always be added):
        * AP/VLAN
        * monitor
```
### Getting a monitor interface

```
iw phy#0 interface add mon0 type monitor
```

Add an interface with an easy name to the WiFi hardware and make it a monitor. Check again with `iw dev` to make sure it is really in monitor mode. If there is another interface on the same hardware, it must be taken down or deleted, e.g. with

```
iw dev <phy#0 other interface> del # or
ip link set <phy#0 other interface> down
```

Then, to enable the `mon0` interface:

```
ip link set mon0 up
```

To effectively capture packets, we should set the interface to the correct frequency. For this we get the channel, e.g. via the above-mentioned `nmcli dev wifi`. We can see that, e.g., the BSSID I am connected to (marked with `*`) is on channel 100. We can also see that there is another BSSID belonging to the same SSID with its interface on channel 6, i.e. it is running one interface in 2.4 GHz (802.11b/g/n/ax/be) and one in 5 GHz (802.11a/h/n/ac/ax/be). We choose which channel to tune our `mon0` interface to, then we can look up the corresponding center frequency on [Wikipedia (List of WLAN channels)](https://en.wikipedia.org/wiki/List_of_WLAN_channels). E.g. for channel 6 (i.e. the 2.4 GHz radio) we see that the center frequency is 2437 MHz. We set our interface to that frequency:

```
iw dev mon0 set freq 2437
```

Now double check that the interface is in monitor mode and tuned to the correct frequency:

```
iw dev mon0 info
```

This should give an output like:

```
Interface mon0
    ifindex 7
    wdev 0x2
    addr a8:42:a1:8b:f4:e3
    type monitor
    wiphy 0
    channel 6 (2437 MHz), width: 20 MHz (no HT), center1: 2437 MHz
    txpower 20.00 dBm
```

This concludes preparing the WiFi card for packet capture in monitor mode.
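For convenience, the whole sequence could also be scripted. Below is a minimal sketch using Python's `subprocess`, assuming the example names from above (`phy0`, `mon0`, channel 6 at 2437 MHz) and that it runs with sufficient privileges; it is not part of the existing toolbox:

```
import subprocess


def setup_monitor_interface(phy: str = 'phy0', mon: str = 'mon0', freq_mhz: int = 2437) -> None:
    """Sketch: create a monitor-mode interface and tune it, mirroring the manual steps above."""
    subprocess.run(['iw', 'phy', phy, 'interface', 'add', mon, 'type', 'monitor'], check=True)
    subprocess.run(['ip', 'link', 'set', mon, 'up'], check=True)
    subprocess.run(['iw', 'dev', mon, 'set', 'freq', str(freq_mhz)], check=True)
    subprocess.run(['iw', 'dev', mon, 'info'], check=True)  # prints the state for a manual double check
```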
### [remarks]

- `sudo` is probably required for these commands.
- These network tools are what is available on Fedora 40 with a 6.8.8 Linux kernel (`uname -r`). Other operating systems might still be using the older networking commands, which are being phased out. For a table of how old and new commands match up, see [this](https://www.tecmint.com/deprecated-linux-networking-commands-and-their-replacements/) recent article (July 2023), according to which the old commands are deprecated even in recent Debian and Ubuntu releases.
- If something is not working, run `rfkill list` to check whether the device is blocked. If it is, run `rfkill unblock 0`, where `0` is the same index used above and represents `phy0`/`phy#0`.
- To ensure that [[NetworkManager]] is not managing your card, run `nmcli device set wlp0s20f0u6 managed no` (if the interface is called `wlp0s20f0u6`). Check with `nmcli dev`; the STATE should be "unmanaged".
- See the resources on how to put the interface/WiFi hardware back into managed mode if you need the card for personal use.
# Important

Monitor mode is actually completely useless unless we can observe the EAPOL handshake. That means the WiFi AP should be using WPA/WPA2 with a pre-shared key, and we also need to know the SSID and passphrase. So it is still better if we can set up an environment where we can just do port mirroring from the WiFi router, or set ourselves up in AP mode; but then we need to be able to bridge to the internet somehow, which I haven't managed reliably. I have done some testing on a Raspberry Pi and it seemed to work, but the Raspberry Pi sometimes goes to sleep, so the AP goes down, which means the IoT device loses its connection.

If we happen to know the MAC address we need, then in Wireshark we can filter with `wlan.addr == [MAC]`. In tcpdump we can use the filter
@@ -1,15 +0,0 @@
# `IOTTB_HOME`

I introduced the environment variable `IOTTB_HOME` into the code. It is used to configure where the root of an iottb database is. #TODO this means that some code needs refactoring, but I think it will streamline the code. The path in `IOTTB_HOME` shall be used to define the database root. Then, all the code handling adding devices and running captures can rely on the fact that a canonical home exists. Unfortunately I've hard-coded quite a bit of ad-hoc configuration to use `Path.cwd()`, i.e. the current working directory, by default, so there will be some refactoring involved in switching over to using `IOTTB_HOME`'s value as the default path.
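As a rough illustration of the intended lookup (the helper name and the fallback behaviour are my assumptions, not the existing code):

```
import os
from pathlib import Path


def resolve_db_root() -> Path:
    """Hypothetical helper: prefer IOTTB_HOME, fall back to the current working directory."""
    env_value = os.environ.get('IOTTB_HOME')
    return Path(env_value).expanduser() if env_value else Path.cwd()
```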
# Adding Functionality

## Quick and dirty capture

I want to have a mode which just takes a command and runs it directly with its arguments.

The question is whether to only allow a preconfigured list of commands or, in principle, allow any command to be passed and write the output. I tend toward providing a subcommand for each utility we want to support. The question is what to do about syntax errors in those commands. Maybe the thing to do is to only write a file into the db if the command runs successfully; a minimal sketch of that idea follows below.
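Here is that sketch; the function name and output layout are assumptions for illustration, not existing iottb code:

```
import subprocess
from pathlib import Path


def run_and_store(cmd: list[str], out_dir: Path) -> int:
    """Sketch: run an arbitrary capture command; keep its output only if it exits cleanly."""
    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.returncode == 0:
        out_dir.mkdir(parents=True, exist_ok=True)
        (out_dir / 'stdout.txt').write_text(result.stdout)
    return result.returncode
```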
### Refactoring the tcpdump capture

With the above idea it would also be possible to refactor or completely rewrite how tcpdump is called. But the command has a lot of options, and maybe it is better to also offer users some guidance via `-h`, e.g. so they only input the needed and correct filters. Choosing the wrong filter could make the capture potentially useless, and one might only notice that after the capture has completed.
## Converting pcap to csv

I want an option such that one can automatically convert a capture's resulting file into a CSV. I will probably focus on tcpdump for now, since other tools like [[mitmproxy]] have different output files. A rough sketch of how this could work is shown below.
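A minimal sketch, assuming tshark is available and that a handful of fields is enough; the field list and file handling are placeholders, not a settled design:

```
import subprocess
from pathlib import Path


def pcap_to_csv(pcap: Path, csv_out: Path) -> None:
    """Sketch: convert a pcap into a CSV of selected fields using tshark."""
    fields = ['frame.time_epoch', 'ip.src', 'ip.dst', 'frame.len', '_ws.col.Protocol']
    cmd = ['tshark', '-r', str(pcap), '-T', 'fields', '-E', 'header=y', '-E', 'separator=,']
    for field in fields:
        cmd += ['-e', field]
    csv_out.write_text(subprocess.run(cmd, capture_output=True, text=True, check=True).stdout)
```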
## Defining Experiment

I want a pair of commands that (1) provide a guided CLI interface to define an experiment and (2) run that experiment. Here the [Collective Knowledge Framework](https://github.com/mlcommons/ck) might actually come in handy: they already have tooling for setting up and defining aspects of experiments so that they become reproducible. So maybe one part of `iottb` as a tool would be to write the correct JSON files into the directory, containing the information on how the command was run. Caveat: not all option values matter in the same way; basically what matters is only whether an option was used or not (flag options) or that it was used at all (e.g. an IP address was used in the filter, but the specific value of the IP is of no use for reproducing). Also, the Collective Knowledge tooling relies on very common ML algorithms/frameworks and static data, so maybe this only comes into play after a capture has been done. So maybe a feature extraction tool (see [[further considerations#Usage paths/ Workflows]]) should create the data and build the database separately. A sketch of such a run-metadata file is given below.

#remark The tcpdump filter could also be exported into an environment variable? But then again, what is the use of defining a conformance; we could then use the raw capture idea for tcpdump, too.
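As an illustration of the kind of run-metadata file meant above; the field names are my assumptions, not an existing iottb schema:

```
import json
import shlex
import sys
from datetime import datetime, timezone
from pathlib import Path


def write_run_metadata(cmd: list[str], out_dir: Path) -> Path:
    """Sketch: record how a capture command was invoked so the run can be reproduced later."""
    meta = {
        'command': cmd[0],
        'args': cmd[1:],
        'pretty': shlex.join(cmd),
        'started_at': datetime.now(timezone.utc).isoformat(),
        'python': sys.version.split()[0],
    }
    meta_file = out_dir / 'run_metadata.json'
    meta_file.write_text(json.dumps(meta, indent=2))
    return meta_file
```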
Some files were not shown because too many files have changed in this diff.