diff --git a/.github/workflows/shellcheck.yaml b/.github/workflows/shellcheck.yaml index b757ad23..771690b3 100644 --- a/.github/workflows/shellcheck.yaml +++ b/.github/workflows/shellcheck.yaml @@ -1,42 +1,27 @@ name: ShellCheck on: - push: - branches: - - develop - - main - - 'releases/**' - paths: - - 'lib/**' - - 'tools/**' - - 'uac' - pull_request: branches: - develop - main - - 'releases/**' paths: - 'lib/**' - - 'tools/**' - 'uac' # Allows you to run this workflow manually from the Actions tab workflow_dispatch: jobs: - shellcheck: + build: name: ShellCheck runs-on: ubuntu-latest steps: - name: Clone uac repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: uac - name: Run ShellCheck uses: ludeeus/action-shellcheck@master - with: - ignore_paths: artifacts bin config profiles - \ No newline at end of file diff --git a/.github/workflows/unit-testing.yaml b/.github/workflows/unit-testing.yaml new file mode 100644 index 00000000..1fabd0fd --- /dev/null +++ b/.github/workflows/unit-testing.yaml @@ -0,0 +1,44 @@ +name: Unit testing + +on: + pull_request: + branches: + - develop + - main + paths: + - 'lib/**' + - 'uac' + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + build: + name: Unit testing + runs-on: ubuntu-latest + + steps: + - name: Clone uac repo + uses: actions/checkout@v4 + with: + repository: tclahr/uac + path: uac + + - name: Clone ushunit repo + uses: actions/checkout@v4 + with: + repository: tclahr/ushunit + ref: main + path: ushunit + + - name: Clone uac-tests repo + uses: actions/checkout@v4 + with: + repository: tclahr/uac-tests + ref: main + path: uac-tests + + - name: Run tests + working-directory: ushunit + run: | + UAC_DIR="../uac" ./ushunit -i ../uac-tests/tests/lib/*.sh ../uac-tests/tests/*.sh diff --git a/.github/workflows/validate-artifacts-file.yaml b/.github/workflows/validate-artifacts-file.yaml deleted file mode 100644 index 1b79f030..00000000 --- 
a/.github/workflows/validate-artifacts-file.yaml +++ /dev/null @@ -1,41 +0,0 @@ -name: Validate Artifacts File - -on: - push: - branches: - - develop - - main - - 'releases/**' - paths: - - 'artifacts/**' - - pull_request: - branches: - - develop - - main - - 'releases/**' - paths: - - 'artifacts/**' - -jobs: - validate-artifacts-file: - runs-on: ubuntu-latest - - steps: - - name: Clone uac repo - uses: actions/checkout@v3 - with: - path: uac - - - name: Get added/modified files - id: files - uses: jitterbit/get-changed-files@v1 - - - name: Validate all added/modified artifacts files - run: | - cd uac - for file in ${{ steps.files.outputs.added_modified }}; do - if [[ "${file}" == "artifacts/"*.yaml ]]; then - ./uac --validate-artifacts-file "$file" - fi - done diff --git a/.github/workflows/validate-artifacts.yaml b/.github/workflows/validate-artifacts.yaml new file mode 100644 index 00000000..a24b02a0 --- /dev/null +++ b/.github/workflows/validate-artifacts.yaml @@ -0,0 +1,31 @@ +name: Validate Artifacts + +on: + pull_request: + branches: + - develop + - main + paths: + - 'artifacts/**' + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + validate-artifact-files: + name: Validate Artifacts + runs-on: ubuntu-latest + + steps: + - name: Clone uac repo + uses: actions/checkout@v4 + with: + path: uac + + - name: Validate Artifacts + working-directory: uac + run: | + find artifacts/* -name "*.yaml" -type f \ + | while read file || [ -n "${file}" ]; do + ./uac --validate-artifact "${file}" + done diff --git a/.github/workflows/validate-profiles.yaml b/.github/workflows/validate-profiles.yaml new file mode 100644 index 00000000..ca673af3 --- /dev/null +++ b/.github/workflows/validate-profiles.yaml @@ -0,0 +1,32 @@ +name: Validate Profiles + +on: + pull_request: + branches: + - develop + - main + paths: + - 'profiles/**' + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + 
validate-profile-files: + name: Validate Profiles + runs-on: ubuntu-latest + + steps: + - name: Clone uac repo + uses: actions/checkout@v4 + with: + path: uac + + - name: Validate Profiles + working-directory: uac + run: | + find profiles/* -name "*.yaml" -type f \ + | while read file || [ -n "${file}" ]; do + ./uac --validate-profile "${file}" + done + \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 1b3a0c73..60436ea8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,5 +2,85 @@ ## DEVELOPMENT VERSION +### Features + +- UAC now completely skips an artifact file (YAML) that has no artifacts to be collected for the target operating system. You can use '--artifacts list [OPERATING_SYSTEM]' to display artifacts for a specific operating system only. +- New output file formats: + - none: Collected data will not be archived or compressed. Instead, it will be copied directly to an output directory ([#188](https://github.com/tclahr/uac/issues/188)). + - zip: Collected data will be archived and compressed into a zip file. Additionally, you can create a password-protected zip file using the '--output-password' option ([#149](https://github.com/tclahr/uac/issues/149)). +- You can now set a custom output file name using the '-o/--output-base-name' command line option. Variables are available to format the filename ([#179](https://github.com/tclahr/uac/issues/179)). +- Now you have the option to supply a file path to a custom profile located outside the profiles directory. +- Now you have the option to supply a file path to a custom artifact located outside the artifacts directory ([#154](https://github.com/tclahr/uac/issues/154)). +- Now you can have the option to supply a file path to a custom config file located outside the config directory using the '-c/--config' command line option. +- New remote transfer options for Amazon, Google and IBM cloud storage locations. 
+- UAC will now use 'wget' to transfer files to remote cloud storage locations when 'curl' is not available. +- You can now increase the verbosity level using the '-v/--verbose' command line option. Enabling a higher verbosity level will result in the display of all executed commands. +- UAC will now use the built-in function 'astrings' to extract strings from binary files when 'strings' is not available on the system. +- The message 'The strings command requires the command line developer tools.' will no longer appear on macOS systems without developer tools installed ([#171](https://github.com/tclahr/uac/issues/171)). +- Error messages generated by executed commands (stderr) are now recorded in the uac.log file ([#150](https://github.com/tclahr/uac/issues/150)). +- New '-H/--hash-collected' command line option. Enabling this option will cause UAC to hash all collected files and save the results in a hash file. To accomplish this, all collected data must first be copied to the destination directory. Therefore, ensure you have twice the free space available on the system: once for the collected data and once for the output file. Additionally, note that this process will increase the running time ([#189](https://github.com/tclahr/uac/issues/189)). +- New '-t/--max-thread' command line option. It can be used to specify the number of files that will be processed in parallel by the 'hash' and 'stat' collectors. +- You can now validate profiles using the '--validate-profile' command line option. + ### Artifacts +- bodyfile/bodyfile.yaml: Updated to remove max_depth limit. +- files/applications/whatsapp.yaml: Added collection of WhatsApp Desktop files [macos]. +- files/logs/additional_logs.yaml: Artifact was renamed to advanced_log_search.yaml. +- files/logs/relink.yaml: Added collection of the kernel relink log file [openbsd] ([Herbert-Karl](https://github.com/Herbert-Karl)). 
+- files/system/acct.yaml: Added collection of system accounting files [freebsd, netbsd, openbsd] ([Herbert-Karl](https://github.com/Herbert-Karl)). +- files/system/dev_db.yaml: Added collection of the database file used for device lookups [netbsd, openbsd] ([Herbert-Karl](https://github.com/Herbert-Karl)). +- files/system/dev_shm.yaml: Updated to increase max_file_size to 10MB. +- files/system/locate_db.yaml: Added collection of the database file used by locate command, representing a snapshot of the virtual file system accessible with minimal permissions [freebsd, netbsd, openbsd] ([Herbert-Karl](https://github.com/Herbert-Karl)). +- files/system/netscaler.yaml: Updated to increase max_file_size to 10MB. +- files/system/run_shm.yaml: Updated to increase max_file_size to 10MB. +- files/system/security_backups.yaml: Added collection of file backups and hashes created by the integrated security script [freebsd, netbsd, openbsd] ([Herbert-Karl](https://github.com/Herbert-Karl)). +- files/system/tmp.yaml: Updated to increase max_file_size to 10MB. +- files/system/var_tmp.yaml: Updated to increase max_file_size to 10MB. +- hash_executables/hash_executables.yaml: Updated to remove max_depth and max_file_size properties. +- live_response/hardware/dmesg.yaml: Updated collection of console message buffer [esxi, freebsd, netscaler, openbsd, solaris] ([Herbert-Karl](https://github.com/Herbert-Karl)). +- live_response/process/deleted.yaml: Collection of deleted processes will no longer use dd conv=swab. The binary file will be collected in its raw format now [linux]. +- live_response/process/deleted.yaml: Updated to fix the collection of open files of (malicious) processes [linux] ([mnrkbys](https://github.com/mnrkbys)). +- live_response/process/hash_running_processes.yaml: Updated to add support to hash running processes on FreeBSD systems that are using procfs (/proc) [freebsd]. 
+- live_response/process/procfs_information.yaml: Added artifact collection using cat when strings is not available. +- live_response/process/strings_running_processes.yaml: Added collection of strings from running processes for ESXi systems [esxi]. +- live_response/process/strings_running_processes.yaml: Added condition to check whether developer tools are installed before running strings on macOS [macos]. +- live_response/process/strings_running_processes.yaml: Added support for collecting strings even when the strings command is unavailable. In such cases, the built-in astrings command will be used instead [all]. +- live_response/system/hidden_directories.yaml: Updated to remove max_depth limit. +- live_response/system/hidden_files.yaml: Updated to remove max_depth limit. +- live_response/system/lastcomm.yaml: Added collection of the last commands executed in a reverse order based on the default and historic accounting file [freebsd, netbsd, openbsd] ([Herbert-Karl](https://github.com/Herbert-Karl)). +- live_response/system/sgid.yaml: Updated to remove max_depth limit. +- live_response/system/socket_files.yaml: Updated to remove max_depth limit. +- live_response/system/suid.yaml: Updated to remove max_depth limit. +- live_response/system/world_writable_directories.yaml: Updated to remove max_depth limit. +- live_response/system/world_writable_files.yaml: Updated to remove max_depth limit. + +### Command Line Option Changes + +- '--date-range-start' was renamed to '--start-date' ([#186](https://github.com/tclahr/uac/issues/186)). +- '--date-range-end' was renamed to '--end-date' ([#186](https://github.com/tclahr/uac/issues/186)). +- '--validate-artifacts-file' was renamed to '--validate-artifact'. +- '--s3-presigned-url' was renamed to '--aws-s3-presigned-url'. +- '--s3-presigned-url-log-file' was renamed to '--aws-s3-presigned-url-log-file'. 
+- '--ibm-cos-url', '--ibm-cos-url-log-file' and '--ibm-cloud-api-key' were removed and now transfers to IBM cloud should be done using '--s3-provider', '--s3-region', '--s3-bucket' and '--s3-token' options. + +### Artifacts Properties + +- The 'output_directory' property is now mandatory for the following collectors: command, find, hash and stat. +- Introduced a new 'condition' property that ensures the collection runs only if the specified condition returns true. + +### uac.conf + +- Introduced a new global 'max_depth' configuration option to limit the depth of directory tree searches globally. + +### Tools + +- Statically linked 'zip' is now available for the following systems: + - linux/esxi (arm, arm64, i386 and x86_64) + - freebsd/netscaler (i386 and x86_64) +- 'avml' and 'linux_procmemdump.sh' tools were moved to the 'bin' directory. +- AVML updated to v0.14.0. + +### Deprecated + +- Android support was removed, but UAC can still be executed on Android systems using '--operating-system linux' option. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 622ea5ee..10fa6d46 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,13 +8,13 @@ Here are a couple of things we are looking for help with: ## New artifacts -Have you identified a new artifact that is still not collected by UAC? Please create a new artifact file and submit it via a new Pull Request. +Have you identified a new artifact that is still not collected by UAC? Please create a new artifact and submit it via a new Pull Request. -Please see [Artifacts file definition](https://tclahr.github.io/uac-docs/latest/artifacts_file/) docs for more information. +Please see [Artifacts definition](https://tclahr.github.io/uac-docs/artifacts/) docs for more information. ## New features -You can request a new feature by submitting an issue to our GitHub Repository. If you would like to implement a new feature, please submit an issue with a proposal for your work first, to be sure that we can use it. 
This will also allow us to better coordinate our efforts, prevent duplication of work, and help you to craft the change so that it is successfully accepted into the project. +You can request a new feature by submitting an issue to our GitHub Repository. If you would like to implement a new feature, please submit an issue with a proposal for your work first, to be sure that we can use it. This will also allow us to better coordinate our efforts, prevent duplication of work, and help you craft the change so that it is successfully accepted into the project. ## Found a bug? @@ -84,7 +84,7 @@ git checkout -b my-feature-branch develop 1. Create your code following our [Coding Rules](#coding-rules). -1. Test your code against as many systems as you can using the [uac-unit-test](https://github.com/tclahr/uac-unit-test). For instance, your code can fully work on a Linux but not on a FreeBSD system. +1. Test your code against as many systems as you can. For instance, your code can fully work on a Linux but not on a FreeBSD system. 1. Commit your changes using a descriptive commit message that follows our [commit message guidelines](#commit-message-guidelines). *Don’t commit code as an unrecognized author. Having commits with unrecognized authors makes it more difficult to track who wrote which part of the code. 
Ensure your Git client is configured with the correct email address and linked to your GitHub user.* diff --git a/LICENSE b/LICENSE index d6456956..261eeb9e 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,3 @@ - Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ diff --git a/LICENSES.md b/LICENSES.md index 89879f52..751226fe 100644 --- a/LICENSES.md +++ b/LICENSES.md @@ -3,5 +3,6 @@ Use of the following Third-Party Software is subject to the license agreements a |Product|Copyright|URL| |---|---|---| |AVML|Use rights in accordance with the information displayed at: https://github.com/microsoft/avml/blob/main/LICENSE|https://github.com/microsoft/avml| -|linux_procmemdump.sh|Use rights in accordance with the information displayed at: https://creativecommons.org/licenses/by-sa/4.0|| +|linux_procmemdump.sh|Use rights in accordance with the information displayed at: https://creativecommons.org/licenses/by-sa/4.0| |statx|Use rights in accordance with the information displayed at: https://github.com/tclahr/statx/blob/main/LICENSE|https://github.com/tclahr/statx| +|zip|Use rights in accordance with the information displayed at: https://infozip.sourceforge.net/license.html|https://infozip.sourceforge.net| diff --git a/README.md b/README.md index 531d2dd5..3eecf540 100644 --- a/README.md +++ b/README.md @@ -1,50 +1,68 @@ -## Unix-like Artifacts Collector - -[![ShellCheck](https://github.com/tclahr/uac/actions/workflows/shellcheck.yaml/badge.svg)](https://github.com/tclahr/uac/actions/workflows/shellcheck.yaml) -[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/5640/badge)](https://bestpractices.coreinfrastructure.org/projects/5640) -[![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/tclahr/uac?include_prereleases&style=flat)](https://github.com/tclahr/uac/releases) -[![GitHub](https://img.shields.io/github/license/tclahr/uac?style=flat)](LICENSE) - -UAC is a Live Response collection 
script for Incident Response that makes use of native binaries and tools to automate the collection of AIX, Android, ESXi, FreeBSD, Linux, macOS, NetBSD, NetScaler, OpenBSD and Solaris systems artifacts. It was created to facilitate and speed up data collection, and depend less on remote support during incident response engagements. - -[Documentation](#-documentation) • -[Main Features](#-main-features) • -[Supported Operating Systems](#-supported-operating-systems) • -[Using UAC](#-using-uac) • -[Contributing](#-contributing) • -[Support](#-community-support) • -[License](#-license) - -[![Imgur](https://i.imgur.com/1aEnAyA.gif)](#) - -*** +

+ + + logo + + +

Unix-like Artifacts Collector

+ +

+ + + + + + + + +

+ +

+ Documentation + • + Main Features + • + Supported Operating Systems + • + Using UAC + • + Contributing + • + Support + • + License +

+ +

+ +## 🔎 About UAC + +UAC is a Live Response collection script for Incident Response that makes use of native binaries and tools to automate the collection of AIX, ESXi, FreeBSD, Linux, macOS, NetBSD, NetScaler, OpenBSD and Solaris systems artifacts. It was created to facilitate and speed up data collection, and depend less on remote support during incident response engagements. + +UAC reads YAML files on the fly and, based on their contents, collects relevant artifacts. This makes UAC very customizable and extensible. + +[![uac_collection](https://tclahr.github.io/uac-docs/img/uac_collection.gif)](#) ## 📘 Documentation Project documentation page: [https://tclahr.github.io/uac-docs](https://tclahr.github.io/uac-docs) -*** - ## 🌟 Main Features - Run everywhere with no dependencies (no installation required). - Customizable and extensible collections and artifacts. - Respect the order of volatility during artifact collection. -- Collect information from processes running without a binary on disk. +- Collect information about current running processes (including processes without a binary on disk). - Hash running processes and executable files. -- Extract information from files and directories to create a bodyfile (including enhanced file attributes for ext4). -- Collect user and system configuration files and logs. -- Collect artifacts from applications. +- Extract files and directories status to create a bodyfile. +- Collect system and user-specific data, configuration files and logs. - Acquire volatile memory from Linux systems using different methods and tools. -*** - ## 💾 Supported Operating Systems -UAC runs on any Unix-like system (regardless of the processor architecture). All UAC needs is shell :) +UAC runs on any Unix-like system, regardless of the processor architecture. 
All UAC needs is shell :) [![AIX](https://img.shields.io/static/v1?label=&message=AIX&color=brightgreen&style=for-the-badge)](#-supported-operating-systems) -[![Android](https://img.shields.io/static/v1?label=&message=Android&color=green&style=for-the-badge)](#-supported-operating-systems) [![ESXi](https://img.shields.io/static/v1?label=&message=ESXi&color=blue&style=for-the-badge)](#-supported-operating-systems) [![FreeBSD](https://img.shields.io/static/v1?label=&message=FreeBSD&color=red&style=for-the-badge)](#-supported-operating-systems) [![Linux](https://img.shields.io/static/v1?label=&message=Linux&color=lightgray&style=for-the-badge)](#-supported-operating-systems) @@ -56,71 +74,53 @@ UAC runs on any Unix-like system (regardless of the processor architecture). All *Note that UAC even runs on systems like Network Attached Storage (NAS) devices, Network devices such as OpenWrt, and IoT devices.* -*** - -## 🚀 Using UAC - -UAC does not need to be installed on the target system. You only need to download the latest version from the [releases page](https://github.com/tclahr/uac/releases), uncompress and run it. As simple as that! +## 🚀 Usage -A profile name and/or a list of artifacts, and the destination directory need to be provided to run a collection. The remaining parameters are optional. +UAC does not need to be installed on the target system. Simply download the latest version from the [releases page](https://github.com/tclahr/uac/releases), uncompress it, and launch. It's that simple! > **Full Disk Access** permission is a privacy feature introduced in macOS Mojave (10.14) that prevents some applications from accessing important data, such as Mail, Messages, and Safari files. 
So it is strongly recommended that you manually [grant permission for Terminal application](https://support.apple.com/guide/mac-help/allow-access-to-system-configuration-files-mchlccb25729/mac) before running UAC from the terminal, or [grant permission for remote users](https://support.apple.com/guide/mac-help/allow-a-remote-computer-to-access-your-mac-mchlp1066/mac) before running UAC via ssh. -Common usage scenarios may include the following: +To execute a collection, you must supply at least a profile and/or a list of artifacts, and specify the destination directory. Any additional parameters are optional. -**Collect all artifacts based on the ```full``` profile, and create the output file in ```/tmp```.** +Examples: -```shell -./uac -p full /tmp -``` - -**Collect all ```live_response```, and the ```bodyfile/bodyfile.yaml``` artifact, and create the output file in the current directory.** - -```shell -./uac -a live_response/\*,bodyfile/bodyfile.yaml . -``` - -**Collect all artifacts based on the ```full``` profile, but exclude the ```bodyfile/bodyfile.yaml``` artifact, and create the output file in ```/tmp```.** +Collect all artifacts based on the ir_triage profile, and save the output file to /tmp. ```shell -./uac -p full -a \!bodyfile/bodyfile.yaml /tmp +./uac -p ir_triage /tmp ``` -**Collect the memory dump, then all artifacts based on the ```full``` profile.** +Collect all artifacts located in the artifacts/live_response directory, and save the output file to /tmp. ```shell -./uac -a artifacts/memory_dump/avml.yaml -p full /tmp +./uac -a ./artifacts/live_response/\* /tmp ``` -**Collect the memory dump, then all artifacts based on the ```ir_triage``` profile excluding the ```bodyfile/bodyfile.yaml``` artifact.** +Collect all artifacts based on the ir_triage profile, along with all artifacts located in the /my_custom_artifacts directory, and save the output file to /mnt/sda1. 
```shell -./uac -a ./artifacts/memory_dump/avml.yaml -p ir_triage -a \!artifacts/bodyfile/bodyfile.yaml /tmp +./uac -p ir_triage -a /my_custom_artifacts/\* /mnt/sda1 ``` -**Collect all artifacts based on the ```full``` profile, but limit the data collection based on the date range provided.** +Collect a memory dump and all artifacts based on the full profile. ```shell -./uac -p full /tmp --date-range-start 2021-05-01 --date-range-end 2021-08-31 +./uac -a ./artifacts/memory_dump/avml.yaml -p full /tmp ``` -**Collect all but live response artifacts from a Linux disk image mounted in ```/mnt/ewf```.** +Collect all artifacts based on the ir_triage profile excluding the bodyfile/bodyfile.yaml artifact. ```shell -./uac -p full -a \!live_response/\* /tmp --mount-point /mnt/ewf --operating-system linux +./uac -p ir_triage -a \!artifacts/bodyfile/bodyfile.yaml /tmp ``` -Please check the [project documentation page](https://tclahr.github.io/uac-docs) for more information about command line options, how to create your own artifacts, profiles, and more! - -*** - ## 💙 Contributing -Have you created any artifact files? Please share them with us! +Contributions are what makes the open source community such an amazing place to learn, inspire, and create. Any contributions you make are greatly appreciated. -You can contribute with new artifacts, profiles, bug fixes or even propose new features. Please read our [Contributing Guide](CONTRIBUTING.md) before submitting a Pull Request to the project. +Have you created any artifacts? Please share them with us! -*** +You can contribute with new artifacts, profiles, bug fixes or even propose new features. Please read our [Contributing Guide](CONTRIBUTING.md) before submitting a Pull Request to the project. 
## 👨‍💻 Community Support @@ -130,8 +130,6 @@ For general help using UAC, please refer to the [project documentation page](htt - [GitHub](https://github.com/tclahr/uac/issues) (Bug reports and contributions) - [Twitter](https://twitter.com/tclahr) (Get the news fast) -*** - ## 📜 License -The UAC project uses the [Apache License Version 2.0](LICENSE) software license. \ No newline at end of file +The UAC project uses the [Apache License Version 2.0](LICENSE) software license. diff --git a/artifacts/bodyfile/bodyfile.yaml b/artifacts/bodyfile/bodyfile.yaml index d5e5f00d..3a62690b 100644 --- a/artifacts/bodyfile/bodyfile.yaml +++ b/artifacts/bodyfile/bodyfile.yaml @@ -1,10 +1,18 @@ -version: 2.0 +version: 3.0 +output_directory: /bodyfile artifacts: - description: Collect file stat information to create a bodyfile. - supported_os: [all] + supported_os: [aix, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: stat path: / exclude_file_system: [proc, procfs] - max_depth: 6 + output_file: bodyfile.txt + - + description: Collect file stat information to create a bodyfile. + supported_os: [macos] + collector: stat + path: / + exclude_path_pattern: ["/System/Volumes/Data", "/System/Volumes/Preboot", "/System/Volumes/Recovery", "/System/Volumes/VM"] + exclude_file_system: [proc, procfs] output_file: bodyfile.txt \ No newline at end of file diff --git a/artifacts/chkrootkit/chkrootkit.yaml b/artifacts/chkrootkit/chkrootkit.yaml index de199be9..9eb96483 100644 --- a/artifacts/chkrootkit/chkrootkit.yaml +++ b/artifacts/chkrootkit/chkrootkit.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "chkrootkit" +output_directory: /chkrootkit artifacts: - description: Locally checks for signs of a rootkit. 
@@ -12,4 +14,4 @@ artifacts: collector: command command: chkrootkit -n -x -r "%mount_point%" output_file: chkrootkit_-n_-r_-x.txt - \ No newline at end of file + diff --git a/artifacts/files/applications/anydesk.yaml b/artifacts/files/applications/anydesk.yaml index cb0b61b1..64c34766 100644 --- a/artifacts/files/applications/anydesk.yaml +++ b/artifacts/files/applications/anydesk.yaml @@ -22,7 +22,7 @@ artifacts: collector: file path: /%user_home%/ name_pattern: ["anydesk*.png"] - file_type: f + file_type: [f] max_depth: 4 exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/ name_pattern: ["*.anydesk"] - file_type: f + file_type: [f] max_depth: 4 exclude_nologin_users: true - diff --git a/artifacts/files/applications/dropbox.yaml b/artifacts/files/applications/dropbox.yaml index e2ec53e9..cf6b1271 100644 --- a/artifacts/files/applications/dropbox.yaml +++ b/artifacts/files/applications/dropbox.yaml @@ -5,6 +5,6 @@ artifacts: supported_os: [linux, macos] collector: file path: /%user_home%/.dropbox - file_type: f + file_type: [f] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/applications/steam.yaml b/artifacts/files/applications/steam.yaml index f207ef98..83d739c5 100644 --- a/artifacts/files/applications/steam.yaml +++ b/artifacts/files/applications/steam.yaml @@ -52,7 +52,7 @@ artifacts: collector: file path: /%user_home%/.local/share/Steam/config/htmlcache name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -100,7 +100,7 @@ artifacts: collector: file path: /%user_home%/Library/"Application Support"/Steam/config/htmlcache name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/applications/whatsapp.yaml b/artifacts/files/applications/whatsapp.yaml index a68b9306..418b88e9 100644 --- 
a/artifacts/files/applications/whatsapp.yaml +++ b/artifacts/files/applications/whatsapp.yaml @@ -1,4 +1,4 @@ -version: 1.0 +version: 2.0 artifacts: - description: Collect WhatsApp cache files. @@ -12,6 +12,12 @@ artifacts: collector: file path: /%user_home%/Library/"Application Support"/WhatsApp/"Local Storage"/leveldb exclude_nologin_users: true + - + description: Collect WhatsApp Desktop files. + supported_os: [macos] + collector: file + path: /%user_home%/Library/"Group Containers"/group.net.whatsapp.WhatsApp.shared + exclude_nologin_users: true # WhatsApp is a cloud-based application. All chats are in the cloud. In part, chats can be found on mobile devices. # diff --git a/artifacts/files/browsers/brave.yaml b/artifacts/files/browsers/brave.yaml index de88a52b..e69a6b5c 100644 --- a/artifacts/files/browsers/brave.yaml +++ b/artifacts/files/browsers/brave.yaml @@ -14,7 +14,7 @@ artifacts: collector: file path: /%user_home%/.config/BraveSoftware/Brave-Browser name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/.var/app/com.brave.Browser name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -48,7 +48,7 @@ artifacts: collector: file path: /%user_home%/snap/brave name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -65,7 +65,7 @@ artifacts: collector: file path: /%user_home%/Library/"Application Support"/BraveSoftware/Brave-Browser name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/browsers/chrome.yaml b/artifacts/files/browsers/chrome.yaml index e4db2ce2..33899be3 100644 --- a/artifacts/files/browsers/chrome.yaml +++ 
b/artifacts/files/browsers/chrome.yaml @@ -14,7 +14,7 @@ artifacts: collector: file path: /%user_home%/.config/google-chrome name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/.var/app/com.google.Chrome name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -48,7 +48,7 @@ artifacts: collector: file path: /%user_home%/Library/"Application Support"/Google/Chrome name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/browsers/chromium.yaml b/artifacts/files/browsers/chromium.yaml index 60035cc5..a63a255c 100644 --- a/artifacts/files/browsers/chromium.yaml +++ b/artifacts/files/browsers/chromium.yaml @@ -14,7 +14,7 @@ artifacts: collector: file path: /%user_home%/.var/app/org.chromium.Chromium name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/snap/chromium name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/browsers/edge.yaml b/artifacts/files/browsers/edge.yaml index 0c7f9719..6588f64f 100644 --- a/artifacts/files/browsers/edge.yaml +++ b/artifacts/files/browsers/edge.yaml @@ -14,7 +14,7 @@ artifacts: collector: file path: /%user_home%/.config/microsoft-edge name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/.var/app/com.microsoft.Edge name_pattern: ["Extensions", "File System", "Sessions"] - 
file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -48,7 +48,7 @@ artifacts: collector: file path: /%user_home%/Library/"Application Support"/"Microsoft Edge" name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/browsers/firefox.yaml b/artifacts/files/browsers/firefox.yaml index 3d0908dc..39a44b00 100644 --- a/artifacts/files/browsers/firefox.yaml +++ b/artifacts/files/browsers/firefox.yaml @@ -14,7 +14,7 @@ artifacts: collector: file path: /%user_home%/.mozilla/firefox name_pattern: ["bookmarkbackups", "sessionstore*"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/.var/app/org.mozilla.firefox name_pattern: ["bookmarkbackups", "sessionstore*"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -48,7 +48,7 @@ artifacts: collector: file path: /%user_home%/snap/firefox name_pattern: ["bookmarkbackups", "sessionstore*"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -65,7 +65,7 @@ artifacts: collector: file path: /%user_home%/Library/"Application Support"/Firefox name_pattern: ["bookmarkbackups", "sessionstore*"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/browsers/opera.yaml b/artifacts/files/browsers/opera.yaml index c46abdc4..c1d87069 100644 --- a/artifacts/files/browsers/opera.yaml +++ b/artifacts/files/browsers/opera.yaml @@ -14,7 +14,7 @@ artifacts: collector: file path: /%user_home%/.config/opera name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/.var/app/com.opera.Opera name_pattern: ["Extensions", "File 
System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -48,7 +48,7 @@ artifacts: collector: file path: /%user_home%/snap/opera name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -67,7 +67,7 @@ artifacts: # will match com.operasoftware.Opera and Opera path: /%user_home%/Library/"Application Support"/*Opera name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/browsers/safari.yaml b/artifacts/files/browsers/safari.yaml index fd234671..396614d3 100644 --- a/artifacts/files/browsers/safari.yaml +++ b/artifacts/files/browsers/safari.yaml @@ -22,7 +22,7 @@ artifacts: collector: file path: /%user_home%/Library/Safari* name_pattern: ["Favicon Cache", "Form Values"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - diff --git a/artifacts/files/browsers/vivaldi.yaml b/artifacts/files/browsers/vivaldi.yaml index a5ad0a24..edb288f0 100644 --- a/artifacts/files/browsers/vivaldi.yaml +++ b/artifacts/files/browsers/vivaldi.yaml @@ -14,7 +14,7 @@ artifacts: collector: file path: /%user_home%/.config/vivaldi name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -31,7 +31,7 @@ artifacts: collector: file path: /%user_home%/.var/app/com.vivaldi.Vivaldi name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true - @@ -48,6 +48,6 @@ artifacts: collector: file path: /%user_home%/Library/"Application Support"/Vivaldi name_pattern: ["Extensions", "File System", "Sessions"] - file_type: d + file_type: [d] ignore_date_range: true exclude_nologin_users: true diff --git a/artifacts/files/logs/additional_logs.yaml 
b/artifacts/files/logs/advanced_log_search.yaml similarity index 95% rename from artifacts/files/logs/additional_logs.yaml rename to artifacts/files/logs/advanced_log_search.yaml index 06faaca5..da72ee6d 100644 --- a/artifacts/files/logs/additional_logs.yaml +++ b/artifacts/files/logs/advanced_log_search.yaml @@ -1,4 +1,4 @@ -version: 1.0 +version: 2.0 artifacts: - description: Collect all log files and directories. diff --git a/artifacts/files/logs/openbsd.yaml b/artifacts/files/logs/relink.yaml similarity index 64% rename from artifacts/files/logs/openbsd.yaml rename to artifacts/files/logs/relink.yaml index 6124cceb..ba3a6df8 100644 --- a/artifacts/files/logs/openbsd.yaml +++ b/artifacts/files/logs/relink.yaml @@ -4,5 +4,5 @@ artifacts: description: Collect kernel relink log file. supported_os: [openbsd] collector: file - path: /usr/share/relink/kernel - path_pattern: ["*/relink.log"] + path: /usr/share/relink/kernel/relink.log + \ No newline at end of file diff --git a/artifacts/files/shell/history.yaml b/artifacts/files/shell/history.yaml index 5ebbfdbb..da6daa5b 100644 --- a/artifacts/files/shell/history.yaml +++ b/artifacts/files/shell/history.yaml @@ -1,4 +1,4 @@ -version: 2.1 +version: 3.0 artifacts: - description: Collect shell history files. @@ -8,6 +8,3 @@ artifacts: # lesshst: less command history file name_pattern: [".*_history", ".*history", "*.historynew", ".lesshst", ".zhistory", "fish_history"] max_depth: 4 - -# References: -# https://dfir.ch/posts/today_i_learned_zsh_sessions/ \ No newline at end of file diff --git a/artifacts/files/shell/sessions.yaml b/artifacts/files/shell/sessions.yaml index e59c1046..0ec57faf 100644 --- a/artifacts/files/shell/sessions.yaml +++ b/artifacts/files/shell/sessions.yaml @@ -1,4 +1,4 @@ -version: 1.1 +version: 2.0 artifacts: - description: Collect shell sessions files. 
@@ -8,5 +8,3 @@ artifacts: name_pattern: ["*.session", "*.sessions", ".*_sessions"] max_depth: 2 -# References: -# https://dfir.ch/posts/today_i_learned_zsh_sessions/ \ No newline at end of file diff --git a/artifacts/files/system/acct.yaml b/artifacts/files/system/acct.yaml index c27e8583..a9150cae 100644 --- a/artifacts/files/system/acct.yaml +++ b/artifacts/files/system/acct.yaml @@ -1,5 +1,7 @@ version: 1.0 artifacts: + # system accounting files, covering processes that terminated on the system, allowing one to see past program executions + # this is deactivated by default, but quite usefull when active - description: Collect system accounting files. supported_os: [freebsd, netbsd, openbsd] @@ -17,4 +19,4 @@ artifacts: supported_os: [freebsd, netbsd, openbsd] collector: file path: /var/account/savacct - ignore_date_range: true + ignore_date_range: true \ No newline at end of file diff --git a/artifacts/files/system/device_db.yaml b/artifacts/files/system/dev_db.yaml similarity index 91% rename from artifacts/files/system/device_db.yaml rename to artifacts/files/system/dev_db.yaml index ce25a399..d3e162d8 100644 --- a/artifacts/files/system/device_db.yaml +++ b/artifacts/files/system/dev_db.yaml @@ -9,4 +9,4 @@ artifacts: description: Collect database file used for device lookups. supported_os: [netbsd] collector: file - path: /var/run/dev.cdb + path: /var/run/dev.cdb \ No newline at end of file diff --git a/artifacts/files/system/dev_shm.yaml b/artifacts/files/system/dev_shm.yaml index 0f2a8bc0..305ce9e8 100644 --- a/artifacts/files/system/dev_shm.yaml +++ b/artifacts/files/system/dev_shm.yaml @@ -1,9 +1,9 @@ -version: 1.0 +version: 2.0 artifacts: - description: Collect system temporary files. 
supported_os: [all] collector: file path: /dev/shm - file_type: f - max_file_size: 5242880 # 5MB + file_type: [f] + max_file_size: 10485760 # 10MB diff --git a/artifacts/files/system/ds_store.yaml b/artifacts/files/system/ds_store.yaml index decb42f2..7f460a71 100644 --- a/artifacts/files/system/ds_store.yaml +++ b/artifacts/files/system/ds_store.yaml @@ -6,5 +6,5 @@ artifacts: collector: file path: / name_pattern: [".DS_Store"] - file_type: f + file_type: [f] \ No newline at end of file diff --git a/artifacts/files/system/etc.yaml b/artifacts/files/system/etc.yaml index f55e1caf..dd3292c8 100644 --- a/artifacts/files/system/etc.yaml +++ b/artifacts/files/system/etc.yaml @@ -2,16 +2,18 @@ version: 4.0 artifacts: - description: Collect system configuration files. - supported_os: [aix, android, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: file path: /etc exclude_name_pattern: ["shadow", "shadow-", "master.passwd", "spwd.db", "gshadow", "gshadow-"] + file_type: [f] ignore_date_range: true - description: Collect system configuration files. - supported_os: [aix, android, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: file path: /usr/local/etc + file_type: [f] ignore_date_range: true - description: Collect system configuration files. @@ -19,4 +21,5 @@ artifacts: collector: file path: /private/etc exclude_name_pattern: ["shadow", "shadow-"] + file_type: [f] ignore_date_range: true diff --git a/artifacts/files/system/locate_db.yaml b/artifacts/files/system/locate_db.yaml index 11d935fa..9eea7fb6 100644 --- a/artifacts/files/system/locate_db.yaml +++ b/artifacts/files/system/locate_db.yaml @@ -1,7 +1,10 @@ version: 1.0 artifacts: + # snapshot of file system paths that can be queried with "locate". 
+ # can be used to compare file system structure at the time of database update against current file system structure. - description: Collect database file used by locate command, representing a snapshot of the virtual file system accessible with minimal permissions. supported_os: [freebsd, netbsd, openbsd] collector: file path: /var/db/locate.database + \ No newline at end of file diff --git a/artifacts/files/system/netscaler.yaml b/artifacts/files/system/netscaler.yaml index 2dac0fd8..70c92d2b 100644 --- a/artifacts/files/system/netscaler.yaml +++ b/artifacts/files/system/netscaler.yaml @@ -1,4 +1,4 @@ -version: 2.0 +version: 3.0 artifacts: - description: Collect system configuration files. @@ -11,19 +11,19 @@ artifacts: supported_os: [netscaler] collector: file path: /var/vpn - max_file_size: 5242880 # 5 MB + max_file_size: 10485760 # 10MB - description: Collect files from /var/netscaler/logon. supported_os: [netscaler] collector: file path: /var/netscaler/logon - max_file_size: 5242880 # 5 MB + max_file_size: 10485760 # 10MB - description: Collect files from /netscaler/ns_gui. supported_os: [netscaler] collector: file path: /netscaler/ns_gui - max_file_size: 5242880 # 5 MB + max_file_size: 10485760 # 10MB # References: # https://www.mandiant.com/resources/blog/citrix-zero-day-espionage diff --git a/artifacts/files/system/run_shm.yaml b/artifacts/files/system/run_shm.yaml index e646b2b5..78ede603 100644 --- a/artifacts/files/system/run_shm.yaml +++ b/artifacts/files/system/run_shm.yaml @@ -1,10 +1,10 @@ -version: 1.0 +version: 3.0 artifacts: - description: Collect system temporary files. 
supported_os: [all] collector: file path: /run/shm - file_type: f - max_file_size: 5242880 # 5MB + file_type: [f] + max_file_size: 10485760 # 10MB diff --git a/artifacts/files/system/security_backups.yaml b/artifacts/files/system/security_backups.yaml index e828b5ef..75e57740 100644 --- a/artifacts/files/system/security_backups.yaml +++ b/artifacts/files/system/security_backups.yaml @@ -5,6 +5,6 @@ artifacts: supported_os: [freebsd, netbsd, openbsd] collector: file path: /var/backups - name_pattern: ["*.current", "*.backup", "*.current.sha256", "*.backup.sha256"] + name_pattern: ["*.backup", "*.backup.sha256", "*.current", "*.current.sha256"] exclude_name_pattern: ["master.passwd.current", "master.passwd.backup"] - ignore_date_range: true + ignore_date_range: true \ No newline at end of file diff --git a/artifacts/files/system/systemd.yaml b/artifacts/files/system/systemd.yaml index ce60ef21..c63483f6 100644 --- a/artifacts/files/system/systemd.yaml +++ b/artifacts/files/system/systemd.yaml @@ -17,7 +17,7 @@ artifacts: supported_os: [linux] collector: file path: /run/systemd/sessions - file_type: f + file_type: [f] - description: Collect systemd scope and transient timer files. supported_os: [linux] @@ -34,4 +34,4 @@ artifacts: description: Collect systemd per-user configuration. supported_os: [linux] collector: file - path: /%user_home%/.config/systemd + path: /%user_home%/.config/systemd \ No newline at end of file diff --git a/artifacts/files/system/tmp.yaml b/artifacts/files/system/tmp.yaml index 1d977b51..b9f050bf 100644 --- a/artifacts/files/system/tmp.yaml +++ b/artifacts/files/system/tmp.yaml @@ -1,16 +1,16 @@ -version: 3.0 +version: 4.0 artifacts: - description: Collect system temporary files. supported_os: [aix, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: file path: /tmp - file_type: f - max_file_size: 5242880 # 5MB + file_type: [f] + max_file_size: 10485760 # 10MB - description: Collect system temporary files. 
supported_os: [macos] collector: file path: /private/tmp - file_type: f - max_file_size: 5242880 # 5MB + file_type: [f] + max_file_size: 10485760 # 10MB diff --git a/artifacts/files/system/var_spool.yaml b/artifacts/files/system/var_spool.yaml index d7e588d6..02c0c75b 100644 --- a/artifacts/files/system/var_spool.yaml +++ b/artifacts/files/system/var_spool.yaml @@ -2,7 +2,7 @@ version: 2.0 artifacts: - description: Collect spool files. - supported_os: [aix, android, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: file path: /var/spool - diff --git a/artifacts/files/system/var_tmp.yaml b/artifacts/files/system/var_tmp.yaml index 3f9f2539..61a807b4 100644 --- a/artifacts/files/system/var_tmp.yaml +++ b/artifacts/files/system/var_tmp.yaml @@ -1,16 +1,16 @@ -version: 2.0 +version: 3.0 artifacts: - description: Collect system temporary files. supported_os: [aix, esxi, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: file path: /var/tmp - file_type: f - max_file_size: 5242880 # 5MB + file_type: [f] + max_file_size: 10485760 # 10MB - description: Collect system temporary files. supported_os: [macos] collector: file path: /private/var/tmp - file_type: f - max_file_size: 5242880 # 5MB + file_type: [f] + max_file_size: 10485760 # 10MB diff --git a/artifacts/hash_executables/hash_executables.yaml b/artifacts/hash_executables/hash_executables.yaml index 14078190..a059fb9a 100644 --- a/artifacts/hash_executables/hash_executables.yaml +++ b/artifacts/hash_executables/hash_executables.yaml @@ -1,43 +1,12 @@ -version: 1.0 +version: 2.0 +output_directory: /hash_executables artifacts: - - description: Find files that contain at least +x flag set for other. + description: Find files that contain the +x flag set. 
supported_os: [all] - collector: find - path: / - exclude_file_system: [proc, procfs] - file_type: f - max_depth: 4 - max_file_size: 3072000 - permissions: -001 - output_file: list_of_executable_files.txt - - - description: Find files that contain at least +x flag set for group. - supported_os: [all] - collector: find - path: / - exclude_file_system: [proc, procfs] - file_type: f - max_depth: 4 - max_file_size: 3072000 - permissions: -010 - output_file: list_of_executable_files.txt - - - description: Find files that contain at least +x flag set for owner. - supported_os: [all] - collector: find + collector: hash path: / exclude_file_system: [proc, procfs] - file_type: f - max_depth: 4 - max_file_size: 3072000 - permissions: -100 - output_file: list_of_executable_files.txt - - - description: Hash files that contain at least +x flag set for owner. - supported_os: [all] - collector: hash - path: list_of_executable_files.txt - is_file_list: true + file_type: [f] + permissions: [-001, -010, -100] output_file: hash_executables - \ No newline at end of file diff --git a/artifacts/live_response/containers/containerd.yaml b/artifacts/live_response/containers/containerd.yaml index 61c7e5f5..4a7936c1 100644 --- a/artifacts/live_response/containers/containerd.yaml +++ b/artifacts/live_response/containers/containerd.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "containerd" +output_directory: /live_response/containers artifacts: - description: Dump containerd configuration. diff --git a/artifacts/live_response/containers/docker.yaml b/artifacts/live_response/containers/docker.yaml index 99b60c6b..75315395 100644 --- a/artifacts/live_response/containers/docker.yaml +++ b/artifacts/live_response/containers/docker.yaml @@ -1,4 +1,6 @@ -version: 2.1 +version: 2.0 +condition: command_exists "docker" +output_directory: /live_response/containers artifacts: - description: List all containers. 
@@ -98,4 +100,3 @@ artifacts: foreach: docker container ps --all | sed 1d | awk '{print $1}' command: docker stats --no-stream --no-trunc %line% output_file: docker_stats_%line%.txt - \ No newline at end of file diff --git a/artifacts/live_response/containers/lxc.yaml b/artifacts/live_response/containers/lxc.yaml index 89bb4a6a..274b338c 100644 --- a/artifacts/live_response/containers/lxc.yaml +++ b/artifacts/live_response/containers/lxc.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lxc" || command_exists "lxc-ls" +output_directory: /live_response/containers artifacts: - description: List all containers and virtual machines. @@ -77,4 +79,17 @@ artifacts: foreach: lxc storage list --format compact | sed 1d | awk '{print $1}' command: lxc storage show %line% output_file: lxc_storage_show_%line%.txt - \ No newline at end of file + - + description: List the containers existing on the system.. + supported_os: [linux] + collector: command + command: lxc-ls -f + output_file: lxc-ls_-f.txt + - + description: Show information about all containers. + supported_os: [linux] + collector: command + foreach: lxc-ls -1 + command: lxc-info -i -p -S -s %line% + output_file: lxc-info_-i_-p_-S_-s_%line%.txt + \ No newline at end of file diff --git a/artifacts/live_response/containers/pct.yaml b/artifacts/live_response/containers/pct.yaml index e215c203..0ec43682 100644 --- a/artifacts/live_response/containers/pct.yaml +++ b/artifacts/live_response/containers/pct.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pct" +output_directory: /live_response/containers artifacts: - description: List all Proxmox VE active and inactive LXC containers. 
diff --git a/artifacts/live_response/containers/podman.yaml b/artifacts/live_response/containers/podman.yaml index 5bfbead2..7f4b2ebe 100644 --- a/artifacts/live_response/containers/podman.yaml +++ b/artifacts/live_response/containers/podman.yaml @@ -1,4 +1,6 @@ version: 2.0 +condition: command_exists "podman" +output_directory: /live_response/containers artifacts: - description: List all containers. @@ -25,7 +27,7 @@ artifacts: command: podman version output_file: podman_version.txt - - description: Display a live stream of one or more container’s resource usage statistics. + description: Display a live stream of one or more container's resource usage statistics. supported_os: [linux] collector: command command: podman stats --all --no-stream diff --git a/artifacts/live_response/hardware/alog.yaml b/artifacts/live_response/hardware/alog.yaml index 5011d586..c390f8bb 100644 --- a/artifacts/live_response/hardware/alog.yaml +++ b/artifacts/live_response/hardware/alog.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "alog" +output_directory: /live_response/hardware artifacts: - description: Print boot log. diff --git a/artifacts/live_response/hardware/bootlist.yaml b/artifacts/live_response/hardware/bootlist.yaml index 7610eb98..87bd3436 100644 --- a/artifacts/live_response/hardware/bootlist.yaml +++ b/artifacts/live_response/hardware/bootlist.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "bootlist" +output_directory: /live_response/hardware artifacts: - description: Display the list of boot devices available to the system. 
diff --git a/artifacts/live_response/hardware/cfgadm.yaml b/artifacts/live_response/hardware/cfgadm.yaml index 7366ae82..cf1d8cee 100644 --- a/artifacts/live_response/hardware/cfgadm.yaml +++ b/artifacts/live_response/hardware/cfgadm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "cfgadm" +output_directory: /live_response/hardware artifacts: - description: Display configuration administration status information. diff --git a/artifacts/live_response/hardware/cpuinfo.yaml b/artifacts/live_response/hardware/cpuinfo.yaml index 6ddddcda..bf118e4f 100644 --- a/artifacts/live_response/hardware/cpuinfo.yaml +++ b/artifacts/live_response/hardware/cpuinfo.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "cpuinfo" +output_directory: /live_response/hardware artifacts: - description: Display CPU information. - supported_os: [android, linux, netbsd] + supported_os: [linux, netbsd] collector: command command: cat /proc/cpuinfo output_file: cpuinfo.txt diff --git a/artifacts/live_response/hardware/devinfo.yaml b/artifacts/live_response/hardware/devinfo.yaml index 5f0f6670..cb658f1b 100644 --- a/artifacts/live_response/hardware/devinfo.yaml +++ b/artifacts/live_response/hardware/devinfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "devinfo" +output_directory: /live_response/hardware artifacts: - description: Display information about system device configuration. diff --git a/artifacts/live_response/hardware/dmesg.yaml b/artifacts/live_response/hardware/dmesg.yaml index e4dbcab0..5050acbb 100644 --- a/artifacts/live_response/hardware/dmesg.yaml +++ b/artifacts/live_response/hardware/dmesg.yaml @@ -1,20 +1,23 @@ -version: 1.1 +version: 3.0 +condition: command_exists "dmesg" +output_directory: /live_response/hardware artifacts: - description: Display the system/kernel message buffer. 
- supported_os: [android, freebsd, linux, macos, netbsd, netscaler, openbsd, solaris] + supported_os: [freebsd, linux, macos, netbsd, netscaler, openbsd, solaris] collector: command command: dmesg output_file: dmesg.txt - - - description: Display the console message buffer. - supported_os: [openbsd] - collector: command - command: dmesg -s - output_file: dmesg_-s.txt - description: Display all data from the message buffer, including syslog records and console output. - supported_os: [freebsd] + supported_os: [esxi, freebsd, netscaler, solaris] collector: command command: dmesg -a output_file: dmesg_-a.txt + - + description: Display the console message buffer. + supported_os: [esxi, openbsd, solaris] + collector: command + command: dmesg -s + output_file: dmesg_-s.txt + \ No newline at end of file diff --git a/artifacts/live_response/hardware/dmidecode.yaml b/artifacts/live_response/hardware/dmidecode.yaml index e0f30544..43abf8a2 100644 --- a/artifacts/live_response/hardware/dmidecode.yaml +++ b/artifacts/live_response/hardware/dmidecode.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "dmidecode" +output_directory: /live_response/hardware artifacts: - description: Display the computer's DMI (some say SMBIOS) table contents in a human-readable format. diff --git a/artifacts/live_response/hardware/esxcli.yaml b/artifacts/live_response/hardware/esxcli.yaml index 22cb5da7..d6c0635c 100644 --- a/artifacts/live_response/hardware/esxcli.yaml +++ b/artifacts/live_response/hardware/esxcli.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "esxcli" +output_directory: /live_response/hardware artifacts: - description: List all of the CPUs on this host. 
diff --git a/artifacts/live_response/hardware/hostinfo.yaml b/artifacts/live_response/hardware/hostinfo.yaml index f7ad97e1..a340745f 100644 --- a/artifacts/live_response/hardware/hostinfo.yaml +++ b/artifacts/live_response/hardware/hostinfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "hostinfo" +output_directory: /live_response/hardware artifacts: - description: Display kernel version description, processor configuration data, available physical memory, and various scheduling statistics. diff --git a/artifacts/live_response/hardware/hwinfo.yaml b/artifacts/live_response/hardware/hwinfo.yaml index f1957645..46a8424f 100644 --- a/artifacts/live_response/hardware/hwinfo.yaml +++ b/artifacts/live_response/hardware/hwinfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "hwinfo" +output_directory: /live_response/hardware artifacts: - description: Display hardware information. diff --git a/artifacts/live_response/hardware/ioreg.yaml b/artifacts/live_response/hardware/ioreg.yaml index 048be113..3df24e38 100644 --- a/artifacts/live_response/hardware/ioreg.yaml +++ b/artifacts/live_response/hardware/ioreg.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ioreg" +output_directory: /live_response/hardware artifacts: - description: Display I/O Kit registry. diff --git a/artifacts/live_response/hardware/lscpu.yaml b/artifacts/live_response/hardware/lscpu.yaml index 79e02c32..026b0737 100644 --- a/artifacts/live_response/hardware/lscpu.yaml +++ b/artifacts/live_response/hardware/lscpu.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lscpu" +output_directory: /live_response/hardware artifacts: - description: Display information about the CPU architecture. 
diff --git a/artifacts/live_response/hardware/lsdev.yaml b/artifacts/live_response/hardware/lsdev.yaml index 89ac43a5..c718b528 100644 --- a/artifacts/live_response/hardware/lsdev.yaml +++ b/artifacts/live_response/hardware/lsdev.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsdev" +output_directory: /live_response/hardware artifacts: - description: Display devices in the system and their characteristics. diff --git a/artifacts/live_response/hardware/lshw.yaml b/artifacts/live_response/hardware/lshw.yaml index 27578949..3ad54b8a 100644 --- a/artifacts/live_response/hardware/lshw.yaml +++ b/artifacts/live_response/hardware/lshw.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lshw" +output_directory: /live_response/hardware artifacts: - description: Display hardware information. diff --git a/artifacts/live_response/hardware/lspci.yaml b/artifacts/live_response/hardware/lspci.yaml index d56e3306..d9d02fd2 100644 --- a/artifacts/live_response/hardware/lspci.yaml +++ b/artifacts/live_response/hardware/lspci.yaml @@ -1,17 +1,13 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lspci" +output_directory: /live_response/hardware artifacts: - description: List all PCI devices. - supported_os: [android, linux] + supported_os: [linux] collector: command command: lspci output_file: lspci.txt - - - description: List all PCI devices including the kernel driver. - supported_os: [android] - collector: command - command: lspci -e -k - output_file: lspci_-e_-k.txt - description: List all PCI devices. 
supported_os: [linux] diff --git a/artifacts/live_response/hardware/lsscsi.yaml b/artifacts/live_response/hardware/lsscsi.yaml index 8f6ad6b0..06593bfd 100644 --- a/artifacts/live_response/hardware/lsscsi.yaml +++ b/artifacts/live_response/hardware/lsscsi.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsscsi" +output_directory: /live_response/hardware artifacts: - description: Display SCSI devices (or hosts) and their attributes. diff --git a/artifacts/live_response/hardware/lsusb.yaml b/artifacts/live_response/hardware/lsusb.yaml index 2b5b9b19..211d641f 100644 --- a/artifacts/live_response/hardware/lsusb.yaml +++ b/artifacts/live_response/hardware/lsusb.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsusb" +output_directory: /live_response/hardware artifacts: - description: Display information about USB devices. - supported_os: [android, linux] + supported_os: [linux] collector: command command: lsusb output_file: lsusb.txt diff --git a/artifacts/live_response/hardware/mpstat.yaml b/artifacts/live_response/hardware/mpstat.yaml index 0ac7b5ca..f5b15e98 100644 --- a/artifacts/live_response/hardware/mpstat.yaml +++ b/artifacts/live_response/hardware/mpstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "mpstat" +output_directory: /live_response/hardware artifacts: - description: Collect and display performance statistics for all logical processors in the system. diff --git a/artifacts/live_response/hardware/nvram.yaml b/artifacts/live_response/hardware/nvram.yaml index f996d728..512fd713 100644 --- a/artifacts/live_response/hardware/nvram.yaml +++ b/artifacts/live_response/hardware/nvram.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "nvram" +output_directory: /live_response/hardware artifacts: - description: Print all of the firmware variables. 
diff --git a/artifacts/live_response/hardware/pciconf.yaml b/artifacts/live_response/hardware/pciconf.yaml index 7b5995f9..e4bbe261 100644 --- a/artifacts/live_response/hardware/pciconf.yaml +++ b/artifacts/live_response/hardware/pciconf.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pciconf" +output_directory: /live_response/hardware artifacts: - description: Display PCI devices information. diff --git a/artifacts/live_response/hardware/pcidump.yaml b/artifacts/live_response/hardware/pcidump.yaml index 6f757d80..2b3df14a 100644 --- a/artifacts/live_response/hardware/pcidump.yaml +++ b/artifacts/live_response/hardware/pcidump.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pcidump" +output_directory: /live_response/hardware artifacts: - description: Display PCI device data. diff --git a/artifacts/live_response/hardware/prtconf.yaml b/artifacts/live_response/hardware/prtconf.yaml index 69d8b018..e74d816c 100644 --- a/artifacts/live_response/hardware/prtconf.yaml +++ b/artifacts/live_response/hardware/prtconf.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "prtconf" +output_directory: /live_response/hardware artifacts: - description: Display system configuration information. diff --git a/artifacts/live_response/hardware/psrinfo.yaml b/artifacts/live_response/hardware/psrinfo.yaml index d9ddd2b5..fa9b0959 100644 --- a/artifacts/live_response/hardware/psrinfo.yaml +++ b/artifacts/live_response/hardware/psrinfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "psrinfo" +output_directory: /live_response/hardware artifacts: - description: Display information about processors. 
diff --git a/artifacts/live_response/hardware/smbios.yaml b/artifacts/live_response/hardware/smbios.yaml index 48c9e5e3..3cfa6ca6 100644 --- a/artifacts/live_response/hardware/smbios.yaml +++ b/artifacts/live_response/hardware/smbios.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "smbios" +output_directory: /live_response/hardware artifacts: - description: Display the contents of the System Management BIOS image. diff --git a/artifacts/live_response/hardware/systemstats.yaml b/artifacts/live_response/hardware/systemstats.yaml index 59e47883..deaba042 100644 --- a/artifacts/live_response/hardware/systemstats.yaml +++ b/artifacts/live_response/hardware/systemstats.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "systemstats" +output_directory: /live_response/hardware artifacts: - description: Display a selection of statistics about system power usage. diff --git a/artifacts/live_response/hardware/usbconfig.yaml b/artifacts/live_response/hardware/usbconfig.yaml index c1a46877..915bf085 100644 --- a/artifacts/live_response/hardware/usbconfig.yaml +++ b/artifacts/live_response/hardware/usbconfig.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "usbconfig" +output_directory: /live_response/hardware artifacts: - description: Display all connected USB devices and their attached interface drivers. diff --git a/artifacts/live_response/hardware/usbdevs.yaml b/artifacts/live_response/hardware/usbdevs.yaml index aeafcda8..c6247581 100644 --- a/artifacts/live_response/hardware/usbdevs.yaml +++ b/artifacts/live_response/hardware/usbdevs.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "usbdevs" +output_directory: /live_response/hardware artifacts: - description: Display USB devices connected to the system. 
diff --git a/artifacts/live_response/network/arp.yaml b/artifacts/live_response/network/arp.yaml index 3d0d102b..9985bf94 100644 --- a/artifacts/live_response/network/arp.yaml +++ b/artifacts/live_response/network/arp.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "arp" +output_directory: /live_response/network artifacts: - description: Collect the kernel's IPv4 network neighbour cache. diff --git a/artifacts/live_response/network/esxcli.yaml b/artifacts/live_response/network/esxcli.yaml index 28547a18..7aa77f8e 100644 --- a/artifacts/live_response/network/esxcli.yaml +++ b/artifacts/live_response/network/esxcli.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "esxcli" +output_directory: /live_response/network artifacts: - description: List active TCP/IP connections. diff --git a/artifacts/live_response/network/firewall-cmd.yaml b/artifacts/live_response/network/firewall-cmd.yaml index a6344021..e884ef73 100644 --- a/artifacts/live_response/network/firewall-cmd.yaml +++ b/artifacts/live_response/network/firewall-cmd.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "firewall-cmd" +output_directory: /live_response/network artifacts: - description: Collect currently active zones altogether with interfaces and sources used in these zones. @@ -29,4 +31,4 @@ artifacts: supported_os: [linux] collector: command command: firewall-cmd --list-all-zones - output_file: firewall-cmd_--list-all-zones.txt \ No newline at end of file + output_file: firewall-cmd_--list-all-zones.txt diff --git a/artifacts/live_response/network/hostname.yaml b/artifacts/live_response/network/hostname.yaml index b3d1bbcd..07223f66 100644 --- a/artifacts/live_response/network/hostname.yaml +++ b/artifacts/live_response/network/hostname.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/network artifacts: - description: Collect the system's host name. 
@@ -17,6 +18,7 @@ artifacts: supported_os: [linux] collector: command command: hostnamectl + condition: command_exists "hostnamectl" output_file: hostnamectl.txt - description: Collect the system's host name. diff --git a/artifacts/live_response/network/ifconfig.yaml b/artifacts/live_response/network/ifconfig.yaml index f8356d05..207a5705 100644 --- a/artifacts/live_response/network/ifconfig.yaml +++ b/artifacts/live_response/network/ifconfig.yaml @@ -1,8 +1,10 @@ -version: 2.0 +version: 3.0 +condition: command_exists "ifconfig" +output_directory: /live_response/network artifacts: - description: Collect information about all interfaces which are currently available, even if down. - supported_os: [android, aix, freebsd, linux, openbsd, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, openbsd, netbsd, netscaler, solaris] collector: command command: ifconfig -a output_file: ifconfig_-a.txt \ No newline at end of file diff --git a/artifacts/live_response/network/inetadm.yaml b/artifacts/live_response/network/inetadm.yaml index 8a0ebb37..77b5eee7 100644 --- a/artifacts/live_response/network/inetadm.yaml +++ b/artifacts/live_response/network/inetadm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "inetadm" +output_directory: /live_response/network artifacts: - description: Collect information about inetd services. diff --git a/artifacts/live_response/network/ip.yaml b/artifacts/live_response/network/ip.yaml index 1429ba7b..b1a923fd 100644 --- a/artifacts/live_response/network/ip.yaml +++ b/artifacts/live_response/network/ip.yaml @@ -1,26 +1,28 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ip" +output_directory: /live_response/network artifacts: - description: Collect addresses assigned to all network interfaces. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ip addr show output_file: ip_addr_show.txt - description: Collect information about network devices. 
- supported_os: [android, linux] + supported_os: [linux] collector: command command: ip link show output_file: ip_link_show.txt - description: Collect neightbour entries. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ip neighbor show output_file: ip_neighbor_show.txt - description: Collect the kernel routing tables. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ip route show output_file: ip_route_show.txt \ No newline at end of file diff --git a/artifacts/live_response/network/ip6tables.yaml b/artifacts/live_response/network/ip6tables.yaml index f7e67200..dbed6416 100644 --- a/artifacts/live_response/network/ip6tables.yaml +++ b/artifacts/live_response/network/ip6tables.yaml @@ -1,14 +1,16 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ip6tables" +output_directory: /live_response/network artifacts: - description: Collect firewall rules from all chains with numerical addresses instead of trying to determine symbolic host, port or user names. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ip6tables -L -v -n output_file: iptables_-L_-v_-n.txt - description: Collect firewall rules from nat chain with numerical addresses instead of trying to determine symbolic host, port or user names. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ip6tables -t nat -L -v -n output_file: iptables_-t_nat_-L_-v_-n.txt \ No newline at end of file diff --git a/artifacts/live_response/network/ipfstat.yaml b/artifacts/live_response/network/ipfstat.yaml index 8156624c..e4d07a88 100644 --- a/artifacts/live_response/network/ipfstat.yaml +++ b/artifacts/live_response/network/ipfstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ipfstat" +output_directory: /live_response/network artifacts: - description: Collect packet filter statistics and filter list. 
diff --git a/artifacts/live_response/network/ipfw.yaml b/artifacts/live_response/network/ipfw.yaml index 1bb5e430..4f377823 100644 --- a/artifacts/live_response/network/ipfw.yaml +++ b/artifacts/live_response/network/ipfw.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ipfw" +output_directory: /live_response/network artifacts: - description: List all firewall entries. diff --git a/artifacts/live_response/network/iptables.yaml b/artifacts/live_response/network/iptables.yaml index 4b4b6b08..40ec3cbf 100644 --- a/artifacts/live_response/network/iptables.yaml +++ b/artifacts/live_response/network/iptables.yaml @@ -1,14 +1,16 @@ -version: 2.0 +version: 3.0 +condition: command_exists "iptables" +output_directory: /live_response/network artifacts: - description: Collect firewall rules from all chains with numerical addresses instead of trying to determine symbolic host, port or user names. - supported_os: [android, linux] + supported_os: [linux] collector: command command: iptables -L -v -n output_file: iptables_-L_-v_-n.txt - description: Collect firewall rules from nat chain with numerical addresses instead of trying to determine symbolic host, port or user names. - supported_os: [android, linux] + supported_os: [linux] collector: command command: iptables -t nat -L -v -n output_file: iptables_-t_nat_-L_-v_-n.txt \ No newline at end of file diff --git a/artifacts/live_response/network/lsdev.yaml b/artifacts/live_response/network/lsdev.yaml index f41c239b..77e92bc8 100644 --- a/artifacts/live_response/network/lsdev.yaml +++ b/artifacts/live_response/network/lsdev.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsdev" +output_directory: /live_response/network artifacts: - description: Collect information about tcpip class devices in the system and their characteristics. 
diff --git a/artifacts/live_response/network/lsof.yaml b/artifacts/live_response/network/lsof.yaml index 67a239aa..d57e2fa8 100644 --- a/artifacts/live_response/network/lsof.yaml +++ b/artifacts/live_response/network/lsof.yaml @@ -1,4 +1,6 @@ -version: 3.0 +version: 4.0 +condition: command_exists "lsof" +output_directory: /live_response/network artifacts: - description: Collect the listing of all Internet network files. diff --git a/artifacts/live_response/network/lssrc.yaml b/artifacts/live_response/network/lssrc.yaml index 28d71a32..115c1017 100644 --- a/artifacts/live_response/network/lssrc.yaml +++ b/artifacts/live_response/network/lssrc.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lssrc" +output_directory: /live_response/network artifacts: - description: Display status of the subsystem, group of subsystems, or subserver. diff --git a/artifacts/live_response/network/macos_network_preferences.yaml b/artifacts/live_response/network/macos_network_preferences.yaml index 7acbbb28..5704d585 100644 --- a/artifacts/live_response/network/macos_network_preferences.yaml +++ b/artifacts/live_response/network/macos_network_preferences.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/network artifacts: - description: Collect network preferences. diff --git a/artifacts/live_response/network/ndp.yaml b/artifacts/live_response/network/ndp.yaml index d7378459..803a13d9 100644 --- a/artifacts/live_response/network/ndp.yaml +++ b/artifacts/live_response/network/ndp.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ndp" +output_directory: /live_response/network artifacts: - description: Collect the kernel's IPv6 network neighbour cache. 
diff --git a/artifacts/live_response/network/netstat.yaml b/artifacts/live_response/network/netstat.yaml index f70fc35e..c44d40db 100644 --- a/artifacts/live_response/network/netstat.yaml +++ b/artifacts/live_response/network/netstat.yaml @@ -1,32 +1,34 @@ -version: 2.0 +version: 3.0 +condition: command_exists "netstat" +output_directory: /live_response/network artifacts: - description: Collect both listening and non-listening (for TCP this means established connections) sockets. - supported_os: [android, aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: command command: netstat -a output_file: netstat_-a.txt - description: Collect both listening and non-listening (for TCP this means established connections) sockets with numerical addresses instead of trying to determine symbolic host, port or user names. - supported_os: [android, aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: command command: netstat -an output_file: netstat_-an.txt - description: Collect a table of all network interfaces. - supported_os: [android, aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: command command: netstat -i output_file: netstat_-i.txt - description: Collect the kernel routing tables. - supported_os: [android, aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: command command: netstat -r output_file: netstat_-r.txt - description: Collect the kernel routing tables with numerical addresses instead of trying to determine symbolic host, port or user names. 
- supported_os: [android, aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: command command: netstat -rn output_file: netstat_-rn.txt @@ -50,13 +52,13 @@ artifacts: output_file: netstat_-v.txt - description: Collect both listening and non-listening (for TCP this means established connections) sockets with numerical addresses instead of trying to determine symbolic host, port or user names, and show the PID and name of the program to which each socket belongs. - supported_os: [android, linux, solaris] + supported_os: [linux, solaris] collector: command command: netstat -anp output_file: netstat_-anp.txt - description: Collect extended information about listening and non-listening (for TCP this means established connections) sockets, the PID and name of the program to which each socket belongs with numerical addresses instead of trying to determine symbolic host, port or user names. - supported_os: [android, linux] + supported_os: [linux] collector: command command: netstat -lpeanut - output_file: netstat_-lpeanut.txt + output_file: netstat_-lpeanut.txt \ No newline at end of file diff --git a/artifacts/live_response/network/nfso.yaml b/artifacts/live_response/network/nfso.yaml index e9c9c1cb..97a7b52e 100644 --- a/artifacts/live_response/network/nfso.yaml +++ b/artifacts/live_response/network/nfso.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "nfso" +output_directory: /live_response/network artifacts: - description: Collect the the current, reboot (when used in conjunction with -r) or permanent (when used in conjunction with -p) value for all tunable parameters, one per line in pairs Tunable = Value. 
diff --git a/artifacts/live_response/network/nfsstat.yaml b/artifacts/live_response/network/nfsstat.yaml index bce58b49..5983bb07 100644 --- a/artifacts/live_response/network/nfsstat.yaml +++ b/artifacts/live_response/network/nfsstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "nfsstat" +output_directory: /live_response/network artifacts: - description: Collect statistics kept about NFS client and server activity. diff --git a/artifacts/live_response/network/nft.yaml b/artifacts/live_response/network/nft.yaml index 31fe1c0b..a2bdd826 100644 --- a/artifacts/live_response/network/nft.yaml +++ b/artifacts/live_response/network/nft.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "nft" +output_directory: /live_response/network artifacts: - description: Collect complete nftables ruleset. diff --git a/artifacts/live_response/network/pfctl.yaml b/artifacts/live_response/network/pfctl.yaml index c1955c5e..04f4d4cf 100644 --- a/artifacts/live_response/network/pfctl.yaml +++ b/artifacts/live_response/network/pfctl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pfctl" +output_directory: /live_response/network artifacts: - description: Collect all packet filter information. diff --git a/artifacts/live_response/network/scutil.yaml b/artifacts/live_response/network/scutil.yaml index 1f64a003..e41acccf 100644 --- a/artifacts/live_response/network/scutil.yaml +++ b/artifacts/live_response/network/scutil.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "scutil" +output_directory: /live_response/network artifacts: - description: Collect DNS configuration. 
diff --git a/artifacts/live_response/network/showmount.yaml b/artifacts/live_response/network/showmount.yaml index 1a6da661..5e1b7a22 100644 --- a/artifacts/live_response/network/showmount.yaml +++ b/artifacts/live_response/network/showmount.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "showmount" +output_directory: /live_response/network artifacts: - description: Collect both the client hostname or IP address and mounted directory in host:dir format. diff --git a/artifacts/live_response/network/sockstat.yaml b/artifacts/live_response/network/sockstat.yaml index c853255a..5ac74714 100644 --- a/artifacts/live_response/network/sockstat.yaml +++ b/artifacts/live_response/network/sockstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "sockstat" +output_directory: /live_response/network artifacts: - description: Collect the list of open sockets. diff --git a/artifacts/live_response/network/ss.yaml b/artifacts/live_response/network/ss.yaml index 40297214..9d792726 100644 --- a/artifacts/live_response/network/ss.yaml +++ b/artifacts/live_response/network/ss.yaml @@ -1,62 +1,64 @@ -version: 2.0 +version: 3.0 +condition: command_exists "ss" +output_directory: /live_response/network artifacts: - description: Display both listening and non-listening (for TCP this means established connections) sockets with numerical addresses instead of trying to determine symbolic host, port or user names, and show the PID and name of the program to which each socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -anp output_file: ss_-anp.txt - description: Display both listening and non-listening (for TCP this means established connections) sockets, and show the PID and name of the program to which each socket belongs. 
- supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -ap output_file: ss_-ap.txt - description: Display both listening and non-listening TCP sockets only with numerical addresses instead of trying to determine symbolic host, and show the PID of the program to which socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -tanp output_file: ss_-tanp.txt - description: Display both listening and non-listening TCP sockets only, and show the PID of the program to which socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -tap output_file: ss_-tap.txt - description: Display both listening and non-listening UDP sockets only with numerical addresses instead of trying to determine symbolic host, and show the PID of the program to which socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -uanp output_file: ss_-uanp.txt - description: Display both listening and non-listening UDP sockets only, and show the PID of the program to which socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -uap output_file: ss_-uap.txt - description: Display listening TCP sockets only, and show the PID of the program to which socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -tlp output_file: ss_-tlp.txt - description: Display listening TCP sockets only with numerical addresses instead of trying to determine symbolic host, and show the PID of the program to which socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -tlnp output_file: ss_-tlnp.txt - description: Display listening UDP sockets only, and show the PID of the program to which socket belongs. 
- supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -ulp output_file: ss_-ulp.txt - description: Display listening UDP sockets only with numerical addresses instead of trying to determine symbolic host, and show the PID of the program to which socket belongs. - supported_os: [android, linux] + supported_os: [linux] collector: command command: ss -ulnp output_file: ss_-ulnp.txt \ No newline at end of file diff --git a/artifacts/live_response/network/ufw.yaml b/artifacts/live_response/network/ufw.yaml index 3a7a6181..8e3d728f 100644 --- a/artifacts/live_response/network/ufw.yaml +++ b/artifacts/live_response/network/ufw.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ufw" +output_directory: /live_response/network artifacts: - description: Show status of firewall and ufw managed rules. diff --git a/artifacts/live_response/network/vim-cmd.yaml b/artifacts/live_response/network/vim-cmd.yaml index c8889f8f..a0673ad2 100644 --- a/artifacts/live_response/network/vim-cmd.yaml +++ b/artifacts/live_response/network/vim-cmd.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vim-cmd" +output_directory: /live_response/network artifacts: - description: Get the network configuration. diff --git a/artifacts/live_response/packages/brew.yaml b/artifacts/live_response/packages/brew.yaml index 126ab2e5..7061ed0f 100644 --- a/artifacts/live_response/packages/brew.yaml +++ b/artifacts/live_response/packages/brew.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "brew" +output_directory: /live_response/packages artifacts: - description: Display installed packages. 
diff --git a/artifacts/live_response/packages/dnf.yaml b/artifacts/live_response/packages/dnf.yaml index 4f22cd0e..a43421ec 100644 --- a/artifacts/live_response/packages/dnf.yaml +++ b/artifacts/live_response/packages/dnf.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "dnf" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/dpkg.yaml b/artifacts/live_response/packages/dpkg.yaml index bb988472..f150ec6d 100644 --- a/artifacts/live_response/packages/dpkg.yaml +++ b/artifacts/live_response/packages/dpkg.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "dpkg" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/equo.yaml b/artifacts/live_response/packages/equo.yaml index 5a3ef865..70c6b8df 100644 --- a/artifacts/live_response/packages/equo.yaml +++ b/artifacts/live_response/packages/equo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "equo" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/esxcli.yaml b/artifacts/live_response/packages/esxcli.yaml index 2a10bf0d..47e46cd7 100644 --- a/artifacts/live_response/packages/esxcli.yaml +++ b/artifacts/live_response/packages/esxcli.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "esxcli" +output_directory: /live_response/packages artifacts: - description: Lists the installed VIB packages. 
diff --git a/artifacts/live_response/packages/flatpak.yaml b/artifacts/live_response/packages/flatpak.yaml index 873e1632..752c9143 100644 --- a/artifacts/live_response/packages/flatpak.yaml +++ b/artifacts/live_response/packages/flatpak.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "flatpak" +output_directory: /live_response/packages artifacts: - description: Display installed FlatPak packages. diff --git a/artifacts/live_response/packages/instfix.yaml b/artifacts/live_response/packages/instfix.yaml index f32a1f3a..c49399ff 100644 --- a/artifacts/live_response/packages/instfix.yaml +++ b/artifacts/live_response/packages/instfix.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "instfix" +output_directory: /live_response/packages artifacts: - description: Displays information about each fileset associated with a fix or keyword. Also display filesets that are not installed. diff --git a/artifacts/live_response/packages/ipkg.yaml b/artifacts/live_response/packages/ipkg.yaml index c3158248..12744b0c 100644 --- a/artifacts/live_response/packages/ipkg.yaml +++ b/artifacts/live_response/packages/ipkg.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ipkg" +output_directory: /live_response/packages artifacts: - description: Display installed packages. 
@@ -12,4 +14,4 @@ artifacts: collector: command command: ipkg list_installed output_file: ipkg_list_installed.txt - \ No newline at end of file + diff --git a/artifacts/live_response/packages/lppchk.yaml b/artifacts/live_response/packages/lppchk.yaml index 007478db..b1f7465f 100644 --- a/artifacts/live_response/packages/lppchk.yaml +++ b/artifacts/live_response/packages/lppchk.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lppchk" +output_directory: /live_response/packages artifacts: - description: Verifies that all software products installed on the / (root) file system are also installed on the /usr file system and, conversely, all the software products installed in the /usr file system are also installed on the / (root) file system. diff --git a/artifacts/live_response/packages/lsappinfo.yaml b/artifacts/live_response/packages/lsappinfo.yaml index b650f903..79f36f60 100644 --- a/artifacts/live_response/packages/lsappinfo.yaml +++ b/artifacts/live_response/packages/lsappinfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsappinfo" +output_directory: /live_response/packages artifacts: - description: List all of the running applications. diff --git a/artifacts/live_response/packages/lslpp.yaml b/artifacts/live_response/packages/lslpp.yaml index d7ce2d51..6c26633f 100644 --- a/artifacts/live_response/packages/lslpp.yaml +++ b/artifacts/live_response/packages/lslpp.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lslpp" +output_directory: /live_response/packages artifacts: - description: Displays the installation and update history information for the specified fileset. 
diff --git a/artifacts/live_response/packages/macos_applications.yaml b/artifacts/live_response/packages/macos_applications.yaml index 63f882a6..d37d5a8d 100644 --- a/artifacts/live_response/packages/macos_applications.yaml +++ b/artifacts/live_response/packages/macos_applications.yaml @@ -1,10 +1,11 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/packages artifacts: - description: Display all applications located at /Applications directory. supported_os: [macos] collector: command - command: ls -l -a /Applications + command: ls -l -a /%mount_point%/Applications output_file: ls_-l_-a_Applications.txt - description: Display all applications located at /Users/%user%/Applications directory. @@ -12,3 +13,4 @@ artifacts: collector: command command: ls -l -a /%user_home%/Applications output_file: ls_-l_-a_%user_home%_Applications.txt + exclude_nologin_users: true diff --git a/artifacts/live_response/packages/nix.yaml b/artifacts/live_response/packages/nix.yaml index 73bafaed..b8eb4794 100644 --- a/artifacts/live_response/packages/nix.yaml +++ b/artifacts/live_response/packages/nix.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "nix-env" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/opkg.yaml b/artifacts/live_response/packages/opkg.yaml index eb6f5891..484cf842 100644 --- a/artifacts/live_response/packages/opkg.yaml +++ b/artifacts/live_response/packages/opkg.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "opkg" +output_directory: /live_response/packages artifacts: - description: Display installed packages. 
diff --git a/artifacts/live_response/packages/pacman.yaml b/artifacts/live_response/packages/pacman.yaml index 132348f2..ad950114 100644 --- a/artifacts/live_response/packages/pacman.yaml +++ b/artifacts/live_response/packages/pacman.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pacman" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/pip.yaml b/artifacts/live_response/packages/pip.yaml index 47f3dfa2..6f7ebf9a 100644 --- a/artifacts/live_response/packages/pip.yaml +++ b/artifacts/live_response/packages/pip.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pip" +output_directory: /live_response/packages artifacts: - description: Display Python packages installed through pip. diff --git a/artifacts/live_response/packages/pisi.yaml b/artifacts/live_response/packages/pisi.yaml index 165652c5..365bb0cc 100644 --- a/artifacts/live_response/packages/pisi.yaml +++ b/artifacts/live_response/packages/pisi.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pisi" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/pkg.yaml b/artifacts/live_response/packages/pkg.yaml index fb74cb5d..c1ba44d7 100644 --- a/artifacts/live_response/packages/pkg.yaml +++ b/artifacts/live_response/packages/pkg.yaml @@ -1,4 +1,6 @@ -version: 2.0 +version: 3.0 +condition: command_exists "pkg" +output_directory: /live_response/packages artifacts: - description: Displays information about installed packages. 
@@ -6,4 +8,4 @@ artifacts: collector: command command: pkg info output_file: pkg_info.txt - \ No newline at end of file + diff --git a/artifacts/live_response/packages/pkg_info.yaml b/artifacts/live_response/packages/pkg_info.yaml index 8a05842f..e8e6b20b 100644 --- a/artifacts/live_response/packages/pkg_info.yaml +++ b/artifacts/live_response/packages/pkg_info.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pkg_info" +output_directory: /live_response/packages artifacts: - description: Display information for all currently installed packages, including internal packages. diff --git a/artifacts/live_response/packages/pkginfo.yaml b/artifacts/live_response/packages/pkginfo.yaml index 588aa4f6..db815bac 100644 --- a/artifacts/live_response/packages/pkginfo.yaml +++ b/artifacts/live_response/packages/pkginfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pkginfo" +output_directory: /live_response/packages artifacts: - description: Display information for fully installed packages only. diff --git a/artifacts/live_response/packages/pkgutil.yaml b/artifacts/live_response/packages/pkgutil.yaml index 473c1045..0cd507bc 100644 --- a/artifacts/live_response/packages/pkgutil.yaml +++ b/artifacts/live_response/packages/pkgutil.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pkgutil" +output_directory: /live_response/packages artifacts: - description: Display all installed packages and receipts. diff --git a/artifacts/live_response/packages/qlist.yaml b/artifacts/live_response/packages/qlist.yaml index b0d3d02b..ed1e5fe9 100644 --- a/artifacts/live_response/packages/qlist.yaml +++ b/artifacts/live_response/packages/qlist.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "qlist" +output_directory: /live_response/packages artifacts: - description: Display installed packages with repository (Gentoo). 
diff --git a/artifacts/live_response/packages/rpm.yaml b/artifacts/live_response/packages/rpm.yaml index 817322aa..83cad07a 100644 --- a/artifacts/live_response/packages/rpm.yaml +++ b/artifacts/live_response/packages/rpm.yaml @@ -1,4 +1,6 @@ -version: 2.0 +version: 3.0 +condition: command_exists "rpm" +output_directory: /live_response/packages artifacts: - description: Displays all installed packages. diff --git a/artifacts/live_response/packages/snap.yaml b/artifacts/live_response/packages/snap.yaml index 5e8a4c9a..594110f4 100644 --- a/artifacts/live_response/packages/snap.yaml +++ b/artifacts/live_response/packages/snap.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "snap" +output_directory: /live_response/packages artifacts: - description: Display installed Snap packages. diff --git a/artifacts/live_response/packages/softwareupdate.yaml b/artifacts/live_response/packages/softwareupdate.yaml index 6cc45ed5..a8d9a69a 100644 --- a/artifacts/live_response/packages/softwareupdate.yaml +++ b/artifacts/live_response/packages/softwareupdate.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "softwareupdate" +output_directory: /live_response/packages artifacts: - description: Display installed updates. diff --git a/artifacts/live_response/packages/swupd.yaml b/artifacts/live_response/packages/swupd.yaml index ac42a74c..f7c3ee61 100644 --- a/artifacts/live_response/packages/swupd.yaml +++ b/artifacts/live_response/packages/swupd.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "swupd" +output_directory: /live_response/packages artifacts: - description: List installed bundles. 
diff --git a/artifacts/live_response/packages/synopkg.yaml b/artifacts/live_response/packages/synopkg.yaml index f1d38038..a8732f0e 100644 --- a/artifacts/live_response/packages/synopkg.yaml +++ b/artifacts/live_response/packages/synopkg.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "synopkg" +output_directory: /live_response/packages artifacts: - description: Displays information about installed packages on Synology DSM system. diff --git a/artifacts/live_response/packages/xbps.yaml b/artifacts/live_response/packages/xbps.yaml index deefaefe..e8294b4e 100644 --- a/artifacts/live_response/packages/xbps.yaml +++ b/artifacts/live_response/packages/xbps.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "xbps-query" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/yay.yaml b/artifacts/live_response/packages/yay.yaml index fc4ca162..ad233703 100644 --- a/artifacts/live_response/packages/yay.yaml +++ b/artifacts/live_response/packages/yay.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "yay" +output_directory: /live_response/packages artifacts: - description: Display installed packages. diff --git a/artifacts/live_response/packages/yum.yaml b/artifacts/live_response/packages/yum.yaml index a773dc3a..a7c0962e 100644 --- a/artifacts/live_response/packages/yum.yaml +++ b/artifacts/live_response/packages/yum.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "yum" +output_directory: /live_response/packages artifacts: - description: Display installed packages. 
diff --git a/artifacts/live_response/packages/zypper.yaml b/artifacts/live_response/packages/zypper.yaml index 9ba97e25..c193c4f0 100644 --- a/artifacts/live_response/packages/zypper.yaml +++ b/artifacts/live_response/packages/zypper.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "zypper" +output_directory: /live_response/packages artifacts: - description: List installed packages. zypper is a command-line interface to ZYpp system management library (libzypp). diff --git a/artifacts/live_response/process/deleted.yaml b/artifacts/live_response/process/deleted.yaml index e619e809..fbfc9fcf 100644 --- a/artifacts/live_response/process/deleted.yaml +++ b/artifacts/live_response/process/deleted.yaml @@ -1,91 +1,83 @@ -version: 3.0 +version: 4.0 artifacts: - description: Collect the binary of (malicious) processes if they are shown up as being (deleted). - # the collection will be limited to the first 50M of data only. + # the collection will be limited to the first 20M of data only. # this is to avoid dd hitting an invalid file descriptor (such as /dev/null) and generating an endless output file supported_os: [linux] collector: command foreach: ls -l /proc/[0-9]*/exe | grep -E "\(deleted\)" | grep -v -E "> /proc/" | awk -F"/proc/|/exe" '{print $2}' - command: dd if=/proc/%line%/exe of=%output_file% conv=swab bs=1024 count=50000 - output_directory: proc/%line% - output_file: recovered_exe.dd.swab + command: dd if=/proc/%line%/exe of=recovered_exe bs=1024 count=20000 + output_directory: /live_response/process/proc/%line% - description: Collect the memory sections and strings from (deleted) processes. 
supported_os: [linux] collector: command foreach: ls -l /proc/[0-9]*/exe | grep -E "\(deleted\)" | grep -v -E "> /proc/" | awk -F"/proc/|/exe" '{print $2}' - command: linux_procmemdump.sh -p %line% -u -b -d %output_file% - output_file: proc - stderr_output_file: linux_procmemdump.txt.stderr + command: linux_procmemdump.sh -p %line% -u -b -d proc + output_directory: /live_response/process - description: Collect the list of deleted files of (malicious) processes if they are shown up as being (deleted). supported_os: [linux] collector: command foreach: ls -l /proc/[0-9]*/exe | grep -E "\(deleted\)" | awk -F"/proc/|/exe" '{print $2}' command: ls -l /proc/%line%/fd/[0-9]* | grep -E "\(deleted\)" | grep -v -E "> /dev/|> /proc/" | awk -F"/proc/%line%/fd/| ->" '{print "%line%/fd/"$2}' - output_file: .deleted_file_descriptors.txt + output_directory: /%temp_directory%/process + output_file: file_descriptors_of_deleted_processes.txt - description: Collect the list of deleted files located in /dev/shm of (malicious) processes if they are shown up as being (deleted). supported_os: [linux] collector: command foreach: ls -l /proc/[0-9]*/exe | grep -E "\(deleted\)" | awk -F"/proc/|/exe" '{print $2}' command: ls -l /proc/%line%/fd/[0-9]* | grep -E "\(deleted\)" | grep -E "> /dev/shm" | awk -F"/proc/%line%/fd/| ->" '{print "%line%/fd/"$2}' - output_file: .deleted_file_descriptors.txt + output_directory: /%temp_directory%/process + output_file: file_descriptors_of_deleted_processes.txt - description: Collect open files of (malicious) processes if they are shown up as being (deleted). - # the collection will be limited to the first 50M of data only. + # the collection will be limited to the first 20M of data only. 
# this is to avoid dd hitting an invalid file descriptor (such as /dev/null) and generating an endless output file supported_os: [linux] collector: command - foreach: cat "%destination_directory%/.deleted_file_descriptors.txt" - command: dd if=/proc/%line% of=%output_file% bs=1024 count=50000 - output_directory: proc/%line% - output_file: recovered_file.dd + foreach: cat "/%temp_directory%/process/file_descriptors_of_deleted_processes.txt" + command: dd if=/proc/%line% of=recovered_file bs=1024 count=20000 + output_directory: /live_response/process/proc/%line% - description: Collect the list of files being hidden in a memfd socket. supported_os: [linux] collector: command foreach: ls -l /proc/[0-9]*/exe | awk -F"/proc/|/exe" '{print $2}' command: ls -l /proc/%line%/fd/[0-9]* | grep "memfd" | grep -E "\(deleted\)" | awk -F"/proc/%line%/fd/| ->" '{print "%line%/fd/"$2}' - output_file: .files_hidden_in_memfd_socket.txt + output_directory: /%temp_directory%/process + output_file: files_hidden_in_memfd_socket.txt - description: Collect open files of (malicious) processes if they are being hidden in a memfd socket. - # the collection will be limited to the first 50M of data only. + # the collection will be limited to the first 20M of data only. # this is to avoid dd hitting an invalid file descriptor (such as /dev/null) and generating an endless output file supported_os: [linux] collector: command - foreach: cat "%destination_directory%/.files_hidden_in_memfd_socket.txt" - command: dd if=/proc/%line% of=%output_file% bs=1024 count=50000 - output_directory: proc/%line% - output_file: recovered_file.dd + foreach: cat "/%temp_directory%/process/files_hidden_in_memfd_socket.txt" + command: dd if=/proc/%line% of=recovered_file bs=1024 count=20000 + output_directory: /live_response/process/proc/%line% - description: List of open files of (malicious) processes. 
supported_os: [linux] collector: command foreach: ls -l /proc/[0-9]*/exe | grep -E "\(deleted\)" | awk -F"/proc/|/exe" '{print $2}' command: ls -l /proc/%line%/fd/[0-9]* | grep -v -E "\(deleted\)" | awk -F'-> ' '{print $2}' | sed -e "s:^'::" -e "s:'$::" -e ':^"::' -e ':"$::' - output_file: .list_open_file_descriptors.txt - - - description: Find open files of (malicious) processes. - supported_os: [linux] - collector: command - foreach: cat "%destination_directory%/.list_open_file_descriptors.txt" - command: find %line% -type f -print - output_file: .open_file_descriptors.txt + output_directory: /%temp_directory%/process + output_file: list_open_file_descriptors.txt - description: Collect open files of (malicious) processes. supported_os: [linux] collector: file - path: .open_file_descriptors.txt + path: /%temp_directory%/process/list_open_file_descriptors.txt is_file_list: true - description: Collect the binary of (malicious) processes if they are shown up as being (deleted). - # the collection will be limited to the first 50M of data only. + # the collection will be limited to the first 20M of data only. 
# this is to avoid dd hitting an invalid file descriptor (such as /dev/null) and generating an endless output file supported_os: [solaris] collector: command foreach: ls -l /proc/[0-9]*/path/a.out | grep -E "a\.out$" | awk -F"/proc/|/path/" '{print $2}' - command: dd if=/proc/%line%/object/a.out of=%output_file% conv=swab bs=1024 count=50000 - output_directory: proc/%line% - output_file: recovered_a.out.dd.swab + command: dd if=/proc/%line%/object/a.out of=recovered_a.out bs=1024 count=20000 + output_directory: /live_response/process/proc/%line% diff --git a/artifacts/live_response/process/esxcli.yaml b/artifacts/live_response/process/esxcli.yaml index 9b1e72c8..9a09d748 100644 --- a/artifacts/live_response/process/esxcli.yaml +++ b/artifacts/live_response/process/esxcli.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "esxcli" +output_directory: /live_response/process artifacts: - description: List the VMkernel UserWorld processes currently on the host. diff --git a/artifacts/live_response/process/fstat.yaml b/artifacts/live_response/process/fstat.yaml index 87dd5923..25032a3b 100644 --- a/artifacts/live_response/process/fstat.yaml +++ b/artifacts/live_response/process/fstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "fstat" +output_directory: /live_response/process artifacts: - description: Collect status of open files. diff --git a/artifacts/live_response/process/hash_running_processes.yaml b/artifacts/live_response/process/hash_running_processes.yaml index dc7622e7..a75b8191 100644 --- a/artifacts/live_response/process/hash_running_processes.yaml +++ b/artifacts/live_response/process/hash_running_processes.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/process artifacts: - description: Hash running processes. @@ -8,10 +9,17 @@ artifacts: output_file: hash_running_processes - description: Hash running processes. 
- supported_os: [android, linux, netbsd] + supported_os: [linux, netbsd] collector: hash path: /proc/[0-9]*/exe output_file: hash_running_processes + - + description: Hash running processes. + supported_os: [freebsd] + collector: hash + condition: ls /proc/$$ + path: /proc/[0-9]*/file + output_file: hash_running_processes - description: Hash running processes. supported_os: [netscaler] @@ -26,20 +34,38 @@ artifacts: output_file: hash_running_processes - description: Collect running processes executable path. - supported_os: [freebsd, macos, openbsd] + supported_os: [esxi] collector: command - command: ps -eo args | grep "^/" | awk '{print $1}' | sort -u + command: ps -c | awk '{print $4}' | grep "^/" | sort -u + output_directory: /%temp_directory%/process output_file: hash_running_processes_full_paths.txt - description: Collect running processes executable path. - supported_os: [esxi] + supported_os: [freebsd] + collector: command + condition: ! ls /proc/$$ + command: ps -axo args | grep "^/" | awk '{print $1}' | sort -u + output_directory: /%temp_directory%/process + output_file: hash_running_processes_full_paths.txt + - + description: Collect running processes executable path. + supported_os: [macos] + collector: command + command: ps -axo comm | grep "^/" | sort -u + output_directory: /%temp_directory%/process + output_file: hash_running_processes_full_paths.txt + - + description: Collect running processes executable path. + supported_os: [openbsd] collector: command - command: ps -c | awk '{print $4}' | sort -u | grep "^/" + command: ps -axo args | grep "^/" | awk '{print $1}' | sort -u + output_directory: /%temp_directory%/process output_file: hash_running_processes_full_paths.txt - description: Hash running processes. supported_os: [esxi, freebsd, macos, openbsd] collector: hash - path: hash_running_processes_full_paths.txt + condition: ! 
ls /proc/$$ + path: /%temp_directory%/process/hash_running_processes_full_paths.txt is_file_list: true output_file: hash_running_processes \ No newline at end of file diff --git a/artifacts/live_response/process/lsof.yaml b/artifacts/live_response/process/lsof.yaml index aa47c143..b4c7d79a 100644 --- a/artifacts/live_response/process/lsof.yaml +++ b/artifacts/live_response/process/lsof.yaml @@ -1,11 +1,7 @@ -version: 2.0 +version: 3.0 +condition: command_exists "lsof" +output_directory: /live_response/process artifacts: - - - description: Collect the list open files. - supported_os: [android] - collector: command - command: lsof -l - output_file: lsof_-l.txt - description: Collect the list open files. supported_os: [aix, esxi, freebsd, linux, macos, netscaler, openbsd, solaris] diff --git a/artifacts/live_response/process/procfs_information.yaml b/artifacts/live_response/process/procfs_information.yaml index 7da7b4a0..6301299b 100644 --- a/artifacts/live_response/process/procfs_information.yaml +++ b/artifacts/live_response/process/procfs_information.yaml @@ -1,11 +1,18 @@ version: 2.0 +output_directory: /live_response/process artifacts: - description: Collect current running processes information. - supported_os: [aix, android, linux, netbsd, netscaler] + supported_os: [aix, freebsd, linux, netbsd, netscaler, solaris] collector: command command: ls -l /proc/[0-9]* output_file: ls_-l_proc.txt + + # each operating system has its own particularities and artifacts available in /proc + # to make this artifact easier to read and more organized, each operating system + # will have its own section + + # aix - description: Collect running processes executable path. supported_os: [aix] @@ -13,147 +20,451 @@ artifacts: command: ls -l /proc/[0-9]*/object/a.out output_file: running_processes_full_paths.txt - - description: Collect running processes executable path. 
- supported_os: [android, linux, netbsd] + description: Collect the path to the current working directory of the process. + supported_os: [aix] collector: command - command: ls -l /proc/[0-9]*/exe - output_file: running_processes_full_paths.txt + command: ls -l /proc/[0-9]*/cwd + output_file: ls_-l_proc_pid_cwd.txt + - + description: Collect the list of files which the process has open. + supported_os: [aix] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: ls -la /proc/%line%/fd + output_directory: /live_response/process/proc/%line% + output_file: fd.txt + - + description: Collect information about all file descriptors opened by a process. + supported_os: [aix] + collector: command + condition: command_exists "procfiles" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: procfiles -n -c %line% + output_directory: /live_response/process/proc/%line% + output_file: procfiles.txt + # aix: strings available + - + description: Collect running process information. + supported_os: [aix] + collector: command + condition: command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: strings /proc/%line%/psinfo + output_directory: /live_response/process/proc/%line% + output_file: psinfo.txt + - + description: Collect status information about the process. + supported_os: [aix] + collector: command + condition: command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: strings /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + # aix: no strings available + - + description: Collect running process information. + supported_os: [aix] + collector: command + condition: ! 
command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/psinfo + output_directory: /live_response/process/proc/%line% + output_file: psinfo.txt + - + description: Collect status information about the process. + supported_os: [aix] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + + # freebsd + # some freebsd systems use profcs, some don't - description: Collect running processes executable path. - supported_os: [freebsd, macos, openbsd] + supported_os: [freebsd] collector: command - command: ps -eo args | grep ^/ | awk '{print $1}' | sort -u + condition: ls /proc/$$ + command: ls -l /proc/[0-9]*/file output_file: running_processes_full_paths.txt - description: Collect running processes executable path. - supported_os: [netscaler] + supported_os: [freebsd] collector: command - command: ls -l /proc/[0-9]*/file + condition: ! ls /proc/$$ + command: ps -axo args | grep ^/ | awk '{print $1}' | sort -u output_file: running_processes_full_paths.txt + - + description: Collect command line arguments for a process. + supported_os: [freebsd] + collector: command + condition: ls /proc/$$ + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/cmdline + output_directory: /live_response/process/proc/%line% + output_file: cmdline.txt + - + description: Collect mapped memory regions and their access permissions. + supported_os: [freebsd] + collector: command + condition: ls /proc/$$ + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/map + output_directory: /live_response/process/proc/%line% + output_file: map.txt + - + description: Collect status information about the process. 
+ supported_os: [freebsd] + collector: command + condition: ls /proc/$$ + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + + # linux - description: Collect running processes executable path. - supported_os: [solaris] + supported_os: [linux] collector: command - command: ls -l /proc/[0-9]*/path/a.out + command: ls -l /proc/[0-9]*/exe output_file: running_processes_full_paths.txt - description: Collect the path to the current working directory of the process. - supported_os: [aix, android, linux, netbsd, solaris] + supported_os: [linux] collector: command command: ls -l /proc/[0-9]*/cwd output_file: ls_-l_proc_pid_cwd.txt + - + description: Collect the list of files which the process has open. + supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: ls -la /proc/%line%/fd + output_directory: /live_response/process/proc/%line% + output_file: fd.txt + - + description: Collect information about entries corresponding to memory-mapped files. + supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: ls -la /proc/%line%/map_files + output_directory: /live_response/process/proc/%line% + output_file: map_files.txt + - + description: Collect command line arguments for a process. + supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/cmdline + output_directory: /live_response/process/proc/%line% + output_file: cmdline.txt + - + description: Collect the list of child tasks of a process. 
+ supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/task/%line%/children + output_directory: /live_response/process/proc/%line% + output_file: children.txt - description: Collect the command name associated with a process. - supported_os: [android, linux] + supported_os: [linux] collector: command foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: cat /proc/%line%/comm - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% output_file: comm.txt + - + description: Collect mapped memory regions and their access permissions. + supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/maps + output_directory: /live_response/process/proc/%line% + output_file: maps.txt + - + description: Collect the initial process' stack trace. + supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/stack + output_directory: /live_response/process/proc/%line% + output_file: stack.txt + - + description: Collect status information about the process. + supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + - + description: Display the list of UNIX sockets. + supported_os: [linux] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/net/unix + output_directory: /live_response/process/proc/%line%/net + output_file: unix.txt + # linux: strings available + - + description: Collect initial environment that was set when the process was started. 
+ supported_os: [linux] + collector: command + condition: command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: strings /proc/%line%/environ + output_directory: /live_response/process/proc/%line% + output_file: environ.txt + # linux: no strings available + - + description: Collect initial environment that was set when the process was started. + supported_os: [linux] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/environ + output_directory: /live_response/process/proc/%line% + output_file: environ.txt + + # macos + - + description: Collect running processes executable path. + supported_os: [macos] + collector: command + command: ps -axo comm | grep ^/ | sort -u + output_file: running_processes_full_paths.txt + + # netbsd + - + description: Collect running processes executable path. + supported_os: [netbsd] + collector: command + command: ls -l /proc/[0-9]*/exe + output_file: running_processes_full_paths.txt + - + description: Collect the path to the current working directory of the process. + supported_os: [netbsd] + collector: command + command: ls -l /proc/[0-9]*/cwd + output_file: ls_-l_proc_pid_cwd.txt + - + description: Collect the list of files which the process has open. + supported_os: [netbsd] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: ls -la /proc/%line%/fd + output_directory: /live_response/process/proc/%line% + output_file: fd.txt - description: Collect command line arguments for a process. 
- supported_os: [android, linux, netbsd, netscaler, solaris] + supported_os: [netbsd] collector: command foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: strings /proc/%line%/cmdline - output_directory: proc/%line% + command: cat /proc/%line%/cmdline + output_directory: /live_response/process/proc/%line% output_file: cmdline.txt - description: Collect mapped memory regions and their access permissions. - supported_os: [netbsd, netscaler] + supported_os: [netbsd] collector: command foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: cat /proc/%line%/map - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% output_file: map.txt - description: Collect mapped memory regions and their access permissions. - supported_os: [android, linux, netbsd] + supported_os: [netbsd] collector: command foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: cat /proc/%line%/maps - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% output_file: maps.txt + - + description: Collect status information about the process. + supported_os: [netbsd] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + # netbsd: strings available - description: Collect initial environment that was set when the process was started. - supported_os: [android, linux, solaris] + supported_os: [netbsd] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: strings /proc/%line%/environ - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% output_file: environ.txt + # netbsd: no strings available - - description: Collect the list of child tasks of a process. 
- supported_os: [android, linux] + description: Collect initial environment that was set when the process was started. + supported_os: [netbsd] collector: command + condition: ! command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: cat /proc/%line%/task/%line%/children - output_directory: proc/%line% - output_file: children.txt + command: astrings /proc/%line%/environ + output_directory: /live_response/process/proc/%line% + output_file: environ.txt + + # netscaler + - + description: Collect running processes executable path. + supported_os: [netscaler] + collector: command + command: ls -l /proc/[0-9]*/file + output_file: running_processes_full_paths.txt + - + description: Collect command line arguments for a process. + supported_os: [netscaler] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/cmdline + output_directory: /live_response/process/proc/%line% + output_file: cmdline.txt + - + description: Collect mapped memory regions and their access permissions. + supported_os: [netscaler] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/map + output_directory: /live_response/process/proc/%line% + output_file: map.txt + - + description: Collect status information about the process. + supported_os: [netscaler] + collector: command + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: cat /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + + # openbsd + - + description: Collect running processes executable path. + supported_os: [openbsd] + collector: command + command: ps -axo args | grep ^/ | awk '{print $1}' | sort -u + output_file: running_processes_full_paths.txt + + # solaris + - + description: Collect running processes executable path. 
+ supported_os: [solaris] + collector: command + command: ls -l /proc/[0-9]*/path/a.out + output_file: running_processes_full_paths.txt + - + description: Collect the path to the current working directory of the process. + supported_os: [solaris] + collector: command + command: ls -l /proc/[0-9]*/cwd + output_file: ls_-l_proc_pid_cwd.txt - description: Collect the list of files which the process has open. - supported_os: [android, linux] + supported_os: [solaris] collector: command foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: ls -la /proc/%line%/fd - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% output_file: fd.txt - - description: Collect information about entries corresponding to memory-mapped files. - supported_os: [linux] + description: Collect the list of files which the process has open. + supported_os: [solaris] collector: command + condition: command_exists "pfiles" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: ls -la /proc/%line%/map_files - output_directory: proc/%line% - output_file: map_files.txt + command: pfiles -F %line% + output_directory: /live_response/process/proc/%line% + output_file: pfiles.txt - - description: Collect the initial process' stack trace. - supported_os: [android, linux] + description: Collect command line arguments for a process. + supported_os: [solaris] collector: command foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: cat /proc/%line%/stack - output_directory: proc/%line% - output_file: stack.txt + command: cat /proc/%line%/cmdline + output_directory: /live_response/process/proc/%line% + output_file: cmdline.txt + + # solaris: strings available - - description: Collect status information about the process. - supported_os: [android, linux, netbsd, netscaler] + description: Collect initial environment that was set when the process was started. 
+ supported_os: [solaris] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: cat /proc/%line%/status - output_directory: proc/%line% - output_file: status.txt + command: strings /proc/%line%/environ + output_directory: /live_response/process/proc/%line% + output_file: environ.txt - - description: Display the list of UNIX sockets. - supported_os: [linux] + description: Collect mapped memory regions and their access permissions. + supported_os: [solaris] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: cat /proc/%line%/net/unix - output_directory: proc/%line%/net - output_file: unix.txt + command: strings /proc/%line%/map + output_directory: /live_response/process/proc/%line% + output_file: map.txt - description: Collect running process information. - supported_os: [aix, solaris] + supported_os: [solaris] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: strings /proc/%line%/psinfo - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% output_file: psinfo.txt - - description: Collect information about all file descriptors opened by a process. - supported_os: [aix] + description: Collect status information about the process. + supported_os: [solaris] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: procfiles -n -c %line% - output_directory: proc/%line% - output_file: procfiles.txt + command: strings /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + + # solaris: no strings available - - description: Collect the list of files which the process has open. 
+ description: Collect initial environment that was set when the process was started. supported_os: [solaris] collector: command + condition: ! command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done - command: pfiles -F %line% - output_directory: proc/%line% - output_file: pfiles.txt - -# References: -# https://www.defensive-security.com/storage/uploads/Advanced%20Linux%20Detection%20and%20Forensics%20Cheatsheet%20by%20Defensive%20Security.pdf + command: astrings /proc/%line%/environ + output_directory: /live_response/process/proc/%line% + output_file: environ.txt + - + description: Collect mapped memory regions and their access permissions. + supported_os: [solaris] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/map + output_directory: /live_response/process/proc/%line% + output_file: map.txt + - + description: Collect running process information. + supported_os: [solaris] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/psinfo + output_directory: /live_response/process/proc/%line% + output_file: psinfo.txt + - + description: Collect status information about the process. + supported_os: [solaris] + collector: command + condition: ! 
command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/status + output_directory: /live_response/process/proc/%line% + output_file: status.txt + \ No newline at end of file diff --git a/artifacts/live_response/process/procstat.yaml b/artifacts/live_response/process/procstat.yaml index ee4ecd8d..5da8ac2e 100644 --- a/artifacts/live_response/process/procstat.yaml +++ b/artifacts/live_response/process/procstat.yaml @@ -1,42 +1,43 @@ -version: 1.0 +version: 2.0 +condition: command_exists "procstat" artifacts: - description: Collect command line arguments for a process. supported_os: [freebsd] collector: command - foreach: ps -eo pid | sed -e 's:^ *::' | grep ^[0-9] + foreach: ps -axo pid | sed -e 's:^ *::' | grep ^[0-9] command: procstat arguments %line% - output_directory: procstat/%line% + output_directory: /live_response/process/proc/%line% output_file: arguments.txt - description: Collect binary information for a process. supported_os: [freebsd] collector: command - foreach: ps -eo pid | sed -e 's:^ *::' | grep ^[0-9] + foreach: ps -axo pid | sed -e 's:^ *::' | grep ^[0-9] command: procstat binary %line% - output_directory: procstat/%line% + output_directory: /live_response/process/proc/%line% output_file: binary.txt - description: Collect environment variables for a process. supported_os: [freebsd] collector: command - foreach: ps -eo pid | sed -e 's:^ *::' | grep ^[0-9] + foreach: ps -axo pid | sed -e 's:^ *::' | grep ^[0-9] command: procstat environment %line% - output_directory: procstat/%line% + output_directory: /live_response/process/proc/%line% output_file: environment.txt - description: Collect file descriptor information for a process. 
supported_os: [freebsd] collector: command - foreach: ps -eo pid | sed -e 's:^ *::' | grep ^[0-9] + foreach: ps -axo pid | sed -e 's:^ *::' | grep ^[0-9] command: procstat files %line% - output_directory: procstat/%line% + output_directory: /live_response/process/proc/%line% output_file: files.txt - description: Collect virtual memory mappings for a process. supported_os: [freebsd] collector: command - foreach: ps -eo pid | sed -e 's:^ *::' | grep ^[0-9] + foreach: ps -axo pid | sed -e 's:^ *::' | grep ^[0-9] command: procstat vm %line% - output_directory: procstat/%line% + output_directory: /live_response/process/proc/%line% output_file: vm.txt \ No newline at end of file diff --git a/artifacts/live_response/process/proctree.yaml b/artifacts/live_response/process/proctree.yaml index a99d0220..84e345ad 100644 --- a/artifacts/live_response/process/proctree.yaml +++ b/artifacts/live_response/process/proctree.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "proctree" +output_directory: /live_response/process artifacts: - description: Print the process tree containing the specified process IDs or users. diff --git a/artifacts/live_response/process/ps.yaml b/artifacts/live_response/process/ps.yaml index af66068f..bd3bec74 100644 --- a/artifacts/live_response/process/ps.yaml +++ b/artifacts/live_response/process/ps.yaml @@ -1,4 +1,5 @@ -version: 2.0 +version: 3.0 +output_directory: /live_response/process artifacts: - description: Report a snapshot of the current processes. @@ -20,25 +21,37 @@ artifacts: output_file: ps_auxwwwf.txt - description: Report a snapshot of the current processes. - supported_os: [aix, android, freebsd, linux, macos, netscaler, solaris] + supported_os: [aix, freebsd, linux, macos, netscaler, solaris] collector: command command: ps -deaf output_file: ps_-deaf.txt - description: Report a snapshot of the current processes. 
- supported_os: [aix, android, freebsd, linux, macos, netscaler, solaris] + supported_os: [aix, freebsd, linux, macos, netscaler, solaris] collector: command command: ps -ef output_file: ps_-ef.txt - description: Report a snapshot of the current processes. - supported_os: [aix, android, freebsd, linux, macos, netscaler, solaris] + supported_os: [aix, freebsd, linux, macos, netscaler, solaris] collector: command command: ps -efl output_file: ps_-efl.txt - description: Report a snapshot of the current processes including elapsed time since the process was started. - supported_os: [aix, android, freebsd, linux, macos, netbsd, netscaler, openbsd, solaris] + supported_os: [aix] + collector: command + command: ps -eo pid,user,etime,args + output_file: ps_-eo_pid_user_etime_args.txt + - + description: Report a snapshot of the current processes including elapsed time since the process was started. + supported_os: [freebsd, linux, macos, netbsd, netscaler, openbsd] + collector: command + command: ps -axo pid,user,etime,args + output_file: ps_-eo_pid_user_etime_args.txt + - + description: Report a snapshot of the current processes including elapsed time since the process was started. + supported_os: [solaris] collector: command command: ps -eo pid,user,etime,args output_file: ps_-eo_pid_user_etime_args.txt @@ -46,17 +59,18 @@ artifacts: description: Report a snapshot of the current processes including time the command started. supported_os: [freebsd, linux, macos, netbsd, netscaler, openbsd] collector: command - command: ps -eo pid,user,lstart,args + command: ps -axo pid,user,lstart,args output_file: ps_-eo_pid_user_lstart_args.txt - description: Report a snapshot of the current processes including the control groups to which the process belongs. 
supported_os: [linux] collector: command - command: ps -eo pid,user,cgroup + command: ps -axo pid,user,cgroup output_file: ps_-eo_pid_user_cgroup.txt - description: Report a snapshot of the current processes including used time, verbose, session ID and process group, state and type. supported_os: [esxi] collector: command command: ps -P -T -c -g -s -t -J - output_file: ps_-P_-T_-c_-g_-s_-t_-J.txt \ No newline at end of file + output_file: ps_-P_-T_-c_-g_-s_-t_-J.txt + \ No newline at end of file diff --git a/artifacts/live_response/process/pstat.yaml b/artifacts/live_response/process/pstat.yaml index 96e3d406..67b63326 100644 --- a/artifacts/live_response/process/pstat.yaml +++ b/artifacts/live_response/process/pstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pstat" +output_directory: /live_response/process artifacts: - description: Display entries in the process table. diff --git a/artifacts/live_response/process/pstree.yaml b/artifacts/live_response/process/pstree.yaml index 4ceab7fd..d710cdf6 100644 --- a/artifacts/live_response/process/pstree.yaml +++ b/artifacts/live_response/process/pstree.yaml @@ -1,20 +1,22 @@ -version: 2.0 +version: 3.0 +condition: command_exists "pstree" +output_directory: /live_response/process artifacts: - description: Display a tree of processes. - supported_os: [android, linux] + supported_os: [linux] collector: command command: pstree output_file: pstree.txt - description: Display a tree of processes including command line arguments. - supported_os: [android, linux] + supported_os: [linux] collector: command command: pstree -a output_file: pstree_-a.txt - description: Display a tree of processes with the same parent sorted by PID instead of by name (numeric sort). 
- supported_os: [android, linux] + supported_os: [linux] collector: command command: pstree -p -n output_file: pstree_-p_-n.txt \ No newline at end of file diff --git a/artifacts/live_response/process/ptree.yaml b/artifacts/live_response/process/ptree.yaml index 9ebdce00..d4108ecc 100644 --- a/artifacts/live_response/process/ptree.yaml +++ b/artifacts/live_response/process/ptree.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ptree" +output_directory: /live_response/process artifacts: - description: Display a tree of processes. diff --git a/artifacts/live_response/process/strings_running_processes.yaml b/artifacts/live_response/process/strings_running_processes.yaml index 3a6a2ae9..73094b3e 100644 --- a/artifacts/live_response/process/strings_running_processes.yaml +++ b/artifacts/live_response/process/strings_running_processes.yaml @@ -1,48 +1,190 @@ -version: 1.0 +version: 2.0 artifacts: + # aix - description: Extract strings from running processes. supported_os: [aix] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: strings -a /proc/%line%/object/a.out - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% output_file: strings.txt compress_output_file: true - description: Extract strings from running processes. - supported_os: [freebsd, macos, openbsd] + supported_os: [aix] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/object/a.out + output_directory: /live_response/process/proc/%line% + output_file: strings.txt + compress_output_file: true + # esxi + - + description: Extract strings from running processes. 
+ supported_os: [esxi] collector: command - foreach: ps -eo args | grep ^/ | awk '{print $1}' | sort -u + condition: command_exists "strings" + foreach: ps -c | awk '{print $4}' | grep "^/" | sort -u command: strings -a %line% - output_directory: strings + output_directory: /live_response/process/strings output_file: %line%.txt compress_output_file: true - description: Extract strings from running processes. - supported_os: [android, linux, netbsd] + supported_os: [esxi] collector: command + condition: ! command_exists "strings" + foreach: ps -c | awk '{print $4}' | grep "^/" | sort -u + command: astrings %line% + output_directory: /live_response/process/strings + output_file: %line%.txt + compress_output_file: true + # freebsd + - + description: Extract strings from running processes if procfs and strings are available. + supported_os: [freebsd] + collector: command + condition: ls /proc/$$ && command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: strings -a /proc/%line%/file + output_directory: /live_response/process/proc/%line% + output_file: strings.txt + compress_output_file: true + - + description: Extract strings from running processes if procfs is not available and strings is available. + supported_os: [freebsd] + collector: command + condition: if ls /proc/$$; then false; else true; fi && command_exists "strings" + foreach: ps -axo args | grep "^/" | awk '{print $1}' | sort -u + command: strings -a %line% + output_directory: /live_response/process/strings + output_file: %line%.txt + compress_output_file: true + - + description: Extract strings from running processes if procfs is available and strings is not available. 
+ supported_os: [freebsd] + collector: command + condition: ls /proc/$$ && if command_exists "strings"; then false; else true; fi + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/file + output_directory: /live_response/process/proc/%line% + output_file: strings.txt + compress_output_file: true + - + description: Extract strings from running processes if neither procfs nor strings are available. + supported_os: [freebsd] + collector: command + condition: if ls /proc/$$; then false; else true; fi && if command_exists "strings"; then false; else true; fi + foreach: ps -axo args | grep "^/" | awk '{print $1}' | sort -u + command: astrings %line% + output_directory: /live_response/process/strings + output_file: %line%.txt + compress_output_file: true + # linux, netbsd + - + description: Extract strings from running processes. + supported_os: [linux, netbsd] + collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: strings -a /proc/%line%/exe - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% + output_file: strings.txt + compress_output_file: true + - + description: Extract strings from running processes. + supported_os: [linux, netbsd] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/exe + output_directory: /live_response/process/proc/%line% output_file: strings.txt compress_output_file: true + # macos + - + description: Extract strings from running processes. 
+ supported_os: [macos] + collector: command + condition: xcode-select -p + foreach: ps -axo comm | grep "^/" | sort -u + command: strings -a %line% + output_directory: /live_response/process/strings + output_file: %line%.txt + compress_output_file: true + - + description: Extract strings from running processes. + supported_os: [macos] + collector: command + condition: ! xcode-select -p + foreach: ps -axo comm | grep "^/" | sort -u + command: astrings %line% + output_directory: /live_response/process/strings + output_file: %line%.txt + compress_output_file: true + # netscaler - description: Extract strings from running processes. supported_os: [netscaler] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: strings -a /proc/%line%/file - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% + output_file: strings.txt + compress_output_file: true + - + description: Extract strings from running processes. + supported_os: [netscaler] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/file + output_directory: /live_response/process/proc/%line% output_file: strings.txt compress_output_file: true + # openbsd + - + description: Extract strings from running processes. + supported_os: [openbsd] + collector: command + condition: command_exists "strings" + foreach: ps -axo args | grep "^/" | awk '{print $1}' | sort -u + command: strings -a %line% + output_directory: /live_response/process/strings + output_file: %line%.txt + compress_output_file: true + - + description: Extract strings from running processes. + supported_os: [openbsd] + collector: command + condition: ! 
command_exists "strings" + foreach: ps -axo args | grep "^/" | awk '{print $1}' | sort -u + command: astrings %line% + output_directory: /live_response/process/strings + output_file: %line%.txt + compress_output_file: true + # solaris - description: Extract strings from running processes. supported_os: [solaris] collector: command + condition: command_exists "strings" foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done command: strings -a /proc/%line%/path/a.out - output_directory: proc/%line% + output_directory: /live_response/process/proc/%line% + output_file: strings.txt + compress_output_file: true + - + description: Extract strings from running processes. + supported_os: [solaris] + collector: command + condition: ! command_exists "strings" + foreach: for pid in /proc/[0-9]*; do echo ${pid} | sed -e 's:/proc/::'; done + command: astrings /proc/%line%/path/a.out + output_directory: /live_response/process/proc/%line% output_file: strings.txt compress_output_file: true - \ No newline at end of file diff --git a/artifacts/live_response/process/top.yaml b/artifacts/live_response/process/top.yaml index 94baede2..6f338695 100644 --- a/artifacts/live_response/process/top.yaml +++ b/artifacts/live_response/process/top.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "top" +output_directory: /live_response/process artifacts: - description: Display current running processes. @@ -8,7 +10,7 @@ artifacts: output_file: top_-b.txt - description: Display current running processes. 
- supported_os: [android, linux] + supported_os: [linux] collector: command command: top -b -n1 output_file: top_-b_-n1.txt diff --git a/artifacts/live_response/storage/arcstat.yaml b/artifacts/live_response/storage/arcstat.yaml index fd911309..309edb63 100644 --- a/artifacts/live_response/storage/arcstat.yaml +++ b/artifacts/live_response/storage/arcstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "arcstat" +output_directory: /live_response/storage artifacts: - description: Report ZFS ARC and L2ARC statistics. diff --git a/artifacts/live_response/storage/blkid.yaml b/artifacts/live_response/storage/blkid.yaml index e7369626..a4c5a238 100644 --- a/artifacts/live_response/storage/blkid.yaml +++ b/artifacts/live_response/storage/blkid.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "blkid" +output_directory: /live_response/storage artifacts: - description: Locate/print block device attributes. diff --git a/artifacts/live_response/storage/df.yaml b/artifacts/live_response/storage/df.yaml index cf0ef420..c4b8bda6 100644 --- a/artifacts/live_response/storage/df.yaml +++ b/artifacts/live_response/storage/df.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "df" +output_directory: /live_response/storage artifacts: - description: Report file system disk space usage. @@ -8,7 +10,7 @@ artifacts: output_file: df.txt - description: Report file system disk space usage. 
- supported_os: [android, freebsd, linux, macos, netbsd, netscaler, openbsd, solaris] + supported_os: [freebsd, linux, macos, netbsd, netscaler, openbsd, solaris] collector: command command: df -h output_file: df_-h.txt diff --git a/artifacts/live_response/storage/diskutil.yaml b/artifacts/live_response/storage/diskutil.yaml index 0c2a2658..cebbc816 100644 --- a/artifacts/live_response/storage/diskutil.yaml +++ b/artifacts/live_response/storage/diskutil.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "diskutil" +output_directory: /live_response/storage artifacts: - description: List disks, including internal and external disks, whole disks and partitions, and various kinds of virtual or offline disks. diff --git a/artifacts/live_response/storage/esxcli.yaml b/artifacts/live_response/storage/esxcli.yaml index 7bb26f11..61bc19ec 100644 --- a/artifacts/live_response/storage/esxcli.yaml +++ b/artifacts/live_response/storage/esxcli.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "esxcli" +output_directory: /live_response/storage artifacts: - description: List the NAS volumes currently known to the ESX host. diff --git a/artifacts/live_response/storage/fdisk.yaml b/artifacts/live_response/storage/fdisk.yaml index 5e491aec..e7e25e5d 100644 --- a/artifacts/live_response/storage/fdisk.yaml +++ b/artifacts/live_response/storage/fdisk.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "fdisk" +output_directory: /live_response/storage artifacts: - description: List the partition tables for devices mentioned in /proc/partitions. 
diff --git a/artifacts/live_response/storage/findmnt.yaml b/artifacts/live_response/storage/findmnt.yaml index bd0339bd..e744487b 100644 --- a/artifacts/live_response/storage/findmnt.yaml +++ b/artifacts/live_response/storage/findmnt.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "findmnt" +output_directory: /live_response/storage artifacts: - description: Display all mounted filesystems in the tree-like format. diff --git a/artifacts/live_response/storage/format.yaml b/artifacts/live_response/storage/format.yaml index 6bee86c2..e1bc7839 100644 --- a/artifacts/live_response/storage/format.yaml +++ b/artifacts/live_response/storage/format.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "format" +output_directory: /live_response/storage artifacts: - description: List available disks. diff --git a/artifacts/live_response/storage/fs_usage.yaml b/artifacts/live_response/storage/fs_usage.yaml index 717909c8..d1e3aac3 100644 --- a/artifacts/live_response/storage/fs_usage.yaml +++ b/artifacts/live_response/storage/fs_usage.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "fs_usage" +output_directory: /live_response/storage artifacts: - description: Report system calls and page faults related to filesystem activity in real-time. diff --git a/artifacts/live_response/storage/geom.yaml b/artifacts/live_response/storage/geom.yaml index 011afccc..629d93a4 100644 --- a/artifacts/live_response/storage/geom.yaml +++ b/artifacts/live_response/storage/geom.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "geom" +output_directory: /live_response/storage artifacts: - description: Print the list of all providers from the DISK class. @@ -7,8 +9,8 @@ artifacts: command: geom disk list output_file: geom_disk_list.txt - - description: Display geoms hierarchy as a tree. + description: Display geoms hierarchy as a tree. 
supported_os: [freebsd] collector: command command: geom -t - output_file: geom_-t.txt \ No newline at end of file + output_file: geom_-t.txt diff --git a/artifacts/live_response/storage/gstat.yaml b/artifacts/live_response/storage/gstat.yaml index 23387207..d31fa7ab 100644 --- a/artifacts/live_response/storage/gstat.yaml +++ b/artifacts/live_response/storage/gstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "gstat" +output_directory: /live_response/storage artifacts: - description: Print statistics about GEOM disks. diff --git a/artifacts/live_response/storage/iostat.yaml b/artifacts/live_response/storage/iostat.yaml index dac0b1b6..86aeeb6e 100644 --- a/artifacts/live_response/storage/iostat.yaml +++ b/artifacts/live_response/storage/iostat.yaml @@ -1,4 +1,6 @@ -version: 2.0 +version: 3.0 +condition: command_exists "iostat" +output_directory: /live_response/storage artifacts: - description: Report device I/O statistics. diff --git a/artifacts/live_response/storage/iscsiadm.yaml b/artifacts/live_response/storage/iscsiadm.yaml index a16cc1ab..d2a5987a 100644 --- a/artifacts/live_response/storage/iscsiadm.yaml +++ b/artifacts/live_response/storage/iscsiadm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "iscsiadm" +output_directory: /live_response/storage artifacts: # iscsiadm is an open-iscsi administration utility. - diff --git a/artifacts/live_response/storage/lparstat.yaml b/artifacts/live_response/storage/lparstat.yaml index cd26d9ce..e3b4a673 100644 --- a/artifacts/live_response/storage/lparstat.yaml +++ b/artifacts/live_response/storage/lparstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lparstat" +output_directory: /live_response/storage artifacts: - description: Display logical partition (LPAR) related information and statistics. 
diff --git a/artifacts/live_response/storage/ls_dev_disk.yaml b/artifacts/live_response/storage/ls_dev_disk.yaml index 17da09ac..dcf663df 100644 --- a/artifacts/live_response/storage/ls_dev_disk.yaml +++ b/artifacts/live_response/storage/ls_dev_disk.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/storage artifacts: - description: Display the mapping of logical volumes with physical disks. diff --git a/artifacts/live_response/storage/ls_vmfs_devices.yaml b/artifacts/live_response/storage/ls_vmfs_devices.yaml index 78700372..c7c4dff0 100644 --- a/artifacts/live_response/storage/ls_vmfs_devices.yaml +++ b/artifacts/live_response/storage/ls_vmfs_devices.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/storage artifacts: - description: Display the mapping of logical volumes with physical disks. diff --git a/artifacts/live_response/storage/lsblk.yaml b/artifacts/live_response/storage/lsblk.yaml index a4dd3abd..2ba72e07 100644 --- a/artifacts/live_response/storage/lsblk.yaml +++ b/artifacts/live_response/storage/lsblk.yaml @@ -1,4 +1,6 @@ -version: 2.0 +version: 3.0 +condition: command_exists "lsblk" +output_directory: /live_response/storage artifacts: - description: List block devices. diff --git a/artifacts/live_response/storage/lsfs.yaml b/artifacts/live_response/storage/lsfs.yaml index 8937b7d9..a4cd2cc8 100644 --- a/artifacts/live_response/storage/lsfs.yaml +++ b/artifacts/live_response/storage/lsfs.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsfs" +output_directory: /live_response/storage artifacts: - description: Displays the characteristics of file systems. 
diff --git a/artifacts/live_response/storage/lspv.yaml b/artifacts/live_response/storage/lspv.yaml index c60245e4..9ad24eed 100644 --- a/artifacts/live_response/storage/lspv.yaml +++ b/artifacts/live_response/storage/lspv.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lspv" +output_directory: /live_response/storage artifacts: - description: Displays information about a physical volume within a volume group. diff --git a/artifacts/live_response/storage/lsvg.yaml b/artifacts/live_response/storage/lsvg.yaml index 13d5b3bc..98ab90f0 100644 --- a/artifacts/live_response/storage/lsvg.yaml +++ b/artifacts/live_response/storage/lsvg.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsvg" +output_directory: /live_response/storage artifacts: - description: Displays information about volume groups. diff --git a/artifacts/live_response/storage/lvdisplay.yaml b/artifacts/live_response/storage/lvdisplay.yaml index 5cb5bfe7..4750ac60 100644 --- a/artifacts/live_response/storage/lvdisplay.yaml +++ b/artifacts/live_response/storage/lvdisplay.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lvdisplay" +output_directory: /live_response/storage artifacts: - description: Display information about logical volumes. diff --git a/artifacts/live_response/storage/lvs.yaml b/artifacts/live_response/storage/lvs.yaml index 89151f22..c2a8b67f 100644 --- a/artifacts/live_response/storage/lvs.yaml +++ b/artifacts/live_response/storage/lvs.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lvs" +output_directory: /live_response/storage artifacts: - description: Display information about logical volumes. 
diff --git a/artifacts/live_response/storage/mdadm.yaml b/artifacts/live_response/storage/mdadm.yaml index 2eca11ce..17845449 100644 --- a/artifacts/live_response/storage/mdadm.yaml +++ b/artifacts/live_response/storage/mdadm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "mdadm" +output_directory: /live_response/storage artifacts: - description: Display a snapshot of the kernel's RAID/md state. diff --git a/artifacts/live_response/storage/mount.yaml b/artifacts/live_response/storage/mount.yaml index 02967b30..b7b8d0e5 100644 --- a/artifacts/live_response/storage/mount.yaml +++ b/artifacts/live_response/storage/mount.yaml @@ -1,8 +1,10 @@ -version: 2.0 +version: 3.0 +condition: command_exists "mount" +output_directory: /live_response/storage artifacts: - description: Lists all mounted filesystems. - supported_os: [android, aix, freebsd, linux, openbsd, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, openbsd, netbsd, netscaler, openbsd, solaris] collector: command command: mount output_file: mount.txt \ No newline at end of file diff --git a/artifacts/live_response/storage/pdisk.yaml b/artifacts/live_response/storage/pdisk.yaml index 9a4b89c5..cf208339 100644 --- a/artifacts/live_response/storage/pdisk.yaml +++ b/artifacts/live_response/storage/pdisk.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pdisk" +output_directory: /live_response/storage artifacts: - description: List partition tables for all available drives. diff --git a/artifacts/live_response/storage/pvdisplay.yaml b/artifacts/live_response/storage/pvdisplay.yaml index 413df2c8..f9da9947 100644 --- a/artifacts/live_response/storage/pvdisplay.yaml +++ b/artifacts/live_response/storage/pvdisplay.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pvdisplay" +output_directory: /live_response/storage artifacts: - description: Display various attributes of physical volumes. 
diff --git a/artifacts/live_response/storage/pvesm.yaml b/artifacts/live_response/storage/pvesm.yaml index a894a2b4..3bdc0cd1 100644 --- a/artifacts/live_response/storage/pvesm.yaml +++ b/artifacts/live_response/storage/pvesm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pvesm" +output_directory: /live_response/storage artifacts: - description: Get status for all Proxmox VE datastores. diff --git a/artifacts/live_response/storage/pvs.yaml b/artifacts/live_response/storage/pvs.yaml index 01ba5d4a..c5259aac 100644 --- a/artifacts/live_response/storage/pvs.yaml +++ b/artifacts/live_response/storage/pvs.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pvs" +output_directory: /live_response/storage artifacts: - description: Display information about physical volumes. diff --git a/artifacts/live_response/storage/vgdisplay.yaml b/artifacts/live_response/storage/vgdisplay.yaml index bd1eedce..addcd455 100644 --- a/artifacts/live_response/storage/vgdisplay.yaml +++ b/artifacts/live_response/storage/vgdisplay.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vgdisplay" +output_directory: /live_response/storage artifacts: - description: Display volume group information. diff --git a/artifacts/live_response/storage/vgs.yaml b/artifacts/live_response/storage/vgs.yaml index 28feee86..9c2f28cb 100644 --- a/artifacts/live_response/storage/vgs.yaml +++ b/artifacts/live_response/storage/vgs.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vgs" +output_directory: /live_response/storage artifacts: - description: Display information about volume groups. 
diff --git a/artifacts/live_response/storage/zfs.yaml b/artifacts/live_response/storage/zfs.yaml index 7fe94ac0..6ddbd9aa 100644 --- a/artifacts/live_response/storage/zfs.yaml +++ b/artifacts/live_response/storage/zfs.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "zfs" +output_directory: /live_response/storage artifacts: - description: Lists the property information for the given datasets in tabular form. diff --git a/artifacts/live_response/storage/zpool.yaml b/artifacts/live_response/storage/zpool.yaml index 32964b43..20f212e6 100644 --- a/artifacts/live_response/storage/zpool.yaml +++ b/artifacts/live_response/storage/zpool.yaml @@ -1,4 +1,6 @@ -version: 3.0 +version: 4.0 +condition: command_exists "zpool" +output_directory: /live_response/storage artifacts: - description: Displays the command history of all pools. diff --git a/artifacts/live_response/system/auditctl.yaml b/artifacts/live_response/system/auditctl.yaml index 19e91401..7956fb8c 100644 --- a/artifacts/live_response/system/auditctl.yaml +++ b/artifacts/live_response/system/auditctl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "auditctl" +output_directory: /live_response/system artifacts: - description: Display kernel's audit rules. @@ -12,4 +14,4 @@ artifacts: collector: command command: auditctl -s output_file: auditctl_-s.txt - + \ No newline at end of file diff --git a/artifacts/live_response/system/chkconfig.yaml b/artifacts/live_response/system/chkconfig.yaml index 42015e17..1c419129 100644 --- a/artifacts/live_response/system/chkconfig.yaml +++ b/artifacts/live_response/system/chkconfig.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "chkconfig" +output_directory: /live_response/system artifacts: - description: Display a list of all services and their current configuration. 
diff --git a/artifacts/live_response/system/crle.yaml b/artifacts/live_response/system/crle.yaml index ce5d2611..58da0f5c 100644 --- a/artifacts/live_response/system/crle.yaml +++ b/artifacts/live_response/system/crle.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "crle" +output_directory: /live_response/system artifacts: - description: Displays configuration information. @@ -6,4 +8,4 @@ artifacts: collector: command command: crle output_file: crle.txt - \ No newline at end of file + diff --git a/artifacts/live_response/system/csrutil.yaml b/artifacts/live_response/system/csrutil.yaml index 9808170e..8b683e4a 100644 --- a/artifacts/live_response/system/csrutil.yaml +++ b/artifacts/live_response/system/csrutil.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "csrutil" +output_directory: /live_response/system artifacts: - description: Display the System Integrity Protection (SIP) status. diff --git a/artifacts/live_response/system/date.yaml b/artifacts/live_response/system/date.yaml index 3ac3aaed..6064ca6b 100644 --- a/artifacts/live_response/system/date.yaml +++ b/artifacts/live_response/system/date.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Display the system date and time. diff --git a/artifacts/live_response/system/ebpf.yaml b/artifacts/live_response/system/ebpf.yaml index 88a1398e..ff8e103b 100644 --- a/artifacts/live_response/system/ebpf.yaml +++ b/artifacts/live_response/system/ebpf.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: List pinned eBPF progs. 
@@ -8,4 +9,4 @@ artifacts: output_file: ls_-la_sys_fs_bpf.txt # References: -# https://www.defensive-security.com/storage/uploads/Advanced%20Linux%20Detection%20and%20Forensics%20Cheatsheet%20by%20Defensive%20Security.pdf \ No newline at end of file +# https://www.defensive-security.com/storage/uploads/Advanced%20Linux%20Detection%20and%20Forensics%20Cheatsheet%20by%20Defensive%20Security.pdf diff --git a/artifacts/live_response/system/eeprom.yaml b/artifacts/live_response/system/eeprom.yaml index f9985079..67724886 100644 --- a/artifacts/live_response/system/eeprom.yaml +++ b/artifacts/live_response/system/eeprom.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "eeprom" +output_directory: /live_response/system artifacts: - description: Display the values of parameters in the EEPROM. diff --git a/artifacts/live_response/system/env.yaml b/artifacts/live_response/system/env.yaml index f91c2a4b..d878f525 100644 --- a/artifacts/live_response/system/env.yaml +++ b/artifacts/live_response/system/env.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Display environment variables. diff --git a/artifacts/live_response/system/errpt.yaml b/artifacts/live_response/system/errpt.yaml index af3aab66..d4a5b1a7 100644 --- a/artifacts/live_response/system/errpt.yaml +++ b/artifacts/live_response/system/errpt.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "errpt" +output_directory: /live_response/system artifacts: - description: Display a report of logged errors. 
diff --git a/artifacts/live_response/system/esxcli.yaml b/artifacts/live_response/system/esxcli.yaml index bda08434..b3c5f367 100644 --- a/artifacts/live_response/system/esxcli.yaml +++ b/artifacts/live_response/system/esxcli.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "esxcli" +output_directory: /live_response/system artifacts: - description: List the VMkernel modules that the system knows about. diff --git a/artifacts/live_response/system/falconctl.yaml b/artifacts/live_response/system/falconctl.yaml index 20316025..98b17f42 100644 --- a/artifacts/live_response/system/falconctl.yaml +++ b/artifacts/live_response/system/falconctl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: ls "/opt/CrowdStrike/falconctl" || ls /Applications/Falcon.app/Contents/Resources/falconctl +output_directory: /live_response/system artifacts: - description: Display the AgentId diff --git a/artifacts/live_response/system/free.yaml b/artifacts/live_response/system/free.yaml index c2b2da9d..0d77472a 100644 --- a/artifacts/live_response/system/free.yaml +++ b/artifacts/live_response/system/free.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "free" +output_directory: /live_response/system artifacts: - description: Display amount of free and used memory in the system. - supported_os: [android, linux] + supported_os: [linux] collector: command command: free output_file: free.txt diff --git a/artifacts/live_response/system/genkex.yaml b/artifacts/live_response/system/genkex.yaml index 5d21a5f1..d691e072 100644 --- a/artifacts/live_response/system/genkex.yaml +++ b/artifacts/live_response/system/genkex.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "genkex" +output_directory: /live_response/system artifacts: - description: Display the list of kernel extensions currently loaded onto the system and displays the address, size, and path name for each kernel extension in the list. 
diff --git a/artifacts/live_response/system/getenforce.yaml b/artifacts/live_response/system/getenforce.yaml index 3f433b46..8c896702 100644 --- a/artifacts/live_response/system/getenforce.yaml +++ b/artifacts/live_response/system/getenforce.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "getenforce" +output_directory: /live_response/system artifacts: - description: Display the current mode of SELinux. - supported_os: [android, linux] + supported_os: [linux] collector: command command: getenforce output_file: getenforce.txt diff --git a/artifacts/live_response/system/getprop.yaml b/artifacts/live_response/system/getprop.yaml deleted file mode 100644 index 9f436c23..00000000 --- a/artifacts/live_response/system/getprop.yaml +++ /dev/null @@ -1,9 +0,0 @@ -version: 1.0 -artifacts: - - - description: Display all properties and values from Android propery service. - supported_os: [android] - collector: command - command: getprop - output_file: getprop.txt - \ No newline at end of file diff --git a/artifacts/live_response/system/ha-manager.yaml b/artifacts/live_response/system/ha-manager.yaml index 9cb00d85..0d67ab04 100644 --- a/artifacts/live_response/system/ha-manager.yaml +++ b/artifacts/live_response/system/ha-manager.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "ha-manager" +output_directory: /live_response/system artifacts: - description: Display Proxmox VE HA manager status. diff --git a/artifacts/live_response/system/hidden_directories.yaml b/artifacts/live_response/system/hidden_directories.yaml index 42776296..67129015 100644 --- a/artifacts/live_response/system/hidden_directories.yaml +++ b/artifacts/live_response/system/hidden_directories.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: List all hidden directories outside of user home directory. 
@@ -7,7 +8,6 @@ artifacts: path: / name_pattern: [".*"] exclude_path_pattern: ["/root", "/home", "/export/home", "/Users"] - file_type: d - max_depth: 6 + file_type: [d] output_file: hidden_directories.txt \ No newline at end of file diff --git a/artifacts/live_response/system/hidden_files.yaml b/artifacts/live_response/system/hidden_files.yaml index a83bc498..3c9e6e67 100644 --- a/artifacts/live_response/system/hidden_files.yaml +++ b/artifacts/live_response/system/hidden_files.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: List all hidden files outside of user home directory. @@ -7,7 +8,6 @@ artifacts: path: / name_pattern: [".*"] exclude_path_pattern: ["/root", "/home", "/export/home", "/Users"] - file_type: f - max_depth: 6 + file_type: [f] output_file: hidden_files.txt \ No newline at end of file diff --git a/artifacts/live_response/system/hwclock.yaml b/artifacts/live_response/system/hwclock.yaml index 9e2e597f..bb7fecaf 100644 --- a/artifacts/live_response/system/hwclock.yaml +++ b/artifacts/live_response/system/hwclock.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "hwclock" +output_directory: /live_response/system artifacts: - description: Display the Hardware Clock time. - supported_os: [android, linux] + supported_os: [linux] collector: command command: hwclock output_file: hwclock.txt diff --git a/artifacts/live_response/system/isainfo.yaml b/artifacts/live_response/system/isainfo.yaml index 12288659..93be8474 100644 --- a/artifacts/live_response/system/isainfo.yaml +++ b/artifacts/live_response/system/isainfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "isainfo" +output_directory: /live_response/system artifacts: - description: Display the name of the instruction set(s) used by the operating system kernel components such as device drivers and STREAMS modules. 
diff --git a/artifacts/live_response/system/kernel_modules.yaml b/artifacts/live_response/system/kernel_modules.yaml index a1d60d40..ec450d3b 100644 --- a/artifacts/live_response/system/kernel_modules.yaml +++ b/artifacts/live_response/system/kernel_modules.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Display the loaded kernel modules (compare with /proc/modules). @@ -12,8 +13,8 @@ artifacts: collector: command foreach: for module in /sys/module/* ; do echo ${module} | sed -e 's:/sys/module/::'; done command: ls -la /sys/module/%line%/parameters - output_directory: module/%line% + output_directory: /live_response/system/module/%line% output_file: parameters.txt # References: -# https://www.defensive-security.com/storage/uploads/Advanced%20Linux%20Detection%20and%20Forensics%20Cheatsheet%20by%20Defensive%20Security.pdf \ No newline at end of file +# https://www.defensive-security.com/storage/uploads/Advanced%20Linux%20Detection%20and%20Forensics%20Cheatsheet%20by%20Defensive%20Security.pdf diff --git a/artifacts/live_response/system/kernel_tainted_state.yaml b/artifacts/live_response/system/kernel_tainted_state.yaml index 9dcb5586..4d8ebd62 100644 --- a/artifacts/live_response/system/kernel_tainted_state.yaml +++ b/artifacts/live_response/system/kernel_tainted_state.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Display the kernel tainted state. diff --git a/artifacts/live_response/system/kextstat.yaml b/artifacts/live_response/system/kextstat.yaml index dd5548f1..3b13d106 100644 --- a/artifacts/live_response/system/kextstat.yaml +++ b/artifacts/live_response/system/kextstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "kextstat" +output_directory: /live_response/system artifacts: - description: Display status of loaded kernel extensions (kexts). 
diff --git a/artifacts/live_response/system/kldstat.yaml b/artifacts/live_response/system/kldstat.yaml index 6ebc949b..3b2fa11a 100644 --- a/artifacts/live_response/system/kldstat.yaml +++ b/artifacts/live_response/system/kldstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "kldstat" +output_directory: /live_response/system artifacts: - description: Display status of dynamic kernel linker. diff --git a/artifacts/live_response/system/last.yaml b/artifacts/live_response/system/last.yaml index 1531533f..7fcfcbae 100644 --- a/artifacts/live_response/system/last.yaml +++ b/artifacts/live_response/system/last.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "last" +output_directory: /live_response/system artifacts: - description: Show a listing of last logins and logouts. diff --git a/artifacts/live_response/system/lastb.yaml b/artifacts/live_response/system/lastb.yaml index f782cb05..e5c8a8bb 100644 --- a/artifacts/live_response/system/lastb.yaml +++ b/artifacts/live_response/system/lastb.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lastb" +output_directory: /live_response/system artifacts: - description: Show a listing of last unsuccessful logins. diff --git a/artifacts/live_response/system/lastcomm.yaml b/artifacts/live_response/system/lastcomm.yaml index c2b744cf..fc70e634 100644 --- a/artifacts/live_response/system/lastcomm.yaml +++ b/artifacts/live_response/system/lastcomm.yaml @@ -1,4 +1,6 @@ version: 1.0 +condition: command_exists "lastcomm" +output_directory: /live_response/system artifacts: - description: Shows the last commands executed in a reverse order based on the default accounting file. @@ -10,7 +12,6 @@ artifacts: description: Shows the last commands executed in a reverse order from the historic accounting files. 
supported_os: [freebsd, netbsd, openbsd] collector: command - foreach: for acctfile in /var/account/acct.[0123]; do echo ${acctfile} | sed -e 's:/var/account/acct.::'; done - command: lastcomm -f /var/account/acct.%line% - output_file: lastcomm_%line%.txt - + foreach: ls /var/account/acct* + command: lastcomm -f %line% + output_file: lastcomm_%line%.txt \ No newline at end of file diff --git a/artifacts/live_response/system/lastlog.yaml b/artifacts/live_response/system/lastlog.yaml index a1dc75ea..efab5838 100644 --- a/artifacts/live_response/system/lastlog.yaml +++ b/artifacts/live_response/system/lastlog.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lastlog" +output_directory: /live_response/system artifacts: - description: Display the contents of the last login log /var/log/lastlog file. diff --git a/artifacts/live_response/system/launchctl.yaml b/artifacts/live_response/system/launchctl.yaml index 0bf55bac..e92c9ba4 100644 --- a/artifacts/live_response/system/launchctl.yaml +++ b/artifacts/live_response/system/launchctl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "launchctl" +output_directory: /live_response/system artifacts: - description: Display all loaded and unloaded jobs into launchd. diff --git a/artifacts/live_response/system/loginctl.yaml b/artifacts/live_response/system/loginctl.yaml index bae82f41..7746af09 100644 --- a/artifacts/live_response/system/loginctl.yaml +++ b/artifacts/live_response/system/loginctl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "loginctl" +output_directory: /live_response/system artifacts: - description: Show terse runtime status information about one or more logged in users, followed by the most recent log data from the journal. 
diff --git a/artifacts/live_response/system/lsmod.yaml b/artifacts/live_response/system/lsmod.yaml index 3987dc0c..2601a5cf 100644 --- a/artifacts/live_response/system/lsmod.yaml +++ b/artifacts/live_response/system/lsmod.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsmod" +output_directory: /live_response/system artifacts: - description: Display the status of modules in the Linux Kernel. - supported_os: [android, linux] + supported_os: [linux] collector: command command: lsmod output_file: lsmod.txt diff --git a/artifacts/live_response/system/lsps.yaml b/artifacts/live_response/system/lsps.yaml index aa0e3ef9..167b23af 100644 --- a/artifacts/live_response/system/lsps.yaml +++ b/artifacts/live_response/system/lsps.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "lsps" +output_directory: /live_response/system artifacts: - description: Displays the characteristics of a paging space. diff --git a/artifacts/live_response/system/mdatp.yaml b/artifacts/live_response/system/mdatp.yaml index 245497c9..81afacbb 100644 --- a/artifacts/live_response/system/mdatp.yaml +++ b/artifacts/live_response/system/mdatp.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "mdatp" +output_directory: /live_response/system artifacts: - description: Display sensor health. diff --git a/artifacts/live_response/system/modinfo.yaml b/artifacts/live_response/system/modinfo.yaml index 7a257e16..45a25993 100644 --- a/artifacts/live_response/system/modinfo.yaml +++ b/artifacts/live_response/system/modinfo.yaml @@ -1,15 +1,19 @@ -version: 1.0 +version: 3.0 +condition: command_exists "modinfo" artifacts: - - - description: Display information about loaded kernel modules. - supported_os: [solaris] - collector: command - command: modinfo - output_file: modinfo.txt - description: Display information about loaded kernel modules. 
supported_os: [linux] collector: command foreach: lsmod | awk '{print $1}' | tail -n +2 command: modinfo %line% + output_directory: /live_response/system/modinfo output_file: modinfo_%line%.txt + - + description: Display information about loaded kernel modules. + supported_os: [solaris] + collector: command + command: modinfo + output_directory: /live_response/system + output_file: modinfo.txt + \ No newline at end of file diff --git a/artifacts/live_response/system/mpstat.yaml b/artifacts/live_response/system/mpstat.yaml index 96fdaf42..7be433f3 100644 --- a/artifacts/live_response/system/mpstat.yaml +++ b/artifacts/live_response/system/mpstat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "mpstat" +output_directory: /live_response/system artifacts: - description: Display processor statistics in tabular form. diff --git a/artifacts/live_response/system/oslevel.yaml b/artifacts/live_response/system/oslevel.yaml index e2e3ae9f..3acdadcf 100644 --- a/artifacts/live_response/system/oslevel.yaml +++ b/artifacts/live_response/system/oslevel.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "oslevel" +output_directory: /live_response/system artifacts: - description: Display the latest installed level (technology level, maintenance level and service pack) of the system. diff --git a/artifacts/live_response/system/prtdiag.yaml b/artifacts/live_response/system/prtdiag.yaml index ebcc26b2..fd23a5f1 100644 --- a/artifacts/live_response/system/prtdiag.yaml +++ b/artifacts/live_response/system/prtdiag.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "prtdiag" +output_directory: /live_response/system artifacts: - description: Display system diagnostic information. 
diff --git a/artifacts/live_response/system/pvecm.yaml b/artifacts/live_response/system/pvecm.yaml index 48af44f9..43f15c44 100644 --- a/artifacts/live_response/system/pvecm.yaml +++ b/artifacts/live_response/system/pvecm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pvecm" +output_directory: /live_response/system artifacts: - description: Display Proxmox VE local view of the cluster nodes. diff --git a/artifacts/live_response/system/pvesubscription.yaml b/artifacts/live_response/system/pvesubscription.yaml index ecc73ccc..6e636b6c 100644 --- a/artifacts/live_response/system/pvesubscription.yaml +++ b/artifacts/live_response/system/pvesubscription.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pvesubscription" +output_directory: /live_response/system artifacts: - description: Display Proxmox VE subscription information. diff --git a/artifacts/live_response/system/pveum.yaml b/artifacts/live_response/system/pveum.yaml index 7bbe4df1..269be2ec 100644 --- a/artifacts/live_response/system/pveum.yaml +++ b/artifacts/live_response/system/pveum.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pveum" +output_directory: /live_response/system artifacts: - description: Display Proxmox VE user list. diff --git a/artifacts/live_response/system/pveversion.yaml b/artifacts/live_response/system/pveversion.yaml index 61fdb8a3..fac683ee 100644 --- a/artifacts/live_response/system/pveversion.yaml +++ b/artifacts/live_response/system/pveversion.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "pveversion" +output_directory: /live_response/system artifacts: - description: Display version information for Proxmox VE packages. 
diff --git a/artifacts/live_response/system/runlevel.yaml b/artifacts/live_response/system/runlevel.yaml index 1fa2eb00..41318da3 100644 --- a/artifacts/live_response/system/runlevel.yaml +++ b/artifacts/live_response/system/runlevel.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "runlevel" +output_directory: /live_response/system artifacts: - description: Display previous and current SysV runlevel. diff --git a/artifacts/live_response/system/service.yaml b/artifacts/live_response/system/service.yaml index 46afb2c7..770ddacb 100644 --- a/artifacts/live_response/system/service.yaml +++ b/artifacts/live_response/system/service.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "service" +output_directory: /live_response/system artifacts: - description: Display System V services status information. @@ -6,10 +8,4 @@ artifacts: collector: command command: service --status-all output_file: service_--status-all.txt - - - description: Display service names and their package names. - supported_os: [android] - collector: command - command: service list - output_file: service_list.txt \ No newline at end of file diff --git a/artifacts/live_response/system/sestatus.yaml b/artifacts/live_response/system/sestatus.yaml index 784de382..950f9649 100644 --- a/artifacts/live_response/system/sestatus.yaml +++ b/artifacts/live_response/system/sestatus.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "sestatus" +output_directory: /live_response/system artifacts: - description: Display the contexts of files and processes listed in the /etc/sestatus.conf file. 
diff --git a/artifacts/live_response/system/sgid.yaml b/artifacts/live_response/system/sgid.yaml index 4b82c773..72d03b22 100644 --- a/artifacts/live_response/system/sgid.yaml +++ b/artifacts/live_response/system/sgid.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Search for files that have SGID bit set. @@ -6,8 +7,7 @@ artifacts: collector: find path: / exclude_file_system: [proc, procfs] - file_type: f - max_depth: 6 - permissions: -2000 + file_type: [f] + permissions: [-2000] output_file: sgid.txt \ No newline at end of file diff --git a/artifacts/live_response/system/showrev.yaml b/artifacts/live_response/system/showrev.yaml index 7a078b54..1b130c42 100644 --- a/artifacts/live_response/system/showrev.yaml +++ b/artifacts/live_response/system/showrev.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "showrev" +output_directory: /live_response/system artifacts: - description: Display machine and software revision information. diff --git a/artifacts/live_response/system/socket_files.yaml b/artifacts/live_response/system/socket_files.yaml index 739aa4c3..6fa17d87 100644 --- a/artifacts/live_response/system/socket_files.yaml +++ b/artifacts/live_response/system/socket_files.yaml @@ -1,12 +1,12 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: List all socket files. supported_os: [all] collector: find path: / - file_type: s - max_depth: 6 + file_type: [s] exclude_file_system: [proc, procfs] output_file: socket_files.txt \ No newline at end of file diff --git a/artifacts/live_response/system/suid.yaml b/artifacts/live_response/system/suid.yaml index 56bf65a3..cf890f1e 100644 --- a/artifacts/live_response/system/suid.yaml +++ b/artifacts/live_response/system/suid.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Search for files that have SUID bit set. 
@@ -6,8 +7,7 @@ artifacts: collector: find path: / exclude_file_system: [proc, procfs] - file_type: f - max_depth: 6 - permissions: -4000 + file_type: [f] + permissions: [-4000] output_file: suid.txt \ No newline at end of file diff --git a/artifacts/live_response/system/svcs.yaml b/artifacts/live_response/system/svcs.yaml index f6387dba..e448844b 100644 --- a/artifacts/live_response/system/svcs.yaml +++ b/artifacts/live_response/system/svcs.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "svcs" +output_directory: /live_response/system artifacts: - description: Display information about service instances as recorded in the service configuration repository. diff --git a/artifacts/live_response/system/sw_vers.yaml b/artifacts/live_response/system/sw_vers.yaml index c1ef4a3e..e8721187 100644 --- a/artifacts/live_response/system/sw_vers.yaml +++ b/artifacts/live_response/system/sw_vers.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "sw_vers" +output_directory: /live_response/system artifacts: - description: Display macOS operating system version. diff --git a/artifacts/live_response/system/swapctl.yaml b/artifacts/live_response/system/swapctl.yaml index 59f18754..d9dbf844 100644 --- a/artifacts/live_response/system/swapctl.yaml +++ b/artifacts/live_response/system/swapctl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "swapctl" +output_directory: /live_response/system artifacts: - description: List the devices making up system swap. diff --git a/artifacts/live_response/system/swapinfo.yaml b/artifacts/live_response/system/swapinfo.yaml index d4b1c35b..6a0da5c1 100644 --- a/artifacts/live_response/system/swapinfo.yaml +++ b/artifacts/live_response/system/swapinfo.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "swapinfo" +output_directory: /live_response/system artifacts: - description: Display system data structures. 
diff --git a/artifacts/live_response/system/sys_modules.yaml b/artifacts/live_response/system/sys_modules.yaml index 7955a696..74fa9d23 100644 --- a/artifacts/live_response/system/sys_modules.yaml +++ b/artifacts/live_response/system/sys_modules.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: List loaded kernel modules. diff --git a/artifacts/live_response/system/sysctl.yaml b/artifacts/live_response/system/sysctl.yaml index b196826c..77a09d33 100644 --- a/artifacts/live_response/system/sysctl.yaml +++ b/artifacts/live_response/system/sysctl.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "sysctl" +output_directory: /live_response/system artifacts: - description: Display kernel parameters. - supported_os: [android, freebsd, linux, macos, netbsd, netscaler, openbsd] + supported_os: [freebsd, linux, macos, netbsd, netscaler, openbsd] collector: command command: sysctl -a output_file: sysctl_-a.txt diff --git a/artifacts/live_response/system/sysdef.yaml b/artifacts/live_response/system/sysdef.yaml index 808eee9a..356ee3d5 100644 --- a/artifacts/live_response/system/sysdef.yaml +++ b/artifacts/live_response/system/sysdef.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "sysdef" +output_directory: /live_response/system artifacts: - description: Display all hardware devices, as well as pseudo devices, system devices, loadable modules, and the values of selected kernel tunable parameters. 
diff --git a/artifacts/live_response/system/system_profiler.yaml b/artifacts/live_response/system/system_profiler.yaml index a9762ea6..ea66387f 100644 --- a/artifacts/live_response/system/system_profiler.yaml +++ b/artifacts/live_response/system/system_profiler.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "system_profiler" +output_directory: /live_response/system artifacts: - description: Reports system hardware and software configuration. diff --git a/artifacts/live_response/system/systemctl.yaml b/artifacts/live_response/system/systemctl.yaml index ed212875..8e1d0a96 100644 --- a/artifacts/live_response/system/systemctl.yaml +++ b/artifacts/live_response/system/systemctl.yaml @@ -1,4 +1,6 @@ -version: 1.1 +version: 2.0 +condition: command_exists "systemctl" +output_directory: /live_response/system artifacts: - description: Display all systemd system units. diff --git a/artifacts/live_response/system/timedatectl.yaml b/artifacts/live_response/system/timedatectl.yaml index fc027bc8..c3c51011 100644 --- a/artifacts/live_response/system/timedatectl.yaml +++ b/artifacts/live_response/system/timedatectl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "timedatectl" +output_directory: /live_response/system artifacts: - description: Display the current settings of the system clock and RTC, including whether network time synchronization is active. diff --git a/artifacts/live_response/system/tmutil.yaml b/artifacts/live_response/system/tmutil.yaml index 4fd900a1..db9cb87b 100644 --- a/artifacts/live_response/system/tmutil.yaml +++ b/artifacts/live_response/system/tmutil.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "tmutil" +output_directory: /live_response/system artifacts: - description: Print paths for all of this computer's completed snapshots. 
diff --git a/artifacts/live_response/system/uname.yaml b/artifacts/live_response/system/uname.yaml index 457aa82a..e1669944 100644 --- a/artifacts/live_response/system/uname.yaml +++ b/artifacts/live_response/system/uname.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Display system information. diff --git a/artifacts/live_response/system/uptime.yaml b/artifacts/live_response/system/uptime.yaml index 43bd274e..e5bf621f 100644 --- a/artifacts/live_response/system/uptime.yaml +++ b/artifacts/live_response/system/uptime.yaml @@ -1,4 +1,5 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: Display how long system has been running. @@ -8,7 +9,7 @@ artifacts: output_file: uptime.txt - description: Display how long system has been running in yyyy-mm-dd HH:MM:SS format. - supported_os: [all] + supported_os: [aix, esxi, linux, solaris] collector: command command: uptime -s output_file: uptime_-s.txt diff --git a/artifacts/live_response/system/vm_stat.yaml b/artifacts/live_response/system/vm_stat.yaml index dbd6be33..acd1e068 100644 --- a/artifacts/live_response/system/vm_stat.yaml +++ b/artifacts/live_response/system/vm_stat.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vm_stat" +output_directory: /live_response/system artifacts: - description: Display virtual memory statistics. diff --git a/artifacts/live_response/system/vmstat.yaml b/artifacts/live_response/system/vmstat.yaml index a013bb2c..0723d9ef 100644 --- a/artifacts/live_response/system/vmstat.yaml +++ b/artifacts/live_response/system/vmstat.yaml @@ -1,8 +1,10 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vmstat" +output_directory: /live_response/system artifacts: - description: Display virtual memory statistics. 
- supported_os: [android, aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] + supported_os: [aix, freebsd, linux, netbsd, netscaler, openbsd, solaris] collector: command command: vmstat output_file: vmstat.txt diff --git a/artifacts/live_response/system/who.yaml b/artifacts/live_response/system/who.yaml index 8f4142ea..b94d26e9 100644 --- a/artifacts/live_response/system/who.yaml +++ b/artifacts/live_response/system/who.yaml @@ -1,4 +1,6 @@ -version: 3.0 +version: 4.0 +condition: command_exists "who" +output_directory: /live_response/system artifacts: - description: Display the current run-level of the process. diff --git a/artifacts/live_response/system/world_writable_directories.yaml b/artifacts/live_response/system/world_writable_directories.yaml index e05e4629..9c49cc02 100644 --- a/artifacts/live_response/system/world_writable_directories.yaml +++ b/artifacts/live_response/system/world_writable_directories.yaml @@ -1,13 +1,13 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: List all world writable directories. supported_os: [all] collector: find path: / - file_type: d - max_depth: 6 - permissions: 777 + file_type: [d] + permissions: [777] exclude_file_system: [proc, procfs] output_file: world_writable_directories.txt \ No newline at end of file diff --git a/artifacts/live_response/system/world_writable_files.yaml b/artifacts/live_response/system/world_writable_files.yaml index 68cf86b0..375ffe21 100644 --- a/artifacts/live_response/system/world_writable_files.yaml +++ b/artifacts/live_response/system/world_writable_files.yaml @@ -1,13 +1,13 @@ -version: 1.0 +version: 2.0 +output_directory: /live_response/system artifacts: - description: List all world writable files. 
supported_os: [all] collector: find path: / - file_type: f - max_depth: 6 - permissions: 777 + file_type: [f] + permissions: [777] exclude_file_system: [proc, procfs] output_file: world_writable_files.txt \ No newline at end of file diff --git a/artifacts/live_response/system/zoneadm.yaml b/artifacts/live_response/system/zoneadm.yaml index 52e2805c..f551a008 100644 --- a/artifacts/live_response/system/zoneadm.yaml +++ b/artifacts/live_response/system/zoneadm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "zoneadm" +output_directory: /live_response/system artifacts: - description: Display all installed zones. A zone is an application container that is maintained by the operating system run-time. diff --git a/artifacts/live_response/vms/esxcli.yaml b/artifacts/live_response/vms/esxcli.yaml index 473923ad..cca69000 100644 --- a/artifacts/live_response/vms/esxcli.yaml +++ b/artifacts/live_response/vms/esxcli.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "esxcli" +output_directory: /live_response/vms artifacts: - description: List networking information for the VM's that have active ports. diff --git a/artifacts/live_response/vms/qm.yaml b/artifacts/live_response/vms/qm.yaml index 0446f6e4..049959e7 100644 --- a/artifacts/live_response/vms/qm.yaml +++ b/artifacts/live_response/vms/qm.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "qm" +output_directory: /live_response/vms artifacts: - description: List all Proxmox VE active and inactive virtual machines. diff --git a/artifacts/live_response/vms/vim-cmd.yaml b/artifacts/live_response/vms/vim-cmd.yaml index d9aa9a5c..1868e6d1 100644 --- a/artifacts/live_response/vms/vim-cmd.yaml +++ b/artifacts/live_response/vms/vim-cmd.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vim-cmd" +output_directory: /live_response/vms artifacts: - description: Get the list of virtual machines on the host. 
diff --git a/artifacts/live_response/vms/virsh.yaml b/artifacts/live_response/vms/virsh.yaml index 1eb45fb4..0306aa3c 100644 --- a/artifacts/live_response/vms/virsh.yaml +++ b/artifacts/live_response/vms/virsh.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "virsh" +output_directory: /live_response/vms artifacts: - description: List active and inactive domains. diff --git a/artifacts/live_response/vms/virt-top.yaml b/artifacts/live_response/vms/virt-top.yaml index 6f171779..7dac483f 100644 --- a/artifacts/live_response/vms/virt-top.yaml +++ b/artifacts/live_response/vms/virt-top.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "virt-top" +output_directory: /live_response/vms artifacts: - description: 'top'-like utility for virtualization stats. diff --git a/artifacts/live_response/vms/virtualbox.yaml b/artifacts/live_response/vms/virtualbox.yaml index b3ccbf55..353a12fa 100644 --- a/artifacts/live_response/vms/virtualbox.yaml +++ b/artifacts/live_response/vms/virtualbox.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "VBoxManage" +output_directory: /live_response/vms artifacts: - description: List all registered VMs. diff --git a/artifacts/live_response/vms/vm-support.yaml b/artifacts/live_response/vms/vm-support.yaml index 123abe44..b95bd896 100644 --- a/artifacts/live_response/vms/vm-support.yaml +++ b/artifacts/live_response/vms/vm-support.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vm-support" +output_directory: /live_response/vms artifacts: - description: List registered VMs. 
diff --git a/artifacts/live_response/vms/vmctl.yaml b/artifacts/live_response/vms/vmctl.yaml index e8548101..f12ed7b5 100644 --- a/artifacts/live_response/vms/vmctl.yaml +++ b/artifacts/live_response/vms/vmctl.yaml @@ -1,4 +1,6 @@ -version: 1.0 +version: 2.0 +condition: command_exists "vmctl" +output_directory: /live_response/vms artifacts: - description: List running virtual machines on this system. diff --git a/artifacts/memory_dump/avml.yaml b/artifacts/memory_dump/avml.yaml index 0e0a724e..a09a6caf 100644 --- a/artifacts/memory_dump/avml.yaml +++ b/artifacts/memory_dump/avml.yaml @@ -1,8 +1,8 @@ -version: 1.0 +version: 2.0 +output_directory: /memory_dump artifacts: - description: Capture a memory image. supported_os: [linux] collector: command - command: avml %output_file% - output_file: avml.raw \ No newline at end of file + command: avml avml.raw diff --git a/artifacts/memory_dump/process_memory_sections_strings.yaml b/artifacts/memory_dump/process_memory_sections_strings.yaml index 37af92dc..2a33b92a 100644 --- a/artifacts/memory_dump/process_memory_sections_strings.yaml +++ b/artifacts/memory_dump/process_memory_sections_strings.yaml @@ -1,9 +1,8 @@ -version: 1.0 +version: 2.0 +output_directory: /memory_dump artifacts: - description: Dump process memory sections and strings. supported_os: [linux] collector: command - command: linux_procmemdump.sh -u -b -d %output_file% - output_file: proc - stderr_output_file: linux_procmemdump.txt.stderr + command: linux_procmemdump.sh -u -b -d proc diff --git a/artifacts/memory_dump/process_memory_strings.yaml b/artifacts/memory_dump/process_memory_strings.yaml index 2023ff03..1659ac75 100644 --- a/artifacts/memory_dump/process_memory_strings.yaml +++ b/artifacts/memory_dump/process_memory_strings.yaml @@ -1,9 +1,8 @@ -version: 1.0 +version: 2.0 +output_directory: /memory_dump artifacts: - description: Dump strings from the memory space of running processes. 
supported_os: [linux] collector: command - command: linux_procmemdump.sh -s -d %output_file% - output_file: proc - stderr_output_file: linux_procmemdump.txt.stderr + command: linux_procmemdump.sh -s -d proc diff --git a/bin/README.txt b/bin/README.txt index 46237bf7..857f89f3 100644 --- a/bin/README.txt +++ b/bin/README.txt @@ -1,13 +1,33 @@ -Place your validated binary files in the '[uac_directory]\bin' directory if you want them to be executed instead of the built-in ones provided by the target operating system. +Place your validated executables (binary files and scripts) here! -UAC will detect and run CPU architecture specific executable files, so they need to be placed within the following directory structure: '[uac_directory]\bin\[operating_system]\[architecture]'. +In most cases, the executables should be placed in the '[UAC_DIRECTORY]/bin' directory, +along with any additional support files it needs to run. -Operating system must be one of the following options (in lowercase): android, aix, freebsd, linux, macos, netbsd, netscaler, openbsd or solaris. +For example, if you have an artifact that uses an executable named 'my_script.sh', you should place +this binary in the '[UAC_DIRECTORY]/bin' directory. -Architecture is the kernel architecture (in lowercase). It can be retrieved using 'uname' tool. +In the case where you have executables with the same name, but for multiple operating systems, +they should be placed in the '[UAC_DIRECTORY]/bin/[OS]' directory. -For example: +For example, if you have an artifact that uses an executable named 'lsof', but you +have two binary files, one for Linux and one for FreeBSD, you should place the binaries +in the '[UAC_DIRECTORY]/bin/linux' and '[UAC_DIRECTORY]/bin/freebsd' directories. +Note that the operating system name must be in lowercase. -- if you have a 'ss' binary for Linux x86_64, it needs to be placed in the '[uac_directory]\bin\linux\x86_64' directory. 
-- if you have a 'lsof' binary for AIX powerpc, it needs to be placed in the '[uac_directory]\bin\aix\powerpc' directory. -- if you have a 'netstat' binary for Android aarch64, it needs to be placed in the '[uac_directory]\bin\android\aarch64' directory. \ No newline at end of file +In the case where you have executables that can be run on multiple operating systems, they +should be placed in the '[UAC_DIRECTORY]/bin/[OS1_OS2_OS3]' directory. Note that you +can have multiple operating systems separated by an underscore '_'. + +For example, if you have an artifact that uses an executable named 'netstat' that +runs on both Linux and ESXi systems, you should place the binary either in the +'[UAC_DIRECTORY]/bin/linux_esxi' directory, or place the binary in the '[UAC_DIRECTORY]/bin/linux' and +'[UAC_DIRECTORY]/bin/esxi' directories. + +In the case where you have executables with the same name, but for multiple operating systems +and multiple architectures, they should be placed in the '[UAC_DIRECTORY]/bin/[OS]/[ARCH]' directory. + +For example, if you have an artifact that uses an executable named 'ss', but you +have binary files for Linux arm64 and ppc64, FreeBSD i386, and Solaris x86_64 and sparc64, +you should place the binary files in the '[UAC_DIRECTORY]/bin/linux/arm64', +'[UAC_DIRECTORY]/bin/linux/ppc64', '[UAC_DIRECTORY]/bin/freebsd/i386', +'[UAC_DIRECTORY]/bin/solaris/x86_64' and '[UAC_DIRECTORY]/bin/solaris/sparc64' directories. 
diff --git a/bin/linux/avml b/bin/linux/avml new file mode 100755 index 00000000..3556e57b Binary files /dev/null and b/bin/linux/avml differ diff --git a/tools/linux_procmemdump.sh/linux_procmemdump.sh b/bin/linux/linux_procmemdump.sh similarity index 100% rename from tools/linux_procmemdump.sh/linux_procmemdump.sh rename to bin/linux/linux_procmemdump.sh diff --git a/config/uac.conf b/config/uac.conf index f1a8b7ee..1ba3deaa 100644 --- a/config/uac.conf +++ b/config/uac.conf @@ -1,35 +1,49 @@ # UAC configuration file -# Directory/path patterns that will be excluded from 'find', 'stat', 'hash' and -# 'file' collectors. -# As 'find' tool is used to search for files and directories, the path -# patterns below need to be compatible with '-path' option. Please check -# 'find' man pages for more information. +# Directory/path patterns that will be excluded from find, file, hash and +# stat collectors. +# As find tool is used to search for files and directories, the path +# patterns below need to be compatible with -path option. Please check +# find man pages for more information. # Example: ["/etc", "/usr/*/local", "*/log"] exclude_path_pattern: [] -# File name patterns that will be excluded from 'find', 'stat', 'hash' and -# 'file' collectors. -# As 'find' tool is used to search for files and directories, the file name -# patterns below need to be compatible with '-name' option. Please check -# 'find' man pages for more information. +# File name patterns that will be excluded from find, file, hash and +# stat collectors. +# As find tool is used to search for files and directories, the file name +# patterns below need to be compatible with -name option. Please check +# find man pages for more information. # Example: ["/etc/passwd", "*.txt", "*.gz.*", "*.[Ll][Oo][Gg]"] exclude_name_pattern: [] -# File systems that will be excluded from 'find', 'stat', 'hash' and -# 'file' collectors. 
-# UAC will retrieve the list of existing mountpoints (paths) and add them +# File systems that will be excluded from find, file, hash and +# stat collectors. +# UAC will retrieve the list of existing mount-points (paths) and add them # to the exclusion list automatically. -# The file system types which are supported depend on the target computer's +# The file system types that are supported depend on the target computer's # running kernel. exclude_file_system: [9p, afs, autofs, cifs, davfs, fuse, kernfs, nfs, nfs4, rpc_pipefs, smbfs, sysfs] -# hash algorithms +# Hash algorithms +# Used by the hash collector. # Accepted values: md5, sha1 and sha256 hash_algorithm: [md5, sha1] +# The find tool is used by UAC to search for files and directories within a directory hierarchy. +# The -maxdepth parameter is used to limit the depth of the search within the directory tree. +# By default, when you run the find command without specifying -maxdepth, +# it searches recursively through all subdirectories of the specified directory. +# When you specify the -maxdepth parameter followed by a number, it restricts the depth +# of the search to that number of levels below the starting point in the directory hierarchy. +# This option is used by all find-based UAC collectors such as find, file, hash and stat. +# Performance Consideration: Specifying a value here can improve performance by limiting +# the scope of the search. +# Accepted values: a positive integer or 0 for disabling it (making UAC search recursively +# through all subdirectories of the specified directory). +max_depth: 0 + # Limit data collection based on the date range provided. -# UAC uses find's '-mtime', '-atime' and '-ctime' options to limit the data +# UAC uses find's -mtime, -atime and -ctime options to limit the data # collection based on the file/directory last accessed, last modified and last # status changed dates. 
# Example 1: @@ -37,7 +51,7 @@ hash_algorithm: [md5, sha1] # changed within the given date range, please set enable_find_mtime and # enable_find_ctime to true and enable_find_atime to false. # Example 2: -# to collect only files which last status was changed within the date +# to collect only files whose last status was changed within the date # range, please set enable_find_ctime to true, and enable_find_atime and # enable_find_mtime to false. # Accepted values: true or false diff --git a/lib/archive_compress_data.sh b/lib/archive_compress_data.sh deleted file mode 100644 index 10b2424e..00000000 --- a/lib/archive_compress_data.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Archive and compress files and directories. -# Globals: -# OPERATING_SYSTEM -# TEMP_DATA_DIR -# UAC_DIR -# Requires: -# None -# Arguments: -# $1: file containing the list of files to be archived and compressed -# $2: destination file -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -archive_compress_data() -{ - ac_source_file="${1:-}" - ac_destination_file="${2:-}" - - # exit if source file does not exist - if [ ! 
-f "${ac_source_file}" ]; then - printf %b "archive compress data: no such file or directory: \ -'${ac_source_file}'\n" >&2 - return 2 - fi - - case "${OPERATING_SYSTEM}" in - "aix") - tar -L "${ac_source_file}" -cf - | gzip >"${ac_destination_file}" - ;; - "freebsd"|"netbsd"|"netscaler"|"openbsd") - tar -I "${ac_source_file}" -cf - | gzip >"${ac_destination_file}" - ;; - "android"|"esxi"|"linux") - # some old tar/busybox versions do not support -T, so a different - # solution is required to package and compress data - # checking if tar can create package getting names from file - printf %b "${UAC_DIR}/uac" >"${TEMP_DATA_DIR}/.tar_check.tmp" 2>/dev/null - - if tar -T "${TEMP_DATA_DIR}/.tar_check.tmp" \ - -cf "${TEMP_DATA_DIR}/.tar_check.tar" 2>/dev/null; then - tar -T "${ac_source_file}" -cf - | gzip >"${ac_destination_file}" - else - # use file list as tar parameter - ac_file_list=`awk '{ printf "\"%s\" ", $0; }' <"${ac_source_file}"` - # eval is required here - eval "tar -cf - ${ac_file_list} | gzip >\"${ac_destination_file}\"" - fi - ;; - "macos") - tar -T "${ac_source_file}" -cf - | gzip >"${ac_destination_file}" - ;; - "solaris") - tar -cf - -I "${ac_source_file}" | gzip >"${ac_destination_file}" - ;; - esac - -} \ No newline at end of file diff --git a/lib/archive_data.sh b/lib/archive_data.sh deleted file mode 100644 index 320554cb..00000000 --- a/lib/archive_data.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Archive files and directories. -# Globals: -# OPERATING_SYSTEM -# TEMP_DATA_DIR -# UAC_DIR -# Requires: -# None -# Arguments: -# $1: file containing the list of files to be archived and compressed -# $2: destination file -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -archive_data() -{ - ad_source_file="${1:-}" - ad_destination_file="${2:-}" - - # exit if source file does not exist - if [ ! -f "${ad_source_file}" ]; then - printf %b "archive data: no such file or directory: \ -'${ad_source_file}'\n" >&2 - return 2 - fi - - case "${OPERATING_SYSTEM}" in - "aix") - tar -L "${ad_source_file}" -cf "${ad_destination_file}" - ;; - "freebsd"|"netbsd"|"netscaler"|"openbsd") - tar -I "${ad_source_file}" -cf "${ad_destination_file}" - ;; - "android"|"esxi"|"linux") - # some old tar/busybox versions do not support -T, so a different - # solution is required to package and compress data - # checking if tar can create package getting names from file - printf %b "${UAC_DIR}/uac" >"${TEMP_DATA_DIR}/.tar_check.tmp" 2>/dev/null - - if tar -T "${TEMP_DATA_DIR}/.tar_check.tmp" \ - -cf "${TEMP_DATA_DIR}/.tar_check.tar" 2>/dev/null; then - tar -T "${ad_source_file}" -cf "${ad_destination_file}" - else - # use file list as tar parameter - ad_file_list=`awk '{ printf "\"%s\" ", $0; }' <"${ad_source_file}"` - # eval is required here - eval "tar -cf \"${ad_destination_file}\" ${ad_file_list}" - fi - ;; - "macos") - tar -T "${ad_source_file}" -cf "${ad_destination_file}" - ;; - "solaris") - tar -cf "${ad_destination_file}" -I "${ad_source_file}" - ;; - esac - -} \ No newline at end of file diff --git a/lib/array_to_list.sh b/lib/array_to_list.sh deleted file mode 100644 index e53f3339..00000000 --- a/lib/array_to_list.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Convert yaml array to comma separated list. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: array -# Outputs: -# Write comma separated list to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -array_to_list() -{ - at_array="${1:-}" - - # remove starting [ and ending ] - # replace escaped comma (\,) by #_COMMA_# string - # replace escaped double quote (\") by #_DOUBLE_QUOTE_# string - # remove double quotes - # remove white spaces between elements - # remove empty elements - # replace #_COMMA_# string by comma - # replace #_DOUBLE_QUOTE_# string by \" - echo "${at_array}" \ - | sed -e 's:^ *\[::' \ - -e 's:\] *$::' \ - -e 's:\\,:#_COMMA_#:g' \ - -e 's:\\":#_DOUBLE_QUOTE_#:g' \ - -e 's:"::g' \ - -e 's: *,:,:g' \ - -e 's:, *:,:g' \ - -e 's:,,*:,:g' \ - -e 's:^,::g' \ - -e 's:,$::g' \ - -e 's:#_COMMA_#:\\,:g' \ - -e 's:#_DOUBLE_QUOTE_#:\\":g' - -} \ No newline at end of file diff --git a/lib/array_to_psv.sh b/lib/array_to_psv.sh new file mode 100644 index 00000000..9814f690 --- /dev/null +++ b/lib/array_to_psv.sh @@ -0,0 +1,34 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Convert an array to a sanitized pipe-separated values string. 
+# Arguments: +# string array: array of values +# Returns: +# string: sanitized pipe-separated values string +_array_to_psv() +{ + # remove leading and trailing brackets [ ] + # trim leading and trailing white space + # replace escaped comma (\,) by #_COMMA_# string + # remove white spaces between items + # remove empty items + # replace comma by pipe + # replace #_COMMA_# string by comma + # remove all single and double quotes + sed -e 's|^ *\[||' \ + -e 's|\] *$||' \ + -e 's|^ *||' \ + -e 's| *$||' \ + -e 's|\\,|#_COMMA_#|g' \ + -e 's| *,|,|g' \ + -e 's|, *|,|g' \ + -e 's|^,*||' \ + -e 's|,*$||' \ + -e 's|,,*|,|g' \ + -e 's:,:|:g' \ + -e 's|#_COMMA_#|,|g' \ + -e 's|"||g' \ + -e "s|'||g" + +} \ No newline at end of file diff --git a/lib/artifact_file_exist.sh b/lib/artifact_file_exist.sh deleted file mode 100644 index 8e1a5a68..00000000 --- a/lib/artifact_file_exist.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2001,SC2006 - -############################################################################### -# Check if artifact file exists. -# Globals: -# UAC_DIR -# Requires: -# None -# Arguments: -# $1: artifact file -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -artifact_file_exist() -{ - ae_artifact="${1:-}" - - # shellcheck disable=SC2086 - find "${UAC_DIR}"/artifacts/${ae_artifact} -name "*.yaml" \ - -print >/dev/null 2>/dev/null - -} \ No newline at end of file diff --git a/lib/astrings.sh b/lib/astrings.sh new file mode 100644 index 00000000..1ce02c5b --- /dev/null +++ b/lib/astrings.sh @@ -0,0 +1,22 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Print the sequences of printable characters in files. +# This is a shell implementation of the strings command. 
+# Arguments: +# string file: input file path +# Returns: +# string: printable characters +astrings() +{ + __as_file="${1:-}" + + if command_exists "tr"; then + tr '\0' '\n' <"${__as_file}" | sed -n 's/\([[:print:]]\{4,\}\)/\n\1\n/gp' | sed -n '/[[:print:]]\{4,\}/p' + elif command_exists "perl"; then + perl -pe 's/\0/\n/g' "${__as_file}" | sed -n 's/\([[:print:]]\{4,\}\)/\n\1\n/gp' | sed -n '/[[:print:]]\{4,\}/p' + else + sed 's/\x00/\n/g' "${__as_file}" | sed -n 's/\([[:print:]]\{4,\}\)/\n\1\n/gp' | sed -n '/[[:print:]]\{4,\}/p' + fi + +} \ No newline at end of file diff --git a/lib/aws_s3_presigned_url_transfer.sh b/lib/aws_s3_presigned_url_transfer.sh new file mode 100644 index 00000000..4a8b598e --- /dev/null +++ b/lib/aws_s3_presigned_url_transfer.sh @@ -0,0 +1,34 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Transfer file to Amazon S3 presigned URL. +# Arguments: +# string source: source file or empty for testing connection +# string url: presigned url +# Returns: +# boolean: true on success +# false on fail +_aws_s3_presigned_url_transfer() +{ + __aw_source="${1:-}" + __aw_url="${2:-}" + __aw_test_connectivity_mode=false + + if [ -z "${__aw_source}" ]; then + __aw_test_connectivity_mode=true + fi + + __aw_date=`date "+%a, %d %b %Y %H:%M:%S %z"` + __aw_content_type="application/octet-stream" + + _http_transfer \ + "${__aw_source}" \ + "${__aw_url}" \ + "" \ + "${__aw_date}" \ + "${__aw_content_type}" \ + "" \ + "${__aw_test_connectivity_mode}" + +} diff --git a/lib/azure_storage_sas_url_transfer.sh b/lib/azure_storage_sas_url_transfer.sh index 31a4f251..b55de7dd 100644 --- a/lib/azure_storage_sas_url_transfer.sh +++ b/lib/azure_storage_sas_url_transfer.sh @@ -1,34 +1,34 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 -############################################################################### # Transfer file to Azure Storage SAS URL. 
-# Globals: -# None -# Requires: -# None # Arguments: -# $1: source file -# $2: Azure Storage SAS URL -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -azure_storage_sas_url_transfer() +# string source: source file or empty for testing connection +# string url: azure storage sas url +# Returns: +# boolean: true on success +# false on fail +_azure_storage_sas_url_transfer() { - au_source="${1:-}" - au_azure_storage_sas_url="${2:-}" + __au_source="${1:-}" + __au_url="${2:-}" + __au_test_connectivity_mode=false - curl \ - --fail \ - --request PUT \ - --header "x-ms-blob-type: BlockBlob" \ - --header "Content-Type: application/octet-stream" \ - --header "Accept: */*" \ - --header "Expect: 100-continue" \ - --upload-file "${au_source}" \ - "${au_azure_storage_sas_url}" + if [ -z "${__au_source}" ]; then + __au_test_connectivity_mode=true + fi -} \ No newline at end of file + __au_date=`date "+%a, %d %b %Y %H:%M:%S %z"` + __au_content_type="application/octet-stream" + + _http_transfer \ + "${__au_source}" \ + "${__au_url}" \ + "" \ + "${__au_date}" \ + "${__au_content_type}" \ + "" \ + "${__au_test_connectivity_mode}" + +} diff --git a/lib/azure_storage_sas_url_transfer_test.sh b/lib/azure_storage_sas_url_transfer_test.sh deleted file mode 100644 index c288ae7a..00000000 --- a/lib/azure_storage_sas_url_transfer_test.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Test the connectivity to Azure Storage SAS URL. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: Azure Storage SAS URL -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -azure_storage_sas_url_transfer_test() -{ - ab_azure_storage_sas_url="${1:-}" - - curl \ - --fail \ - --request PUT \ - --header "x-ms-blob-type: BlockBlob" \ - --header "Content-Type: application/text" \ - --header "Accept: */*" \ - --header "Expect: 100-continue" \ - --data "Transfer test from UAC" \ - "${ab_azure_storage_sas_url}" - -} \ No newline at end of file diff --git a/lib/build_artifact_list.sh b/lib/build_artifact_list.sh new file mode 100644 index 00000000..6c9f249e --- /dev/null +++ b/lib/build_artifact_list.sh @@ -0,0 +1,29 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Build the artifact list to be used during execution based on the +# artifacts provided in the command line. +# Arguments: +# string artifact_list: comma-separated list of artifacts +# string operating_system: operating system (default: all) +# Returns: +# string: artifact list (line by line) +_build_artifact_list() +{ + __ba_artifact_list="${1:-}" + __ba_operating_system="${2:-all}" + + # some systems use busybox's find that not always support '-type f' + # skip artifacts that are not applicable to the target operating system + __ba_OIFS="${IFS}"; IFS=" +"; + for __ba_item in ${__ba_artifact_list}; do + if [ -f "${__ba_item}" ]; then + if grep -q -E "supported_os:.*all|${__ba_operating_system}" "${__ba_item}" 2>/dev/null || ${__UAC_IGNORE_OPERATING_SYSTEM:-false}; then + echo "${__ba_item}" + fi + fi + done + IFS="${__ba_OIFS}" + +} diff --git a/lib/build_find_command.sh b/lib/build_find_command.sh new file mode 100644 index 00000000..86322f7f --- /dev/null +++ b/lib/build_find_command.sh @@ -0,0 +1,434 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Build a working find command line based on the given parameters to search for files in a directory hierarchy. 
+# Arguments: +# string path: path +# string path_pattern: pipe-separated list of path patterns (optional) +# string name_pattern: pipe-separated list of name patterns (optional) +# string exclude_path_pattern: pipe-separated list of exclude path patterns (optional) +# string exclude_name_pattern: pipe-separated list of exclude name patterns (optional) +# integer max_depth: max depth (optional) +# string file_type: file type (optional) +# integer min_file_size: minimum file size (optional) +# integer max_file_size: maximum file size (optional) +# string permissions: permissions (optional) +# string print0: uses -print0 instead of -print (optional) +# integer start_date_days: days (optional) +# integer end_date_days: days (optional) +# boolean is_rescursive: add semicolon at the end of the find command (optional) +# Returns: +# string: command line +_build_find_command() +{ + # some systems such as Solaris 10 do not support more than 9 parameters + # on functions, not even using curly braces {} e.g. ${10} + # so the solution was to use shift + __bf_path="${1:-}" + shift + __bf_path_pattern="${1:-}" + shift + __bf_name_pattern="${1:-}" + shift + __bf_exclude_path_pattern="${1:-}" + shift + __bf_exclude_name_pattern="${1:-}" + shift + __bf_max_depth="${1:-0}" + shift + __bf_file_type="${1:-}" + shift + __bf_min_file_size="${1:-}" + shift + __bf_max_file_size="${1:-}" + shift + __bf_permissions="${1:-}" + shift + __bf_print0="${1:-false}" + shift + __bf_start_date_days="${1:-0}" + shift + __bf_end_date_days="${1:-0}" + shift + __bf_is_recursive="${1:-false}" + + if [ -z "${__bf_path}" ]; then + _log_msg ERR "_build_find_command: empty path parameter" + return 1 + fi + + # Build recursive parameters to be used with find. 
+ # Arguments: + # string parameter: parameter + # string items: pipe-separated value list of items + # boolean quote: add value between double-quotes + # Returns: + # string: recursive parameter string + _build_recursive_parameter() + { + __br_parameter="${1:-}" + __br_items="${2:-}" + __br_quote="${3:-false}" + + __br_quote_param="" + ${__br_quote} && __br_quote_param="\"" + + echo "${__br_items}" \ + | awk -v __br_param="${__br_parameter}" -v __br_quote_param="${__br_quote_param}" 'BEGIN { FS="|"; } { + for(N = 1; N <= NF; N ++) { + if ($N != "") { + if (N == 1) { + printf "%s %s%s%s", __br_param, __br_quote_param, $N, __br_quote_param; + } + else { + printf " -o %s %s%s%s", __br_param, __br_quote_param, $N, __br_quote_param; + } + } + } + }' + } + + __bf_perl_command_exists=false + command_exists "perl" && __bf_perl_command_exists=true + __bf_find_tool="find" + __bf_find_params="" + + # global options such as -maxdepth must be specified before other arguments. + # i.e., -maxdepth affects tests specified before it as well as those specified after it. 
+ # build -maxdepth parameter + if [ "${__UAC_CONF_MAX_DEPTH}" -gt 0 ] && [ "${__UAC_CONF_MAX_DEPTH}" -lt "${__bf_max_depth}" ]; then + __bf_max_depth="${__UAC_CONF_MAX_DEPTH}" + fi + if [ "${__bf_max_depth}" -gt 0 ]; then + if ${__UAC_TOOL_FIND_MAXDEPTH_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-maxdepth ${__bf_max_depth}" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-maxdepth ${__bf_max_depth}" + __bf_find_tool="find_pl" + fi + fi + + # build -path prune parameter + if [ -n "${__bf_exclude_path_pattern}" ]; then + if ${__UAC_TOOL_FIND_OPERATORS_SUPPORT} && ${__UAC_TOOL_FIND_PATH_SUPPORT} && ${__UAC_TOOL_FIND_PRUNE_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-path\" \"${__bf_exclude_path_pattern}\" true` \) -prune -o" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-path\" \"${__bf_exclude_path_pattern}\" true` \) -prune -o" + __bf_find_tool="find_pl" + fi + fi + + # build -path parameter + # -path parameter will be added even if find does not support it + if [ -n "${__bf_path_pattern}" ]; then + if _is_psv "${__bf_path_pattern}"; then + if ${__UAC_TOOL_FIND_OPERATORS_SUPPORT} && ${__UAC_TOOL_FIND_PATH_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-path\" \"${__bf_path_pattern}\" true` \)" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-path\" \"${__bf_path_pattern}\" true` \)" + __bf_find_tool="find_pl" + else + # shellcheck disable=SC2162,SC2030 + echo "${__bf_path_pattern}" \ + | awk 'BEGIN{RS="|"} {print $0}' \ + | while read __bf_path_pattern_line && [ -n "${__bf_path_pattern_line}" ]; do + _build_find_command \ + "${__bf_path}" \ + "${__bf_path_pattern_line}" \ + 
"${__bf_name_pattern}" \ + "${__bf_exclude_path_pattern}" \ + "${__bf_exclude_name_pattern}" \ + "${__bf_max_depth}" \ + "${__bf_file_type}" \ + "${__bf_min_file_size}" \ + "${__bf_max_file_size}" \ + "${__bf_permissions}" \ + "${__bf_print0}" \ + "${__bf_start_date_days}" \ + "${__bf_end_date_days}" \ + true + done + return + fi + else + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-path \"${__bf_path_pattern}\"" + if ${__UAC_TOOL_FIND_PATH_SUPPORT}; then + true + elif ${__bf_perl_command_exists}; then + __bf_find_tool="find_pl" + fi + fi + fi + + # build -name prune parameter + if [ -n "${__bf_exclude_name_pattern}" ]; then + if ${__UAC_TOOL_FIND_OPERATORS_SUPPORT} && ${__UAC_TOOL_FIND_PRUNE_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-name\" \"${__bf_exclude_name_pattern}\" true` \) -prune -o" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-name\" \"${__bf_exclude_name_pattern}\" true` \) -prune -o" + __bf_find_tool="find_pl" + fi + fi + + # build -name parameter + if [ -n "${__bf_name_pattern}" ]; then + if _is_psv "${__bf_name_pattern}"; then + if ${__UAC_TOOL_FIND_OPERATORS_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-name\" \"${__bf_name_pattern}\" true` \)" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-name\" \"${__bf_name_pattern}\" true` \)" + __bf_find_tool="find_pl" + else + # shellcheck disable=SC2162,SC2030 + echo "${__bf_name_pattern}" \ + | awk 'BEGIN{RS="|"} {print $0}' \ + | while read __bf_name_pattern_line && [ -n "${__bf_name_pattern_line}" ]; do + _build_find_command \ + "${__bf_path}" \ + "${__bf_path_pattern}" \ + "${__bf_name_pattern_line}" \ + "${__bf_exclude_path_pattern}" \ + "${__bf_exclude_name_pattern}" \ + 
"${__bf_max_depth}" \ + "${__bf_file_type}" \ + "${__bf_min_file_size}" \ + "${__bf_max_file_size}" \ + "${__bf_permissions}" \ + "${__bf_print0}" \ + "${__bf_start_date_days}" \ + "${__bf_end_date_days}" \ + true + done + return + fi + else + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-name \"${__bf_name_pattern}\"" + fi + fi + + # build -type parameter + if [ -n "${__bf_file_type}" ]; then + if _is_psv "${__bf_file_type}"; then + if ${__UAC_TOOL_FIND_OPERATORS_SUPPORT} && ${__UAC_TOOL_FIND_TYPE_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-type\" \"${__bf_file_type}\"` \)" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-type\" \"${__bf_file_type}\"` \)" + __bf_find_tool="find_pl" + elif ${__UAC_TOOL_FIND_TYPE_SUPPORT}; then + # shellcheck disable=SC2162,SC2030 + echo "${__bf_file_type}" \ + | awk 'BEGIN{RS="|"} {print $0}' \ + | while read __bf_file_type_line && [ -n "${__bf_file_type_line}" ]; do + _build_find_command \ + "${__bf_path}" \ + "${__bf_path_pattern}" \ + "${__bf_name_pattern}" \ + "${__bf_exclude_path_pattern}" \ + "${__bf_exclude_name_pattern}" \ + "${__bf_max_depth}" \ + "${__bf_file_type_line}" \ + "${__bf_min_file_size}" \ + "${__bf_max_file_size}" \ + "${__bf_permissions}" \ + "${__bf_print0}" \ + "${__bf_start_date_days}" \ + "${__bf_end_date_days}" \ + true + done + return + fi + elif ${__UAC_TOOL_FIND_TYPE_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-type ${__bf_file_type}" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-type ${__bf_file_type}" + __bf_find_tool="find_pl" + fi + fi + + # build -size parameter + if [ -n "${__bf_min_file_size}" ]; then + if ${__UAC_TOOL_FIND_SIZE_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-size +${__bf_min_file_size}c" + elif 
${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-size +${__bf_min_file_size}c" + __bf_find_tool="find_pl" + fi + fi + if [ -n "${__bf_max_file_size}" ]; then + if ${__UAC_TOOL_FIND_SIZE_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-size -${__bf_max_file_size}c" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-size -${__bf_max_file_size}c" + __bf_find_tool="find_pl" + fi + fi + + # build -perm parameter + # -perm parameter will be added even if find does not support it + if [ -n "${__bf_permissions}" ]; then + if _is_psv "${__bf_permissions}"; then + if ${__UAC_TOOL_FIND_OPERATORS_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-perm\" \"${__bf_permissions}\"` \)" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }\( `_build_recursive_parameter \"-perm\" \"${__bf_permissions}\"` \)" + __bf_find_tool="find_pl" + else + # shellcheck disable=SC2162,SC2030 + echo "${__bf_permissions}" \ + | awk 'BEGIN{RS="|"} {print $0}' \ + | while read __bf_permissions_line && [ -n "${__bf_permissions_line}" ]; do + _build_find_command \ + "${__bf_path}" \ + "${__bf_path_pattern}" \ + "${__bf_name_pattern}" \ + "${__bf_exclude_path_pattern}" \ + "${__bf_exclude_name_pattern}" \ + "${__bf_max_depth}" \ + "${__bf_file_type}" \ + "${__bf_min_file_size}" \ + "${__bf_max_file_size}" \ + "${__bf_permissions_line}" \ + "${__bf_print0}" \ + "${__bf_start_date_days}" \ + "${__bf_end_date_days}" \ + true + done + return + fi + else + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-perm ${__bf_permissions}" + fi + fi + + # build -mtime parameter + __bf_find_mtime_param="" + if ${__UAC_CONF_ENABLE_FIND_MTIME}; then + if [ "${__bf_start_date_days}" -gt 0 ]; then + if ${__UAC_TOOL_FIND_MTIME_SUPPORT}; then + 
__bf_find_mtime_param="${__bf_find_mtime_param}${__bf_find_mtime_param:+ }-mtime -${__bf_start_date_days}" + elif ${__bf_perl_command_exists}; then + __bf_find_mtime_param="${__bf_find_mtime_param}${__bf_find_mtime_param:+ }-mtime -${__bf_start_date_days}" + __bf_find_tool="find_pl" + fi + fi + if [ "${__bf_end_date_days}" -gt 0 ]; then + if ${__UAC_TOOL_FIND_MTIME_SUPPORT}; then + __bf_find_mtime_param="${__bf_find_mtime_param}${__bf_find_mtime_param:+ }-mtime +${__bf_end_date_days}" + elif ${__bf_perl_command_exists}; then + __bf_find_mtime_param="${__bf_find_mtime_param}${__bf_find_mtime_param:+ }-mtime +${__bf_end_date_days}" + __bf_find_tool="find_pl" + fi + fi + fi + + # build -atime parameter + __bf_find_atime_param="" + if ${__UAC_CONF_ENABLE_FIND_ATIME}; then + if [ "${__bf_start_date_days}" -gt 0 ]; then + if ${__UAC_TOOL_FIND_ATIME_SUPPORT}; then + __bf_find_atime_param="${__bf_find_atime_param}${__bf_find_atime_param:+ }-atime -${__bf_start_date_days}" + elif ${__bf_perl_command_exists}; then + __bf_find_atime_param="${__bf_find_atime_param}${__bf_find_atime_param:+ }-atime -${__bf_start_date_days}" + __bf_find_tool="find_pl" + fi + fi + if [ "${__bf_end_date_days}" -gt 0 ]; then + if ${__UAC_TOOL_FIND_ATIME_SUPPORT}; then + __bf_find_atime_param="${__bf_find_atime_param}${__bf_find_atime_param:+ }-atime +${__bf_end_date_days}" + elif ${__bf_perl_command_exists}; then + __bf_find_atime_param="${__bf_find_atime_param}${__bf_find_atime_param:+ }-atime +${__bf_end_date_days}" + __bf_find_tool="find_pl" + fi + fi + fi + + # build -ctime parameter + __bf_find_ctime_param="" + if ${__UAC_CONF_ENABLE_FIND_CTIME}; then + if [ "${__bf_start_date_days}" -gt 0 ]; then + if ${__UAC_TOOL_FIND_CTIME_SUPPORT}; then + __bf_find_ctime_param="${__bf_find_ctime_param}${__bf_find_ctime_param:+ }-ctime -${__bf_start_date_days}" + elif ${__bf_perl_command_exists}; then + __bf_find_ctime_param="${__bf_find_ctime_param}${__bf_find_ctime_param:+ }-ctime 
-${__bf_start_date_days}" + __bf_find_tool="find_pl" + fi + fi + if [ "${__bf_end_date_days}" -gt 0 ]; then + if ${__UAC_TOOL_FIND_CTIME_SUPPORT}; then + __bf_find_ctime_param="${__bf_find_ctime_param}${__bf_find_ctime_param:+ }-ctime +${__bf_end_date_days}" + elif ${__bf_perl_command_exists}; then + __bf_find_ctime_param="${__bf_find_ctime_param}${__bf_find_ctime_param:+ }-ctime +${__bf_end_date_days}" + __bf_find_tool="find_pl" + fi + fi + fi + + # build -mtime, -atime and -ctime together + __bf_find_date_range_param="" + if { + { [ -n "${__bf_find_mtime_param}" ] && [ -n "${__bf_find_atime_param}" ]; } || \ + { [ -n "${__bf_find_mtime_param}" ] && [ -n "${__bf_find_ctime_param}" ]; } || \ + { [ -n "${__bf_find_atime_param}" ] && [ -n "${__bf_find_ctime_param}" ]; } + } && \ + { ${__UAC_TOOL_FIND_OPERATORS_SUPPORT} || ${__bf_perl_command_exists}; }; then + # multiples date range parameters enabled + if ${__UAC_TOOL_FIND_OPERATORS_SUPPORT}; then + true + else + __bf_find_tool="find_pl" + fi + if [ -n "${__bf_find_mtime_param}" ]; then + __bf_find_date_range_param="${__bf_find_date_range_param}${__bf_find_date_range_param:+ }\( ${__bf_find_mtime_param} \)" + fi + if [ -n "${__bf_find_ctime_param}" ]; then + if [ -n "${__bf_find_date_range_param}" ]; then + __bf_find_date_range_param="${__bf_find_date_range_param}${__bf_find_date_range_param:+ }-o" + fi + __bf_find_date_range_param="${__bf_find_date_range_param}${__bf_find_date_range_param:+ }\( ${__bf_find_ctime_param} \)" + fi + if [ -n "${__bf_find_atime_param}" ]; then + if [ -n "${__bf_find_date_range_param}" ]; then + __bf_find_date_range_param="${__bf_find_date_range_param}${__bf_find_date_range_param:+ }-o" + fi + __bf_find_date_range_param="${__bf_find_date_range_param}${__bf_find_date_range_param:+ }\( ${__bf_find_atime_param} \)" + fi + __bf_find_date_range_param="\( ${__bf_find_date_range_param} \)" + else + # only one date range parameter enabled + if [ -n "${__bf_find_mtime_param}" ]; then + 
__bf_find_date_range_param="${__bf_find_mtime_param}" + elif [ -n "${__bf_find_ctime_param}" ]; then + __bf_find_date_range_param="${__bf_find_ctime_param}" + elif [ -n "${__bf_find_atime_param}" ]; then + __bf_find_date_range_param="${__bf_find_atime_param}" + fi + fi + + if [ -n "${__bf_find_params}" ]; then + __bf_find_params="${__bf_find_params}${__bf_find_date_range_param:+ }${__bf_find_date_range_param}" + else + __bf_find_params="${__bf_find_date_range_param}" + fi + + # build -print0 parameter + if ${__bf_print0}; then + if ${__UAC_TOOL_FIND_PRINT0_SUPPORT}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-print0" + elif ${__bf_perl_command_exists}; then + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-print0" + __bf_find_tool="find_pl" + else + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-print" + fi + else + __bf_find_params="${__bf_find_params}${__bf_find_params:+ }-print" + fi + + if ${__bf_is_recursive}; then + printf "%s %s %s; " "${__bf_find_tool}" "${__bf_path}" "${__bf_find_params}" + else + printf "%s %s %s" "${__bf_find_tool}" "${__bf_path}" "${__bf_find_params}" + fi + +} \ No newline at end of file diff --git a/lib/check_available_system_tools.sh b/lib/check_available_system_tools.sh deleted file mode 100644 index 0b0e0047..00000000 --- a/lib/check_available_system_tools.sh +++ /dev/null @@ -1,269 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2034 - -############################################################################### -# Check tools available on the target system. Also, checks for which parameters -# are supported by find tool. 
-# Globals: -# MOUNT_POINT -# OPERATING_SYSTEM -# UAC_DIR -# Requires: -# command_exists -# Arguments: -# None -# Outputs: -# Set the value for the following global vars: -# CURL_TOOL_AVAILABLE -# FIND_ATIME_SUPPORT -# FIND_CTIME_SUPPORT -# FIND_MAXDEPTH_SUPPORT -# FIND_MTIME_SUPPORT -# FIND_OPERATORS_SUPPORT -# FIND_PATH_SUPPORT -# FIND_PERM_SUPPORT -# FIND_SIZE_SUPPORT -# GZIP_TOOL_AVAILABLE -# MD5_HASHING_TOOL -# PERL_TOOL_AVAILABLE -# PROCSTAT_TOOL_AVAILABLE -# SHA1_HASHING_TOOL -# SHA256_HASHING_TOOL -# STATX_TOOL_AVAILABLE -# STAT_BTIME_SUPPORT -# STAT_TOOL_AVAILABLE -# XARGS_REPLACE_STRING_SUPPORT -# ZIP_TOOL_AVAILABLE -# Exit Status: -# Last command exit status code. -############################################################################### -check_available_system_tools() -{ - CURL_TOOL_AVAILABLE=false - FIND_ATIME_SUPPORT=false - FIND_CTIME_SUPPORT=false - FIND_MAXDEPTH_SUPPORT=false - FIND_MTIME_SUPPORT=false - FIND_OPERATORS_SUPPORT=false - FIND_PATH_SUPPORT=false - FIND_PERM_SUPPORT=false - FIND_SIZE_SUPPORT=false - FIND_TYPE_SUPPORT=false - GZIP_TOOL_AVAILABLE=false - MD5_HASHING_TOOL="" - PERL_TOOL_AVAILABLE=false - PROCSTAT_TOOL_AVAILABLE=false - SHA1_HASHING_TOOL="" - SHA256_HASHING_TOOL="" - STATX_TOOL_AVAILABLE=false - STAT_BTIME_SUPPORT=false - STAT_TOOL_AVAILABLE=false - XARGS_REPLACE_STRING_SUPPORT=false - ZIP_TOOL_AVAILABLE=false - - # check if 'gzip' tool is available - if command_exists "gzip"; then - GZIP_TOOL_AVAILABLE=true - fi - - # check if 'perl' is available - if command_exists "perl"; then - PERL_TOOL_AVAILABLE=true - fi - - # check if 'procstat' is available - if command_exists "procstat"; then - PROCSTAT_TOOL_AVAILABLE=true - fi - - # check if 'curl' is available - if command_exists "curl"; then - CURL_TOOL_AVAILABLE=true - fi - - # check if 'zip' is available - if command_exists "zip"; then - ZIP_TOOL_AVAILABLE=true - elif [ "${OPERATING_SYSTEM}" = "esxi" ] \ - || [ "${OPERATING_SYSTEM}" = "linux" ]; then - for ca_directory 
in "${UAC_DIR}"/tools/zip/linux/*; do - if "${ca_directory}/zip" - "${UAC_DIR}/uac" >/dev/null 2>/dev/null; then - PATH="${ca_directory}:${PATH}" - export PATH - ZIP_TOOL_AVAILABLE=true - break - fi - done - elif [ "${OPERATING_SYSTEM}" = "freebsd" ] \ - || [ "${OPERATING_SYSTEM}" = "netscaler" ]; then - for ca_directory in "${UAC_DIR}"/tools/zip/freebsd/*; do - if "${ca_directory}/zip" - "${UAC_DIR}/uac" >/dev/null 2>/dev/null; then - PATH="${ca_directory}:${PATH}" - export PATH - ZIP_TOOL_AVAILABLE=true - break - fi - done - fi - - # check if 'stat' is available - if command_exists "stat"; then - STAT_TOOL_AVAILABLE=true - # check if birth time is collected by 'stat' - case "${OPERATING_SYSTEM}" in - "freebsd"|"macos"|"netbsd"|"netscaler"|"openbsd") - if eval "stat -f \"0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B\" \"${MOUNT_POINT}\" \ - | grep -q -E \"\|[0-9]{2,}$\""; then - STAT_BTIME_SUPPORT=true - fi - ;; - "android"|"esxi"|"linux"|"solaris") - if eval "stat -c \"0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W\" \"${MOUNT_POINT}\" \ - | grep -q -E \"\|[0-9]{2,}$\""; then - STAT_BTIME_SUPPORT=true - fi - ;; - esac - fi - - if ${STAT_BTIME_SUPPORT}; then - true - else - # check if 'statx' is available for the current system architecture - if [ "${OPERATING_SYSTEM}" = "esxi" ] \ - || [ "${OPERATING_SYSTEM}" = "linux" ]; then - ca_arch="" - case "${SYSTEM_ARCH}" in - armv[34567]*) - ca_arch="arm" - ;; - aarch64*|armv[89]*) - ca_arch="arm64" - ;; - "i486"|"i586"|"i686"|pentium*|athlon*) - ca_arch="i386" - ;; - "mips") - ca_arch="mips" - ;; - "mips64") - ca_arch="mips64" - ;; - "ppc") - ca_arch="ppc" - ;; - "ppc64") - ca_arch="ppc64" - ;; - "ppc64le") - ca_arch="ppc64le" - ;; - s390*) - ca_arch="s390" - ;; - sparc*) - ca_arch="sparc64" - ;; - *) - ca_arch="x86_64" - ;; - esac - if [ -n "${ca_arch}" ] \ - && eval "\"${UAC_DIR}/tools/statx/linux/${ca_arch}/statx\" \"${MOUNT_POINT}\""; then - PATH="${UAC_DIR}/tools/statx/linux/${ca_arch}:${PATH}" - export PATH - STATX_TOOL_AVAILABLE=true - 
fi - fi - fi - - # check if 'xargs' supports -I{} parameter - if eval "echo \"uac\" | xargs -I{}"; then - # check if 'xargs' removes the backslash character from escaped quotes - if eval "echo \"uac\'uac\" | xargs -I{} echo \"{}\" | grep \"uac'uac\""; then - XARGS_REPLACE_STRING_SUPPORT=true - fi - fi - - # check which options are supported by the find tool - if eval "find \"${UAC_DIR}\" \\( -name \"uac.conf\" -o -name \"uac.conf\" \\) -print"; then - FIND_OPERATORS_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}\" -path \"${UAC_DIR}\" -print"; then - FIND_PATH_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}/uac\" -size +1c -print"; then - FIND_SIZE_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}/uac\" -maxdepth 1 -print"; then - FIND_MAXDEPTH_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}/uac\" -perm -0000 -print"; then - FIND_PERM_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}/uac\" -type f -print"; then - FIND_TYPE_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}/uac\" -atime +1 -print"; then - FIND_ATIME_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}/uac\" -mtime +1 -print"; then - FIND_MTIME_SUPPORT=true - fi - - if eval "find \"${UAC_DIR}/uac\" -ctime +1 -print"; then - FIND_CTIME_SUPPORT=true - fi - - # check for available MD5 hashing tools - if command_exists "md5sum"; then - MD5_HASHING_TOOL="md5sum" - elif command_exists "md5"; then - MD5_HASHING_TOOL="md5" - elif eval "echo \"uac\" | digest -v -a md5"; then - MD5_HASHING_TOOL="digest -v -a md5" - elif eval "csum -h MD5 \"${UAC_DIR}/uac\""; then - MD5_HASHING_TOOL="csum -h MD5" - elif eval "echo \"uac\" | openssl dgst -md5"; then - MD5_HASHING_TOOL="openssl dgst -md5" - fi - - # check for available SHA1 hashing tools - if command_exists "sha1sum"; then - SHA1_HASHING_TOOL="sha1sum" - elif eval "echo \"uac\" | shasum -a 1"; then - SHA1_HASHING_TOOL="shasum -a 1" - elif command_exists "sha1"; then - SHA1_HASHING_TOOL="sha1" - elif eval "echo \"uac\" | digest -v -a sha1"; then - 
SHA1_HASHING_TOOL="digest -v -a sha1" - elif eval "csum -h SHA1 \"${UAC_DIR}/uac\""; then - SHA1_HASHING_TOOL="csum -h SHA1" - elif eval "echo \"uac\" | openssl dgst -sha1"; then - SHA1_HASHING_TOOL="openssl dgst -sha1" - fi - - # check for available SHA256 hashing tools - if command_exists "sha256sum"; then - SHA256_HASHING_TOOL="sha256sum" - elif eval "echo \"uac\" | shasum -a 256"; then - SHA256_HASHING_TOOL="shasum -a 256" - elif command_exists "sha256"; then - SHA256_HASHING_TOOL="sha256" - elif eval "echo \"uac\" | digest -v -a sha256"; then - SHA256_HASHING_TOOL="digest -v -a sha256" - elif eval "csum -h SHA256 \"${UAC_DIR}/uac\""; then - SHA256_HASHING_TOOL="csum -h SHA256" - elif eval "echo \"uac\" | openssl dgst -sha256"; then - SHA256_HASHING_TOOL="openssl dgst -sha256" - fi - -} \ No newline at end of file diff --git a/lib/command_collector.sh b/lib/command_collector.sh index 1a9acf3e..a443d8c6 100644 --- a/lib/command_collector.sh +++ b/lib/command_collector.sh @@ -1,237 +1,123 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2001,SC2006 - -############################################################################### -# Collector that runs commands. -# Globals: -# GZIP_TOOL_AVAILABLE -# TEMP_DATA_DIR -# Requires: -# log_message +# shellcheck disable=SC2006 + +# Command collector. # Arguments: -# $1: foreach (optional) -# $2: command -# $3: root output directory -# $4: output directory (optional) -# $5: output file -# $6: stderr output file (optional) -# $7: compress output file (optional) (default: false) -# Outputs: -# Write command output to stdout. -# Write command errors to stderr. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -command_collector() +# string foreach: (optional) +# string command: command +# string output_directory: ful path to the output directory +# string output_file: output file name (optional) +# boolean compress_output_file: compress output file (optional) (default: false) +# Returns: +# none +_command_collector() { - cc_foreach="${1:-}" - cc_command="${2:-}" - cc_root_output_directory="${3:-}" - cc_output_directory="${4:-}" - cc_output_file="${5:-}" - cc_stderr_output_file="${6:-}" - cc_compress_output_file="${7:-false}" - - # return if command is empty - if [ -z "${cc_command}" ]; then - printf %b "command_collector: missing required argument: 'command'\n" >&2 - return 22 - fi - - # return if root output directory is empty - if [ -z "${cc_root_output_directory}" ]; then - printf %b "command_collector: missing required argument: \ -'root_output_directory'\n" >&2 - return 22 + __cc_foreach="${1:-}" + __cc_command="${2:-}" + __cc_output_directory="${3:-}" + __cc_output_file="${4:-}" + __cc_compress_output_file="${5:-false}" + + if [ -z "${__cc_command}" ]; then + _log_msg ERR "_command_collector: empty command parameter" + return 1 fi - # return if output file is empty - if [ -z "${cc_output_file}" ]; then - printf %b "command_collector: missing required argument: 'output_file'\n" >&2 - return 22 + if [ -z "${__cc_output_directory}" ]; then + _log_msg ERR "_command_collector: empty output_directory parameter" + return 1 fi - # loop command - if [ -n "${cc_foreach}" ]; then + if [ -n "${__cc_foreach}" ]; then - # create output directory if it does not exist - if [ ! -d "${TEMP_DATA_DIR}/${cc_root_output_directory}" ]; then - mkdir -p "${TEMP_DATA_DIR}/${cc_root_output_directory}" >/dev/null - fi - - log_message COMMAND "${cc_foreach}" - eval "${cc_foreach}" \ - >"${TEMP_DATA_DIR}/.foreach.tmp" \ - 2>>"${TEMP_DATA_DIR}/${cc_root_output_directory}/foreach.stderr" - - if [ ! 
-s "${TEMP_DATA_DIR}/.foreach.tmp" ]; then - printf %b "command_collector: loop command returned zero lines: \ -${cc_foreach}\n" >&2 - return 61 - fi + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__cc_foreach}" + __cc_foreach_stdout=`_run_command "${__cc_foreach}"` # shellcheck disable=SC2162 - sort -u <"${TEMP_DATA_DIR}/.foreach.tmp" \ - | while read cc_line || [ -n "${cc_line}" ]; do - - cc_line=`echo "${cc_line}" \ - | sed -e "s/:/#_COLON_#/g"` + echo "${__cc_foreach_stdout}" \ + | sort -u \ + | while IFS= read __cc_line && [ -n "${__cc_line}" ]; do - # replace %line% by cc_line value - cc_new_command=`echo "${cc_command}" \ - | sed -e "s:%line%:${cc_line}:g"` - cc_new_command=`echo "${cc_new_command}" \ - | sed -e "s/#_COLON_#/:/g"` + # replace %line% by __cc_line value + __cc_new_command=`echo "${__cc_command}" | sed -e "s|%line%|${__cc_line}|g"` + __cc_new_output_directory=`echo "${__cc_output_directory}" | sed -e "s|%line%|${__cc_line}|g"` - # replace %line% by cc_line value - cc_new_output_directory=`echo "${cc_output_directory}" \ - | sed -e "s:%line%:${cc_line}:g"` - cc_new_output_directory=`echo "${cc_new_output_directory}" \ - | sed -e "s/#_COLON_#/:/g"` - # sanitize output directory - cc_new_output_directory=`sanitize_path \ - "${cc_root_output_directory}/${cc_new_output_directory}"` - - # replace %line% by cc_line value - cc_new_output_file=`echo "${cc_output_file}" \ - | sed -e "s:%line%:${cc_line}:g"` - cc_new_output_file=`echo "${cc_new_output_file}" \ - | sed -e "s/#_COLON_#/:/g"` - # sanitize output file - cc_new_output_file=`sanitize_filename \ - "${cc_new_output_file}"` - - if [ -n "${cc_stderr_output_file}" ]; then - # replace %line% by cc_line value - cc_new_stderr_output_file=`echo "${cc_stderr_output_file}" \ - | sed -e "s:%line%:${cc_line}:g"` - cc_new_stderr_output_file=`echo "${cc_new_stderr_output_file}" \ - | sed -e "s/#_COLON_#/:/g"` - # sanitize stderr output file - cc_new_stderr_output_file=`sanitize_filename \ - 
"${cc_new_stderr_output_file}"` - else - cc_new_stderr_output_file="${cc_new_output_file}.stderr" - fi + __cc_new_output_directory=`_sanitize_output_directory "${__cc_new_output_directory}"` - # create output directory if it does not exist - if [ ! -d "${TEMP_DATA_DIR}/${cc_new_output_directory}" ]; then - mkdir -p "${TEMP_DATA_DIR}/${cc_new_output_directory}" >/dev/null + if [ ! -d "${__cc_new_output_directory}" ]; then + mkdir -p "${__cc_new_output_directory}" >/dev/null fi - if echo "${cc_new_command}" | grep -q -E "%output_file%"; then - # replace %output_file% by ${cc_output_file} in command - cc_new_command=`echo "${cc_new_command}" \ - | sed -e "s:%output_file%:${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_output_file}:g"` - # run command and append output to existing file - log_message COMMAND "${cc_new_command}" - eval "${cc_new_command}" \ - >>"${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_stderr_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_stderr_output_file}" - # remove output file if it is empty - if [ ! 
-s "${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_output_file}" \ - >/dev/null + if [ -n "${__cc_output_file}" ]; then + __cc_new_output_file=`echo "${__cc_output_file}" | sed -e "s|%line%|${__cc_line}|g"` + __cc_new_output_file=`_sanitize_output_file "${__cc_new_output_file}"` + + if ${__cc_compress_output_file} && command_exists "gzip"; then + __cc_new_output_file="${__cc_new_output_file}.gz" + __cc_new_command="${__cc_new_command} | gzip - | cat -" fi - else - if "${cc_compress_output_file}" && ${GZIP_TOOL_AVAILABLE}; then - # run command and append output to compressed file - log_message COMMAND "${cc_new_command} | gzip - | cat -" - eval "${cc_new_command} | gzip - | cat -" \ - >>"${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_output_file}.gz" \ - 2>>"${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_stderr_output_file}" - else - # run command and append output to existing file - log_message COMMAND "${cc_new_command}" - eval "${cc_new_command}" \ - >>"${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_stderr_output_file}" - # remove output file if it is empty - if [ ! 
-s "${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_output_file}" \ - >/dev/null + + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__cc_new_command}" + _run_command "${__cc_new_command}" \ + >>"${__cc_new_output_directory}/${__cc_new_output_file}" + + # remove output file if it is empty + if ${__cc_compress_output_file} && command_exists "gzip"; then + __cc_compressed_file_size=`wc -c "${__cc_new_output_directory}/${__cc_new_output_file}" | awk '{print $1}'` + if [ "${__cc_compressed_file_size}" -lt 21 ]; then + rm -f "${__cc_new_output_directory}/${__cc_new_output_file}" >/dev/null + _log_msg DBG "Empty compressed output file '${__cc_new_output_file}'" fi + elif [ ! -s "${__cc_new_output_directory}/${__cc_new_output_file}" ]; then + rm -f "${__cc_new_output_directory}/${__cc_new_output_file}" >/dev/null + _log_msg DBG "Empty output file '${__cc_new_output_file}'" fi + else + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__cc_new_command}" + ( + cd "${__cc_new_output_directory}" \ + && _run_command "${__cc_new_command}" + ) fi - - # remove stderr output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_stderr_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${cc_new_output_directory}/${cc_new_stderr_output_file}" \ - >/dev/null - fi - done - # remove foreach.stderr file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${cc_root_output_directory}/foreach.stderr" ]; then - rm -f "${TEMP_DATA_DIR}/${cc_root_output_directory}/foreach.stderr" \ - >/dev/null - fi - else + __cc_output_directory=`_sanitize_output_directory "${__cc_output_directory}"` - # sanitize output file name - cc_output_file=`sanitize_filename "${cc_output_file}"` - - if [ -n "${cc_stderr_output_file}" ]; then - # sanitize stderr output file name - cc_stderr_output_file=`sanitize_filename "${cc_stderr_output_file}"` - else - cc_stderr_output_file="${cc_output_file}.stderr" + if [ ! 
-d "${__cc_output_directory}" ]; then + mkdir -p "${__cc_output_directory}" >/dev/null fi - # sanitize output directory - cc_output_directory=`sanitize_path \ - "${cc_root_output_directory}/${cc_output_directory}"` - - # create output directory if it does not exist - if [ ! -d "${TEMP_DATA_DIR}/${cc_output_directory}" ]; then - mkdir -p "${TEMP_DATA_DIR}/${cc_output_directory}" >/dev/null - fi + if [ -n "${__cc_output_file}" ]; then + __cc_output_file=`_sanitize_output_file "${__cc_output_file}"` + if ${__cc_compress_output_file} && command_exists "gzip"; then + __cc_output_file="${__cc_output_file}.gz" + __cc_command="${__cc_command} | gzip - | cat -" + fi + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__cc_command}" + _run_command "${__cc_command}" \ + >>"${__cc_output_directory}/${__cc_output_file}" - if echo "${cc_command}" | grep -q -E "%output_file%"; then - # replace %output_file% by ${cc_output_file} in command - cc_command=`echo "${cc_command}" \ - | sed -e "s:%output_file%:${TEMP_DATA_DIR}/${cc_output_directory}/${cc_output_file}:g"` - # run command and append output to existing file - log_message COMMAND "${cc_command}" - eval "${cc_command}" \ - >>"${TEMP_DATA_DIR}/${cc_output_directory}/${cc_stderr_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${cc_output_directory}/${cc_stderr_output_file}" # remove output file if it is empty - if [ ! 
-s "${TEMP_DATA_DIR}/${cc_output_directory}/${cc_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${cc_output_directory}/${cc_output_file}" \ - >/dev/null - fi - else - if "${cc_compress_output_file}" && ${GZIP_TOOL_AVAILABLE}; then - # run command and append output to compressed file - log_message COMMAND "${cc_command} | gzip - | cat -" - eval "${cc_command} | gzip - | cat -" \ - >>"${TEMP_DATA_DIR}/${cc_output_directory}/${cc_output_file}.gz" \ - 2>>"${TEMP_DATA_DIR}/${cc_output_directory}/${cc_stderr_output_file}" - else - # run command and append output to existing file - log_message COMMAND "${cc_command}" - eval "${cc_command}" \ - >>"${TEMP_DATA_DIR}/${cc_output_directory}/${cc_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${cc_output_directory}/${cc_stderr_output_file}" - # remove output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${cc_output_directory}/${cc_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${cc_output_directory}/${cc_output_file}" \ - >/dev/null + if ${__cc_compress_output_file} && command_exists "gzip"; then + __cc_compressed_file_size=`wc -c "${__cc_output_directory}/${__cc_output_file}" | awk '{print $1}'` + if [ "${__cc_compressed_file_size}" -lt 21 ]; then + rm -f "${__cc_output_directory}/${__cc_output_file}" >/dev/null + _log_msg DBG "Empty compressed output file '${__cc_output_file}'" fi + elif [ ! -s "${__cc_output_directory}/${__cc_output_file}" ]; then + rm -f "${__cc_output_directory}/${__cc_output_file}" >/dev/null + _log_msg DBG "Empty output file '${__cc_output_file}'" fi + else + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__cc_command}" + ( + cd "${__cc_output_directory}" \ + && _run_command "${__cc_command}" + ) fi - # remove stderr output file if it is empty - if [ ! 
-s "${TEMP_DATA_DIR}/${cc_output_directory}/${cc_stderr_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${cc_output_directory}/${cc_stderr_output_file}" \ - >/dev/null - fi - - fi - + fi + } \ No newline at end of file diff --git a/lib/command_exists.sh b/lib/command_exists.sh old mode 100755 new mode 100644 index d25c3dc5..04ee3edb --- a/lib/command_exists.sh +++ b/lib/command_exists.sh @@ -9,18 +9,18 @@ # false on fail command_exists() { - co_command="${1:-}" + __co_command="${1:-}" - if [ -z "${co_command}" ]; then + if [ -z "${__co_command}" ]; then return 1 fi if eval type type >/dev/null 2>/dev/null; then - eval type "${co_command}" >/dev/null 2>/dev/null + eval type "${__co_command}" >/dev/null 2>/dev/null elif command >/dev/null 2>/dev/null; then - command -v "${co_command}" >/dev/null 2>/dev/null + command -v "${__co_command}" >/dev/null 2>/dev/null else - which "${co_command}" >/dev/null 2>/dev/null + which "${__co_command}" >/dev/null 2>/dev/null fi } \ No newline at end of file diff --git a/lib/copy_data.sh b/lib/copy_data.sh index 138964b8..545640bf 100644 --- a/lib/copy_data.sh +++ b/lib/copy_data.sh @@ -1,40 +1,28 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 -############################################################################### # Copy files and directories. -# Globals: -# None -# Requires: -# None # Arguments: -# $1: file containing the list of files to be copied -# $2: destination directory -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -copy_data() +# string from_file: file containing the list of files to be copied +# string destination_file: destination directory +# Returns: +# none +_copy_data() { - cp_source_file="${1:-}" - cp_destination="${2:-}" - - # exit if source file does not exist - if [ ! 
-f "${cp_source_file}" ]; then - printf %b "copy data: no such file or directory: '${cp_source_file}'\n" >&2 - return 2 + __cd_from_file="${1:-}" + __cd_destination_directory="${2:-}" + + if [ ! -f "${__cd_from_file}" ]; then + _error_msg "_copy_data: no such file or directory: '${__cd_from_file}'" + return 1 fi - # shellcheck disable=SC2162 - while read cp_line || [ -n "${cp_line}" ]; do - cp_dirname=`dirname "${cp_line}"` - if [ -n "${cp_dirname}" ] && [ -d "${cp_dirname}" ]; then - mkdir -p "${cp_destination}/${cp_dirname}" - cp -r "${cp_line}" "${cp_destination}/${cp_dirname}" - fi - done <"${cp_source_file}" + # shellcheck disable=SC2162 + while read __cd_line && [ -n "${__cd_line}" ]; do + # shellcheck disable=SC2006 + __cd_dirname=`dirname "${__cd_line}" | sed -e "s|^${__UAC_MOUNT_POINT}|/|" -e "s|^${__UAC_TEMP_DATA_DIR}/collected|/|"` + mkdir -p "${__cd_destination_directory}/${__cd_dirname}" + cp -r "${__cd_line}" "${__cd_destination_directory}/${__cd_dirname}" + done <"${__cd_from_file}" } \ No newline at end of file diff --git a/lib/create_acquisition_log.sh b/lib/create_acquisition_log.sh index f6a71340..da6fd2a6 100644 --- a/lib/create_acquisition_log.sh +++ b/lib/create_acquisition_log.sh @@ -1,79 +1,67 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 -############################################################################### # Create the acquisition log. -# Globals: -# MOUNT_POINT -# OPERATING_SYSTEM -# SYSTEM_ARCH -# UAC_VERSION -# Requires: -# None # Arguments: -# $1: case number -# $2: evidence number -# $3: description -# $4: examiner name -# $5: notes -# $6: hostname -# $7: acquisition start date -# $8: acquisition end date -# $9: output file computed hash -# $10: destination directory -# $11: output file -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -create_acquisition_log() +# string file: full path to the acquisition file +# string start_date: acquisition start date +# string end_date: acquisition end date +# string computed_hashes: computed hashes +# Returns: +# none +_create_acquisition_log() { - cl_case_number="${1:-}" - shift - cl_evidence_number="${1:-}" - shift - cl_description="${1:-}" - shift - cl_examiner="${1:-}" - shift - cl_notes="${1:-}" - shift - cl_hostname="${1:-}" - shift - cl_acquisition_start_date="${1:-}" - shift - cl_acquisition_end_date="${1:-}" - shift - cl_output_file_hash="${1:-}" - shift - cl_destination_directory="${1:-}" - shift - cl_output_file="${1:-}" + __cl_file="${1:-}" + __cl_start_date="${2:-}" + __cl_end_date="${3:-}" + __cl_computed_hashes="${4:-}" - cat >"${cl_destination_directory}/${cl_output_file}" << EOF -Created by UAC (Unix-like Artifacts Collector) ${UAC_VERSION} + cat >"${__cl_file}" << EOF +Created by UAC (Unix-like Artifacts Collector) ${__UAC_VERSION} [Case Information] -Case Number: ${cl_case_number} -Evidence Number: ${cl_evidence_number} -Description: ${cl_description} -Examiner: ${cl_examiner} -Notes: ${cl_notes} +Case Number: ${__UAC_CASE_NUMBER} +Evidence Number: ${__UAC_EVIDENCE_NUMBER} +Description: ${__UAC_EVIDENCE_DESCRIPTION} +Examiner: ${__UAC_EXAMINER} +Notes: ${__UAC_EVIDENCE_NOTES} [System Information] -Operating System: ${OPERATING_SYSTEM} -System Architecture: ${SYSTEM_ARCH} -Hostname: ${cl_hostname} +Operating System: ${__UAC_OPERATING_SYSTEM} +System Architecture: ${__UAC_SYSTEM_ARCH} +Hostname: ${__UAC_HOSTNAME} [Acquisition Information] -Mount Point: ${MOUNT_POINT} -Acquisition started at: ${cl_acquisition_start_date} -Acquisition finished at: ${cl_acquisition_end_date} +Mount Point: ${__UAC_MOUNT_POINT} +Acquisition Started: ${__cl_start_date} +Acquisition Finished: ${__cl_end_date} -[Output File MD5 Computed Hash] -${cl_output_file_hash} +[Output 
Information] EOF +if [ "${__UAC_OUTPUT_FORMAT}" = "none" ]; then + cat >>"${__cl_file}" << EOF +Directory: ${__UAC_OUTPUT_BASE_NAME} +Format: ${__UAC_OUTPUT_FORMAT} +EOF +else + cat >>"${__cl_file}" << EOF +File: ${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION} +Format: ${__UAC_OUTPUT_FORMAT} +EOF + if [ "${__UAC_OUTPUT_FORMAT}" = "zip" ] && [ -n "${__UAC_OUTPUT_PASSWORD}" ]; then + cat >>"${__cl_file}" << EOF +Password: "${__UAC_OUTPUT_PASSWORD}" +EOF + fi + + cat >>"${__cl_file}" << EOF + +[Computed Hashes] +${__cl_computed_hashes} +EOF + +fi + } \ No newline at end of file diff --git a/lib/create_artifact_list.sh b/lib/create_artifact_list.sh deleted file mode 100644 index e062ac7d..00000000 --- a/lib/create_artifact_list.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2001,SC2006 - -############################################################################### -# Create artifact list to be collected based on the artifact list provided in -# the command line. -# Globals: -# TEMP_DATA_DIR -# UAC_DIR -# Requires: -# None -# Arguments: -# $1: comma separated list of artifacts -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -create_artifact_list() -{ - pr_artifact_list="${1:-}" - - OIFS="${IFS}" - IFS="," - for pr_artifact in ${pr_artifact_list}; do - if eval "echo \"${pr_artifact}\" | grep -q -E \"^!\""; then - pr_artifact=`echo "${pr_artifact}" | sed -e 's:^!::'` - # shellcheck disable=SC2086 - find "${UAC_DIR}"/artifacts/${pr_artifact} -name "*.yaml" -print \ - | sed -e "s:${UAC_DIR}/artifacts/::g" \ - >"${TEMP_DATA_DIR}/.artifacts.exclude.tmp" - - # remove common lines between include and exclude - awk 'NR==FNR {a[$0]=1; next} !a[$0]' \ - "${TEMP_DATA_DIR}/.artifacts.exclude.tmp" \ - "${TEMP_DATA_DIR}/.artifacts.include.tmp" \ - >"${TEMP_DATA_DIR}/.artifacts.diff.tmp" - cp "${TEMP_DATA_DIR}/.artifacts.diff.tmp" "${TEMP_DATA_DIR}/.artifacts.include.tmp" - - else - # shellcheck disable=SC2086 - find "${UAC_DIR}"/artifacts/${pr_artifact} -name "*.yaml" -print \ - | sed -e "s:${UAC_DIR}/artifacts/::g" \ - >>"${TEMP_DATA_DIR}/.artifacts.include.tmp" - fi - done - IFS="${OIFS}" - - # remove duplicates - awk '!a[$0]++' <"${TEMP_DATA_DIR}/.artifacts.include.tmp" - -} \ No newline at end of file diff --git a/lib/error_msg.sh b/lib/error_msg.sh new file mode 100644 index 00000000..4cca8a31 --- /dev/null +++ b/lib/error_msg.sh @@ -0,0 +1,13 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Print error message. +# Arguments: +# string message: error message +# Returns: +# none +_error_msg() +{ + __em_message="${1:-unexpected error}" + printf "uac: %b" "${__em_message}\n" >&2 +} \ No newline at end of file diff --git a/lib/exit_fatal.sh b/lib/exit_fatal.sh new file mode 100644 index 00000000..0e3d24d1 --- /dev/null +++ b/lib/exit_fatal.sh @@ -0,0 +1,16 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Perform exit of the program (with exit code 1). 
+# Arguments: +# string message: error message +# Returns: +# none +_exit_fatal() +{ + __ef_message="${1:-}" + if [ -n "${__ef_message}" ]; then + printf "uac: %b" "${__ef_message}\n" >&2 + fi + exit 1 +} \ No newline at end of file diff --git a/lib/exit_success.sh b/lib/exit_success.sh new file mode 100644 index 00000000..dbaba3a8 --- /dev/null +++ b/lib/exit_success.sh @@ -0,0 +1,16 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Perform a normal exit of the program (with exit code 0). +# Arguments: +# string message: message +# Returns: +# none +_exit_success() +{ + __es_message="${1:-}" + if [ -n "${__es_message}" ]; then + printf "uac: %b" "${__es_message}\n" + fi + exit 0 +} \ No newline at end of file diff --git a/lib/file_collector.sh b/lib/file_collector.sh deleted file mode 100644 index 675bef58..00000000 --- a/lib/file_collector.sh +++ /dev/null @@ -1,166 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Collector that searches and collects files. -# Globals: -# GLOBAL_EXCLUDE_MOUNT_POINT -# GLOBAL_EXCLUDE_NAME_PATTERN -# GLOBAL_EXCLUDE_PATH_PATTERN -# MOUNT_POINT -# START_DATE_DAYS -# END_DATE_DAYS -# TEMP_DATA_DIR -# Requires: -# find_wrapper -# get_mount_point_by_file_system -# sanitize_filename -# sanitize_path -# Arguments: -# $1: path -# $2: is file list (optional) (default: false) -# $3: path pattern (optional) -# $4: name pattern (optional) -# $5: exclude path pattern (optional) -# $6: exclude name pattern (optional) -# $7: exclude file system (optional) -# $8: max depth (optional) -# $9: file type (optional) (default: f) -# $10: min file size (optional) -# $11: max file size (optional) -# $12: permissions (optional) -# $13: ignore date range (optional) (default: false) -# $14: root output directory -# $15: output file -# Exit Status: -# Exit with status 0 on success. 
-# Exit with status greater than 0 if errors occur. -############################################################################### -file_collector() -{ - # some systems such as Solaris 10 do not support more than 9 parameters - # on functions, not even using curly braces {} e.g. ${10} - # so the solution was to use shift - fl_path="${1:-}" - shift - fl_is_file_list="${1:-false}" - shift - fl_path_pattern="${1:-}" - shift - fl_name_pattern="${1:-}" - shift - fl_exclude_path_pattern="${1:-}" - shift - fl_exclude_name_pattern="${1:-}" - shift - fl_exclude_file_system="${1:-}" - shift - fl_max_depth="${1:-}" - shift - fl_file_type="${1:-f}" - shift - fl_min_file_size="${1:-}" - shift - fl_max_file_size="${1:-}" - shift - fl_permissions="${1:-}" - shift - fl_ignore_date_range="${1:-false}" - shift - fl_root_output_directory="${1:-}" - shift - fl_output_file="${1:-}" - - # return if path is empty - if [ -z "${fl_path}" ]; then - printf %b "file_collector: missing required argument: 'path'\n" >&2 - return 22 - fi - - # return if root output directory is empty - if [ -z "${fl_root_output_directory}" ]; then - printf %b "file_collector: missing required argument: \ -'root_output_directory'\n" >&2 - return 22 - fi - - # return if output file is empty - if [ -z "${fl_output_file}" ]; then - printf %b "file_collector: missing required argument: 'output_file'\n" >&2 - return 22 - fi - - # prepend TEMP_DATA_DIR to path if it does not start with / - # (which means local file) - if echo "${fl_path}" | grep -q -v -E "^/"; then - fl_path=`sanitize_path "${TEMP_DATA_DIR}/${fl_root_output_directory}/${fl_path}"` - fi - - # return if is file list and file list does not exist - if ${fl_is_file_list} && [ ! 
-f "${fl_path}" ]; then - printf %b "file_collector: file list does not exist: '${fl_path}'\n" >&2 - return 2 - fi - - # sanitize output file name - fl_output_file=`sanitize_filename "${fl_output_file}"` - - ${fl_ignore_date_range} && fl_date_range_start_days="" \ - || fl_date_range_start_days="${START_DATE_DAYS}" - ${fl_ignore_date_range} && fl_date_range_end_days="" \ - || fl_date_range_end_days="${END_DATE_DAYS}" - - # local exclude mount points - if [ -n "${fl_exclude_file_system}" ]; then - fl_exclude_mount_point=`get_mount_point_by_file_system \ - "${fl_exclude_file_system}"` - fl_exclude_path_pattern="${fl_exclude_path_pattern},\ -${fl_exclude_mount_point}" - fi - - # global exclude mount points - if [ -n "${GLOBAL_EXCLUDE_MOUNT_POINT}" ]; then - fl_exclude_path_pattern="${fl_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_MOUNT_POINT}" - fi - - # global exclude path pattern - if [ -n "${GLOBAL_EXCLUDE_PATH_PATTERN}" ]; then - fl_exclude_path_pattern="${fl_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_PATH_PATTERN}" - fi - - # global exclude name pattern - if [ -n "${GLOBAL_EXCLUDE_NAME_PATTERN}" ]; then - fl_exclude_name_pattern="${fl_exclude_name_pattern},\ -${GLOBAL_EXCLUDE_NAME_PATTERN}" - fi - - if ${fl_is_file_list}; then - cat "${fl_path}" \ - >>"${TEMP_DATA_DIR}/${fl_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${fl_output_file}.stderr" - else - # prepend mount point if is not file list - fl_path=`sanitize_path "${MOUNT_POINT}/${fl_path}"` - - find_wrapper \ - "${fl_path}" \ - "${fl_path_pattern}" \ - "${fl_name_pattern}" \ - "${fl_exclude_path_pattern}" \ - "${fl_exclude_name_pattern}" \ - "${fl_max_depth}" \ - "${fl_file_type}" \ - "${fl_min_file_size}" \ - "${fl_max_file_size}" \ - "${fl_permissions}" \ - "${fl_date_range_start_days}" \ - "${fl_date_range_end_days}" \ - >>"${TEMP_DATA_DIR}/${fl_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${fl_output_file}.stderr" - - fi - -} \ No newline at end of file diff --git a/lib/file_system_symlink_support.sh 
b/lib/file_system_symlink_support.sh deleted file mode 100644 index 0b72716a..00000000 --- a/lib/file_system_symlink_support.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Check if given directory's file system supports symlink creation. -# Globals: -# UAC_DIR -# Requires: -# None -# Arguments: -# $1: directory -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -file_system_symlink_support() -{ - fs_directory="${1:-}" - - rm "${fs_directory}/.uac-symlink.tmp" - if ln -s "${UAC_DIR}" "${fs_directory}/.uac-symlink.tmp"; then - rm "${fs_directory}/.uac-symlink.tmp" - return 0 - fi - return 1 - -} \ No newline at end of file diff --git a/lib/filter_list.sh b/lib/filter_list.sh new file mode 100644 index 00000000..f7aacd2d --- /dev/null +++ b/lib/filter_list.sh @@ -0,0 +1,30 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Filter a list of items (line by line) based on an exclusion list. +# Arguments: +# string list: list of items +# string exclude_list: exclusion list +# Returns: +# string: filtered list +_filter_list() +{ + __fl_list="${1:-}" + __fl_exclude_list="${2:-}" + + __fl_OIFS="${IFS}" + IFS=" +" + for __fl_i in ${__fl_list}; do + __fl_found=false + for __fl_e in ${__fl_exclude_list}; do + if [ "${__fl_i}" = "${__fl_e}" ]; then + __fl_found=true + break + fi + done + $__fl_found || printf "%s\n" "${__fl_i}" + done + + IFS="${__fl_OIFS}" +} diff --git a/lib/find_based_collector.sh b/lib/find_based_collector.sh new file mode 100644 index 00000000..2f899944 --- /dev/null +++ b/lib/find_based_collector.sh @@ -0,0 +1,267 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006,SC2153 + +# Find-based collectors. 
+# Arguments: +# string collector: collector name +# string path: path +# boolean is_file_list: path points to an existing file list (optional) (default: false) +# string path_pattern: pipe-separated list of path patterns (optional) +# string name_pattern: pipe-separated list of name patterns (optional) +# string exclude_path_pattern: pipe-separated list of exclude path patterns (optional) +# string exclude_name_pattern: pipe-separated list of exclude name patterns (optional) +# string exclude_file_system: pipe-separated list of exclude file system name (optional) +# integer max_depth: max depth (optional) +# string file_type: file type (optional) +# integer min_file_size: minimum file size (optional) +# integer max_file_size: maximum file size (optional) +# string permissions: permissions (optional) +# boolean ignore_date_range: ignore date range (optional) (default: false) +# string output_directory: full path to the output directory +# string output_file: output file name +# Returns: +# boolean: true on success +# false on fail +_find_based_collector() +{ + __fc_collector="${1:-}" + shift + __fc_path="${1:-}" + shift + __fc_is_file_list="${1:-false}" + shift + __fc_path_pattern="${1:-}" + shift + __fc_name_pattern="${1:-}" + shift + __fc_exclude_path_pattern="${1:-}" + shift + __fc_exclude_name_pattern="${1:-}" + shift + __fc_exclude_file_system="${1:-}" + shift + __fc_max_depth="${1:-}" + shift + __fc_file_type="${1:-}" + shift + __fc_min_file_size="${1:-}" + shift + __fc_max_file_size="${1:-}" + shift + __fc_permissions="${1:-}" + shift + __fc_ignore_date_range="${1:-false}" + shift + __fc_output_directory="${1:-}" + shift + __fc_output_file="${1:-}" + + if [ "${__fc_collector}" != "file" ] \ + && [ "${__fc_collector}" != "find" ] \ + && [ "${__fc_collector}" != "hash" ] \ + && [ "${__fc_collector}" != "stat" ]; then + _log_msg ERR "_find_based_collector: invalid collector '${__fc_collector}'" + return 1 + fi + + if [ -z "${__fc_path}" ]; then + _log_msg ERR 
"_find_based_collector: empty path parameter" + return 1 + fi + + if [ -z "${__fc_output_directory}" ]; then + _log_msg ERR "_find_based_collector: empty output_directory parameter" + return 1 + fi + + if [ -z "${__fc_output_file}" ]; then + _log_msg ERR "_find_based_collector: empty output_file parameter" + return 1 + fi + + if ${__fc_is_file_list}; then + # prepend __UAC_TEMP_DATA_DIR/collected if path does not start with / + if echo "${__fc_path}" | grep -q -v -E "^/"; then + __fc_path="${__UAC_TEMP_DATA_DIR}/collected/${__fc_path}" + fi + __fc_path=`_sanitize_path "${__fc_path}"` + if [ ! -f "${__fc_path}" ]; then + _log_msg ERR "_find_based_collector: no such file or directory: '${__fc_path}'" + return 1 + fi + else + # prepend mount point to path + __fc_path=`_sanitize_path "${__UAC_MOUNT_POINT}/${__fc_path}"` + + # exclude path pattern (global) + if [ -n "${__UAC_CONF_EXCLUDE_PATH_PATTERN}" ]; then + __fc_exclude_path_pattern="${__fc_exclude_path_pattern}${__fc_exclude_path_pattern:+|}${__UAC_CONF_EXCLUDE_PATH_PATTERN}" + fi + + # exclude file systems / mount points (local) + if [ -n "${__fc_exclude_file_system}" ]; then + __fc_exclude_mount_points=`_get_mount_point_by_file_system "${__fc_exclude_file_system}" "${__UAC_OPERATING_SYSTEM}"` + __fc_exclude_path_pattern="${__fc_exclude_path_pattern}${__fc_exclude_path_pattern:+|}${__fc_exclude_mount_points}" + fi + + # exclude file systems / mount points (global) + if [ -n "${__UAC_EXCLUDE_MOUNT_POINTS}" ]; then + __fc_exclude_path_pattern="${__fc_exclude_path_pattern}${__fc_exclude_path_pattern:+|}${__UAC_EXCLUDE_MOUNT_POINTS}" + fi + + # add __UAC_DIR and __UAC_TEMP_DATA_DIR to exclude path pattern + __fc_exclude_path_pattern="${__fc_exclude_path_pattern}${__fc_exclude_path_pattern:+|}${__UAC_DIR}|${__UAC_TEMP_DATA_DIR}" + + # exclude name pattern (global) + if [ -n "${__UAC_CONF_EXCLUDE_NAME_PATTERN}" ]; then + 
__fc_exclude_name_pattern="${__fc_exclude_name_pattern}${__fc_exclude_name_pattern:+|}${__UAC_CONF_EXCLUDE_NAME_PATTERN}" + fi + + __fc_start_date_days="${__UAC_START_DATE_DAYS}" + __fc_end_date_days="${__UAC_END_DATE_DAYS}" + + ${__fc_ignore_date_range} && { __fc_start_date_days="0"; __fc_end_date_days="0"; } + fi + + __fc_output_directory=`_sanitize_output_directory "${__fc_output_directory}"` + __fc_output_file=`_sanitize_output_file "${__fc_output_file}"` + + if [ ! -d "${__fc_output_directory}" ]; then + mkdir -p "${__fc_output_directory}" >/dev/null + fi + + case "${__fc_collector}" in + "file"|"find") + if ${__fc_is_file_list}; then + __fc_find_command="cat \"${__fc_path}\"" + else + __fc_find_command=`_build_find_command \ + "${__fc_path}" \ + "${__fc_path_pattern}" \ + "${__fc_name_pattern}" \ + "${__fc_exclude_path_pattern}" \ + "${__fc_exclude_name_pattern}" \ + "${__fc_max_depth}" \ + "${__fc_file_type}" \ + "${__fc_min_file_size}" \ + "${__fc_max_file_size}" \ + "${__fc_permissions}" \ + "" \ + "${__fc_start_date_days}" \ + "${__fc_end_date_days}"` + fi + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__fc_find_command}" + _run_command "${__fc_find_command}" \ + >>"${__fc_output_directory}/${__fc_output_file}" + ;; + "hash") + for __fc_algorithm in `echo "${__UAC_CONF_HASH_ALGORITHM}" | sed -e 's:|: :g'`; do + + __fc_hashing_tool="" + if [ "${__fc_algorithm}" = "md5" ]; then + __fc_hashing_tool="${__UAC_TOOL_MD5_BIN}" + elif [ "${__fc_algorithm}" = "sha1" ]; then + __fc_hashing_tool="${__UAC_TOOL_SHA1_BIN}" + elif [ "${__fc_algorithm}" = "sha256" ]; then + __fc_hashing_tool="${__UAC_TOOL_SHA256_BIN}" + fi + + if [ -n "${__fc_hashing_tool}" ]; then + if ${__fc_is_file_list}; then + __fc_hash_command="sed 's|.|\\\\&|g' \"${__fc_path}\" | xargs ${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }${__fc_hashing_tool}" + elif ${__UAC_TOOL_FIND_PRINT0_SUPPORT} && ${__UAC_TOOL_XARGS_NULL_DELIMITER_SUPPORT}; then + 
__fc_find_command=`_build_find_command \ + "${__fc_path}" \ + "${__fc_path_pattern}" \ + "${__fc_name_pattern}" \ + "${__fc_exclude_path_pattern}" \ + "${__fc_exclude_name_pattern}" \ + "${__fc_max_depth}" \ + "${__fc_file_type}" \ + "${__fc_min_file_size}" \ + "${__fc_max_file_size}" \ + "${__fc_permissions}" \ + "true" \ + "${__fc_start_date_days}" \ + "${__fc_end_date_days}"` + __fc_hash_command="${__fc_find_command} | xargs -0 ${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }${__fc_hashing_tool}" + else + __fc_find_command=`_build_find_command \ + "${__fc_path}" \ + "${__fc_path_pattern}" \ + "${__fc_name_pattern}" \ + "${__fc_exclude_path_pattern}" \ + "${__fc_exclude_name_pattern}" \ + "${__fc_max_depth}" \ + "${__fc_file_type}" \ + "${__fc_min_file_size}" \ + "${__fc_max_file_size}" \ + "${__fc_permissions}" \ + "" \ + "${__fc_start_date_days}" \ + "${__fc_end_date_days}"` + __fc_hash_command="${__fc_find_command} | sed 's|.|\\\\&|g' | xargs ${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }${__fc_hashing_tool}" + fi + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__fc_hash_command}" + _run_command "${__fc_hash_command}" \ + >>"${__fc_output_directory}/${__fc_output_file}.${__fc_algorithm}" + fi + done + ;; + "stat") + if [ -n "${__UAC_TOOL_STAT_BIN}" ]; then + if ${__fc_is_file_list}; then + __fc_stat_command="sed 's|.|\\\\&|g' \"${__fc_path}\" | xargs ${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }${__UAC_TOOL_STAT_BIN}${__UAC_TOOL_STAT_PARAMS:+ }${__UAC_TOOL_STAT_PARAMS}" + elif ${__UAC_TOOL_FIND_PRINT0_SUPPORT} && ${__UAC_TOOL_XARGS_NULL_DELIMITER_SUPPORT}; then + __fc_find_command=`_build_find_command \ + "${__fc_path}" \ + "${__fc_path_pattern}" \ + "${__fc_name_pattern}" \ + "${__fc_exclude_path_pattern}" \ + "${__fc_exclude_name_pattern}" \ + "${__fc_max_depth}" \ + "${__fc_file_type}" \ + "${__fc_min_file_size}" \ + "${__fc_max_file_size}" \ + "${__fc_permissions}" \ + "true" \ + 
"${__fc_start_date_days}" \ + "${__fc_end_date_days}"` + __fc_stat_command="${__fc_find_command} | xargs -0 ${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }${__UAC_TOOL_STAT_BIN}${__UAC_TOOL_STAT_PARAMS:+ }${__UAC_TOOL_STAT_PARAMS}" + else + __fc_find_command=`_build_find_command \ + "${__fc_path}" \ + "${__fc_path_pattern}" \ + "${__fc_name_pattern}" \ + "${__fc_exclude_path_pattern}" \ + "${__fc_exclude_name_pattern}" \ + "${__fc_max_depth}" \ + "${__fc_file_type}" \ + "${__fc_min_file_size}" \ + "${__fc_max_file_size}" \ + "${__fc_permissions}" \ + "" \ + "${__fc_start_date_days}" \ + "${__fc_end_date_days}"` + __fc_stat_command="${__fc_find_command} | sed 's|.|\\\\&|g' | xargs ${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }${__UAC_TOOL_STAT_BIN}${__UAC_TOOL_STAT_PARAMS:+ }${__UAC_TOOL_STAT_PARAMS}" + fi + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__fc_stat_command}" + _run_command "${__fc_stat_command}" \ + | sed -e "s:|':|:g" \ + -e "s:'|:|:g" \ + -e "s:' -> ': -> :" \ + -e 's:|":|:g' \ + -e 's:"|:|:g' \ + -e 's:" -> ": -> :' \ + -e "s:\`::g" \ + -e "s:|.$:|0:" \ + >>"${__fc_output_directory}/${__fc_output_file}" + else + _log_msg ERR "_find_based_collector: cannot run stat collector. Target system has neither 'stat', 'statx' nor 'perl' tool available" + return 1 + fi + ;; + esac + +} diff --git a/lib/find_collector.sh b/lib/find_collector.sh deleted file mode 100644 index 9fcb7f5f..00000000 --- a/lib/find_collector.sh +++ /dev/null @@ -1,181 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Collector that searches files and directories. 
-# Globals: -# GLOBAL_EXCLUDE_MOUNT_POINT -# GLOBAL_EXCLUDE_NAME_PATTERN -# GLOBAL_EXCLUDE_PATH_PATTERN -# MOUNT_POINT -# START_DATE_DAYS -# END_DATE_DAYS -# Requires: -# find_wrapper -# get_mount_point_by_file_system -# sanitize_filename -# sanitize_path -# sort_uniq_file -# Arguments: -# $1: path -# $2: path pattern (optional) -# $3: name pattern (optional) -# $4: exclude path pattern (optional) -# $5: exclude name pattern (optional) -# $6: exclude file system (optional) -# $7: max depth (optional) -# $8: file type (optional) -# $9: min file size (optional) -# $10: max file size (optional) -# $11: permissions (optional) -# $12: ignore date range (optional) (default: false) -# $13: root output directory -# $14: output directory (optional) -# $15: output file -# $16: stderr output file (optional) -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -find_collector() -{ - # some systems such as Solaris 10 do not support more than 9 parameters - # on functions, not even using curly braces {} e.g. 
${10} - # so the solution was to use shift - fc_path="${1:-}" - shift - fc_path_pattern="${1:-}" - shift - fc_name_pattern="${1:-}" - shift - fc_exclude_path_pattern="${1:-}" - shift - fc_exclude_name_pattern="${1:-}" - shift - fc_exclude_file_system="${1:-}" - shift - fc_max_depth="${1:-}" - shift - fc_file_type="${1:-}" - shift - fc_min_file_size="${1:-}" - shift - fc_max_file_size="${1:-}" - shift - fc_permissions="${1:-}" - shift - fc_ignore_date_range="${1:-false}" - shift - fc_root_output_directory="${1:-}" - shift - fc_output_directory="${1:-}" - shift - fc_output_file="${1:-}" - shift - fc_stderr_output_file="${1:-}" - - # return if path is empty - if [ -z "${fc_path}" ]; then - printf %b "find_collector: missing required argument: 'path'\n" >&2 - return 22 - fi - - # return if root output directory is empty - if [ -z "${fc_root_output_directory}" ]; then - printf %b "find_collector: missing required argument: \ -'root_output_directory'\n" >&2 - return 22 - fi - - # return if output file is empty - if [ -z "${fc_output_file}" ]; then - printf %b "find_collector: missing required argument: 'output_file'\n" >&2 - return 22 - fi - - # sanitize output file name - fc_output_file=`sanitize_filename "${fc_output_file}"` - - if [ -n "${fc_stderr_output_file}" ]; then - # sanitize stderr output file name - fc_stderr_output_file=`sanitize_filename "${fc_stderr_output_file}"` - else - fc_stderr_output_file="${fc_output_file}.stderr" - fi - - # sanitize output directory - fc_output_directory=`sanitize_path \ - "${fc_root_output_directory}/${fc_output_directory}"` - - # create output directory if it does not exist - if [ ! 
-d "${TEMP_DATA_DIR}/${fc_output_directory}" ]; then - mkdir -p "${TEMP_DATA_DIR}/${fc_output_directory}" >/dev/null - fi - - ${fc_ignore_date_range} && fc_date_range_start_days="" \ - || fc_date_range_start_days="${START_DATE_DAYS}" - ${fc_ignore_date_range} && fc_date_range_end_days="" \ - || fc_date_range_end_days="${END_DATE_DAYS}" - - # local exclude mount points - if [ -n "${fc_exclude_file_system}" ]; then - fc_exclude_mount_point=`get_mount_point_by_file_system \ - "${fc_exclude_file_system}"` - fc_exclude_path_pattern="${fc_exclude_path_pattern},\ -${fc_exclude_mount_point}" - fi - - # global exclude mount points - if [ -n "${GLOBAL_EXCLUDE_MOUNT_POINT}" ]; then - fc_exclude_path_pattern="${fc_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_MOUNT_POINT}" - fi - - # global exclude path pattern - if [ -n "${GLOBAL_EXCLUDE_PATH_PATTERN}" ]; then - fc_exclude_path_pattern="${fc_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_PATH_PATTERN}" - fi - - # global exclude name pattern - if [ -n "${GLOBAL_EXCLUDE_NAME_PATTERN}" ]; then - fc_exclude_name_pattern="${fc_exclude_name_pattern},\ -${GLOBAL_EXCLUDE_NAME_PATTERN}" - fi - - # prepend mount point - fc_path=`sanitize_path "${MOUNT_POINT}/${fc_path}"` - - find_wrapper \ - "${fc_path}" \ - "${fc_path_pattern}" \ - "${fc_name_pattern}" \ - "${fc_exclude_path_pattern}" \ - "${fc_exclude_name_pattern}" \ - "${fc_max_depth}" \ - "${fc_file_type}" \ - "${fc_min_file_size}" \ - "${fc_max_file_size}" \ - "${fc_permissions}" \ - "${fc_date_range_start_days}" \ - "${fc_date_range_end_days}" \ - >>"${TEMP_DATA_DIR}/${fc_output_directory}/${fc_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${fc_output_directory}/${fc_stderr_output_file}" - - # sort and uniq output file - sort_uniq_file "${TEMP_DATA_DIR}/${fc_output_directory}/${fc_output_file}" - - # remove output file if it is empty - if [ ! 
-s "${TEMP_DATA_DIR}/${fc_output_directory}/${fc_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${fc_output_directory}/${fc_output_file}" \ - >/dev/null - fi - - # remove stderr output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${fc_output_directory}/${fc_stderr_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${fc_output_directory}/${fc_stderr_output_file}" \ - >/dev/null - fi - -} \ No newline at end of file diff --git a/lib/find_wrapper.sh b/lib/find_wrapper.sh deleted file mode 100644 index d4702716..00000000 --- a/lib/find_wrapper.sh +++ /dev/null @@ -1,412 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Use internal 'find' tool or 'find.pl' script to search for files in a -# directory hierarchy. -# Globals: -# ENABLE_FIND_ATIME -# ENABLE_FIND_CTIME -# ENABLE_FIND_MTIME -# FIND_ATIME_SUPPORT -# FIND_CTIME_SUPPORT -# FIND_MAXDEPTH_SUPPORT -# FIND_MTIME_SUPPORT -# FIND_OPERATORS_SUPPORT -# FIND_PATH_SUPPORT -# FIND_PERM_SUPPORT -# FIND_SIZE_SUPPORT -# PERL_TOOL_AVAILABLE -# UAC_DIR -# Requires: -# get_mount_point_by_file_system -# log_message -# Arguments: -# $1: path -# $2: path pattern (optional) -# $3: name pattern (optional) -# $4: exclude path pattern (optional) -# $5: exclude name pattern (optional) -# $6: max depth (optional) -# $7: file type (optional) -# $8: min file size (optional) -# $9: max file size (optional) -# $10: permissions (optional) -# $11: date range start in days (optional) (default: 0) -# $12: date range end in days (optional) (default: 0) -# Output: -# Write search results to stdout. -# Write any errors to stderr. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -find_wrapper() -{ - # some systems such as Solaris 10 do not support more than 9 parameters - # on functions, not even using curly braces {} e.g. ${10} - # so the solution was to use shift - fw_path="${1:-}" - shift - fw_path_pattern="${1:-}" - shift - fw_name_pattern="${1:-}" - shift - fw_exclude_path_pattern="${1:-}" - shift - fw_exclude_name_pattern="${1:-}" - shift - fw_max_depth="${1:-}" - shift - fw_file_type="${1:-}" - shift - fw_min_file_size="${1:-}" - shift - fw_max_file_size="${1:-}" - shift - fw_permissions="${1:-}" - shift - fw_date_range_start_days="${1:-0}" - shift - fw_date_range_end_days="${1:-0}" - - ############################################################################# - # Build recursive parameter if a list of items is provided - # Globals: - # None - # Arguments: - # $1: parameter (-path or -name) - # $2: list of items - # Output: - # Write the output to stdout - # Return: - # None - ############################################################################# - _build_recursive_param() - { - _br_param="${1:--name}" - _br_items="${2:-}" - - if [ -n "${_br_items}" ]; then - # remove white spaces between items and commas - # remove empty items - # replace escaped comma (\,) by #_COMMA_# string - # replace escaped double quote (\") by #_DOUBLE_QUOTE_# string - # replace #_COMMA_# string by comma (,) - # replace #_DOUBLE_QUOTE_# string by escaped double quote (\") - echo "${_br_items}" \ - | sed -e 's: *,:,:g' \ - -e 's:, *:,:g' \ - -e 's:^,*::' \ - -e 's:,*$::' \ - -e 's:\\,:#_COMMA_#:g' \ - -e 's:\\":#_DOUBLE_QUOTE_#:g' \ - | awk -v _br_param="${_br_param}" 'BEGIN { FS=","; } { - for(N = 1; N <= NF; N ++) { - if ($N != "") { - gsub("#_COMMA_#", ",", $N); - gsub("#_DOUBLE_QUOTE_#", "\\\"", $N); - if (N == 1) { - printf "%s \"%s\"", _br_param, $N; - } - else { - printf " -o %s \"%s\"", _br_param, $N; - } - } - } - }' - fi - } - - # return if starting point is 
empty - if [ -z "${fw_path}" ]; then - printf %b "find_wrapper: missing required argument: 'path'\n" >&2 - return 22 - fi - - fw_find_tool="find" - fw_find_path_param="" - fw_find_name_param="" - fw_find_path_prune_param="" - fw_find_name_prune_param="" - fw_find_max_depth_param="" - fw_find_type_param="" - fw_find_min_file_size_param="" - fw_find_max_file_size_param="" - fw_find_perm_param="" - fw_find_atime_param="" - fw_find_mtime_param="" - fw_find_ctime_param="" - fw_find_date_range_param="" - - if ${FIND_OPERATORS_SUPPORT} || ${PERL_TOOL_AVAILABLE}; then - # build -path -prune parameter - if [ -n "${fw_exclude_path_pattern}" ]; then - if ${FIND_PATH_SUPPORT}; then - fw_find_path_prune_param="\( `_build_recursive_param \ - \"-path\" \"${fw_exclude_path_pattern}\"` \) -prune -o" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_path_prune_param="\( `_build_recursive_param \ - \"-path\" \"${fw_exclude_path_pattern}\"` \) -prune -o" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - # build -name -prune parameter - if [ -n "${fw_exclude_name_pattern}" ]; then - fw_find_name_prune_param="\( `_build_recursive_param \ - \"-name\" \"${fw_exclude_name_pattern}\"` \) -prune -o" - fi - # build -path parameter - if [ -n "${fw_path_pattern}" ]; then - if ${FIND_PATH_SUPPORT}; then - fw_find_path_param="\( `_build_recursive_param \ - \"-path\" \"${fw_path_pattern}\"` \)" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_path_param="\( `_build_recursive_param \ - \"-path\" \"${fw_path_pattern}\"` \)" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - fi - - # build -maxdepth parameter - if [ -n "${fw_max_depth}" ] && [ "${fw_max_depth}" -gt 0 ]; then - if ${FIND_MAXDEPTH_SUPPORT}; then - fw_find_max_depth_param="-maxdepth ${fw_max_depth}" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_max_depth_param="-maxdepth ${fw_max_depth}" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - - # build -type parameter - # -type parameter 
will be added even if 'find' does not support it - if [ -n "${fw_file_type}" ]; then - fw_find_type_param="-type ${fw_file_type}" - if ${FIND_TYPE_SUPPORT}; then - true - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - - # build -size parameter - if [ -n "${fw_min_file_size}" ]; then - if ${FIND_SIZE_SUPPORT}; then - fw_find_min_file_size_param="-size +${fw_min_file_size}c" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_min_file_size_param="-size +${fw_min_file_size}c" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - if [ -n "${fw_max_file_size}" ]; then - if ${FIND_SIZE_SUPPORT}; then - fw_find_max_file_size_param="-size -${fw_max_file_size}c" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_max_file_size_param="-size -${fw_max_file_size}c" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - - # build -perm parameter - # -perm parameter will be added even if 'find' does not support it - if [ -n "${fw_permissions}" ]; then - fw_find_perm_param="-perm ${fw_permissions}" - if ${FIND_PERM_SUPPORT}; then - true - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - - # build -atime parameter - if ${ENABLE_FIND_ATIME}; then - if [ "${fw_date_range_start_days}" -gt 0 ]; then - if ${FIND_ATIME_SUPPORT}; then - fw_find_atime_param="-atime -${fw_date_range_start_days}" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_atime_param="-atime -${fw_date_range_start_days}" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - if [ "${fw_date_range_end_days}" -gt 0 ]; then - if ${FIND_ATIME_SUPPORT}; then - fw_find_atime_param="${fw_find_atime_param} \ --atime +${fw_date_range_end_days}" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_atime_param="${fw_find_atime_param} \ --atime +${fw_date_range_end_days}" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - fi - - # build -mtime parameter - if 
${ENABLE_FIND_MTIME}; then - if [ "${fw_date_range_start_days}" -gt 0 ]; then - if ${FIND_MTIME_SUPPORT}; then - fw_find_mtime_param="-mtime -${fw_date_range_start_days}" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_mtime_param="-mtime -${fw_date_range_start_days}" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - if [ "${fw_date_range_end_days}" -gt 0 ]; then - if ${FIND_MTIME_SUPPORT}; then - fw_find_mtime_param="${fw_find_mtime_param} \ --mtime +${fw_date_range_end_days}" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_mtime_param="${fw_find_mtime_param} \ --mtime +${fw_date_range_end_days}" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - fi - - # build -ctime parameter - if ${ENABLE_FIND_CTIME}; then - if [ "${fw_date_range_start_days}" -gt 0 ]; then - if ${FIND_CTIME_SUPPORT}; then - fw_find_ctime_param="-ctime -${fw_date_range_start_days}" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_ctime_param="-ctime -${fw_date_range_start_days}" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - if [ "${fw_date_range_end_days}" -gt 0 ]; then - if ${FIND_CTIME_SUPPORT}; then - fw_find_ctime_param="${fw_find_ctime_param} \ --ctime +${fw_date_range_end_days}" - elif ${PERL_TOOL_AVAILABLE}; then - fw_find_ctime_param="${fw_find_ctime_param} \ --ctime +${fw_date_range_end_days}" - fw_find_tool="perl \"${UAC_DIR}/tools/find.pl/find.pl\"" - fi - fi - fi - - if [ -n "${fw_find_atime_param}" ] || [ -n "${fw_find_mtime_param}" ] \ - || [ -n "${fw_find_ctime_param}" ]; then - - if ${FIND_OPERATORS_SUPPORT}; then - # multiples date range parameters enabled - if [ -n "${fw_find_atime_param}" ]; then - fw_find_date_range_param="\( ${fw_find_atime_param} \)" - fi - if [ -n "${fw_find_mtime_param}" ]; then - if [ -n "${fw_find_date_range_param}" ]; then - fw_find_date_range_param=" ${fw_find_date_range_param} -o " - fi - fw_find_date_range_param=" ${fw_find_date_range_param} \ -\( ${fw_find_mtime_param} \)" - fi - if [ -n 
"${fw_find_ctime_param}" ]; then - if [ -n "${fw_find_date_range_param}" ]; then - fw_find_date_range_param=" ${fw_find_date_range_param} -o " - fi - fw_find_date_range_param=" ${fw_find_date_range_param} \ -\( ${fw_find_ctime_param} \)" - fi - fw_find_date_range_param="\( ${fw_find_date_range_param} \)" - else - # only one date range parameter enabled - if [ -n "${fw_find_atime_param}" ]; then - fw_find_date_range_param="${fw_find_atime_param}" - elif [ -n "${fw_find_mtime_param}" ]; then - fw_find_date_range_param="${fw_find_mtime_param}" - elif [ -n "${fw_find_ctime_param}" ]; then - fw_find_date_range_param="${fw_find_ctime_param}" - fi - fi - - fi - - # build -name parameter - if [ -n "${fw_name_pattern}" ]; then - if ${FIND_OPERATORS_SUPPORT} || ${PERL_TOOL_AVAILABLE}; then - fw_find_name_param="\( `_build_recursive_param \"-name\" \ -\"${fw_name_pattern}\"` \)" - log_message COMMAND "${fw_find_tool} \ -${fw_path} \ -${fw_find_max_depth_param} \ -${fw_find_path_prune_param} \ -${fw_find_name_prune_param} \ -${fw_find_path_param} \ -${fw_find_name_param} \ -${fw_find_type_param} \ -${fw_find_min_file_size_param} \ -${fw_find_max_file_size_param} \ -${fw_find_perm_param} \ -${fw_find_date_range_param} -print" - eval "${fw_find_tool} \ -${fw_path} \ -${fw_find_max_depth_param} \ -${fw_find_path_prune_param} \ -${fw_find_name_prune_param} \ -${fw_find_path_param} \ -${fw_find_name_param} \ -${fw_find_type_param} \ -${fw_find_min_file_size_param} \ -${fw_find_max_file_size_param} \ -${fw_find_perm_param} \ -${fw_find_date_range_param} -print" - else - # if operators are not supported, 'find' will be run for each -name value - # shellcheck disable=SC2162 - echo "${fw_name_pattern}" \ - | sed -e 's:\\,:#_COMMA_#:g' -e 's: *,:,:g' -e 's:, *:,:g' \ - -e 's:, *:,:g' -e 's:^,*::' \ - | awk 'BEGIN { FS=","; } { - for(N = 1; N <= NF; N ++) { - printf "\"%s\"\n", $N; - } - }' \ - | sed -e 's:#_COMMA_#:,:g' \ - | while read fw_name || [ -n "${fw_name}" ]; do - log_message 
COMMAND "${fw_find_tool} \ -${fw_path} \ -${fw_find_max_depth_param} \ -${fw_find_path_param} \ --name ${fw_name} ${fw_find_type_param} \ -${fw_find_min_file_size_param} \ -${fw_find_max_file_size_param} \ -${fw_find_perm_param} \ -${fw_find_date_range_param} -print" - eval "${fw_find_tool} \ -${fw_path} \ -${fw_find_max_depth_param} \ -${fw_find_path_param} \ --name ${fw_name} ${fw_find_type_param} \ -${fw_find_min_file_size_param} \ -${fw_find_max_file_size_param} \ -${fw_find_perm_param} \ -${fw_find_date_range_param} -print" - done - fi - else - log_message COMMAND "${fw_find_tool} \ -${fw_path} \ -${fw_find_max_depth_param} \ -${fw_find_path_prune_param} \ -${fw_find_name_prune_param} \ -${fw_find_path_param} \ -${fw_find_type_param} \ -${fw_find_min_file_size_param} \ -${fw_find_max_file_size_param} \ -${fw_find_perm_param} \ -${fw_find_date_range_param} -print" - eval "${fw_find_tool} \ -${fw_path} \ -${fw_find_max_depth_param} \ -${fw_find_path_prune_param} \ -${fw_find_name_prune_param} \ -${fw_find_path_param} \ -${fw_find_type_param} \ -${fw_find_min_file_size_param} \ -${fw_find_max_file_size_param} \ -${fw_find_perm_param} \ -${fw_find_date_range_param} -print" - fi - -} \ No newline at end of file diff --git a/lib/get_absolute_directory_path.sh b/lib/get_absolute_directory_path.sh deleted file mode 100644 index 6ba6d17b..00000000 --- a/lib/get_absolute_directory_path.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Get absolute directory path. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: path -# Outputs: -# Write directory path to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -get_absolute_directory_path() -{ - ga_directory="${1:-}" - - # shellcheck disable=SC2005,SC2006 - echo "`cd "${ga_directory}" && pwd`" - -} \ No newline at end of file diff --git a/lib/get_absolute_path.sh b/lib/get_absolute_path.sh new file mode 100644 index 00000000..68759737 --- /dev/null +++ b/lib/get_absolute_path.sh @@ -0,0 +1,14 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Get the absolute path from relative path. +# Arguments: +# string path: relative path +# Returns: +# string: absolute path +_get_absolute_path() +{ + __ga_path="${1:-}" + + ( cd "${__ga_path}" && pwd ) +} \ No newline at end of file diff --git a/lib/get_bin_path.sh b/lib/get_bin_path.sh new file mode 100644 index 00000000..31749517 --- /dev/null +++ b/lib/get_bin_path.sh @@ -0,0 +1,84 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Get the path to the bin directory based on the system's architecture. 
+# Arguments: +# string operating_system: operating system name +# string arch: system architecture +# Returns: +# string: path +_get_bin_path() +{ + __gb_os="${1:-linux}" + __gb_arch="${2:-x86_64}" + + _get_system_arch_bin_path() + { + case "${1:-x86_64}" in + armv[34567]*) + echo "arm" + ;; + aarch64*|armv[89]*) + echo "arm64" + ;; + athlon*|"i386"|"i486"|"i586"|"i686"|pentium*) + echo "i686" + ;; + "mips"|"mipsel") + echo "mips" + ;; + mips64*) + echo "mips64" + ;; + "ppc") + echo "ppc" + ;; + "ppcle") + echo "ppcle" + ;; + "ppc64") + echo "ppc64" + ;; + "ppc64le") + echo "ppc64le" + ;; + s390*) + echo "s390x" + ;; + "sparc") + echo "sparc" + ;; + "sparc64") + echo "sparc64" + ;; + *) + echo "x86_64" + ;; + esac + } + + __gb_correct_arch=`_get_system_arch_bin_path "${__gb_arch}"` + __gb_path="" + + # tools directory + for __gb_tool in statx zip; do + for __gb_dir in "${__UAC_DIR}"/tools/"${__gb_tool}"/*; do + if echo "${__gb_dir}" | grep -q -E "${__gb_os}"; then + __gb_path="${__gb_path}${__gb_path:+:}${__gb_dir}/${__gb_correct_arch}" + fi + done + done + + # bin directory + for __gb_dir in "${__UAC_DIR}"/bin/*; do + if echo "${__gb_dir}" | grep -q -E "${__gb_os}"; then + __gb_path="${__gb_path}${__gb_path:+:}${__gb_dir}/${__gb_correct_arch}" + __gb_path="${__gb_path}${__gb_path:+:}${__gb_dir}" + fi + done + + __gb_path="${__gb_path}${__gb_path:+:}${__UAC_DIR}/bin" + echo "${__gb_path}" + +} \ No newline at end of file diff --git a/lib/get_current_user.sh b/lib/get_current_user.sh index a5cdb8a2..2c529a9c 100644 --- a/lib/get_current_user.sh +++ b/lib/get_current_user.sh @@ -1,36 +1,19 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### # Get the current user. -# Globals: -# LOGNAME -# USER -# Requires: -# None # Arguments: # None -# Outputs: -# Write the current user to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -get_current_user() +# Returns: +# string: current user name +_get_current_user() { - # some systems like the docker version of Alpine Linux do not have - # neither LOGNAME nor USER set, and this can cause an error message if set -u - set +u - # who and whoami are not available on some systems - if [ -n "${LOGNAME}" ]; then - printf %b "${LOGNAME}" - elif [ -n "${USER}" ]; then - printf %b "${USER}" + if [ -n "${LOGNAME:-}" ]; then + echo "${LOGNAME}" + elif [ -n "${USER:-}" ]; then + echo "${USER}" else - id | sed -e 's:uid=[0-9]*(::' -e 's:).*::' + id | sed -e 's|^uid=[0-9]*(\([^)]*\).*|\1|' 2>/dev/null fi - - set -u - } \ No newline at end of file diff --git a/lib/get_days_since_date_until_now.sh b/lib/get_days_since_date_until_now.sh index 3ee50dae..1603d7d8 100644 --- a/lib/get_days_since_date_until_now.sh +++ b/lib/get_days_since_date_until_now.sh @@ -1,35 +1,29 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 +# shellcheck disable=SC2003,SC2006 -############################################################################### # Count days since a specific date until now (today). -# Globals: -# None -# Requires: -# get_epoch_date # Arguments: -# $1: date in YYYY-MM-DD format -# Outputs: -# Write number of days to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -get_days_since_date_until_now() +# string date: date in YYYY-MM-DD format +# Returns: +# integer: number of days +_get_days_since_date_until_now() { - gd_date="${1:-}" - - gd_epoch_now=`get_epoch_date` - gd_epoch_date=`get_epoch_date "${gd_date}"` || return 1 - if [ "${gd_epoch_now}" -gt "${gd_epoch_date}" ]; then - # shellcheck disable=SC2003 - gd_difference=`expr "${gd_epoch_now}" - "${gd_epoch_date}" 2>/dev/null` - # shellcheck disable=SC2003 - expr "${gd_difference}" / 86400 2>/dev/null + __gd_date="${1:-}" + + __gd_epoch_now=`_get_epoch_date` + __gd_epoch_date=`_get_epoch_date "${__gd_date}"` + + if [ -z "${__gd_epoch_date}" ]; then + return 1 + fi + + if [ "${__gd_epoch_now}" -gt "${__gd_epoch_date}" ]; then + __gd_difference=`expr "${__gd_epoch_now}" - "${__gd_epoch_date}" 2>/dev/null` + expr "${__gd_difference}" / 86400 2>/dev/null else - printf %b "uac: date '${gd_date}' cannot be greater than today.\n" >&2 - return 22 + _error_msg "date '${__gd_date}' cannot be greater than today." + return 1 fi } \ No newline at end of file diff --git a/lib/get_epoch_date.sh b/lib/get_epoch_date.sh index ee299023..e72e4a1b 100644 --- a/lib/get_epoch_date.sh +++ b/lib/get_epoch_date.sh @@ -1,54 +1,35 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 -############################################################################### -# Get epoch timestamp for the current or given date. -# Globals: -# UAC_DIR -# Requires: -# None +# Get Unix Epoch timestamp for the current or given date. # Arguments: -# $1: date in YYYY-MM-DD format (optional) -# Outputs: -# Write given date epoch timestamp to stdout. -# Write current epoch timestamp to stdout if $1 is empty. -# Write empty string to stdout on error. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -get_epoch_date() +# string date: date in YYYY-MM-DD format (optional) +# Returns: +# string: epoch timestamp +_get_epoch_date() { - ge_date="${1:-}" - ge_date_regex="^((19|20)[0-9][0-9])-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])$" - - if [ -n "${ge_date}" ] \ - && echo "${ge_date}" | grep -v -q -E "${ge_date_regex}"; then - printf %b "uac: invalid date '${ge_date}'\n\ -Try 'uac --help' for more information.\n" >&2 - return 22 - fi - + __ge_date="${1:-}" + # get epoch timestamp for the given date - if [ -n "${ge_date}" ]; then - date -d "${ge_date} 00:00:00" "+%s" 2>/dev/null && return 0 + if [ -n "${__ge_date}" ]; then + if echo "${__ge_date}" | grep -v -q -E "[1-9][0-9]{3}-[0-1][0-9]-[0-3][0-9]" 2>/dev/null; then + return 1 + fi + date -d "${__ge_date} 00:00:00" "+%s" 2>/dev/null && return 0 # bsd date - date -j -f "%Y-%m-%d %H:%M:%S" "${ge_date} 00:00:00" "+%s" \ - 2>/dev/null && return 0 + date -j -f "%Y-%m-%d %H:%M:%S" "${__ge_date} 00:00:00" "+%s" 2>/dev/null && return 0 # old busybox date (MMDDhhmmYYYY) - ge_string=`echo "${ge_date}" \ + # shellcheck disable=SC2006 + __ge_string=`echo "${__ge_date}" \ | awk 'BEGIN { FS="-"; } { print $2$3"0000"$1; }' 2>/dev/null` - date -d "${ge_string}" "+%s" 2>/dev/null && return 0 + date -d "${__ge_string}" "+%s" 2>/dev/null && return 0 # any system with 'perl' tool - perl "${UAC_DIR}/tools/date_to_epoch.pl/date_to_epoch.pl" "${ge_date}" \ - 2>/dev/null - elif eval "perl -e 'print time'" >/dev/null 2>/dev/null; then + date_to_epoch_pl "${__ge_date}" 2>/dev/null + elif perl -e 'print time' >/dev/null 2>/dev/null; then # get current epoch timestamp perl -e 'print time' 2>/dev/null else # get current epoch timestamp date "+%s" 2>/dev/null fi - } \ No newline at end of file diff --git a/lib/get_hostname.sh b/lib/get_hostname.sh index 2b6ea0b9..a54c9221 100644 --- a/lib/get_hostname.sh +++ b/lib/get_hostname.sh @@ -1,44 +1,45 @@ #!/bin/sh # 
SPDX-License-Identifier: Apache-2.0 -############################################################################### -# Get the current system hostname. -# Globals: -# HOSTNAME -# MOUNT_POINT -# Requires: -# None +# Get the system's hostname. # Arguments: -# None -# Outputs: -# Write the hostname to stdout. -# Write "unknown" to stdout if not able to get current hostname. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -get_hostname() +# string mount_point: mount point +# Returns: +# string: system's hostname or "unknown" if not able to get it +_get_hostname() { + __gh_mount_point="${1:-/}" - if [ "${MOUNT_POINT}" = "/" ]; then + if [ "${__gh_mount_point}" = "/" ]; then # some systems do not have hostname tool installed - if eval "hostname" 2>/dev/null; then + if hostname 2>/dev/null; then true - elif eval "uname -n"; then + elif uname -n 2>/dev/null; then true - elif [ -n "${HOSTNAME}" ]; then - printf %b "${HOSTNAME}" + elif [ -n "${HOSTNAME:-}" ]; then + echo "${HOSTNAME}" elif [ -r "/etc/hostname" ]; then - head -1 "/etc/hostname" + head -1 "/etc/hostname" 2>/dev/null + elif [ -r "/etc/rc.conf" ]; then + sed -n -e 's|^hostname="\(.*\)"|\1|p' <"/etc/rc.conf" 2>/dev/null + elif [ -r "/etc/myname" ]; then + head -1 "/etc/myname" 2>/dev/null + elif [ -r "/etc/nodename" ]; then + head -1 "/etc/nodename" 2>/dev/null else - printf %b "unknown" + echo "unknown" fi else - if [ -r "${MOUNT_POINT}/etc/hostname" ]; then - head -1 "${MOUNT_POINT}/etc/hostname" + if [ -r "${__gh_mount_point}/etc/hostname" ]; then + head -1 "${__gh_mount_point}/etc/hostname" 2>/dev/null + elif [ -r "${__gh_mount_point}/etc/rc.conf" ]; then + sed -n -e 's|^hostname="\(.*\)"|\1|p' <"${__gh_mount_point}/etc/rc.conf" 2>/dev/null + elif [ -r "${__gh_mount_point}/etc/myname" ]; then + head -1 "${__gh_mount_point}/etc/myname" 2>/dev/null + elif [ -r 
"${__gh_mount_point}/etc/nodename" ]; then + head -1 "${__gh_mount_point}/etc/nodename" 2>/dev/null else - printf %b "unknown" + echo "unknown" fi fi - } \ No newline at end of file diff --git a/lib/get_mount_point_by_file_system.sh b/lib/get_mount_point_by_file_system.sh index d587a014..b4a1085d 100644 --- a/lib/get_mount_point_by_file_system.sh +++ b/lib/get_mount_point_by_file_system.sh @@ -1,130 +1,104 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### # Get the list of mount points by file system. -# Globals: -# OPERATING_SYSTEM -# Requires: -# None # Arguments: -# $1: comma separated list of file systems -# Outputs: -# Write the list of mount points (comma separated) to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -get_mount_point_by_file_system() +# string file_systems: pipe-separated list of file systems +# string operating_system: operating system name +# Returns: +# string: pipe-separated list of mount points +_get_mount_point_by_file_system() { - gm_file_system_list="${1:-}" + __gm_file_systems="${1:-}" + __gm_operating_system="${2:-}" - # return if file system list is empty - if [ -z "${gm_file_system_list}" ]; then - printf %b "get_mount_point_by_file_system: missing required argument: \ -'file system list'\n" >&2 - return 2 + if [ -z "${__gm_file_systems}" ] || [ -z "${__gm_operating_system}" ]; then + return 1 fi - # list mounted points - # remove white spaces from gm_file_system_list - # remove double quotes from gm_file_system_list - # split into an array and record values into a awk dict - # print if file system is in gm_file_system_dict - # remove last comma from output - case "${OPERATING_SYSTEM}" in + case "${__gm_operating_system}" in "aix") mount \ - | awk -v gm_file_system_list="${gm_file_system_list}" \ + | awk -v 
__gm_file_systems="${__gm_file_systems}" \ 'BEGIN { - gsub(/[ ]+/, "", gm_file_system_list); - gsub("\"", "", gm_file_system_list); - split(gm_file_system_list, gm_file_system_array, ","); - for (i in gm_file_system_array) { - gm_file_system_dict[gm_file_system_array[i]]=""; + split(__gm_file_systems, __gm_file_system_array, "|"); + for (i in __gm_file_system_array) { + __gm_file_system_dict[__gm_file_system_array[i]]=""; } } { - if ($3 in gm_file_system_dict) { - printf "%s,", $2; + if ($3 in __gm_file_system_dict) { + printf "%s|", $2; } - }' \ - | awk '{gsub(/,$/, ""); print}' 2>/dev/null + }' 2>/dev/null \ + | sed -e 's:|$::' 2>/dev/null ;; "esxi") df -u \ - | awk -v gm_file_system_list="${gm_file_system_list}" \ + | awk -v __gm_file_systems="${__gm_file_systems}" \ 'BEGIN { - gsub(/[ ]+/, "", gm_file_system_list); - gsub("\"", "", gm_file_system_list); - split(gm_file_system_list, gm_file_system_array, ","); - for (i in gm_file_system_array) { - gm_file_system_dict[gm_file_system_array[i]]=""; + split(__gm_file_systems, __gm_file_system_array, "|"); + for (i in __gm_file_system_array) { + __gm_file_system_dict[__gm_file_system_array[i]]=""; } } { - if (tolower($1) in gm_file_system_dict) { - printf "%s,", $6; + if (tolower($1) in __gm_file_system_dict) { + printf "%s|", $6; } - }' \ - | awk '{gsub(/,$/, ""); print}' 2>/dev/null + }' 2>/dev/null \ + | sed -e 's:|$::' 2>/dev/null ;; "freebsd"|"macos"|"netscaler") mount \ - | sed -e 's:(::g' -e 's:,: :g' -e 's:)::g' \ + | sed -e 's|(||g' -e 's|,| |g' -e 's|)||g' \ | awk 'BEGIN { FS=" on "; } { print $2; }' \ - | awk -v gm_file_system_list="${gm_file_system_list}" \ + | awk -v __gm_file_systems="${__gm_file_systems}" \ 'BEGIN { - gsub(/[ ]+/, "", gm_file_system_list); - gsub("\"", "", gm_file_system_list); - split(gm_file_system_list, gm_file_system_array, ","); - for (i in gm_file_system_array) { - gm_file_system_dict[gm_file_system_array[i]]=""; + split(__gm_file_systems, __gm_file_system_array, "|"); + for (i 
in __gm_file_system_array) { + __gm_file_system_dict[__gm_file_system_array[i]]=""; } } { - if ($2 in gm_file_system_dict) { - printf "%s,", $1; + if ($2 in __gm_file_system_dict) { + printf "%s|", $1; } - }' \ - | awk '{gsub(/,$/, ""); print}' 2>/dev/null + }' 2>/dev/null \ + | sed -e 's:|$::' 2>/dev/null ;; - "android"|"linux"|"netbsd"|"openbsd") + "linux"|"netbsd"|"openbsd") mount \ | awk 'BEGIN { FS=" on "; } { print $2; }' \ - | awk -v gm_file_system_list="${gm_file_system_list}" \ + | awk -v __gm_file_systems="${__gm_file_systems}" \ 'BEGIN { - gsub(/[ ]+/, "", gm_file_system_list); - gsub("\"", "", gm_file_system_list); - split(gm_file_system_list, gm_file_system_array, ","); - for (i in gm_file_system_array) { - gm_file_system_dict[gm_file_system_array[i]]=""; + split(__gm_file_systems, __gm_file_system_array, "|"); + for (i in __gm_file_system_array) { + __gm_file_system_dict[__gm_file_system_array[i]]=""; } } { - if ($3 in gm_file_system_dict) { - printf "%s,", $1; + if ($3 in __gm_file_system_dict) { + printf "%s|", $1; } - }' \ - | awk '{gsub(/,$/, ""); print}' 2>/dev/null + }' 2>/dev/null \ + | sed -e 's:|$::' 2>/dev/null ;; "solaris") df -n \ - | awk -v gm_file_system_list="${gm_file_system_list}" \ + | awk -v __gm_file_systems="${__gm_file_systems}" \ 'BEGIN { - gsub(/[ ]+/, "", gm_file_system_list); - gsub("\"", "", gm_file_system_list); - split(gm_file_system_list, gm_file_system_array, ","); - for (i in gm_file_system_array) { - gm_file_system_dict[gm_file_system_array[i]]=""; + split(__gm_file_systems, __gm_file_system_array, "|"); + for (i in __gm_file_system_array) { + __gm_file_system_dict[__gm_file_system_array[i]]=""; } } { - if ($3 in gm_file_system_dict) { - printf "%s,", $1; + if ($3 in __gm_file_system_dict) { + printf "%s|", $1; } - }' \ - | awk '{gsub(/,$/, ""); print}' 2>/dev/null + }' 2>/dev/null \ + | sed -e 's:|$::' 2>/dev/null ;; esac diff --git a/lib/get_nproc.sh b/lib/get_nproc.sh new file mode 100644 index 00000000..ba768e37 
--- /dev/null +++ b/lib/get_nproc.sh @@ -0,0 +1,41 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Print the number of processing units available. +# Arguments: +# none +# Returns: +# integer: number of processing units available +_get_nproc() +{ + # esxi, linux, netbsd + grep -c "^processor" /proc/cpuinfo 2>/dev/null && return 0 + # freebsd, linux, solaris + if command_exists "nproc"; then + nproc 2>/dev/null + return 0 + fi + # freebsd, linux, macos + if command_exists "getconf" && getconf _NPROCESSORS_ONLN >/dev/null 2>/dev/null; then + getconf _NPROCESSORS_ONLN 2>/dev/null + return 0 + fi + # freebsd, macos, netbsd, netscaler, openbsd + if command_exists "sysctl" && sysctl -n hw.ncpu >/dev/null 2>/dev/null; then + sysctl -n hw.ncpu 2>/dev/null + return 0 + fi + # solaris + if command_exists "psrinfo"; then + psrinfo 2>/dev/null | wc -l 2>/dev/null | awk '{print $1}' 2>/dev/null + return 0 + fi + # aix + if command_exists "lsdev" && lsdev -Cc processor >/dev/null 2>/dev/null; then + lsdev -Cc processor 2>/dev/null | grep -c Available 2>/dev/null + return 0 + fi + + echo "unknown" && return 1 + +} \ No newline at end of file diff --git a/lib/get_operating_system.sh b/lib/get_operating_system.sh index d84cd971..3d817b1f 100644 --- a/lib/get_operating_system.sh +++ b/lib/get_operating_system.sh @@ -1,62 +1,47 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 -############################################################################### # Get current operating system. -# Globals: -# None -# Requires: -# None # Arguments: -# None -# Outputs: -# Write operating system to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -get_operating_system() +# none +# Returns: +# string: operating system +_get_operating_system() { - gs_kernel_name=`uname -s` + # shellcheck disable=SC2006 + __go_kernel_name=`uname -s` - case "${gs_kernel_name}" in + case "${__go_kernel_name}" in "AIX") - printf %b "aix" + echo "aix" ;; "FreeBSD") - if eval "uname -r | grep -q -E -i \"netscaler\""; then - printf %b "netscaler" + if uname -r | grep -q -E -i "netscaler"; then + echo "netscaler" else - printf %b "freebsd" + echo "freebsd" fi ;; "Linux") - if eval "env | grep -q -E \"ANDROID_ROOT\"" \ - && eval "env | grep -q -E \"ANDROID_DATA\""; then - printf %b "android" - else - printf %b "linux" - fi + echo "linux" ;; "Darwin") - printf %b "macos" + echo "macos" ;; "NetBSD") - printf %b "netbsd" + echo "netbsd" ;; "OpenBSD") - printf %b "openbsd" + echo "openbsd" ;; "SunOS") - printf %b "solaris" + echo "solaris" ;; "VMkernel") - printf %b "esxi" + echo "esxi" ;; *) - printf %b "${gs_kernel_name}" + echo "${__go_kernel_name}" ;; esac - } \ No newline at end of file diff --git a/lib/get_profile_by_name.sh b/lib/get_profile_by_name.sh new file mode 100644 index 00000000..b6ef4709 --- /dev/null +++ b/lib/get_profile_by_name.sh @@ -0,0 +1,22 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Get the full path to the profile file based on the profile name. 
+# Arguments: +# string profile_name: profile name +# string profiles_dir: full path to the profiles directory +# Returns: +# string: full path to the profile file +_get_profile_by_name() +{ + __gp_profile_name="${1:-}" + __gp_profiles_dir="${2:-profiles}" + + for __gp_file in "${__gp_profiles_dir}"/*.yaml; do + if grep -q -E "name: +${__gp_profile_name} *$" <"${__gp_file}" 2>/dev/null; then + echo "${__gp_file}" + break + fi + done 2>/dev/null + +} \ No newline at end of file diff --git a/lib/get_profile_file.sh b/lib/get_profile_file.sh deleted file mode 100644 index 1f17dd9e..00000000 --- a/lib/get_profile_file.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Get profile file based on the profile name. -# Globals: -# UAC_DIR -# Requires: -# None -# Arguments: -# $1: profile name -# Outputs: -# Write profile file to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -get_profile_file() -{ - gp_profile_name="${1:-}" - - for gp_file in "${UAC_DIR}"/profiles/*.yaml; do - if grep -q -E "name: +${gp_profile_name} *$" <"${gp_file}" 2>/dev/null; then - echo "${gp_file}" | sed 's:.*/::' # strip directory from path - break - fi - done - -} \ No newline at end of file diff --git a/lib/get_system_arch.sh b/lib/get_system_arch.sh index 7c7b32e4..6a5c6536 100644 --- a/lib/get_system_arch.sh +++ b/lib/get_system_arch.sh @@ -1,30 +1,21 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### -# Get system architecture. -# Globals: -# OPERATING_SYSTEM -# Requires: -# None +# Get the system's architecture. # Arguments: -# None -# Outputs: -# Write system architecture to stdout. -# Exit Status: -# Exit with status 0 on success. 
-# Exit with status greater than 0 if errors occur. -############################################################################### -get_system_arch() +# string os: operating system name +# Returns: +# string: system's architecture +_get_system_arch() { - - case "${OPERATING_SYSTEM}" in + __sa_os="${1:-}" + + case "${__sa_os}" in "aix"|"solaris") uname -p ;; - "android"|"esxi"|"freebsd"|"linux"|"macos"|"netbsd"|"netscaler"|"openbsd") + "esxi"|"freebsd"|"linux"|"macos"|"netbsd"|"netscaler"|"openbsd") uname -m ;; esac - } \ No newline at end of file diff --git a/lib/get_user_home_list.sh b/lib/get_user_home_list.sh index 95991302..2851f8c3 100644 --- a/lib/get_user_home_list.sh +++ b/lib/get_user_home_list.sh @@ -2,89 +2,97 @@ # SPDX-License-Identifier: Apache-2.0 # shellcheck disable=SC2006 -############################################################################### # Get current user list and their home directories. -# Globals: -# MOUNT_POINT -# OPERATING_SYSTEM -# TEMP_DATA_DIR -# Requires: -# get_current_user -# sanitize_path # Arguments: -# $1: skip users with non-interactive shells (default: false) -# $2: passwd file path (default: /etc/passwd) -# Outputs: -# Write user:home list to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -get_user_home_list() +# boolean skip_nologin_users: skip users with non-interactive shells (default: false) +# string mount_point: mount point +# string passwd_file_path: passwd file path (default: /etc/passwd) +# Returns: +# string: user:home list +_get_user_home_list() { - gu_skip_nologin_users="${1:-false}" - gu_passwd_file_path="${2:-/etc/passwd}" - + __gu_skip_nologin_users="${1:-false}" + __gu_mount_point="${2:-/}" + __gu_passwd_file_path="${3:-/etc/passwd}" + # skip users with non-interactive shells - gu_non_interactive_shells_grep="false$|halt$|nologin$|shutdown$|sync$|:$" + __gu_non_interactive_shells_grep="false$|halt$|nologin$|shutdown$|sync$|git-shell$|:$" - if [ -f "${TEMP_DATA_DIR}/.user_home_list.tmp" ]; then - rm -f "${TEMP_DATA_DIR}/.user_home_list.tmp" >/dev/null - fi + __gu_user_home_from_passwd="" + __gu_user_home_from_dir="" + __gu_user_home_shadow="" + __gu_user_home_current_user="" # extract user:home from passwd file - gu_etc_passwd=`sanitize_path "${MOUNT_POINT}/${gu_passwd_file_path}"` - if [ -f "${gu_etc_passwd}" ]; then - if ${gu_skip_nologin_users}; then - sed -e 's/#.*$//g' -e '/^ *$/d' -e '/^$/d' <"${gu_etc_passwd}" \ - | grep -v -E "${gu_non_interactive_shells_grep}" \ + if [ -f "${__gu_mount_point}/${__gu_passwd_file_path}" ]; then + if ${__gu_skip_nologin_users}; then + # remove lines starting with # (comments) + # remove inline comments + # remove blank lines + __gu_user_home_from_passwd=`sed -e 's|#.*$||g' \ + -e '/^ *$/d' \ + -e '/^$/d' \ + <"${__gu_mount_point}/${__gu_passwd_file_path}" \ + | grep -v -E "${__gu_non_interactive_shells_grep}" \ | awk 'BEGIN { FS=":"; } { printf "%s:%s\n",$1,$6; - }' >>"${TEMP_DATA_DIR}/.user_home_list.tmp" + }'` else - sed -e 's/#.*$//g' -e '/^ *$/d' -e '/^$/d' <"${gu_etc_passwd}" \ + __gu_user_home_from_passwd=`sed -e 's|#.*$||g' \ + -e '/^ *$/d' \ + -e '/^$/d' \ + <"${__gu_mount_point}/${__gu_passwd_file_path}" \ 
| awk 'BEGIN { FS=":"; } { printf "%s:%s\n",$1,$6; - }' >>"${TEMP_DATA_DIR}/.user_home_list.tmp" + }'` fi fi - # extract user:home from /home | /Users | /export/home - gu_user_home_dir="/home" - if [ "${OPERATING_SYSTEM}" = "macos" ]; then - gu_user_home_dir="/Users" - elif [ "${OPERATING_SYSTEM}" = "solaris" ]; then - gu_user_home_dir="/export/home" - fi - - if [ -d "${MOUNT_POINT}/${gu_user_home_dir}" ]; then - for gu_home_dir in "${MOUNT_POINT}/${gu_user_home_dir}"/*; do - echo "${gu_home_dir}" \ - | sed -e "s:${MOUNT_POINT}::" -e 's://*:/:g' \ - | awk '{ - split($1, parts, "/"); - size = 0; - for (i in parts) size++; - printf "%s:%s\n",parts[size],$1; - }' - done >>"${TEMP_DATA_DIR}/.user_home_list.tmp" - fi + # extract user:home from /home | /Users | /export/home | /u + for __gu_parent_home_dir in /home /Users /export/home /u; do + # let's skip home directories that are symlinks to avoid data dupplication + if [ ! -h "${__gu_mount_point}${__gu_parent_home_dir}" ]; then + for __gu_user_home_dir in "${__gu_mount_point}${__gu_parent_home_dir}"/*; do + __gu_user_home_from_dir_temp=`echo "${__gu_user_home_dir}" \ + | sed -e "s|^${__gu_mount_point}||" \ + | awk '{ + split($1, parts, "/"); + size = 0; + for (i in parts) size++; + printf "%s:%s\n",parts[size],$1; + }'` + __gu_user_home_from_dir="${__gu_user_home_from_dir} +${__gu_user_home_from_dir_temp}" + done + fi + done # ChomeOS has '/home/.shadow' directory - gu_user_home_dir="/home/.shadow" - if [ -d "${MOUNT_POINT}/${gu_user_home_dir}" ]; then - echo "shadow:${gu_user_home_dir}" >>"${TEMP_DATA_DIR}/.user_home_list.tmp" + if [ -d "${__gu_mount_point}/home/.shadow" ]; then + __gu_user_home_shadow="shadow:/home/.shadow" fi # extract user:home for current user only if running on a live system # useful for systems which do not have a /etc/passwd file - if [ "${MOUNT_POINT}" = "/" ] && [ -n "${HOME}" ]; then - gu_current_user=`get_current_user` - echo "${gu_current_user}:${HOME}" 
>>"${TEMP_DATA_DIR}/.user_home_list.tmp" + if [ "${__gu_mount_point}" = "/" ] && [ -n "${HOME:-}" ]; then + __gu_current_user=`_get_current_user` + __gu_user_home_current_user="${__gu_current_user}:${HOME}" fi - # remove empty user or home - grep -v -E "^:|:$" "${TEMP_DATA_DIR}/.user_home_list.tmp" \ + # remove blank lines + # remove :/home + # remove *:/home + # remove user: + # sort unique + printf "%s\n%s\n%s\n%s\n" \ + "${__gu_user_home_from_passwd}" \ + "${__gu_user_home_from_dir}" \ + "${__gu_user_home_shadow}" \ + "${__gu_user_home_current_user}" \ + | sed -e '/^$/d' \ + -e '/^*/d' \ + -e '/^:/d' \ + -e '/:$/d' \ | sort -u - -} \ No newline at end of file + +} diff --git a/lib/grep_o.sh b/lib/grep_o.sh new file mode 100644 index 00000000..c52e35b5 --- /dev/null +++ b/lib/grep_o.sh @@ -0,0 +1,13 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Print only the matching part of a string. This function mimics 'grep -o'. +# Arguments: +# string pattern: pattern matching +# Returns: +# string: corresponding match +_grep_o() +{ + __go_pattern="${1:-}" + sed -n -e 's|.*\('"${__go_pattern}"'\).*|\1|p' +} diff --git a/lib/hash_collector.sh b/lib/hash_collector.sh deleted file mode 100644 index 161f2a34..00000000 --- a/lib/hash_collector.sh +++ /dev/null @@ -1,448 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Collector that searches and hashes files. 
-# Globals: -# END_DATE_DAYS -# GLOBAL_EXCLUDE_MOUNT_POINT -# GLOBAL_EXCLUDE_NAME_PATTERN -# GLOBAL_EXCLUDE_PATH_PATTERN -# HASH_ALGORITHM -# MD5_HASHING_TOOL -# MOUNT_POINT -# SHA1_HASHING_TOOL -# SHA256_HASHING_TOOL -# START_DATE_DAYS -# TEMP_DATA_DIR -# XARGS_REPLACE_STRING_SUPPORT -# Requires: -# find_wrapper -# get_mount_point_by_file_system -# sanitize_filename -# sanitize_path -# sort_uniq_file -# Arguments: -# $1: path -# $2: is file list (optional) (default: false) -# $3: path pattern (optional) -# $4: name pattern (optional) -# $5: exclude path pattern (optional) -# $6: exclude name pattern (optional) -# $7: exclude file system (optional) -# $8: max depth (optional) -# $9: file type (optional) -# $10: min file size (optional) -# $11: max file size (optional) -# $12: permissions (optional) -# $13: ignore date range (optional) (default: false) -# $14: root output directory -# $15: output directory (optional) -# $16: output file -# $17: stderr output file (optional) -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -hash_collector() -{ - # some systems such as Solaris 10 do not support more than 9 parameters - # on functions, not even using curly braces {} e.g. 
${10} - # so the solution was to use shift - hc_path="${1:-}" - shift - hc_is_file_list="${1:-false}" - shift - hc_path_pattern="${1:-}" - shift - hc_name_pattern="${1:-}" - shift - hc_exclude_path_pattern="${1:-}" - shift - hc_exclude_name_pattern="${1:-}" - shift - hc_exclude_file_system="${1:-}" - shift - hc_max_depth="${1:-}" - shift - hc_file_type="${1:-}" - shift - hc_min_file_size="${1:-}" - shift - hc_max_file_size="${1:-}" - shift - hc_permissions="${1:-}" - shift - hc_ignore_date_range="${1:-false}" - shift - hc_root_output_directory="${1:-}" - shift - hc_output_directory="${1:-}" - shift - hc_output_file="${1:-}" - shift - hc_stderr_output_file="${1:-}" - - # return if path is empty - if [ -z "${hc_path}" ]; then - printf %b "hash_collector: missing required argument: 'path'\n" >&2 - return 22 - fi - - # return if root output directory is empty - if [ -z "${hc_root_output_directory}" ]; then - printf %b "hash_collector: missing required argument: \ -'root_output_directory'\n" >&2 - return 22 - fi - - # return if output file is empty - if [ -z "${hc_output_file}" ]; then - printf %b "hash_collector: missing required argument: 'output_file'\n" >&2 - return 22 - fi - - # prepend root output directory to path if it does not start with / - # (which means local file) - if echo "${hc_path}" | grep -q -v -E "^/"; then - hc_path=`sanitize_path "${TEMP_DATA_DIR}/${hc_root_output_directory}/${hc_path}"` - fi - - # return if is file list and file list does not exist - if ${hc_is_file_list} && [ ! 
-f "${hc_path}" ]; then - printf %b "hash_collector: file list does not exist: '${hc_path}'\n" >&2 - return 5 - fi - - # sanitize output file name - hc_output_file=`sanitize_filename "${hc_output_file}"` - - if [ -n "${hc_stderr_output_file}" ]; then - # sanitize stderr output file name - hc_stderr_output_file=`sanitize_filename "${hc_stderr_output_file}"` - else - hc_stderr_output_file="${hc_output_file}.stderr" - fi - - # sanitize output directory - hc_output_directory=`sanitize_path \ - "${hc_root_output_directory}/${hc_output_directory}"` - - # create output directory if it does not exist - if [ ! -d "${TEMP_DATA_DIR}/${hc_output_directory}" ]; then - mkdir -p "${TEMP_DATA_DIR}/${hc_output_directory}" >/dev/null - fi - - ${hc_ignore_date_range} && hc_date_range_start_days="" \ - || hc_date_range_start_days="${START_DATE_DAYS}" - ${hc_ignore_date_range} && hc_date_range_end_days="" \ - || hc_date_range_end_days="${END_DATE_DAYS}" - - # local exclude mount points - if [ -n "${hc_exclude_file_system}" ]; then - hc_exclude_mount_point=`get_mount_point_by_file_system \ - "${hc_exclude_file_system}"` - hc_exclude_path_pattern="${hc_exclude_path_pattern},\ -${hc_exclude_mount_point}" - fi - - # global exclude mount points - if [ -n "${GLOBAL_EXCLUDE_MOUNT_POINT}" ]; then - hc_exclude_path_pattern="${hc_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_MOUNT_POINT}" - fi - - # global exclude path pattern - if [ -n "${GLOBAL_EXCLUDE_PATH_PATTERN}" ]; then - hc_exclude_path_pattern="${hc_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_PATH_PATTERN}" - fi - - # global exclude name pattern - if [ -n "${GLOBAL_EXCLUDE_NAME_PATTERN}" ]; then - hc_exclude_name_pattern="${hc_exclude_name_pattern},\ -${GLOBAL_EXCLUDE_NAME_PATTERN}" - fi - - # prepend mount point if is not file list - ${hc_is_file_list} || hc_path=`sanitize_path "${MOUNT_POINT}/${hc_path}"` - - if is_element_in_list "md5" "${HASH_ALGORITHM}" \ - && [ -n "${MD5_HASHING_TOOL}" ]; then - if ${XARGS_REPLACE_STRING_SUPPORT}; then 
- if ${hc_is_file_list}; then - log_message COMMAND "sort -u \"${hc_path}\" | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} ${MD5_HASHING_TOOL} \"{}\"" - # sort and uniq - # escape single and double quotes - # shellcheck disable=SC2086 - sort -u "${hc_path}" \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} ${MD5_HASHING_TOOL} "{}" \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.md5" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - else - # find - # sort and uniq - # escape single and double quotes - # shellcheck disable=SC2086 - find_wrapper \ - "${hc_path}" \ - "${hc_path_pattern}" \ - "${hc_name_pattern}" \ - "${hc_exclude_path_pattern}" \ - "${hc_exclude_name_pattern}" \ - "${hc_max_depth}" \ - "${hc_file_type}" \ - "${hc_min_file_size}" \ - "${hc_max_file_size}" \ - "${hc_permissions}" \ - "${hc_date_range_start_days}" \ - "${hc_date_range_end_days}" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - | sort -u \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} ${MD5_HASHING_TOOL} "{}" \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.md5" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - log_message COMMAND "| sort -u | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} ${MD5_HASHING_TOOL} \"{}\"" - - fi - else - if ${hc_is_file_list}; then - log_message COMMAND "sort -u \"${hc_path}\" | while read %line%; do ${MD5_HASHING_TOOL} \"%line%\"" - # shellcheck disable=SC2162 - sort -u "${hc_path}" \ - | while read hc_line || [ -n "${hc_line}" ]; do - ${MD5_HASHING_TOOL} "${hc_line}" - done \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.md5" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - else - # shellcheck disable=SC2162 - find_wrapper \ - "${hc_path}" \ - "${hc_path_pattern}" \ - "${hc_name_pattern}" \ - "${hc_exclude_path_pattern}" \ - "${hc_exclude_name_pattern}" \ - 
"${hc_max_depth}" \ - "${hc_file_type}" \ - "${hc_min_file_size}" \ - "${hc_max_file_size}" \ - "${hc_permissions}" \ - "${hc_date_range_start_days}" \ - "${hc_date_range_end_days}" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - | sort -u \ - | while read hc_line || [ -n "${hc_line}" ]; do - ${MD5_HASHING_TOOL} "${hc_line}" - done \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.md5" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - log_message COMMAND "| sort -u | while read %line%; do ${MD5_HASHING_TOOL} \"%line%\"" - fi - fi - - # sort and uniq output file - sort_uniq_file "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.md5" - - # remove output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.md5" ]; then - rm -f "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.md5" \ - >/dev/null - fi - - # remove stderr output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - >/dev/null - fi - - fi - - if is_element_in_list "sha1" "${HASH_ALGORITHM}" \ - && [ -n "${SHA1_HASHING_TOOL}" ]; then - if ${XARGS_REPLACE_STRING_SUPPORT}; then - if ${hc_is_file_list}; then - log_message COMMAND "sort -u \"${hc_path}\" | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} ${SHA1_HASHING_TOOL} \"{}\"" - # sort and uniq - # escape single and double quotes - # shellcheck disable=SC2086 - sort -u "${hc_path}" \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} ${SHA1_HASHING_TOOL} "{}" \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha1" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - else - # find - # sort and uniq - # escape single and double quotes - # shellcheck disable=SC2086 - find_wrapper \ - "${hc_path}" \ - "${hc_path_pattern}" \ - "${hc_name_pattern}" \ - 
"${hc_exclude_path_pattern}" \ - "${hc_exclude_name_pattern}" \ - "${hc_max_depth}" \ - "${hc_file_type}" \ - "${hc_min_file_size}" \ - "${hc_max_file_size}" \ - "${hc_permissions}" \ - "${hc_date_range_start_days}" \ - "${hc_date_range_end_days}" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - | sort -u \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} ${SHA1_HASHING_TOOL} "{}" \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha1" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - log_message COMMAND "| sort -u | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} ${SHA1_HASHING_TOOL} \"{}\"" - fi - else - if ${hc_is_file_list}; then - log_message COMMAND "sort -u \"${hc_path}\" | while read %line%; do ${SHA1_HASHING_TOOL} \"%line%\"" - # shellcheck disable=SC2162 - sort -u "${hc_path}" \ - | while read hc_line || [ -n "${hc_line}" ]; do - ${SHA1_HASHING_TOOL} "${hc_line}" - done \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha1" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - else - # shellcheck disable=SC2162 - find_wrapper \ - "${hc_path}" \ - "${hc_path_pattern}" \ - "${hc_name_pattern}" \ - "${hc_exclude_path_pattern}" \ - "${hc_exclude_name_pattern}" \ - "${hc_max_depth}" \ - "${hc_file_type}" \ - "${hc_min_file_size}" \ - "${hc_max_file_size}" \ - "${hc_permissions}" \ - "${hc_date_range_start_days}" \ - "${hc_date_range_end_days}" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - | sort -u \ - | while read hc_line || [ -n "${hc_line}" ]; do - ${SHA1_HASHING_TOOL} "${hc_line}" - done \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha1" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - log_message COMMAND "| sort -u | while read %line%; do ${SHA1_HASHING_TOOL} \"%line%\"" - fi - fi - - # sort and uniq output file - sort_uniq_file 
"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha1" - - # remove output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha1" ]; then - rm -f "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha1" \ - >/dev/null - fi - - # remove stderr output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - >/dev/null - fi - - fi - - if is_element_in_list "sha256" "${HASH_ALGORITHM}" \ - && [ -n "${SHA256_HASHING_TOOL}" ]; then - if ${XARGS_REPLACE_STRING_SUPPORT}; then - if ${hc_is_file_list}; then - log_message COMMAND "sort -u \"${hc_path}\" | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} ${SHA256_HASHING_TOOL} \"{}\"" - # sort and uniq - # escape single and double quotes - # shellcheck disable=SC2086 - sort -u "${hc_path}" \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} ${SHA256_HASHING_TOOL} "{}" \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha256" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - else - # find - # sort and uniq - # escape single and double quotes - # shellcheck disable=SC2086 - find_wrapper \ - "${hc_path}" \ - "${hc_path_pattern}" \ - "${hc_name_pattern}" \ - "${hc_exclude_path_pattern}" \ - "${hc_exclude_name_pattern}" \ - "${hc_max_depth}" \ - "${hc_file_type}" \ - "${hc_min_file_size}" \ - "${hc_max_file_size}" \ - "${hc_permissions}" \ - "${hc_date_range_start_days}" \ - "${hc_date_range_end_days}" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - | sort -u \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} ${SHA256_HASHING_TOOL} "{}" \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha256" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - log_message COMMAND "| sort -u | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | 
xargs -I{} ${SHA256_HASHING_TOOL} \"{}\"" - fi - else - if ${hc_is_file_list}; then - log_message COMMAND "sort -u \"${hc_path}\" | while read %line%; do ${SHA256_HASHING_TOOL} \"%line%\"" - # shellcheck disable=SC2162 - sort -u "${hc_path}" \ - | while read hc_line || [ -n "${hc_line}" ]; do - ${SHA256_HASHING_TOOL} "${hc_line}" - done \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha256" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - else - # shellcheck disable=SC2162 - find_wrapper \ - "${hc_path}" \ - "${hc_path_pattern}" \ - "${hc_name_pattern}" \ - "${hc_exclude_path_pattern}" \ - "${hc_exclude_name_pattern}" \ - "${hc_max_depth}" \ - "${hc_file_type}" \ - "${hc_min_file_size}" \ - "${hc_max_file_size}" \ - "${hc_permissions}" \ - "${hc_date_range_start_days}" \ - "${hc_date_range_end_days}" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - | sort -u \ - | while read hc_line || [ -n "${hc_line}" ]; do - ${SHA256_HASHING_TOOL} "${hc_line}" - done \ - >>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha256" \ - 2>>"${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" - log_message COMMAND "| sort -u | while read %line%; do ${SHA256_HASHING_TOOL} \"%line%\"" - fi - fi - - # sort and uniq output file - sort_uniq_file "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha256" - - # remove output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha256" ]; then - rm -f "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_output_file}.sha256" \ - >/dev/null - fi - - # remove stderr output file will be hidden if it is empty - if [ ! 
-s "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${hc_output_directory}/${hc_stderr_output_file}" \ - >/dev/null - fi - - fi - -} \ No newline at end of file diff --git a/lib/http_transfer.sh b/lib/http_transfer.sh new file mode 100644 index 00000000..69486580 --- /dev/null +++ b/lib/http_transfer.sh @@ -0,0 +1,75 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Transfer file via HTTP PUT method. +# Arguments: +# string source: source file +# string url: url +# string header_host: host header value +# string header_date: date header value +# string header_content_type: content type header value +# string header_authorization: authorization header value +# boolean test_connectivity_mode: transfer testing data if true (default: false) +# Returns: +# boolean: true on success +# false on fail +_http_transfer() +{ + __ht_source="${1:-}" + __ht_url="${2:-}" + __ht_header_host="${3:-}" + __ht_header_date="${4:-}" + __ht_header_content_type="${5:-}" + __ht_header_authorization="${6:-}" + __ht_test_connectivity_mode="${7:-false}" + + if command_exists "curl"; then + __ht_data="--upload-file \"${__ht_source}\"" + ${__ht_test_connectivity_mode} && __ht_data="--data \"Transfer test from UAC\"" + __ht_verbose="--fail" + ${__UAC_VERBOSE_MODE} && __ht_verbose="--verbose" + + __ht_command="curl \ +${__ht_verbose} \ +--insecure \ +--request PUT" + + else + __ht_data="--body-file \"${__ht_source}\"" + ${__ht_test_connectivity_mode} && __ht_data="--body-data \"Transfer test from UAC\"" + __ht_verbose="--quiet" + ${__UAC_VERBOSE_MODE} && __ht_verbose="--verbose" + + __ht_command="wget \ +--output-document - \ +${__ht_verbose} \ +--no-check-certificate \ +--method PUT" + fi + + if [ -n "${__ht_header_host}" ]; then + __ht_command="${__ht_command} \ +--header \"Host: ${__ht_header_host}\"" + fi + + __ht_command="${__ht_command} \ +--header \"Date: ${__ht_header_date}\" \ +--header 
\"Content-Type: ${__ht_header_content_type}\" \ +--header \"Accept: */*\" \ +--header \"Expect: 100-continue\" \ +--header \"x-ms-blob-type: BlockBlob\"" + + if [ -n "${__ht_header_authorization}" ]; then + __ht_command="${__ht_command} \ +--header \"Authorization: ${__ht_header_authorization}\"" + fi + + __ht_command="${__ht_command} \ +${__ht_data} \ +\"${__ht_url}\"" + + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__ht_command}" + eval "${__ht_command}" + +} diff --git a/lib/ibm_cos_transfer.sh b/lib/ibm_cos_transfer.sh deleted file mode 100644 index 1b72361e..00000000 --- a/lib/ibm_cos_transfer.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Transfer file to IBM Cloud Object Storage. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: source file -# $2: URL (https://[endpoint]/[bucket-name]/[object-key]). -# $3: API key / token -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -ibm_cos_transfer() -{ - it_source="${1:-}" - it_url="${2:-}" - it_api_key="${3:-}" - - curl \ - --fail \ - --request PUT \ - --header "Authorization: Bearer ${it_api_key}" \ - --header "Content-Type: application/octet-stream" \ - --header "Accept: */*" \ - --header "Expect: 100-continue" \ - --upload-file "${it_source}" \ - "${it_url}" - -} \ No newline at end of file diff --git a/lib/ibm_cos_transfer_test.sh b/lib/ibm_cos_transfer_test.sh deleted file mode 100644 index 217c2de2..00000000 --- a/lib/ibm_cos_transfer_test.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Test the connectivity to IBM Cloud Object Storage. 
-# Globals: -# None -# Requires: -# None -# Arguments: -# $1: URL (https://[endpoint]/[bucket-name]/[object-key]). -# $2: API key / token -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -ibm_cos_transfer_test() -{ - ib_url="${1:-}" - ib_api_key="${2:-}" - - curl \ - --fail \ - --request PUT \ - --header "Authorization: Bearer ${ib_api_key}" \ - --header "Content-Type: application/text" \ - --header "Accept: */*" \ - --header "Expect: 100-continue" \ - --data "Transfer test from UAC" \ - "${ib_url}" - -} \ No newline at end of file diff --git a/lib/init_temp_data_dir.sh b/lib/init_temp_data_dir.sh new file mode 100644 index 00000000..5017ba52 --- /dev/null +++ b/lib/init_temp_data_dir.sh @@ -0,0 +1,29 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Create temporary files and directories used during execution. +# Arguments: +# none +# Returns: +# none +_init_temp_data_dir() +{ + # remove any existing (old) collected data + # grep is just a protection measure to make sure UAC is removing the proper directory + if [ -d "${__UAC_TEMP_DATA_DIR}" ] && printf "%s" "${__UAC_TEMP_DATA_DIR}" | grep -q "uac-data.tmp"; then + rm -rf "${__UAC_TEMP_DATA_DIR}" >/dev/null 2>/dev/null \ + || { _error_msg "cannot remove old temporary data directory from previous collection '${__UAC_TEMP_DATA_DIR}'."; return 1; } + fi + + # create temporary directory + mkdir -p "${__UAC_TEMP_DATA_DIR}" >/dev/null 2>/dev/null \ + || { _error_msg "cannot create temporary data directory '${__UAC_TEMP_DATA_DIR}'."; return 1; } + # directory where collected data that goes to the output file will be temporarily stored + mkdir -p "${__UAC_TEMP_DATA_DIR}/collected" >/dev/null 2>/dev/null + # directory where collected data using %temp_directory% will be temporarily stored + mkdir -p "${__UAC_TEMP_DATA_DIR}/tmp" >/dev/null 2>/dev/null + + touch 
"${__UAC_TEMP_DATA_DIR}/file_collector.tmp" + touch "${__UAC_TEMP_DATA_DIR}/${__UAC_LOG_FILE}" + +} \ No newline at end of file diff --git a/lib/is_digit.sh b/lib/is_digit.sh new file mode 100644 index 00000000..6a638456 --- /dev/null +++ b/lib/is_digit.sh @@ -0,0 +1,19 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Test whether all characters in the parameter are digits and there is +# at least one character. +# Arguments: +# integer number: input to be tested +# Returns: +# boolean: true on success +# false on fail +_is_digit() +{ + __id_number="${1:-empty}" + + if echo "${__id_number}" | grep -q -E "^-?[0-9]*$"; then + return 0 + fi + return 1 +} \ No newline at end of file diff --git a/lib/is_element_in_list.sh b/lib/is_element_in_list.sh deleted file mode 100644 index 28483266..00000000 --- a/lib/is_element_in_list.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Check if an element exists in a comma separated list. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: element -# $2: comma separated list -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -is_element_in_list() -{ - ie_element="${1:-}" - ie_list="${2:-}" - - # return if element is empty - if [ -z "${ie_element}" ]; then - printf %b "is_element_in_list: missing required argument: 'element'\n" >&2 - return 22 - fi - - # return if list is empty - if [ -z "${ie_list}" ]; then - printf %b "is_element_in_list: missing required argument: 'list'\n" >&2 - return 22 - fi - - # trim leading and trailing white space - # remove double and single quotes - ie_element=`echo "${ie_element}" \ - | sed -e 's:^ *::' \ - -e 's: *$::' \ - -e 's:"::g' \ - -e "s:'::g"` - - # trim leading and trailing white space - # remove any white spaces between comma and each item - # trim leading and trailing comma - # remove double and single quotes - echo "${ie_list}" \ - | sed -e 's:^ *::' \ - -e 's: *$::' \ - -e 's: *,:,:g' \ - -e 's:, *:,:g' \ - -e 's:^,*::' \ - -e 's:,*$::' \ - -e 's:"::g' \ - -e "s:'::g" \ - | awk -v ie_element="${ie_element}" 'BEGIN { FS=","; } { - for(N = 1; N <= NF; N ++) { - if (ie_element == $N) { - exit 0; # true - } - } - exit 1; # false - }' - -} \ No newline at end of file diff --git a/lib/is_in_list.sh b/lib/is_in_list.sh new file mode 100644 index 00000000..d9993065 --- /dev/null +++ b/lib/is_in_list.sh @@ -0,0 +1,28 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Check whether an element exists in a pipe-separated values string. 
+# Arguments: +# string element: element +# string list: pipe-separated values +# Returns: +# boolean: true on success +# false on fail +_is_in_list() +{ + __il_element="${1:-}" + __il_list="${2:-}" + + # shellcheck disable=SC2006 + __il_OIFS="${IFS}"; IFS="|" + for __il_item in ${__il_list}; do + if [ "${__il_element}" = "${__il_item}" ]; then + IFS="${__il_OIFS}" + return 0 + fi + done + + IFS="${__il_OIFS}" + return 1 + +} \ No newline at end of file diff --git a/lib/is_integer.sh b/lib/is_integer.sh deleted file mode 100644 index 54f6755c..00000000 --- a/lib/is_integer.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Test whether parameter is an integer or not. -# removed. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: number -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -is_integer() -{ - ii_number="${1:-}" - - # return if number is empty - if [ -z "${ii_number}" ]; then - printf %b "is_integer: missing required argument: 'number'\n" >&2 - return 22 - fi - - # shellcheck disable=SC2003 - expr 1 + "${ii_number}" >/dev/null - -} \ No newline at end of file diff --git a/lib/is_output_format_supported.sh b/lib/is_output_format_supported.sh new file mode 100644 index 00000000..8514dd67 --- /dev/null +++ b/lib/is_output_format_supported.sh @@ -0,0 +1,55 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Check whether output format is support. 
+# Arguments: +# string output_format: output format +# Returns: +# string: output extension +_is_output_format_supported() +{ + __if_output_format="${1:-}" + __if_output_password="${2:-}" + __if_output_extension="" + + # check if output format is supported + case "${__if_output_format}" in + "none") + __if_output_extension="" + ;; + "tar") + if command_exists "tar"; then + __if_output_extension="tar" + if command_exists "gzip"; then + __if_output_extension="tar.gz" + fi + else + _error_msg "cannot create output file as tar tool was not found. Please choose a different output format." + return 1 + fi + ;; + "zip") + if command_exists "zip"; then + __if_output_extension="zip" + if [ -n "${__if_output_password}" ]; then + if zip --password infected - "${__UAC_DIR}/uac" >/dev/null 2>/dev/null; then + true + else + _error_msg "cannot create password-protected zip file as zip tool does not support such feature" + return 1 + fi + fi + else + _error_msg "cannot create output file as zip tool was not found. Please choose a different output format." + return 1 + fi + ;; + *) + _error_msg "invalid output format '${__if_output_format}'" + return 1 + ;; + esac + + echo "${__if_output_extension}" + +} diff --git a/lib/is_psv.sh b/lib/is_psv.sh new file mode 100644 index 00000000..1ad98150 --- /dev/null +++ b/lib/is_psv.sh @@ -0,0 +1,18 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Test whether string is a pipe separated value. +# Arguments: +# string string: input to be tested +# Returns: +# boolean: true on success +# false on fail +_is_psv() +{ + __ip_string="${1:-}" + + if echo "${__ip_string}" | grep -q -E "\|"; then + return 0 + fi + return 1 +} \ No newline at end of file diff --git a/lib/is_root.sh b/lib/is_root.sh new file mode 100644 index 00000000..15625eec --- /dev/null +++ b/lib/is_root.sh @@ -0,0 +1,25 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Check if the current user has root privileges (UID = 0). 
+# Arguments: +# none +# Returns: +# boolean: true on success +# false on fail +_is_root() +{ + __ir_uid="" + if id -u >/dev/null 2>/dev/null; then + __ir_uid=`id -u 2>/dev/null` + elif [ -f "/etc/passwd" ]; then + __ir_current_user=`_get_current_user` + __ir_uid=`grep "^${__ir_current_user}" /etc/passwd 2>/dev/null \ + | awk 'BEGIN { FS=":"; } { print $3; }' 2>/dev/null` + fi + if [ "${__ir_uid}" -eq 0 ]; then + return 0 + fi + return 1 +} \ No newline at end of file diff --git a/lib/is_running_with_root_privileges.sh b/lib/is_running_with_root_privileges.sh deleted file mode 100644 index b0918c08..00000000 --- a/lib/is_running_with_root_privileges.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Check if the current user has root privileges. -# Globals: -# None -# Requires: -# get_current_user -# Arguments: -# None -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -is_running_with_root_privileges() -{ - - ir_current_user=`get_current_user` - ir_uid="" - - if [ "${ir_current_user}" = "root" ]; then - return 0 - else - # id command is not available on VMWare ESXi - if eval "id -u" >/dev/null 2>/dev/null; then - ir_uid=`id -u` - elif [ -f "/etc/passwd" ]; then - ir_uid=`grep "^${ir_current_user}" /etc/passwd 2>/dev/null \ - | awk 'BEGIN { FS=":"; } { print $3; }' 2>/dev/null` - fi - if [ "${ir_uid}" = "0" ]; then - return 0 - fi - fi - return 2 - -} \ No newline at end of file diff --git a/lib/is_valid_operating_system.sh b/lib/is_valid_operating_system.sh deleted file mode 100644 index ad938e71..00000000 --- a/lib/is_valid_operating_system.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Check if given operating system is valid. -# removed. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: operating system -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -is_valid_operating_system() -{ - io_operating_system="${1:-}" - - if [ "${io_operating_system}" != "android" ] \ - && [ "${io_operating_system}" != "aix" ] \ - && [ "${io_operating_system}" != "esxi" ] \ - && [ "${io_operating_system}" != "freebsd" ] \ - && [ "${io_operating_system}" != "linux" ] \ - && [ "${io_operating_system}" != "macos" ] \ - && [ "${io_operating_system}" != "netbsd" ] \ - && [ "${io_operating_system}" != "netscaler" ] \ - && [ "${io_operating_system}" != "openbsd" ] \ - && [ "${io_operating_system}" != "solaris" ]; then - return 2 - fi - -} \ No newline at end of file diff --git a/lib/list_artifacts.sh b/lib/list_artifacts.sh index a17aad8d..73da2030 100644 --- a/lib/list_artifacts.sh +++ b/lib/list_artifacts.sh @@ -1,27 +1,68 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 -############################################################################### # List available artifacts. -# Globals: -# UAC_DIR -# Requires: -# None # Arguments: -# None -# Outputs: -# Write available profiles to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -list_artifacts() +# string artifacts_dir: full path to the artifacts directory +# string os: operating system (default: all) +# Returns: +# none +_list_artifacts() { - printf %b "--------------------------------------------------------------------------------\n" - printf %b "Artifacts\n" - printf %b "--------------------------------------------------------------------------------\n" + __la_artifacts_dir="${1:-}" + __la_os="${2:-all}" + + if [ ! 
-d "${__la_artifacts_dir}" ]; then + _error_msg "list artifacts: no such file or directory: '${__la_artifacts_dir}'" + return 1 + fi - find "${UAC_DIR}"/artifacts/* -name "*.yaml" -print \ - | sed -e "s:^${UAC_DIR}/artifacts/::g" 2>/dev/null + if [ "${__la_os}" = "all" ] || _is_in_list "${__la_os}" "aix|esxi|freebsd|linux|macos|netbsd|netscaler|openbsd|solaris"; then + true + else + _error_msg "list artifacts: invalid operating system '${__la_os}'" + return 1 + fi + # Get artifacts for all or a specific operating system. + # Arguments: + # string artifacts_dir: full path to the artifacts directory + # string os: operating system (default: all) + # Returns: + # string: list of artifacts + _get_operating_system_artifact_list() + { + __oa_artifacts_dir="${1:-}" + __oa_os="${2:-all}" + + if [ "${__oa_os}" = "all" ]; then + find "${__oa_artifacts_dir}"/* -name "*.yaml" -print 2>/dev/null \ + | sed -e "s|^${__oa_artifacts_dir}/||" 2>/dev/null + else + __oa_OIFS="${IFS}" + IFS=" +" + __oa_artifacts_tmp=`find "${__oa_artifacts_dir}"/* -name "*.yaml" -print 2>/dev/null` + for __oa_item in ${__oa_artifacts_tmp}; do + if grep -q -E "supported_os:.*all|${__oa_os}" "${__oa_item}" 2>/dev/null; then + echo "${__oa_item}" | sed -e "s|^${__oa_artifacts_dir}/||" 2>/dev/null + fi + done + IFS="${__oa_OIFS}" + fi + } + + __la_selected_artifacts=`_get_operating_system_artifact_list "${__la_artifacts_dir}" "${__la_os}"` + __la_artifact_count=`echo "${__la_selected_artifacts}" | wc -l` + + printf "%s\n%s\n%s\n%s\n%s\n%s\n" \ +"--------------------------------------------------------------------------------" \ +"${__la_os} artifacts" \ +"--------------------------------------------------------------------------------" \ +"${__la_selected_artifacts}" \ +"--------------------------------------------------------------------------------" \ +"Total: ${__la_artifact_count}" + + return 0 } \ No newline at end of file diff --git a/lib/list_profiles.sh b/lib/list_profiles.sh index 
134d3543..6ae851e9 100644 --- a/lib/list_profiles.sh +++ b/lib/list_profiles.sh @@ -2,29 +2,29 @@ # SPDX-License-Identifier: Apache-2.0 # shellcheck disable=SC2006 -############################################################################### # List available profiles. -# Globals: -# UAC_DIR -# Requires: -# None # Arguments: -# None -# Outputs: -# Write available profiles to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -list_profiles() +# string profiles_dir: full path to the profiles directory +# Returns: +# none +_list_profiles() { - printf %b "--------------------------------------------------------------------------------\n" - printf %b "Profile Name : Description\n" - printf %b "--------------------------------------------------------------------------------\n" - for lp_file in "${UAC_DIR}"/profiles/*.yaml; do - lp_name=`grep -E "name: " <"${lp_file}" | sed -e 's/name: //'` - lp_description=`grep -E "description: " <"${lp_file}" | sed -e 's/description: //'` - printf %b "${lp_name} : ${lp_description}\n" - done + __lp_profiles_dir="${1:-}" + + if [ ! 
-d "${__lp_profiles_dir}" ]; then + _error_msg "list profiles: no such file or directory: '${__lp_profiles_dir}'" + return 1 + fi + printf "%s\n%s\n%s\n" \ +"--------------------------------------------------------------------------------" \ +"Profile Name : Description" \ +"--------------------------------------------------------------------------------" + for __lp_file in "${__lp_profiles_dir}"/*.yaml; do + __lp_name=`sed -n 's|name\: *\(.*\)|\1|p' "${__lp_file}"` + __lp_description=`sed -n 's|description\: *\(.*\)|\1|p' "${__lp_file}"` + printf "%s : %s\n" "${__lp_name}" "${__lp_description}" + done + + return 0 } \ No newline at end of file diff --git a/lib/load_config_file.sh b/lib/load_config_file.sh index bc131757..f08be7ab 100644 --- a/lib/load_config_file.sh +++ b/lib/load_config_file.sh @@ -2,99 +2,94 @@ # SPDX-License-Identifier: Apache-2.0 # shellcheck disable=SC2006 -############################################################################### -# Load config file (yaml) and set global variables values. -# Globals: -# None -# Requires: -# array_to_list -# is_element_in_list +# Load config file (yaml) to set global variables. # Arguments: -# $1: config file -# Outputs: -# Set the value for the following global vars: -# GLOBAL_EXCLUDE_PATH_PATTERN -# GLOBAL_EXCLUDE_NAME_PATTERN -# GLOBAL_EXCLUDE_FILE_SYSTEM -# HASH_ALGORITHM -# ENABLE_FIND_ATIME -# ENABLE_FIND_MTIME -# ENABLE_FIND_CTIME -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -load_config_file() { - lc_config_file="${1:-}" +# string config_file: full path to the config file +# Returns: +# none +_load_config_file() +{ + __lc_config_file="${1:-}" - # return if config file does not exist - if [ ! -f "${lc_config_file}" ]; then - printf %b "uac: config file: no such file or directory: \ -'${lc_config_file}'\n" >&2 - return 2 + if [ ! 
-f "${__lc_config_file}" ]; then + _error_msg "config file: no such file or directory: '${__lc_config_file}'" + return 1 fi - GLOBAL_EXCLUDE_PATH_PATTERN="" - GLOBAL_EXCLUDE_NAME_PATTERN="" - GLOBAL_EXCLUDE_FILE_SYSTEM="" - HASH_ALGORITHM="" - ENABLE_FIND_MTIME=true - ENABLE_FIND_ATIME=false - ENABLE_FIND_CTIME=true - - # exclude_path_pattern - GLOBAL_EXCLUDE_PATH_PATTERN=`sed -n \ - "/exclude_path_pattern:/s/exclude_path_pattern://p" "${lc_config_file}"` - GLOBAL_EXCLUDE_PATH_PATTERN=`array_to_list "${GLOBAL_EXCLUDE_PATH_PATTERN}"` - - # exclude_name_pattern - GLOBAL_EXCLUDE_NAME_PATTERN=`sed -n \ - "/exclude_name_pattern:/s/exclude_name_pattern://p" "${lc_config_file}"` - GLOBAL_EXCLUDE_NAME_PATTERN=`array_to_list "${GLOBAL_EXCLUDE_NAME_PATTERN}"` + __UAC_CONF_EXCLUDE_PATH_PATTERN="" + __UAC_CONF_EXCLUDE_NAME_PATTERN="" + __UAC_CONF_EXCLUDE_FILE_SYSTEM="" + __UAC_CONF_HASH_ALGORITHM="md5|sha1" + __UAC_CONF_MAX_DEPTH=0 + __UAC_CONF_ENABLE_FIND_MTIME=true + __UAC_CONF_ENABLE_FIND_ATIME=false + __UAC_CONF_ENABLE_FIND_CTIME=true - # exclude_file_system - GLOBAL_EXCLUDE_FILE_SYSTEM=`sed -n \ - "/exclude_file_system:/s/exclude_file_system://p" "${lc_config_file}"` - GLOBAL_EXCLUDE_FILE_SYSTEM=`array_to_list "${GLOBAL_EXCLUDE_FILE_SYSTEM}"` + # load exclude_path_pattern option + __lc_value=`sed -n -e 's|exclude_path_pattern\: *\(.*\)|\1|p' "${__lc_config_file}" | _array_to_psv` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_EXCLUDE_PATH_PATTERN="${__lc_value}" + fi + + # load exclude_name_pattern option + __lc_value=`sed -n -e 's|exclude_name_pattern\: *\(.*\)|\1|p' "${__lc_config_file}" | _array_to_psv` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_EXCLUDE_NAME_PATTERN="${__lc_value}" + fi - # hash_algorithm - HASH_ALGORITHM=`sed -n \ - "/hash_algorithm:/s/hash_algorithm://p" "${lc_config_file}"` - HASH_ALGORITHM=`array_to_list "${HASH_ALGORITHM}"` + # load exclude_file_system option + __lc_value=`sed -n -e 's|exclude_file_system\: *\(.*\)|\1|p' "${__lc_config_file}" 
| _array_to_psv` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_EXCLUDE_FILE_SYSTEM="${__lc_value}" + fi - if [ -z "${HASH_ALGORITHM}" ]; then - printf %b "uac: config file: 'hash_algorithm' must not be empty.\n" >&2 - return 22 + # load hash_algorithm option + __lc_value=`sed -n -e 's|hash_algorithm\: *\(.*\)|\1|p' "${__lc_config_file}" | _array_to_psv` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_HASH_ALGORITHM="${__lc_value}" fi - - # check if hashes are valid - # shellcheck disable=SC2001 - lc_hash_algorithm=`echo "${HASH_ALGORITHM}" | sed -e 's:,: :g'` - for lc_hash in ${lc_hash_algorithm}; do - if is_element_in_list "${lc_hash}" "md5,sha1,sha256"; then - continue - else - printf %b "uac: config file: invalid hash algorithm '${lc_hash}'\n" >&2 - return 22 - fi + __lc_valid_values="md5|sha1|sha256" + for __lc_item in `echo "${__UAC_CONF_HASH_ALGORITHM}" | sed -e 's:|: :g'`; do + _is_in_list "${__lc_item}" "${__lc_valid_values}" || { _error_msg "config file: invalid hash algorithm '${__lc_item}'"; return 1; } done - - # enable_find_mtime - ENABLE_FIND_MTIME=`sed -n \ - "/enable_find_mtime:/s/enable_find_mtime://p" "${lc_config_file}"` - ENABLE_FIND_MTIME=`lrstrip "${ENABLE_FIND_MTIME}"` - ${ENABLE_FIND_MTIME} || ENABLE_FIND_MTIME=false - # enable_find_atime - ENABLE_FIND_ATIME=`sed -n \ - "/enable_find_atime:/s/enable_find_atime://p" "${lc_config_file}"` - ENABLE_FIND_ATIME=`lrstrip "${ENABLE_FIND_ATIME}"` - ${ENABLE_FIND_ATIME} || ENABLE_FIND_ATIME=false + # load max_depth option + __lc_value=`sed -n -e 's|max_depth\: *\(.*\)|\1|p' "${__lc_config_file}" | sed -e 's| *||'` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_MAX_DEPTH="${__lc_value}" + fi + if _is_digit "${__UAC_CONF_MAX_DEPTH}" && [ "${__UAC_CONF_MAX_DEPTH}" -ge 0 ]; then + true + else + _error_msg "config file: 'max_depth' must be zero or a positive integer." 
+ return 1 + fi + + # load enable_find_mtime option + __lc_value=`sed -n -e 's| *$||' -e 's:enable_find_mtime\: *\(.*\):\1:p' "${__lc_config_file}"` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_ENABLE_FIND_MTIME="${__lc_value}" + fi + if [ "${__UAC_CONF_ENABLE_FIND_MTIME}" != true ]; then + __UAC_CONF_ENABLE_FIND_MTIME=false + fi + + # load enable_find_atime option + __lc_value=`sed -n -e 's| *$||' -e 's:enable_find_atime\: *\(.*\):\1:p' "${__lc_config_file}"` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_ENABLE_FIND_ATIME="${__lc_value}" + fi + if [ "${__UAC_CONF_ENABLE_FIND_ATIME}" != true ]; then + __UAC_CONF_ENABLE_FIND_ATIME=false + fi + + # load enable_find_ctime option + __lc_value=`sed -n -e 's| *$||' -e 's:enable_find_ctime\: *\(.*\):\1:p' "${__lc_config_file}"` + if [ -n "${__lc_value}" ]; then + __UAC_CONF_ENABLE_FIND_CTIME="${__lc_value}" + fi + if [ "${__UAC_CONF_ENABLE_FIND_CTIME}" != true ]; then + __UAC_CONF_ENABLE_FIND_CTIME=false + fi - # enable_find_ctime - ENABLE_FIND_CTIME=`sed -n \ - "/enable_find_ctime:/s/enable_find_ctime://p" "${lc_config_file}"` - ENABLE_FIND_CTIME=`lrstrip "${ENABLE_FIND_CTIME}"` - ${ENABLE_FIND_CTIME} || ENABLE_FIND_CTIME=false - } \ No newline at end of file diff --git a/lib/load_lib_files.sh b/lib/load_lib_files.sh deleted file mode 100644 index 345b13f2..00000000 --- a/lib/load_lib_files.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC1091 - -. "${UAC_DIR}/lib/archive_compress_data.sh" -. "${UAC_DIR}/lib/archive_data.sh" -. "${UAC_DIR}/lib/array_to_list.sh" -. "${UAC_DIR}/lib/artifact_file_exist.sh" -. "${UAC_DIR}/lib/azure_storage_sas_url_transfer_test.sh" -. "${UAC_DIR}/lib/azure_storage_sas_url_transfer.sh" -. "${UAC_DIR}/lib/check_available_system_tools.sh" -. "${UAC_DIR}/lib/command_collector.sh" -. "${UAC_DIR}/lib/command_exists.sh" -. "${UAC_DIR}/lib/copy_data.sh" -. "${UAC_DIR}/lib/create_acquisition_log.sh" -. 
"${UAC_DIR}/lib/create_artifact_list.sh" -. "${UAC_DIR}/lib/file_collector.sh" -. "${UAC_DIR}/lib/file_system_symlink_support.sh" -. "${UAC_DIR}/lib/find_collector.sh" -. "${UAC_DIR}/lib/find_wrapper.sh" -. "${UAC_DIR}/lib/get_absolute_directory_path.sh" -. "${UAC_DIR}/lib/get_current_user.sh" -. "${UAC_DIR}/lib/get_days_since_date_until_now.sh" -. "${UAC_DIR}/lib/get_epoch_date.sh" -. "${UAC_DIR}/lib/get_hostname.sh" -. "${UAC_DIR}/lib/get_mount_point_by_file_system.sh" -. "${UAC_DIR}/lib/get_operating_system.sh" -. "${UAC_DIR}/lib/get_profile_file.sh" -. "${UAC_DIR}/lib/get_system_arch.sh" -. "${UAC_DIR}/lib/get_user_home_list.sh" -. "${UAC_DIR}/lib/hash_collector.sh" -. "${UAC_DIR}/lib/ibm_cos_transfer_test.sh" -. "${UAC_DIR}/lib/ibm_cos_transfer.sh" -. "${UAC_DIR}/lib/is_element_in_list.sh" -. "${UAC_DIR}/lib/is_integer.sh" -. "${UAC_DIR}/lib/is_running_with_root_privileges.sh" -. "${UAC_DIR}/lib/is_valid_operating_system.sh" -. "${UAC_DIR}/lib/list_artifacts.sh" -. "${UAC_DIR}/lib/list_profiles.sh" -. "${UAC_DIR}/lib/load_config_file.sh" -. "${UAC_DIR}/lib/log_message.sh" -. "${UAC_DIR}/lib/output_file_exists.sh" -. "${UAC_DIR}/lib/lrstrip.sh" -. "${UAC_DIR}/lib/parse_artifacts_file.sh" -. "${UAC_DIR}/lib/profile_file_to_artifact_list.sh" -. "${UAC_DIR}/lib/s3_presigned_url_transfer_test.sh" -. "${UAC_DIR}/lib/s3_presigned_url_transfer.sh" -. "${UAC_DIR}/lib/sanitize_artifact_list.sh" -. "${UAC_DIR}/lib/sanitize_filename.sh" -. "${UAC_DIR}/lib/sanitize_path.sh" -. "${UAC_DIR}/lib/sftp_transfer_test.sh" -. "${UAC_DIR}/lib/sftp_transfer.sh" -. "${UAC_DIR}/lib/sort_uniq_file.sh" -. "${UAC_DIR}/lib/stat_collector.sh" -. "${UAC_DIR}/lib/terminate.sh" -. "${UAC_DIR}/lib/usage.sh" -. "${UAC_DIR}/lib/validate_artifacts_file.sh" -. 
"${UAC_DIR}/lib/validate_profile_file.sh" \ No newline at end of file diff --git a/lib/load_libraries.sh b/lib/load_libraries.sh new file mode 100644 index 00000000..a8de7d76 --- /dev/null +++ b/lib/load_libraries.sh @@ -0,0 +1,68 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC1091 + +. "${__UAC_DIR}/lib/array_to_psv.sh" +. "${__UAC_DIR}/lib/astrings.sh" +. "${__UAC_DIR}/lib/aws_s3_presigned_url_transfer.sh" +. "${__UAC_DIR}/lib/azure_storage_sas_url_transfer.sh" +. "${__UAC_DIR}/lib/build_artifact_list.sh" +. "${__UAC_DIR}/lib/build_find_command.sh" +. "${__UAC_DIR}/lib/command_collector.sh" +. "${__UAC_DIR}/lib/command_exists.sh" +. "${__UAC_DIR}/lib/copy_data.sh" +. "${__UAC_DIR}/lib/create_acquisition_log.sh" +. "${__UAC_DIR}/lib/error_msg.sh" +. "${__UAC_DIR}/lib/exit_fatal.sh" +. "${__UAC_DIR}/lib/exit_success.sh" +. "${__UAC_DIR}/lib/filter_list.sh" +. "${__UAC_DIR}/lib/find_based_collector.sh" +. "${__UAC_DIR}/lib/get_absolute_path.sh" +. "${__UAC_DIR}/lib/get_bin_path.sh" +. "${__UAC_DIR}/lib/get_current_user.sh" +. "${__UAC_DIR}/lib/get_days_since_date_until_now.sh" +. "${__UAC_DIR}/lib/get_epoch_date.sh" +. "${__UAC_DIR}/lib/get_hostname.sh" +. "${__UAC_DIR}/lib/get_mount_point_by_file_system.sh" +. "${__UAC_DIR}/lib/get_nproc.sh" +. "${__UAC_DIR}/lib/get_operating_system.sh" +. "${__UAC_DIR}/lib/get_profile_by_name.sh" +. "${__UAC_DIR}/lib/get_system_arch.sh" +. "${__UAC_DIR}/lib/get_user_home_list.sh" +. "${__UAC_DIR}/lib/grep_o.sh" +. "${__UAC_DIR}/lib/http_transfer.sh" +. "${__UAC_DIR}/lib/init_temp_data_dir.sh" +. "${__UAC_DIR}/lib/is_digit.sh" +. "${__UAC_DIR}/lib/is_in_list.sh" +. "${__UAC_DIR}/lib/is_output_format_supported.sh" +. "${__UAC_DIR}/lib/is_psv.sh" +. "${__UAC_DIR}/lib/is_root.sh" +. "${__UAC_DIR}/lib/list_artifacts.sh" +. "${__UAC_DIR}/lib/list_profiles.sh" +. "${__UAC_DIR}/lib/load_config_file.sh" +. "${__UAC_DIR}/lib/log_msg.sh" +. "${__UAC_DIR}/lib/output_exists.sh" +. 
"${__UAC_DIR}/lib/parse_artifact.sh" +. "${__UAC_DIR}/lib/parse_command_line_arguments.sh" +. "${__UAC_DIR}/lib/parse_profile.sh" +. "${__UAC_DIR}/lib/remove_non_regular_files.sh" +. "${__UAC_DIR}/lib/remove_temp_data_dir.sh" +. "${__UAC_DIR}/lib/run_command.sh" +. "${__UAC_DIR}/lib/s3_transfer_amazon.sh" +. "${__UAC_DIR}/lib/s3_transfer_google.sh" +. "${__UAC_DIR}/lib/s3_transfer_ibm.sh" +. "${__UAC_DIR}/lib/sanitize_output_directory.sh" +. "${__UAC_DIR}/lib/sanitize_output_file.sh" +. "${__UAC_DIR}/lib/sanitize_path.sh" +. "${__UAC_DIR}/lib/setup_tools.sh" +. "${__UAC_DIR}/lib/sftp_transfer.sh" +. "${__UAC_DIR}/lib/sort_uniq_file.sh" +. "${__UAC_DIR}/lib/tar_data.sh" +. "${__UAC_DIR}/lib/tar_gz_data.sh" +. "${__UAC_DIR}/lib/terminate.sh" +. "${__UAC_DIR}/lib/usage.sh" +. "${__UAC_DIR}/lib/validate_artifact_list.sh" +. "${__UAC_DIR}/lib/validate_artifact.sh" +. "${__UAC_DIR}/lib/validate_profile.sh" +. "${__UAC_DIR}/lib/verbose_msg.sh" +. "${__UAC_DIR}/lib/zip_data.sh" \ No newline at end of file diff --git a/lib/log_message.sh b/lib/log_message.sh deleted file mode 100644 index 0e7585fe..00000000 --- a/lib/log_message.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Log message into uac log file. -# Globals: -# UAC_LOG_FILE -# Requires: -# None -# Arguments: -# $1: level -# COMMAND Command -# DEBUG Debug -# INFO Info (default) -# WARNING Warning -# ERROR Error -# $2: message -# Outputs: -# Write message to UAC_LOG_FILE. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -log_message() -{ - lm_level="${1:-I}" - lm_message="${2:-}" - - if [ "${lm_level}" != "COMMAND" ] && [ "${lm_level}" != "DEBUG" ] \ - && [ "${lm_level}" != "INFO" ] && [ "${lm_level}" != "WARNING" ] \ - && [ "${lm_level}" != "ERROR" ]; then - lm_level="INFO" - fi - - lm_timestamp=`date "+%Y-%m-%d %H:%M:%S %z"` - printf %b "${lm_timestamp} ${lm_level} ${lm_message}\n" \ - >>"${UAC_LOG_FILE}" 2>/dev/null - -} \ No newline at end of file diff --git a/lib/log_msg.sh b/lib/log_msg.sh new file mode 100644 index 00000000..e77465f8 --- /dev/null +++ b/lib/log_msg.sh @@ -0,0 +1,29 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Log message. +# Arguments: +# string level: error level (default: INF) +# accepted values: DBG, INF, ERR, CMD +# string message: error message +# Returns: +# none +_log_msg() +{ + __lm_level="${1:-INF}" + __lm_message="${2:-}" + + if [ ! -f "${__UAC_TEMP_DATA_DIR}/${__UAC_LOG_FILE}" ]; then + return 1 + fi + + if [ "${__lm_level}" = "DBG" ]; then + ${__UAC_DEBUG_MODE} || return 0 + fi + + # shellcheck disable=SC2006 + __lm_timestamp=`date "+%Y-%m-%d %H:%M:%S %z"` + printf "%s %s %s\n" "${__lm_timestamp}" "${__lm_level}" "${__lm_message}" \ + >>"${__UAC_TEMP_DATA_DIR}/${__UAC_LOG_FILE}" 2>/dev/null + +} \ No newline at end of file diff --git a/lib/lrstrip.sh b/lib/lrstrip.sh deleted file mode 100644 index 4a075352..00000000 --- a/lib/lrstrip.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Returns a copy of the string with leading and trailing white space characters -# removed. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: string -# Outputs: -# Write new string to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -lrstrip() -{ - lr_string="${1:-}" - - echo "${lr_string}" | sed -e 's:^ *::' -e 's: *$::' -} \ No newline at end of file diff --git a/lib/output_exists.sh b/lib/output_exists.sh new file mode 100644 index 00000000..8c5eb3a5 --- /dev/null +++ b/lib/output_exists.sh @@ -0,0 +1,24 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Check whether output file or directory exists. +# Arguments: +# string output_base_name: full path to the output file +# Returns: +# boolean: true on success +# false on fail +_output_exists() +{ + __of_output_file="${1:-}" + + if [ -d "${__of_output_file}" ]; then + _error_msg "cannot create output directory ‘${__of_output_file}’: Directory exists" + return 0 + elif [ -f "${__of_output_file}" ]; then + _error_msg "cannot create output file '${__of_output_file}': File exists" + return 0 + fi + + return 1 + +} diff --git a/lib/output_file_exists.sh b/lib/output_file_exists.sh deleted file mode 100644 index 4159c925..00000000 --- a/lib/output_file_exists.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -# Check whether output file exists. 
-# Arguments: -# string output_file: full path to output file -# Returns: -# boolean: true on success -# false on fail -output_file_exists() -{ - __of_output_file="${1:-}" - - if [ -d "${__of_output_file}" ]; then - printf %b "uac: can't create directory '${__of_output_file}': Directory exists" >&2 - return 0 - elif [ -f "${__of_output_file}.tar.gz" ]; then - printf %b "uac: can't create output file '${__of_output_file}.tar.gz': File exists" >&2 - return 0 - elif [ -f "${__of_output_file}.tar" ]; then - printf %b "uac: can't create output file '${__of_output_file}.tar': File exists" >&2 - return 0 - fi - return 1 - -} diff --git a/lib/parse_artifact.sh b/lib/parse_artifact.sh new file mode 100644 index 00000000..390e49a5 --- /dev/null +++ b/lib/parse_artifact.sh @@ -0,0 +1,492 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006,SC2162 + +# Parse an artifact file to collect data. +# Arguments: +# string artifact: full path to the artifact file +# Returns: +# none +_parse_artifact() +{ + __pa_artifact="${1:-}" + + if [ ! 
-f "${__pa_artifact}" ]; then + _log_msg ERR "_parse_artifact: no such file or directory: '${__pa_artifact}'" + return 1 + fi + + _cleanup_local_vars() + { + __pa_collector="" + __pa_command="" + __pa_compress_output_file=false + __pa_condition="" + __pa_description="" + __pa_exclude_file_system="" + __pa_exclude_name_pattern="" + __pa_exclude_nologin_users=false + __pa_exclude_path_pattern="" + __pa_file_type="" + __pa_foreach="" + __pa_ignore_date_range=false + __pa_is_file_list=false + __pa_max_depth="" + __pa_max_file_size="" + __pa_min_file_size="" + __pa_name_pattern="" + __pa_output_directory="" + __pa_output_file="" + __pa_path_pattern="" + __pa_path="" + __pa_permissions="" + __pa_supported_os="" + } + _cleanup_local_vars + + __pa_global_output_directory="" + + _replace_exposed_variables() + { + __re_value="${1:-}" + + if [ -n "${__UAC_START_DATE}" ]; then + __re_value=`printf "%s" "${__re_value}" \ + | sed -e "s|%start_date%|${__UAC_START_DATE}|g" \ + -e "s|%start_date_epoch%|${__UAC_START_DATE_EPOCH}|g" 2>/dev/null` + fi + if [ -n "${__UAC_END_DATE}" ]; then + __re_value=`printf "%s" "${__re_value}" \ + | sed -e "s|%end_date%|${__UAC_END_DATE}|g" \ + -e "s|%end_date_epoch%|${__UAC_END_DATE_EPOCH}|g" 2>/dev/null` + fi + printf "%s" "${__re_value}" \ + | sed -e "s|%uac_directory%|${__UAC_DIR}|g" \ + -e "s|%mount_point%|${__UAC_MOUNT_POINT}|g" \ + -e "s|%temp_directory%|${__UAC_TEMP_DATA_DIR}/tmp|g" 2>/dev/null + } + + # remove lines starting with # (comments) and any inline comments + # remove leading and trailing space characters + # remove blank lines + # add a new line and '-' to the end of file + printf "\n%s\n" "-" \ + | cat "${__pa_artifact}" - \ + | sed -e 's|#.*$||g' \ + -e 's|^ *||' \ + -e 's| *$||' \ + -e '/^$/d' 2>/dev/null \ + | while read __pa_key __pa_value; do + + case "${__pa_key}" in + "artifacts:") + # read the next line which must be a dash (-) + read __pa_dash + if [ "${__pa_dash}" != "-" ]; then + _log_msg ERR "_parse_artifact: invalid 
'artifacts' sequence of mappings." + return 1 + fi + if [ -n "${__pa_output_directory}" ]; then + __pa_global_output_directory="${__pa_output_directory}" + fi + if [ -n "${__pa_condition}" ]; then + # run global condition command and skip collection if exit code is greater than 0 + if echo "${__pa_condition}" | grep -q -E "^!"; then + __pa_condition=`echo "${__pa_condition}" | sed -e 's|^! *||' 2>/dev/null` + if _run_command "${__pa_condition}" >/dev/null; then + _log_msg DBG "Global condition '${__pa_condition}' not satisfied. Skipping..." + return 1 + else + _log_msg DBG "Global condition '${__pa_condition}' satisfied" + fi + else + if _run_command "${__pa_condition}" >/dev/null; then + _log_msg DBG "Global condition '${__pa_condition}' satisfied" + else + _log_msg DBG "Global condition '${__pa_condition}' not satisfied. Skipping..." + return 1 + fi + fi + fi + _cleanup_local_vars + ;; + "collector:") + __pa_collector="${__pa_value}" + ;; + "command:") + if [ "${__pa_value}" = "\"\"\"" ]; then + __pa_value="" + while read __pa_line && [ "${__pa_line}" != "\"\"\"" ]; do + __pa_value="${__pa_value}${__pa_line} +" + done + fi + __pa_command=`_replace_exposed_variables "${__pa_value}"` + ;; + "compress_output_file:") + __pa_compress_output_file="${__pa_value}" + ;; + "condition:") + if [ "${__pa_value}" = "\"\"\"" ]; then + __pa_value="" + while read __pa_line && [ "${__pa_line}" != "\"\"\"" ]; do + __pa_value="${__pa_value}${__pa_line} +" + done + fi + __pa_condition=`_replace_exposed_variables "${__pa_value}"` + ;; + "description:") + __pa_description="${__pa_value}" + ;; + "exclude_file_system:") + __pa_exclude_file_system=`echo "${__pa_value}" | _array_to_psv 2>/dev/null` + ;; + "exclude_name_pattern:") + __pa_exclude_name_pattern=`echo "${__pa_value}" | _array_to_psv 2>/dev/null` + ;; + "exclude_nologin_users:") + __pa_exclude_nologin_users="${__pa_value}" + ;; + "exclude_path_pattern:") + __pa_exclude_path_pattern=`echo "${__pa_value}" | _array_to_psv 
2>/dev/null` + ;; + "file_type:") + __pa_file_type=`echo "${__pa_value}" | _array_to_psv 2>/dev/null` + ;; + "foreach:") + if [ "${__pa_value}" = "\"\"\"" ]; then + __pa_value="" + while read __pa_line && [ "${__pa_line}" != "\"\"\"" ]; do + __pa_value="${__pa_value}${__pa_line} +" + done + fi + __pa_foreach=`_replace_exposed_variables "${__pa_value}"` + ;; + "ignore_date_range:") + __pa_ignore_date_range="${__pa_value}" + ;; + "is_file_list:") + __pa_is_file_list="${__pa_value}" + ;; + "max_depth:") + __pa_max_depth="${__pa_value}" + ;; + "max_file_size:") + __pa_max_file_size="${__pa_value}" + ;; + "min_file_size:") + __pa_min_file_size="${__pa_value}" + ;; + "name_pattern:") + __pa_name_pattern=`echo "${__pa_value}" | _array_to_psv 2>/dev/null` + ;; + "output_directory:") + if echo "${__pa_value}" | grep -q -E "%temp_directory%"; then + __pa_output_directory=`echo "${__pa_value}" | sed -e "s|%temp_directory%|${__UAC_TEMP_DATA_DIR}/tmp|g" 2>/dev/null` + else + __pa_output_directory="${__UAC_TEMP_DATA_DIR}/collected/${__pa_value}" + fi + ;; + "output_file:") + __pa_output_file="${__pa_value}" + ;; + "path:") + __pa_path=`_replace_exposed_variables "${__pa_value}"` + ;; + "path_pattern:") + __pa_path_pattern=`echo "${__pa_value}" | _array_to_psv 2>/dev/null` + ;; + "permissions:") + __pa_permissions=`echo "${__pa_value}" | _array_to_psv 2>/dev/null` + ;; + "supported_os:") + __pa_supported_os=`echo "${__pa_value}" | _array_to_psv 2>/dev/null` + ;; + "-") + + # skip if artifact does not apply to the current operating system or all + # try to collect all artifacts regardless of the operating system if the debugging mode is enabled (--debug). 
+ if _is_in_list "${__UAC_OPERATING_SYSTEM}" "${__pa_supported_os}" \ + || _is_in_list "all" "${__pa_supported_os}" \ + || ${__UAC_IGNORE_OPERATING_SYSTEM:-false}; then + true + else + _cleanup_local_vars + continue + fi + + # skip if invalid collector + if [ "${__pa_collector}" != "command" ] \ + && [ "${__pa_collector}" != "file" ] \ + && [ "${__pa_collector}" != "find" ] \ + && [ "${__pa_collector}" != "hash" ] \ + && [ "${__pa_collector}" != "stat" ]; then + _log_msg ERR "_parse_artifact: invalid collector '${__pa_collector}'" + _cleanup_local_vars + continue + fi + + if [ -n "${__pa_condition}" ]; then + # run local condition command and skip collection if exit code greater than 0 + if echo "${__pa_condition}" | grep -q -E "^!"; then + __pa_condition=`echo "${__pa_condition}" | sed -e 's|^! *||' 2>/dev/null` + if _run_command "${__pa_condition}" false >/dev/null; then + _log_msg DBG "Condition '${__pa_condition}' not satisfied. Skipping..." + _cleanup_local_vars + continue + else + _log_msg DBG "Condition '${__pa_condition}' satisfied" + fi + else + if _run_command "${__pa_condition}" false >/dev/null; then + _log_msg DBG "Condition '${__pa_condition}' satisfied" + else + _log_msg DBG "Condition '${__pa_condition}' not satisfied. Skipping..." 
+ _cleanup_local_vars + continue + fi + fi + fi + + if [ -z "${__pa_output_directory}" ]; then + __pa_output_directory="${__pa_global_output_directory}" + fi + + # path, command and foreach contains %user% and/or %user_home% + # the same collector needs to be run for each %user% and/or %user_home% + if echo "${__pa_path}" | grep -q -E "%user%" 2>/dev/null \ + || echo "${__pa_command}" | grep -q -E "%user%" 2>/dev/null \ + || echo "${__pa_foreach}" | grep -q -E "%user%" 2>/dev/null \ + || echo "${__pa_path}" | grep -q -E "%user_home%" 2>/dev/null \ + || echo "${__pa_command}" | grep -q -E "%user_home%" 2>/dev/null \ + || echo "${__pa_foreach}" | grep -q -E "%user_home%" 2>/dev/null; then + + # loop through users + __pa_user_home_list="${__UAC_USER_HOME_LIST}" + ${__pa_exclude_nologin_users} && __pa_user_home_list="${__UAC_VALID_SHELL_ONLY_USER_HOME_LIST}" + __pa_processed_home="" + echo "${__pa_user_home_list}" \ + | while read __pa_line && [ -n "${__pa_line}" ]; do + __pa_user=`echo "${__pa_line}" | cut -d ":" -f 1` + __pa_home=`echo "${__pa_line}" | cut -d ":" -f 2` + __pa_no_slash_home=`echo "${__pa_line}" | cut -d ":" -f 2 | sed -e 's|^/||' 2>/dev/null` + _log_msg INF "Collecting data for user ${__pa_user}" + + # replace %user% and %user_home% in path + __pa_new_path=`echo "${__pa_path}" \ + | sed -e "s|%user%|${__pa_user}|g" \ + -e "s|/%user_home%|${__pa_home}|g" \ + -e "s|%user_home%|${__pa_no_slash_home}|g" 2>/dev/null` + + if [ "${__pa_collector}" = "file" ]; then + if echo "${__pa_processed_home}" | grep -q -E "\|${__pa_new_path}\|"; then + _log_msg INF "Skipping as home directory ${__pa_new_path} has been collected already" + continue + else + __pa_processed_home="${__pa_processed_home}|${__pa_new_path}|" + fi + fi + + # replace %user% and %user_home% in command + __pa_new_command=`echo "${__pa_command}" \ + | sed -e "s|%user%|${__pa_user}|g" \ + -e "s|/%user_home%|${__pa_home}|g" \ + -e "s|%user_home%|${__pa_no_slash_home}|g" 2>/dev/null` + + # replace 
%user% and %user_home% in foreach + __pa_new_foreach=`echo "${__pa_foreach}" \ + | sed -e "s|%user%|${__pa_user}|g" \ + -e "s|/%user_home%|${__pa_home}|g" \ + -e "s|%user_home%|${__pa_no_slash_home}|g" 2>/dev/null` + + # replace %user% and %user_home% in output_directory + __pa_new_output_directory=`echo "${__pa_output_directory}" \ + | sed -e "s|%user%|${__pa_user}|g" \ + -e "s|/%user_home%|${__pa_home}|g" \ + -e "s|%user_home%|${__pa_no_slash_home}|g" 2>/dev/null` + + # replace %user% and %user_home% in output_file + __pa_new_output_file=`echo "${__pa_output_file}" \ + | sed -e "s|%user%|${__pa_user}|g" \ + -e "s|/%user_home%|${__pa_home}|g" \ + -e "s|%user_home%|${__pa_no_slash_home}|g" 2>/dev/null` + + __pa_new_max_depth="${__pa_max_depth}" + # if home directory is / (root in some systems), + # maxdepth will be set to 2 + if [ "${__pa_new_path}" = "${__UAC_MOUNT_POINT}" ]; then + __pa_new_max_depth=2 + fi + + if [ "${__pa_collector}" = "command" ]; then + _command_collector \ + "${__pa_new_foreach}" \ + "${__pa_new_command}" \ + "${__pa_new_output_directory}" \ + "${__pa_new_output_file}" \ + "${__pa_compress_output_file}" + elif [ "${__pa_collector}" = "file" ]; then + _find_based_collector \ + "file" \ + "${__pa_new_path}" \ + "${__pa_is_file_list}" \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_new_max_depth}" \ + "${__pa_file_type}" \ + "${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__UAC_TEMP_DATA_DIR}" \ + "file_collector.tmp" + elif [ "${__pa_collector}" = "find" ]; then + _find_based_collector \ + "find" \ + "${__pa_new_path}" \ + false \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_new_max_depth}" \ + "${__pa_file_type}" \ + 
"${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__pa_new_output_directory}" \ + "${__pa_new_output_file}" + elif [ "${__pa_collector}" = "hash" ]; then + _file_collector \ + "hash" \ + "${__pa_new_path}" \ + "${__pa_is_file_list}" \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_new_max_depth}" \ + "${__pa_file_type}" \ + "${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__pa_new_output_directory}" \ + "${__pa_new_output_file}" + elif [ "${__pa_collector}" = "stat" ]; then + _file_collector \ + "stat" \ + "${__pa_new_path}" \ + "${__pa_is_file_list}" \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_new_max_depth}" \ + "${__pa_file_type}" \ + "${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__pa_new_output_directory}" \ + "${__pa_new_output_file}" + fi + done + else + if [ "${__pa_collector}" = "command" ]; then + _command_collector \ + "${__pa_foreach}" \ + "${__pa_command}" \ + "${__pa_output_directory}" \ + "${__pa_output_file}" \ + "${__pa_compress_output_file}" + elif [ "${__pa_collector}" = "file" ]; then + _find_based_collector \ + "file" \ + "${__pa_path}" \ + "${__pa_is_file_list}" \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_max_depth}" \ + "${__pa_file_type}" \ + "${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__UAC_TEMP_DATA_DIR}" \ + "file_collector.tmp" + elif [ "${__pa_collector}" = "find" ]; then + _find_based_collector \ 
+ "find" \ + "${__pa_path}" \ + "${__pa_is_file_list}" \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_max_depth}" \ + "${__pa_file_type}" \ + "${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__pa_output_directory}" \ + "${__pa_output_file}" + elif [ "${__pa_collector}" = "hash" ]; then + _find_based_collector \ + "hash" \ + "${__pa_path}" \ + "${__pa_is_file_list}" \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_max_depth}" \ + "${__pa_file_type}" \ + "${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__pa_output_directory}" \ + "${__pa_output_file}" + elif [ "${__pa_collector}" = "stat" ]; then + _find_based_collector \ + "stat" \ + "${__pa_path}" \ + "${__pa_is_file_list}" \ + "${__pa_path_pattern}" \ + "${__pa_name_pattern}" \ + "${__pa_exclude_path_pattern}" \ + "${__pa_exclude_name_pattern}" \ + "${__pa_exclude_file_system}" \ + "${__pa_max_depth}" \ + "${__pa_file_type}" \ + "${__pa_min_file_size}" \ + "${__pa_max_file_size}" \ + "${__pa_permissions}" \ + "${__pa_ignore_date_range}" \ + "${__pa_output_directory}" \ + "${__pa_output_file}" + fi + fi + + _cleanup_local_vars + ;; + esac + done + +} \ No newline at end of file diff --git a/lib/parse_artifacts_file.sh b/lib/parse_artifacts_file.sh deleted file mode 100644 index a4dd89fb..00000000 --- a/lib/parse_artifacts_file.sh +++ /dev/null @@ -1,450 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2001,SC2006 - -############################################################################### -# Parse artifacts file. 
-# Globals: -# END_DATE -# END_DATE_EPOCH -# MOUNT_POINT -# START_DATE -# START_DATE_EPOCH -# TEMP_DATA_DIR -# USER_HOME_LIST -# Requires: -# array_to_list -# lrstrip -# sanitize_path -# Arguments: -# $1: artifacts file -# $2: root output directory -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -parse_artifacts_file() -{ - pa_artifacts_file="${1:-}" - pa_root_output_directory="${2:-}" - - # return if artifacts file does not exist - if [ ! -f "${pa_artifacts_file}" ]; then - printf %b "parse_artifacts_file: no such file or directory: \ -'${pa_artifacts_file}'\n" >&2 - return 2 - fi - - _cleanup_local_vars() { - pa_collector="" - pa_supported_os="" - pa_foreach="" - pa_command="" - pa_path="" - pa_path_pattern="" - pa_name_pattern="" - pa_exclude_path_pattern="" - pa_exclude_name_pattern="" - pa_exclude_file_system="" - pa_max_depth="" - pa_file_type="" - pa_min_file_size="" - pa_max_file_size="" - pa_permissions="" - pa_ignore_date_range=false - pa_output_file="" - pa_output_directory="" - pa_stderr_output_file="" - pa_is_file_list=false - pa_compress_output_file=false - pa_exclude_nologin_users=false - } - - _cleanup_local_vars - - # save IFS value - OIFS="${IFS}" - - # add '-' to the end of file - # remove lines starting with # (comments) - # remove inline comments - # remove blank lines - # shellcheck disable=SC2162 - printf %b "\n-" | cat "${pa_artifacts_file}" - \ - | sed -e 's/#.*$//g' -e '/^ *$/d' -e '/^$/d' 2>/dev/null \ - | while IFS=":" read pa_key pa_value || [ -n "${pa_key}" ]; do - - pa_key=`lrstrip "${pa_key}"` - pa_value=`lrstrip "${pa_value}"` - - if [ -n "${pa_value}" ]; then - # replace %uac_directory% by ${UAC_DIR} value - pa_value=`echo "${pa_value}" \ - | sed -e "s:%uac_directory%:${UAC_DIR}:g"` - - # replace %mount_point% by ${MOUNT_POINT} value - pa_value=`echo "${pa_value}" \ - | sed -e 
"s:%mount_point%:${MOUNT_POINT}:g"` - - # replace %destination_directory% by ${TEMP_DATA_DIR}/${pa_root_output_directory} value - pa_value=`echo "${pa_value}" \ - | sed -e "s:%destination_directory%:${TEMP_DATA_DIR}/${pa_root_output_directory}:g"` - - if [ -n "${START_DATE}" ]; then - # replace %start_date% by ${START_DATE} value - pa_value=`echo "${pa_value}" \ - | sed -e "s:%start_date%:${START_DATE}:g"` - # replace %start_date_epoch% by ${START_DATE_EPOCH} value - pa_value=`echo "${pa_value}" \ - | sed -e "s:%start_date_epoch%:${START_DATE_EPOCH}:g"` - fi - - if [ -n "${END_DATE}" ]; then - # replace %end_date% by ${END_DATE} value - pa_value=`echo "${pa_value}" \ - | sed -e "s:%end_date%:${END_DATE}:g"` - # replace %end_date_epoch% by ${END_DATE_EPOCH} value - pa_value=`echo "${pa_value}" \ - | sed -e "s:%end_date_epoch%:${END_DATE_EPOCH}:g"` - fi - fi - - case "${pa_key}" in - "artifacts") - # read the next line which must be a dash (-) - read pa_dash - pa_dash=`lrstrip "${pa_dash}"` - if [ "${pa_dash}" != "-" ]; then - printf %b "validate_artifacts_file: invalid 'artifacts' \ -sequence of mappings\n" >&2 - return 150 - fi - ;; - "collector") - pa_collector="${pa_value}" - ;; - "supported_os") - pa_supported_os=`array_to_list "${pa_value}"` - ;; - "foreach"|"loop_command") - pa_foreach="${pa_value}" - ;; - "command") - pa_command="${pa_value}" - ;; - "path") - pa_path="${pa_value}" - ;; - "path_pattern") - pa_path_pattern=`array_to_list "${pa_value}"` - ;; - "name_pattern") - pa_name_pattern=`array_to_list "${pa_value}"` - ;; - "exclude_path_pattern") - pa_exclude_path_pattern=`array_to_list "${pa_value}"` - ;; - "exclude_name_pattern") - pa_exclude_name_pattern=`array_to_list "${pa_value}"` - ;; - "exclude_file_system") - pa_exclude_file_system=`array_to_list "${pa_value}"` - ;; - "max_depth") - pa_max_depth="${pa_value}" - ;; - "file_type") - pa_file_type="${pa_value}" - ;; - "min_file_size") - pa_min_file_size="${pa_value}" - ;; - "max_file_size") - 
pa_max_file_size="${pa_value}" - ;; - "permissions") - pa_permissions="${pa_value}" - ;; - "ignore_date_range") - pa_ignore_date_range="${pa_value}" - ;; - "output_directory") - pa_output_directory="${pa_value}" - ;; - "output_file") - pa_output_file="${pa_value}" - ;; - "stderr_output_file") - pa_stderr_output_file="${pa_value}" - ;; - "is_file_list") - pa_is_file_list="${pa_value}" - ;; - "compress_output_file") - pa_compress_output_file="${pa_value}" - ;; - "exclude_nologin_users") - pa_exclude_nologin_users="${pa_value}" - ;; - "-") - - # restore IFS value - IFS="${OIFS}" - - # cannot use ! is_element_in_list because it is not accepted by solaris - # skip if artifact does not apply to the current operating system - if is_element_in_list "${OPERATING_SYSTEM}" "${pa_supported_os}" \ - || is_element_in_list "all" "${pa_supported_os}"; then - # shellcheck disable=SC2034 - pa_do_nothing=true - else - _cleanup_local_vars - continue - fi - - # skip if invalid collector - if [ "${pa_collector}" != "command" ] \ - && [ "${pa_collector}" != "file" ] \ - && [ "${pa_collector}" != "find" ] \ - && [ "${pa_collector}" != "hash" ] \ - && [ "${pa_collector}" != "stat" ]; then - _cleanup_local_vars - continue - fi - - # path, command or foreach contains %user% and/or %user_home% - # the same collector needs to be run for each %user% and/or %user_home% - if echo "${pa_path}" | grep -q -E "%user%" 2>/dev/null \ - || echo "${pa_command}" | grep -q -E "%user%" 2>/dev/null \ - || echo "${pa_foreach}" | grep -q -E "%user%" 2>/dev/null \ - || echo "${pa_path}" | grep -q -E "%user_home%" 2>/dev/null \ - || echo "${pa_command}" | grep -q -E "%user_home%" 2>/dev/null \ - || echo "${pa_foreach}" | grep -q -E "%user_home%" 2>/dev/null; then - - # loop through users - pa_user_home_list="${USER_HOME_LIST}" - ${pa_exclude_nologin_users} && pa_user_home_list="${VALID_SHELL_ONLY_USER_HOME_LIST}" - echo "${pa_user_home_list}" \ - | while read pa_line || [ -n "${pa_line}" ]; do - pa_user=`echo 
"${pa_line}" | awk -F":" '{ print $1 }'` - pa_home=`echo "${pa_line}" | awk -F":" '{ print $2 }'` - - # replace %user% and %user_home% in path - pa_new_path=`echo "${pa_path}" \ - | sed -e "s:%user%:${pa_user}:g" \ - | sed -e "s:%user_home%:${pa_home}:g"` - - # replace %user% and %user_home% in command - pa_new_command=`echo "${pa_command}" \ - | sed -e "s:%user%:${pa_user}:g" \ - | sed -e "s:%user_home%:${pa_home}:g"` - - # replace %user% and %user_home% in foreach - pa_new_foreach=`echo "${pa_foreach}" \ - | sed -e "s:%user%:${pa_user}:g" \ - | sed -e "s:%user_home%:${pa_home}:g"` - - pa_new_max_depth="${pa_max_depth}" - # if home directory is / (root in some systems), - # maxdepth will be set to 2 - if [ "${pa_new_path}" = "${MOUNT_POINT}" ]; then - pa_new_max_depth=2 - fi - - # replace %user% and %user_home% in output_directory - pa_new_output_directory=`echo "${pa_output_directory}" \ - | sed -e "s:%user%:${pa_user}:g" \ - | sed -e "s:%user_home%:${pa_home}:g"` - - # replace %user% and %user_home% in output_file - pa_new_output_file=`echo "${pa_output_file}" \ - | sed -e "s:%user%:${pa_user}:g" \ - | sed -e "s:%user_home%:${pa_home}:g"` - - # replace %user% and %user_home% in stderr_output_file - pa_new_stderr_output_file=`echo "${pa_stderr_output_file}" \ - | sed -e "s:%user%:${pa_user}:g" \ - | sed -e "s:%user_home%:${pa_home}:g"` - - if [ "${pa_collector}" = "command" ]; then - command_collector \ - "${pa_new_foreach}" \ - "${pa_new_command}" \ - "${pa_root_output_directory}" \ - "${pa_new_output_directory}" \ - "${pa_new_output_file}" \ - "${pa_new_stderr_output_file}" \ - "${pa_compress_output_file}" - elif [ "${pa_collector}" = "file" ]; then - file_collector \ - "${pa_new_path}" \ - "${pa_is_file_list}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_new_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - 
"${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - ".files.tmp" - elif [ "${pa_collector}" = "find" ]; then - find_collector \ - "${pa_new_path}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_new_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - "${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - "${pa_new_output_directory}" \ - "${pa_new_output_file}" \ - "${pa_new_stderr_output_file}" - elif [ "${pa_collector}" = "hash" ]; then - hash_collector \ - "${pa_new_path}" \ - "${pa_is_file_list}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_new_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - "${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - "${pa_new_output_directory}" \ - "${pa_new_output_file}" \ - "${pa_new_stderr_output_file}" - elif [ "${pa_collector}" = "stat" ]; then - stat_collector \ - "${pa_new_path}" \ - "${pa_is_file_list}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_new_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - "${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - "${pa_new_output_directory}" \ - "${pa_new_output_file}" \ - "${pa_new_stderr_output_file}" - fi - done - - else - - if [ "${pa_collector}" = "command" ]; then - command_collector \ - "${pa_foreach}" \ - "${pa_command}" \ - "${pa_root_output_directory}" \ - "${pa_output_directory}" \ - "${pa_output_file}" \ - "${pa_stderr_output_file}" \ - "${pa_compress_output_file}" - elif [ "${pa_collector}" 
= "file" ]; then - file_collector \ - "${pa_path}" \ - "${pa_is_file_list}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - "${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - ".files.tmp" - elif [ "${pa_collector}" = "find" ]; then - find_collector \ - "${pa_path}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - "${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - "${pa_output_directory}" \ - "${pa_output_file}" \ - "${pa_stderr_output_file}" - elif [ "${pa_collector}" = "hash" ]; then - hash_collector \ - "${pa_path}" \ - "${pa_is_file_list}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - "${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - "${pa_output_directory}" \ - "${pa_output_file}" \ - "${pa_stderr_output_file}" - elif [ "${pa_collector}" = "stat" ]; then - stat_collector \ - "${pa_path}" \ - "${pa_is_file_list}" \ - "${pa_path_pattern}" \ - "${pa_name_pattern}" \ - "${pa_exclude_path_pattern}" \ - "${pa_exclude_name_pattern}" \ - "${pa_exclude_file_system}" \ - "${pa_max_depth}" \ - "${pa_file_type}" \ - "${pa_min_file_size}" \ - "${pa_max_file_size}" \ - "${pa_permissions}" \ - "${pa_ignore_date_range}" \ - "${pa_root_output_directory}" \ - "${pa_output_directory}" \ - "${pa_output_file}" \ - "${pa_stderr_output_file}" - fi - - fi - - _cleanup_local_vars - ;; - 
esac - - done - -} \ No newline at end of file diff --git a/lib/parse_command_line_arguments.sh b/lib/parse_command_line_arguments.sh new file mode 100644 index 00000000..d3ef306d --- /dev/null +++ b/lib/parse_command_line_arguments.sh @@ -0,0 +1,402 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Parse command line arguments. +# $@: all parameters passed to the script +# Returns: +# boolean: true on success +# false on fail +_parse_command_line_arguments() +{ + while [ ${#} -gt 0 ]; do + case "${1}" in + # optional arguments + "-h"|"--help") + _usage + _exit_success + ;; + "-v"|"--verbose") + __UAC_VERBOSE_MODE=true + ;; + "--debug") + __UAC_DEBUG_MODE=true + ;; + "--trace") + __UAC_TRACE_MODE=true + ;; + "-V"|"--version") + printf "UAC (Unix-like Artifacts Collector) %s\n" "${__UAC_VERSION}" + _exit_success + ;; + # profiling arguments + "-p"|"--profile") + if [ -n "${2:-}" ]; then + # print available profiles + if [ "${2}" = "list" ]; then + _list_profiles "${__UAC_DIR}/profiles" || return 1 + _exit_success + fi + if [ -f "${2}" ]; then + __pc_profile="${2}" + else + # get profile file based on the profile name + __pc_profile=`_get_profile_by_name "${2}" "${__UAC_DIR}/profiles"` + if [ -z "${__pc_profile}" ]; then + _error_msg "profile not found '${2}'" + return 1 + fi + fi + # check whether profile is valid + _validate_profile "${__pc_profile}" "${__UAC_DIR}/artifacts" || return 1 + + __pc_new_artifacts=`_parse_profile "${__pc_profile}" 2>/dev/null` + __UAC_ARTIFACT_LIST="${__UAC_ARTIFACT_LIST}${__UAC_ARTIFACT_LIST:+,}${__pc_new_artifacts}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." 
+ return 1 + fi + ;; + "-a"|"--artifacts") + if [ -n "${2:-}" ]; then + # print available artifacts + if [ "${2}" = "list" ]; then + _list_artifacts "${__UAC_DIR}/artifacts" "${3:-}" || return 1 + _exit_success + fi + __UAC_ARTIFACT_LIST="${__UAC_ARTIFACT_LIST}${__UAC_ARTIFACT_LIST:+,}${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + # output arguments + "-o"|"--output-base-name") + if [ -n "${2:-}" ]; then + __UAC_OUTPUT_BASE_NAME="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "-f"|"--output-format") + if [ -n "${2:-}" ]; then + __UAC_OUTPUT_FORMAT="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "-P"|"--output-password") + if [ -n "${2:-}" ]; then + __UAC_OUTPUT_PASSWORD="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + # collection arguments + "-c"|"--config") + if [ -n "${2:-}" ]; then + __UAC_CONFIG_FILE="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "-m"|"--mount-point") + if [ -n "${2:-}" ]; then + __UAC_MOUNT_POINT="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "-s"|"--operating-system") + if [ -n "${2:-}" ]; then + __UAC_OPERATING_SYSTEM="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." 
+ return 1 + fi + ;; + "-H"|"--hash-collected") + __UAC_HASH_COLLECTED=true + ;; + "-t"|"--max-threads") + if [ -n "${2:-}" ]; then + if [ "${2}" = "list" ]; then + __ua_nproc=`_get_nproc` + printf "Number of processing units: %s\n" "${__ua_nproc}" + _exit_success + fi + __UAC_MAX_THREADS="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "-u"|"--run-as-non-root") + __UAC_RUN_AS_NON_ROOT=true + ;; + "--hostname") + if [ -n "${2:-}" ]; then + __UAC_HOSTNAME="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--temp-dir") + if [ -n "${2:-}" ]; then + __UAC_TEMP_DIR="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + # filter arguments + "--start-date") + if [ -n "${2:-}" ]; then + __UAC_START_DATE="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--end-date") + if [ -n "${2:-}" ]; then + __UAC_END_DATE="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + # informational arguments + "--case-number") + if [ -n "${2:-}" ]; then + __UAC_CASE_NUMBER="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--description") + if [ -n "${2:-}" ]; then + __UAC_EVIDENCE_DESCRIPTION="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--evidence-number") + if [ -n "${2:-}" ]; then + __UAC_EVIDENCE_NUMBER="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." 
+ return 1 + fi + ;; + "--examiner") + if [ -n "${2:-}" ]; then + __UAC_EXAMINER="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--notes") + if [ -n "${2:-}" ]; then + __UAC_EVIDENCE_NOTES="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + # remote transfer arguments + "--sftp") + if [ -n "${2:-}" ]; then + __UAC_SFTP="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--sftp-port") + if [ -n "${2:-}" ]; then + __UAC_SFTP_PORT="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--sftp-identity-file") + if [ -n "${2:-}" ]; then + __UAC_SFTP_IDENTITY_FILE="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--sftp-ssh-options") + if [ -n "${2:-}" ]; then + __UAC_SFTP_SSH_OPTIONS="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--s3-provider") + if [ -n "${2:-}" ]; then + __UAC_S3_PROVIDER="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--s3-region") + if [ -n "${2:-}" ]; then + __UAC_S3_REGION="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--s3-bucket") + if [ -n "${2:-}" ]; then + __UAC_S3_BUCKET="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." 
+ return 1 + fi + ;; + "--s3-access-key") + if [ -n "${2:-}" ]; then + __UAC_S3_ACCESS_KEY="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--s3-secret-key") + if [ -n "${2:-}" ]; then + __UAC_S3_SECRET_KEY="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--s3-token") + if [ -n "${2:-}" ]; then + __UAC_S3_TOKEN="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--aws-s3-presigned-url") + if [ -n "${2:-}" ]; then + __UAC_AWS_S3_PRESIGNED_URL="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--aws-s3-presigned-url-log-file") + if [ -n "${2:-}" ]; then + __UAC_AWS_S3_PRESIGNED_URL_LOG_FILE="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--azure-storage-sas-url") + if [ -n "${2:-}" ]; then + __UAC_AZURE_STORAGE_SAS_URL="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--azure-storage-sas-url-log-file") + if [ -n "${2:-}" ]; then + __UAC_AZURE_STORAGE_SAS_URL_LOG_FILE="${2}" + shift + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + "--delete-local-on-successful-transfer") + __UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER=true + ;; + # validation arguments + "--validate-artifact") + if [ -n "${2:-}" ]; then + printf "Validating artifact %s\n" "${2}" + _validate_artifact "${2}" || return 1 + _exit_success "artifact successfully validated." + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." 
+ return 1 + fi + ;; + "--validate-profile") + if [ -n "${2:-}" ]; then + printf "Validating profile %s\n" "${2}" + _validate_profile "${2}" || return 1 + _exit_success "profile successfully validated." + else + _error_msg "option '${1}' requires an argument.\nTry 'uac --help' for more information." + return 1 + fi + ;; + # invalid arguments + -*) + _error_msg "invalid option '${1}'\nTry 'uac --help' for more information." + return 1 + ;; + # positional arguments + *) + if [ -z "${__UAC_DESTINATION_DIR}" ]; then + __UAC_DESTINATION_DIR="${1}" + else + _error_msg "invalid option '${1}'\nTry 'uac --help' for more information." + return 1 + fi + ;; + esac + shift + done +} diff --git a/lib/parse_profile.sh b/lib/parse_profile.sh new file mode 100644 index 00000000..e49309d9 --- /dev/null +++ b/lib/parse_profile.sh @@ -0,0 +1,30 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Parse a profile to create a comma separated list of artifacts. +# Arguments: +# string profile: full path to the profile file +# Returns: +# string: comma separated list of artifacts +_parse_profile() +{ + __pp_profile="${1:-}" + + # remove lines starting with # (comments) + # remove inline comments + # remove blank lines + # remove leading and trailing space characters + # remove lines that do not start with a dash (-) + # remove leading dash (-) + # remove trailing comma + sed -e 's|#.*$||g' \ + -e '/^ *$/d' \ + -e '/^$/d' \ + -e 's|^ *||' \ + -e 's| *$||' \ + -e '/^[^-]/d' \ + -e 's|^- *||' <"${__pp_profile}" \ + | awk 'BEGIN {ORS=","} {print $0}' \ + | sed -e 's|,$||' + +} \ No newline at end of file diff --git a/lib/presigned_url_transfer.sh b/lib/presigned_url_transfer.sh new file mode 100644 index 00000000..e74757db --- /dev/null +++ b/lib/presigned_url_transfer.sh @@ -0,0 +1,44 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Transfer file to a presigned URL. 
+# Arguments: +# string source: source file path +# string url: presigned URL +# Returns: +# boolean: true on success +# false on fail +presigned_url_transfer() +{ + __pu_source="${1:-}" + __pu_url="${2:-}" + + if command_exists "curl"; then + __pu_command="curl \ +--fail \ +--insecure \ +--request PUT \ +--header \"x-ms-blob-type: BlockBlob\" \ +--header \"Content-Type: application/octet-stream\" \ +--header \"Accept: */*\" \ +--header \"Expect: 100-continue\" \ +--upload-file \"${__pu_source}\" \ +\"${__pu_url}\"" + else + __pu_command="wget \ +-O - \ +--quiet \ +--no-check-certificate \ +--method PUT \ +--header \"x-ms-blob-type: BlockBlob\" \ +--header \"Content-Type: application/octet-stream\" \ +--header \"Accept: */*\" \ +--header \"Expect: 100-continue\" \ +--body-file \"${__pu_source}\" \ +\"${__pu_url}\"" + fi + + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__pu_command}" + _run_command "${__pu_command}" + +} \ No newline at end of file diff --git a/lib/profile_file_to_artifact_list.sh b/lib/profile_file_to_artifact_list.sh deleted file mode 100644 index 589c11ac..00000000 --- a/lib/profile_file_to_artifact_list.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Create a comma separated list of artifacts based on a profile file. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: profile file -# Outputs: -# Comma separated list of artifacts. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -profile_file_to_artifact_list() -{ - pl_profile_file="${1:-}" - - # remove lines starting with # (comments) - # remove inline comments - # remove blank lines - # grep lines starting with " - " - # remove " - " from the beginning of the line - # shellcheck disable=SC2162 - sed -e 's/#.*$//g' -e '/^ *$/d' -e '/^$/d' <"${pl_profile_file}" 2>/dev/null \ - | grep -E " +- +" \ - | sed -e 's: *- *::g' 2>/dev/null \ - | while read pl_line || [ -n "${pl_line}" ]; do - printf %b "${pl_line}," - done - -} \ No newline at end of file diff --git a/lib/remove_non_regular_files.sh b/lib/remove_non_regular_files.sh new file mode 100644 index 00000000..fedd6fa3 --- /dev/null +++ b/lib/remove_non_regular_files.sh @@ -0,0 +1,32 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Remove all entries that are non-regular files. +# Arguments: +# string file: input file +# Returns: +# none +_remove_non_regular_files() +{ + __rn_file="${1:-}" + + if [ ! -f "${__rn_file}" ]; then + _log_msg ERR "_remove_non_regular_files: no such file or directory '${__rn_file}'" + return 1 + fi + + sed 's|.|\\&|g' "${__rn_file}" \ + | xargs "${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}"${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }find \ + >"${__UAC_TEMP_DATA_DIR}/remove_non_regular_files_xargs.tmp" \ + 2>>"${__UAC_TEMP_DATA_DIR}/remove_non_regular_files.stderr.txt" + + # shellcheck disable=SC2162 + while read __rn_line && [ -n "${__rn_line}" ]; do + if [ -f "${__rn_line}" ] && [ ! 
-h "${__rn_line}" ]; then + echo "${__rn_line}" + fi + done <"${__UAC_TEMP_DATA_DIR}/remove_non_regular_files_xargs.tmp" >"${__UAC_TEMP_DATA_DIR}/remove_non_regular_files.tmp" + + sort -u <"${__UAC_TEMP_DATA_DIR}/remove_non_regular_files.tmp" 2>/dev/null | sed -e '/^$/d' 2>/dev/null >"${__rn_file}" + +} diff --git a/lib/remove_temp_data_dir.sh b/lib/remove_temp_data_dir.sh new file mode 100644 index 00000000..62e6475d --- /dev/null +++ b/lib/remove_temp_data_dir.sh @@ -0,0 +1,20 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Remove temporary files and directories used during execution. +# Arguments: +# none +# Returns: +# none +_remove_temp_data_dir() +{ + if ${__UAC_DEBUG_MODE}; then + printf "Temporary data directory not removed '%s'\n" "${__UAC_TEMP_DATA_DIR}" + else + if [ -d "${__UAC_TEMP_DATA_DIR}" ] && printf "%s" "${__UAC_TEMP_DATA_DIR}" | grep -q "uac-data.tmp"; then + rm -rf "${__UAC_TEMP_DATA_DIR}" >/dev/null 2>/dev/null \ + || printf "Cannot remove temporary data directory '%s'.\n" "${__UAC_TEMP_DATA_DIR}" + fi + fi + +} \ No newline at end of file diff --git a/lib/run_command.sh b/lib/run_command.sh new file mode 100644 index 00000000..4d7f6f78 --- /dev/null +++ b/lib/run_command.sh @@ -0,0 +1,45 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Run command. +# Arguments: +# string command: command (including arguments) +# boolean log_stderr: send stderr to uac.log (optional) (default: true) +# Returns: +# integer: command exit code +_run_command() +{ + __rc_command="${1:-}" + __rc_log_stderr="${2:-true}" + + if [ -z "${__rc_command}" ]; then + _log_msg ERR "_run_command: empty command parameter" + return 1 + fi + + if [ ! -d "${__UAC_TEMP_DATA_DIR}" ]; then + return 1 + fi + + __rc_stderr_file="/dev/null" + if ${__rc_log_stderr}; then + __rc_stderr_file="${__UAC_TEMP_DATA_DIR}/run_command.stderr.txt" + fi + + eval "${__rc_command}" \ + 2>"${__rc_stderr_file}" + __rc_exit_code="$?" 
+ + __rc_stderr="" + if [ -s "${__UAC_TEMP_DATA_DIR}/run_command.stderr.txt" ] && ${__rc_log_stderr}; then + __rc_stderr=`awk 'BEGIN {ORS="/n"} {print $0}' "${__UAC_TEMP_DATA_DIR}/run_command.stderr.txt" | sed -e 's|/n$||' 2>/dev/null` + __rc_stderr=" 2> ${__rc_stderr}" + fi + + __rc_command=`echo "${__rc_command}" | awk 'BEGIN {ORS="/n"} {print $0}' | sed -e 's| *| |g' -e 's|/n$||' 2>/dev/null` + _log_msg CMD "${__rc_command}${__rc_stderr}" + + return "${__rc_exit_code}" + +} \ No newline at end of file diff --git a/lib/s3_presigned_url_transfer.sh b/lib/s3_presigned_url_transfer.sh deleted file mode 100644 index 285a3c32..00000000 --- a/lib/s3_presigned_url_transfer.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Transfer file to S3 presigned URL. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: source file -# $2: S3 presigned URL -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -s3_presigned_url_transfer() -{ - pu_source="${1:-}" - pu_s3_presigned_url="${2:-}" - - curl \ - --fail \ - --request PUT \ - --header "Content-Type: application/octet-stream" \ - --header "Accept: */*" \ - --header "Expect: 100-continue" \ - --upload-file "${pu_source}" \ - "${pu_s3_presigned_url}" - -} \ No newline at end of file diff --git a/lib/s3_presigned_url_transfer_test.sh b/lib/s3_presigned_url_transfer_test.sh deleted file mode 100644 index afe96cb0..00000000 --- a/lib/s3_presigned_url_transfer_test.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Test the connectivity to S3 presigned URL. 
-# Globals: -# None -# Requires: -# None -# Arguments: -# $1: S3 presigned URL -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -s3_presigned_url_transfer_test() -{ - pr_s3_presigned_url="${1:-}" - - curl \ - --fail \ - --request PUT \ - --header "Content-Type: application/text" \ - --header "Accept: */*" \ - --header "Expect: 100-continue" \ - --data "Transfer test from UAC" \ - "${pr_s3_presigned_url}" - -} \ No newline at end of file diff --git a/lib/s3_transfer_amazon.sh b/lib/s3_transfer_amazon.sh new file mode 100644 index 00000000..085ba030 --- /dev/null +++ b/lib/s3_transfer_amazon.sh @@ -0,0 +1,46 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Transfer file to Amazon S3. +# Arguments: +# string source: source file or empty for testing connection +# string region: region +# string bucket: bucket name +# string access_key: access key +# string secret_key: secret key +# Returns: +# boolean: true on success +# false on fail +_s3_transfer_amazon() +{ + __s3a_source="${1:-}" + __s3a_region="${2:-us-east-1}" + __s3a_bucket="${3:-}" + __s3a_access_key="${4:-}" + __s3a_secret_key="${5:-}" + __s3a_test_connectivity_mode=false + + if [ -z "${__s3a_source}" ]; then + __s3a_test_connectivity_mode=true + __s3a_source="transfer_test_from_uac.txt" + fi + + __s3a_date=`date "+%a, %d %b %Y %H:%M:%S %z"` + __s3a_content_type="application/octet-stream" + __s3a_host="${__s3a_bucket}.s3.${__s3a_region}.amazonaws.com" + __s3a_string_to_sign="PUT\n\n${__s3a_content_type}\n${__s3a_date}\n/${__s3a_bucket}/${__s3a_source}" + __s3a_signature=`printf "%b" "${__s3a_string_to_sign}" | openssl sha1 -hmac "${__s3a_secret_key}" -binary | openssl base64` + __s3a_authorization="AWS ${__s3a_access_key}:${__s3a_signature}" + __s3a_url="https://${__s3a_host}/${__s3a_source}" + + _http_transfer \ + 
"${__s3a_source}" \ + "${__s3a_url}" \ + "${__s3a_host}" \ + "${__s3a_date}" \ + "${__s3a_content_type}" \ + "${__s3a_authorization}" \ + "${__s3a_test_connectivity_mode}" + +} diff --git a/lib/s3_transfer_google.sh b/lib/s3_transfer_google.sh new file mode 100644 index 00000000..042e2374 --- /dev/null +++ b/lib/s3_transfer_google.sh @@ -0,0 +1,40 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Transfer file to Google S3. +# Arguments: +# string source: source file or empty for testing connection +# string bucket: bucket name +# string token: bearer token +# Returns: +# boolean: true on success +# false on fail +_s3_transfer_google() +{ + __s3g_source="${1:-}" + __s3g_bucket="${2:-}" + __s3g_token="${3:-}" + __s3g_test_connectivity_mode=false + + if [ -z "${__s3g_source}" ]; then + __s3g_test_connectivity_mode=true + __s3g_source="transfer_test_from_uac.txt" + fi + + __s3g_date=`date "+%a, %d %b %Y %H:%M:%S %z"` + __s3g_content_type="application/octet-stream" + __s3g_host="storage.googleapis.com" + __s3g_authorization="Bearer ${__s3g_token}" + __s3g_url="https://${__s3g_host}/${__s3g_bucket}/${__s3g_source}" + + _http_transfer \ + "${__s3g_source}" \ + "${__s3g_url}" \ + "${__s3g_host}" \ + "${__s3g_date}" \ + "${__s3g_content_type}" \ + "${__s3g_authorization}" \ + "${__s3g_test_connectivity_mode}" + +} diff --git a/lib/s3_transfer_ibm.sh b/lib/s3_transfer_ibm.sh new file mode 100644 index 00000000..3eddb12a --- /dev/null +++ b/lib/s3_transfer_ibm.sh @@ -0,0 +1,42 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Transfer file to IBM S3. 
+# Arguments: +# string source: source file or empty for testing connection +# string region: region +# string bucket: bucket name +# string token: bearer token +# Returns: +# boolean: true on success +# false on fail +_s3_transfer_ibm() +{ + __s3i_source="${1:-}" + __s3i_region="${2:-us-south}" + __s3i_bucket="${3:-}" + __s3i_token="${4:-}" + __s3i_test_connectivity_mode=false + + if [ -z "${__s3i_source}" ]; then + __s3i_test_connectivity_mode=true + __s3i_source="transfer_test_from_uac.txt" + fi + + __s3i_date=`date "+%a, %d %b %Y %H:%M:%S %z"` + __s3i_content_type="application/octet-stream" + __s3i_host="s3.${__s3i_region}.cloud-object-storage.appdomain.cloud" + __s3i_authorization="Bearer ${__s3i_token}" + __s3i_url="https://${__s3i_host}/${__s3i_bucket}/${__s3i_source}" + + _http_transfer \ + "${__s3i_source}" \ + "${__s3i_url}" \ + "${__s3i_host}" \ + "${__s3i_date}" \ + "${__s3i_content_type}" \ + "${__s3i_authorization}" \ + "${__s3i_test_connectivity_mode}" + +} diff --git a/lib/sanitize_artifact_list.sh b/lib/sanitize_artifact_list.sh deleted file mode 100644 index 855d9395..00000000 --- a/lib/sanitize_artifact_list.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Sanitize artifact list. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: artifact list -# Outputs: -# Write sanitized artifact list to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -sanitize_artifact_list() -{ - sa_artifact_list="${1:-}" - - # remove ../ - # remove ./ - # replace !/ by ! 
- # remove 'artifacts/' directory - # replace consecutive slashes by one slash - # replace consecutive commas by one comma - # remove leading and trailing comma - echo "${sa_artifact_list}" \ - | sed -e 's:\.\./::g' \ - -e 's:\./::g' \ - -e 's:!/:!:g' \ - -e 's:artifacts/::g' \ - -e 's://*:/:g' \ - -e 's:,,*:,:g' \ - -e 's:^,::' \ - -e 's:,$::' \ - 2>/dev/null - -} \ No newline at end of file diff --git a/lib/sanitize_filename.sh b/lib/sanitize_filename.sh deleted file mode 100644 index dcec683c..00000000 --- a/lib/sanitize_filename.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Sanitize filename. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: filename -# Outputs: -# Write sanitized filename to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -sanitize_filename() -{ - sf_filename="${1:-}" - - # remove leading spaces - # remove trailing spaces - # replace consecutive slashes by one slash - # remove leading / - # replace consecutive slashes by one underscore (_) - echo "${sf_filename}" \ - | sed -e 's:^ *::' \ - -e 's: *$::' \ - -e 's://*:/:g' \ - -e 's:^/::' \ - -e 's://*:_:g' \ - 2>/dev/null - -} \ No newline at end of file diff --git a/lib/sanitize_output_directory.sh b/lib/sanitize_output_directory.sh new file mode 100644 index 00000000..bd156657 --- /dev/null +++ b/lib/sanitize_output_directory.sh @@ -0,0 +1,34 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Sanitize output directory path. +# Arguments: +# string path: path +# Returns: +# string: sanitized path +_sanitize_output_directory() +{ + __sd_path="${1:-}" + + # remove leading and trailing spaces + # replace consecutive slashes by one slash + # replace .. by . + # replace invalid characters (Windows only) \ * ? 
: " < > by underscore + # remove trailing slash + # add slash if empty path + echo "${__sd_path}" \ + | sed -e 's|^ *||' \ + -e 's| *$||' \ + -e 's|\.\.|\.|g' \ + -e 's|\\|_|g' \ + -e 's|*|_|g' \ + -e 's|?|_|g' \ + -e 's|:|_|g' \ + -e 's|"|_|g' \ + -e 's|<|_|g' \ + -e 's|>|_|g' \ + -e 's|//*|/|g' \ + -e 's|/$||' \ + -e 's|^$|/|' + +} diff --git a/lib/sanitize_output_file.sh b/lib/sanitize_output_file.sh new file mode 100644 index 00000000..84303501 --- /dev/null +++ b/lib/sanitize_output_file.sh @@ -0,0 +1,33 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Sanitize output filename. +# Arguments: +# string filename: filename +# Returns: +# string: sanitized filename +_sanitize_output_file() +{ + __sf_filename="${1:-}" + + # remove leading and trailing spaces + # remove leading slashes + # remove trailing slashes + # replace slash by underscore + # replace invalid characters (Windows only) \ * ? : " < > by underscore + # add underscore if empty filename + echo "${__sf_filename}" \ + | sed -e 's|^ *||' \ + -e 's| *$||' \ + -e 's|^//*||' \ + -e 's|//*$||' \ + -e 's|//*|_|g' \ + -e 's|\\|_|g' \ + -e 's|*|_|g' \ + -e 's|?|_|g' \ + -e 's|:|_|g' \ + -e 's|"|_|g' \ + -e 's|<|_|g' \ + -e 's|>|_|g' \ + -e 's|^$|_|' +} diff --git a/lib/sanitize_path.sh b/lib/sanitize_path.sh index c322ac87..373f0d2b 100644 --- a/lib/sanitize_path.sh +++ b/lib/sanitize_path.sh @@ -1,30 +1,26 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### # Sanitize path. -# Globals: -# None -# Requires: -# None # Arguments: -# $1: path -# Outputs: -# Write sanitized path to stdout. -# Write / to stdout if path is empty. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -sanitize_path() +# string path: path +# Returns: +# string: sanitized path +_sanitize_path() { - sp_path="${1:-}" + __sp_path="${1:-}" - # remove leading spaces - # remove trailing spaces + # remove leading and trailing spaces # replace consecutive slashes by one slash + # replace .. by . # remove trailing slash - echo "${sp_path}" \ - | sed -e 's:^ *::' -e 's: *$::' -e 's://*:/:g' -e 's:/$::' -e 's:^$:/:' - -} \ No newline at end of file + # add slash if empty path + echo "${__sp_path}" \ + | sed -e 's|^ *||' \ + -e 's| *$||' \ + -e 's|\.\.|\.|g' \ + -e 's|//*|/|g' \ + -e 's|/$||' \ + -e 's|^$|/|' + +} diff --git a/lib/setup_tools.sh b/lib/setup_tools.sh new file mode 100644 index 00000000..73b403e9 --- /dev/null +++ b/lib/setup_tools.sh @@ -0,0 +1,157 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006 + +# Setup required tools and parameters. +# Arguments: +# none +# Returns: +# none +_setup_tools() +{ + __UAC_TOOL_FIND_OPERATORS_SUPPORT=false + __UAC_TOOL_FIND_PATH_SUPPORT=false + __UAC_TOOL_FIND_PRUNE_SUPPORT=false + __UAC_TOOL_FIND_SIZE_SUPPORT=false + __UAC_TOOL_FIND_MAXDEPTH_SUPPORT=false + __UAC_TOOL_FIND_PERM_SUPPORT=false + __UAC_TOOL_FIND_TYPE_SUPPORT=false + __UAC_TOOL_FIND_MTIME_SUPPORT=false + __UAC_TOOL_FIND_ATIME_SUPPORT=false + __UAC_TOOL_FIND_CTIME_SUPPORT=false + __UAC_TOOL_FIND_PRINT0_SUPPORT=false + __UAC_TOOL_XARGS_NULL_DELIMITER_SUPPORT=false + __UAC_TOOL_XARGS_MAX_PROCS_PARAM="" + __UAC_TOOL_STAT_BIN="" + __UAC_TOOL_STAT_PARAMS="" + __UAC_TOOL_STAT_BTIME=false + __UAC_TOOL_TAR_NO_FROM_FILE_SUPPORT=false + __UAC_TOOL_MD5_BIN="" + __UAC_TOOL_SHA1_BIN="" + __UAC_TOOL_SHA256_BIN="" + + # check which options are supported by 'find' + if find "${__UAC_DIR}" \( -name "uac" -o -name "uac.conf" \) -print >/dev/null; then + __UAC_TOOL_FIND_OPERATORS_SUPPORT=true + fi + if find "${__UAC_DIR}" -path "${__UAC_DIR}" -print >/dev/null; then + 
__UAC_TOOL_FIND_PATH_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -name "uac" -prune -o -print >/dev/null; then + __UAC_TOOL_FIND_PRUNE_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -size +1c -print >/dev/null; then + __UAC_TOOL_FIND_SIZE_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -maxdepth 1 -print >/dev/null; then + __UAC_TOOL_FIND_MAXDEPTH_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -perm 755 -print >/dev/null; then + __UAC_TOOL_FIND_PERM_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -type f -print >/dev/null; then + __UAC_TOOL_FIND_TYPE_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -mtime +1 -print >/dev/null; then + __UAC_TOOL_FIND_MTIME_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -atime +1 -print >/dev/null; then + __UAC_TOOL_FIND_ATIME_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -ctime +1 -print >/dev/null; then + __UAC_TOOL_FIND_CTIME_SUPPORT=true + fi + if find "${__UAC_DIR}/uac" -print0 >/dev/null; then + __UAC_TOOL_FIND_PRINT0_SUPPORT=true + fi + + if echo "uac" | xargs -0 echo >/dev/null; then + __UAC_TOOL_XARGS_NULL_DELIMITER_SUPPORT=true + fi + if echo "uac" | xargs -P 2 echo >/dev/null; then + __UAC_TOOL_XARGS_MAX_PROCS_PARAM="-P ${__UAC_MAX_THREADS}" + fi + + # check which stat tool and options are available for the target system + if statx "${__UAC_MOUNT_POINT}" | grep -q -E "\|[0-9]+\|[0-9]+\|[0-9]+$"; then + __UAC_TOOL_STAT_BIN="statx" + __UAC_TOOL_STAT_PARAMS="" + __UAC_TOOL_STAT_BTIME=true + elif stat -c "0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W" "${__UAC_MOUNT_POINT}" | grep -q -E "\|[0-9]+\|[0-9]+\|[0-9]+\|"; then + __UAC_TOOL_STAT_BIN="stat" + __UAC_TOOL_STAT_PARAMS="-c \"0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W\"" + if stat -c "0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W" "${__UAC_MOUNT_POINT}" | grep -q -E "\|[0-9]+\|[0-9]+\|[0-9][0-9]+$"; then + __UAC_TOOL_STAT_BTIME=true + fi + elif stat -f "0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B" "${__UAC_MOUNT_POINT}" | grep -q -E "\|[0-9]+\|[0-9]+\|[0-9]+\|"; then + __UAC_TOOL_STAT_BIN="stat" + 
__UAC_TOOL_STAT_PARAMS="-f \"0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B\"" + if stat -f "0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B" "${__UAC_MOUNT_POINT}" | grep -q -E "\|[0-9]+\|[0-9]+\|[0-9][0-9]+$"; then + __UAC_TOOL_STAT_BTIME=true + fi + elif stat_pl "${__UAC_MOUNT_POINT}" | grep -q -E "\|[0-9]+\|[0-9]+\|[0-9]+$"; then + __UAC_TOOL_STAT_BIN="stat_pl" + __UAC_TOOL_STAT_PARAMS="" + fi + + case "${__UAC_OPERATING_SYSTEM}" in + "esxi") + __UAC_TOOL_TAR_NO_FROM_FILE_SUPPORT=true + ;; + "linux") + # some old tar/busybox versions do not support -T, so a different + # solution is required to package and compress data + # checking if tar can create package getting names from file + echo "${__UAC_DIR}/uac" >"${__UAC_TEMP_DATA_DIR}/tar_gz_data.tmp" 2>/dev/null + if tar -T "${__UAC_TEMP_DATA_DIR}/tar_gz_data.tmp" -cf "${__UAC_TEMP_DATA_DIR}/tar_gz_data.tar" 2>/dev/null; then + true + else + __UAC_TOOL_TAR_NO_FROM_FILE_SUPPORT=true + fi + ;; + esac + + # check for available MD5 hashing tools + if command_exists "md5sum"; then + __UAC_TOOL_MD5_BIN="md5sum" + elif command_exists "md5"; then + __UAC_TOOL_MD5_BIN="md5" + elif echo "uac" | digest -v -a md5 >/dev/null; then + __UAC_TOOL_MD5_BIN="digest -v -a md5" + elif csum -h MD5 "${__UAC_DIR}/uac" >/dev/null; then + __UAC_TOOL_MD5_BIN="csum -h MD5" + elif echo "uac" | openssl dgst -md5 >/dev/null; then + __UAC_TOOL_MD5_BIN="openssl dgst -md5" + fi + + # check for available SHA1 hashing tools + if command_exists "sha1sum"; then + __UAC_TOOL_SHA1_BIN="sha1sum" + elif echo "uac" | shasum -a 1 >/dev/null; then + __UAC_TOOL_SHA1_BIN="shasum -a 1" + elif command_exists "sha1"; then + __UAC_TOOL_SHA1_BIN="sha1" + elif echo "uac" | digest -v -a sha1 >/dev/null; then + __UAC_TOOL_SHA1_BIN="digest -v -a sha1" + elif csum -h SHA1 "${__UAC_DIR}/uac" >/dev/null; then + __UAC_TOOL_SHA1_BIN="csum -h SHA1" + elif echo "uac" | openssl dgst -sha1 >/dev/null; then + __UAC_TOOL_SHA1_BIN="openssl dgst -sha1" + fi + + # check for available SHA256 hashing tools 
+ if command_exists "sha256sum"; then + __UAC_TOOL_SHA256_BIN="sha256sum" + elif echo "uac" | shasum -a 256 >/dev/null; then + __UAC_TOOL_SHA256_BIN="shasum -a 256" + elif command_exists "sha256"; then + __UAC_TOOL_SHA256_BIN="sha256" + elif echo "uac" | digest -v -a sha256 >/dev/null; then + __UAC_TOOL_SHA256_BIN="digest -v -a sha256" + elif csum -h SHA256 "${__UAC_DIR}/uac" >/dev/null; then + __UAC_TOOL_SHA256_BIN="csum -h SHA256" + elif echo "uac" | openssl dgst -sha256 >/dev/null; then + __UAC_TOOL_SHA256_BIN="openssl dgst -sha256" + fi + +} diff --git a/lib/sftp_transfer.sh b/lib/sftp_transfer.sh index 6d78b6f0..4c87f68d 100644 --- a/lib/sftp_transfer.sh +++ b/lib/sftp_transfer.sh @@ -1,47 +1,48 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### # Transfer file to SFTP server. -# Globals: -# None -# Requires: -# None # Arguments: -# $1: source file or directory -# $2: remote destination -# $3: remote port (default: 22) -# $4: identity file -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -sftp_transfer() +# string source: source file name +# leave it blank for connection test only +# string destination: destination in the form [user@]host:[path] +# integer port: port number +# string identity_file: identity file path +# string ssh_options: comma separated ssh options +# Returns: +# boolean: true on success +# false on fail +_sftp_transfer() { - sr_source="${1:-}" - sr_destination="${2:-}" - sr_port="${3:-22}" - sr_identity_file="${4:-}" + __sr_source="${1:-}" + __sr_destination="${2:-}" + __sr_port="${3:-22}" + __sr_identity_file="${4:-}" + __sr_ssh_options="${5:-}" - if [ -n "${sr_identity_file}" ]; then - sftp -r \ - -P "${sr_port}" \ - -o StrictHostKeyChecking=no \ - -o UserKnownHostsFile=/dev/null \ - -i "${sr_identity_file}" \ - "${sr_destination}" >/dev/null << EOF -mput "${sr_source}" -EOF + __sr_port_param="-P ${__sr_port}" + __sr_ssh_options_param="-o StrictHostKeyChecking=no,UserKnownHostsFile=/dev/null${__sr_ssh_options:+,}${__sr_ssh_options}" + __sr_identity_file_param="${__sr_identity_file:+-i }\"${__sr_identity_file}\"" + + if [ -n "${__sr_source}" ]; then + __sr_command="sftp -r \ +${__sr_port_param} \ +${__sr_identity_file_param} \ +${__sr_ssh_options_param} \ +\"${__sr_destination}\" >/dev/null << EOF +mput \"${__sr_source}\" +EOF" else - sftp -r \ - -P "${sr_port}" \ - -o StrictHostKeyChecking=no \ - -o UserKnownHostsFile=/dev/null \ - "${sr_destination}" >/dev/null << EOF -mput "${sr_source}" -EOF - fi + __sr_command="sftp -r \ +${__sr_port_param} \ +${__sr_identity_file_param} \ +${__sr_ssh_options_param} \ +\"${__sr_destination}\" >/dev/null << EOF +pwd +EOF" + fi + + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__sr_command}" + eval "${__sr_command}" >/dev/null } \ No newline at end of file diff --git a/lib/sftp_transfer_test.sh b/lib/sftp_transfer_test.sh deleted file mode 100644 index de6cc02d..00000000 --- a/lib/sftp_transfer_test.sh +++ 
/dev/null @@ -1,43 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 - -############################################################################### -# Test the connectivity to SFTP server. -# Globals: -# None -# Requires: -# None -# Arguments: -# $1: remote destination -# $2: remote port (default: 22) -# $3: identity file -# Outputs: -# None. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -sftp_transfer_test() -{ - sf_destination="${1:-}" - sf_port="${2:-22}" - sf_identity_file="${3:-}" - - if [ -n "${sf_identity_file}" ]; then - sftp -P "${sf_port}" \ - -o StrictHostKeyChecking=no \ - -o UserKnownHostsFile=/dev/null \ - -i "${sf_identity_file}" \ - "${sf_destination}" >/dev/null << EOF -pwd -EOF - else - sftp -P "${sf_port}" \ - -o StrictHostKeyChecking=no \ - -o UserKnownHostsFile=/dev/null \ - "${sf_destination}" >/dev/null << EOF -pwd -EOF - fi - -} \ No newline at end of file diff --git a/lib/sort_uniq_file.sh b/lib/sort_uniq_file.sh index 69a08c10..35e5be53 100644 --- a/lib/sort_uniq_file.sh +++ b/lib/sort_uniq_file.sh @@ -1,34 +1,22 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### # Sort and uniq files. -# Globals: -# None -# Requires: -# None # Arguments: -# $1: file -# Outputs: -# Sorted and unique file. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. 
-############################################################################### -sort_uniq_file() +# string file: input file +# Returns: +# none +_sort_uniq_file() { - su_file="${1:-}" + __su_file="${1:-}" - # sort and uniq file, and store data into a temporary file - if eval "sort -u \"${su_file}\"" >"${su_file}.sort_uniq_file.tmp"; then - # remove original file - if eval "rm -f \"${su_file}\""; then - # rename temporary to original file - mv "${su_file}.sort_uniq_file.tmp" "${su_file}" - fi - else - printf %b "sort_uniq_file: no such file or directory: '${su_file}'\n" >&2 - return 2 + if [ ! -f "${__su_file}" ]; then + _log_msg ERR "_sort_uniq_file: no such file or directory '${__su_file}'" + return 1 fi -} \ No newline at end of file + # sort, uniq and remove empty lines + sort -u <"${__su_file}" 2>/dev/null | sed -e '/^$/d' 2>/dev/null >"${__UAC_TEMP_DATA_DIR}/sort_uniq_file.tmp" + cp "${__UAC_TEMP_DATA_DIR}/sort_uniq_file.tmp" "${__su_file}" 2>/dev/null + +} diff --git a/lib/stat_collector.sh b/lib/stat_collector.sh deleted file mode 100644 index 5ec62285..00000000 --- a/lib/stat_collector.sh +++ /dev/null @@ -1,676 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Collector that searches and stat files. 
-# Globals: -# GLOBAL_EXCLUDE_MOUNT_POINT -# GLOBAL_EXCLUDE_NAME_PATTERN -# GLOBAL_EXCLUDE_PATH_PATTERN -# MOUNT_POINT -# START_DATE_DAYS -# END_DATE_DAYS -# STATX_TOOL_AVAILABLE -# STAT_BTIME_SUPPORT -# STAT_TOOL_AVAILABLE -# TEMP_DATA_DIR -# XARGS_REPLACE_STRING_SUPPORT -# Requires: -# find_wrapper -# get_mount_point_by_file_system -# sanitize_filename -# sanitize_path -# sort_uniq_file -# Arguments: -# $1: path -# $2: is file list (optional) (default: false) -# $3: path pattern (optional) -# $4: name pattern (optional) -# $5: exclude path pattern (optional) -# $6: exclude name pattern (optional) -# $7: exclude file system (optional) -# $8: max depth (optional) -# $9: file type (optional) (default: f) -# $10: min file size (optional) -# $11: max file size (optional) -# $12: permissions (optional) -# $13: ignore date range (optional) (default: false) -# $14: root output directory -# $15: output directory (optional) -# $16: output file -# $17: stderr output file (optional) -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -stat_collector() -{ - # some systems such as Solaris 10 do not support more than 9 parameters - # on functions, not even using curly braces {} e.g. 
${10} - # so the solution was to use shift - sc_path="${1:-}" - shift - sc_is_file_list="${1:-false}" - shift - sc_path_pattern="${1:-}" - shift - sc_name_pattern="${1:-}" - shift - sc_exclude_path_pattern="${1:-}" - shift - sc_exclude_name_pattern="${1:-}" - shift - sc_exclude_file_system="${1:-}" - shift - sc_max_depth="${1:-}" - shift - sc_file_type="${1:-}" - shift - sc_min_file_size="${1:-}" - shift - sc_max_file_size="${1:-}" - shift - sc_permissions="${1:-}" - shift - sc_ignore_date_range="${1:-false}" - shift - sc_root_output_directory="${1:-}" - shift - sc_output_directory="${1:-}" - shift - sc_output_file="${1:-}" - shift - sc_stderr_output_file="${1:-}" - - # function that runs 'stat' tool - _stat() - { - _sc_path="${1:-}" - shift - _sc_is_file_list="${1:-false}" - shift - _sc_path_pattern="${1:-}" - shift - _sc_name_pattern="${1:-}" - shift - _sc_exclude_path_pattern="${1:-}" - shift - _sc_exclude_name_pattern="${1:-}" - shift - _sc_max_depth="${1:-}" - shift - _sc_file_type="${1:-}" - shift - _sc_min_file_size="${1:-}" - shift - _sc_max_file_size="${1:-}" - shift - _sc_permissions="${1:-}" - shift - _sc_date_range_start_days="${1:-}" - shift - _sc_date_range_end_days="${1:-}" - shift - _sc_output_directory="${1:-}" - shift - _sc_output_file="${1:-}" - - # 'xargs' performance is much better than 'while' loop - if ${XARGS_REPLACE_STRING_SUPPORT}; then - case "${OPERATING_SYSTEM}" in - "freebsd"|"macos"|"netbsd"|"netscaler"|"openbsd") - if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} stat -f \"0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B\" \"{}\"" - sort -u "${_sc_path}" \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} stat -f "0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B" "{}" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - 
"${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} stat -f "0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B" "{}" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} stat -f \"0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B\" \"{}\"" - fi - ;; - "android"|"esxi"|"linux"|"solaris") - # %N returns quoted file names, so single and double quotes, and back - # apostrophe needs to be removed using 'sed' - - # also, some old 'stat' versions return the question mark '?' - # character if %W was not able to proper get the btime. In this case, - # the question mark will be replaced by a zero character - if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} stat -c \"0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W\" \"{}\"" - sort -u "${_sc_path}" \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} stat -c "0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W" "{}" \ - | sed -e "s:|':|:g" \ - -e "s:'|:|:g" \ - -e "s:' -> ': -> :" \ - -e 's:|":|:g' \ - -e 's:"|:|:g' \ - -e 's:" -> ": -> :' \ - -e "s:\`::g" \ - -e "s:|.$:|0:" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - "${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} stat -c "0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W" "{}" \ - | sed -e "s:|':|:g" \ - -e "s:'|:|:g" \ - -e "s:' -> 
': -> :" \ - -e 's:|":|:g' \ - -e 's:"|:|:g' \ - -e 's:" -> ": -> :' \ - -e "s:\`::g" \ - -e "s:|.$:|0:" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} stat -c \"0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W\" \"{}\"" - fi - ;; - esac - - # no 'xargs -I{}' - else - case "${OPERATING_SYSTEM}" in - "freebsd"|"macos"|"netbsd"|"netscaler"|"openbsd") - if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | while read %line%; do stat -f \"0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B\" \"%line%\"; done" - # shellcheck disable=SC2162 - sort -u "${_sc_path}" \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - stat -f "0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B" "${_sc_line}" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - # shellcheck disable=SC2162 - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - "${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - stat -f "0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B" "${_sc_line}" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | while read %line%; do stat -f \"0|%N%SY|%i|%Sp|%u|%g|%z|%a|%m|%c|%B\" \"%line%\"; done" - fi - ;; - "android"|"esxi"|"linux"|"solaris") - # %N returns quoted file names, so single and double quotes, and back - # apostrophe needs to be removed using 'sed' - - # also, some old 'stat' versions return the question mark '?' - # character if %W was not able to proper get the btime. 
In this case, - # the question mark will be replaced by a zero character - if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | while read %line%; do stat -c \"0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W\" \"%line%\"; done" - # shellcheck disable=SC2162 - sort -u "${_sc_path}" \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - stat -c "0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W" "${_sc_line}" \ - | sed -e "s:|':|:g" \ - -e "s:'|:|:g" \ - -e "s:' -> ': -> :" \ - -e 's:|":|:g' \ - -e 's:"|:|:g' \ - -e 's:" -> ": -> :' \ - -e "s:\`::g" \ - -e "s:|.$:|0:" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - # shellcheck disable=SC2162 - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - "${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - stat -c "0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W" "${_sc_line}" \ - | sed -e "s:|':|:g" \ - -e "s:'|:|:g" \ - -e "s:' -> ': -> :" \ - -e 's:|":|:g' \ - -e 's:"|:|:g' \ - -e 's:" -> ": -> :' \ - -e "s:\`::g" \ - -e "s:|.$:|0:" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | while read %line%; do stat -c \"0|%N|%i|%A|%u|%g|%s|%X|%Y|%Z|%W\" \"%line%\"; done" - fi - ;; - esac - fi - } - - # function that runs 'statx' tool - _statx() - { - _sc_path="${1:-}" - shift - _sc_is_file_list="${1:-false}" - shift - _sc_path_pattern="${1:-}" - shift - _sc_name_pattern="${1:-}" - shift - _sc_exclude_path_pattern="${1:-}" - shift - _sc_exclude_name_pattern="${1:-}" - shift - _sc_max_depth="${1:-}" - shift - _sc_file_type="${1:-}" - shift - _sc_min_file_size="${1:-}" - shift - _sc_max_file_size="${1:-}" - shift - _sc_permissions="${1:-}" - shift - 
_sc_date_range_start_days="${1:-}" - shift - _sc_date_range_end_days="${1:-}" - shift - _sc_output_directory="${1:-}" - shift - _sc_output_file="${1:-}" - - # 'xargs' performance is much better than 'while' loop - if ${XARGS_REPLACE_STRING_SUPPORT}; then - if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} statx \"{}\"" - sort -u "${_sc_path}" \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} statx "{}" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - "${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} statx "{}" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} statx \"{}\"" - fi - - # no 'xargs -I{}' - else - if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | while read %line%; do statx \"%line%\"; done" - # shellcheck disable=SC2162 - sort -u "${_sc_path}" \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - statx "${_sc_line}" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - # shellcheck disable=SC2162 - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - "${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - statx 
"${_sc_line}" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | while read %line%; do statx \"%line%\"; done" - fi - fi - - } - - # function that runs 'stat.pl' tool - _stat_pl() - { - _sc_path="${1:-}" - shift - _sc_is_file_list="${1:-false}" - shift - _sc_path_pattern="${1:-}" - shift - _sc_name_pattern="${1:-}" - shift - _sc_exclude_path_pattern="${1:-}" - shift - _sc_exclude_name_pattern="${1:-}" - shift - _sc_max_depth="${1:-}" - shift - _sc_file_type="${1:-}" - shift - _sc_min_file_size="${1:-}" - shift - _sc_max_file_size="${1:-}" - shift - _sc_permissions="${1:-}" - shift - _sc_date_range_start_days="${1:-}" - shift - _sc_date_range_end_days="${1:-}" - shift - _sc_output_directory="${1:-}" - shift - _sc_output_file="${1:-}" - - # 'xargs' performance is much better than 'while' loop - if ${XARGS_REPLACE_STRING_SUPPORT}; then - if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} perl \"${UAC_DIR}/tools/stat.pl/stat.pl\" \"{}\"" - sort -u "${_sc_path}" \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} perl "${UAC_DIR}/tools/stat.pl/stat.pl" "{}" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - "${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | sed -e "s:':\\\':g" -e 's:":\\\":g' \ - | xargs -I{} perl "${UAC_DIR}/tools/stat.pl/stat.pl" "{}" \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | sed -e \"s:':\\\':g\" -e 's:\":\\\\\":g' | xargs -I{} perl \"${UAC_DIR}/tools/stat.pl/stat.pl\" \"{}\"" - fi - - # no 'xargs -I{}' - else - 
if ${_sc_is_file_list}; then - log_message COMMAND "sort -u \"${_sc_path}\" | while read %line%; do perl \"${UAC_DIR}/tools/stat.pl/stat.pl\" \"%line%\"; done" - # shellcheck disable=SC2162 - sort -u "${_sc_path}" \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - perl "${UAC_DIR}/tools/stat.pl/stat.pl" "${_sc_line}" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - else - # shellcheck disable=SC2162 - find_wrapper \ - "${_sc_path}" \ - "${_sc_path_pattern}" \ - "${_sc_name_pattern}" \ - "${_sc_exclude_path_pattern}" \ - "${_sc_exclude_name_pattern}" \ - "${_sc_max_depth}" \ - "${_sc_file_type}" \ - "${_sc_min_file_size}" \ - "${_sc_max_file_size}" \ - "${_sc_permissions}" \ - "${_sc_date_range_start_days}" \ - "${_sc_date_range_end_days}" \ - | sort -u \ - | while read _sc_line || [ -n "${_sc_line}" ]; do - perl "${UAC_DIR}/tools/stat.pl/stat.pl" "${_sc_line}" - done \ - >>"${TEMP_DATA_DIR}/${_sc_output_directory}/${_sc_output_file}" - log_message COMMAND "| sort -u | while read %line%; do perl \"${UAC_DIR}/tools/stat.pl/stat.pl\" \"%line%\"; done" - fi - fi - - } - - # return if path is empty - if [ -z "${sc_path}" ]; then - printf %b "stat_collector: missing required argument: 'path'\n" >&2 - return 22 - fi - - # return if root output directory is empty - if [ -z "${sc_root_output_directory}" ]; then - printf %b "stat_collector: missing required argument: \ -'root_output_directory'\n" >&2 - return 22 - fi - - # return if output file is empty - if [ -z "${sc_output_file}" ]; then - printf %b "stat_collector: missing required argument: 'output_file'\n" >&2 - return 22 - fi - - # prepend root output directory to path if it does not start with / - # (which means local file) - if echo "${sc_path}" | grep -q -v -E "^/"; then - sc_path=`sanitize_path "${TEMP_DATA_DIR}/${sc_root_output_directory}/${sc_path}"` - fi - - # return if is file list and file list does not exist - if ${sc_is_file_list} && [ ! 
-f "${sc_path}" ]; then - printf %b "stat_collector: file list does not exist: '${sc_path}'\n" >&2 - return 5 - fi - - # sanitize output file name - sc_output_file=`sanitize_filename "${sc_output_file}"` - - if [ -n "${sc_stderr_output_file}" ]; then - # sanitize stderr output file name - sc_stderr_output_file=`sanitize_filename "${sc_stderr_output_file}"` - else - sc_stderr_output_file="${sc_output_file}.stderr" - fi - - # sanitize output directory - sc_output_directory=`sanitize_path \ - "${sc_root_output_directory}/${sc_output_directory}"` - - # create output directory if it does not exist - if [ ! -d "${TEMP_DATA_DIR}/${sc_output_directory}" ]; then - mkdir -p "${TEMP_DATA_DIR}/${sc_output_directory}" >/dev/null - fi - - ${sc_ignore_date_range} && sc_date_range_start_days="" \ - || sc_date_range_start_days="${START_DATE_DAYS}" - ${sc_ignore_date_range} && sc_date_range_end_days="" \ - || sc_date_range_end_days="${END_DATE_DAYS}" - - # local exclude mount points - if [ -n "${sc_exclude_file_system}" ]; then - sc_exclude_mount_point=`get_mount_point_by_file_system \ - "${sc_exclude_file_system}"` - sc_exclude_path_pattern="${sc_exclude_path_pattern},\ -${sc_exclude_mount_point}" - fi - - # global exclude mount points - if [ -n "${GLOBAL_EXCLUDE_MOUNT_POINT}" ]; then - sc_exclude_path_pattern="${sc_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_MOUNT_POINT}" - fi - - # global exclude path pattern - if [ -n "${GLOBAL_EXCLUDE_PATH_PATTERN}" ]; then - sc_exclude_path_pattern="${sc_exclude_path_pattern},\ -${GLOBAL_EXCLUDE_PATH_PATTERN}" - fi - - # global exclude name pattern - if [ -n "${GLOBAL_EXCLUDE_NAME_PATTERN}" ]; then - sc_exclude_name_pattern="${sc_exclude_name_pattern},\ -${GLOBAL_EXCLUDE_NAME_PATTERN}" - fi - - # prepend mount point if is not file list - ${sc_is_file_list} || sc_path=`sanitize_path "${MOUNT_POINT}/${sc_path}"` - - # always run native 'stat' if it collects file's birth time - if ${STAT_TOOL_AVAILABLE} && ${STAT_BTIME_SUPPORT}; then - _stat \ - 
"${sc_path}" \ - "${sc_is_file_list}" \ - "${sc_path_pattern}" \ - "${sc_name_pattern}" \ - "${sc_exclude_path_pattern}" \ - "${sc_exclude_name_pattern}" \ - "${sc_max_depth}" \ - "${sc_file_type}" \ - "${sc_min_file_size}" \ - "${sc_max_file_size}" \ - "${sc_permissions}" \ - "${sc_date_range_start_days}" \ - "${sc_date_range_end_days}" \ - "${sc_output_directory}" \ - "${sc_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${sc_output_directory}/${sc_stderr_output_file}" - - # run 'statx' if native 'stat' does not collect file's birth time - elif ${STATX_TOOL_AVAILABLE}; then - _statx \ - "${sc_path}" \ - "${sc_is_file_list}" \ - "${sc_path_pattern}" \ - "${sc_name_pattern}" \ - "${sc_exclude_path_pattern}" \ - "${sc_exclude_name_pattern}" \ - "${sc_max_depth}" \ - "${sc_file_type}" \ - "${sc_min_file_size}" \ - "${sc_max_file_size}" \ - "${sc_permissions}" \ - "${sc_date_range_start_days}" \ - "${sc_date_range_end_days}" \ - "${sc_output_directory}" \ - "${sc_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${sc_output_directory}/${sc_stderr_output_file}" - - # run native 'stat' if 'statx' is not available - elif ${STAT_TOOL_AVAILABLE}; then - _stat \ - "${sc_path}" \ - "${sc_is_file_list}" \ - "${sc_path_pattern}" \ - "${sc_name_pattern}" \ - "${sc_exclude_path_pattern}" \ - "${sc_exclude_name_pattern}" \ - "${sc_max_depth}" \ - "${sc_file_type}" \ - "${sc_min_file_size}" \ - "${sc_max_file_size}" \ - "${sc_permissions}" \ - "${sc_date_range_start_days}" \ - "${sc_date_range_end_days}" \ - "${sc_output_directory}" \ - "${sc_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${sc_output_directory}/${sc_stderr_output_file}" - - # run 'stat.pl' if neither 'stat' nor 'statx' is available - elif ${PERL_TOOL_AVAILABLE}; then - _stat_pl \ - "${sc_path}" \ - "${sc_is_file_list}" \ - "${sc_path_pattern}" \ - "${sc_name_pattern}" \ - "${sc_exclude_path_pattern}" \ - "${sc_exclude_name_pattern}" \ - "${sc_max_depth}" \ - "${sc_file_type}" \ - "${sc_min_file_size}" \ - "${sc_max_file_size}" \ - 
"${sc_permissions}" \ - "${sc_date_range_start_days}" \ - "${sc_date_range_end_days}" \ - "${sc_output_directory}" \ - "${sc_output_file}" \ - 2>>"${TEMP_DATA_DIR}/${sc_output_directory}/${sc_stderr_output_file}" - - else - printf %b "stat_collector: target system has neither 'stat', 'statx' nor \ -'perl' tool available\n" >&2 - return 127 - fi - - # sort and uniq output file - sort_uniq_file "${TEMP_DATA_DIR}/${sc_output_directory}/${sc_output_file}" - - # remove output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${sc_output_directory}/${sc_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${sc_output_directory}/${sc_output_file}" \ - >/dev/null - fi - - # remove stderr output file if it is empty - if [ ! -s "${TEMP_DATA_DIR}/${sc_output_directory}/${sc_stderr_output_file}" ]; then - rm -f "${TEMP_DATA_DIR}/${sc_output_directory}/${sc_stderr_output_file}" \ - >/dev/null - fi - -} \ No newline at end of file diff --git a/lib/tar_data.sh b/lib/tar_data.sh new file mode 100644 index 00000000..0591c091 --- /dev/null +++ b/lib/tar_data.sh @@ -0,0 +1,46 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Archive files and directories. +# Arguments: +# string from_file: file containing the list of files to be archived +# string destination_file: output file +# Returns: +# none +_tar_data() +{ + __td_from_file="${1:-}" + __td_destination_file="${2:-}" + + if [ ! 
-f "${__td_from_file}" ]; then + _error_msg "_tar_data: no such file or directory: '${__td_from_file}'" + return 1 + fi + + __td_tar_command="tar -T \"${__td_from_file}\" -cf \"${__td_destination_file}\"" + case "${__UAC_OPERATING_SYSTEM}" in + "aix") + __td_tar_command="tar -L \"${__td_from_file}\" -cf \"${__td_destination_file}\"" + ;; + "freebsd"|"netbsd"|"netscaler"|"openbsd") + __td_tar_command="tar -I \"${__td_from_file}\" -cf \"${__td_destination_file}\"" + ;; + "esxi"|"linux") + if ${__UAC_TOOL_TAR_NO_FROM_FILE_SUPPORT}; then + __tg_tar_command="tar -cf \"${__td_destination_file}\" *" + fi + ;; + "macos") + true + ;; + "solaris") + __td_tar_command="tar -cf \"${__td_destination_file}\" -I \"${__td_from_file}\"" + ;; + esac + + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__td_tar_command}" + eval "${__td_tar_command}" \ + >>"${__UAC_TEMP_DATA_DIR}/tar_data.stdout.txt" \ + 2>>"${__UAC_TEMP_DATA_DIR}/tar_data.stderr.txt" + +} diff --git a/lib/tar_gz_data.sh b/lib/tar_gz_data.sh new file mode 100644 index 00000000..a62700ce --- /dev/null +++ b/lib/tar_gz_data.sh @@ -0,0 +1,46 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Archive and compress files and directories. +# Arguments: +# string from_file: file containing the list of files to be archived and compressed +# string destination_file: output file +# Returns: +# none +_tar_gz_data() +{ + __tg_from_file="${1:-}" + __tg_destination_file="${2:-}" + + if [ ! 
-f "${__tg_from_file}" ]; then + _error_msg "_tar_gz_data: no such file or directory: '${__tg_from_file}'" + return 1 + fi + + __tg_tar_command="tar -T \"${__tg_from_file}\" -cf - | gzip >\"${__tg_destination_file}\"" + case "${__UAC_OPERATING_SYSTEM}" in + "aix") + __tg_tar_command="tar -L \"${__tg_from_file}\" -cf - | gzip >\"${__tg_destination_file}\"" + ;; + "freebsd"|"netbsd"|"netscaler"|"openbsd") + __tg_tar_command="tar -I \"${__tg_from_file}\" -cf - | gzip >\"${__tg_destination_file}\"" + ;; + "esxi"|"linux") + if ${__UAC_TOOL_TAR_NO_FROM_FILE_SUPPORT}; then + __tg_tar_command="tar -cf - * | gzip >\"${__tg_destination_file}\"" + fi + ;; + "macos") + true + ;; + "solaris") + __tg_tar_command="tar -cf - -I \"${__tg_from_file}\" | gzip >\"${__tg_destination_file}\"" + ;; + esac + + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__tg_tar_command}" + eval "${__tg_tar_command}" \ + >>"${__UAC_TEMP_DATA_DIR}/tar_gz_data.stdout.txt" \ + 2>>"${__UAC_TEMP_DATA_DIR}/tar_gz_data.stderr.txt" + +} diff --git a/lib/terminate.sh b/lib/terminate.sh index 66f7e7d8..5940ce31 100644 --- a/lib/terminate.sh +++ b/lib/terminate.sh @@ -1,29 +1,15 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### # Clean up and exit. -# Globals: -# TEMP_DATA_DIR -# Requires: -# None # Arguments: -# None -# Outputs: -# Write exit message to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -terminate() +# none +# Returns: +# integer: exit code 130 +_terminate() { - printf %b "\nCaught signal! Cleaning up and quitting...\n" - if [ -d "${TEMP_DATA_DIR}" ]; then - rm -rf "${TEMP_DATA_DIR}" >/dev/null 2>/dev/null - if [ -d "${TEMP_DATA_DIR}" ]; then - printf %b "Cannot remove temporary directory '${TEMP_DATA_DIR}'\n" - fi - fi + printf "\n%s\n" "Caught signal! Cleaning up and quitting..." 
+ _remove_temp_data_dir exit 130 } \ No newline at end of file diff --git a/lib/usage.sh b/lib/usage.sh index 597428b1..301bbefa 100644 --- a/lib/usage.sh +++ b/lib/usage.sh @@ -1,68 +1,85 @@ #!/bin/sh # SPDX-License-Identifier: Apache-2.0 -############################################################################### # Print the command line usage for the program. -# Globals: -# None -# Requires: -# None # Arguments: -# None -# Outputs: -# Write the command line usage for the program to stdout. -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -usage() +# none +# Returns: +# none +_usage() { - - printf %b "Usage: $0 {-p PROFILE | -a ARTIFACTS} DESTINATION [OPTIONS] - or: $0 --validate-artifacts-file FILE + printf "%s" "Usage: $0 [-h] [-V] [--debug] {-p PROFILE | -a ARTIFACT} DESTINATION + + or: $0 --validate-artifact FILE + or: $0 --validate-profile FILE Optional Arguments: -h, --help Display this help and exit. - -V, --version Output version information and exit. + -v, --verbose Increases the verbosity level. --debug Enable debug mode. + --trace Enable trace messages. + -V, --version Output version information and exit. Profiling Arguments: - -p, --profile PROFILE - Specify the collection profile name. Use '--profile list' - to list available profiles. - -a, --artifacts ARTIFACTS - Specify the artifacts to be collected during the collection. + -p, --profile PROFILE + Specify the collection profile name or path. + Use '--profile list' to list all available profiles. + -a, --artifacts ARTIFACT + Specify the artifact(s) to be collected during the collection. The expression is a comma separated string where each element - is an artifact file. Each element can be prepended with an - exclamation mark to exclude the artifact. + is an artifact. You can exclude individual artifacts by + prefixing them with an exclamation mark (!). 
Special characters such as ! and * must be escaped with a backslash. Examples: --artifacts files/logs/\*,\!files/logs/var_log.yaml - Use '--artifacts list' to list available artifacts. + Use '--artifacts list [OPERATING_SYSTEM]' to list available + artifacts (default: all). Positional Arguments: DESTINATION Specify the directory the output file should be copied to. +Output Arguments: + -o, --output-base-name BASENAME + Specify the base name of the output file (without extension). + Default: uac-%hostname%-%os%-%timestamp% + -f, --output-format FORMAT + Specify the output format. + Compression will be enabled if gzip is available. + Options: none, tar, zip (default: tar) + -P, --output-password PASSWORD + Specify the password to be used to encrypt the contents + of the archive file. + Applies to zip output format only. + Collection Arguments: + -c, --config FILE + Load the config from a specific file. -m, --mount-point MOUNT_POINT Specify the mount point (default: /). -s, --operating-system OPERATING_SYSTEM Specify the operating system. - Options: aix, android, esxi, freebsd, linux, macos, netbsd + Options: aix, esxi, freebsd, linux, macos, netbsd netscaler, openbsd, solaris + -H, --hash-collected + Hash all collected files. + -t, --max-threads THREADS + Specify the number of files that will be processed in + parallel by the hash and stat collectors (default: 2). + Use '--max-threads list' to list the number of processing + units available on the system. -u, --run-as-non-root Disable root user check. Note that data collection may be limited. - --hostname HOSTNAME + --hostname HOSTNAME Specify the target system hostname. - --temp-dir PATH + --temp-dir PATH Write all temporary data to this directory. Filter Arguments: - --date-range-start YYYY-MM-DD + --start-date YYYY-MM-DD Only collects files that were last modified/accessed/changed after the given date. 
- --date-range-end YYYY-MM-DD + --end-date YYYY-MM-DD Only collects files that were last modified/accessed/changed before the given date. @@ -73,41 +90,51 @@ Informational Arguments: Specify the description. --evidence-number EVIDENCE_NUMBER Specify the evidence number. - --examiner EXAMINER + --examiner EXAMINER Specify the examiner name. - --notes NOTES + --notes NOTES Specify the notes. Remote Transfer Arguments: --sftp SERVER - Transfer output file to remote SFTP server. + Transfer the output file to remote SFTP server. SERVER must be specified in the form [user@]host:[path] --sftp-port PORT Remote SFTP server port (default: 22). --sftp-identity-file FILE File from which the identity (private key) for public key authentication is read. - --s3-presigned-url URL - Transfer output file to AWS S3 using a pre-signed URL. - --s3-presigned-url-log-file URL - Transfer log file to AWS S3 using a pre-signed URL. + --sftp-ssh-options + Comma separated ssh options. + --s3-provider + Transfer the output and log files to S3 service. + Options: amazon, google, ibm + --s3-region + S3 region name (default: us-east-1 [amazon], us-south [ibm]). + --s3-bucket + S3 bucket/cloud object storage name. + --s3-access-key + The access key for the bucket/cloud object storage. + --s3-secret-key + The secret access key for the bucket/cloud object storage. + --s3-token + The session/bearer token for the bucket/cloud object storage. + --aws-s3-presigned-url URL + Transfer the output file to AWS S3 using a pre-signed URL. + --aws-s3-presigned-url-log-file URL + Transfer the log file to AWS S3 using a pre-signed URL. --azure-storage-sas-url URL - Transfer output file to Azure Storage using a SAS URL. + Transfer the output file to Azure Storage using a SAS URL. --azure-storage-sas-url-log-file URL - Transfer log file to Azure Storage using a SAS URL. - --ibm-cos-url URL - Transfer output file to IBM Cloud Object Storage. 
- --ibm-cos-url-log-file URL - Transfer log file to IBM Cloud Object Storage. - --ibm-cloud-api-key KEY - IBM Cloud API key / Bearer token. + Transfer the log file to Azure Storage using a SAS URL. --delete-local-on-successful-transfer Delete local output and log files on successful transfer. Validation Arguments: - --validate-artifacts-file FILE - Validate artifacts file. + --validate-artifact FILE + Validate artifact. + --validate-profile FILE + Validate profile. " - -} \ No newline at end of file +} diff --git a/lib/validate_artifact.sh b/lib/validate_artifact.sh new file mode 100644 index 00000000..18e23456 --- /dev/null +++ b/lib/validate_artifact.sh @@ -0,0 +1,477 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006,SC2162 + +# Check whether the provided artifact has any errors. +# Arguments: +# string artifact: full path to the artifact file +# Returns: +# boolean: true on success +# false on fail +_validate_artifact() +{ + __va_artifact="${1:-}" + + if [ ! 
-f "${__va_artifact}" ]; then + _error_msg "artifact: no such file or directory: '${__va_artifact}'" + return 1 + fi + + _cleanup_local_vars() + { + __va_collector="" + __va_command="" + __va_compress_output_file="" + __va_condition="" + __va_description="" + __va_exclude_file_system="" + __va_exclude_name_pattern="" + __va_exclude_nologin_users="" + __va_exclude_path_pattern="" + __va_file_type="" + __va_foreach="" + __va_ignore_date_range="" + __va_is_file_list="" + __va_max_depth="" + __va_max_file_size="" + __va_min_file_size="" + __va_name_pattern="" + __va_output_directory="" + __va_output_file="" + __va_path_pattern="" + __va_path="" + __va_permissions="" + __va_supported_os="" + } + _cleanup_local_vars + + __va_global_output_directory="" + __va_artifacts_prop_exists=false + + # remove lines starting with # (comments) and any inline comments + # remove leading and trailing space characters + # remove blank lines + # add a new line and '-' to the end of file + printf "\n%s\n" "-" \ + | cat "${__va_artifact}" - \ + | sed -e 's|#.*$||g' \ + -e 's|^ *||' \ + -e 's| *$||' \ + -e '/^$/d' \ + | while read __va_key __va_value; do + + case "${__va_key}" in + "artifacts:") + ${__va_artifacts_prop_exists} \ + && { _error_msg "artifact: invalid duplicated 'artifacts' mapping."; return 1; } + __va_artifacts_prop_exists=true + # read the next line which must be a dash (-) + read __va_dash + if [ "${__va_dash}" != "-" ]; then + _error_msg "artifact: invalid 'artifacts' sequence of mappings." + return 1 + fi + if [ -n "${__va_output_directory}" ]; then + __va_global_output_directory="${__va_output_directory}" + fi + ;; + "collector:") + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'collector' must not be empty." 
+ return 1 + elif _is_in_list "${__va_value}" "command|file|find|hash|stat"; then + true + else + _error_msg "artifact: invalid collector '${__va_value}'" + return 1 + fi + __va_collector="${__va_value}" + ;; + "command:") + if [ "${__va_value}" = "\"\"\"" ]; then + __va_value="" + while read __va_line && [ "${__va_line}" != "\"\"\"" ]; do + if [ "${__va_line}" = "-" ]; then + _error_msg "artifact: missing closing \"\"\" for 'command' collector." + return 1 + fi + __va_value="${__va_value}${__va_line}\n" + done + fi + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'command' must not be empty." + return 1 + fi + __va_command="${__va_value}" + ;; + "compress_output_file:") + if [ "${__va_value}" != true ] && [ "${__va_value}" != false ]; then + _error_msg "artifact: 'compress_output_file' must be 'true' or 'false'." + return 1 + fi + __va_compress_output_file="${__va_value}" + ;; + "condition:") + if [ "${__va_value}" = "\"\"\"" ]; then + __va_value="" + while read __va_line && [ "${__va_line}" != "\"\"\"" ]; do + if [ "${__va_line}" = "-" ]; then + _error_msg "artifact: missing closing \"\"\" for 'condition' property." + return 1 + fi + __va_value="${__va_value}${__va_line}\n" + done + fi + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'condition' must not be empty." + return 1 + fi + __va_condition="${__va_value}" + ;; + "description:") + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'description' must not be empty." + return 1 + fi + __va_description="${__va_value}" + ;; + "exclude_file_system:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'exclude_file_system' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'exclude_file_system' must not be empty." 
+ return 1 + fi + __va_exclude_file_system="${__va_value}" + ;; + "exclude_name_pattern:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'exclude_name_pattern' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'exclude_name_pattern' must not be empty." + return 1 + fi + __va_exclude_name_pattern="${__va_value}" + ;; + "exclude_nologin_users:") + if [ "${__va_value}" != true ] && [ "${__va_value}" != false ]; then + _error_msg "artifact: 'exclude_nologin_users' must be 'true' or 'false'." + return 1 + fi + __va_exclude_nologin_users="${__va_value}" + ;; + "exclude_path_pattern:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'exclude_path_pattern' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'exclude_path_pattern' must not be empty." + return 1 + fi + __va_exclude_path_pattern="${__va_value}" + ;; + "file_type:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'file_type' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'file_type' must not be empty." + return 1 + fi + __va_valid_values="b|c|d|p|f|l|s" + for __va_item in `echo "${__va_value}" | sed -e 's:|: :g'`; do + if _is_in_list "${__va_item}" "${__va_valid_values}"; then + true + else + _error_msg "artifact: invalid file_type '${__va_item}'" + return 1 + fi + done + __va_file_type="${__va_value}" + ;; + "foreach:") + if [ "${__va_value}" = "\"\"\"" ]; then + __va_value="" + while read __va_line && [ "${__va_line}" != "\"\"\"" ]; do + if [ "${__va_line}" = "-" ]; then + _error_msg "artifact: missing closing \"\"\" for 'foreach' property." 
+ return 1 + fi + __va_value="${__va_value}${__va_line}\n" + done + fi + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'foreach' must not be empty." + return 1 + fi + __va_foreach="${__va_value}" + ;; + "ignore_date_range:") + if [ "${__va_value}" != true ] && [ "${__va_value}" != false ]; then + _error_msg "artifact: 'ignore_date_range' must be 'true' or 'false'." + return 1 + fi + __va_ignore_date_range="${__va_value}" + ;; + "is_file_list:") + if [ "${__va_value}" != true ] && [ "${__va_value}" != false ]; then + _error_msg "artifact: 'is_file_list' must be 'true' or 'false'." + return 1 + fi + __va_is_file_list="${__va_value}" + ;; + "max_depth:") + if _is_digit "${__va_value}" && [ "${__va_value}" -ge 0 ]; then + true + else + _error_msg "artifact: 'max_depth' must be zero or a positive integer." + return 1 + fi + __va_max_depth="${__va_value}" + ;; + "max_file_size:") + if _is_digit "${__va_value}" && [ "${__va_value}" -gt 0 ]; then + true + else + _error_msg "artifact: 'max_file_size' must be a positive integer." + return 1 + fi + __va_max_file_size="${__va_value}" + ;; + "min_file_size:") + if _is_digit "${__va_value}" && [ "${__va_value}" -gt 0 ]; then + true + else + _error_msg "artifact: 'min_file_size' must be a positive integer." + return 1 + fi + __va_min_file_size="${__va_value}" + ;; + "name_pattern:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'name_pattern' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'name_pattern' must not be empty." + return 1 + fi + __va_name_pattern="${__va_value}" + ;; + "output_directory:") + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'output_directory' must not be empty." + return 1 + fi + __va_output_directory="${__va_value}" + ;; + "output_file:") + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'output_file' must not be empty." 
+ return 1 + fi + __va_output_file="${__va_value}" + ;; + "path:") + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'path' must not be empty." + return 1 + elif echo "${__va_value}" | grep -q -v -E "^/"; then + _error_msg "artifact: 'output_directory' invalid path. Path must be absolute (starting with /)." + return 1 + fi + __va_path="${__va_value}" + ;; + "path_pattern:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'path_pattern' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'path_pattern' must not be empty." + return 1 + fi + __va_path_pattern="${__va_value}" + ;; + "permissions:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'permissions' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'permissions' must not be empty." + return 1 + fi + for __va_item in `echo "${__va_value}" | sed -e 's:|: :g'`; do + if _is_digit "${__va_item}" \ + && [ "${__va_item}" -gt -7778 ] \ + && [ "${__va_item}" -lt 7778 ]; then + true + else + _error_msg "artifact: 'permissions' must be an integer between -7777 and 7777." + return 1 + fi + done + __va_permissions="${__va_value}" + ;; + "supported_os:") + if echo "${__va_value}" | grep -q -v -E "^\[.*\]$"; then + _error_msg "artifact: 'supported_os' must be an array/list." + return 1 + fi + __va_value=`echo "${__va_value}" | _array_to_psv` + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'supported_os' must not be empty." 
+ return 1 + fi + __va_valid_values="all|aix|esxi|freebsd|linux|macos|netbsd|netscaler|openbsd|solaris" + for __va_item in `echo "${__va_value}" | sed -e 's:|: :g'`; do + if _is_in_list "${__va_item}" "${__va_valid_values}"; then + true + else + _error_msg "artifact: invalid supported_os '${__va_item}'" + return 1 + fi + done + __va_supported_os="${__va_value}" + ;; + "version:") + if [ -z "${__va_value}" ]; then + _error_msg "artifact: 'version' must not be empty." + return 1 + fi + ;; + "-") + ${__va_artifacts_prop_exists} \ + || { _error_msg "artifact: missing 'artifacts' mapping."; return 1; } + + if [ -z "${__va_description}" ]; then + _error_msg "artifact: missing 'description' property." + return 1 + fi + if [ -z "${__va_supported_os}" ]; then + _error_msg "artifact: missing 'supported_os' property." + return 1 + fi + if [ -z "${__va_collector}" ]; then + _error_msg "artifact: missing 'collector' property." + return 1 + fi + if [ "${__va_collector}" = "command" ] \ + || [ "${__va_collector}" = "find" ] \ + || [ "${__va_collector}" = "hash" ] \ + || [ "${__va_collector}" = "stat" ]; then + if [ -z "${__va_output_directory}" ] && [ -z "${__va_global_output_directory}" ]; then + _error_msg "artifact: missing 'output_directory' property." + return 1 + fi + fi + if [ "${__va_collector}" = "command" ]; then + if [ -z "${__va_command}" ]; then + _error_msg "artifact: missing 'command' property." + return 1 + fi + if [ -n "${__va_exclude_file_system}" ]; then + _error_msg "artifact: invalid 'exclude_file_system' property for 'command' collector." + return 1 + fi + if [ -n "${__va_exclude_name_pattern}" ]; then + _error_msg "artifact: invalid 'exclude_name_pattern' property for 'command' collector." + return 1 + fi + if [ -n "${__va_exclude_path_pattern}" ]; then + _error_msg "artifact: invalid 'exclude_path_pattern' property for 'command' collector." 
+ return 1 + fi + if [ -n "${__va_file_type}" ]; then + _error_msg "artifact: invalid 'file_type' property for 'command' collector." + return 1 + fi + if [ -n "${__va_ignore_date_range}" ]; then + _error_msg "artifact: invalid 'ignore_date_range' property for 'command' collector." + return 1 + fi + if [ -n "${__va_is_file_list}" ]; then + _error_msg "artifact: invalid 'is_file_list' property for 'command' collector." + return 1 + fi + if [ -n "${__va_max_depth}" ]; then + _error_msg "artifact: invalid 'max_depth' property for 'command' collector." + return 1 + fi + if [ -n "${__va_max_file_size}" ]; then + _error_msg "artifact: invalid 'max_file_size' property for 'command' collector." + return 1 + fi + if [ -n "${__va_min_file_size}" ]; then + _error_msg "artifact: invalid 'min_file_size' property for 'command' collector." + return 1 + fi + if [ -n "${__va_name_pattern}" ]; then + _error_msg "artifact: invalid 'name_pattern' property for 'command' collector." + return 1 + fi + if [ -n "${__va_path}" ]; then + _error_msg "artifact: invalid 'path' property for 'command' collector." + return 1 + fi + if [ -n "${__va_path_pattern}" ]; then + _error_msg "artifact: invalid 'path_pattern' property for 'command' collector." + return 1 + fi + if [ -n "${__va_permissions}" ]; then + _error_msg "artifact: invalid 'permissions' property for 'command' collector." + return 1 + fi + elif [ "${__va_collector}" = "file" ] \ + || [ "${__va_collector}" = "find" ] \ + || [ "${__va_collector}" = "hash" ] \ + || [ "${__va_collector}" = "stat" ]; then + if [ -z "${__va_path}" ]; then + _error_msg "artifact: missing 'path' property." + return 1 + fi + if [ -n "${__va_command}" ]; then + _error_msg "artifact: invalid 'command' property for '${__va_collector}' collector." + return 1 + fi + if [ -n "${__va_compress_output_file}" ]; then + _error_msg "artifact: invalid 'compress_output_file' property for '${__va_collector}' collector." 
+ return 1 + fi + if [ -n "${__va_foreach}" ]; then + _error_msg "artifact: invalid 'foreach' property for '${__va_collector}' collector." + return 1 + fi + if [ "${__va_collector}" = "find" ] \ + || [ "${__va_collector}" = "hash" ] \ + || [ "${__va_collector}" = "stat" ]; then + if [ -z "${__va_output_file}" ]; then + _error_msg "artifact: missing 'output_file' property." + return 1 + fi + fi + fi + _cleanup_local_vars + ;; + *) + __va_key=`echo "${__va_key}" | sed -e 's|\|$||'` + _error_msg "artifact: invalid property '${__va_key}'." + return 1 + esac + + done +} \ No newline at end of file diff --git a/lib/validate_artifact_list.sh b/lib/validate_artifact_list.sh new file mode 100644 index 00000000..0b4d8f3f --- /dev/null +++ b/lib/validate_artifact_list.sh @@ -0,0 +1,95 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006,SC2086 + +# Build the artifact list to be used during execution based on the artifacts provided in the command line. +# Arguments: +# string artifact_list: comma-separated list of artifacts +# string artifacts_dir: full path to the artifacts directory (default: artifacts) +# Returns: +# string: artifact list (line by line) +_validate_artifact_list() +{ + # valid relative path + # live_response/process/ps.yaml + # artifacts/live_response/process/ps.yaml + # ./artifacts/live_response/process/ps.yaml + # valid full path (only if the artifact is outside uac/artifacts directory) + # /tmp/my_custom_artifact.yaml + # invalid relative path (all ../ will be removed) + # ../uac/artifacts/live_response/process/ps.yaml + + __vl_artifact_list="${1:-}" + __vl_artifacts_dir="${2:-artifacts}" + + __vl_updated_artifact_list="" + + # remove any ../ + # replace consecutive slashes by one slash + # replace consecutive commas by one comma + # remove leading and trailing comma + __vl_artifact_list=`echo "${__vl_artifact_list}" \ + | sed -e 's|\.\./||g' \ + -e 's|//*|/|g' \ + -e 's|,,*|,|g' \ + -e 's|^,||' \ + -e 's|,$||'` + + 
__vl_OIFS="${IFS}"; IFS="," + for __vl_artifact in ${__vl_artifact_list}; do + case "${__vl_artifact}" in + \!/*) + __vl_artifact=`echo "${__vl_artifact}" | sed -e 's|^!||' 2>/dev/null` + __vl_exclude_list=`find ${__vl_artifact} -print 2>/dev/null` + if [ -z "${__vl_exclude_list}" ]; then + _error_msg "artifact not found '${__vl_artifact}'" + IFS="${__vl_OIFS}" + return 1 + fi + __vl_updated_artifact_list=`_filter_list "${__vl_updated_artifact_list}" "${__vl_exclude_list}"` + ;; + \!*) + # remove leading ! + # remove leading ./ + # remove leading artifacts/ + __vl_artifact=`echo "${__vl_artifact}" | sed -e 's|^!||' -e 's|^\./||' -e 's|^artifacts/||' 2>/dev/null` + __vl_exclude_list=`find "${__UAC_DIR}/artifacts"/${__vl_artifact} -print 2>/dev/null` + if [ -z "${__vl_exclude_list}" ]; then + _error_msg "artifact not found '${__vl_artifact}'" + IFS="${__vl_OIFS}" + return 1 + fi + __vl_updated_artifact_list=`_filter_list "${__vl_updated_artifact_list}" "${__vl_exclude_list}"` + ;; + /*) + __vl_include_list=`find ${__vl_artifact} -print 2>/dev/null` + if [ -z "${__vl_include_list}" ]; then + _error_msg "artifact not found '${__vl_artifact}'" + IFS="${__vl_OIFS}" + return 1 + fi + __vl_updated_artifact_list="${__vl_updated_artifact_list}${__vl_updated_artifact_list:+ +}${__vl_include_list}" + ;; + *) + # remove leading ! 
+ # remove leading ./ + # remove leading artifacts/ + __vl_artifact=`echo "${__vl_artifact}" | sed -e 's|^!||' -e 's|^\./||' -e 's|^artifacts/||' 2>/dev/null` + __vl_include_list=`find "${__UAC_DIR}/artifacts"/${__vl_artifact} -print 2>/dev/null` + if [ -z "${__vl_include_list}" ]; then + _error_msg "artifact not found '${__vl_artifact}'" + IFS="${__vl_OIFS}" + return 1 + fi + __vl_updated_artifact_list="${__vl_updated_artifact_list}${__vl_updated_artifact_list:+ +}${__vl_include_list}" + ;; + esac + done + IFS="${__vl_OIFS}" + + # remove duplicates + echo "${__vl_updated_artifact_list}" | awk '!a[$0]++' + +} diff --git a/lib/validate_artifacts_file.sh b/lib/validate_artifacts_file.sh deleted file mode 100644 index 916af48c..00000000 --- a/lib/validate_artifacts_file.sh +++ /dev/null @@ -1,437 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Validate artifacts file. -# Globals: -# UAC_DIR -# Requires: -# array_to_list -# is_integer -# lrstrip -# Arguments: -# $1: artifacts file -# Outputs: -# None -# Exit Status: -# Exit with status 0 on success. -# Exit with status greater than 0 if errors occur. -############################################################################### -validate_artifacts_file() -{ - va_artifacts_file="${1:-}" - - # return if artifacts file does not exist - if [ ! 
-f "${va_artifacts_file}" ]; then - printf %b "uac: no such file or directory: '${va_artifacts_file}'\n" >&2 - return 2 - fi - - _cleanup_local_vars() { - va_version="" - va_description="" - va_collector="" - va_supported_os="" - va_foreach="" - va_command="" - va_path="" - va_path_pattern="" - va_name_pattern="" - va_exclude_path_pattern="" - va_exclude_name_pattern="" - va_max_depth="" - va_file_type="" - va_min_file_size="" - va_max_file_size="" - va_permissions="" - va_ignore_date_range=false - va_output_file="" - va_output_directory="" - va_is_file_list=false - va_compress_output_file=false - va_exclude_nologin_users=false - } - - va_artifacts_prop=false - _cleanup_local_vars - - # add '-' to the end of file - # remove lines starting with # (comments) - # remove inline comments - # remove blank lines - # shellcheck disable=SC2162 - printf %b "\n-" | cat "${va_artifacts_file}" - \ - | sed -e 's/#.*$//g' -e '/^ *$/d' -e '/^$/d' 2>/dev/null \ - | while IFS=":" read va_key va_value || [ -n "${va_key}" ]; do - - va_key=`lrstrip "${va_key}"` - - case "${va_key}" in - "artifacts") - if ${va_artifacts_prop}; then - printf %b "uac: artifacts file: invalid duplicated \ -'artifacts' mapping.\n" >&2 - return 151 - fi - va_artifacts_prop=true - # read the next line which must be a dash (-) - read va_dash - va_dash=`lrstrip "${va_dash}"` - if [ "${va_dash}" != "-" ]; then - printf %b "uac: artifacts file: invalid 'artifacts' \ -sequence of mappings.\n" >&2 - return 150 - fi - ;; - "version") - va_version=`lrstrip "${va_value}"` - if [ -z "${va_version}" ]; then - printf %b "uac: artifacts file: 'version' must not be \ -empty.\n" >&2 - return 152 - fi - ;; - "description") - va_description=`lrstrip "${va_value}"` - if [ -z "${va_description}" ]; then - printf %b "uac: artifacts file: 'description' must not be \ -empty.\n" >&2 - return 152 - fi - ;; - "collector") - va_collector=`lrstrip "${va_value}"` - if [ "${va_collector}" != "command" ] \ - && [ "${va_collector}" != 
"file" ] \ - && [ "${va_collector}" != "find" ] \ - && [ "${va_collector}" != "hash" ] \ - && [ "${va_collector}" != "stat" ]; then - printf %b "uac: artifacts file: invalid 'collector': \ -'${va_collector}'\n" >&2 - return 152 - fi - ;; - "supported_os") - va_supported_os=`lrstrip "${va_value}"` - if [ -z "${va_supported_os}" ]; then - printf %b "uac: artifacts file: 'supported_os' must not \ -be empty.\n" >&2 - return 152 - elif echo "${va_supported_os}" | grep -q -v -E "^\[" \ - && echo "${va_supported_os}" | grep -q -v -E "\]$"; then - printf %b "uac: artifacts file: 'supported_os' must be an \ -array/list.\n" >&2 - return 152 - fi - va_so_list="all,aix,android,esxi,freebsd,linux,macos,netbsd,netscaler,openbsd,solaris" - OIFS="${IFS}" - IFS="," - for va_os in `array_to_list "${va_supported_os}"`; do - if is_element_in_list "${va_os}" "${va_so_list}"; then - continue - else - printf %b "uac: artifacts file: invalid supported_os \ -'${va_os}'\n" >&2 - return 152 - fi - done - IFS="${OIFS}" - ;; - "foreach"|"loop_command") - va_foreach=`lrstrip "${va_value}"` - if [ -z "${va_foreach}" ]; then - printf %b "uac: artifacts file: 'foreach' must not be \ -empty.\n" >&2 - return 152 - fi - ;; - "command") - va_command=`lrstrip "${va_value}"` - if [ -z "${va_command}" ]; then - printf %b "uac: artifacts file: 'command' must not be \ -empty.\n" >&2 - return 152 - fi - ;; - "path") - va_path=`lrstrip "${va_value}"` - if [ -z "${va_path}" ]; then - printf %b "uac: artifacts file: 'path' must not be \ -empty.\n" >&2 - return 152 - fi - ;; - "path_pattern") - va_path_pattern=`lrstrip "${va_value}"` - if [ -z "${va_path_pattern}" ]; then - printf %b "uac: artifacts file: 'path_pattern' must not be \ -empty.\n" >&2 - return 152 - elif echo "${va_path_pattern}" | grep -q -v -E "^\[" \ - && echo "${va_path_pattern}" | grep -q -v -E "\]$"; then - printf %b "uac: artifacts file: 'path_pattern' must be an \ -array/list.\n" >&2 - return 152 - fi - ;; - "name_pattern") - 
va_name_pattern=`lrstrip "${va_value}"` - if [ -z "${va_name_pattern}" ]; then - printf %b "uac: artifacts file: 'name_pattern' must not be \ -empty.\n" >&2 - return 152 - elif echo "${va_name_pattern}" | grep -q -v -E "^\[" \ - && echo "${va_name_pattern}" | grep -q -v -E "\]$"; then - printf %b "uac: artifacts file: 'name_pattern' must be an \ -array/list.\n" >&2 - return 152 - fi - ;; - "exclude_path_pattern") - va_exclude_path_pattern=`lrstrip "${va_value}"` - if [ -z "${va_exclude_path_pattern}" ]; then - printf %b "uac: artifacts file: 'exclude_path_pattern' must \ -not be empty.\n" >&2 - return 152 - elif echo "${va_exclude_path_pattern}" | grep -q -v -E "^\[" \ - && echo "${va_exclude_path_pattern}" | grep -q -v -E "\]$"; then - printf %b "uac: artifacts file: 'exclude_path_pattern' must \ -be an array/list.\n" >&2 - return 152 - fi - ;; - "exclude_name_pattern") - va_exclude_name_pattern=`lrstrip "${va_value}"` - if [ -z "${va_exclude_name_pattern}" ]; then - printf %b "uac: artifacts file: 'exclude_name_pattern' must \ -not be empty.\n" >&2 - return 152 - elif echo "${va_exclude_name_pattern}" | grep -q -v -E "^\[" \ - && echo "${va_exclude_name_pattern}" | grep -q -v -E "\]$"; then - printf %b "uac: artifacts file: 'exclude_name_pattern' must \ -be an array/list.\n" >&2 - return 152 - fi - ;; - "exclude_file_system") - va_exclude_file_system=`lrstrip "${va_value}"` - if [ -z "${va_exclude_file_system}" ]; then - printf %b "uac: artifacts file: 'exclude_file_system' must \ -not be empty.\n" >&2 - return 152 - elif echo "${va_exclude_file_system}" | grep -q -v -E "^\[" \ - && echo "${va_exclude_file_system}" | grep -q -v -E "\]$"; then - printf %b "uac: artifacts file: 'exclude_file_system' must \ -be an array/list.\n" >&2 - return 152 - fi - ;; - "max_depth") - va_max_depth=`lrstrip "${va_value}"` - if is_integer "${va_max_depth}" 2>/dev/null \ - && [ "${va_max_depth}" -gt 0 ]; then - continue - else - printf %b "uac: artifacts file: 'max_depth' must be a 
\ -positive integer, but got a '${va_max_depth}'\n" >&2 - return 152 - fi - ;; - "file_type") - va_file_type=`lrstrip "${va_value}"` - if [ "${va_file_type}" != "b" ] \ - && [ "${va_file_type}" != "c" ] \ - && [ "${va_file_type}" != "d" ] \ - && [ "${va_file_type}" != "p" ] \ - && [ "${va_file_type}" != "f" ] \ - && [ "${va_file_type}" != "l" ] \ - && [ "${va_file_type}" != "s" ]; then - printf %b "uac: artifacts file: invalid file_type \ -'${va_file_type}'\n" >&2 - return 152 - fi - ;; - "min_file_size") - va_min_file_size=`lrstrip "${va_value}"` - if is_integer "${va_min_file_size}" 2>/dev/null \ - && [ "${va_min_file_size}" -gt 0 ]; then - continue - else - printf %b "uac: artifacts file: 'min_file_size' must be a \ -positive integer, but got a '${va_min_file_size}'\n" >&2 - return 152 - fi - ;; - "max_file_size") - va_max_file_size=`lrstrip "${va_value}"` - if is_integer "${va_max_file_size}" 2>/dev/null \ - && [ "${va_max_file_size}" -gt 0 ]; then - continue - else - printf %b "uac: artifacts file: 'max_file_size' must be a \ -positive integer, but got a '${va_max_file_size}'\n" >&2 - return 152 - fi - ;; - "permissions") - va_permissions=`lrstrip "${va_value}"` - if is_integer "${va_permissions}" 2>/dev/null \ - && [ "${va_permissions}" -gt -7778 ] \ - && [ "${va_permissions}" -lt 7778 ]; then - continue - else - printf %b "uac: artifacts file: 'permissions' must be a \ -positive integer between 1 and 7777, but got a '${va_permissions}'\n" >&2 - return 152 - fi - ;; - "ignore_date_range") - va_ignore_date_range=`lrstrip "${va_value}"` - if [ "${va_ignore_date_range}" != true ] \ - && [ "${va_ignore_date_range}" != false ]; then - printf %b "uac: artifacts file: 'ignore_date_range' must be \ -'true' or 'false', but got a '${va_ignore_date_range}'\n" >&2 - return 152 - fi - ;; - "output_directory") - va_output_directory=`lrstrip "${va_value}"` - if [ -z "${va_output_directory}" ]; then - printf %b "uac: artifacts file: 'output_directory' must not \ -be 
empty.\n" >&2 - return 152 - fi - ;; - "output_file") - va_output_file=`lrstrip "${va_value}"` - if [ -z "${va_output_file}" ]; then - printf %b "uac: artifacts file: 'output_file' must not be \ -empty.\n" >&2 - return 152 - fi - ;; - "stderr_output_file") - va_stderr_output_file=`lrstrip "${va_value}"` - if [ -z "${va_stderr_output_file}" ]; then - printf %b "uac: artifacts file: 'stderr_output_file' must not be \ -empty.\n" >&2 - return 152 - fi - ;; - "is_file_list") - va_is_file_list=`lrstrip "${va_value}"` - if [ "${va_is_file_list}" != true ] \ - && [ "${va_is_file_list}" != false ]; then - printf %b "uac: artifacts file: 'is_file_list' must be \ -'true' or 'false', but got a '${va_is_file_list}'\n" >&2 - return 152 - fi - ;; - "compress_output_file") - va_compress_output_file=`lrstrip "${va_value}"` - if [ "${va_compress_output_file}" != true ] \ - && [ "${va_compress_output_file}" != false ]; then - printf %b "uac: artifacts file: 'compress_output_file' must \ -be 'true' or 'false', but got a '${va_compress_output_file}'\n" >&2 - return 152 - fi - ;; - "exclude_nologin_users") - va_exclude_nologin_users=`lrstrip "${va_value}"` - if [ "${va_exclude_nologin_users}" != true ] \ - && [ "${va_exclude_nologin_users}" != false ]; then - printf %b "uac: artifacts file: 'exclude_nologin_users' must \ -be 'true' or 'false', but got a '${va_exclude_nologin_users}'\n" >&2 - return 152 - fi - ;; - "-") - if [ ${va_artifacts_prop} = false ]; then - printf %b "uac: artifacts file: missing 'artifacts' \ -mapping.\n" >&2 - return 150 - fi - if [ -z "${va_description}" ]; then - printf %b "uac: artifacts file: missing 'description' \ -property.\n" >&2 - return 153 - fi - if [ -z "${va_collector}" ]; then - printf %b "uac: artifacts file: missing 'collector' \ -property.\n" >&2 - return 153 - fi - if [ -z "${va_supported_os}" ]; then - printf %b "uac: artifacts file: missing 'supported_os' \ -property.\n" >&2 - return 153 - fi - - if [ "${va_collector}" = "command" ]; then - 
if [ -z "${va_command}" ]; then - printf %b "uac: artifacts file: missing 'command' property \ -for 'command' collector.\n" >&2 - return 153 - elif [ -z "${va_output_file}" ]; then - printf %b "uac: artifacts file: missing 'output_file' \ -property for 'command' collector.\n" >&2 - return 153 - fi - fi - - if [ "${va_collector}" = "find" ]; then - if [ -z "${va_path}" ]; then - printf %b "uac: artifacts file: missing 'path' property \ -for 'find' collector.\n" >&2 - return 153 - elif [ -z "${va_output_file}" ]; then - printf %b "uac: artifacts file: missing 'output_file' \ -property for 'find' collector.\n" >&2 - return 153 - fi - fi - - if [ "${va_collector}" = "hash" ]; then - if [ -z "${va_path}" ]; then - printf %b "uac: artifacts file: missing 'path' property \ -for 'hash' collector.\n" >&2 - return 153 - elif [ -z "${va_output_file}" ]; then - printf %b "uac: artifacts file: missing 'output_file' \ -property for 'hash' collector.\n" >&2 - return 153 - fi - fi - - if [ "${va_collector}" = "stat" ]; then - if [ -z "${va_path}" ]; then - printf %b "uac: artifacts file: missing 'path' property \ -for 'stat' collector.\n" >&2 - return 153 - elif [ -z "${va_output_file}" ]; then - printf %b "uac: artifacts file: missing 'output_file' \ -property for 'stat' collector.\n" >&2 - return 153 - fi - fi - - if [ "${va_collector}" = "file" ]; then - if [ -z "${va_path}" ]; then - printf %b "uac: artifacts file: missing 'path' property \ -for 'file' collector.\n" >&2 - return 153 - fi - fi - - _cleanup_local_vars - ;; - *) - printf %b "uac: artifacts file: invalid property \ -'${va_key}'\n" >&2 - return 153 - esac - - done - -} \ No newline at end of file diff --git a/lib/validate_profile.sh b/lib/validate_profile.sh new file mode 100644 index 00000000..2fdd23d0 --- /dev/null +++ b/lib/validate_profile.sh @@ -0,0 +1,99 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 +# shellcheck disable=SC2006,SC2086,SC2162 + +# Check whether the provided profile file has any errors. 
+# Arguments: +# string profile: full path to the profile file +# string artifacts_dir: full path to the artifacts directory +# Returns: +# boolean: true on success +# false on fail +_validate_profile() +{ + __vp_profile="${1:-}" + __vp_artifacts_dir="${2:-artifacts}" + + if [ ! -f "${__vp_profile}" ]; then + _error_msg "profile file: no such file or directory: '${__vp_profile}'" + return 1 + fi + + __vp_name_prop_exists=false + __vp_description_prop_exists=false + __vp_artifacts_prop_exists=false + __vp_artifact_list_exists=false + + # remove lines starting with # (comments) and any inline comments + # remove leading and trailing space characters + # remove blank lines + # add a new line and '__EOF__' to the end of file + printf "%s\n" "__EOF__" \ + | cat "${__vp_profile}" - \ + | sed -e 's|#.*$||g' \ + -e 's|^ *||' \ + -e 's| *$||' \ + -e '/^$/d' \ + | while read __vp_key __vp_value; do + + case "${__vp_key}" in + + "artifacts:") + ${__vp_artifacts_prop_exists} \ + && { _error_msg "profile: invalid duplicated 'artifacts' mapping."; return 1; } + __vp_artifacts_prop_exists=true + ;; + "description:") + if [ -z "${__vp_value}" ]; then + _error_msg "profile: 'description' must not be empty." + return 1 + fi + __vp_description_prop_exists=true + ;; + "name:") + if [ -z "${__vp_value}" ]; then + _error_msg "profile: 'name' must not be empty." + return 1 + fi + __vp_name_prop_exists=true + ;; + "-"*) + ${__vp_artifacts_prop_exists} \ + || { _error_msg "profile: missing 'artifacts' mapping."; return 1; } + + __vp_artifact=`echo "${__vp_value}" | sed -e 's|^!||' 2>/dev/null` + + if [ -z "${__vp_artifact}" ]; then + _error_msg "profile: invalid empty artifact entry." 
+ return 1 + fi + + if find ${__vp_artifact} -print \ + >/dev/null 2>/dev/null; then + true + elif find "${__vp_artifacts_dir}"/${__vp_artifact} -print \ + >/dev/null 2>/dev/null; then + true + else + _error_msg "profile: artifact not found '${__vp_artifact}'" + return 1 + fi + __vp_artifact_list_exists=true + ;; + "__EOF__") + ${__vp_name_prop_exists} \ + || { _error_msg "profile: missing 'name' property."; return 1; } + ${__vp_description_prop_exists} \ + || { _error_msg "profile: missing 'description' property."; return 1; } + ${__vp_artifacts_prop_exists} \ + || { _error_msg "profile: missing 'artifacts' mapping."; return 1; } + ${__vp_artifact_list_exists} \ + || { _error_msg "profile: 'artifacts' must have at least one artifact."; return 1; } + ;; + *) + __vp_key=`echo "${__vp_key}" | sed -e 's|\|$||'` + _error_msg "profile: invalid property '${__vp_key}'." + return 1 + esac + done +} \ No newline at end of file diff --git a/lib/validate_profile_file.sh b/lib/validate_profile_file.sh deleted file mode 100644 index 7f8c0ef6..00000000 --- a/lib/validate_profile_file.sh +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/sh -# SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006 - -############################################################################### -# Validate profile file. -# Globals: -# UAC_DIR -# Requires: -# lrstrip -# Arguments: -# $1: profile file -# Outputs: -# None -# return Status: -# return with status 0 on success. -# return with status greater than 0 if errors occur. -############################################################################### -validate_profile_file() -{ - vp_profile_file="${1:-}" - - # return if profile file does not exist - if [ ! 
-f "${vp_profile_file}" ]; then - printf %b "uac: profile file: no such file or \ -directory: '${vp_profile_file}'\n" >&2 - return 2 - fi - - vp_name="" - vp_description="" - vp_artifacts_prop=false - vp_artifacts_file_prop=false - vp_artifact_file="" - vp_include_artifacts_file=false - - # add '__end__' line to the end of file - # remove lines starting with # (comments) - # remove inline comments - # remove blank lines - # shellcheck disable=SC2162 - printf %b "\n__end__" | cat "${vp_profile_file}" - \ - | sed -e 's/#.*$//g' -e '/^ *$/d' -e '/^$/d' 2>/dev/null \ - | while IFS=":" read vp_key vp_value || [ -n "${vp_key}" ]; do - - vp_key=`lrstrip "${vp_key}"` - - case "${vp_key}" in - "artifacts") - if ${vp_artifacts_prop}; then - printf %b "uac: profile file: invalid duplicated 'artifacts' \ -mapping.\n" >&2 - return 151 - fi - vp_artifacts_prop=true - ;; - "description") - vp_description=`lrstrip "${vp_value}"` - if [ -z "${vp_description}" ]; then - printf %b "uac: profile file: 'description' \ -must not be empty.\n" >&2 - return 152 - fi - ;; - "name") - vp_name=`lrstrip "${vp_value}"` - if [ -z "${vp_name}" ]; then - printf %b "uac: profile file: 'name' \ -must not be empty.\n" >&2 - return 152 - fi - ;; - "-"*) - if [ ${vp_artifacts_prop} = false ]; then - printf %b "uac: profile file: missing 'artifacts' \ -mapping.\n" >&2 - return 150 - fi - # extract file name from artifacts array - # shellcheck disable=SC2001 - vp_artifact_file=`echo "${vp_key}" | sed -e 's: *- *::g'` - if [ -z "${vp_artifact_file}" ]; then - printf %b "uac: profile file: invalid empty artifact \ -entry.\n" >&2 - return 152 - fi - - if echo "${vp_artifact_file}" | grep -q -E "^!" 
2>/dev/null; then - # shellcheck disable=SC2001 - vp_artifact_file=`echo "${vp_artifact_file}" | sed -e 's:^!::g'` - else - vp_include_artifacts_file=true - fi - vp_artifacts_file_prop=true - - # shellcheck disable=SC2086 - find "${UAC_DIR}"/artifacts/${vp_artifact_file} -name "*.yaml" \ - -print >/dev/null 2>/dev/null - # shellcheck disable=SC2181 - if [ "$?" -gt 0 ]; then - printf %b "uac: profile file: no such \ -file or directory: '${UAC_DIR}/artifacts/${vp_artifact_file}'\n" >&2 - return 2 - fi - ;; - "__end__") - if [ ${vp_artifacts_file_prop} = false ]; then - printf %b "uac: profile file: 'artifacts' must not be \ -empty.\n" >&2 - return 152 - elif [ ${vp_include_artifacts_file} = false ]; then - printf %b "uac: profile file: 'artifacts' must have at \ -least one artifacts file.\n" >&2 - return 152 - fi - if [ -z "${vp_name}" ]; then - printf %b "uac: profile file: missing 'name' property.\n" >&2 - return 153 - fi - ;; - *) - printf %b "uac: profile file: invalid property \ -'${vp_key}'\n" >&2 - return 153 - esac - - done - -} \ No newline at end of file diff --git a/lib/verbose_msg.sh b/lib/verbose_msg.sh new file mode 100644 index 00000000..d3e166f8 --- /dev/null +++ b/lib/verbose_msg.sh @@ -0,0 +1,14 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Print verbose message. +# Arguments: +# string message: message +# Returns: +# none +_verbose_msg() +{ + __vm_message="${1:-}" + + ${__UAC_VERBOSE_MODE} && printf "%s\n" "${__vm_message}" +} \ No newline at end of file diff --git a/lib/zip_data.sh b/lib/zip_data.sh new file mode 100644 index 00000000..3a6a659d --- /dev/null +++ b/lib/zip_data.sh @@ -0,0 +1,32 @@ +#!/bin/sh +# SPDX-License-Identifier: Apache-2.0 + +# Zip files and directories. 
+# Arguments: +# string from_file: file containing the list of files to be zipped +# string destination_file: output file +# string password: password (optional) +# Returns: +# none +_zip_data() +{ + __zd_from_file="${1:-}" + __zd_destination_file="${2:-}" + __zd_password="${3:-}" + + if [ ! -f "${__zd_from_file}" ]; then + _error_msg "_zip_data: no such file or directory: '${__zd_from_file}'" + return 1 + fi + + __zd_zip_command="zip -6 -r \"${__zd_destination_file}\" -@ <\"${__zd_from_file}\"" + if [ -n "${__zd_password}" ]; then + __zd_zip_command="zip -6 -r --password \"${__zd_password}\" \"${__zd_destination_file}\" -@ <\"${__zd_from_file}\"" + fi + + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__zd_zip_command}" + eval "${__zd_zip_command}" \ + >>"${__UAC_TEMP_DATA_DIR}/zip_data.stdout.txt" \ + 2>>"${__UAC_TEMP_DATA_DIR}/zip_data.stderr.txt" + +} \ No newline at end of file diff --git a/logo/uac-dark.svg b/logo/uac-dark.svg new file mode 100644 index 00000000..be1a9674 --- /dev/null +++ b/logo/uac-dark.svg @@ -0,0 +1,96 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + diff --git a/logo/uac-light.svg b/logo/uac-light.svg new file mode 100644 index 00000000..5ef30779 --- /dev/null +++ b/logo/uac-light.svg @@ -0,0 +1,96 @@ + + + + + + + + + + image/svg+xml + + + + + + + + + + + + + + diff --git a/tools/avml/linux/avml b/tools/avml/linux/avml deleted file mode 100755 index 10087671..00000000 Binary files a/tools/avml/linux/avml and /dev/null differ diff --git a/tools/date_to_epoch.pl/date_to_epoch.pl b/tools/date_to_epoch_pl/date_to_epoch_pl old mode 100644 new mode 100755 similarity index 83% rename from tools/date_to_epoch.pl/date_to_epoch.pl rename to tools/date_to_epoch_pl/date_to_epoch_pl index 10f875ff..7b2266f8 --- a/tools/date_to_epoch.pl/date_to_epoch.pl +++ b/tools/date_to_epoch_pl/date_to_epoch_pl @@ -1,11 +1,11 @@ -#!/usr/bin/perl +#!/usr/bin/env perl # SPDX-License-Identifier: Apache-2.0 use strict; use Time::Local; sub usage { - 
print("Usage: date_to_epoch.pl YYYY-MM-DD\n"); + print("Usage: date_to_epoch_pl YYYY-MM-DD\n"); print("\n"); exit 1; } diff --git a/tools/find.pl/find.pl b/tools/find_pl/find_pl old mode 100644 new mode 100755 similarity index 71% rename from tools/find.pl/find.pl rename to tools/find_pl/find_pl index 8c85d9c0..f649a3d3 --- a/tools/find.pl/find.pl +++ b/tools/find_pl/find_pl @@ -3,59 +3,74 @@ use strict; use File::Find; +use Cwd (); sub usage { print <<"USAGE"; -Usage: find.pl starting-point [OPTIONS] +Usage: find_pl starting-point [expression] -OPERATORS - \\( expression \\) - Evaluates to the value True if the expression in parentheses is true. - Since parentheses are special to the shell, you will need to use backslashes \\( \\). +EXPRESSION + The part of the command line after the list of starting points is the expression. + This is a kind of query specification describing how we match files and what we do with the files that were matched. An expression is composed of a sequence of things: - ! not - -a and - -o or + Tests Tests return a true or false value, usually on the basis of some property of a file we are considering. The -empty test for example is true only + when the current file is empty. -OPTIONS - -maxdepth LEVELS - Descend at most levels (a non-negative integer) levels of directories below the starting-point. + Actions + Actions have side effects (such as printing something on the standard output) and return either true or false, usually based on whether or not they + are successful. The -print action for example prints the name of the current file on the standard output. - -name PATTERN - File name matches specified glob wildcard pattern (just as with using find). + Global options + Global options affect the operation of tests and actions specified on any part of the command line. Global options always return true. The -depth + option for example makes find traverse the file system in a depth-first order. 
- -iname PATTERN - Like -name, but the match is case insensitive. + Operators + Operators join together the other items within the expression. They include for example -o (meaning logical OR) and -a (meaning logical AND). + Where an operator is missing, -a is assumed. - -path PATTERN - File path matches specified glob wildcard pattern (just as with using find). +GLOBAL OPTIONS + -maxdepth LEVELS + Descend at most levels (a non-negative integer) levels of directories below the starting-point. - -ipath PATTERN - Like -path, but the match is case insensitive. +TESTS + Predicates which take a numeric argument N can come in three forms: + N is prefixed with a +: match values greater than N + N is prefixed with a -: match values less than N + N is not prefixed with either + or -: match only values equal to N -atime N File was last accessed N*24 hours ago. When find figures out how many 24-hour periods ago the file was last accessed, any fractional part is ignored, so to match -atime +1, a file has to have been accessed at least two days ago. + + -ctime N + File's status was last changed N*24 hours ago. See the comments for -atime to understand how rounding + affects the interpretation of file status change times. + + -iname PATTERN + Like -name, but the match is case insensitive. + + -ipath PATTERN + Like -path, but the match is case insensitive. -mtime N File's data was last modified N*24 hours ago. See the comments for -atime to understand how rounding affects the interpretation of file status change times. - -ctime N - File's status was last changed N*24 hours ago. See the comments for -atime to understand how rounding - affects the interpretation of file status change times. + -name PATTERN + File name matches specified glob wildcard pattern (just as with using find). - -type X - File is of type X: + -path PATTERN + File path matches specified glob wildcard pattern (just as with using find). 
- f regular file - d directory - l symbolic link - p named pipe (FIFO) - s socket - b block special - c character special + -perm MODE + File's permission bits are exactly mode (octal). Symbolic mode is not supported. + + -perm -MODE + All of the permission bits mode (octal) are set for the file. Symbolic mode is not supported. + + -perm /MODE + Any of the permission bits mode are set for the file. Symbolic mode is not supported. -size N[ckMG] File uses N units of space. The following suffixes can be used: @@ -65,17 +80,22 @@ sub usage { M for megabyte (MB, units of 1024 * 1024 bytes) G for gigabyte (GB, units of 1024 * 1024 * 1024 bytes) - -perm MODE - File's permission bits are exactly mode (octal). Symbolic mode is not supported. - - -perm -MODE - All of the permission bits mode (octal) are set for the file. Symbolic mode is not supported. + -type X + File is of type X: - -perm /MODE - Any of the permission bits mode are set for the file. Symbolic mode is not supported. + b block special + c character special + d directory + f regular file + l symbolic link + p named pipe (FIFO) + s socket - -prune - Always evaluates to the value True. If the file is a directory, do not descend into it. +ACTIONS + -exec COMMAND ; + Execute COMMAND; Any occurrence of {} in COMMAND will be replaced by the the path of the current file. + The ; must be passed as a distinct argument, so it may need to be surrounded by whitespace and/or quoted + from interpretation by the shell using a backslash (just as with using find(1)). -print Always evaluates to the value True. Print the full file name on the standard output, followed by a newline. @@ -83,10 +103,17 @@ sub usage { -print0 Always evaluates to the value True. Print the full file name on the standard output, followed by a null character. 
- Predicates which take a numeric argument N can come in three forms: - N is prefixed with a +: match values greater than N - N is prefixed with a -: match values less than N - N is not prefixed with either + or -: match only values equal to N + -prune + Always evaluates to the value True. If the file is a directory, do not descend into it. + +OPERATORS + \\( expression \\) + Evaluates to the value True if the expression in parentheses is true. + Since parentheses are special to the shell, you will need to use backslashes \\( \\). + + ! not + -a and + -o or USAGE } @@ -99,6 +126,53 @@ ($) "^$retval\\z"; } +my $cwd = Cwd::cwd(); +sub run_exec ($@) { + my @command = @_; + for my $word (@command) + { $word =~ s#{}#$File::Find::name#g } + chdir $cwd; + system @command; + chdir $File::Find::dir; + return !$?; +} + +# print file stats when "-exec stat_pl {}" is used +sub stat_pl { + my $filename = shift; + + my ($dev,$inode,$mode,$nlink,$uid,$gid,$rdev,$size,$atime,$mtime,$ctime,$blksize,$blocks) = lstat($filename); + + my @perms = qw(--- --x -w- -wx r-- r-x rw- rwx); + my @ftype = qw(. p c ? d ? b ? - ? l ? s ? ? ?); + + $ftype[0] = ''; + + my $setids = ($mode & 07000)>>9; + my @permstrs = @perms[($mode&0700)>>6, ($mode&0070)>>3, $mode&0007]; + my $ftype = $ftype[($mode & 0170000)>>12]; + + if ($setids) { + if ($setids & 01) { # Sticky bit + $permstrs[2] =~ s/([-x])$/$1 eq 'x' ? 't' : 'T'/e; + } + if ($setids & 04) { # Setuid bit + $permstrs[0] =~ s/([-x])$/$1 eq 'x' ? 's' : 'S'/e; + } + if ($setids & 02) { # Setgid bit + $permstrs[1] =~ s/([-x])$/$1 eq 'x' ? 's' : 'S'/e; + } + } + + my $mode_as_string = join '', $ftype, @permstrs; + + if (-l $filename) { + $filename = "$filename -> " . 
readlink "$filename"; + } + + print("0|$filename|$inode|$mode_as_string|$uid|$gid|$size|$atime|$mtime|$ctime|0\n"); +} + my @starting_points = (); my $wanted = "my (\$dev,\$inode,\$mode,\$nlink,\$uid,\$gid,\$rdev,\$size,\$atime,\$mtime,\$ctime,\$blksize,\$blocks); ((\$dev,\$inode,\$mode,\$nlink,\$uid,\$gid,\$rdev,\$size,\$atime,\$mtime,\$ctime,\$blksize,\$blocks) = lstat(\$File::Find::name)) && "; my $depth = 0; @@ -212,7 +286,20 @@ ($) $wanted .= "print(\"\$File::Find::name\\n\");"; $print_needed = 0; } elsif ($arg_option eq "-print0") { - $wanted .= "print(\"\$File::Find::name\");"; + $wanted .= "print(\"\$File::Find::name\0\");"; + $print_needed = 0; + } elsif ($arg_option eq "-exec") { + my @cmd = (); + while (@ARGV && $ARGV[0] ne ';') + { push(@cmd, shift) } + shift; + if ($cmd[0] eq "stat_pl" && $cmd[1] eq "{}") { + $wanted .= "stat_pl(\"\$File::Find::name\")"; + } else { + for (@cmd) + { s/'/\\'/g } + { local $" = "','"; $wanted .= "run_exec('@cmd')"; } + } $print_needed = 0; } else { die("find.pl: unrecognized option: $arg_option\n"); @@ -231,11 +318,10 @@ ($) } if ($print_needed) { - if ($wanted =~ /&&\s*$/) { - $wanted .= " print(\"\$File::Find::name\\n\");"; - } else { - $wanted .= " && print(\"\$File::Find::name\\n\");"; + if ($wanted !~ /&&\s*$/) { + $wanted .= "&& " } + $wanted .= "print(\"\$File::Find::name\\n\");"; } for my $starting_point (@starting_points) { diff --git a/tools/stat.pl/stat.pl b/tools/stat.pl/stat.pl deleted file mode 100644 index bdf0fda3..00000000 --- a/tools/stat.pl/stat.pl +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env perl -# SPDX-License-Identifier: Apache-2.0 - -my $filename = @ARGV[0]; - -if (defined $filename) { - - my ($dev,$inode,$mode,$nlink,$uid,$gid,$rdev,$size,$atime,$mtime,$ctime,$blksize,$blocks) = lstat($filename); - - my @perms = qw(--- --x -w- -wx r-- r-x rw- rwx); - my @ftype = qw(. p c ? d ? b ? - ? l ? s ? ? 
?); - - $ftype[0] = ''; - - my $setids = ($mode & 07000)>>9; - my @permstrs = @perms[($mode&0700)>>6, ($mode&0070)>>3, $mode&0007]; - my $ftype = $ftype[($mode & 0170000)>>12]; - - if ($setids) { - if ($setids & 01) { # Sticky bit - $permstrs[2] =~ s/([-x])$/$1 eq 'x' ? 't' : 'T'/e; - } - if ($setids & 04) { # Setuid bit - $permstrs[0] =~ s/([-x])$/$1 eq 'x' ? 's' : 'S'/e; - } - if ($setids & 02) { # Setgid bit - $permstrs[1] =~ s/([-x])$/$1 eq 'x' ? 's' : 'S'/e; - } - } - - $mode_as_string = join '', $ftype, @permstrs; - - if (-l $filename) { - $filename = "$filename -> " . readlink "$filename"; - } - - print("0|$filename|$inode|$mode_as_string|$uid|$gid|$size|$atime|$mtime|$ctime|0\n"); -} diff --git a/tools/stat_pl/stat_pl b/tools/stat_pl/stat_pl new file mode 100755 index 00000000..1f130154 --- /dev/null +++ b/tools/stat_pl/stat_pl @@ -0,0 +1,60 @@ +#!/usr/bin/env perl +# SPDX-License-Identifier: Apache-2.0 + +sub usage { + print <<"USAGE"; +Usage: stat_pl FILE... + +USAGE +} + +sub get_file_stats { + my $filename = shift; + + if (defined $filename) { + + my ($dev,$inode,$mode,$nlink,$uid,$gid,$rdev,$size,$atime,$mtime,$ctime,$blksize,$blocks) = lstat($filename); + + my @perms = qw(--- --x -w- -wx r-- r-x rw- rwx); + my @ftype = qw(. p c ? d ? b ? - ? l ? s ? ? ?); + + $ftype[0] = ''; + + my $setids = ($mode & 07000)>>9; + my @permstrs = @perms[($mode&0700)>>6, ($mode&0070)>>3, $mode&0007]; + my $ftype = $ftype[($mode & 0170000)>>12]; + + if ($setids) { + if ($setids & 01) { # Sticky bit + $permstrs[2] =~ s/([-x])$/$1 eq 'x' ? 't' : 'T'/e; + } + if ($setids & 04) { # Setuid bit + $permstrs[0] =~ s/([-x])$/$1 eq 'x' ? 's' : 'S'/e; + } + if ($setids & 02) { # Setgid bit + $permstrs[1] =~ s/([-x])$/$1 eq 'x' ? 's' : 'S'/e; + } + } + + $mode_as_string = join '', $ftype, @permstrs; + + if (-l $filename) { + $filename = "$filename -> " . 
readlink "$filename"; + } + + return $filename, $inode, $mode_as_string, $uid, $gid, $size, $atime, $mtime, $ctime; + } + +} + +# print usage if no arguments are provided +if (@ARGV < 1) { + usage(); + exit 1; +} + +while (@ARGV) { + my $file = shift; + my ($filename, $inode, $mode_as_string, $uid, $gid, $size, $atime, $mtime, $ctime) = get_file_stats($file); + print("0|$filename|$inode|$mode_as_string|$uid|$gid|$size|$atime|$mtime|$ctime|0\n"); +} diff --git a/tools/statx/linux/arm/statx b/tools/statx/linux/arm/statx index 22491b8f..f9ea1aa9 100755 Binary files a/tools/statx/linux/arm/statx and b/tools/statx/linux/arm/statx differ diff --git a/tools/statx/linux/arm64/statx b/tools/statx/linux/arm64/statx index f4be347b..ea06a39a 100755 Binary files a/tools/statx/linux/arm64/statx and b/tools/statx/linux/arm64/statx differ diff --git a/tools/statx/linux/i386/statx b/tools/statx/linux/i386/statx index 387e3509..539220fd 100755 Binary files a/tools/statx/linux/i386/statx and b/tools/statx/linux/i386/statx differ diff --git a/tools/statx/linux/mips/statx b/tools/statx/linux/mips/statx index 4aab919c..7573adf7 100755 Binary files a/tools/statx/linux/mips/statx and b/tools/statx/linux/mips/statx differ diff --git a/tools/statx/linux/mips64/statx b/tools/statx/linux/mips64/statx index 6d0de9ed..9fbf8163 100755 Binary files a/tools/statx/linux/mips64/statx and b/tools/statx/linux/mips64/statx differ diff --git a/tools/statx/linux/ppc/statx b/tools/statx/linux/ppc/statx index 72cc3efb..911d633d 100755 Binary files a/tools/statx/linux/ppc/statx and b/tools/statx/linux/ppc/statx differ diff --git a/tools/statx/linux/ppc64/statx b/tools/statx/linux/ppc64/statx index 3604edf6..5ec10786 100755 Binary files a/tools/statx/linux/ppc64/statx and b/tools/statx/linux/ppc64/statx differ diff --git a/tools/statx/linux/ppc64le/statx b/tools/statx/linux/ppc64le/statx index 6a9dd1f3..309d4ef8 100755 Binary files a/tools/statx/linux/ppc64le/statx and b/tools/statx/linux/ppc64le/statx 
differ diff --git a/tools/statx/linux/s390/statx b/tools/statx/linux/s390x/statx similarity index 62% rename from tools/statx/linux/s390/statx rename to tools/statx/linux/s390x/statx index 0741d0f0..760ac24a 100755 Binary files a/tools/statx/linux/s390/statx and b/tools/statx/linux/s390x/statx differ diff --git a/tools/statx/linux/sparc64/statx b/tools/statx/linux/sparc64/statx index e2c8b8f0..ea174988 100755 Binary files a/tools/statx/linux/sparc64/statx and b/tools/statx/linux/sparc64/statx differ diff --git a/tools/statx/linux/x86_64/statx b/tools/statx/linux/x86_64/statx index 72d0315c..64871ce9 100755 Binary files a/tools/statx/linux/x86_64/statx and b/tools/statx/linux/x86_64/statx differ diff --git a/tools/zip/esxi_linux/arm/zip b/tools/zip/esxi_linux/arm/zip new file mode 100755 index 00000000..74223719 Binary files /dev/null and b/tools/zip/esxi_linux/arm/zip differ diff --git a/tools/zip/esxi_linux/arm64/zip b/tools/zip/esxi_linux/arm64/zip new file mode 100755 index 00000000..64357413 Binary files /dev/null and b/tools/zip/esxi_linux/arm64/zip differ diff --git a/tools/zip/esxi_linux/i386/zip b/tools/zip/esxi_linux/i386/zip new file mode 100755 index 00000000..57d94512 Binary files /dev/null and b/tools/zip/esxi_linux/i386/zip differ diff --git a/tools/zip/esxi_linux/x86_64/zip b/tools/zip/esxi_linux/x86_64/zip new file mode 100755 index 00000000..dba041b5 Binary files /dev/null and b/tools/zip/esxi_linux/x86_64/zip differ diff --git a/tools/zip/freebsd_netscaler/i386/zip b/tools/zip/freebsd_netscaler/i386/zip new file mode 100755 index 00000000..0300bc68 Binary files /dev/null and b/tools/zip/freebsd_netscaler/i386/zip differ diff --git a/tools/zip/freebsd_netscaler/x86_64/zip b/tools/zip/freebsd_netscaler/x86_64/zip new file mode 100755 index 00000000..17473655 Binary files /dev/null and b/tools/zip/freebsd_netscaler/x86_64/zip differ diff --git a/uac b/uac index f31a82c0..c94ea07a 100755 --- a/uac +++ b/uac @@ -1,12 +1,9 @@ #!/bin/sh # 
SPDX-License-Identifier: Apache-2.0 -# shellcheck disable=SC2006,SC2181 +# shellcheck disable=SC1091,SC2006 # remove all existing aliases -unalias -a - -# use a safe umask for created files -umask 022 +\unalias -a # set locale LANG=C @@ -14,1001 +11,775 @@ export LANG LC_ALL=C export LC_ALL -# standards conformance for GNU utilities -_POSIX2_VERSION=199209 -export _POSIX2_VERSION +# UAC version information +__UAC_VERSION="DEVELOPMENT VERSION" -# get current working dir -# $PWD is not set in solaris 10 -UAC_DIR=`pwd` +# get UAC working dir +__UAC_DIR=`pwd` -# check if UAC is being executed from untarred directory -if [ ! -d "${UAC_DIR}/lib" ] || [ ! -d "${UAC_DIR}/artifacts" ]; then - printf %b "uac: required files not found. Make sure you are executing uac \ -from untarred directory.\n" >&2 +# check whether UAC is being executed from untarred directory +if [ ! -d "${__UAC_DIR}/artifacts" ] || [ ! -d "${__UAC_DIR}/bin" ] \ + || [ ! -d "${__UAC_DIR}/config" ] || [ ! -d "${__UAC_DIR}/lib" ] \ + || [ ! -d "${__UAC_DIR}/profiles" ] || [ ! -d "${__UAC_DIR}/tools" ]; then + printf "%s\n" "uac: required files not found. Make sure you are executing uac from untarred directory." >&2 exit 1 fi -# set path -ua_path="/usr/xpg4/bin:/usr/xpg6/bin:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin" -ua_path="${ua_path}:/usr/local/sbin:/usr/ucb:/usr/ccs/bin:/opt/bin:/opt/sbin" -ua_path="${ua_path}:/opt/local/bin:/snap/bin:/netscaler:/opt/homebrew/bin" -PATH="${ua_path}:${PATH}" -export PATH +# save current command line +# shellcheck disable=SC2124 +__ua_command_line="${0} ${@}" # load lib files -# shellcheck disable=SC1091 -. 
"${UAC_DIR}/lib/load_lib_files.sh" - -# global vars -UAC_VERSION="DEVELOPMENT VERSION" -MOUNT_POINT="/" -OPERATING_SYSTEM="" -SYSTEM_ARCH="" -START_DATE="" -START_DATE_DAYS="" -START_DATE_EPOCH="" -END_DATE="" -END_DATE_DAYS="" -END_DATE_EPOCH="" - -# the following variables are not always set on some systems, so they are set -# here to avoid the script exiting with errors after set -u is used below -# shellcheck disable=SC2269 -HOME="${HOME}" -# shellcheck disable=SC2269 -HOSTNAME="${HOSTNAME}" - -# local vars -ua_artifacts="" -ua_destination_dir="" -ua_run_as_non_root=false -ua_temp_dir="" -ua_output_base_filename="" -ua_output_filename="" -ua_case_number="" -ua_evidence_number="" -ua_evidence_description="" -ua_examiner="" -ua_notes="" -ua_hostname="" -ua_sftp_destination="" -ua_sftp_port="" -ua_sftp_identity_file="" -ua_s3_presigned_url="" -ua_s3_presigned_url_log_file="" -ua_azure_storage_sas_url="" -ua_azure_storage_sas_url_log_file="" -ua_ibm_cos_url="" -ua_ibm_cos_url_log_file="" -ua_ibm_cloud_api_key="" -ua_delete_local_on_successful_transfer=false -ua_debug_mode=false -ua_temp_data_dir_symlink_support=false +. 
"${__UAC_DIR}/lib/load_libraries.sh" -# load config file -load_config_file "${UAC_DIR}/config/uac.conf" || exit 1 +if [ -z "${1}" ]; then + _usage + _exit_fatal +fi -# get current command -# shellcheck disable=SC2124 -ua_command_line="$0 $@" - -# parse command line arguments -while [ "${1:-}" != "" ]; do - case "${1}" in - # optional arguments - "-h"|"--help") - usage - exit 1 - ;; - "-V"|"--version") - printf %b "UAC (Unix-like Artifacts Collector) ${UAC_VERSION}\n" - exit 0 - ;; - "--debug") - ua_debug_mode=true - ;; - # profiling arguments - "-p"|"--profile") - if [ -n "${2}" ]; then - # print available profiles - if [ "${2}" = "list" ]; then - list_profiles - exit 1 - fi - # get profile file based on the profile name - ua_profile_file=`get_profile_file "${2}"` - # exit if profile not found - if [ -z "${ua_profile_file}" ]; then - printf %b "uac: profile not found '${2}'\n" - exit 1 - fi - ua_profile_file="${UAC_DIR}/profiles/${ua_profile_file}" - # check if profile file is valid - validate_profile_file "${ua_profile_file}" || exit 1 - - # convert profile file into a comma separated list of artifacts - ua_artifacts_from_profile=`profile_file_to_artifact_list "${ua_profile_file}"` - ua_artifacts="${ua_artifacts},${ua_artifacts_from_profile}" - - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "-a"|"--artifacts") - if [ -n "${2}" ]; then - # print available artifacts - if [ "${2}" = "list" ]; then - list_artifacts - exit 1 - fi - ua_artifacts="${ua_artifacts},${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - # collection arguments - "-m"|"--mount-point") - if [ -n "${2}" ]; then - MOUNT_POINT="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "-s"|"--operating-system") - if [ -n "${2}" 
]; then - OPERATING_SYSTEM="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "-u"|"--run-as-non-root") - ua_run_as_non_root=true - ;; - "--hostname") - if [ -n "${2}" ]; then - ua_hostname="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--temp-dir") - if [ -n "${2}" ]; then - ua_temp_dir="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - # filter arguments - "--start-date"|"--date-range-start") - if [ -n "${2}" ]; then - START_DATE="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--end-date"|"--date-range-end") - if [ -n "${2}" ]; then - END_DATE="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - # informational arguments - "--case-number") - if [ -n "${2}" ]; then - ua_case_number="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--description") - if [ -n "${2}" ]; then - ua_evidence_description="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--evidence-number") - if [ -n "${2}" ]; then - ua_evidence_number="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--examiner") - if [ -n "${2}" ]; then - ua_examiner="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--notes") - if [ -n "${2}" ]; then - 
ua_notes="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - # remote transfer arguments - "--sftp") - if [ -n "${2}" ]; then - ua_sftp_destination="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--sftp-port") - if [ -n "${2}" ]; then - ua_sftp_port="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--sftp-identity-file") - if [ -n "${2}" ]; then - ua_sftp_identity_file="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--s3-presigned-url") - if [ -n "${2}" ]; then - ua_s3_presigned_url="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--s3-presigned-url-log-file") - if [ -n "${2}" ]; then - ua_s3_presigned_url_log_file="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--azure-storage-sas-url") - if [ -n "${2}" ]; then - ua_azure_storage_sas_url="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--azure-storage-sas-url-log-file") - if [ -n "${2}" ]; then - ua_azure_storage_sas_url_log_file="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--ibm-cos-url") - if [ -n "${2}" ]; then - ua_ibm_cos_url="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--ibm-cos-url-log-file") - if [ -n "${2}" ]; then - 
ua_ibm_cos_url_log_file="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--ibm-cloud-api-key") - if [ -n "${2}" ]; then - ua_ibm_cloud_api_key="${2}" - shift - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - "--delete-local-on-successful-transfer") - ua_delete_local_on_successful_transfer=true - ;; - # validation arguments - "--validate-artifacts-file") - if [ -n "${2}" ]; then - validate_artifacts_file "${2}" || exit 1 - printf %b "uac: artifacts file '${2}' successfully validated.\n" - exit 0 - else - printf %b "uac: option '${1}' requires an argument.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - # invalid arguments - -*) - printf %b "uac: invalid option '${1}'\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - ;; - # positional arguments - *) - if [ -z "${ua_destination_dir}" ]; then - ua_destination_dir="${1}" - else - printf %b "uac: invalid option '${1}'\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 - fi - ;; - esac - shift -done +__ua_path="/usr/xpg4/bin:/usr/xpg6/bin:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin" +__ua_path="${__ua_path}:/usr/local/sbin:/usr/ucb:/usr/ccs/bin:/opt/bin:/opt/sbin" +__ua_path="${__ua_path}:/opt/local/bin:/snap/bin:/netscaler:/opt/homebrew/bin" -# do not allow using undefined variables -# set -u cannot be set at the beginning of the file since it will fail on $@ -set -u +PATH="${__ua_path}:${PATH}" +export PATH -# exit if list of artifacts or destination dir is empty -if [ -z "${ua_artifacts}" ] || [ -z "${ua_destination_dir}" ] ; then - usage - exit 1 -fi +# constants +__UAC_LOG_FILE="uac.log" +__UAC_VERBOSE_CMD_PREFIX=" > " + +# set by _parse_command_line_arguments() +__UAC_VERBOSE_MODE=false +__UAC_DEBUG_MODE=false +__UAC_TRACE_MODE=false +__UAC_ARTIFACT_LIST="" 
+__UAC_OUTPUT_BASE_NAME="uac-%hostname%-%os%-%timestamp%" +__UAC_OUTPUT_FORMAT="" +__UAC_OUTPUT_EXTENSION="" +__UAC_OUTPUT_PASSWORD="" +__UAC_HASH_COLLECTED=false +__UAC_CONFIG_FILE="${__UAC_DIR}/config/uac.conf" +__UAC_MOUNT_POINT="/" +__UAC_OPERATING_SYSTEM="" +__UAC_MAX_THREADS="2" +__UAC_RUN_AS_NON_ROOT=false +__UAC_PROCESSING_UNITS="" +__UAC_HOSTNAME="" +__UAC_TEMP_DIR="" +__UAC_START_DATE="" +__UAC_START_DATE_DAYS=0 +__UAC_START_DATE_EPOCH=0 +__UAC_END_DATE="" +__UAC_END_DATE_DAYS=0 +__UAC_END_DATE_EPOCH=0 +__UAC_CASE_NUMBER="" +__UAC_EVIDENCE_DESCRIPTION="" +__UAC_EVIDENCE_NUMBER="" +__UAC_EXAMINER="" +__UAC_EVIDENCE_NOTES="" +__UAC_SFTP="" +__UAC_SFTP_PORT="" +__UAC_SFTP_IDENTITY_FILE="" +__UAC_SFTP_SSH_OPTIONS="" +__UAC_S3_PROVIDER="" +__UAC_S3_REGION="" +__UAC_S3_BUCKET="" +__UAC_S3_ACCESS_KEY="" +__UAC_S3_SECRET_KEY="" +__UAC_S3_TOKEN="" +__UAC_AWS_S3_PRESIGNED_URL="" +__UAC_AWS_S3_PRESIGNED_URL_LOG_FILE="" +__UAC_AZURE_STORAGE_SAS_URL="" +__UAC_AZURE_STORAGE_SAS_URL_LOG_FILE="" +__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER=false +__UAC_DESTINATION_DIR="" +__UAC_USER_HOME_LIST="" +__UAC_VALID_SHELL_ONLY_USER_HOME_LIST="" +__UAC_EXCLUDE_MOUNT_POINTS="" +__UAC_TEMP_DATA_DIR_NO_SYMLINK_SUPPORT=true + +_parse_command_line_arguments "${@}" || _exit_fatal -# sanitize artifact list -ua_artifacts=`sanitize_artifact_list "${ua_artifacts}"` +# do not allow using undefined variables +# set -u cannot be set at the beginning of the script since it will fail on $@ +set -u -OIFS="${IFS}" -IFS="," -# check if artifacts exist -for ua_artifact_file in ${ua_artifacts}; do - ua_artifact_file=`echo "${ua_artifact_file}" | sed -e 's:^!::'` - if artifact_file_exist "${ua_artifact_file}"; then - true - else - printf %b "uac: artifact file not found '${UAC_DIR}/artifacts/${ua_artifact_file}'\n" >&2 - exit 1 - fi -done -IFS="${OIFS}" +${__UAC_TRACE_MODE} && set -x -# check if destination directory exists -if [ ! 
-d "${ua_destination_dir}" ]; then - printf %b "uac: no such file or directory '${ua_destination_dir}'\n" >&2 - exit 1 +# exit whether list of artifacts or destination directory is empty +if [ -z "${__UAC_ARTIFACT_LIST}" ] || [ -z "${__UAC_DESTINATION_DIR}" ] ; then + _usage + _exit_fatal fi -# check if temp-dir exists -if [ -n "${ua_temp_dir}" ] && [ ! -d "${ua_temp_dir}" ]; then - printf %b "uac: no such file or directory '${ua_temp_dir}'\n" >&2 - exit 1 +# check whether destination directory exists +if [ ! -d "${__UAC_DESTINATION_DIR}" ]; then + _exit_fatal "no such file or directory '${__UAC_DESTINATION_DIR}'" fi - # get absolute destination directory path -ua_destination_dir=`get_absolute_directory_path "${ua_destination_dir}" 2>/dev/null` +__UAC_DESTINATION_DIR=`_get_absolute_path "${__UAC_DESTINATION_DIR}" 2>/dev/null` # get operating system if not set by --operating-system -if [ -z "${OPERATING_SYSTEM}" ]; then - OPERATING_SYSTEM=`get_operating_system` +if [ -z "${__UAC_OPERATING_SYSTEM}" ]; then + __UAC_OPERATING_SYSTEM=`_get_operating_system` fi + # check if operating system is supported -if is_valid_operating_system "${OPERATING_SYSTEM}"; then - true +if _is_in_list "${__UAC_OPERATING_SYSTEM}" "aix|esxi|freebsd|linux|macos|netbsd|netscaler|openbsd|solaris"; then + # get system arch + __UAC_SYSTEM_ARCH=`_get_system_arch "${__UAC_OPERATING_SYSTEM}"` else - printf %b "uac: invalid operating system '${OPERATING_SYSTEM}'. 
\ -Use '-s' option to set one.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 + _exit_fatal "invalid operating system '${__UAC_OPERATING_SYSTEM}'" fi -# check if start and end dates are valid -if [ -n "${START_DATE}" ]; then - START_DATE_EPOCH=`get_epoch_date "${START_DATE}"` || exit 1 - # shellcheck disable=SC2034 - START_DATE_DAYS=`get_days_since_date_until_now "${START_DATE}"` -fi -if [ -n "${END_DATE}" ]; then - END_DATE_EPOCH=`get_epoch_date "${END_DATE}"` || exit 1 - # shellcheck disable=SC2034 - END_DATE_DAYS=`get_days_since_date_until_now "${END_DATE}"` - if [ "${START_DATE_EPOCH}" -gt "${END_DATE_EPOCH}" ]; then - printf %b "uac: start date cannot be greater than end date.\n" >&2 - exit 1 - fi +# check if mount point exists +if [ ! -d "${__UAC_MOUNT_POINT}" ]; then + _exit_fatal "no such file or directory '${__UAC_MOUNT_POINT}'" fi +# get absolute mount point path +__UAC_MOUNT_POINT=`_get_absolute_path "${__UAC_MOUNT_POINT}" 2>/dev/null` -# check if mount point exists -MOUNT_POINT=`sanitize_path "${MOUNT_POINT}"` -if [ ! -d "${MOUNT_POINT}" ]; then - printf %b "uac: invalid mount point. \ -No such file or directory '${MOUNT_POINT}'\n" >&2 - exit 1 +# get hostname if not set by --hostname +# useful when running UAC against a mounted image file/disk +if [ -z "${__UAC_HOSTNAME}" ]; then + __UAC_HOSTNAME=`_get_hostname "${__UAC_MOUNT_POINT}"` fi -# cannot use not (!) as Solaris 10 does not support it -if is_running_with_root_privileges || "${ua_run_as_non_root}"; then - true +# check if max-threads is a positive integer (greater than zero) +if _is_digit "${__UAC_MAX_THREADS}" && [ "${__UAC_MAX_THREADS}" -ge 1 ]; then + true else - printf %b "uac: this script requires root privileges to run properly. \ -Use '-u' option to disable root user check.\n\ -Try 'uac --help' for more information.\n" >&2 - exit 1 + _exit_fatal "'max-threads' must be a positive integer." 
fi -# get hostname if not set by --hostname -# useful when running UAC against a mounted image file/disk -if [ -z "${ua_hostname}" ]; then - ua_hostname=`get_hostname 2>/dev/null` -fi - -# get current date and time string -ua_current_date_time=`date "+%Y%m%d%H%M%S"` -if [ -n "${ua_output_base_filename}" ]; then - ua_output_base_filename=`echo "${ua_output_base_filename}" \ - | sed -e "s|%hostname%|${ua_hostname}|g" \ - -e "s|%os%|${OPERATING_SYSTEM}|g" \ - -e "s|%timestamp%|${ua_current_date_time}|g"` - if [ -z "${ua_output_base_filename}" ]; then - printf %b "uac: invalid empty output filename\n" >&2 - exit 1 - fi +# check whether is running as root +if _is_root || ${__UAC_RUN_AS_NON_ROOT}; then + true else - ua_output_base_filename="uac-${ua_hostname}-${OPERATING_SYSTEM}-${ua_current_date_time}" + _exit_fatal "this script requires root privileges to run properly. \ +Use '-u' option to disable root user check." fi -output_file_exists "${ua_destination_dir}/${ua_output_base_filename}" && exit 1 +_verbose_msg "Validating artifact list..." +__ua_artifact_list=`_validate_artifact_list "${__UAC_ARTIFACT_LIST}"` || _exit_fatal -# check if destination directory's file system supports symlink creation -if [ -n "${ua_temp_dir}" ]; then - file_system_symlink_support "${ua_temp_dir}" >/dev/null 2>/dev/null \ - && ua_temp_data_dir_symlink_support=true - TEMP_DATA_DIR="${ua_temp_dir}/uac-data.tmp" -else - file_system_symlink_support "${ua_destination_dir}" >/dev/null 2>/dev/null \ - && ua_temp_data_dir_symlink_support=true - TEMP_DATA_DIR="${ua_destination_dir}/uac-data.tmp" +# load config file +_verbose_msg "Loading config file..." 
+_load_config_file "${__UAC_CONFIG_FILE}" || _exit_fatal + +# set tar as the default output format if none has been set via command line +if [ -z "${__UAC_OUTPUT_FORMAT}" ]; then + __UAC_OUTPUT_FORMAT="tar" fi +__UAC_OUTPUT_EXTENSION=`_is_output_format_supported "${__UAC_OUTPUT_FORMAT}" "${__UAC_OUTPUT_PASSWORD}"` || _exit_fatal + +# set output file basename +__ua_current_date_time=`date "+%Y%m%d%H%M%S"` +# shellcheck disable=SC2153 +__UAC_OUTPUT_BASE_NAME=`echo "${__UAC_OUTPUT_BASE_NAME}" \ + | sed -e "s|%hostname%|${__UAC_HOSTNAME}|g" \ + -e "s|%os%|${__UAC_OPERATING_SYSTEM}|g" \ + -e "s|%timestamp%|${__ua_current_date_time}|g"` +__UAC_OUTPUT_BASE_NAME=`_sanitize_output_file "${__UAC_OUTPUT_BASE_NAME}"` + +_output_exists "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" && _exit_fatal + +# add proper local 'bin' and 'tools' directory to path +__ua_bin_path=`_get_bin_path "${__UAC_OPERATING_SYSTEM}" "${__UAC_SYSTEM_ARCH}"` +PATH="${__ua_bin_path}:${PATH}" +PATH="${__UAC_DIR}/tools/date_to_epoch_pl:${__UAC_DIR}/tools/find_pl:${__UAC_DIR}/tools/stat_pl:${PATH}" +export PATH -# check available system tools -check_available_system_tools >/dev/null 2>/dev/null +# check if start and end dates are valid +if [ -n "${__UAC_START_DATE}" ]; then + __UAC_START_DATE_EPOCH=`_get_epoch_date "${__UAC_START_DATE}"` || _exit_fatal "invalid date '${__UAC_START_DATE}'." + __UAC_START_DATE_DAYS=`_get_days_since_date_until_now "${__UAC_START_DATE}"` || _exit_fatal +fi +if [ -n "${__UAC_END_DATE}" ]; then + __UAC_END_DATE_EPOCH=`_get_epoch_date "${__UAC_END_DATE}"` || _exit_fatal "invalid date '${__UAC_END_DATE}'." + __UAC_END_DATE_DAYS=`_get_days_since_date_until_now "${__UAC_END_DATE}"` || _exit_fatal + [ "${__UAC_START_DATE_EPOCH}" -gt "${__UAC_END_DATE_EPOCH}" ] && _exit_fatal "start date cannot be greater than end date." 
+fi # test the connectivity to remote sftp server -if [ -n "${ua_sftp_destination}" ]; then - if sftp_transfer_test "${ua_sftp_destination}" "${ua_sftp_port}" \ - "${ua_sftp_identity_file}" >/dev/null; then - true - else - exit 1 - fi +if [ -n "${__UAC_SFTP}" ]; then + command_exists "sftp" || _exit_fatal "cannot transfer to sftp server as sftp tool was not found" + printf "Validating connectivity to SFTP server...\n" + _sftp_transfer \ + "" \ + "${__UAC_SFTP}" \ + "${__UAC_SFTP_PORT}" \ + "${__UAC_SFTP_IDENTITY_FILE}" \ + "${__UAC_SFTP_SSH_OPTIONS}" \ + || _exit_fatal "error connecting to SFTP server" fi -# test the connectivity to S3 presigned url -if [ -n "${ua_s3_presigned_url}" ]; then - if ${CURL_TOOL_AVAILABLE}; then - if s3_presigned_url_transfer_test "${ua_s3_presigned_url}"; then - true - else - exit 1 - fi +# test the connectivity to S3 +if [ -n "${__UAC_S3_PROVIDER}" ]; then + if [ -z "${__UAC_S3_BUCKET}" ]; then + _exit_fatal "bucket name required" + fi + if [ "${__UAC_OUTPUT_FORMAT}" = "none" ]; then + _exit_fatal "cannot transfer to S3 as output format is none" + fi + if command_exists "curl" || command_exists "wget"; then + printf "Validating connectivity to S3 server...\n" + case "${__UAC_S3_PROVIDER}" in + "amazon") + command_exists "openssl" || _exit_fatal "cannot transfer to S3 as openssl tool was not found" + _s3_transfer_amazon \ + "" \ + "${__UAC_S3_REGION}" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_ACCESS_KEY}" \ + "${__UAC_S3_SECRET_KEY}" \ + || _exit_fatal "error connecting to S3 server" + ;; + "google") + _s3_transfer_google \ + "" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_TOKEN}" \ + || _exit_fatal "error connecting to S3 server" + ;; + "ibm") + _s3_transfer_ibm \ + "" \ + "${__UAC_S3_REGION}" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_TOKEN}" \ + || _exit_fatal "error connecting to S3 server" + ;; + esac else - printf %b "uac: cannot transfer to S3 presigned URL as 'curl' \ -tool was not found.\n" - exit 1 + _exit_fatal "cannot transfer to 
S3 as neither curl nor wget tools are available" fi fi -# test the connectivity to Azure Blob Storage SAS url -if [ -n "${ua_azure_storage_sas_url}" ]; then - if ${CURL_TOOL_AVAILABLE}; then - if azure_storage_sas_url_transfer_test "${ua_azure_storage_sas_url}"; then - true - else - exit 1 - fi - else - printf %b "uac: cannot transfer to Azure Blob Storage SAS URL as 'curl' \ -tool was not found.\n" - exit 1 +# test the connectivity to Amazon S3 presigned URL +if [ -n "${__UAC_AWS_S3_PRESIGNED_URL}" ]; then + if [ "${__UAC_OUTPUT_FORMAT}" = "none" ]; then + _exit_fatal "cannot transfer to Amazon S3 presigned URL as output format is none" fi + printf "Validating connectivity to AWS S3 presigned URL...\n" + _aws_s3_presigned_url_transfer \ + "" \ + "${__UAC_AWS_S3_PRESIGNED_URL}" \ + || _exit_fatal "error connecting to aws S3 presigned URL" fi -# test the connectivity to IBM Cloud Object Storage url -if [ -n "${ua_ibm_cos_url}" ]; then - if [ -n "${ua_ibm_cloud_api_key}" ]; then - if ${CURL_TOOL_AVAILABLE}; then - if ibm_cos_transfer_test "${ua_ibm_cos_url}" "${ua_ibm_cloud_api_key}"; then - true - else - exit 1 - fi - else - printf %b "uac: cannot transfer to IBM Cloud Object Storage as 'curl' \ -tool was not found.\n" - exit 1 - fi - else - printf %b "uac: cannot transfer to IBM Cloud Object Storage. No API \ -key / Bearer token provided. 
Please use --ibm-cloud-api-key option to \ -provide one.\n" - exit 1 +# test the connectivity to Azure Storage SAS URL +if [ -n "${__UAC_AZURE_STORAGE_SAS_URL}" ]; then + if [ "${__UAC_OUTPUT_FORMAT}" = "none" ]; then + _exit_fatal "cannot transfer to Azure Storage SAS URL as output format is none" fi + printf "Validating connectivity to Azure Storage SAS URL...\n" + _azure_storage_sas_url_transfer \ + "" \ + "${__UAC_AZURE_STORAGE_SAS_URL}" \ + || _exit_fatal "error connecting to Azure Storage SAS URL" fi -# remove any existing (old) collected data -if [ -d "${TEMP_DATA_DIR}" ]; then - rm -rf "${TEMP_DATA_DIR}" >/dev/null - if [ "$?" -gt 0 ]; then - printf %b "uac: cannot remove old temporary data directory from previous \ -collection '${TEMP_DATA_DIR}'.\n" - exit 1 +# use a safe umask for created files +umask 027 + +# check whether temp-dir exists +if [ -n "${__UAC_TEMP_DIR}" ]; then + if [ ! -d "${__UAC_TEMP_DIR}" ]; then + _exit_fatal "no such file or directory '${__UAC_TEMP_DIR}'" fi + # get absolute temp-dir path + __UAC_TEMP_DIR=`_get_absolute_path "${__UAC_TEMP_DIR}" 2>/dev/null` + __UAC_TEMP_DATA_DIR="${__UAC_TEMP_DIR}/uac-data.tmp" +else + __UAC_TEMP_DATA_DIR="${__UAC_DESTINATION_DIR}/uac-data.tmp" fi -# create temporary directory -mkdir "${TEMP_DATA_DIR}" >/dev/null -if [ "$?" -gt 0 ]; then - printf %b "uac: cannot create temporary data directory '${TEMP_DATA_DIR}'.\n" - exit 1 -fi +_verbose_msg "Creating temp directory..." 
+_init_temp_data_dir || _exit_fatal # clean up and exit if SIGINT (ctrl-c) is sent -trap terminate INT - -# set log files -# shellcheck disable=SC2034 -UAC_LOG_FILE="${TEMP_DATA_DIR}/uac.log" -UAC_STDERR_LOG_FILE="${TEMP_DATA_DIR}/uac.log.stderr" +trap _terminate INT # get current user -ua_current_user=`get_current_user 2>>"${UAC_STDERR_LOG_FILE}"` -# get system arch -SYSTEM_ARCH=`get_system_arch 2>>"${UAC_STDERR_LOG_FILE}"` - -# add local 'bin' directory to path -PATH="${UAC_DIR}/bin/${OPERATING_SYSTEM}/${SYSTEM_ARCH}:${PATH}" -# add 'avml' tool directory to path -if [ "${OPERATING_SYSTEM}" = "esxi" ] || [ "${OPERATING_SYSTEM}" = "linux" ]; then - PATH="${UAC_DIR}/tools/avml/linux:${PATH}" -fi -# add 'linux_procmemdump.sh' tool directory to path -PATH="${UAC_DIR}/tools/linux_procmemdump.sh:${PATH}" -export PATH +__ua_current_user=`_get_current_user` +# get the number of processing units available +__UAC_PROCESSING_UNITS=`_get_nproc` + +# show UAC banner and system information +printf "%s\n" "--------------------------------------------------------------------------------" +printf "%s\n" " ___ ___ " +printf "%s\n" " | \\ | | |" +printf "%s\n" " | / | | |" +printf "%s\n" " | | | |" +printf "%s\n" " |_ |___| |" +printf "%s\n" " |_________|" +printf "\n" +printf " Unix-like Artifacts Collector %s\n" "${__UAC_VERSION}" +printf "%s\n" "--------------------------------------------------------------------------------" +printf "Operating System : %s\n" "${__UAC_OPERATING_SYSTEM}" +printf "System Architecture : %s\n" "${__UAC_SYSTEM_ARCH}" +printf "Hostname : %s\n" "${__UAC_HOSTNAME}" +printf "Mount Point : %s\n" "${__UAC_MOUNT_POINT}" +printf "Running as : %s\n" "${__ua_current_user}" +printf "Temp Directory : %s\n" "${__UAC_TEMP_DATA_DIR}" +printf "%s\n" "--------------------------------------------------------------------------------" +_log_msg INF "Unix-like Artifacts Collector ${__UAC_VERSION}" +_log_msg INF "UAC directory: ${__UAC_DIR}" +_log_msg INF "Command line: 
${__ua_command_line}" +_log_msg INF "Operating system: ${__UAC_OPERATING_SYSTEM}" +_log_msg INF "System architecture: ${__UAC_SYSTEM_ARCH}" +_log_msg INF "Processing units: ${__UAC_PROCESSING_UNITS}" +_log_msg INF "Hostname: ${__UAC_HOSTNAME}" +_log_msg INF "Mount point: ${__UAC_MOUNT_POINT}" +_log_msg INF "Running as: ${__ua_current_user}" +_log_msg INF "Temp Directory: ${__UAC_TEMP_DATA_DIR}" +_log_msg INF "Output format: ${__UAC_OUTPUT_FORMAT}" +_log_msg INF "Current PID: ${$}" +_log_msg INF "PATH: ${PATH}" +_log_msg INF "Loading uac.conf settings" +_log_msg INF "Exclude path pattern: ${__UAC_CONF_EXCLUDE_PATH_PATTERN}" +_log_msg INF "Exclude name pattern: ${__UAC_CONF_EXCLUDE_NAME_PATTERN}" +_log_msg INF "Exclude file system: ${__UAC_CONF_EXCLUDE_FILE_SYSTEM}" +_log_msg INF "Hash algorithm: ${__UAC_CONF_HASH_ALGORITHM}" +_log_msg INF "Max depth: ${__UAC_CONF_MAX_DEPTH}" +_log_msg INF "Enable find mtime: ${__UAC_CONF_ENABLE_FIND_MTIME}" +_log_msg INF "Enable find atime: ${__UAC_CONF_ENABLE_FIND_ATIME}" +_log_msg INF "Enable find ctime: ${__UAC_CONF_ENABLE_FIND_CTIME}" + +_verbose_msg "Setting up tools and parameters..." 
+_log_msg INF "Setting up tools and parameters" + +# setup tools +_setup_tools 2>"${__UAC_TEMP_DATA_DIR}/setup_tools.stderr.txt" + +# get user list and their home directories (user:home) +__UAC_USER_HOME_LIST=`_get_user_home_list false "${__UAC_MOUNT_POINT}"` +__UAC_VALID_SHELL_ONLY_USER_HOME_LIST=`_get_user_home_list true "${__UAC_MOUNT_POINT}"` + +# exclude file systems / mount points (global) +if [ -n "${__UAC_CONF_EXCLUDE_FILE_SYSTEM}" ]; then + __UAC_EXCLUDE_MOUNT_POINTS=`_get_mount_point_by_file_system "${__UAC_CONF_EXCLUDE_FILE_SYSTEM}" "${__UAC_OPERATING_SYSTEM}"` +fi -printf %b "--------------------------------------------------------------------------------\n" -printf %b " __ __ _______ _______ \n" -printf %b " |: | | |: _ |: ____|\n" -printf %b " | |_| | | | | |____ \n" -printf %b " |_______|__| |__|_______|\n" -printf %b "\n" -printf %b " Unix-like Artifacts Collector ${UAC_VERSION}\n" -printf %b "--------------------------------------------------------------------------------\n" -printf %b "Operating System : ${OPERATING_SYSTEM}\n" -printf %b "System Architecture : ${SYSTEM_ARCH}\n" -printf %b "Hostname : ${ua_hostname}\n" -printf %b "Mount Point : ${MOUNT_POINT}\n" -printf %b "Running as : ${ua_current_user}\n" -printf %b "Temp Directory : ${TEMP_DATA_DIR}\n" -printf %b "--------------------------------------------------------------------------------\n" - -# start uac.log file -log_message INFO "UAC (Unix-like Artifacts Collector) ${UAC_VERSION}" -log_message INFO "Command line: ${ua_command_line}" -log_message INFO "Operating system: ${OPERATING_SYSTEM}" -log_message INFO "System architecture: ${SYSTEM_ARCH}" -log_message INFO "Hostname: ${ua_hostname}" -log_message INFO "Mount point: ${MOUNT_POINT}" -log_message INFO "Running as: ${ua_current_user}" -log_message INFO "Date range start: ${START_DATE}" -log_message INFO "Date range end: ${END_DATE}" -log_message INFO "Case number: ${ua_case_number}" -log_message INFO "Evidence number: 
${ua_evidence_number}" -log_message INFO "Description: ${ua_evidence_description}" -log_message INFO "Examiner: ${ua_examiner}" -log_message INFO "Notes: ${ua_notes}" -log_message INFO "Temp directory: ${TEMP_DATA_DIR}" -log_message INFO "Current PID: ${$}" - -# global exclusions from uac.conf -log_message INFO "Loading uac.conf settings" -log_message INFO "Global exclude path pattern: ${GLOBAL_EXCLUDE_PATH_PATTERN}" -log_message INFO "Global exclude name pattern: ${GLOBAL_EXCLUDE_NAME_PATTERN}" -log_message INFO "Global exclude file system: ${GLOBAL_EXCLUDE_FILE_SYSTEM}" -# get mount points to globally exclude from collection -GLOBAL_EXCLUDE_MOUNT_POINT="" -if [ -n "${GLOBAL_EXCLUDE_FILE_SYSTEM}" ]; then - GLOBAL_EXCLUDE_MOUNT_POINT=`get_mount_point_by_file_system \ - "${GLOBAL_EXCLUDE_FILE_SYSTEM}" 2>>"${UAC_STDERR_LOG_FILE}"` -fi -log_message INFO "Global exclude mount point: ${GLOBAL_EXCLUDE_MOUNT_POINT}" -log_message INFO "Hash algorithm: ${HASH_ALGORITHM}" -log_message INFO "Enable find mtime: ${ENABLE_FIND_MTIME}" -log_message INFO "Enable find atime: ${ENABLE_FIND_ATIME}" -log_message INFO "Enable find ctime: ${ENABLE_FIND_CTIME}" -log_message INFO "'find' opperators support: ${FIND_OPERATORS_SUPPORT}" -log_message INFO "'find -path' support: ${FIND_PATH_SUPPORT}" -log_message INFO "'find -type' support: ${FIND_TYPE_SUPPORT}" -log_message INFO "'find -maxdepth' support: ${FIND_MAXDEPTH_SUPPORT}" -log_message INFO "'find -size' support: ${FIND_SIZE_SUPPORT}" -log_message INFO "'find -perm' support: ${FIND_PERM_SUPPORT}" -log_message INFO "'find -atime' support: ${FIND_ATIME_SUPPORT}" -log_message INFO "'find -mtime' support: ${FIND_MTIME_SUPPORT}" -log_message INFO "'find -ctime' support: ${FIND_CTIME_SUPPORT}" -log_message INFO "MD5 hashing tool: ${MD5_HASHING_TOOL}" -log_message INFO "SHA1 hashing tool: ${SHA1_HASHING_TOOL}" -log_message INFO "SHA256 hashing tool: ${SHA256_HASHING_TOOL}" -log_message INFO "'gzip' tool available: ${GZIP_TOOL_AVAILABLE}" 
-log_message INFO "'perl' tool available: ${PERL_TOOL_AVAILABLE}" -log_message INFO "'stat' tool available: ${STAT_TOOL_AVAILABLE}" -log_message INFO "'stat' btime support: ${STAT_BTIME_SUPPORT}" -log_message INFO "'statx' tool available: ${STATX_TOOL_AVAILABLE}" - -log_message INFO "PATH: ${PATH}" - -# add UAC_DIR abd TEMP_DATA_DIR to GLOBAL_EXCLUDE_PATH_PATTERN -if [ -n "${GLOBAL_EXCLUDE_PATH_PATTERN}" ]; then - GLOBAL_EXCLUDE_PATH_PATTERN="${GLOBAL_EXCLUDE_PATH_PATTERN},${UAC_DIR},${TEMP_DATA_DIR}" +# check whether temp data dir's file system supports symlink creation. +if ln -s "${__UAC_DIR}/uac" "${__UAC_TEMP_DATA_DIR}/uac-symlink-support-test.tmp" >/dev/null; then + rm "${__UAC_TEMP_DATA_DIR}/uac-symlink-support-test.tmp" >/dev/null + __UAC_TEMP_DATA_DIR_NO_SYMLINK_SUPPORT=false +fi + +_log_msg INF "find operators support: ${__UAC_TOOL_FIND_OPERATORS_SUPPORT}" +_log_msg INF "find -path support: ${__UAC_TOOL_FIND_PATH_SUPPORT}" +_log_msg INF "find -prune support: ${__UAC_TOOL_FIND_PRUNE_SUPPORT}" +_log_msg INF "find -size support: ${__UAC_TOOL_FIND_SIZE_SUPPORT}" +_log_msg INF "find -maxdepth support: ${__UAC_TOOL_FIND_MAXDEPTH_SUPPORT}" +_log_msg INF "find -perm support: ${__UAC_TOOL_FIND_PERM_SUPPORT}" +_log_msg INF "find -type support: ${__UAC_TOOL_FIND_TYPE_SUPPORT}" +_log_msg INF "find -mtime support: ${__UAC_TOOL_FIND_MTIME_SUPPORT}" +_log_msg INF "find -atime support: ${__UAC_TOOL_FIND_ATIME_SUPPORT}" +_log_msg INF "find -ctime support: ${__UAC_TOOL_FIND_CTIME_SUPPORT}" +_log_msg INF "find -print0 support: ${__UAC_TOOL_FIND_PRINT0_SUPPORT}" +if command_exists "tar"; then + _log_msg INF "tar command exists: true" +else + _log_msg INF "tar command exists: false" +fi +if ${__UAC_TOOL_TAR_NO_FROM_FILE_SUPPORT}; then + _log_msg INF "tar load from file support: false" +else + _log_msg INF "tar load from file support: true" +fi +if command_exists "gzip"; then + _log_msg INF "gzip command exists: true" +else + _log_msg INF "gzip command exists: false" +fi +if 
command_exists "zip"; then + _log_msg INF "zip command exists: true" +else + _log_msg INF "zip command exists: false" +fi +if command_exists "perl"; then + _log_msg INF "perl command exists: true" else - GLOBAL_EXCLUDE_PATH_PATTERN="${UAC_DIR},${TEMP_DATA_DIR}" + _log_msg INF "perl command exists: false" fi +_log_msg INF "xargs -0 support: ${__UAC_TOOL_XARGS_NULL_DELIMITER_SUPPORT}" +if [ -n "${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}" ]; then + _log_msg INF "xargs -P support: true" +else + _log_msg INF "xargs -P support: false" +fi +_log_msg INF "MD5 hashing tool: ${__UAC_TOOL_MD5_BIN}" +_log_msg INF "SHA1 hashing tool: ${__UAC_TOOL_SHA1_BIN}" +_log_msg INF "SHA256 hashing tool: ${__UAC_TOOL_SHA256_BIN}" +# shellcheck disable=SC2153 +_log_msg INF "stat tool: ${__UAC_TOOL_STAT_BIN}${__UAC_TOOL_STAT_PARAMS:+ }${__UAC_TOOL_STAT_PARAMS}" +if ${__UAC_TEMP_DATA_DIR_NO_SYMLINK_SUPPORT}; then + _log_msg INF "Temp directory symlink creation support: false" +else + _log_msg INF "Temp directory symlink creation support: true" +fi +_log_msg INF "Exclude mount points: ${__UAC_EXCLUDE_MOUNT_POINTS}" -# get all user/home list -# shellcheck disable=SC2034 -USER_HOME_LIST=`get_user_home_list 2>>"${UAC_STDERR_LOG_FILE}"` -# get user/home list skipping users with non-interactive shells -# shellcheck disable=SC2034 -VALID_SHELL_ONLY_USER_HOME_LIST=`get_user_home_list true 2>>"${UAC_STDERR_LOG_FILE}"` +_verbose_msg "Building artifact list..." 
+_log_msg INF "Building artifact list" + +# build artifact list based on the operating system +# skip artifacts that are not applicable to the target operating system +__ua_artifact_list=`_build_artifact_list "${__ua_artifact_list}" "${__UAC_OPERATING_SYSTEM}"` || _exit_fatal # acquisition start date -ua_acq_start_date=`date "+%a %b %d %H:%M:%S %Y %z" 2>>"${UAC_STDERR_LOG_FILE}"` +__ua_acquisition_start_date=`date "+%a %b %d %H:%M:%S %Y %z"` # acquisition start epoch date -ua_acq_start_date_epoch=`get_epoch_date 2>>"${UAC_STDERR_LOG_FILE}"` - -log_message INFO "Artifacts collection started" -printf %b "Artifacts collection started...\n" +__ua_acquisition_start_date_epoch=`_get_epoch_date` -# create artifact list -create_artifact_list "${ua_artifacts}" \ - >"${TEMP_DATA_DIR}/.artifacts.tmp" \ - 2>>"${UAC_STDERR_LOG_FILE}" +__ua_collection_progress=0 +__ua_collection_progress_total=`echo "${__ua_artifact_list}" | wc -l | awk '{print $1}'` +_log_msg INF "${__ua_collection_progress_total} artifact(s) selected" -ua_progress_current=0 -ua_progress_total=`wc -l "${TEMP_DATA_DIR}/.artifacts.tmp" | awk '{print $1}'` +printf "%s\n" "Artifacts collection started" +_log_msg INF "Artifacts collection started" -# enable debug mode if it is set to true -${ua_debug_mode} && set -x +# store artifact list into a file as netscaler raises a segmentation faul in the following situation: +# echo "${__ua_artifact_list}" | while read __ua_artifact; do .....; done +echo "${__ua_artifact_list}" >"${__UAC_TEMP_DATA_DIR}/artifact_list.tmp" # shellcheck disable=SC2162 -while read ua_artifact_file || [ -n "${ua_artifact_file}" ]; do - log_message INFO "Parsing artifacts file '${ua_artifact_file}'" +while read __ua_artifact || [ -n "${__ua_artifact}" ]; do + __ua_artifact_without_uac_dir=`echo "${__ua_artifact}" | sed -e "s|^${__UAC_DIR}/artifacts/||"` # shellcheck disable=SC2003,SC2086 - ua_progress_current=`expr ${ua_progress_current} + 1` - ua_progress_timestamp=`date "+%Y-%m-%d %H:%M:%S %z"` 
- printf "[%03d/%03d] %b %b\n" "${ua_progress_current}" \ - "${ua_progress_total}" "${ua_progress_timestamp}" "${ua_artifact_file}" - ua_artifacts_root_output_directory=`dirname "${ua_artifact_file}"` - parse_artifacts_file "${UAC_DIR}/artifacts/${ua_artifact_file}" \ - "${ua_artifacts_root_output_directory}" - find "${TEMP_DATA_DIR}/${ua_artifacts_root_output_directory}" -type f -print \ - | sed -e "s|^${TEMP_DATA_DIR}/||" >>"${TEMP_DATA_DIR}/.output_file.tmp" -done <"${TEMP_DATA_DIR}/.artifacts.tmp" 2>>"${UAC_STDERR_LOG_FILE}" - -# disable debug mode -${ua_debug_mode} && set +x - -# acquisition end date -ua_acq_end_date=`date "+%a %b %d %H:%M:%S %Y %z" 2>>"${UAC_STDERR_LOG_FILE}"` -# acquisition end epoch date -ua_acq_end_date_epoch=`get_epoch_date 2>>"${UAC_STDERR_LOG_FILE}"` + __ua_collection_progress=`expr ${__ua_collection_progress} + 1` + __ua_collection_progress_timestamp=`date "+%Y-%m-%d %H:%M:%S %z"` + printf "[%03d/%03d] %s %s\n" "${__ua_collection_progress}" "${__ua_collection_progress_total}" "${__ua_collection_progress_timestamp}" "${__ua_artifact_without_uac_dir}" + _log_msg INF "Parsing ${__ua_artifact_without_uac_dir}" + _parse_artifact "${__ua_artifact}" +done <"${__UAC_TEMP_DATA_DIR}/artifact_list.tmp" + +# acquisition start date +__ua_acquisition_end_date=`date "+%a %b %d %H:%M:%S %Y %z"` +# acquisition start epoch date +__ua_acquisition_end_date_epoch=`_get_epoch_date` # calculate running time # shellcheck disable=SC2003 -ua_total_running_time=`expr "${ua_acq_end_date_epoch}" - "${ua_acq_start_date_epoch}"` - -printf %b "--------------------------------------------------------------------------------\n" -log_message INFO "Artifacts collection complete. \ -Total execution time: ${ua_total_running_time} seconds" - -printf %b "Artifacts collection complete. 
\ -Total execution time: ${ua_total_running_time} seconds\n" - -# acquisition log file name -ua_acquisition_log="${ua_output_base_filename}.log" -# output file hash -ua_output_file_hash="-" - -# sort and uniq -sort_uniq_file "${TEMP_DATA_DIR}/.output_file.tmp" 2>>"${UAC_STDERR_LOG_FILE}" -# add uac.log.stderr to the list of files to be archived/copied within the output file -echo "uac.log.stderr" | cat - "${TEMP_DATA_DIR}/.output_file.tmp" >"${TEMP_DATA_DIR}/.temp_output_file.tmp" -# add uac.log to the list of files to be archived/copied within the output file -echo "uac.log" | cat - "${TEMP_DATA_DIR}/.temp_output_file.tmp" >"${TEMP_DATA_DIR}/.output_file.tmp" - -# create output file -if command_exists "tar"; then - if [ -f "${TEMP_DATA_DIR}/.files.tmp" ]; then - grep -v -E "${UAC_DIR}|/uac-data.tmp/" "${TEMP_DATA_DIR}/.files.tmp" >"${TEMP_DATA_DIR}/.files_uac-data.tmp_removed.tmp" - cp "${TEMP_DATA_DIR}/.files_uac-data.tmp_removed.tmp" "${TEMP_DATA_DIR}/.files.tmp" - # sort and uniq - sort_uniq_file "${TEMP_DATA_DIR}/.files.tmp" 2>>"${UAC_STDERR_LOG_FILE}" - if ${ua_temp_data_dir_symlink_support}; then - # create symbolic link to / - ln -s "${MOUNT_POINT}" "${TEMP_DATA_DIR}/[root]" 2>>"${UAC_STDERR_LOG_FILE}" - else - # copy files to uac-data.tmp/[root] - printf %b "Copying files to ${TEMP_DATA_DIR}/[root]. Please wait...\n" - copy_data "${TEMP_DATA_DIR}/.files.tmp" "${TEMP_DATA_DIR}/[root]" \ - 2>>"${UAC_STDERR_LOG_FILE}" - fi - # add [root] string to the beginning of each entry in .files.tmp - # and add them to the list of files to be archived within the output file - sed -e "s:^${MOUNT_POINT}:\[root\]/:" -e 's://*:/:g' "${TEMP_DATA_DIR}/.files.tmp" \ - >>"${TEMP_DATA_DIR}/.output_file.tmp" - fi - - # archive (and compress) collected artifacts to output file - printf %b "Creating output file. 
Please wait...\n" - cd "${TEMP_DATA_DIR}" || exit 1 - - if ${GZIP_TOOL_AVAILABLE}; then - ua_output_filename="${ua_output_base_filename}.tar.gz" - archive_compress_data ".output_file.tmp" \ - "${ua_destination_dir}/${ua_output_filename}" 2>/dev/null - else - ua_output_filename="${ua_output_base_filename}.tar" - archive_data ".output_file.tmp" \ - "${ua_destination_dir}/${ua_output_filename}" 2>/dev/null +__ua_total_execution_time=`expr "${__ua_acquisition_end_date_epoch}" - "${__ua_acquisition_start_date_epoch}"` + +printf "%s\n" "--------------------------------------------------------------------------------" +printf "Artifacts collection complete. Total execution time: %d seconds\n" "${__ua_total_execution_time}" +_log_msg INF "Artifacts collection complete. Total execution time: ${__ua_total_execution_time} seconds" + +# prepare output +printf "Filtering out non-regular files. Please wait...\n" + +# get a list of all files and directories whithin __UAC_TEMP_DATA_DIR/collected directory +find "${__UAC_TEMP_DATA_DIR}/collected" -print >"${__UAC_TEMP_DATA_DIR}/find_collected.tmp" 2>/dev/null +_remove_non_regular_files "${__UAC_TEMP_DATA_DIR}/find_collected.tmp" + +mv "${__UAC_TEMP_DATA_DIR}/${__UAC_LOG_FILE}" "${__UAC_TEMP_DATA_DIR}/collected" 2>/dev/null +# add files to the beginning of the archive file +{ + echo "${__UAC_TEMP_DATA_DIR}/collected/${__UAC_LOG_FILE}"; + echo "${__UAC_TEMP_DATA_DIR}/collected/hash_list.md5"; + echo "${__UAC_TEMP_DATA_DIR}/collected/hash_list.sha1"; + echo "${__UAC_TEMP_DATA_DIR}/collected/hash_list.sha256"; +} >"${__UAC_TEMP_DATA_DIR}/collected.tmp" +cat "${__UAC_TEMP_DATA_DIR}/find_collected.tmp" >>"${__UAC_TEMP_DATA_DIR}/collected.tmp" + +# remove ${__UAC_TEMP_DATA_DIR}/collected from the beginning of any entry +# remove entries that match ${__UAC_DIR}, /uac-data.tmp/, uac-x-x-x, and uac-%os%-%hostname%-%timestamp% +sed -e "s|^${__UAC_TEMP_DATA_DIR}/collected||" \ + -e 's|//*|/|g' \ + -e 's|^/||' \ + 
"${__UAC_TEMP_DATA_DIR}/collected.tmp" \ + | grep -v -E "${__UAC_DIR}|/uac-data.tmp/|uac-[0-9]+.[0-9]+.[0-9]+-?|uac-[[:print:]]+-[[:print:]]+[0-9]{14}" \ + >"${__UAC_TEMP_DATA_DIR}/output.tmp" + +_remove_non_regular_files "${__UAC_TEMP_DATA_DIR}/file_collector.tmp" + +# remove ${__UAC_MOUNT_POINT}|\[root\]/ from the beginning of any entry +# remove entries that match ${__UAC_DIR}, /uac-data.tmp/, uac-x-x-x, and uac-%os%-%hostname%-%timestamp% +sed -e "s|^${__UAC_MOUNT_POINT}|\[root\]/|" \ + -e 's|//*|/|g' \ + -e 's|^/||' \ + "${__UAC_TEMP_DATA_DIR}/file_collector.tmp" \ + | grep -v -E "${__UAC_DIR}|/uac-data.tmp/|uac-[0-9]+.[0-9]+.[0-9]+-?|uac-[[:print:]]+-[[:print:]]+[0-9]{14}" \ + >>"${__UAC_TEMP_DATA_DIR}/output.tmp" + +if [ "${__UAC_OUTPUT_FORMAT}" = "none" ] || ${__UAC_HASH_COLLECTED} || ${__UAC_TEMP_DATA_DIR_NO_SYMLINK_SUPPORT} || ${__UAC_TOOL_TAR_NO_FROM_FILE_SUPPORT}; then + printf "Copying files to '%s'. Please wait...\n" "${__UAC_TEMP_DATA_DIR}/collected/[root]" + _copy_data "${__UAC_TEMP_DATA_DIR}/file_collector.tmp" "${__UAC_TEMP_DATA_DIR}/collected/[root]" \ + 2>>"${__UAC_TEMP_DATA_DIR}/copy_data.stderr.txt" + + if ${__UAC_HASH_COLLECTED}; then + printf "Hashing collected files. 
Please wait...\n" + cd "${__UAC_TEMP_DATA_DIR}/collected" || _exit_fatal "cd: ${__UAC_TEMP_DATA_DIR}/collected: No such file or directory" + + for __hc_algorithm in `echo "${__UAC_CONF_HASH_ALGORITHM}" | sed -e 's:|: :g'`; do + __hc_hashing_tool="" + if [ "${__hc_algorithm}" = "md5" ]; then + __hc_hashing_tool="${__UAC_TOOL_MD5_BIN}" + elif [ "${__hc_algorithm}" = "sha1" ]; then + __hc_hashing_tool="${__UAC_TOOL_SHA1_BIN}" + elif [ "${__hc_algorithm}" = "sha256" ]; then + __hc_hashing_tool="${__UAC_TOOL_SHA256_BIN}" + fi + + if [ -n "${__hc_hashing_tool}" ]; then + __hc_hash_command="sed 's|.|\\\\&|g' \"${__UAC_TEMP_DATA_DIR}/output.tmp\" | xargs ${__UAC_TOOL_XARGS_MAX_PROCS_PARAM}${__UAC_TOOL_XARGS_MAX_PROCS_PARAM:+ }${__hc_hashing_tool}" + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__hc_hash_command}" + eval "${__hc_hash_command}" \ + >"${__UAC_TEMP_DATA_DIR}/collected/hash_list.${__hc_algorithm}" \ + 2>>"${__UAC_TEMP_DATA_DIR}/hash_collected.stderr.txt" + fi + done + cd "${__UAC_DIR}" || _exit_fatal "cd: ${__UAC_DIR}: No such file or directory" fi +else + ln -s "${__UAC_MOUNT_POINT}" "${__UAC_TEMP_DATA_DIR}/collected/[root]" +fi - if [ -f "${ua_destination_dir}/${ua_output_filename}" ]; then - printf %b "Output file created '${ua_destination_dir}/${ua_output_filename}'\n" - cd "${UAC_DIR}" || exit 1 - if ${ua_debug_mode}; then - printf %b "Temporary directory not removed '${TEMP_DATA_DIR}'\n" - else - rm -rf "${TEMP_DATA_DIR}" >/dev/null 2>/dev/null - if [ -d "${TEMP_DATA_DIR}" ]; then - printf %b "Cannot remove temporary directory '${TEMP_DATA_DIR}'\n" - fi - fi - # hash output file - printf %b "Hashing output file. Please wait...\n" - cd "${ua_destination_dir}" || exit 1 - ua_output_file_hash=`${MD5_HASHING_TOOL} "${ua_output_filename}"` - cd "${UAC_DIR}" || exit 1 +if [ "${__UAC_OUTPUT_FORMAT}" = "none" ]; then + printf "Moving files to output directory. 
Please wait...\n" + mv "${__UAC_TEMP_DATA_DIR}/collected" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}" + if [ -d "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}" ]; then + printf "Output directory created '%s'\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}" + _remove_temp_data_dir else - printf %b "Cannot create output file\n" - printf %b "Please check collected artifacts in '${TEMP_DATA_DIR}'\n" - cd "${UAC_DIR}" && exit 1 + _error_msg "cannot create output directory '%s'\nPlease check collected artifacts in '%s'\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}" "${__UAC_TEMP_DATA_DIR}" fi else - ua_output_filename="${ua_output_base_filename}" - printf %b "'tar' not found. Copying collected artifacts to '${ua_destination_dir}/${ua_output_filename}'. Please wait...\n" - if [ -f "${TEMP_DATA_DIR}/.files.tmp" ]; then - grep -v -E "${UAC_DIR}|/uac-data.tmp/" "${TEMP_DATA_DIR}/.files.tmp" >"${TEMP_DATA_DIR}/.files_uac-data.tmp_removed.tmp" - cp "${TEMP_DATA_DIR}/.files_uac-data.tmp_removed.tmp" "${TEMP_DATA_DIR}/.files.tmp" - # sort and uniq - sort_uniq_file "${TEMP_DATA_DIR}/.files.tmp" 2>>"${UAC_STDERR_LOG_FILE}" - copy_data "${TEMP_DATA_DIR}/.files.tmp" "${ua_destination_dir}/${ua_output_filename}/[root]" \ - 2>>"${UAC_STDERR_LOG_FILE}" + printf "Creating output file. 
Please wait...\n" + cd "${__UAC_TEMP_DATA_DIR}/collected" || _exit_fatal "cd: ${__UAC_DIR}: No such file or directory" + case "${__UAC_OUTPUT_EXTENSION}" in + "tar") + _tar_data "${__UAC_TEMP_DATA_DIR}/output.tmp" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}" + ;; + "tar.gz") + _tar_gz_data "${__UAC_TEMP_DATA_DIR}/output.tmp" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}" + ;; + "zip") + _zip_data "${__UAC_TEMP_DATA_DIR}/output.tmp" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}" \ + "${__UAC_OUTPUT_PASSWORD}" + ;; + esac + cd "${__UAC_DIR}" || _exit_fatal "cd: ${__UAC_DIR}: No such file or directory" + if [ -f "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}" ]; then + printf "Output file created '%s'\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}" + _remove_temp_data_dir + else + _error_msg "cannot create output file '%s'\nPlease check collected artifacts in '%s'\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}" "${__UAC_TEMP_DATA_DIR}" fi +fi - cd "${TEMP_DATA_DIR}" || exit 1 - copy_data "${TEMP_DATA_DIR}/.output_file.tmp" "${ua_destination_dir}/${ua_output_filename}" \ - 2>>"${UAC_STDERR_LOG_FILE}" - ua_file_count=`find "${ua_destination_dir}/${ua_output_filename}" -print | wc -l` - if [ "${ua_file_count}" -gt 2 ]; then - printf %b "Please check collected artifacts in '${ua_destination_dir}/${ua_output_filename}'\n" - cd "${UAC_DIR}" || exit 1 - if ${ua_debug_mode}; then - printf %b "Temporary directory not removed '${TEMP_DATA_DIR}'\n" - else - rm -rf "${TEMP_DATA_DIR}" >/dev/null 2>/dev/null - if [ -d "${TEMP_DATA_DIR}" ]; then - printf %b "Cannot remove temporary directory '${TEMP_DATA_DIR}'\n" - fi +# hash output file +__ua_output_file_computed_hashes="" +if [ "${__UAC_OUTPUT_FORMAT}" != "none" ]; then + printf "Hashing output file. 
Please wait...\n" + for __ua_algorithm in `echo "${__UAC_CONF_HASH_ALGORITHM}" | sed -e 's:|: :g'`; do + __ua_hash="" + if [ "${__ua_algorithm}" = "md5" ] && [ -n "${__UAC_TOOL_MD5_BIN}" ]; then + __ua_hash_command="${__UAC_TOOL_MD5_BIN} \"${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}\"" + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__ua_hash_command}" + __ua_hash=`eval "${__ua_hash_command}" | _grep_o "[0-9a-fA-F]\{32\}"` + elif [ "${__ua_algorithm}" = "sha1" ] && [ -n "${__UAC_TOOL_SHA1_BIN}" ]; then + __ua_hash_command="${__UAC_TOOL_SHA1_BIN} \"${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}\"" + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__ua_hash_command}" + __ua_hash=`eval "${__ua_hash_command}" | _grep_o "[0-9a-fA-F]\{40\}"` + elif [ "${__ua_algorithm}" = "sha256" ] && [ -n "${__UAC_TOOL_SHA256_BIN}" ]; then + __ua_hash_command="${__UAC_TOOL_SHA256_BIN} \"${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.${__UAC_OUTPUT_EXTENSION}\"" + _verbose_msg "${__UAC_VERBOSE_CMD_PREFIX}${__ua_hash_command}" + __ua_hash=`eval "${__ua_hash_command}" | _grep_o "[0-9a-fA-F]\{64\}"` fi - else - printf %b "Cannot copy collected artifacts\n" - printf %b "Please check collected artifacts in '${TEMP_DATA_DIR}'\n" - exit 1 - fi + __ua_output_file_computed_hashes="${__ua_output_file_computed_hashes}${__ua_output_file_computed_hashes:+ +}${__ua_algorithm} checksum: ${__ua_hash}" + done fi -# create the acquisition log -if create_acquisition_log \ - "${ua_case_number}" \ - "${ua_evidence_number}" \ - "${ua_evidence_description}" \ - "${ua_examiner}" \ - "${ua_notes}" \ - "${ua_hostname}" \ - "${ua_acq_start_date}" \ - "${ua_acq_end_date}" \ - "${ua_output_file_hash}" \ - "${ua_destination_dir}" \ - "${ua_acquisition_log}" 2>/dev/null; then - printf %b "Acquisition log created '${ua_destination_dir}/${ua_acquisition_log}'\n" +# create acquisition log file +if _create_acquisition_log \ + 
"${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" \ + "${__ua_acquisition_start_date}" \ + "${__ua_acquisition_end_date}" \ + "${__ua_output_file_computed_hashes}"; then + printf "Acquisition log created '%s'\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" else - printf %b "Cannot create acquisition log\n" -fi - -# transfer output and log file to remote sftp server -if [ -n "${ua_sftp_destination}" ]; then - if [ -f "${ua_destination_dir}/${ua_output_filename}" ] \ - || [ -d "${ua_destination_dir}/${ua_output_filename}" ]; then - printf %b "Transferring output file to remote SFTP server. Please wait...\n" - if sftp_transfer "${ua_destination_dir}/${ua_output_filename}" \ - "${ua_sftp_destination}" "${ua_sftp_port}" "${ua_sftp_identity_file}"; then - printf %b "File transferred successfully\n" - # delete output file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_output_filename}" 2>/dev/null - printf %b "Transferring log file to remote SFTP server. Please wait...\n" - if sftp_transfer "${ua_destination_dir}/${ua_acquisition_log}" \ - "${ua_sftp_destination}" "${ua_sftp_port}" "${ua_sftp_identity_file}"; then - printf %b "File transferred successfully\n" - # delete log file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_acquisition_log}" 2>/dev/null - else - printf %b "Could not transfer log file to remote SFTP server\n" - exit 1 - fi - else - printf %b "Could not transfer output file to remote SFTP server\n" - exit 1 - fi + _error_msg "cannot create acquisition log\n" +fi + +# transfer output and log file to SFTP server +if [ -n "${__UAC_SFTP}" ]; then + cd "${__UAC_DESTINATION_DIR}" || _exit_fatal "cd: ${__UAC_DESTINATION_DIR}: No such file or directory" + printf "Transferring '%s' to remote SFTP server. 
Please wait...\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + if _sftp_transfer \ + "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" \ + "${__UAC_SFTP}" \ + "${__UAC_SFTP_PORT}" \ + "${__UAC_SFTP_IDENTITY_FILE}" \ + "${__UAC_SFTP_SSH_OPTIONS}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + else + _error_msg "could not transfer output file to remote SFTP server" fi + printf "Transferring '%s' to remote S3 server. Please wait...\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" + if _sftp_transfer \ + "${__UAC_OUTPUT_BASE_NAME}.log" \ + "${__UAC_SFTP}" \ + "${__UAC_SFTP_PORT}" \ + "${__UAC_SFTP_IDENTITY_FILE}" \ + "${__UAC_SFTP_SSH_OPTIONS}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}.log" + else + _error_msg "could not transfer log file to remote SFTP server" + fi + cd "${__UAC_DIR}" || _exit_fatal "cd: ${__UAC_DIR}: No such file or directory" fi -# transfer output and log file to S3 presigned url -if [ -n "${ua_s3_presigned_url}" ]; then - if [ -f "${ua_destination_dir}/${ua_output_filename}" ]; then - printf %b "Transferring output file to S3 presigned URL. Please wait...\n" - if s3_presigned_url_transfer "${ua_destination_dir}/${ua_output_filename}" \ - "${ua_s3_presigned_url}"; then - printf %b "File transferred successfully\n" - # delete output file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_output_filename}" 2>/dev/null - else - printf %b "Could not transfer output file to S3 presigned URL\n" - exit 1 - fi - if [ -n "${ua_s3_presigned_url_log_file}" ]; then - printf %b "Transferring log file to S3 presigned URL. 
Please wait...\n" - if s3_presigned_url_transfer "${ua_destination_dir}/${ua_acquisition_log}" \ - "${ua_s3_presigned_url_log_file}"; then - printf %b "File transferred successfully\n" - # delete output file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_acquisition_log}" 2>/dev/null +# transfer output and log file to S3 server +if [ -n "${__UAC_S3_PROVIDER}" ] && [ -n "${__UAC_S3_BUCKET}" ]; then + cd "${__UAC_DESTINATION_DIR}" || _exit_fatal "cd: ${__UAC_DESTINATION_DIR}: No such file or directory" + case "${__UAC_S3_PROVIDER}" in + "amazon") + printf "Transferring '%s' to remote S3 server. Please wait...\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + if _s3_transfer_amazon \ + "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" \ + "${__UAC_S3_REGION}" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_ACCESS_KEY}" \ + "${__UAC_S3_SECRET_KEY}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" else - printf %b "Could not transfer log file to S3 presigned URL\n" - exit 1 + _error_msg "could not transfer output file to remote S3 server" + fi + printf "Transferring '%s' to remote S3 server. Please wait...\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" + if _s3_transfer_amazon \ + "${__UAC_OUTPUT_BASE_NAME}.log" \ + "${__UAC_S3_REGION}" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_ACCESS_KEY}" \ + "${__UAC_S3_SECRET_KEY}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}.log" + else + _error_msg "could not transfer log file to S3 server" fi - fi - fi + ;; + "google") + printf "Transferring '%s' to remote S3 server. 
Please wait...\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + if _s3_transfer_google \ + "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_TOKEN}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + else + _error_msg "could not transfer output file to remote S3 server" + fi + printf "Transferring '%s' to remote S3 server. Please wait...\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" + if _s3_transfer_google \ + "${__UAC_OUTPUT_BASE_NAME}.log" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_TOKEN}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}.log" + else + _error_msg "could not transfer log file to S3 server" + fi + ;; + "ibm") + printf "Transferring '%s' to remote S3 server. Please wait...\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + if _s3_transfer_ibm \ + "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" \ + "${__UAC_S3_REGION}" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_TOKEN}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + else + _error_msg "could not transfer output file to remote S3 server" + fi + printf "Transferring '%s' to remote S3 server. 
Please wait...\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" + if _s3_transfer_ibm \ + "${__UAC_OUTPUT_BASE_NAME}.log" \ + "${__UAC_S3_REGION}" \ + "${__UAC_S3_BUCKET}" \ + "${__UAC_S3_TOKEN}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}.log" + else + _error_msg "could not transfer log file to S3 server" + fi + ;; + esac + cd "${__UAC_DIR}" || _exit_fatal "cd: ${__UAC_DIR}: No such file or directory" fi -# transfer output and log file to Azure Storage SAS url -if [ -n "${ua_azure_storage_sas_url}" ]; then - if [ -f "${ua_destination_dir}/${ua_output_filename}" ]; then - printf %b "Transferring output file to Azure Storage SAS URL. Please wait...\n" - if azure_storage_sas_url_transfer "${ua_destination_dir}/${ua_output_filename}" \ - "${ua_azure_storage_sas_url}"; then - printf %b "File transferred successfully\n" - # delete output file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_output_filename}" 2>/dev/null +# transfer output and log file to Amazon S3 presigned URL +if [ -n "${__UAC_AWS_S3_PRESIGNED_URL}" ]; then + cd "${__UAC_DESTINATION_DIR}" || _exit_fatal "cd: ${__UAC_DESTINATION_DIR}: No such file or directory" + printf "Transferring '%s' to Amazon S3 presigned URL. 
Please wait...\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + if _aws_s3_presigned_url_transfer \ + "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" \ + "${__UAC_AWS_S3_PRESIGNED_URL}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + else + _error_msg "could not transfer output file to Amazon S3 presigned URL" + fi + if [ -n "${__UAC_AWS_S3_PRESIGNED_URL_LOG_FILE}" ]; then + printf "Transferring '%s' to Amazon S3 presigned URL. Please wait...\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" + if _aws_s3_presigned_url_transfer \ + "${__UAC_OUTPUT_BASE_NAME}.log" \ + "${__UAC_AWS_S3_PRESIGNED_URL_LOG_FILE}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" else - printf %b "Could not transfer output file to Azure Storage SAS URL\n" - exit 1 - fi - if [ -n "${ua_azure_storage_sas_url_log_file}" ]; then - printf %b "Transferring log file to Azure Storage SAS URL. 
Please wait...\n" - if azure_storage_sas_url_transfer "${ua_destination_dir}/${ua_acquisition_log}" \ - "${ua_azure_storage_sas_url_log_file}"; then - printf %b "File transferred successfully\n" - # delete output file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_acquisition_log}" 2>/dev/null - else - printf %b "Could not transfer log file to Azure Storage SAS URL\n" - exit 1 - fi + _error_msg "could not transfer log file to Amazon S3 presigned URL" fi fi + cd "${__UAC_DIR}" || _exit_fatal "cd: ${__UAC_DIR}: No such file or directory" fi -# transfer output and log file to IBM Cloud Object Storage -if [ -n "${ua_ibm_cos_url}" ]; then - if [ -f "${ua_destination_dir}/${ua_output_filename}" ]; then - printf %b "Transferring output file to IBM Cloud Object Storage. Please wait...\n" - if ibm_cos_transfer "${ua_destination_dir}/${ua_output_filename}" \ - "${ua_ibm_cos_url}" "${ua_ibm_cloud_api_key}"; then - printf %b "File transferred successfully\n" - # delete output file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_output_filename}" 2>/dev/null +# transfer output and log file to Azure Storage SAS URL +if [ -n "${__UAC_AZURE_STORAGE_SAS_URL}" ]; then + cd "${__UAC_DESTINATION_DIR}" || _exit_fatal "cd: ${__UAC_DESTINATION_DIR}: No such file or directory" + printf "Transferring '%s' to Azure Storage SAS URL. 
Please wait...\n" \ + "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + if _azure_storage_sas_url_transfer \ + "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" \ + "${__UAC_AZURE_STORAGE_SAS_URL}";then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}${__UAC_OUTPUT_EXTENSION:+.}${__UAC_OUTPUT_EXTENSION}" + else + _error_msg "could not transfer output file to Azure Storage SAS URL" + fi + if [ -n "${__UAC_AZURE_STORAGE_SAS_URL_LOG_FILE}" ]; then + printf "Transferring '%s' to Azure Storage SAS URL. Please wait...\n" "${__UAC_DESTINATION_DIR}/${__UAC_OUTPUT_BASE_NAME}.log" + if _azure_storage_sas_url_transfer \ + "${__UAC_OUTPUT_BASE_NAME}.log" \ + "${__UAC_AZURE_STORAGE_SAS_URL_LOG_FILE}"; then + printf "Transfer complete!\n" + ${__UAC_DELETE_LOCAL_ON_SUCCESSFUL_TRANSFER} && rm -rf "${__UAC_OUTPUT_BASE_NAME}.log" else - printf %b "Could not transfer output file to IBM Cloud Object Storage\n" - exit 1 - fi - if [ -n "${ua_ibm_cos_url_log_file}" ]; then - printf %b "Transferring log file to IBM Cloud Object Storage. Please wait...\n" - if ibm_cos_transfer "${ua_destination_dir}/${ua_acquisition_log}" \ - "${ua_ibm_cos_url_log_file}" "${ua_ibm_cloud_api_key}"; then - printf %b "File transferred successfully\n" - # delete output file on success transfer - ${ua_delete_local_on_successful_transfer} \ - && rm -f "${ua_destination_dir}/${ua_acquisition_log}" 2>/dev/null - else - printf %b "Could not transfer log file to IBM Cloud Object Storage\n" - exit 1 - fi + _error_msg "could not transfer log file to Azure Storage SAS URL" fi fi + cd "${__UAC_DIR}" || _exit_fatal "cd: ${__UAC_DIR}: No such file or directory" fi +${__UAC_TRACE_MODE} && set +x + exit 0