diff --git a/.dockerignore b/.dockerignore index f1bb98a9..48f3b1a9 100644 --- a/.dockerignore +++ b/.dockerignore @@ -8,10 +8,8 @@ !/glances/outputs/static # Include Requirements files -!/requirements.txt +!/all-requirements.txt !/docker-requirements.txt -!/webui-requirements.txt -!/optional-requirements.txt # Include Config file !/docker-compose/glances.conf @@ -19,3 +17,6 @@ # Include Binary file !/docker-bin.sh + +# Include TOML file +!/pyproject.toml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9b5192d0..c5d2df52 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,9 +12,9 @@ jobs: if: github.event_name == 'push' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" - name: Install pypa/build @@ -45,7 +45,7 @@ jobs: id-token: write steps: - name: Download all the dists - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: python-package-distributions path: dist/ @@ -54,6 +54,7 @@ jobs: with: skip-existing: true attestations: false + print-hash: true pypi_test: name: Publish Python 🐍 distribution πŸ“¦ to TestPyPI @@ -69,7 +70,7 @@ jobs: id-token: write steps: - name: Download all the dists - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: python-package-distributions path: dist/ diff --git a/.github/workflows/build_docker.yml b/.github/workflows/build_docker.yml index 0fd2f188..22145114 100644 --- a/.github/workflows/build_docker.yml +++ b/.github/workflows/build_docker.yml @@ -63,7 +63,7 @@ jobs: tag: ${{ fromJson(needs.create_docker_images_list.outputs.tags) }} steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Retrieve Repository Docker metadata id: docker_meta diff --git a/.github/workflows/cyber.yml b/.github/workflows/cyber.yml index 16752ca4..c6273957 100644 --- a/.github/workflows/cyber.yml +++ b/.github/workflows/cyber.yml @@ -11,7 +11,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Run Trivy vulnerability scanner in repo mode uses: aquasecurity/trivy-action@master diff --git a/.github/workflows/inactive_issues.yml b/.github/workflows/inactive_issues.yml index 428f72d0..dad91765 100644 --- a/.github/workflows/inactive_issues.yml +++ b/.github/workflows/inactive_issues.yml @@ -10,7 +10,7 @@ jobs: issues: write pull-requests: write steps: - - uses: actions/stale@v9 + - uses: actions/stale@v10 with: days-before-issue-stale: 90 days-before-issue-close: -1 diff --git a/.github/workflows/quality.yml b/.github/workflows/quality.yml index 2d2ea63e..6c810960 100644 --- a/.github/workflows/quality.yml +++ b/.github/workflows/quality.yml @@ -22,7 +22,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 74e64514..fc7da00f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Check formatting with Ruff uses: chartboost/ruff-action@v1 @@ -37,14 +37,14 @@ jobs: runs-on: ubuntu-24.04 strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -70,14 +70,14 @@ jobs: runs-on: windows-2025 strategy: matrix: - # Windows-curses not available for Python 3.13 for the moment - python-version: ["3.9", "3.10", "3.11", "3.12"] + # Windows-curses not available for Python 3.14 for the moment + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -96,7 +96,7 @@ jobs: needs: source-code-checks # https://github.com/actions/runner-images?tab=readme-ov-file#available-images - runs-on: macos-14 + runs-on: macos-15 strategy: matrix: # Only test the latest stable version @@ -104,10 +104,10 @@ jobs: steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} cache: 'pip' diff --git a/.github/workflows/webui.yml b/.github/workflows/webui.yml index e3c9c95a..1b9ee4b1 100644 --- a/.github/workflows/webui.yml +++ b/.github/workflows/webui.yml @@ -14,9 +14,9 @@ jobs: # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Glances will be build with Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v4 + uses: actions/setup-node@v5 with: node-version: ${{ matrix.node-version }} cache: 'npm' diff --git a/.gitignore b/.gitignore index a5a48d85..e48fd58c 100644 --- a/.gitignore +++ b/.gitignore @@ -63,7 +63,9 @@ bower_components/ /*_source.tar.bz2 # Virtual env -/venv*/ +.venv/ +uv.lock +.python-version # Test .coverage diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c14879c7..928e0add 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: - id: check-ast - id: check-docstring-first @@ -15,8 +15,23 @@ repos: - id: requirements-txt-fixer - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.5 + rev: v0.14.1 hooks: - - id: ruff-format - - id: ruff - args: [--fix, --exit-non-zero-on-fix] \ No newline at end of file + # Run the linter. + - id: ruff-check + types_or: [ python, pyi ] + args: [ --fix, --exit-non-zero-on-fix ] + # Run the formatter. 
+ - id: ruff-format + types_or: [ python, pyi ] + + - repo: local + hooks: + # test duplicate line at the end of file with a custom script + # /bin/bash tests-data/tools/find-duplicate-lines.sh + - id: find-duplicate-lines + name: find duplicate lines at the end of file + entry: bash tests-data/tools/find-duplicate-lines.sh + language: system + types: [python] + pass_filenames: false diff --git a/.readthedocs.yaml b/.readthedocs.yaml index cf9b7a17..519798a5 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -31,4 +31,4 @@ sphinx: # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html python: install: - - requirements: doc-requirements.txt \ No newline at end of file + - requirements: dev-requirements.txt \ No newline at end of file diff --git a/MANIFEST.in b/MANIFEST.in index a82b1c54..bbbf4f40 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -6,6 +6,7 @@ include README.rst include SECURITY.md include conf/glances.conf include requirements.txt +include all-requirements.txt recursive-include docs * recursive-include glances *.py recursive-include glances/outputs/static * diff --git a/Makefile b/Makefile index 9c716a71..a3ce250c 100644 --- a/Makefile +++ b/Makefile @@ -1,17 +1,6 @@ -PORT ?= 8008 -venv_full:= venv/bin -venv_min := venv-min/bin -CONF := conf/glances.conf -PIP := $(venv_full)/pip -PYTHON := $(venv_full)/python -PYTEST := $(venv_full)/python -m pytest -LASTTAG = $(shell git describe --tags --abbrev=0) - -VENV_TYPES := full min -VENV_PYTHON := $(VENV_TYPES:%=venv-%-python) -VENV_UPG := $(VENV_TYPES:%=venv-%-upgrade) -VENV_DEPS := $(VENV_TYPES:%=venv-%) -VENV_INST_UPG := $(VENV_DEPS) $(VENV_UPG) +PORT ?= 8008 +CONF := conf/glances.conf +LASTTAG = $(shell git describe --tags --abbrev=0) IMAGES_TYPES := full minimal DISTROS := alpine ubuntu @@ -27,10 +16,15 @@ DOCKER_SOCK ?= /var/run/docker.sock DOCKER_SOCKS := -v $(PODMAN_SOCK):$(PODMAN_SOCK):ro -v $(DOCKER_SOCK):$(DOCKER_SOCK):ro DOCKER_OPTS := --rm -e TZ="${TZ}" -e GLANCES_OPT="" --pid host --network host +# User-friendly check for uv +ifeq ($(shell which uv >/dev/null 2>&1; echo $$?), 1) +$(error The 'uv' command was not found. Make sure you have Astral Uv installed, then set the UV environment variable to point to the full path of the 'uv' executable. Alternatively more information with make install-uv) +endif + # if the command is only `make`, the default tasks will be the printing of the help. 
.DEFAULT_GOAL := help -.PHONY: help test docs docs-server venv venv-min +.PHONY: help test docs docs-server venv help: ## List all make commands available @grep -E '^[\.a-zA-Z_%-]+:.*?## .*$$' $(MAKEFILE_LIST) | \ @@ -44,76 +38,88 @@ help: ## List all make commands available # Virtualenv # =================================================================== -venv-%-upgrade: UPGRADE = --upgrade +install-uv: ## Instructions to install the UV tool + @echo "Install the UV tool (https://astral.sh/uv/)" + @echo "Please install the UV tool manually" + @echo "For example with: curl -LsSf https://astral.sh/uv/install.sh | sh" + @echo "Or via a package manager of your distribution" + @echo "For example for Snap: snap install astral-uv" -define DEFINE_VARS_FOR_TYPE -venv-$(TYPE) venv-$(TYPE)-upgrade: VIRTUAL_ENV = $(venv_$(TYPE)) -endef +upgrade-uv: ## Upgrade the UV tool + uv self update -$(foreach TYPE,$(VENV_TYPES),$(eval $(DEFINE_VARS_FOR_TYPE))) +venv: ## Create the virtualenv with all dependencies + uv sync --all-extras --no-group dev -$(VENV_PYTHON): venv-%-python: - virtualenv -p python3 $(if $(filter full,$*),venv,venv-$*) +venv-upgrade venv-switch-to-full: ## Upgrade the virtualenv with all dependencies + uv sync --upgrade --all-extras -$(VENV_INST_UPG): venv-%: - $(if $(UPGRADE),$(VIRTUAL_ENV)/pip install --upgrade pip,) - $(foreach REQ,$(REQS), $(VIRTUAL_ENV)/pip install $(UPGRADE) -r $(REQ);) - $(if $(PRE_COMMIT),$(VIRTUAL_ENV)/pre-commit install --hook-type pre-commit,) +venv-min: ## Create the virtualenv with minimal dependencies + uv sync -venv-python: $(VENV_PYTHON) ## Install all Python 3 venv -venv: $(VENV_DEPS) ## Install all Python 3 dependencies -venv-upgrade: $(VENV_UPG) ## Upgrade all Python 3 dependencies +venv-upgrade-min venv-switch-to-min: ## Upgrade the virtualenv with minimal dependencies + uv sync --upgrade -# For full installation (with optional dependencies) +venv-clean: ## Remove the virtualenv + rm -rf .venv -venv-full venv-full-upgrade: REQS = requirements.txt optional-requirements.txt dev-requirements.txt doc-requirements.txt +venv-dev: ## Create the virtualenv with dev dependencies + uv sync --dev --all-extras + uv run pre-commit install --hook-type pre-commit -venv-full-python: ## Install Python 3 venv -venv-full: venv-python ## Install Python 3 run-time -venv-full-upgrade: ## Upgrade Python 3 run-time dependencies -venv-full: PRE_COMMIT = 1 +# =================================================================== +# Requirements +# +# Note: the --no-hashes option should be used because pip (in CI) has +# issues with hashes. 
+# =================================================================== -# For minimal installation (without optional dependencies) +requirements-min: ## Generate the requirements.txt files (minimal dependencies) + uv export --no-emit-workspace --no-hashes --no-group dev --output-file requirements.txt -venv-min venv-min-upgrade: REQS = requirements.txt dev-requirements.txt doc-requirements.txt +requirements-all: ## Generate the all-requirements.txt files (all dependencies) + uv export --no-emit-workspace --no-hashes --all-extras --no-group dev --output-file all-requirements.txt -venv-min-python: ## Install Python 3 venv minimal -venv-min: venv-min-python ## Install Python 3 minimal run-time dependencies -venv-min-upgrade: ## Upgrade Python 3 minimal run-time dependencies +requirements-docker: ## Generate the docker-requirements.txt files (Docker specific dependencies) + uv export --no-emit-workspace --no-hashes --no-group dev --extra containers --extra web --output-file docker-requirements.txt + +requirements-dev: ## Generate the dev-requirements.txt files (dev dependencies) + uv export --no-hashes --only-dev --output-file dev-requirements.txt + +requirements: requirements-min requirements-all requirements-dev requirements-docker ## Generate all the requirements files + +requirements-upgrade: venv-upgrade requirements ## Upgrade the virtualenv and regenerate all the requirements files # =================================================================== # Tests # =================================================================== test: ## Run All unit tests - $(PYTEST) + uv run pytest test-core: ## Run Core unit tests - $(PYTEST) tests/test_core.py + uv run pytest tests/test_core.py + +test-api: ## Run API unit tests + uv run pytest tests/test_api.py test-memoryleak: ## Run Memory-leak unit tests - $(PYTEST) tests/test_memoryleak.py + uv run pytest tests/test_memoryleak.py test-perf: ## Run Perf unit tests - $(PYTEST) tests/test_perf.py + uv run pytest tests/test_perf.py test-restful: ## Run Restful API unit tests - $(PYTEST) tests/test_restful.py + uv run pytest tests/test_restful.py test-webui: ## Run WebUI unit tests - $(PYTEST) tests/test_webui.py + uv run pytest tests/test_webui.py test-xmlrpc: ## Run XMLRPC API unit tests - $(PYTEST) tests/test_xmlrpc.py + uv run pytest tests/test_xmlrpc.py test-with-upgrade: venv-upgrade test ## Upgrade deps and run unit tests -test-min: ## Run core unit tests in minimal environment - $(venv_min)/python -m pytest tests/test_core.py - -test-min-with-upgrade: venv-min-upgrade ## Upgrade deps and run unit tests in minimal environment - $(venv_min)/python -m pytest tests/test_core.py - test-export-csv: ## Run interface tests with CSV /bin/bash ./tests/test_export_csv.sh @@ -129,26 +135,29 @@ test-export-influxdb-v3: ## Run interface tests with InfluxDB version 3 (Core) test-export-timescaledb: ## Run interface tests with TimescaleDB /bin/bash ./tests/test_export_timescaledb.sh -test-export: test-export-csv test-export-json test-export-influxdb-v1 test-export-influxdb-v3 test-export-timescaledb## Tests all exports +test-exports: test-export-csv test-export-json test-export-influxdb-v1 test-export-influxdb-v3 test-export-timescaledb ## Tests all exports # =================================================================== # Linters, profilers and cyber security # =================================================================== +find-duplicate-lines: + /bin/bash tests-data/tools/find-duplicate-lines.sh + format: ## Format the code - $(venv_full)/python 
-m ruff format . + uv run ruff format . lint: ## Lint the code. - $(venv_full)/python -m ruff check . --fix + uv run ruff check . --fix lint-readme: ## Lint the main README.rst file - $(venv_full)/python -m rstcheck README.rst + uv run rstcheck README.rst codespell: ## Run codespell to fix common misspellings in text files - $(venv_full)/codespell -S .git,./docs/_build,./Glances.egg-info,./venv*,./glances/outputs,*.svg -L hart,bu,te,statics -w + uv run codespell -S .git,./docs/_build,./Glances.egg-info,./venv*,./glances/outputs,*.svg -L hart,bu,te,statics -w semgrep: ## Run semgrep to find bugs and enforce code standards - $(venv_full)/semgrep scan --config=auto + uv run semgrep scan --config=auto profiling-%: SLEEP = 3 profiling-%: TIMES = 30 @@ -162,27 +171,27 @@ endef profiling-gprof: CPROF = glances.cprof profiling-gprof: ## Callgraph profiling (need "apt install graphviz") $(DISPLAY-BANNER) - $(PYTHON) -m cProfile -o $(CPROF) run-venv.py -C $(CONF) --stop-after $(TIMES) - $(venv_full)/gprof2dot -f pstats $(CPROF) | dot -Tsvg -o $(OUT_DIR)/glances-cgraph.svg + uv run python -m cProfile -o $(CPROF) run-venv.py -C $(CONF) --stop-after $(TIMES) + uv run gprof2dot -f pstats $(CPROF) | dot -Tsvg -o $(OUT_DIR)/glances-cgraph.svg rm -f $(CPROF) profiling-pyinstrument: ## PyInstrument profiling $(DISPLAY-BANNER) - $(PIP) install pyinstrument - $(PYTHON) -m pyinstrument -r html -o $(OUT_DIR)/glances-pyinstrument.html -m glances -C $(CONF) --stop-after $(TIMES) + uv add pyinstrument + uv run pyinstrument -r html -o $(OUT_DIR)/glances-pyinstrument.html -m glances -C $(CONF) --stop-after $(TIMES) profiling-pyspy: ## Flame profiling $(DISPLAY-BANNER) - $(venv_full)/py-spy record -o $(OUT_DIR)/glances-flame.svg -d 60 -s -- $(PYTHON) run-venv.py -C $(CONF) --stop-after $(TIMES) + uv run py-spy record -o $(OUT_DIR)/glances-flame.svg -d 60 -s -- uv run python run-venv.py -C $(CONF) --stop-after $(TIMES) profiling: profiling-gprof profiling-pyinstrument profiling-pyspy ## Profiling of the Glances software trace-malloc: ## Trace the malloc() calls @echo "Malloc test is running, please wait ~30 secondes..." - $(PYTHON) -m glances -C $(CONF) --trace-malloc --stop-after 15 --quiet + uv run python -m glances -C $(CONF) --trace-malloc --stop-after 15 --quiet memory-leak: ## Profile memory leaks - $(PYTHON) -m glances -C $(CONF) --memory-leak + uv run python -m glances -C $(CONF) --memory-leak memory-profiling: TIMES = 2400 memory-profiling: PROFILE = mprofile_*.dat @@ -191,30 +200,31 @@ memory-profiling: ## Profile memory usage @echo "It's a very long test (~4 hours)..." 
rm -f $(PROFILE) @echo "1/2 - Start memory profiling with the history option enable" - $(venv_full)/mprof run -T 1 -C run-venv.py -C $(CONF) --stop-after $(TIMES) --quiet - $(venv_full)/mprof plot --output $(OUT_DIR)/glances-memory-profiling-with-history.png + uv run mprof run -T 1 -C run-venv.py -C $(CONF) --stop-after $(TIMES) --quiet + uv run mprof plot --output $(OUT_DIR)/glances-memory-profiling-with-history.png rm -f $(PROFILE) @echo "2/2 - Start memory profiling with the history option disable" - $(venv_full)/mprof run -T 1 -C run-venv.py -C $(CONF) --disable-history --stop-after $(TIMES) --quiet - $(venv_full)/mprof plot --output $(OUT_DIR)/glances-memory-profiling-without-history.png + uv run mprof run -T 1 -C run-venv.py -C $(CONF) --disable-history --stop-after $(TIMES) --quiet + uv run mprof plot --output $(OUT_DIR)/glances-memory-profiling-without-history.png rm -f $(PROFILE) # Trivy installation: https://aquasecurity.github.io/trivy/latest/getting-started/installation/ trivy: ## Run Trivy to find vulnerabilities in container images - trivy fs . + uv run trivy fs . # =================================================================== # Docs # =================================================================== docs: ## Create the documentation - $(PYTHON) ./generate_openapi.py - $(PYTHON) -m glances -C $(CONF) --api-doc > ./docs/api.rst + uv run python -m glances -C $(CONF) --api-doc > ./docs/api/python.rst + uv run python ./generate_openapi.py + uv run python -m glances -C $(CONF) --api-restful-doc > ./docs/api/restful.rst cd docs && ./build.sh && cd .. docs-server: docs ## Start a Web server to serve the documentation (sleep 2 && sensible-browser "http://localhost:$(PORT)") & - cd docs/_build/html/ && ../../../venv/bin/python -m http.server $(PORT) + cd docs/_build/html/ && uv run python -m http.server $(PORT) release-note: ## Generate release note git --no-pager log $(LASTTAG)..HEAD --first-parent --pretty=format:"* %s" @@ -231,17 +241,19 @@ install: ## Open a Web Browser to the installation procedure webui webui%: DIR = glances/outputs/static/ -webui: ## Build the Web UI - $(PYTHON) -c 'import json; from glances.outputs.glances_curses import _GlancesCurses; print(json.dumps({ "leftMenu": [p for p in _GlancesCurses._left_sidebar if p != "now"]}, indent=4))' > ./glances/outputs/static/js/uiconfig.json +webui-gen-config: ## Generate the Web UI config file + uv run python ./generate_webui_conf.py > ./glances/outputs/static/js/uiconfig.json + +webui: webui-gen-config ## Build the Web UI cd $(DIR) && npm ci && npm run build webui-audit: ## Audit the Web UI cd $(DIR) && npm audit -webui-audit-fix: ## Fix audit the Web UI +webui-audit-fix: webui-gen-config ## Fix audit the Web UI cd $(DIR) && npm audit fix && npm ci && npm run build -webui-update: ## Update JS dependencies +webui-update: webui-gen-config ## Update JS dependencies cd $(DIR) && npm update --save && npm ci && npm run build # =================================================================== @@ -250,7 +262,7 @@ webui-update: ## Update JS dependencies flatpak: venv-upgrade ## Generate FlatPack JSON file git clone https://github.com/flatpak/flatpak-builder-tools.git - $(PYTHON) ./flatpak-builder-tools/pip/flatpak-pip-generator glances + uv run python ./flatpak-builder-tools/pip/flatpak-pip-generator glances rm -rf ./flatpak-builder-tools @echo "Now follow: https://github.com/flathub/flathub/wiki/App-Submission" @@ -289,28 +301,22 @@ docker-ubuntu-dev: ## Generate local docker image (Ubuntu dev) # 
=================================================================== run: ## Start Glances in console mode (also called standalone) - $(PYTHON) -m glances -C $(CONF) + uv run python -m glances -C $(CONF) run-debug: ## Start Glances in debug console mode (also called standalone) - $(PYTHON) -m glances -C $(CONF) -d + uv run python -m glances -C $(CONF) -d run-local-conf: ## Start Glances in console mode with the system conf file - $(PYTHON) -m glances + uv run python -m glances run-local-conf-hide-public: ## Start Glances in console mode with the system conf file and hide public information - $(PYTHON) -m glances --hide-public-info - -run-min: ## Start minimal Glances in console mode (also called standalone) - $(venv_min)/python -m glances -C $(CONF) - -run-min-debug: ## Start minimal Glances in debug console mode (also called standalone) - $(venv_min)/python -m glances -C $(CONF) -d - -run-min-local-conf: ## Start minimal Glances in console mode with the system conf file - $(venv_min)/python -m glances + uv run python -m glances --hide-public-info run-like-htop: ## Start Glances with the same features than Htop - $(venv_min)/python -m glances --disable-plugin network,ports,wifi,connections,diskio,fs,irq,folders,raid,smart,sensors,vms,containers,ip,amps --disable-left-sidebar + uv run python -m glances --disable-plugin network,ports,wifi,connections,diskio,fs,irq,folders,raid,smart,sensors,vms,containers,ip,amps --disable-left-sidebar + +run-fetch: ## Start Glances in fetch mode + uv run python -m glances --fetch $(DOCKER_RUNTIMES): run-docker-%: $(DOCKER_RUN) $(DOCKER_OPTS) $(DOCKER_SOCKS) -it glances:local-$* @@ -323,31 +329,31 @@ run-docker-ubuntu-full: ## Start Glances Ubuntu Docker full in console mode run-docker-ubuntu-dev: ## Start Glances Ubuntu Docker dev in console mode run-webserver: ## Start Glances in Web server mode - $(PYTHON) -m glances -C $(CONF) -w + uv run python -m glances -C $(CONF) -w run-webserver-local-conf: ## Start Glances in Web server mode with the system conf file - $(PYTHON) -m glances -w + uv run python -m glances -w run-webserver-local-conf-hide-public: ## Start Glances in Web server mode with the system conf file and hide public info - $(PYTHON) -m glances -w --hide-public-info + uv run python -m glances -w --hide-public-info run-restapiserver: ## Start Glances in REST API server mode - $(PYTHON) -m glances -C $(CONF) -w --disable-webui + uv run python -m glances -C $(CONF) -w --disable-webui run-server: ## Start Glances in server mode (RPC) - $(PYTHON) -m glances -C $(CONF) -s + uv run python -m glances -C $(CONF) -s run-client: ## Start Glances in client mode (RPC) - $(PYTHON) -m glances -C $(CONF) -c localhost + uv run python -m glances -C $(CONF) -c localhost run-browser: ## Start Glances in browser mode (RPC) - $(PYTHON) -m glances -C $(CONF) --browser + uv run python -m glances -C $(CONF) --browser run-web-browser: ## Start Web Central Browser - $(PYTHON) -m glances -C $(CONF) -w --browser + uv run python -m glances -C $(CONF) -w --browser run-issue: ## Start Glances in issue mode - $(PYTHON) -m glances -C $(CONF) --issue + uv run python -m glances -C $(CONF) --issue run-multipass: ## Install and start Glances in a VM (only available on Ubuntu with multipass already installed) multipass launch -n glances-on-lts lts @@ -357,4 +363,4 @@ run-multipass: ## Install and start Glances in a VM (only available on Ubuntu wi multipass delete glances-on-lts show-version: ## Show Glances version number - $(PYTHON) -m glances -C $(CONF) -V + uv run python -m glances 
-C $(CONF) -V
diff --git a/NEWS.rst b/NEWS.rst
index 814a2a01..bbe8250b 100644
--- a/NEWS.rst
+++ b/NEWS.rst
@@ -2,6 +2,87 @@
 Glances ChangeLog
 ==============================================================================
 
+=============
+Version 4.4.0
+=============
+
+Breaking changes:
+
+* A new Python API is now available to use Glances as a Python lib in your own development #3237
+* In the process list, the long command line is now truncated by default. Use the arrow keys to show the full command line. SHIFT + arrow keys are used to switch between column sorts (TUI).
+* Prometheus export format is now more user friendly (see details in #3283)
+
+Enhancements:
+
+* Make a Glances API in order to use Glances as a Python lib #3237
+* Add a new --fetch (neofetch like) option to display a snapshot of the current system status #3281
+* Show used port in container section #2054
+* Show long command line with arrow key #1553
+* Sensors plugin now refreshes by default every 10 seconds
+* Do not call update if a call is done to a specific plugin through the API #3033
+* [UI] Process virtual memory display can be disabled by configuration #3299
+* Choose between used or available in the mem plugin #3288
+* [Experimental] Add export to DuckDB database #3205
+* Add Disk I/O Latency stats #1070
+* Filter fields to export #3258
+* Remove .keys() from loops over dicts #3253
+* Remove iterator helpers #3252
+
+Bugs corrected:
+
+* [MACOS] Glances not showing Processes on MacOS #3100
+* Last dev build broke Homepage API calls ? only 1 widget still working #3322
+* Cloud plugin always generates communication with 169.254.169.254, even if the plugin is disabled #3316
+* API response delay (3+ minutes) when VMs are running #3317
+* [WINDOWS] Glances does not display CPU stat correctly #3155
+* Glances hangs if network device (NFS) is not available #3290
+* Fix prometheus export format #3283
+* Issue #3279 zfs cache and memory math issues #3289
+* [MACOS] Glances crashes when I try to filter #3266
+* Glances hangs when killing a process with multiple CTRL-C #3264
+* Issues after disabling system and processcount plugins #3248
+* Headers missing from predefined fields in TUI browser machine list #3250
+* Add another check for the famous Netifaces issue - Related to #3219
+* Key error 'type' in server_list_static.py (load_server_list) #3247
+
+Continuous integration and documentation:
+
+* Glances now uses uv for the dev environment #3025
+* Glances is compatible with Python 3.14 #3319
+* Glances provides requirements files with specific versions for each release
+* Requirements files are now generated dynamically with the make requirements or requirements-upgrade targets
+* Add duplicate line check in pre-commit (strange behavior with some VSCode extensions)
+* Solve issue with multiprocessing exception with Snap package
+* Add a test script to identify the CPU consumption of the sensors plugin
+* Refactor port to take into account netifaces2
+* Correct issue with Chrome driver in WebUI unit test
+* Upgrade export test with InfluxDB 1.12
+* Fix typo in --export-process-filter help message #3314
+* In the outdated feature, catch the error message if the PyPI server is not reachable
+* Add unit test for auto_unit
+* Label error in docs #3286
+* Put WebUI conf generator in a dedicated script
+* Refactor the Makefile to generate WebUI config file for all webui targets
+* Update sensors documentation #3275
+* Update docker compose env quote #3273
+* Update docker-compose.yml #3249
+* Update API doc generation
+* Update README with nice icons #3236
+* Add documentation for WebUI test + +Thanks to all contributors and bug reporters ! + +Special thanks to: +- Adi +- Bennett Kanuka +- Tim Potter +- Ariel Otilibili +- Boris Okassa +- Lawrence +- Shohei YOSHIDA +- jmwallach +- korn3r + ============= Version 4.3.3 ============= @@ -424,7 +505,7 @@ See release note in Wiki format: https://github.com/nicolargo/glances/wiki/Glanc **BREAKING CHANGES:** * The minimal Python version is 3.8 -* The Glances API version 3 is replaced by the version 4. So Restfull API URL is now /api/4/ #2610 +* The Glances API version 3 is replaced by the version 4. So Restful API URL is now /api/4/ #2610 * Alias definition change in the configuration file #1735 Glances version 3.x and lower: @@ -449,9 +530,9 @@ Minimal requirements for Glances version 4 are: * packaging * ujson * pydantic -* fastapi (for WebUI / RestFull API) -* uvicorn (for WebUI / RestFull API) -* jinja2 (for WebUI / RestFull API) +* fastapi (for WebUI / RestFul API) +* uvicorn (for WebUI / RestFul API) +* jinja2 (for WebUI / RestFul API) Majors changes between Glances version 3 and version 4: @@ -511,7 +592,7 @@ Bug corrected: CI and documentation: * New logo for Glances version 4.0 #2713 -* Update api.rst documentation #2496 +* Update api-restful.rst documentation #2496 * Change Renovate config #2729 * Docker compose password unrecognized arguments when applying docs #2698 * Docker includes OS Release Volume mount info #2473 @@ -889,7 +970,7 @@ Bugs corrected: * Threading.Event.isSet is deprecated in Python 3.10 #2017 * Fix code scanning alert - Clear-text logging of sensitive information security #2006 * The gpu temperature unit are displayed incorrectly in web ui bug #2002 -* Doc for 'alert' Restfull/JSON API response documentation #1994 +* Doc for 'alert' Restful/JSON API response documentation #1994 * Show the spinning state of a disk documentation #1993 * Web server status check endpoint enhancement #1988 * --time parameter being ignored for client/server mode bug #1978 @@ -984,7 +1065,7 @@ Bugs corrected: * [3.2.0/3.2.1] keybinding not working anymore #1904 * InfluxDB/InfluxDB2 Export object has no attribute hostname #1899 -Documentation: The "make docs" generate RestFull/API documentation file. +Documentation: The "make docs" generate RestFul/API documentation file. =============== Version 3.2.1 @@ -2011,7 +2092,7 @@ Version 2.1 * Add Glances log message (in the /tmp/glances.log file) The default log level is INFO, you can switch to the DEBUG mode using the -d option on the command line. * Add RESTful API to the Web server mode - RESTful API doc: https://github.com/nicolargo/glances/wiki/The-Glances-RESTFULL-JSON-API + RESTful API doc: https://github.com/nicolargo/glances/wiki/The-Glances-RESTFUL-JSON-API * Improve SNMP fallback mode for Cisco IOS, VMware ESXi * Add --theme-white feature to optimize display for white background * Experimental history feature (--enable-history option on the command line) diff --git a/README.rst b/README.rst index 40a0d113..f4172639 100644 --- a/README.rst +++ b/README.rst @@ -3,7 +3,7 @@ Glances - An Eye on your System =============================== | |pypi| |test| |contributors| |quality| -| |starts| |docker| |pypistat| |ossrank| +| |starts| |docker| |pypistat| | |sponsors| |twitter| .. |pypi| image:: https://img.shields.io/pypi/v/glances.svg @@ -21,10 +21,6 @@ Glances - An Eye on your System :target: https://pepy.tech/project/glances :alt: Pypi downloads -.. 
|ossrank| image:: https://shields.io/endpoint?url=https://ossrank.com/shield/3689
-    :target: https://ossrank.com/p/3689
-    :alt: OSSRank
-
 .. |test| image:: https://github.com/nicolargo/glances/actions/workflows/ci.yml/badge.svg?branch=develop
     :target: https://github.com/nicolargo/glances/actions
     :alt: Linux tests (GitHub Actions)
@@ -45,8 +41,8 @@ Glances - An Eye on your System
     :target: https://twitter.com/nicolargo
     :alt: @nicolargo
 
-Summary
-=======
+Summary 🌟
+==========
 
 **Glances** is an open-source system cross-platform monitoring tool.
 It allows real-time monitoring of various aspects of your system such as
@@ -58,21 +54,19 @@ and can also be used for remote monitoring of systems via a web interface
 or command line interface. It is easy to install and use and can be customized
 to show only the information that you are interested in.
 
-.. image:: https://raw.githubusercontent.com/nicolargo/glances/develop/docs/_static/glances-summary.png
-
 In client/server mode, remote monitoring could be done via terminal,
 Web interface or API (XML-RPC and RESTful).
 Stats can also be exported to files or external time/value databases, CSV or direct
 output to STDOUT.
 
-.. image:: https://raw.githubusercontent.com/nicolargo/glances/develop/docs/_static/glances-responsive-webdesign.png
+.. image:: ./docs/_static/glances-responsive-webdesign.png
 
 Glances is written in Python and uses libraries to grab information from
 your system. It is based on an open architecture where developers can
 add new plugins or exports modules.
 
-Usage
-=====
+Usage 👋
+========
 
 For the standalone mode, just run:
 
@@ -80,6 +74,8 @@ For the standalone mode, just run:
 
     $ glances
 
+.. image:: ./docs/_static/glances-summary.png
+
 For the Web server mode, run:
 
 .. code-block:: console
@@ -88,20 +84,22 @@ For the Web server mode, run:
 
     $ glances -w
 
 and enter the URL ``http://:61208`` in your favorite web browser.
 
-For the client/server mode, run:
+In this mode, an HTTP/RESTful API is exposed, see the `RestfulApi`_ document for more details.
+
+.. image:: ./docs/_static/screenshot-web.png
+
+For the client/server mode (remote monitoring through XML-RPC), run the following command on the server:
 
 .. code-block:: console
 
     $ glances -s
 
-on the server side and run:
+and this one on the client:
 
 .. code-block:: console
 
     $ glances -c
 
-on the client one.
-
 You can also detect and display all Glances servers available on your
 network (or defined in the configuration file) in TUI:
 
@@ -147,17 +145,86 @@ or in a JSON format thanks to the stdout-json option (attribute not supported in
     mem: {"total": 7837949952, "available": 2919079936, "percent": 62.8, "used": 4918870016, "free": 2919079936, "active": 2841214976, "inactive": 3340550144, "buffers": 546799616, "cached": 3068141568, "shared": 788156416}
     ...
 
+Last but not least, you can use the fetch mode to get a quick look at a machine:
+
+.. code-block:: console
+
+    $ glances --fetch
+
+Results look like this:
+
+.. image:: ./docs/_static/screenshot-fetch.png
+
 and RTFM, always.
 
-Documentation
-=============
+Use Glances as a Python library 📚
+==================================
+
+You can access the Glances API by importing the `glances.api` module and creating an
+instance of the `GlancesAPI` class. This instance provides access to all Glances plugins
+and their fields. For example, to access the CPU plugin and its total field, you can
+use the following code:
+
+.. 
code-block:: python
+
+    >>> from glances import api
+    >>> gl = api.GlancesAPI()
+    >>> gl.cpu
+    {'cpucore': 16,
+     'ctx_switches': 1214157811,
+     'guest': 0.0,
+     'idle': 91.4,
+     'interrupts': 991768733,
+     'iowait': 0.3,
+     'irq': 0.0,
+     'nice': 0.0,
+     'soft_interrupts': 423297898,
+     'steal': 0.0,
+     'syscalls': 0,
+     'system': 5.4,
+     'total': 7.3,
+     'user': 3.0}
+    >>> gl.cpu["total"]
+    7.3
+    >>> gl.mem["used"]
+    12498582144
+    >>> gl.auto_unit(gl.mem["used"])
+    11.6G
+
+If the stats return a list of items (like network interfaces or processes), you can
+access them by their name:
+
+.. code-block:: python
+
+    >>> gl.network.keys()
+    ['wlp0s20f3', 'veth33b370c', 'veth19c7711']
+    >>> gl.network["wlp0s20f3"]
+    {'alias': None,
+     'bytes_all': 362,
+     'bytes_all_gauge': 9242285709,
+     'bytes_all_rate_per_sec': 1032.0,
+     'bytes_recv': 210,
+     'bytes_recv_gauge': 7420522678,
+     'bytes_recv_rate_per_sec': 599.0,
+     'bytes_sent': 152,
+     'bytes_sent_gauge': 1821763031,
+     'bytes_sent_rate_per_sec': 433.0,
+     'interface_name': 'wlp0s20f3',
+     'key': 'interface_name',
+     'speed': 0,
+     'time_since_update': 0.3504955768585205}
+
+For a complete example of how to use Glances as a library, have a look at the `PythonApi`_.
+
+Documentation 📜
+================
 
 For complete documentation have a look at the readthedocs_ website.
 
-If you have any question (after RTFM!), please post it on the official Q&A `forum`_.
+If you have any questions (after RTFM!), please post them on the official Reddit `forum`_.
 
-Gateway to other services
-=========================
+Gateway to other services 🌐
+============================
 
 Glances can export stats to:
 
@@ -178,8 +245,8 @@ Glances can export stats to:
 - ``Graphite`` server
 - ``RESTful`` endpoint
 
-Installation
-============
+Installation 🚀
+===============
 
 There are several methods to test/install Glances on your system.
 Choose your weapon!
@@ -262,6 +329,15 @@ Install Glances (with all features):
 
 The glances script will be installed in the ~/.local/bin folder.
 
+Brew: The missing package manager
+---------------------------------
+
+For Linux and macOS, it is also possible to install Glances with `Brew`_:
+
+.. code-block:: console
+
+    brew install glances
+
 Docker: the cloudy way
 ----------------------
 
@@ -447,8 +523,8 @@ Ansible
 
 A Glances ``Ansible`` role is available: https://galaxy.ansible.com/zaxos/glances-ansible-role/
 
-Shell tab completion
-====================
+Shell tab completion 🔍
+=======================
 
 Glances 4.3.2 and higher includes shell tab autocompletion thanks to the --print-completion option.
 
 For example, on a Linux operating system with bash shell:
@@ -462,8 +538,8 @@ For example, on a Linux operating system with bash shell:
 Following shells are supported: bash, zsh and tcsh.
 
-Requirements
-============
+Requirements 🧩
+===============
 
 Glances is developed in Python. A minimal Python version 3.9 or higher should be installed on your system.
@@ -480,8 +556,9 @@ Dependencies:
 
 - ``packaging`` (for the version comparison)
 - ``windows-curses`` (Windows Curses implementation) [Windows-only]
 - ``shtab`` (Shell autocompletion) [All but Windows]
+- ``jinja2`` (for fetch mode and templating)
 
-Optional dependencies:
+Extra dependencies:
 
 - ``batinfo`` (for battery monitoring)
 - ``bernhard`` (for the Riemann export module)
@@ -494,7 +571,6 @@ Optional dependencies:
 - ``hddtemp`` (for HDD temperature monitoring support) [Linux-only]
 - ``influxdb`` (for the InfluxDB version 1 export module)
 - ``influxdb-client`` (for the InfluxDB version 2 export module)
-- ``jinja2`` (for templating, used under the hood by FastAPI)
 - ``kafka-python`` (for the Kafka export module)
 - ``netifaces2`` (for the IP plugin)
 - ``nvidia-ml-py`` (for the GPU plugin)
@@ -516,8 +592,8 @@ Optional dependencies:
 - ``wifi`` (for the wifi plugin) [Linux-only]
 - ``zeroconf`` (for the autodiscover mode)
 
-How to contribute ?
-===================
+How to contribute ? 🤝
+======================
 
 If you want to contribute to the Glances project, read this `wiki`_ page.
 
 There is also a chat dedicated to the Glances developers:
 
@@ -526,8 +602,8 @@ There is also a chat dedicated to the Glances developers:
 
 .. image:: https://badges.gitter.im/Join%20Chat.svg
     :target: https://gitter.im/nicolargo/glances?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
 
-Project sponsorship
-===================
+Project sponsorship 🙌
+======================
 
 You can help me to achieve my goals of improving this open-source
 project or just say "thank you" by:
 
@@ -538,21 +614,21 @@ or just say "thank you" by:
 
 Any and all contributions are greatly appreciated.
 
-Author
-======
+Authors and Contributors 🔥
+===========================
 
 Nicolas Hennion (@nicolargo)
 
 .. image:: https://img.shields.io/twitter/url/https/twitter.com/cloudposse.svg?style=social&label=Follow%20%40nicolargo
     :target: https://twitter.com/nicolargo
 
-License
-=======
+License 📜
+==========
 
 Glances is distributed under the LGPL version 3 license. See ``COPYING`` for more details.
 
-More stars !
-============
+More stars ! 🌟
+===============
 
 Please give us a star on `GitHub`_ if you like this project.
 
@@ -561,13 +637,17 @@ Please give us a star on `GitHub`_ if you like this project.
     :alt: Star history
 
 .. _psutil: https://github.com/giampaolo/psutil
+.. _Brew: https://formulae.brew.sh/formula/glances
 .. _Python: https://www.python.org/getit/
 .. _Termux: https://play.google.com/store/apps/details?id=com.termux
 .. _readthedocs: https://glances.readthedocs.io/
-.. _forum: https://groups.google.com/forum/?hl=en#!forum/glances-users
+.. _forum: https://www.reddit.com/r/glances/
 .. _wiki: https://github.com/nicolargo/glances/wiki/How-to-contribute-to-Glances-%3F
 .. _package: https://repology.org/project/glances/versions
 .. _sponsors: https://github.com/sponsors/nicolargo
 .. _wishlist: https://www.amazon.fr/hz/wishlist/ls/BWAAQKWFR3FI?ref_=wl_share
 .. _Docker: https://github.com/nicolargo/glances/blob/develop/docs/docker.rst
 .. _GitHub: https://github.com/nicolargo/glances
+.. _PythonApi: https://glances.readthedocs.io/en/develop/api/python.html
+.. 
_RestfulApi: https://glances.readthedocs.io/en/develop/api/restful.html + diff --git a/all-requirements.txt b/all-requirements.txt new file mode 100644 index 00000000..d11f5899 --- /dev/null +++ b/all-requirements.txt @@ -0,0 +1,271 @@ +# This file was autogenerated by uv via the following command: +# uv export --no-emit-workspace --no-hashes --all-extras --no-group dev --output-file all-requirements.txt +annotated-doc==0.0.3 + # via fastapi +annotated-types==0.7.0 + # via pydantic +anyio==4.11.0 + # via + # elasticsearch + # starlette +batinfo==0.4.2 ; sys_platform == 'linux' + # via glances +bernhard==0.2.6 + # via glances +cassandra-driver==3.29.3 + # via glances +certifi==2025.10.5 + # via + # elastic-transport + # influxdb-client + # influxdb3-python + # requests +cffi==2.0.0 ; implementation_name == 'pypy' or platform_python_implementation != 'PyPy' + # via + # cryptography + # pyzmq +charset-normalizer==3.4.4 + # via requests +chevron==0.14.0 + # via glances +click==8.1.8 + # via + # geomet + # uvicorn +colorama==0.4.6 ; sys_platform == 'win32' + # via + # click + # pytest +coverage==7.10.7 ; python_full_version < '3.10' + # via pytest-cov +coverage==7.11.0 ; python_full_version >= '3.10' + # via pytest-cov +cryptography==46.0.3 + # via pysnmpcrypto +defusedxml==0.7.1 + # via glances +dnspython==2.7.0 ; python_full_version < '3.10' + # via pymongo +dnspython==2.8.0 ; python_full_version >= '3.10' + # via pymongo +docker==7.1.0 + # via glances +elastic-transport==9.1.0 ; python_full_version < '3.10' + # via elasticsearch +elastic-transport==9.2.0 ; python_full_version >= '3.10' + # via elasticsearch +elasticsearch==9.1.2 ; python_full_version < '3.10' + # via glances +elasticsearch==9.2.0 ; python_full_version >= '3.10' + # via glances +exceptiongroup==1.2.2 ; python_full_version < '3.11' + # via + # anyio + # pytest +fastapi==0.120.4 + # via glances +geomet==1.1.0 + # via cassandra-driver +graphitesender==0.11.2 + # via glances +h11==0.16.0 + # via uvicorn +ibm-cloud-sdk-core==3.24.2 + # via ibmcloudant +ibmcloudant==0.11.0 + # via glances +idna==3.11 + # via + # anyio + # requests +ifaddr==0.2.0 + # via zeroconf +importlib-metadata==7.1.0 ; python_full_version < '3.10' + # via pygal +importlib-metadata==8.7.0 ; python_full_version >= '3.10' + # via pygal +influxdb==5.3.2 + # via glances +influxdb-client==1.49.0 + # via glances +influxdb3-python==0.16.0 + # via glances +iniconfig==2.1.0 ; python_full_version < '3.10' + # via pytest +iniconfig==2.3.0 ; python_full_version >= '3.10' + # via pytest +jinja2==3.1.6 + # via + # glances + # pysmi-lextudio +kafka-python==2.2.15 + # via glances +markupsafe==3.0.3 + # via jinja2 +msgpack==1.1.2 + # via influxdb +netifaces2==0.0.22 + # via glances +nvidia-ml-py==13.580.82 + # via glances +packaging==25.0 + # via + # glances + # pytest +paho-mqtt==2.1.0 + # via glances +pbkdf2==1.3 + # via wifi +pika==1.3.2 + # via glances +pluggy==1.6.0 + # via pytest +ply==3.11 + # via pysmi-lextudio +podman==5.6.0 + # via glances +potsdb==1.0.3 + # via glances +prometheus-client==0.23.1 + # via glances +protobuf==4.25.8 ; python_full_version < '3.10' + # via bernhard +protobuf==6.33.0 ; python_full_version >= '3.10' + # via bernhard +psutil==7.1.2 + # via glances +psycopg==3.2.12 + # via glances +psycopg-binary==3.2.12 ; implementation_name != 'pypy' + # via psycopg +pyarrow==21.0.0 ; python_full_version < '3.10' + # via influxdb3-python +pyarrow==22.0.0 ; python_full_version >= '3.10' + # via influxdb3-python +pyasn1==0.6.0 + # via pysnmp-lextudio 
+pycparser==2.23 ; (implementation_name != 'PyPy' and platform_python_implementation != 'PyPy') or (implementation_name == 'pypy' and platform_python_implementation == 'PyPy') + # via cffi +pydantic==2.12.3 + # via fastapi +pydantic-core==2.41.4 + # via pydantic +pygal==3.0.5 + # via glances +pygments==2.19.2 + # via pytest +pyjwt==2.10.1 + # via + # ibm-cloud-sdk-core + # ibmcloudant +pymdstat==0.4.3 + # via glances +pymongo==4.15.3 + # via glances +pysmart-smartx==0.3.10 + # via glances +pysmi-lextudio==1.4.3 + # via pysnmp-lextudio +pysnmp-lextudio==6.3.0 + # via glances +pysnmpcrypto==0.0.4 + # via pysnmp-lextudio +pytest==8.4.2 + # via pytest-cov +pytest-cov==4.1.0 + # via pysnmp-lextudio +python-dateutil==2.9.0.post0 + # via + # elasticsearch + # glances + # ibm-cloud-sdk-core + # ibmcloudant + # influxdb + # influxdb-client + # influxdb3-python +pytz==2025.2 + # via influxdb +pywin32==311 ; sys_platform == 'win32' + # via docker +pyzmq==27.1.0 + # via glances +reactivex==4.0.4 + # via + # influxdb-client + # influxdb3-python +requests==2.32.5 + # via + # docker + # glances + # ibm-cloud-sdk-core + # ibmcloudant + # influxdb + # podman + # pysmi-lextudio +setuptools==80.9.0 + # via + # influxdb-client + # wifi +shtab==1.7.2 ; sys_platform != 'win32' + # via glances +six==1.17.0 + # via + # glances + # influxdb + # python-dateutil +sniffio==1.3.1 + # via + # anyio + # elastic-transport + # elasticsearch +sparklines==0.7.0 + # via glances +starlette==0.49.2 + # via fastapi +statsd==4.0.1 + # via glances +termcolor==3.1.0 ; python_full_version < '3.10' + # via sparklines +termcolor==3.2.0 ; python_full_version >= '3.10' + # via sparklines +tomli==2.0.2 ; python_full_version <= '3.11' + # via + # coverage + # podman + # pytest +typing-extensions==4.15.0 + # via + # anyio + # cryptography + # elasticsearch + # fastapi + # psycopg + # pydantic + # pydantic-core + # reactivex + # starlette + # typing-inspection + # uvicorn +typing-inspection==0.4.2 + # via pydantic +tzdata==2025.2 ; sys_platform == 'win32' + # via psycopg +urllib3==2.5.0 + # via + # docker + # elastic-transport + # ibm-cloud-sdk-core + # influxdb-client + # influxdb3-python + # podman + # requests +uvicorn==0.38.0 + # via glances +wifi==0.3.8 + # via glances +windows-curses==2.4.1 ; sys_platform == 'win32' + # via glances +zeroconf==0.148.0 + # via glances +zipp==3.23.0 + # via importlib-metadata diff --git a/conf/glances.conf b/conf/glances.conf index 643552d3..0e683825 100644 --- a/conf/glances.conf +++ b/conf/glances.conf @@ -49,7 +49,7 @@ history_size=1200 # You can download it in a specific folder # thanks to https://github.com/nicolargo/glances/issues/2021 # then configure this folder with the webui_root_path key -# Default is folder where glances_restfull_api.py is hosted +# Default is folder where glances_restful_api.py is hosted #webui_root_path= # CORS options # Comma separated list of origins that should be permitted to make cross-origin requests. 
@@ -181,6 +181,8 @@ temperature_critical=80 [mem] disable=False +# Display available memory instead of used memory +#available=True # Define RAM thresholds in % # Default values if not defined: 50/70/90 careful=50 @@ -300,15 +302,32 @@ hide_zero=False #show=sda.* # Alias for sda1 and sdb1 #alias=sda1:SystemDisk,sdb1:DataDisk -# Set thresholds (in bytes per second) for a given disk name (rx = read / tx = write) +# Default latency thresholds (in ms) (rx = read / tx = write) +rx_latency_careful=10 +rx_latency_warning=20 +rx_latency_critical=50 +tx_latency_careful=10 +tx_latency_warning=20 +tx_latency_critical=50 +# Set latency thresholds (latency in ms) for a given disk name (rx = read / tx = write) +# dm-0_rx_latency_careful=10 +# dm-0_rx_latency_warning=20 +# dm-0_rx_latency_critical=50 +# dm-0_rx_latency_log=False +# dm-0_tx_latency_careful=10 +# dm-0_tx_latency_warning=20 +# dm-0_tx_latency_critical=50 +# dm-0_tx_latency_log=False +# There is no default bitrate thresholds for disk (because it is not possible to know the disk speed) +# Set bitrate thresholds (in bytes per second) for a given disk name (rx = read / tx = write) #dm-0_rx_careful=4000000000 #dm-0_rx_warning=5000000000 #dm-0_rx_critical=6000000000 -#dm-0_rx_log=True +#dm-0_rx_log=False #dm-0_tx_careful=700000000 #dm-0_tx_warning=900000000 #dm-0_tx_critical=1000000000 -#dm-0_tx_log=True +#dm-0_tx_log=False [fs] disable=False @@ -384,8 +403,8 @@ port=7634 # Documentation: https://glances.readthedocs.io/en/latest/aoa/sensors.html disable=False # Set the refresh multiplicator for the sensors -# By default refresh every Glances refresh * 3 (increase to reduce CPU consumption) -#refresh=3 +# By default refresh every Glances refresh * 5 (increase to reduce CPU consumption) +#refresh=5 # Hide some sensors (comma separated list of regexp) hide=unknown.* # Show only the following sensors (comma separated list of regexp) @@ -431,6 +450,8 @@ disable=False # Stats that can be disabled: cpu_percent,memory_info,memory_percent,username,cpu_times,num_threads,nice,status,io_counters,cmdline # Stats that can not be disable: pid,name #disable_stats=cpu_percent,memory_info,memory_percent,username,cpu_times,num_threads,nice,status,io_counters,cmdline +# Disable display of virtual memory +#disable_virtual_memory=True # Define CPU/MEM (per process) thresholds in % # Default values if not defined: 50/70/90 cpu_careful=50 @@ -526,8 +547,8 @@ disable=False # Define the maximum docker size name (default is 20 chars) max_name_size=20 # List of stats to disable (not display) -# Following stats can be disabled: name,status,uptime,cpu,mem,diskio,networkio,command -; disable_stats=diskio,networkio +# Following stats can be disabled: name,status,uptime,cpu,mem,diskio,networkio,ports,command +disable_stats=command # Thresholds for CPU and MEM (in %) ; cpu_careful=50 ; cpu_warning=70 @@ -605,6 +626,11 @@ disable=False # Exports ############################################################################## +[export] +# Common section for all exporters +# Do not export following fields (comma separated list of regex) +#exclude_fields=.*_critical,.*_careful,.*_warning,.*\.key$ + [graph] # Configuration for the --export graph option # Set the path where the graph (.svg files) will be created diff --git a/dev-requirements.txt b/dev-requirements.txt index f4052794..3b25ef8b 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,20 +1,483 @@ -codespell -coverage -fonttools>=4.43.0 # not directly required, pinned by Snyk to avoid a vulnerability -gprof2dot 
-matplotlib -memory-profiler -numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability -pillow>=10.0.1 # not directly required, pinned by Snyk to avoid a vulnerability -pre-commit -py-spy -pyright -pytest -requirements-parser -rstcheck -ruff -selenium -semgrep; platform_system == 'Linux' -setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability -webdriver-manager -h11>=0.16.0 # not directly required, pinned by Snyk to avoid a vulnerability +# This file was autogenerated by uv via the following command: +# uv export --no-hashes --only-dev --output-file dev-requirements.txt +alabaster==0.7.16 ; python_full_version < '3.10' + # via sphinx +alabaster==1.0.0 ; python_full_version >= '3.10' + # via sphinx +annotated-types==0.7.0 + # via pydantic +anyio==4.11.0 ; python_full_version >= '3.10' + # via + # httpx + # mcp + # sse-starlette + # starlette +attrs==25.4.0 + # via + # glom + # jsonschema + # outcome + # referencing + # reuse + # semgrep + # trio +babel==2.17.0 + # via sphinx +binaryornot==0.4.4 ; python_full_version < '3.10' + # via reuse +boltons==21.0.0 + # via + # face + # glom + # semgrep +boolean-py==5.0 + # via + # license-expression + # reuse +bracex==2.6 + # via wcmatch +certifi==2025.10.5 + # via + # httpcore + # httpx + # requests + # selenium +cffi==2.0.0 ; implementation_name != 'pypy' and os_name == 'nt' + # via trio +cfgv==3.4.0 + # via pre-commit +chardet==5.2.0 ; python_full_version < '3.10' + # via binaryornot +charset-normalizer==3.4.4 + # via + # python-debian + # requests +click==8.1.8 + # via + # click-option-group + # reuse + # semgrep + # typer + # uvicorn +click-option-group==0.5.9 + # via semgrep +codespell==2.4.1 +colorama==0.4.6 + # via + # click + # pytest + # semgrep + # sphinx +contourpy==1.3.0 ; python_full_version < '3.10' + # via matplotlib +contourpy==1.3.2 ; python_full_version == '3.10.*' + # via matplotlib +contourpy==1.3.3 ; python_full_version >= '3.11' + # via matplotlib +cycler==0.12.1 + # via matplotlib +defusedxml==0.7.1 ; python_full_version < '3.10' + # via semgrep +deprecated==1.3.1 ; python_full_version < '3.10' + # via + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-http +distlib==0.4.0 + # via virtualenv +docutils==0.21.2 + # via + # rstcheck-core + # sphinx + # sphinx-rtd-theme +exceptiongroup==1.2.2 + # via + # anyio + # pytest + # semgrep + # trio + # trio-websocket +face==24.0.0 + # via glom +filelock==3.19.1 ; python_full_version < '3.10' + # via virtualenv +filelock==3.20.0 ; python_full_version >= '3.10' + # via virtualenv +fonttools==4.60.1 + # via matplotlib +glom==22.1.0 + # via semgrep +googleapis-common-protos==1.71.0 + # via opentelemetry-exporter-otlp-proto-http +gprof2dot==2025.4.14 +h11==0.16.0 + # via + # httpcore + # uvicorn + # wsproto +httpcore==1.0.9 ; python_full_version >= '3.10' + # via httpx +httpx==0.28.1 ; python_full_version >= '3.10' + # via mcp +httpx-sse==0.4.3 ; python_full_version >= '3.10' + # via mcp +identify==2.6.15 + # via pre-commit +idna==3.11 + # via + # anyio + # httpx + # requests + # trio +imagesize==1.4.1 + # via sphinx +importlib-metadata==7.1.0 ; python_full_version < '3.10' + # via + # opentelemetry-api + # sphinx +importlib-metadata==8.7.0 ; python_full_version >= '3.10' + # via opentelemetry-api +importlib-resources==6.5.2 ; python_full_version < '3.10' + # via matplotlib +iniconfig==2.1.0 ; python_full_version < '3.10' + # via pytest +iniconfig==2.3.0 ; python_full_version >= '3.10' + # via pytest +jinja2==3.1.6 + # via + # reuse 
+ # sphinx +jsonschema==4.25.1 + # via + # mcp + # semgrep +jsonschema-specifications==2025.9.1 + # via jsonschema +kiwisolver==1.4.7 ; python_full_version < '3.10' + # via matplotlib +kiwisolver==1.4.9 ; python_full_version >= '3.10' + # via matplotlib +license-expression==30.4.4 + # via reuse +markdown-it-py==3.0.0 ; python_full_version < '3.10' + # via rich +markdown-it-py==4.0.0 ; python_full_version >= '3.10' + # via rich +markupsafe==3.0.3 + # via jinja2 +matplotlib==3.9.4 ; python_full_version < '3.10' +matplotlib==3.10.7 ; python_full_version >= '3.10' +mcp==1.16.0 ; python_full_version >= '3.10' + # via semgrep +mdurl==0.1.2 + # via markdown-it-py +memory-profiler==0.61.0 +nodeenv==1.9.1 + # via + # pre-commit + # pyright +numpy==2.0.2 ; python_full_version < '3.10' + # via + # contourpy + # matplotlib +numpy==2.2.6 ; python_full_version == '3.10.*' + # via + # contourpy + # matplotlib +numpy==2.3.4 ; python_full_version >= '3.11' + # via + # contourpy + # matplotlib +opentelemetry-api==1.25.0 ; python_full_version < '3.10' + # via + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-requests + # opentelemetry-sdk + # opentelemetry-semantic-conventions + # semgrep +opentelemetry-api==1.37.0 ; python_full_version >= '3.10' + # via + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-requests + # opentelemetry-sdk + # opentelemetry-semantic-conventions + # semgrep +opentelemetry-exporter-otlp-proto-common==1.25.0 ; python_full_version < '3.10' + # via opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-common==1.37.0 ; python_full_version >= '3.10' + # via opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-http==1.25.0 ; python_full_version < '3.10' + # via semgrep +opentelemetry-exporter-otlp-proto-http==1.37.0 ; python_full_version >= '3.10' + # via semgrep +opentelemetry-instrumentation==0.46b0 ; python_full_version < '3.10' + # via opentelemetry-instrumentation-requests +opentelemetry-instrumentation==0.58b0 ; python_full_version >= '3.10' + # via opentelemetry-instrumentation-requests +opentelemetry-instrumentation-requests==0.46b0 ; python_full_version < '3.10' + # via semgrep +opentelemetry-instrumentation-requests==0.58b0 ; python_full_version >= '3.10' + # via semgrep +opentelemetry-proto==1.25.0 ; python_full_version < '3.10' + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-http +opentelemetry-proto==1.37.0 ; python_full_version >= '3.10' + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-http +opentelemetry-sdk==1.25.0 ; python_full_version < '3.10' + # via + # opentelemetry-exporter-otlp-proto-http + # semgrep +opentelemetry-sdk==1.37.0 ; python_full_version >= '3.10' + # via + # opentelemetry-exporter-otlp-proto-http + # semgrep +opentelemetry-semantic-conventions==0.46b0 ; python_full_version < '3.10' + # via + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-semantic-conventions==0.58b0 ; python_full_version >= '3.10' + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-util-http==0.46b0 ; python_full_version < '3.10' + # via opentelemetry-instrumentation-requests +opentelemetry-util-http==0.58b0 ; python_full_version >= '3.10' + # via opentelemetry-instrumentation-requests +outcome==1.3.0.post0 + # via + # trio + # trio-websocket 
+packaging==25.0 + # via + # matplotlib + # opentelemetry-instrumentation + # pytest + # requirements-parser + # semgrep + # sphinx + # webdriver-manager +peewee==3.18.2 + # via semgrep +pillow==11.3.0 ; python_full_version < '3.10' + # via matplotlib +pillow==12.0.0 ; python_full_version >= '3.10' + # via matplotlib +platformdirs==4.4.0 ; python_full_version < '3.10' + # via virtualenv +platformdirs==4.5.0 ; python_full_version >= '3.10' + # via virtualenv +pluggy==1.6.0 + # via pytest +pre-commit==4.3.0 + # via + # googleapis-common-protos + # opentelemetry-proto +protobuf==6.33.0 ; python_full_version >= '3.10' +protobuf==4.25.8 ; python_full_version < '3.10' + # via + # googleapis-common-protos + # opentelemetry-proto +psutil==7.1.2 + # via memory-profiler +py-spy==0.4.1 +pycparser==2.23 ; implementation_name != 'PyPy' and implementation_name != 'pypy' and os_name == 'nt' + # via cffi +pydantic==2.12.3 + # via + # mcp + # pydantic-settings + # rstcheck-core +pydantic-core==2.41.4 + # via pydantic +pydantic-settings==2.11.0 ; python_full_version >= '3.10' + # via mcp +pygments==2.19.2 + # via + # pytest + # rich + # sphinx +pyinstrument==5.1.1 +pyparsing==3.2.5 + # via matplotlib +pyright==1.1.407 +pysocks==1.7.1 + # via urllib3 +pytest==8.4.2 +python-dateutil==2.9.0.post0 + # via matplotlib +python-debian==1.0.1 + # via reuse +python-dotenv==1.2.1 + # via + # pydantic-settings + # webdriver-manager +python-magic==0.4.27 ; python_full_version >= '3.10' + # via reuse +python-multipart==0.0.20 ; python_full_version >= '3.10' + # via mcp +pywin32==311 ; python_full_version >= '3.10' and sys_platform == 'win32' + # via + # mcp + # semgrep +pyyaml==6.0.3 + # via pre-commit +referencing==0.36.2 ; python_full_version < '3.10' + # via + # jsonschema + # jsonschema-specifications +referencing==0.37.0 ; python_full_version >= '3.10' + # via + # jsonschema + # jsonschema-specifications +requests==2.32.5 + # via + # opentelemetry-exporter-otlp-proto-http + # semgrep + # sphinx + # webdriver-manager +requirements-parser==0.13.0 +reuse==5.1.1 ; python_full_version < '3.10' +reuse==6.2.0 ; python_full_version >= '3.10' +rich==13.5.3 + # via + # semgrep + # typer +roman-numerals-py==3.1.0 ; python_full_version >= '3.11' + # via sphinx +rpds-py==0.27.1 ; python_full_version < '3.10' + # via + # jsonschema + # referencing +rpds-py==0.28.0 ; python_full_version >= '3.10' + # via + # jsonschema + # referencing +rstcheck==6.2.5 +rstcheck-core==1.2.2 + # via rstcheck +ruamel-yaml==0.18.16 + # via semgrep +ruamel-yaml-clib==0.2.14 ; python_full_version >= '3.10' or platform_python_implementation == 'CPython' + # via + # ruamel-yaml + # semgrep +ruff==0.14.3 +selenium==4.36.0 ; python_full_version < '3.10' +selenium==4.38.0 ; python_full_version >= '3.10' +semgrep==1.136.0 ; python_full_version < '3.10' +semgrep==1.142.0 ; python_full_version >= '3.10' +setuptools==80.9.0 + # via opentelemetry-instrumentation +shellingham==1.5.4 + # via typer +six==1.17.0 + # via python-dateutil +sniffio==1.3.1 + # via + # anyio + # trio +snowballstemmer==3.0.1 + # via sphinx +sortedcontainers==2.4.0 + # via trio +sphinx==7.4.7 ; python_full_version < '3.10' + # via + # sphinx-rtd-theme + # sphinxcontrib-jquery +sphinx==8.1.3 ; python_full_version == '3.10.*' + # via + # sphinx-rtd-theme + # sphinxcontrib-jquery +sphinx==8.2.3 ; python_full_version >= '3.11' + # via + # sphinx-rtd-theme + # sphinxcontrib-jquery +sphinx-rtd-theme==3.0.2 +sphinxcontrib-applehelp==2.0.0 + # via sphinx +sphinxcontrib-devhelp==2.0.0 + # via sphinx 
+sphinxcontrib-htmlhelp==2.1.0 + # via sphinx +sphinxcontrib-jquery==4.1 + # via sphinx-rtd-theme +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==2.0.0 + # via sphinx +sphinxcontrib-serializinghtml==2.0.0 + # via sphinx +sse-starlette==3.0.3 ; python_full_version >= '3.10' + # via mcp +starlette==0.49.2 ; python_full_version >= '3.10' + # via mcp +tomli==2.0.2 + # via + # pytest + # semgrep + # sphinx +tomlkit==0.13.3 + # via reuse +trio==0.31.0 ; python_full_version < '3.10' + # via + # selenium + # trio-websocket +trio==0.32.0 ; python_full_version >= '3.10' + # via + # selenium + # trio-websocket +trio-websocket==0.12.2 + # via selenium +typer==0.20.0 + # via rstcheck +typing-extensions==4.15.0 + # via + # anyio + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-sdk + # opentelemetry-semantic-conventions + # pydantic + # pydantic-core + # pyright + # referencing + # selenium + # semgrep + # starlette + # typer + # typing-inspection + # uvicorn + # virtualenv +typing-inspection==0.4.2 + # via + # pydantic + # pydantic-settings +urllib3==2.5.0 + # via + # requests + # selenium + # semgrep +uvicorn==0.38.0 ; python_full_version >= '3.10' and sys_platform != 'emscripten' + # via mcp +virtualenv==20.35.4 + # via pre-commit +wcmatch==8.5.2 + # via semgrep +webdriver-manager==4.0.2 +websocket-client==1.9.0 + # via selenium +wrapt==1.17.3 + # via + # deprecated + # opentelemetry-instrumentation +wsproto==1.2.0 + # via trio-websocket +zipp==3.23.0 + # via + # importlib-metadata + # importlib-resources diff --git a/doc-requirements.txt b/doc-requirements.txt deleted file mode 100644 index 28ee5654..00000000 --- a/doc-requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -psutil -defusedxml -orjson -reuse -setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability -sphinx -sphinx_rtd_theme diff --git a/docker-compose/docker-compose.yml b/docker-compose/docker-compose.yml index f71346c6..b0e32e47 100644 --- a/docker-compose/docker-compose.yml +++ b/docker-compose/docker-compose.yml @@ -12,6 +12,9 @@ services: - "/var/run/docker.sock:/var/run/docker.sock:ro" - "/run/user/1000/podman/podman.sock:/run/user/1000/podman/podman.sock:ro" - "./glances.conf:/glances/conf/glances.conf" +# # Uncomment for proper distro information in upper panel. +# # Works only for distros that do have this file (most of distros do). +# - "/etc/os-release:/etc/os-release:ro" environment: - TZ=${TZ} - GLANCES_OPT=-C /glances/conf/glances.conf -w diff --git a/docker-compose/glances.conf b/docker-compose/glances.conf index 072526ee..e11f1eae 100755 --- a/docker-compose/glances.conf +++ b/docker-compose/glances.conf @@ -49,7 +49,7 @@ max_processes_display=25 # You can download it in a specific folder # thanks to https://github.com/nicolargo/glances/issues/2021 # then configure this folder with the webui_root_path key -# Default is folder where glances_restfull_api.py is hosted +# Default is folder where glances_restful_api.py is hosted #webui_root_path= # CORS options # Comma separated list of origins that should be permitted to make cross-origin requests. 
@@ -181,6 +181,8 @@ temperature_critical=80 [mem] disable=False +# Display available memory instead of used memory +#available=True # Define RAM thresholds in % # Default values if not defined: 50/70/90 careful=50 @@ -300,15 +302,32 @@ hide_zero=False #show=sda.* # Alias for sda1 and sdb1 #alias=sda1:SystemDisk,sdb1:DataDisk -# Set thresholds (in bytes per second) for a given disk name (rx = read / tx = write) +# Default latency thresholds (in ms) (rx = read / tx = write) +rx_latency_careful=10 +rx_latency_warning=20 +rx_latency_critical=50 +tx_latency_careful=10 +tx_latency_warning=20 +tx_latency_critical=50 +# Set latency thresholds (latency in ms) for a given disk name (rx = read / tx = write) +# dm-0_rx_latency_careful=10 +# dm-0_rx_latency_warning=20 +# dm-0_rx_latency_critical=50 +# dm-0_rx_latency_log=False +# dm-0_tx_latency_careful=10 +# dm-0_tx_latency_warning=20 +# dm-0_tx_latency_critical=50 +# dm-0_tx_latency_log=False +# There is no default bitrate thresholds for disk (because it is not possible to know the disk speed) +# Set bitrate thresholds (in bytes per second) for a given disk name (rx = read / tx = write) #dm-0_rx_careful=4000000000 #dm-0_rx_warning=5000000000 #dm-0_rx_critical=6000000000 -#dm-0_rx_log=True +#dm-0_rx_log=False #dm-0_tx_careful=700000000 #dm-0_tx_warning=900000000 #dm-0_tx_critical=1000000000 -#dm-0_tx_log=True +#dm-0_tx_log=False [fs] disable=False @@ -384,8 +403,8 @@ port=7634 # Documentation: https://glances.readthedocs.io/en/latest/aoa/sensors.html disable=False # Set the refresh multiplicator for the sensors -# By default refresh every Glances refresh * 3 (increase to reduce CPU consumption) -#refresh=3 +# By default refresh every Glances refresh * 5 (increase to reduce CPU consumption) +#refresh=5 # Hide some sensors (comma separated list of regexp) hide=unknown.* # Show only the following sensors (comma separated list of regexp) @@ -431,6 +450,8 @@ disable=False # Stats that can be disabled: cpu_percent,memory_info,memory_percent,username,cpu_times,num_threads,nice,status,io_counters,cmdline # Stats that can not be disable: pid,name #disable_stats=cpu_percent,memory_info,memory_percent,username,cpu_times,num_threads,nice,status,io_counters,cmdline +# Disable display of virtual memory +#disable_virtual_memory=True # Define CPU/MEM (per process) thresholds in % # Default values if not defined: 50/70/90 cpu_careful=50 @@ -510,7 +531,8 @@ port_default_gateway=False disable=True # Define the maximum VMs size name (default is 20 chars) max_name_size=20 -# By default, Glances only display running VMs with states: 'Running', 'Starting' or 'Restarting' +# By default, Glances only display running VMs with states: +# 'Running', 'Paused', 'Starting' or 'Restarting' # Set the following key to True to display all VMs regarding their states all=False @@ -525,8 +547,8 @@ disable=False # Define the maximum docker size name (default is 20 chars) max_name_size=20 # List of stats to disable (not display) -# Following stats can be disabled: name,status,uptime,cpu,mem,diskio,networkio,command -; disable_stats=diskio,networkio +# Following stats can be disabled: name,status,uptime,cpu,mem,diskio,networkio,ports,command +disable_stats=command # Thresholds for CPU and MEM (in %) ; cpu_careful=50 ; cpu_warning=70 @@ -604,6 +626,11 @@ disable=False # Exports ############################################################################## +[export] +# Common section for all exporters +# Do not export following fields (comma separated list of regex) 
+#exclude_fields=.*_critical,.*_careful,.*_warning,.*\.key$ + [graph] # Configuration for the --export graph option # Set the path where the graph (.svg files) will be created diff --git a/docker-files/alpine.Dockerfile b/docker-files/alpine.Dockerfile index 8c3f84dc..cedf5e32 100644 --- a/docker-files/alpine.Dockerfile +++ b/docker-files/alpine.Dockerfile @@ -66,7 +66,7 @@ RUN /venv-build/bin/python${PYTHON_VERSION} -m pip install --upgrade pip RUN python${PYTHON_VERSION} -m venv --without-pip venv -COPY requirements.txt docker-requirements.txt webui-requirements.txt optional-requirements.txt ./ +COPY pyproject.toml docker-requirements.txt all-requirements.txt ./ ############################################################################## # BUILD: Install the minimal image deps @@ -74,9 +74,7 @@ FROM build AS buildminimal ARG PYTHON_VERSION RUN /venv-build/bin/python${PYTHON_VERSION} -m pip install --target="/venv/lib/python${PYTHON_VERSION}/site-packages" \ - -r requirements.txt \ - -r docker-requirements.txt \ - -r webui-requirements.txt + -r docker-requirements.txt ############################################################################## # BUILD: Install all the deps @@ -89,8 +87,7 @@ ARG CASS_DRIVER_NO_CYTHON=1 ARG CARGO_NET_GIT_FETCH_WITH_CLI=true RUN /venv-build/bin/python${PYTHON_VERSION} -m pip install --target="/venv/lib/python${PYTHON_VERSION}/site-packages" \ - -r requirements.txt \ - -r optional-requirements.txt + -r all-requirements.txt ############################################################################## # RELEASE Stages diff --git a/docker-files/ubuntu.Dockerfile b/docker-files/ubuntu.Dockerfile index de5a71ed..2ae96056 100644 --- a/docker-files/ubuntu.Dockerfile +++ b/docker-files/ubuntu.Dockerfile @@ -55,7 +55,7 @@ RUN apt-get clean \ RUN python3 -m venv --without-pip venv -COPY requirements.txt docker-requirements.txt webui-requirements.txt optional-requirements.txt ./ +COPY pyproject.toml docker-requirements.txt all-requirements.txt ./ ############################################################################## # BUILD: Install the minimal image deps @@ -63,9 +63,7 @@ FROM build AS buildminimal ARG PYTHON_VERSION RUN python3 -m pip install --target="/venv/lib/python${PYTHON_VERSION}/site-packages" \ - -r requirements.txt \ - -r docker-requirements.txt \ - -r webui-requirements.txt + -r docker-requirements.txt ############################################################################## # BUILD: Install all the deps @@ -73,8 +71,7 @@ FROM build AS buildfull ARG PYTHON_VERSION RUN python3 -m pip install --target="/venv/lib/python${PYTHON_VERSION}/site-packages" \ - -r requirements.txt \ - -r optional-requirements.txt + -r all-requirements.txt ############################################################################## # RELEASE Stages diff --git a/docker-requirements.txt b/docker-requirements.txt index 28945aec..b8696713 100644 --- a/docker-requirements.txt +++ b/docker-requirements.txt @@ -1,10 +1,85 @@ -# install with base requirements file --r requirements.txt - -docker>=6.1.1 -orjson # JSON Serialization speedup -podman -python-dateutil -requests -six -urllib3 +# This file was autogenerated by uv via the following command: +# uv export --no-emit-workspace --no-hashes --no-group dev --extra containers --extra web --output-file docker-requirements.txt +annotated-doc==0.0.3 + # via fastapi +annotated-types==0.7.0 + # via pydantic +anyio==4.11.0 + # via starlette +certifi==2025.10.5 + # via requests +charset-normalizer==3.4.4 + # via requests 
+click==8.1.8 + # via uvicorn +colorama==0.4.6 ; sys_platform == 'win32' + # via click +defusedxml==0.7.1 + # via glances +docker==7.1.0 + # via glances +exceptiongroup==1.2.2 ; python_full_version < '3.11' + # via anyio +fastapi==0.120.4 + # via glances +h11==0.16.0 + # via uvicorn +idna==3.11 + # via + # anyio + # requests +jinja2==3.1.6 + # via glances +markupsafe==3.0.3 + # via jinja2 +packaging==25.0 + # via glances +podman==5.6.0 + # via glances +psutil==7.1.2 + # via glances +pydantic==2.12.3 + # via fastapi +pydantic-core==2.41.4 + # via pydantic +python-dateutil==2.9.0.post0 + # via glances +pywin32==311 ; sys_platform == 'win32' + # via docker +requests==2.32.5 + # via + # docker + # glances + # podman +shtab==1.7.2 ; sys_platform != 'win32' + # via glances +six==1.17.0 + # via + # glances + # python-dateutil +sniffio==1.3.1 + # via anyio +starlette==0.49.2 + # via fastapi +tomli==2.0.2 ; python_full_version < '3.11' + # via podman +typing-extensions==4.15.0 + # via + # anyio + # fastapi + # pydantic + # pydantic-core + # starlette + # typing-inspection + # uvicorn +typing-inspection==0.4.2 + # via pydantic +urllib3==2.5.0 + # via + # docker + # podman + # requests +uvicorn==0.38.0 + # via glances +windows-curses==2.4.1 ; sys_platform == 'win32' + # via glances diff --git a/docs/Makefile b/docs/Makefile index 75bae57f..0c72c700 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -SPHINXBUILD = ../venv/bin/sphinx-build +SPHINXBUILD = ../.venv/bin/sphinx-build PAPER = BUILDDIR = _build diff --git a/docs/_static/glances-flame.svg b/docs/_static/glances-flame.svg index 93a73b98..53521abc 100644 --- a/docs/_static/glances-flame.svg +++ b/docs/_static/glances-flame.svg @@ -1,4 +1,4 @@ - \ No newline at end of file diff --git a/docs/_static/glances-memory-profiling-with-history.png b/docs/_static/glances-memory-profiling-with-history.png index f2d8b659..b0843751 100644 Binary files a/docs/_static/glances-memory-profiling-with-history.png and b/docs/_static/glances-memory-profiling-with-history.png differ diff --git a/docs/_static/glances-memory-profiling-without-history.png b/docs/_static/glances-memory-profiling-without-history.png index ab39aefe..76df3995 100644 Binary files a/docs/_static/glances-memory-profiling-without-history.png and b/docs/_static/glances-memory-profiling-without-history.png differ diff --git a/docs/_static/screenshot-fetch.png b/docs/_static/screenshot-fetch.png new file mode 100644 index 00000000..6b753b61 Binary files /dev/null and b/docs/_static/screenshot-fetch.png differ diff --git a/docs/aoa/actions.rst b/docs/aoa/actions.rst index 65030cd1..778b2cb0 100644 --- a/docs/aoa/actions.rst +++ b/docs/aoa/actions.rst @@ -63,7 +63,7 @@ Within ``/etc/glances/actions.d/fs-critical.py``: .. note:: You can use all the stats for the current plugin. See - https://github.com/nicolargo/glances/wiki/The-Glances-RESTFULL-JSON-API + https://github.com/nicolargo/glances/wiki/The-Glances-RESTFUL-JSON-API for the stats list. It is also possible to repeat action until the end of the alert. 
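To check which stats a given plugin exposes for use in such an action script, the RESTful API can be queried directly. The following is a minimal sketch, assuming a Glances server running locally on its default port 61208 and the version 4 API endpoints:

.. code-block:: python

    import json
    from urllib.request import urlopen

    # Query the 'fs' plugin of a local Glances server and list the stat
    # names available for each file system (adjust the host, port and
    # API version to match your own setup).
    with urlopen("http://localhost:61208/api/4/fs") as response:
        fs_stats = json.load(response)

    for fs in fs_stats:
        print(fs["mnt_point"], sorted(fs.keys()))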
diff --git a/docs/aoa/containers.rst b/docs/aoa/containers.rst
index 2fd97f18..e7b3e40b 100644
--- a/docs/aoa/containers.rst
+++ b/docs/aoa/containers.rst
@@ -32,8 +32,8 @@ under the ``[containers]`` section:
     # Define the maximum containers size name (default is 20 chars)
     max_name_size=20
     # List of stats to disable (not display)
-    # Following stats can be disabled: name,status,uptime,cpu,mem,diskio,networkio,command
-    disable_stats=diskio,networkio
+    # Following stats can be disabled: name,status,uptime,cpu,mem,diskio,networkio,ports,command
+    disable_stats=command
     # Global containers' thresholds for CPU and MEM (in %)
     cpu_careful=50
     cpu_warning=70
diff --git a/docs/aoa/diskio.rst b/docs/aoa/diskio.rst
index af8b2df0..28e95d16 100644
--- a/docs/aoa/diskio.rst
+++ b/docs/aoa/diskio.rst
@@ -5,17 +5,12 @@ Disk I/O
 
 .. image:: ../_static/diskio.png
 
-Glances displays the disk I/O throughput. The unit is adapted
-dynamically.
-
-You can display:
-
+Glances displays the disk I/O throughput, count and mean latency:
 - bytes per second (default behavior / Bytes/s, KBytes/s, MBytes/s, etc)
 - requests per second (using --diskio-iops option or *B* hotkey)
+- mean latency (using --diskio-latency option or *L* hotkey)
 
-There is no alert on this information.
-
-It's possible to define:
+It's also possible to define:
 
 - a list of disk to show (white list)
 - a list of disks to hide
@@ -42,13 +37,20 @@ Filtering is based on regular expression. Please be sure that your regular
 expression works as expected. You can use an online tool like `regex101`_ in
 order to test your regular expression.
 
-It is also possible to define thesholds for bytes read and write per second:
+It is also possible to define thresholds for latency and bytes read and write per second:
 
 .. code-block:: ini
 
     [diskio]
     # Alias for sda1 and sdb1
     #alias=sda1:SystemDisk,sdb1:DataDisk
+    # Default latency thresholds (in ms) (rx = read / tx = write)
+    rx_latency_careful=10
+    rx_latency_warning=20
+    rx_latency_critical=50
+    tx_latency_careful=10
+    tx_latency_warning=20
+    tx_latency_critical=50
     # Set thresholds (in bytes per second) for a given disk name (rx = read / tx = write)
     dm-0_rx_careful=4000000000
     dm-0_rx_warning=5000000000
diff --git a/docs/aoa/fs.rst b/docs/aoa/fs.rst
index 4d5dc100..d16e42bf 100644
--- a/docs/aoa/fs.rst
+++ b/docs/aoa/fs.rst
@@ -35,6 +35,11 @@ system:
     [fs]
     allow=shm
 
+With the above configuration key, it is also possible to monitor NFS
+mount points (allow=nfs). Be aware that this can slow down the
+performance of the plugin if the NFS server is not reachable. In this
+case, the plugin will wait for a 2-second timeout.
+
 Also, you can hide mount points using regular expressions.
 
 To hide all mount points starting with /boot and /snap:
diff --git a/docs/aoa/memory.rst b/docs/aoa/memory.rst
index 2e41584d..b789438a 100644
--- a/docs/aoa/memory.rst
+++ b/docs/aoa/memory.rst
@@ -27,7 +27,7 @@ Stats description:
   is in RAM.
 - **inactive**: (UNIX): memory that is marked as not used.
 - **buffers**: (Linux, BSD): cache for things like file system metadata.
-- **cached**: (Linux, BSD): cache for various things.
+- **cached**: (Linux, BSD): cache for various things (including ZFS cache).
 
 Additional stats available in through the API:
 
@@ -41,6 +41,10 @@ Additional stats available in through the API:
 - **shared**: (BSD): memory that may be simultaneously accessed by multiple processes.
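These additional stats can also be read programmatically. Below is a minimal sketch using the Python API described in ``docs/api/python.rst`` (it assumes Glances 4.4.0 or higher):

.. code-block:: python

    from glances import api

    # Access the memory plugin through the Glances Python API.
    gl = api.GlancesAPI()

    # 'used' and 'percent' are part of the base stats; the additional
    # stats listed above (for example 'active' on Linux) show up as
    # extra keys depending on the operating system.
    print(gl.mem["percent"])
    print(gl.auto_unit(gl.mem["used"]))
    print(sorted(gl.mem.keys()))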
+It is possible to display the available memory instead of the used memory +by setting the ``available`` option to ``True`` in the configuration file +under the ``[mem]`` section. + A character is also displayed just after the MEM header and shows the trend value: diff --git a/docs/aoa/network.rst b/docs/aoa/network.rst index 72b81642..9cc977ed 100644 --- a/docs/aoa/network.rst +++ b/docs/aoa/network.rst @@ -72,7 +72,7 @@ can also be used to set a threshold higher than zero. .. code-block:: ini - [diskio] + [network] hide_zero=True hide_threshold_bytes=0 diff --git a/docs/aoa/ps.rst b/docs/aoa/ps.rst index 4d491bed..e29adc7b 100644 --- a/docs/aoa/ps.rst +++ b/docs/aoa/ps.rst @@ -149,6 +149,18 @@ Columns display pressing on the ``'/'`` key ========================= ============================================== +Disable display of virtual memory +--------------------------------- + +It's possible to disable the display of the VIRT column (virtual memory) by adding the +``disable_virtual_memory=True`` option in the ``[processlist]`` section of the configuration +file (glances.conf): + +.. code-block:: ini + + [processlist] + disable_virtual_memory=True + Process filtering ----------------- diff --git a/docs/aoa/sensors.rst b/docs/aoa/sensors.rst index bc01ce20..e43467b0 100644 --- a/docs/aoa/sensors.rst +++ b/docs/aoa/sensors.rst @@ -33,6 +33,7 @@ thresholds (default behavor). #temperature_core_careful=45 #temperature_core_warning=65 #temperature_core_critical=80 + #alias=temp1:Motherboard 0,core 0:CPU Core 0 .. note 1:: The support for multiple batteries is only available if diff --git a/docs/openapi.json b/docs/api/openapi.json similarity index 100% rename from docs/openapi.json rename to docs/api/openapi.json diff --git a/docs/api/python.rst b/docs/api/python.rst new file mode 100644 index 00000000..0fe8b870 --- /dev/null +++ b/docs/api/python.rst @@ -0,0 +1,1727 @@ +.. _api: + +Python API documentation +======================== + +This documentation describes the Glances Python API. + +Note: This API is only available in Glances 4.4.0 or higher. + + +TL;DR +----- + +You can access the Glances API by importing the `glances.api` module and creating an +instance of the `GlancesAPI` class. This instance provides access to all Glances plugins +and their fields. For example, to access the CPU plugin and its total field, you can +use the following code: + +.. code-block:: python + + >>> from glances import api + >>> gl = api.GlancesAPI() + >>> gl.cpu + {'cpucore': 16, + 'ctx_switches': 1292953734, + 'guest': 0.0, + 'idle': 89.8, + 'interrupts': 1050074731, + 'iowait': 0.4, + 'irq': 0.0, + 'nice': 0.0, + 'soft_interrupts': 453376109, + 'steal': 0.0, + 'syscalls': 0, + 'system': 6.2, + 'total': 6.9, + 'user': 3.6} + >>> gl.cpu["total"] + 6.9 + >>> gl.mem["used"] + 13941734016 + >>> gl.auto_unit(gl.mem["used"]) + 13.0G + +If the stats return a list of items (like network interfaces or processes), you can +access them by their name: + +.. 
code-block:: python + + >>> gl.network.keys() + ['wlp0s20f3', 'veth33b370c', 'veth19c7711'] + >>> gl.network["wlp0s20f3"] + {'alias': None, + 'bytes_all': 214, + 'bytes_all_gauge': 11422792843, + 'bytes_all_rate_per_sec': 607.0, + 'bytes_recv': 128, + 'bytes_recv_gauge': 9255109166, + 'bytes_recv_rate_per_sec': 363.0, + 'bytes_sent': 86, + 'bytes_sent_gauge': 2167683677, + 'bytes_sent_rate_per_sec': 244.0, + 'interface_name': 'wlp0s20f3', + 'key': 'interface_name', + 'speed': 0, + 'time_since_update': 0.35225653648376465} + +Init Glances Python API +----------------------- + +Init the Glances API: + +.. code-block:: python + + >>> from glances import api + >>> gl = api.GlancesAPI() + +Get Glances plugins list +------------------------ + +Get the plugins list: + +.. code-block:: python + + >>> gl.plugins() + ['alert', 'ports', 'diskio', 'containers', 'processcount', 'programlist', 'gpu', 'percpu', 'system', 'network', 'cpu', 'amps', 'processlist', 'load', 'sensors', 'uptime', 'now', 'fs', 'wifi', 'ip', 'help', 'version', 'psutilversion', 'core', 'mem', 'folders', 'quicklook', 'memswap'] + +Glances alert +------------- + +Alert stats: + +.. code-block:: python + + >>> type(gl.alert) + + >>> gl.alert + [{'avg': 99.98865126481176, + 'begin': 1762097741, + 'count': 2, + 'desc': '', + 'end': -1, + 'global_msg': 'High swap (paging) usage', + 'max': 99.98865126481176, + 'min': 99.98865126481176, + 'sort': 'memory_percent', + 'state': 'CRITICAL', + 'sum': 199.97730252962353, + 'top': ['code', 'code', 'cloudcode_cli'], + 'type': 'MEMSWAP'}, + {'avg': 84.94903731521964, + 'begin': 1762097741, + 'count': 2, + 'desc': '', + 'end': -1, + 'global_msg': 'High swap (paging) usage', + 'max': 85.00588993238541, + 'min': 84.89218469805387, + 'sort': 'memory_percent', + 'state': 'WARNING', + 'sum': 169.89807463043928, + 'top': [], + 'type': 'MEM'}] + +Alert fields description: + +* begin: Begin timestamp of the event +* end: End timestamp of the event (or -1 if ongoing) +* state: State of the event (WARNING|CRITICAL) +* type: Type of the event (CPU|LOAD|MEM) +* max: Maximum value during the event period +* avg: Average value during the event period +* min: Minimum value during the event period +* sum: Sum of the values during the event period +* count: Number of values during the event period +* top: Top 3 processes name during the event period +* desc: Description of the event +* sort: Sort key of the top processes +* global_msg: Global alert message + +Alert limits: + +.. code-block:: python + + >>> gl.alert.limits + {'alert_disable': ['False'], 'history_size': 1200.0} + +Glances ports +------------- + +Ports stats: + +.. code-block:: python + + >>> type(gl.ports) + + >>> gl.ports + [{'description': 'DefaultGateway', + 'host': '192.168.1.1', + 'indice': 'port_0', + 'port': 0, + 'refresh': 30, + 'rtt_warning': None, + 'status': 0.010207, + 'timeout': 3}] + +Ports fields description: + +* host: Measurement is be done on this host (or IP address) +* port: Measurement is be done on this port (0 for ICMP) +* description: Human readable description for the host/port +* refresh: Refresh time (in seconds) for this host/port +* timeout: Timeout (in seconds) for the measurement +* status: Measurement result (in seconds) +* rtt_warning: Warning threshold (in seconds) for the measurement +* indice: Unique indice for the host/port + +Ports limits: + +.. 
code-block:: python + + >>> gl.ports.limits + {'history_size': 1200.0, + 'ports_disable': ['False'], + 'ports_port_default_gateway': ['True'], + 'ports_refresh': 30.0, + 'ports_timeout': 3.0} + +Glances diskio +-------------- + +Diskio stats: + +.. code-block:: python + + >>> type(gl.diskio) + + >>> gl.diskio + Return a dict of dict with key= + >>> gl.diskio.keys() + ['nvme0n1', 'nvme0n1p1', 'nvme0n1p2', 'nvme0n1p3', 'dm-0', 'dm-1'] + >>> gl.diskio["nvme0n1"] + {'disk_name': 'nvme0n1', + 'key': 'disk_name', + 'read_bytes': 40689363456, + 'read_count': 2367573, + 'read_latency': 0, + 'read_time': 831705, + 'write_bytes': 81732805632, + 'write_count': 7321149, + 'write_latency': 0, + 'write_time': 6874532} + +Diskio fields description: + +* disk_name: Disk name. +* read_count: Number of reads. +* write_count: Number of writes. +* read_bytes: Number of bytes read. +* write_bytes: Number of bytes written. +* read_time: Time spent reading. +* write_time: Time spent writing. +* read_latency: Mean time spent reading per operation. +* write_latency: Mean time spent writing per operation. + +Diskio limits: + +.. code-block:: python + + >>> gl.diskio.limits + {'diskio_disable': ['False'], + 'diskio_hide': ['loop.*', '/dev/loop.*'], + 'diskio_hide_zero': ['False'], + 'diskio_rx_latency_careful': 10.0, + 'diskio_rx_latency_critical': 50.0, + 'diskio_rx_latency_warning': 20.0, + 'diskio_tx_latency_careful': 10.0, + 'diskio_tx_latency_critical': 50.0, + 'diskio_tx_latency_warning': 20.0, + 'history_size': 1200.0} + +Glances containers +------------------ + +Containers stats: + +.. code-block:: python + + >>> type(gl.containers) + + >>> gl.containers + Return a dict of dict with key= + >>> gl.containers.keys() + ['timescaledb-for-glances', 'prometheus-for-glances'] + >>> gl.containers["timescaledb-for-glances"] + {'command': '/docker-entrypoint.sh postgres', + 'cpu': {'total': 0.0}, + 'cpu_percent': 0.0, + 'created': '2025-11-01T15:37:49.229752418Z', + 'engine': 'docker', + 'id': '7078de8bc380626c26e279a4d6d63df966c29fbc7d3a6a34ada57f8c620609b3', + 'image': ('timescale/timescaledb-ha:pg17',), + 'io': {}, + 'io_rx': None, + 'io_wx': None, + 'key': 'name', + 'memory': {}, + 'memory_inactive_file': None, + 'memory_limit': None, + 'memory_percent': None, + 'memory_usage': None, + 'name': 'timescaledb-for-glances', + 'network': {}, + 'network_rx': None, + 'network_tx': None, + 'ports': '5432->5432/tcp,8008/tcp,8081/tcp', + 'status': 'running', + 'uptime': '23 hours'} + +Containers fields description: + +* name: Container name +* id: Container ID +* image: Container image +* status: Container status +* created: Container creation date +* command: Container command +* cpu_percent: Container CPU consumption +* memory_inactive_file: Container memory inactive file +* memory_limit: Container memory limit +* memory_usage: Container memory usage +* io_rx: Container IO bytes read rate +* io_wx: Container IO bytes write rate +* network_rx: Container network RX bitrate +* network_tx: Container network TX bitrate +* ports: Container ports +* uptime: Container uptime +* engine: Container engine (Docker and Podman are currently supported) +* pod_name: Pod name (only with Podman) +* pod_id: Pod ID (only with Podman) + +Containers limits: + +.. 
code-block:: python + + >>> gl.containers.limits + {'containers_all': ['False'], + 'containers_disable': ['False'], + 'containers_disable_stats': ['command'], + 'containers_max_name_size': 20.0, + 'history_size': 1200.0} + +Glances processcount +-------------------- + +Processcount stats: + +.. code-block:: python + + >>> type(gl.processcount) + + >>> gl.processcount + {'pid_max': 0, 'running': 3, 'sleeping': 448, 'thread': 2559, 'total': 607} + >>> gl.processcount.keys() + ['total', 'running', 'sleeping', 'thread', 'pid_max'] + >>> gl.processcount["total"] + 607 + +Processcount fields description: + +* total: Total number of processes +* running: Total number of running processes +* sleeping: Total number of sleeping processes +* thread: Total number of threads +* pid_max: Maximum number of processes + +Processcount limits: + +.. code-block:: python + + >>> gl.processcount.limits + {'history_size': 1200.0, 'processcount_disable': ['False']} + +Glances gpu +----------- + +Gpu stats: + +.. code-block:: python + + >>> type(gl.gpu) + + >>> gl.gpu + [] + +Gpu fields description: + +* gpu_id: GPU identification +* name: GPU name +* mem: Memory consumption +* proc: GPU processor consumption +* temperature: GPU temperature +* fan_speed: GPU fan speed + +Gpu limits: + +.. code-block:: python + + >>> gl.gpu.limits + {'gpu_disable': ['False'], + 'gpu_mem_careful': 50.0, + 'gpu_mem_critical': 90.0, + 'gpu_mem_warning': 70.0, + 'gpu_proc_careful': 50.0, + 'gpu_proc_critical': 90.0, + 'gpu_proc_warning': 70.0, + 'gpu_temperature_careful': 60.0, + 'gpu_temperature_critical': 80.0, + 'gpu_temperature_warning': 70.0, + 'history_size': 1200.0} + +Glances percpu +-------------- + +Percpu stats: + +.. code-block:: python + + >>> type(gl.percpu) + + >>> gl.percpu + Return a dict of dict with key= + >>> gl.percpu.keys() + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + >>> gl.percpu["0"] + {'cpu_number': 0, + 'dpc': None, + 'guest': 0.0, + 'guest_nice': 0.0, + 'idle': 44.0, + 'interrupt': None, + 'iowait': 0.0, + 'irq': 0.0, + 'key': 'cpu_number', + 'nice': 0.0, + 'softirq': 0.0, + 'steal': 0.0, + 'system': 11.0, + 'total': 56.0, + 'user': 1.0} + +Percpu fields description: + +* cpu_number: CPU number +* total: Sum of CPU percentages (except idle) for current CPU number. +* system: Percent time spent in kernel space. System CPU time is the time spent running code in the Operating System kernel. +* user: CPU percent time spent in user space. User CPU time is the time spent on the processor running your program's code (or code in libraries). +* iowait: *(Linux)*: percent time spent by the CPU waiting for I/O operations to complete. +* idle: percent of CPU used by any program. Every program or task that runs on a computer system occupies a certain amount of processing time on the CPU. If the CPU has completed all tasks it is idle. +* irq: *(Linux and BSD)*: percent time spent servicing/handling hardware/software interrupts. Time servicing interrupts (hardware + software). +* nice: *(Unix)*: percent time occupied by user level processes with a positive nice value. The time the CPU has spent running users' processes that have been *niced*. +* steal: *(Linux)*: percentage of time a virtual CPU waits for a real CPU while the hypervisor is servicing another virtual processor. +* guest: *(Linux)*: percent of time spent running a virtual CPU for guest operating systems under the control of the Linux kernel. +* guest_nice: *(Linux)*: percent of time spent running a niced guest (virtual CPU). 
+* softirq: *(Linux)*: percent of time spent handling software interrupts. +* dpc: *(Windows)*: percent of time spent handling deferred procedure calls. +* interrupt: *(Windows)*: percent of time spent handling software interrupts. + +Percpu limits: + +.. code-block:: python + + >>> gl.percpu.limits + {'history_size': 1200.0, + 'percpu_disable': ['False'], + 'percpu_iowait_careful': 50.0, + 'percpu_iowait_critical': 90.0, + 'percpu_iowait_warning': 70.0, + 'percpu_max_cpu_display': 4.0, + 'percpu_system_careful': 50.0, + 'percpu_system_critical': 90.0, + 'percpu_system_warning': 70.0, + 'percpu_user_careful': 50.0, + 'percpu_user_critical': 90.0, + 'percpu_user_warning': 70.0} + +Glances system +-------------- + +System stats: + +.. code-block:: python + + >>> type(gl.system) + + >>> gl.system + {'hostname': 'nicolargo-xps15', + 'hr_name': 'Ubuntu 24.04 64bit / Linux 6.14.0-33-generic', + 'linux_distro': 'Ubuntu 24.04', + 'os_name': 'Linux', + 'os_version': '6.14.0-33-generic', + 'platform': '64bit'} + >>> gl.system.keys() + ['os_name', 'hostname', 'platform', 'os_version', 'linux_distro', 'hr_name'] + >>> gl.system["os_name"] + 'Linux' + +System fields description: + +* os_name: Operating system name +* hostname: Hostname +* platform: Platform (32 or 64 bits) +* linux_distro: Linux distribution +* os_version: Operating system version +* hr_name: Human readable operating system name + +System limits: + +.. code-block:: python + + >>> gl.system.limits + {'history_size': 1200.0, 'system_disable': ['False'], 'system_refresh': 60} + +Glances network +--------------- + +Network stats: + +.. code-block:: python + + >>> type(gl.network) + + >>> gl.network + Return a dict of dict with key= + >>> gl.network.keys() + ['wlp0s20f3', 'veth33b370c', 'veth19c7711'] + >>> gl.network["wlp0s20f3"] + {'alias': None, + 'bytes_all': 0, + 'bytes_all_gauge': 11422792843, + 'bytes_all_rate_per_sec': 0.0, + 'bytes_recv': 0, + 'bytes_recv_gauge': 9255109166, + 'bytes_recv_rate_per_sec': 0.0, + 'bytes_sent': 0, + 'bytes_sent_gauge': 2167683677, + 'bytes_sent_rate_per_sec': 0.0, + 'interface_name': 'wlp0s20f3', + 'key': 'interface_name', + 'speed': 0, + 'time_since_update': 0.0032188892364501953} + +Network fields description: + +* interface_name: Interface name. +* alias: Interface alias name (optional). +* bytes_recv: Number of bytes received. +* bytes_sent: Number of bytes sent. +* bytes_all: Number of bytes received and sent. +* speed: Maximum interface speed (in bit per second). Can return 0 on some operating-system. +* is_up: Is the interface up ? + +Network limits: + +.. code-block:: python + + >>> gl.network.limits + {'history_size': 1200.0, + 'network_disable': ['False'], + 'network_hide': ['docker.*', 'lo'], + 'network_hide_no_ip': ['True'], + 'network_hide_no_up': ['True'], + 'network_hide_zero': ['False'], + 'network_rx_careful': 70.0, + 'network_rx_critical': 90.0, + 'network_rx_warning': 80.0, + 'network_tx_careful': 70.0, + 'network_tx_critical': 90.0, + 'network_tx_warning': 80.0} + +Glances cpu +----------- + +Cpu stats: + +.. 
code-block:: python + + >>> type(gl.cpu) + + >>> gl.cpu + {'cpucore': 16, + 'ctx_switches': 1292953734, + 'guest': 0.0, + 'idle': 89.8, + 'interrupts': 1050074731, + 'iowait': 0.4, + 'irq': 0.0, + 'nice': 0.0, + 'soft_interrupts': 453376109, + 'steal': 0.0, + 'syscalls': 0, + 'system': 6.2, + 'total': 6.9, + 'user': 3.6} + >>> gl.cpu.keys() + ['total', 'user', 'nice', 'system', 'idle', 'iowait', 'irq', 'steal', 'guest', 'ctx_switches', 'interrupts', 'soft_interrupts', 'syscalls', 'cpucore'] + >>> gl.cpu["total"] + 6.9 + +Cpu fields description: + +* total: Sum of all CPU percentages (except idle). +* system: Percent time spent in kernel space. System CPU time is the time spent running code in the Operating System kernel. +* user: CPU percent time spent in user space. User CPU time is the time spent on the processor running your program's code (or code in libraries). +* iowait: *(Linux)*: percent time spent by the CPU waiting for I/O operations to complete. +* dpc: *(Windows)*: time spent servicing deferred procedure calls (DPCs) +* idle: percent of CPU used by any program. Every program or task that runs on a computer system occupies a certain amount of processing time on the CPU. If the CPU has completed all tasks it is idle. +* irq: *(Linux and BSD)*: percent time spent servicing/handling hardware/software interrupts. Time servicing interrupts (hardware + software). +* nice: *(Unix)*: percent time occupied by user level processes with a positive nice value. The time the CPU has spent running users' processes that have been *niced*. +* steal: *(Linux)*: percentage of time a virtual CPU waits for a real CPU while the hypervisor is servicing another virtual processor. +* guest: *(Linux)*: time spent running a virtual CPU for guest operating systems under the control of the Linux kernel. +* ctx_switches: number of context switches (voluntary + involuntary) per second. A context switch is a procedure that a computer's CPU (central processing unit) follows to change from one task (or process) to another while ensuring that the tasks do not conflict. +* interrupts: number of interrupts per second. +* soft_interrupts: number of software interrupts per second. Always set to 0 on Windows and SunOS. +* syscalls: number of system calls per second. Always 0 on Linux OS. +* cpucore: Total number of CPU core. +* time_since_update: Number of seconds since last update. + +Cpu limits: + +.. code-block:: python + + >>> gl.cpu.limits + {'cpu_ctx_switches_careful': 640000.0, + 'cpu_ctx_switches_critical': 800000.0, + 'cpu_ctx_switches_warning': 720000.0, + 'cpu_disable': ['False'], + 'cpu_iowait_careful': 5.0, + 'cpu_iowait_critical': 6.25, + 'cpu_iowait_warning': 5.625, + 'cpu_steal_careful': 50.0, + 'cpu_steal_critical': 90.0, + 'cpu_steal_warning': 70.0, + 'cpu_system_careful': 50.0, + 'cpu_system_critical': 90.0, + 'cpu_system_log': ['False'], + 'cpu_system_warning': 70.0, + 'cpu_total_careful': 65.0, + 'cpu_total_critical': 85.0, + 'cpu_total_log': ['True'], + 'cpu_total_warning': 75.0, + 'cpu_user_careful': 50.0, + 'cpu_user_critical': 90.0, + 'cpu_user_log': ['False'], + 'cpu_user_warning': 70.0, + 'history_size': 1200.0} + +Glances amps +------------ + +Amps stats: + +.. 
code-block:: python + + >>> type(gl.amps) + + >>> gl.amps + Return a dict of dict with key= + >>> gl.amps.keys() + ['Dropbox', 'Python', 'Conntrack', 'Nginx', 'Systemd', 'SystemV'] + >>> gl.amps["Dropbox"] + {'count': 0, + 'countmax': None, + 'countmin': 1.0, + 'key': 'name', + 'name': 'Dropbox', + 'refresh': 3.0, + 'regex': True, + 'result': None, + 'timer': 0.37017297744750977} + +Amps fields description: + +* name: AMP name. +* result: AMP result (a string). +* refresh: AMP refresh interval. +* timer: Time until next refresh. +* count: Number of matching processes. +* countmin: Minimum number of matching processes. +* countmax: Maximum number of matching processes. + +Amps limits: + +.. code-block:: python + + >>> gl.amps.limits + {'amps_disable': ['False'], 'history_size': 1200.0} + +Glances processlist +------------------- + +Processlist stats: + +.. code-block:: python + + >>> type(gl.processlist) + + >>> gl.processlist + Return a dict of dict with key= + >>> gl.processlist.keys() + [1211585, 1212589, 1225365, 739161, 1209239, 1209523, 1224962, 1316902, 1209604, 1562963, 1209619, 5654, 1209632, 1209611, 1209559, 1510111, 1506852, 1562459, 464283, 1511132, 1212999, 1508741, 1348985, 446730, 614914, 1348984, 1213613, 739044, 1493680, 739264, 1311079, 6237, 1318955, 1349000, 1212055, 1567216, 1287223, 18544, 1349010, 61459, 1211584, 1565847, 1566342, 1564984, 1493854, 739199, 1507790, 1209533, 1349017, 1212053, 1212054, 1351260, 3476, 1210327, 6612, 6069, 1350217, 1567213, 1209509, 1212802, 1493855, 1212316, 1212793, 1212051, 9513, 739578, 732, 569516, 6225, 1209504, 5857, 739105, 5770, 1493857, 5990, 5267, 6652, 1494015, 2993, 1493861, 8666, 5782, 6624, 6253, 6126, 614544, 5811, 1212056, 5808, 1493658, 2719, 2627, 5950, 1, 6285, 5589, 1350197, 14301, 5762, 6512, 688630, 6647, 5421, 5265, 2620, 614742, 2652, 5789, 5800, 9878, 5245, 5262, 5836, 2990, 6013, 2953, 6211, 5794, 2655, 3051, 5830, 2653, 3503, 5214, 5885, 5813, 5784, 688600, 5833, 3556, 5268, 234055, 1493859, 739047, 2493, 6192, 1493858, 20411, 20420, 5795, 6023, 2838, 7197, 2720, 2841, 5335, 1493860, 2647, 14320, 2623, 6076, 2642, 2616, 5631, 6142, 5575, 5281, 5871, 794, 5740, 6060, 5961, 6035, 5832, 6046, 614691, 2645, 5586, 5947, 6085, 5786, 5821, 2648, 2492, 2494, 5339, 3487, 5404, 5560, 5826, 614737, 6153, 1539841, 5561, 2615, 1287017, 5646, 2634, 5263, 14326, 1507610, 11442, 614728, 739046, 614678, 2791, 1493629, 1493635, 1350169, 1350176, 2614, 1287018, 2491, 614677, 6438, 3670, 6654, 2619, 1211852, 14329, 1567207, 2873, 1209336, 3500, 2874, 3489, 3526, 5252, 5346, 3191, 3495, 1546850, 1567212, 1286689, 3490, 1286685, 2875, 2718, 1287044, 3192, 739062, 1211841, 2, 3, 4, 5, 6, 7, 8, 10, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 29, 30, 31, 32, 33, 35, 36, 37, 38, 39, 41, 42, 43, 44, 45, 47, 48, 49, 50, 51, 53, 54, 55, 56, 57, 59, 60, 61, 62, 63, 65, 66, 67, 68, 69, 71, 72, 73, 74, 75, 77, 78, 79, 80, 81, 83, 84, 85, 86, 87, 89, 90, 91, 92, 93, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 107, 108, 109, 110, 111, 113, 114, 115, 116, 117, 118, 121, 122, 123, 124, 125, 126, 127, 128, 134, 135, 136, 137, 138, 139, 140, 142, 145, 146, 147, 148, 149, 150, 152, 155, 156, 157, 158, 165, 176, 185, 186, 211, 233, 262, 263, 264, 265, 271, 274, 275, 276, 277, 278, 279, 356, 359, 361, 362, 363, 364, 365, 452, 453, 616, 621, 622, 623, 629, 664, 665, 766, 767, 801, 977, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 999, 1000, 1001, 1002, 1039, 1240, 1241, 1256, 1266, 1267, 1268, 1269, 1270, 1271, 1331, 
1334, 1475, 1481, 1892, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1900, 1901, 1902, 1903, 1904, 1905, 1934, 1935, 1936, 1938, 1939, 1940, 1941, 1943, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024, 2025, 2026, 2027, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2058, 2059, 2060, 2061, 2062, 2063, 2064, 2066, 2068, 3390, 3522, 3603, 3604, 3605, 3606, 3607, 3608, 3609, 3610, 3948, 5125, 5134, 14316, 88766, 88767, 88768, 88769, 1507295, 1507298, 1507303, 1507312, 1507324, 1507325, 1507327, 1507328, 1507336, 1513897, 1516134, 1531836, 1531838, 1534195, 1535317, 1535902, 1535903, 1537825, 1538582, 1540654, 1542576, 1544921, 1547903, 1549140, 1549155, 1549516, 1549771, 1550621, 1550650, 1551556, 1551621, 1552050, 1554018, 1554424, 1555035, 1556719, 1556890, 1557157, 1558169, 1558873, 1559046, 1559097, 1559427, 1561010, 1561024, 1561642, 1562440, 1562661, 1562710, 1562913, 1563180, 1563795, 1564131, 1564175, 1564224, 1564428, 1564639, 1564652, 1565015, 1565176, 1566561, 1566562, 1566779, 1567037, 1567232] + >>> gl.processlist["1211585"] + {'cmdline': ['/proc/self/exe', + '--type=utility', + '--utility-sub-type=node.mojom.NodeService', + '--lang=en-US', + '--service-sandbox-type=none', + '--no-sandbox', + '--dns-result-order=ipv4first', + '--experimental-network-inspection', + '--inspect-port=0', + '--crashpad-handler-pid=739062', + '--enable-crash-reporter=864d4bb7-dd20-4851-830f-29e81dd93517,no_channel', + '--user-data-dir=/home/nicolargo/.config/Code', + '--standard-schemes=vscode-webview,vscode-file', + '--secure-schemes=vscode-webview,vscode-file', + '--cors-schemes=vscode-webview,vscode-file', + '--fetch-schemes=vscode-webview,vscode-file', + '--service-worker-schemes=vscode-webview', + '--code-cache-schemes=vscode-webview,vscode-file', + '--shared-files=v8_context_snapshot_data:100', + '--field-trial-handle=3,i,16476947824719290197,4720072013320928602,262144', + '--enable-features=DocumentPolicyIncludeJSCallStacksInCrashReports,EarlyEstablishGpuChannel,EstablishGpuChannelAsync', + '--disable-features=CalculateNativeWinOcclusion,FontationsLinuxSystemFonts,ScreenAIOCREnabled,SpareRendererForSitePerProcess', + '--variations-seed-version'], + 'cpu_percent': 8.9, + 'cpu_times': {'children_system': 997.58, + 'children_user': 1086.59, + 'iowait': 0.0, + 'system': 961.29, + 'user': 6419.41}, + 'gids': {'effective': 1000, 'real': 1000, 'saved': 1000}, + 'io_counters': [1107745792, + 1504481280, + 1107745792, + 1504481280, + 1, + 220041216, + 2011136, + 220041216, + 2011136, + 1, + 277743616, + 434176, + 277743616, + 434176, + 1, + 191922176, + 0, + 191922176, + 0, + 1, + 6549504, + 8192, + 6549504, + 8192, + 1, + 2169856, + 8192, + 2169856, + 8192, + 1, + 11862016, + 0, + 11862016, + 0, + 1, + 839889920, + 1178796032, + 839889920, + 1178796032, + 1, + 101145600, + 929394688, + 101145600, + 929394688, + 1, + 41656320, + 1306624, + 41656320, + 1306624, + 1, + 5561344, + 0, + 5561344, + 0, + 1, + 8621056, + 5234688, + 8621056, + 5234688, + 1, + 47021056, + 0, + 47021056, + 0, + 1, + 16046080, + 0, + 16046080, + 0, + 1, + 12394496, + 0, + 12394496, + 0, + 1, + 3363840, + 0, + 3363840, + 0, + 1, + 9134080, + 0, + 9134080, + 0, + 1, + 15806464, + 0, + 15806464, + 0, + 1, + 14017536, + 77824, + 14017536, + 77824, + 
1, + 44699648, + 29106176, + 44699648, + 29106176, + 1, + 21232640, + 36864, + 21232640, + 36864, + 1, + 194560, + 0, + 194560, + 0, + 1], + 'key': 'pid', + 'memory_info': {'data': 6833442816, + 'dirty': 0, + 'lib': 0, + 'rss': 3557658624, + 'shared': 51339264, + 'text': 148733952, + 'vms': 1526385225728}, + 'memory_percent': 21.662829935976895, + 'name': 'code', + 'nice': 0, + 'num_threads': 62, + 'pid': 1211585, + 'status': 'S', + 'time_since_update': 0.6878149509429932, + 'username': 'nicolargo'} + +Processlist fields description: + +* pid: Process identifier (ID) +* name: Process name +* cmdline: Command line with arguments +* username: Process owner +* num_threads: Number of threads +* cpu_percent: Process CPU consumption (returned value can be > 100.0 in case of a process running multiple threads on different CPU cores) +* memory_percent: Process memory consumption +* memory_info: Process memory information (dict with rss, vms, shared, text, lib, data, dirty keys) +* status: Process status +* nice: Process nice value +* cpu_times: Process CPU times (dict with user, system, iowait keys) +* gids: Process group IDs (dict with real, effective, saved keys) +* io_counters: Process IO counters (list with read_count, write_count, read_bytes, write_bytes, io_tag keys) + +Processlist limits: + +.. code-block:: python + + >>> gl.processlist.limits + {'history_size': 1200.0, + 'processlist_cpu_careful': 50.0, + 'processlist_cpu_critical': 90.0, + 'processlist_cpu_warning': 70.0, + 'processlist_disable': ['False'], + 'processlist_mem_careful': 50.0, + 'processlist_mem_critical': 90.0, + 'processlist_mem_warning': 70.0, + 'processlist_nice_warning': ['-20', + '-19', + '-18', + '-17', + '-16', + '-15', + '-14', + '-13', + '-12', + '-11', + '-10', + '-9', + '-8', + '-7', + '-6', + '-5', + '-4', + '-3', + '-2', + '-1', + '1', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + '10', + '11', + '12', + '13', + '14', + '15', + '16', + '17', + '18', + '19'], + 'processlist_status_critical': ['Z', 'D'], + 'processlist_status_ok': ['R', 'W', 'P', 'I']} + +Glances load +------------ + +Load stats: + +.. code-block:: python + + >>> type(gl.load) + + >>> gl.load + {'cpucore': 16, + 'min1': 2.4287109375, + 'min15': 2.06103515625, + 'min5': 2.0654296875} + >>> gl.load.keys() + ['min1', 'min5', 'min15', 'cpucore'] + >>> gl.load["min1"] + 2.4287109375 + +Load fields description: + +* min1: Average sum of the number of processes waiting in the run-queue plus the number currently executing over 1 minute. +* min5: Average sum of the number of processes waiting in the run-queue plus the number currently executing over 5 minutes. +* min15: Average sum of the number of processes waiting in the run-queue plus the number currently executing over 15 minutes. +* cpucore: Total number of CPU core. + +Load limits: + +.. code-block:: python + + >>> gl.load.limits + {'history_size': 1200.0, + 'load_careful': 0.7, + 'load_critical': 5.0, + 'load_disable': ['False'], + 'load_warning': 1.0} + +Glances sensors +--------------- + +Sensors stats: + +.. code-block:: python + + >>> type(gl.sensors) + + >>> gl.sensors + Return a dict of dict with key=