authorПавел Жуков <33721692+LeaveMyYard@users.noreply.github.com>2023-04-26 11:49:13 +0300
committerПавел Жуков <33721692+LeaveMyYard@users.noreply.github.com>2023-04-26 11:49:13 +0300
commita6713712e6cc65e9aa8ade05f6582b1ef05987e1 (patch)
tree842862fab85d0c48ed44bb1e4f7d0f4dfc7b5da5
parent9c85cb67ea10c5cd807665ef25ad05b19dcc9a43 (diff)
parent32fd0b042e6b07e0a66241588e81d791afa08786 (diff)
Merge branch 'fix-CPU-metric' of github.com:robusta-dev/robusta-krr into fix-CPU-metric
-rw-r--r--  README.md                           | 67
-rw-r--r--  robusta_krr/core/models/config.py   |  1
-rw-r--r--  robusta_krr/core/runner.py          |  2
-rw-r--r--  robusta_krr/main.py                 |  2
-rw-r--r--  robusta_krr/utils/configurable.py   | 11
5 files changed, 68 insertions, 15 deletions
diff --git a/README.md b/README.md
index 9dd6168..701ccd5 100644
--- a/README.md
+++ b/README.md
@@ -104,13 +104,9 @@ By default, we use a _simple_ strategy to calculate resource recommendations. It
- For memory, we take the maximum value over the past week and add a 5% buffer.
-### Robusta UI integration
-
-If you are using [Robusta SaaS](https://platform.robusta.dev/), then KRR is integrated starting from [v0.10.15](https://github.com/robusta-dev/robusta/releases/tag/0.10.15). You can view all your recommendations (previous ones also), filter and sort them by either cluster, namespace or name.
-
-More features (like seeing graphs, based on which recommendations were made) coming soon. [Tell us what you need the most!](https://github.com/robusta-dev/krr/issues/new)
+#### Prometheus connection
-![Robusta UI Screen Shot][ui-screenshot]
+Find out how KRR tries to find the default Prometheus to connect to <a href="#prometheus-auto-discovery">here</a>.
### Difference with Kubernetes VPA
@@ -120,13 +116,21 @@ More features (like seeing graphs, based on which recommendations were made) com
| Installation Location 🌍 | ✅ Not required to be installed inside the cluster, can be used on your own device, connected to a cluster | ❌ Must be installed inside the cluster |
| Workload Configuration 🔧 | ✅ No need to configure a VPA object for each workload | ❌ Requires VPA object configuration for each workload |
| Immediate Results ⚡ | ✅ Gets results immediately (given Prometheus is running) | ❌ Requires time to gather data and provide recommendations |
-| Reporting 📊 | ✅ Detailed CLI Report, web UI in [Robusta.dev](https://home.robusta.dev/) | ❌ Not supported |
+| Reporting 📊 | ✅ Detailed CLI Report, web UI in [Robusta.dev](https://home.robusta.dev/) | ❌ Not supported |
| Extensibility 🔧 | ✅ Add your own strategies with few lines of Python | :warning: Limited extensibility |
| Custom Metrics 📏 | 🔄 Support in future versions | ❌ Not supported |
| Custom Resources 🎛️ | 🔄 Support in future versions (e.g., GPU) | ❌ Not supported |
| Explainability 📖 | 🔄 Support in future versions (Robusta will send you additional graphs) | ❌ Not supported |
| Autoscaling 🔀 | 🔄 Support in future versions | ✅ Automatic application of recommendations |
+### Robusta UI integration
+
+If you are using [Robusta SaaS](https://platform.robusta.dev/), then KRR is integrated starting from [v0.10.15](https://github.com/robusta-dev/robusta/releases/tag/0.10.15). You can view all your recommendations (including previous ones), and filter and sort them by cluster, namespace, or name.
+
+More features (like seeing the graphs that recommendations were based on) are coming soon. [Tell us what you need the most!](https://github.com/robusta-dev/krr/issues/new)
+
+![Robusta UI Screen Shot][ui-screenshot]
+
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- GETTING STARTED -->
@@ -165,7 +169,7 @@ sudo apt install robusta-krr
`````sh
docker pull robusta/krr
-````
+````
#### Manual
@@ -215,16 +219,16 @@ By default krr will run in the current context. If you want to run it in a diffe
python krr.py simple -c my-cluster-1 -c my-cluster-2
```
-If you want to get the output in JSON format (-q is for quiet mode):
+If you want to get the output in JSON format (--logtostderr is required so no logs go to the result file):
```sh
-python krr.py simple -q -f json > result.json
+python krr.py simple --logtostderr -f json > result.json
```
If you want to get the output in YAML format:
```sh
-python krr.py simple -q -f yaml > result.yaml
+python krr.py simple --logtostderr -f yaml > result.yaml
```
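
Because `>` captures everything written to stdout, any log line that leaked into stdout would corrupt the redirected file; that is exactly what `--logtostderr` prevents. As a rough illustration (file names match the commands above, and PyYAML is assumed to be installed), the redirected output can then be parsed like this:

```python
import json

import yaml  # assumes PyYAML is available

# Parse the files produced by the redirections above. If log lines had been
# written to stdout instead of stderr, both parsers would fail on the extra text.
with open("result.json") as f:
    recommendations = json.load(f)

with open("result.yaml") as f:
    recommendations_from_yaml = yaml.safe_load(f)

print(type(recommendations), type(recommendations_from_yaml))
```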
If you want to see additional debug logs:
@@ -241,12 +245,51 @@ python krr.py simple --help
<p align="right">(<a href="#readme-top">back to top</a>)</p>
+<!-- Port-forwarding -->
+
+## Prometheus auto-discovery
+
+By default, KRR will try to auto-discover the running Prometheus by scanning for the following labels:
+```python
+"app=kube-prometheus-stack-prometheus"
+"app=prometheus,component=server"
+"app=prometheus-server"
+"app=prometheus-operator-prometheus"
+"app=prometheus-msteams"
+"app=rancher-monitoring-prometheus"
+"app=prometheus-prometheus"
+```
+
+If Prometheus cannot be found using any of those labels, you will get an error and will have to pass a working URL explicitly (using the `-p` flag).
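
For illustration only, discovery along these lines can be reproduced with the official `kubernetes` Python client by querying services for each label selector in turn. The function below is a sketch, not KRR's actual implementation, and the assumption that the first service port serves the Prometheus HTTP API is illustrative:

```python
from typing import Optional

from kubernetes import client, config

PROMETHEUS_LABELS = [
    "app=kube-prometheus-stack-prometheus",
    "app=prometheus,component=server",
    "app=prometheus-server",
    "app=prometheus-operator-prometheus",
    "app=prometheus-msteams",
    "app=rancher-monitoring-prometheus",
    "app=prometheus-prometheus",
]


def find_prometheus_url() -> Optional[str]:
    """Return an in-cluster URL for the first service matching a known label, or None."""
    config.load_kube_config()  # use the current kubectl context
    core = client.CoreV1Api()
    for selector in PROMETHEUS_LABELS:
        services = core.list_service_for_all_namespaces(label_selector=selector)
        for svc in services.items:
            port = svc.spec.ports[0].port  # assume the first port serves the HTTP API
            return f"http://{svc.metadata.name}.{svc.metadata.namespace}.svc.cluster.local:{port}"
    return None
```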
+
+<p align="right">(<a href="#readme-top">back to top</a>)</p>
+
+## Example of using port-forward for Prometheus
+
+If KRR cannot connect to your Prometheus automatically, you can use `kubectl port-forward` to forward it manually.
+
+For example, if you have a Prometheus Pod called `kube-prometheus-st-prometheus-0`, then run this command to port-forward it:
+
+```sh
+kubectl port-forward pod/kube-prometheus-st-prometheus-0 9090
+```
+
+Then, open another terminal and run krr in it, passing the Prometheus URL explicitly:
+
+```sh
+python krr.py simple -p http://127.0.0.1:9090
+```
+
+<p align="right">(<a href="#readme-top">back to top</a>)</p>
+
<!-- CUSTOM -->
## Creating a Custom Strategy/Formatter
Look into the `examples` directory for examples on how to create a custom strategy/formatter.
+<p align="right">(<a href="#readme-top">back to top</a>)</p>
+
<!-- BUILDING -->
## Building
@@ -304,6 +347,8 @@ pip install -e .
poetry run pytest
```
+<p align="right">(<a href="#readme-top">back to top</a>)</p>
+
<!-- CONTRIBUTING -->
## Contributing
diff --git a/robusta_krr/core/models/config.py b/robusta_krr/core/models/config.py
index edb1518..fb910a6 100644
--- a/robusta_krr/core/models/config.py
+++ b/robusta_krr/core/models/config.py
@@ -34,6 +34,7 @@ class Config(pd.BaseSettings):
# Logging Settings
format: str
strategy: str
+ log_to_stderr: bool
other_args: dict[str, Any]
diff --git a/robusta_krr/core/runner.py b/robusta_krr/core/runner.py
index 0737cf0..1ef0e14 100644
--- a/robusta_krr/core/runner.py
+++ b/robusta_krr/core/runner.py
@@ -44,7 +44,7 @@ class Runner(Configurable):
def _process_result(self, result: Result) -> None:
formatted = result.format(self.config.format)
self.echo("\n", no_prefix=True)
- self.console.print(formatted)
+ self.print_result(formatted)
def __get_resource_minimal(self, resource: ResourceType) -> Decimal:
if resource == ResourceType.CPU:
diff --git a/robusta_krr/main.py b/robusta_krr/main.py
index a58675f..0412974 100644
--- a/robusta_krr/main.py
+++ b/robusta_krr/main.py
@@ -79,6 +79,7 @@ def run() -> None:
format: str = typer.Option("table", "--formatter", "-f", help="Output formatter ({formatters})", rich_help_panel="Logging Settings"),
verbose: bool = typer.Option(False, "--verbose", "-v", help="Enable verbose mode", rich_help_panel="Logging Settings"),
quiet: bool = typer.Option(False, "--quiet", "-q", help="Enable quiet mode", rich_help_panel="Logging Settings"),
+ log_to_stderr: bool = typer.Option(False, "--logtostderr", help="Pass logs to stderr", rich_help_panel="Logging Settings"),
{strategy_settings},
) -> None:
'''Run KRR using the `{func_name}` strategy'''
@@ -92,6 +93,7 @@ def run() -> None:
format=format,
verbose=verbose,
quiet=quiet,
+ log_to_stderr=log_to_stderr,
strategy="{func_name}",
other_args={strategy_args},
)
diff --git a/robusta_krr/utils/configurable.py b/robusta_krr/utils/configurable.py
index 3957e6e..ded2900 100644
--- a/robusta_krr/utils/configurable.py
+++ b/robusta_krr/utils/configurable.py
@@ -6,8 +6,6 @@ from rich.console import Console
from robusta_krr.core.models.config import Config
-console = Console()
-
class Configurable(abc.ABC):
"""
@@ -17,7 +15,7 @@ class Configurable(abc.ABC):
def __init__(self, config: Config) -> None:
self.config = config
- self.console = console
+ self.console = Console(stderr=self.config.log_to_stderr)
@property
def debug_active(self) -> bool:
@@ -31,6 +29,13 @@ class Configurable(abc.ABC):
def __add_prefix(text: str, prefix: str, /, no_prefix: bool) -> str:
return f"{prefix} {text}" if not no_prefix else text
+ def print_result(self, content: str) -> None:
+ """
+ Prints the result in a console. The result is always put in stdout.
+ """
+ result_console = Console()
+ result_console.print(content)
+
def echo(
self, message: str = "", *, no_prefix: bool = False, type: Literal["INFO", "WARNING", "ERROR"] = "INFO"
) -> None:
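
Taken together, these changes route log output through a `Console(stderr=self.config.log_to_stderr)` while `print_result` always writes on a default stdout `Console`, which is what makes `python krr.py simple --logtostderr -f json > result.json` capture only the result. A minimal standalone sketch of the same separation with `rich` (messages and payload are made up):

```python
from rich.console import Console

# Log messages go to stderr (mirroring --logtostderr), so redirecting stdout
# with `> result.json` captures only the result itself.
log_console = Console(stderr=True)

# The result is always printed on a default Console, which writes to stdout.
result_console = Console()

log_console.print("INFO Fetching Prometheus metrics ...")
result_console.print('{"scans": []}')  # hypothetical result payload
```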