23 Commits

Author SHA1 Message Date
Natan Keddem ce898250dd updated requirements 2024-03-02 21:59:46 -05:00
Natan Keddem 5cba893282 Update README.md 2024-03-02 21:05:54 -05:00
Natan Keddem db4f340898 add default sort to tables 2023-11-24 19:21:53 -05:00
Natan Keddem f58b03a86b optimize result display 2023-11-24 19:21:06 -05:00
Natan Keddem 61f297aa0b improved job removal 2023-11-23 22:16:30 -05:00
Natan Keddem 425b607e8c cleanup 2023-11-23 22:16:03 -05:00
Natan Keddem c01989210b update to NiceGUI 1.4.3 2023-11-23 18:44:45 -05:00
Natan Keddem 0148b23310 refactored automation 2023-11-23 18:44:29 -05:00
Natan Keddem 0221780a19 optimized prop recall 2023-11-23 18:42:08 -05:00
Natan Keddem 9ea536193b cleanup for pylint 2023-11-23 17:54:27 -05:00
Natan Keddem fcd8362464 improved hold management and display 2023-11-21 19:05:14 -05:00
Natan Keddem 36ee1f94cd added arbitrary property entry 2023-11-20 21:57:10 -05:00
Natan Keddem 07ce7e0bae fixed edge case for ErrorAggregator 2023-11-20 21:56:20 -05:00
Natan Keddem ffbc9b71c0 optimize date and time displays 2023-11-19 17:05:58 -05:00
Natan Keddem f3ef97a342 improved result timestamp 2023-11-18 21:46:56 -05:00
Natan Keddem 94fba0b925 added test to http pipe 2023-11-18 20:46:06 -05:00
Natan Keddem 8572ad766b improved browse and find 2023-11-18 20:45:53 -05:00
Natan Keddem 8a2922262e added output truncate to cli 2023-11-18 16:06:01 -05:00
Natan Keddem d322612fc8 refactor and fix table display and sorting 2023-11-17 23:18:03 -05:00
Natan Keddem 3d13876804 optimize startup 2023-11-17 23:16:15 -05:00
Natan Keddem 4685939cae fixed arbitrary target path recall 2023-11-16 21:37:21 -05:00
Natan Keddem dfcafed973 refactor startup builders 2023-11-16 19:34:52 -05:00
Natan Keddem 566fb9442c added arbitrary target path selection 2023-11-16 19:05:53 -05:00
19 changed files with 513 additions and 353 deletions
+18 -2
View File
@@ -1,7 +1,23 @@
# bale: ZFS Snapshot Browser Based GUI
## Demo
https://github.com/natankeddem/bale/assets/44515217/53c2dc10-afbf-44a2-9546-545b06e7c565
## Host Creation
[bale_host_creation.webm](https://github.com/natankeddem/bale/assets/44515217/450afac1-ffa6-4f6f-80b4-1aeafce6a6d7)
## Manual Management Task Handling
[bale_manual_task.webm](https://github.com/natankeddem/bale/assets/44515217/d9728db9-6efa-45ed-8d07-2d925a9249b9)
## Automatic Management Task Handling
[bale_auto_task.webm](https://github.com/natankeddem/bale/assets/44515217/ab648c45-e567-4557-88f9-c11b2b412cef)
## Downloading Files From Snapshots
[bale_file_download.webm](https://github.com/natankeddem/bale/assets/44515217/7db08302-8a8b-47d4-879c-ba310f8628e4)
## Simple Automations
[bale_simple_automation.webm](https://github.com/natankeddem/bale/assets/44515217/0cd6a7da-ff11-4786-88ef-6a644ed431ff)
## ZFS-Autobackup Automations
[bale_zab_automation.webm](https://github.com/natankeddem/bale/assets/44515217/7816ae9c-695c-47f1-9d68-f0075bb8e567)
## ⚠️ **_WARNING_**
+4 -1
View File
@@ -34,6 +34,9 @@
"-l",
"180"
],
"editor.suggest.showStatusBar": true
"editor.suggest.showStatusBar": true,
"pylint.args": [
"\"pylint.args\": [\"--disable=C0115\", \"--disable=C0116\", \"--disable=C0301\",\"--max-line-length=180\"]"
]
}
}
+4 -7
View File
@@ -1,4 +1,5 @@
from nicegui import ui
import asyncio
from nicegui import ui # type: ignore
from bale import elements as el
import bale.logo as logo
from bale.tabs import Tab
@@ -26,7 +27,7 @@ class Content:
self._automation = None
self._history = None
def build(self):
async def build(self):
self._header = ui.header(bordered=True).classes("bg-dark q-pt-sm q-pb-xs")
self._header.tailwind.border_color(f"[{el.orange}]").min_width("[920px]")
self._header.visible = False
@@ -44,11 +45,7 @@ class Content:
self._tab_panels = (
ui.tab_panels(self._tabs, value="Manage", on_change=lambda e: self._tab_changed(e), animated=False).classes("w-full h-full").bind_visibility_from(self._header)
)
ui.timer(1, self.select_default, once=True)
async def select_default(self):
tab = Tab(spinner=None)
default = tab.common.get("default", "")
default = Tab(spinner=None).common.get("default", "")
if default != "":
await self.host_selected(default)
+1 -1
View File
@@ -1,4 +1,4 @@
from nicegui import ui
from nicegui import ui # type: ignore
from bale import elements as el
from bale.tabs import Tab
from bale.interfaces import ssh
+22 -17
View File
@@ -1,11 +1,11 @@
from typing import Any, Callable, Dict, List, Literal, Optional, Union
from nicegui import ui, app, Tailwind
from nicegui.elements.spinner import SpinnerTypes
from nicegui.elements.tabs import Tab
from nicegui.tailwind_types.height import Height
from nicegui.tailwind_types.width import Width
from nicegui.elements.mixins.validation_element import ValidationElement
from nicegui.events import GenericEventArguments, handle_event
from nicegui import ui, app, Tailwind # type: ignore
from nicegui.elements.spinner import SpinnerTypes # type: ignore
from nicegui.elements.tabs import Tab # type: ignore
from nicegui.tailwind_types.height import Height # type: ignore
from nicegui.tailwind_types.width import Width # type: ignore
from nicegui.elements.mixins.validation_element import ValidationElement # type: ignore
from nicegui.events import GenericEventArguments, handle_event # type: ignore
from bale.interfaces import cli
import logging
@@ -71,8 +71,11 @@ class ErrorAggregator:
@property
def no_errors(self) -> bool:
validators = all(validation(element.value) for element in self.elements for validation in element.validation.values())
return self.enable and validators
if len(self.elements) > 0:
validators = all(validation(element.value) for element in self.elements for validation in element.validation.values())
return self.enable and validators
else:
return True
class WColumn(ui.column):
@@ -162,16 +165,17 @@ class FInput(ui.input):
class DSelect(ui.select):
def __init__(
self,
options: List | Dict,
options: Union[List, Dict],
*,
label: str | None = None,
label: Optional[str] = None,
value: Any = None,
on_change: Callable[..., Any] | None = None,
on_change: Optional[Callable[..., Any]] = None,
with_input: bool = False,
new_value_mode: Optional[Literal["add", "add-unique", "toggle"]] = None,
multiple: bool = False,
clearable: bool = False,
) -> None:
super().__init__(options, label=label, value=value, on_change=on_change, with_input=with_input, multiple=multiple, clearable=clearable)
super().__init__(options, label=label, value=value, on_change=on_change, with_input=with_input, new_value_mode=new_value_mode, multiple=multiple, clearable=clearable)
self.tailwind.width("full")
if multiple is True:
self.props("use-chips")
@@ -180,16 +184,17 @@ class DSelect(ui.select):
class FSelect(ui.select):
def __init__(
self,
options: List | Dict,
options: Union[List, Dict],
*,
label: str | None = None,
label: Optional[str] = None,
value: Any = None,
on_change: Callable[..., Any] | None = None,
on_change: Optional[Callable[..., Any]] = None,
with_input: bool = False,
new_value_mode: Optional[Literal["add", "add-unique", "toggle"]] = None,
multiple: bool = False,
clearable: bool = False,
) -> None:
super().__init__(options, label=label, value=value, on_change=on_change, with_input=with_input, multiple=multiple, clearable=clearable)
super().__init__(options, label=label, value=value, on_change=on_change, with_input=with_input, new_value_mode=new_value_mode, multiple=multiple, clearable=clearable)
self.tailwind.width("64")
+12 -5
View File
@@ -4,7 +4,7 @@ from asyncio.subprocess import Process, PIPE
import contextlib
import shlex
from datetime import datetime
from nicegui import ui
from nicegui import ui # type: ignore
from bale.result import Result
import logging
@@ -34,6 +34,7 @@ class Cli:
self.stderr: List[str] = []
self._terminate: asyncio.Event = asyncio.Event()
self._busy: bool = False
self._truncated: bool = False
self.prefix_line: str = ""
self._stdout_terminals: List[Terminal] = []
self._stderr_terminals: List[Terminal] = []
@@ -70,8 +71,11 @@ class Cli:
else:
break
async def _controller(self, process: Process) -> None:
async def _controller(self, process: Process, max_output_lines) -> None:
while process.returncode is None:
if max_output_lines > 0 and len(self.stderr) + len(self.stdout) > max_output_lines:
self._truncated = True
process.terminate()
if self._terminate.is_set():
process.terminate()
try:
@@ -83,7 +87,7 @@ class Cli:
def terminate(self) -> None:
self._terminate.set()
async def execute(self, command: str) -> Result:
async def execute(self, command: str, max_output_lines: int = 0) -> Result:
self._busy = True
c = shlex.split(command, posix=False)
try:
@@ -92,13 +96,14 @@ class Cli:
self.stdout.clear()
self.stderr.clear()
self._terminate.clear()
self._truncated = False
terminated = False
now = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
self.prefix_line = f"<{now}> {command}\n"
for terminal in self._stdout_terminals:
terminal.call_terminal_method("write", "\n" + self.prefix_line)
await asyncio.gather(
self._controller(process=process),
self._controller(process=process, max_output_lines=max_output_lines),
self._read_stdout(stream=process.stdout),
self._read_stderr(stream=process.stderr),
)
@@ -110,7 +115,9 @@ class Cli:
finally:
self._terminate.clear()
self._busy = False
return Result(command=command, return_code=process.returncode, stdout_lines=self.stdout.copy(), stderr_lines=self.stderr.copy(), terminated=terminated)
return Result(
command=command, return_code=process.returncode, stdout_lines=self.stdout.copy(), stderr_lines=self.stderr.copy(), terminated=terminated, truncated=self._truncated
)
async def shell(self, command: str) -> Result:
self._busy = True
+2 -2
View File
@@ -90,10 +90,10 @@ class Ssh(Cli):
del self._config[self.host]
self.write_config()
async def execute(self, command: str) -> Result:
async def execute(self, command: str, max_output_lines: int = 0) -> Result:
self._base_cmd = f"{'' if self.use_key else f'sshpass -p {self.password} '} ssh -F {self._config_path} {self.host}"
self._full_cmd = f"{self._base_cmd} {command}"
return await super().execute(self._full_cmd)
return await super().execute(self._full_cmd, max_output_lines)
async def send_key(self) -> Result:
await get_public_key(self._raw_path)
+27 -19
View File
@@ -3,7 +3,7 @@ from pathlib import Path
import stat
from datetime import datetime
import uuid
from nicegui import app, events, ui
from nicegui import app, background_tasks, events, ui # type: ignore
from fastapi.responses import StreamingResponse
import asyncssh
from bale import elements as el
@@ -87,7 +87,7 @@ class SshFileBrowse(ui.dialog):
row.tailwind.height("[40px]")
el.DButton("Download", on_click=self._start_download)
ui.button("Exit", on_click=lambda: self.submit("exit"))
await self._update_grid()
await self._update_handler()
async def _connect(self) -> Tuple[asyncssh.SSHClientConnection, asyncssh.SFTPClient]:
ssh = await asyncssh.connect(self._zfs.hostname, username=self._zfs.username, client_keys=[self._zfs.key_path])
@@ -137,7 +137,7 @@ class SshFileBrowse(ui.dialog):
"permissions": attributes.permissions,
}
async def _update_grid(self) -> None:
async def _update_handler(self) -> None:
self._grid.call_api_method("showLoadingOverlay")
if self._ssh is None or self._sftp is None:
self._ssh, self._sftp = await self._connect()
@@ -165,7 +165,7 @@ class SshFileBrowse(ui.dialog):
async def _handle_double_click(self, e: events.GenericEventArguments) -> None:
self.path = e.args["data"]["path"]
if e.args["data"]["type"] == "directory":
await self._update_grid()
await self._update_handler()
else:
await self._start_download(e)
@@ -226,10 +226,10 @@ class SshFileFind(SshFileBrowse):
with el.DBody(height="fit", width="[90vw]"):
with el.WColumn().classes("col"):
filesystems = await self._zfs.filesystems
self._filesystem = el.DSelect(
list(filesystems.data.keys()), label="filesystem", with_input=True, on_change=self._update_grid
)
self._pattern = el.DInput("Pattern", on_change=self._update_grid)
self._filesystem = el.DSelect(list(filesystems.data.keys()), label="filesystem", with_input=True, on_change=self._update_handler)
with el.WRow():
self._pattern = ui.input("Pattern").classes("col").on("keydown.enter", handler=self._update_handler)
el.LgButton(icon="search", on_click=self._update_handler)
self._grid = ui.aggrid(
{
"defaultColDef": {"flex": 1, "sortable": True, "suppressMovable": True, "sortingOrder": ["asc", "desc"]},
@@ -237,12 +237,14 @@ class SshFileFind(SshFileBrowse):
{"field": "name", "headerName": "Name", "flex": 1, "sort": "desc", "resizable": True},
{"field": "location", "headerName": "Location", "flex": 1, "resizable": True},
{
"field": "modified_datetime",
"headerName": "Modified",
"maxWidth": 200,
":comparator": """(valueA, valueB, nodeA, nodeB, isInverted) => {
return (nodeA.data.modified_timestamp > nodeB.data.modified_timestamp) ? -1 : 1;
}""",
"field": "modified_timestamp",
"filter": "agTextColumnFilter",
"maxWidth": 125,
":cellRenderer": """(data) => {
var date = new Date(data.value * 1000).toLocaleString(undefined, {dateStyle: 'short', timeStyle: 'short', hour12: false});;
return date;
}""",
},
{
"field": "size",
@@ -264,15 +266,21 @@ class SshFileFind(SshFileBrowse):
row.tailwind.height("[40px]")
el.DButton("Download", on_click=self._start_download)
ui.button("Exit", on_click=lambda: self.submit("exit"))
await self._update_grid()
self._grid.call_api_method("hideOverlay")
async def _update_grid(self) -> None:
self._grid.call_api_method("showLoadingOverlay")
if self._filesystem is not None:
async def _update_handler(self) -> None:
if len(self._pattern.value) > 0 and self._filesystem is not None:
self._grid.call_api_method("showLoadingOverlay")
self._filesystem.props("readonly")
self._pattern.props("readonly")
files = await self._zfs.find_files_in_snapshots(filesystem=self._filesystem.value, pattern=self._pattern.value)
self._grid.options["rowData"] = files.data
self._grid.update()
self._grid.call_api_method("hideOverlay")
if files.truncated is True:
el.notify("Too many files found, truncating list.", type="warning")
self._grid.update()
self._filesystem.props(remove="readonly")
self._pattern.props(remove="readonly")
self._grid.call_api_method("hideOverlay")
async def _handle_double_click(self, e: events.GenericEventArguments) -> None:
await self._start_download(e)
+43 -33
View File
@@ -81,7 +81,7 @@ class Zfs:
command = command if len(command) < 160 else command[:160] + "..."
el.notify(command)
async def execute(self, command: str, notify: bool = True) -> Result:
async def execute(self, command: str, max_output_lines: int = 0, notify: bool = True) -> Result:
if notify:
self.notify(command)
return Result(command=command)
@@ -115,16 +115,15 @@ class Zfs:
return result
async def filesystems_with_prop(self, prop: str) -> Result:
result = await self.execute(f"zfs get -Hp -t filesystem,volume {prop}")
filesystems = []
result = await self.execute(f"zfs get -Hp -t filesystem,volume {prop}")
for line in result.stdout_lines:
matches = re.match("^(?P<name>[^\t]+)\t(?P<property>[^\t]+)\t(?P<value>[^\t]+)\t(?P<source>[^\n]+)", line)
if matches is not None:
md = matches.groupdict()
if md["property"] == prop and md["source"] == "local":
filesystems.append(md["name"])
result = Result(data=filesystems, cached=False)
return result
return Result(data=filesystems, cached=False)
async def holds_for_snapshot(self, snapshot: Union[str, None] = None) -> Result:
query = "holds_for_snapshot"
@@ -137,7 +136,7 @@ class Zfs:
with_holds.append(_name)
with_holds = " ".join(with_holds)
else:
with_holds = [snapshot]
with_holds = snapshot
if len(with_holds) > 0:
result = await self.execute(f"zfs holds -H -r {with_holds}", notify=False)
tags: Dict[str, list[str]] = {}
@@ -149,11 +148,16 @@ class Zfs:
if s not in tags:
tags[s] = []
tags[s].append(md["tag"])
self._last_data[query] = tags
if snapshot in self._last_data[query]:
result.data = self._last_data[query][snapshot]
if query not in self._last_data:
self._last_data[query] = {}
self._last_data[query].update(tags)
if snapshot is None:
result.data = self._last_data[query]
else:
result.data = []
if snapshot in self._last_data[query]:
result.data = self._last_data[query][snapshot]
else:
result.data = []
else:
return Result(data=[])
else:
@@ -166,25 +170,28 @@ class Zfs:
return result
async def find_files_in_snapshots(self, filesystem: str, pattern: str) -> Result:
filesystems = await self.filesystems
if filesystem in filesystems.data.keys():
if "mountpoint" in filesystems.data[filesystem]:
command = f"find {filesystems.data[filesystem]['mountpoint']}/.zfs/snapshot -type f -name '{pattern}' -printf '%h\t%f\t%s\t%T@\n'"
result = await self.execute(command=command, notify=False)
files = []
for line in result.stdout_lines:
matches = re.match(
"^(?P<location>[^\t]+)\t(?P<name>[^\t]+)\t(?P<bytes>[^\t]+)\t(?P<modified_timestamp>[^\n]+)",
line,
)
if matches is not None:
md = matches.groupdict()
md["path"] = f"{md['location']}/{md['name']}"
md["size"] = format_bytes(int(md["bytes"]))
md["modified_datetime"] = datetime.fromtimestamp(float(md["modified_timestamp"])).strftime("%Y/%m/%d %H:%M:%S")
files.append(md)
result.data = files
return result
try:
filesystems = await self.filesystems
command = f"find {filesystems.data[filesystem]['mountpoint']}/.zfs/snapshot -type f -name '{pattern}' -printf '%h\t%f\t%s\t%T@\n'"
result = await self.execute(command=command, notify=False, max_output_lines=1000)
files = []
for line in result.stdout_lines:
matches = re.match(
"^(?P<location>[^\t]+)\t(?P<name>[^\t]+)\t(?P<bytes>[^\t]+)\t(?P<modified_timestamp>[^\n]+)",
line,
)
if matches is not None:
md = matches.groupdict()
md["path"] = f"{md['location']}/{md['name']}"
md["bytes"] = int(md["bytes"])
md["size"] = format_bytes(md["bytes"])
md["modified_datetime"] = datetime.fromtimestamp(float(md["modified_timestamp"])).strftime("%Y/%m/%d %H:%M:%S")
md["modified_timestamp"] = float(md["modified_timestamp"])
files.append(md)
result.data = files
return result
except KeyError:
pass
return Result()
@property
@@ -219,9 +226,12 @@ class Zfs:
matches = re.match("^(?P<filesystem>[^@]+)@(?P<name>[^\t]+)\t(?P<used_bytes>[^\t]+)\t(?P<creation>[^\t]+)\t(?P<userrefs>[^\n]+)", line)
if matches is not None:
md = matches.groupdict()
md["creation_date"] = datetime.fromtimestamp(int(md["creation"])).strftime("%Y/%m/%d")
md["creation_time"] = datetime.fromtimestamp(int(md["creation"])).strftime("%H:%M")
md["used"] = format_bytes(int(md["used_bytes"]))
md["used_bytes"] = int(md["used_bytes"])
md["creation"] = int(md["creation"])
md["creation_date"] = datetime.fromtimestamp(md["creation"]).strftime("%Y/%m/%d")
md["creation_time"] = datetime.fromtimestamp(md["creation"]).strftime("%H:%M")
md["used"] = format_bytes(md["used_bytes"])
md["userrefs"] = int(md["userrefs"])
snapshot = f"{md['filesystem']}@{md['name']}"
snapshots[snapshot] = md
self._last_data[query] = snapshots
@@ -240,10 +250,10 @@ class Ssh(ssh.Ssh, Zfs):
def notify(self, command: str):
super().notify(f"<{self.host}> {command}")
async def execute(self, command: str, notify: bool = True) -> Result:
async def execute(self, command: str, max_output_lines: int = 0, notify: bool = True) -> Result:
if notify:
self.notify(command)
result = await super().execute(command)
result = await super().execute(command, max_output_lines)
if result.stderr != "":
el.notify(result.stderr, type="negative")
result.name = self.host
+1 -1
View File
@@ -1,4 +1,4 @@
from nicegui import ui
from nicegui import ui # type: ignore
import logging
logger = logging.getLogger(__name__)
+5 -4
View File
@@ -1,4 +1,5 @@
from nicegui import app, ui
import asyncio
from nicegui import app, Client, ui # type: ignore
from bale import elements as el
from bale.drawer import Drawer
from bale.content import Content
@@ -9,8 +10,8 @@ logger = logging.getLogger(__name__)
def build():
@ui.page("/")
def page() -> None:
@ui.page("/", response_timeout=30)
async def index(client: Client) -> None:
app.add_static_files("/static", "static")
el.load_element_css()
cli.load_terminal_css()
@@ -28,4 +29,4 @@ def build():
content = Content()
drawer = Drawer(column, content.host_selected, content.hide)
drawer.build()
content.build()
await content.build()
+1
View File
@@ -13,6 +13,7 @@ class Result:
stdout_lines: List[str] = field(default_factory=list)
stderr_lines: List[str] = field(default_factory=list)
terminated: bool = False
truncated: bool = False
data: Any = None
trace: str = ""
cached: bool = False
+20 -16
View File
@@ -5,22 +5,23 @@ from pathlib import Path
from functools import cache
from datetime import datetime
import time
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.schedulers.asyncio import AsyncIOScheduler # type: ignore
@dataclass(kw_only=True)
class Automation:
id: str
app: str
hosts: List[str]
host: str
command: str
schedule_mode: str
triggers: Dict[str, str]
options: Union[Dict[str, Any], None] = None
id: str = ""
name: str = ""
app: str = "remote"
hosts: List[str] = field(default_factory=list)
host: str = ""
command: str = ""
schedule_mode: str = ""
triggers: Dict[str, str] = field(default_factory=dict)
options: Dict[str, Any] = field(default_factory=dict)
pipe_success: bool = False
pipe_error: bool = False
timestamp: float = field(default_factory=time.time)
pipe_success: bool
pipe_error: bool
def to_dict(self) -> Dict[str, Any]:
return self.__dict__
@@ -29,11 +30,14 @@ class Automation:
@dataclass(kw_only=True)
class Zfs_Autobackup(Automation):
app: str = "zfs_autobackup"
execute_mode: str = "local"
target_host: str
target_path: str
target_paths: List[str]
filesystems: Dict[str, Union[str, List[str], Dict[str, str]]]
prop: str = "autobackup:{name}"
target_host: str = ""
target_path: str = ""
target_paths: List[str] = field(default_factory=list)
parentchildren: List[str] = field(default_factory=list)
parent: List[str] = field(default_factory=list)
children: List[str] = field(default_factory=list)
exclude: List[str] = field(default_factory=list)
class _Scheduler:
+9 -11
View File
@@ -6,7 +6,7 @@ from datetime import datetime
import time
import json
import httpx
from nicegui import app, ui
from nicegui import app, ui # type: ignore
from bale.interfaces.zfs import Ssh
from bale import elements as el
from bale.result import Result
@@ -91,18 +91,16 @@ class Tab:
with el.WColumn():
with el.Card() as card:
card.tailwind.width("full")
with el.WColumn():
ui.label(f"#> {result.command}").classes("text-secondary")
with el.WRow() as row:
row.tailwind.justify_content("around")
with ui.column() as col:
col.tailwind.max_width("lg")
ui.label(f"Host Name: {result.name}").classes("text-secondary")
ui.label(f"Command: {result.command}").classes("text-secondary")
ui.label(f"Date: {result.date}").classes("text-secondary")
with ui.column() as col:
col.tailwind.max_width("lg")
ui.label(f"Task has failed: {result.failed}").classes("text-secondary")
ui.label(f"Data is cached: {result.cached}").classes("text-secondary")
ui.label(f"Time: {result.time}").classes("text-secondary")
ui.label(f"Host: {result.name}").classes("text-secondary")
timestamp = await ui.run_javascript(
f"new Date({result.timestamp} * 1000).toLocaleString(undefined, {{dateStyle: 'short', timeStyle: 'short', hour12: 'false'}});"
)
ui.label(f"Timestamp: {timestamp}").classes("text-secondary")
ui.label(f"Return Code: {result.return_code}").classes("text-secondary")
with el.Card() as card:
with el.WColumn():
terminal = cli.Terminal(options={"rows": 18, "cols": 120, "convertEol": True})
+196 -185
View File
@@ -1,14 +1,17 @@
from typing import Any, Dict, List, Union
from typing import Any, Callable, Dict, List, Union
import asyncio
from datetime import datetime
import json
import string
from apscheduler.triggers.combining import AndTrigger
from apscheduler.triggers.combining import OrTrigger
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.interval import IntervalTrigger
from apscheduler.job import Job # type: ignore
from apscheduler.triggers.combining import AndTrigger # type: ignore
from apscheduler.triggers.combining import OrTrigger # type: ignore
from apscheduler.triggers.cron import CronTrigger # type: ignore
from apscheduler.triggers.interval import IntervalTrigger # type: ignore
from cron_validator import CronValidator # type: ignore
from cron_descriptor import get_description # type: ignore
from nicegui import ui, Tailwind, events # type: ignore
from . import SelectionConfirm, Tab
from nicegui import ui, Tailwind, events
from bale import elements as el
from bale.result import Result
from bale.interfaces import cli
@@ -16,8 +19,7 @@ from bale.interfaces import ssh
from bale.interfaces import zfs
from bale.apps import zab
from bale import scheduler
from cron_validator import CronValidator
from cron_descriptor import get_description
import logging
@@ -26,6 +28,22 @@ logger = logging.getLogger(__name__)
job_handlers: Dict[str, Union[cli.Cli, ssh.Ssh]] = {}
def automation(raw: Union[str, Job]) -> Union[scheduler.Automation, scheduler.Zfs_Autobackup, None]:
json_data = json.dumps({})
if isinstance(raw, str):
json_data = raw
elif isinstance(raw, Job):
if "data" in raw.kwargs:
json_data = raw.kwargs["data"]
else:
return None
raw_data = json.loads(json_data)
if raw_data["app"] == "zfs_autobackup":
return scheduler.Zfs_Autobackup(**raw_data)
else:
return scheduler.Automation(**raw_data)
def populate_job_handler(app: str, job_id: str, host: str):
tab = Tab(host=None, spinner=None)
if job_id not in job_handlers:
@@ -36,51 +54,48 @@ def populate_job_handler(app: str, job_id: str, host: str):
return job_handlers[job_id]
class CommandTemplate(string.Template):
class AutomationTemplate(string.Template):
delimiter = ""
async def automation_job(**kwargs) -> None:
if "data" in kwargs:
jd = json.loads(kwargs["data"])
command = CommandTemplate(jd["command"])
auto = automation(kwargs["data"])
if auto is not None:
command = AutomationTemplate(auto.command)
tab = Tab(host=None, spinner=None)
if jd["app"] == "zfs_autobackup":
d = scheduler.Zfs_Autobackup(**jd)
populate_job_handler(app=d.app, job_id=d.id, host=d.host)
if job_handlers[d.id].is_busy is False:
result = await job_handlers[d.id].execute(command.safe_substitute(host=d.host))
result.name = d.host
if auto.app == "zfs_autobackup":
populate_job_handler(app=auto.app, job_id=auto.id, host=auto.host)
if job_handlers[auto.id].is_busy is False:
result = await job_handlers[auto.id].execute(command.safe_substitute(name=auto.name, host=auto.host))
result.name = auto.host
result.status = "success" if result.return_code == 0 else "error"
if d.pipe_success is True and result.status == "success":
if auto.pipe_success is True and result.status == "success":
tab.pipe_result(result=result)
if d.pipe_error is True and result.status != "success":
if auto.pipe_error is True and result.status != "success":
tab.pipe_result(result=result)
tab.add_history(result=result)
else:
logger.warning("Job Skipped!")
elif jd["app"] == "remote":
d = scheduler.Automation(**jd)
populate_job_handler(app=d.app, job_id=d.id, host=d.host)
if job_handlers[d.id].is_busy is False:
result = await job_handlers[d.id].execute(command.safe_substitute(host=d.host))
result.name = d.host
if d.pipe_success is True and result.status == "success":
elif auto.app == "remote":
populate_job_handler(app=auto.app, job_id=auto.id, host=auto.host)
if job_handlers[auto.id].is_busy is False:
result = await job_handlers[auto.id].execute(command.safe_substitute(name=auto.name, host=auto.host))
result.name = auto.host
if auto.pipe_success is True and result.status == "success":
tab.pipe_result(result=result)
if d.pipe_error is True and result.status != "success":
if auto.pipe_error is True and result.status != "success":
tab.pipe_result(result=result)
tab.add_history(result=result)
else:
logger.warning("Job Skipped!")
elif jd["app"] == "local":
d = scheduler.Automation(**jd)
populate_job_handler(app=d.app, job_id=d.id, host=d.host)
if job_handlers[d.id].is_busy is False:
result = await job_handlers[d.id].execute(command.safe_substitute(host=d.host))
result.name = d.host
if d.pipe_success is True and result.status == "success":
elif auto.app == "local":
populate_job_handler(app=auto.app, job_id=auto.id, host=auto.host)
if job_handlers[auto.id].is_busy is False:
result = await job_handlers[auto.id].execute(command.safe_substitute(name=auto.name, host=auto.host))
result.name = auto.host
if auto.pipe_success is True and result.status == "success":
tab.pipe_result(result=result)
if d.pipe_error is True and result.status != "success":
if auto.pipe_error is True and result.status != "success":
tab.pipe_result(result=result)
tab.add_history(result=result)
else:
@@ -96,10 +111,10 @@ class Automation(Tab):
self.picked_options: Dict[str, str] = {}
self.triggers: Dict[str, str] = {}
self.picked_triggers: Dict[str, str] = {}
self.job_data: Dict[str, str] = {}
self.auto: Union[scheduler.Automation, scheduler.Zfs_Autobackup]
self.job_names: List[str] = []
self.default_options: Dict[str, str] = {}
self.build_command: str = ""
self.build_command: Callable
self.target_host: el.DSelect
self.target_paths: List[str] = [""]
self.target_path: el.DSelect
@@ -122,6 +137,11 @@ class Automation(Tab):
self.triggers_scroll: ui.scroll_area
self.trigger_controls: Dict[str, str] = {}
self.hosts: el.DSelect
self.prop: el.DInput
self.parentchildren: el.DSelect
self.parent: el.DSelect
self.children: el.DSelect
self.exclude: el.DSelect
super().__init__(spinner, host)
def _build(self) -> None:
@@ -134,7 +154,6 @@ class Automation(Tab):
el.SmButton("Create", on_click=self._create_automation)
el.SmButton("Remove", on_click=self._remove_automation)
el.SmButton("Edit", on_click=self._edit_automation)
# el.SmButton("Duplicate", on_click=self._duplicate_automation)
el.SmButton("Run Now", on_click=self._run_automation)
with ui.row().classes("items-center"):
el.SmButton(text="Refresh", on_click=self._update_automations)
@@ -161,8 +180,17 @@ class Automation(Tab):
"maxWidth": 150,
},
{"headerName": "Command", "field": "command", "filter": "agTextColumnFilter"},
{"headerName": "Next Date", "field": "next_run_date", "filter": "agDateColumnFilter", "maxWidth": 100},
{"headerName": "Next Time", "field": "next_run_time", "maxWidth": 100},
{
"headerName": "Next Run",
"field": "next_run",
"filter": "agTextColumnFilter",
"maxWidth": 125,
":cellRenderer": """(data) => {
var date = new Date(data.value * 1000).toLocaleString(undefined, {dateStyle: 'short', timeStyle: 'short', hour12: false});;
return date;
}""",
"sort": "asc",
},
{
"headerName": "Status",
"field": "status",
@@ -186,11 +214,10 @@ class Automation(Tab):
job_id = f"{job_data.args['data']['name']}@{self.host}"
for job in self.scheduler.scheduler.get_jobs():
if job.id == job_id:
if "data" in job.kwargs:
jd = json.loads(job.kwargs["data"])
populate_job_handler(app=jd["app"], job_id=job.id, host=self.host)
break
auto = automation(job)
if auto is not None and auto.id == job_id:
populate_job_handler(app=auto.app, job_id=auto.id, host=self.host)
break
async def run():
for job in self.scheduler.scheduler.get_jobs():
@@ -227,23 +254,12 @@ class Automation(Tab):
self._automations.clear()
for job in self.scheduler.scheduler.get_jobs():
if job.next_run_time is not None:
next_run_date = job.next_run_time.strftime("%Y/%m/%d")
next_run_time = job.next_run_time.strftime("%H:%M")
next_run = job.next_run_time.timestamp()
else:
next_run_date = "NA"
next_run_time = "NA"
if "data" in job.kwargs:
jd = json.loads(job.kwargs["data"])
if self.host == jd["host"]:
self._automations.append(
{
"name": job.id.split("@")[0],
"command": jd["command"],
"next_run_date": next_run_date,
"next_run_time": next_run_time,
"status": "",
}
)
next_run = "NA"
auto = automation(job)
if auto is not None and auto.host == self.host:
self._automations.append({"name": auto.name, "command": auto.command, "next_run": next_run, "status": ""})
self._grid.update()
async def _remove_automation(self) -> None:
@@ -253,8 +269,15 @@ class Automation(Tab):
rows = await self._grid.get_selected_rows()
for row in rows:
for job in self.scheduler.scheduler.get_jobs():
j = job.id.split("@")[0]
if j == row["name"]:
auto = automation(job)
if auto is not None and auto.name == row["name"]:
if job.id in job_handlers:
del job_handlers[job.id]
if isinstance(auto, scheduler.Zfs_Autobackup):
for host in auto.hosts:
command = AutomationTemplate(auto.prop)
prop = command.safe_substitute(name=auto.name, host=host)
await self._remove_prop_from_all_fs(host=host, prop=prop)
self.scheduler.scheduler.remove_job(job.id)
self._automations.remove(row)
self._grid.update()
@@ -272,29 +295,6 @@ class Automation(Tab):
job.modify(next_run_time=datetime.now())
self._set_selection()
async def _duplicate_automation(self) -> None:
rows = await self._grid.get_selected_rows()
if len(rows) == 1:
with ui.dialog() as dialog, el.Card():
with el.DBody():
with el.WColumn():
host = el.DSelect(self._zfs_hosts, value=self.host, label="Host", with_input=True)
with el.WRow():
el.DButton("Duplicate", on_click=lambda: dialog.submit("duplicate"))
result = await dialog
if result == "confirm":
for job in self.scheduler.scheduler.get_jobs():
if job.id == rows[0]["name"]:
self.scheduler.scheduler.add_job(
automation_job,
trigger=build_triggers(),
kwargs={"data": json.dumps(auto.to_dict())},
id=self.auto_name.value.lower(),
coalesce=True,
max_instances=1,
replace_existing=True,
)
async def _edit_automation(self) -> None:
self._set_selection(mode="single")
result = await SelectionConfirm(container=self._confirm, label=">EDIT<")
@@ -303,26 +303,17 @@ class Automation(Tab):
await self._create_automation(rows[0]["name"])
self._set_selection()
async def _add_prop_to_fs(
self,
host: str,
prop: str,
value: str,
module: str = "autobackup",
filesystems: Union[List[str], None] = None,
) -> None:
async def _add_prop_to_fs(self, host: str, prop: str, value: str, filesystems: Union[List[str], None] = None) -> None:
if filesystems is not None:
full_prop = f"{module}:{prop}"
for fs in filesystems:
result = await self._zfs[host].add_filesystem_prop(filesystem=fs, prop=full_prop, value=value)
result = await self._zfs[host].add_filesystem_prop(filesystem=fs, prop=prop, value=value)
self.add_history(result=result)
async def _remove_prop_from_all_fs(self, host: str, prop: str, module: str = "autobackup") -> None:
full_prop = f"{module}:{prop}"
filesystems_with_prop_result = await self._zfs[host].filesystems_with_prop(full_prop)
async def _remove_prop_from_all_fs(self, host: str, prop: str) -> None:
filesystems_with_prop_result = await self._zfs[host].filesystems_with_prop(prop)
filesystems_with_prop = list(filesystems_with_prop_result.data)
for fs in filesystems_with_prop:
result = await self._zfs[host].remove_filesystem_prop(filesystem=fs, prop=full_prop)
result = await self._zfs[host].remove_filesystem_prop(filesystem=fs, prop=prop)
self.add_history(result=result)
async def _create_automation(self, name: str = "") -> None:
@@ -331,16 +322,16 @@ class Automation(Tab):
self.picked_options = {}
self.triggers = {}
self.picked_triggers = {}
self.job_data = {}
jobs = self.scheduler.scheduler.get_jobs()
self.job_names = []
self.auto = scheduler.Automation(host=self.host, hosts=[self.host])
job = None
for job in jobs:
j = job.id.split("@")[0]
self.job_names.append(j)
if name == j:
job = self.scheduler.scheduler.get_job(job.id)
self.job_data.update(json.loads(job.kwargs["data"]))
auto = automation(job)
if auto is not None:
self.job_names.append(auto.name)
if auto.name == name:
self.auto = auto
def validate_name(n: str):
if len(n) > 0 and n.islower() and "@" not in n and (n not in self.job_names or name != ""):
@@ -383,18 +374,15 @@ class Automation(Tab):
def option_changed(e):
self.current_help.text = self.options[e.value]["description"]
async def zab_controls() -> None:
async def zab_controls(auto: scheduler.Zfs_Autobackup) -> None:
filesystems = await self.zfs.filesystems
if isinstance(self.job_data.get("filesystems", {}), dict):
self.fs = self.job_data.get(
"filesystems",
{"all": {}, "values": {}, "parent": [], "children": [], "parentchildren": [], "exclude": []},
)
else:
self.fs = {"all": {}, "values": {}, "parent": [], "children": [], "parentchildren": [], "exclude": []}
if not self.fs["all"]:
for fs in filesystems.data:
self.fs["all"][fs] = ""
parent: List[str] = []
children: List[str] = []
parentchildren: List[str] = []
exclude: List[str] = []
all_fs: Dict[str, str] = {}
for fs in filesystems.data:
all_fs[fs] = ""
async def target_host_selected() -> None:
if self.target_host.value != "":
@@ -416,36 +404,37 @@ class Automation(Tab):
self.target_path.update()
self.target_path.value = ""
async def target_path_selected() -> None:
self.build_command()
def build_command() -> None:
try:
prop_suffix = self.prop.value.split(":")[1]
except IndexError:
prop_suffix = ""
base = ""
for key, value in self.picked_options.items():
base = base + f" --{key}{f' {value}' if value != '' else ''}"
target_path = f"{f' {self.target_path.value}' if self.target_path.value != '' else ''}"
base = base + f" {self.auto_name.value.lower()}" + target_path
base = base + f" {prop_suffix}" + target_path
self.command.value = base
def all_fs_to_lists():
self.fs["parentchildren"].clear()
self.fs["parent"].clear()
self.fs["children"].clear()
self.fs["exclude"].clear()
for fs, v in self.fs["all"].items():
parentchildren.clear()
parent.clear()
children.clear()
exclude.clear()
for fs, v in all_fs.items():
if v == "":
self.fs["parentchildren"].append(fs)
self.fs["parent"].append(fs)
self.fs["children"].append(fs)
self.fs["exclude"].append(fs)
parentchildren.append(fs)
parent.append(fs)
children.append(fs)
exclude.append(fs)
elif v == "true":
self.fs["parentchildren"].append(fs)
parentchildren.append(fs)
elif v == "parent":
self.fs["parent"].append(fs)
parent.append(fs)
elif v == "child":
self.fs["children"].append(fs)
children.append(fs)
elif v == "false":
self.fs["exclude"].append(fs)
exclude.append(fs)
def cull_fs_list(e: events.GenericEventArguments, value: str = "false") -> None:
if e.sender != self.parentchildren:
@@ -456,11 +445,11 @@ class Automation(Tab):
self.children.disable()
if e.sender != self.exclude:
self.exclude.disable()
for fs, v in self.fs["all"].items():
for fs, v in all_fs.items():
if v == value:
self.fs["all"][fs] = ""
all_fs[fs] = ""
for fs in e.sender.value:
self.fs["all"][fs] = value
all_fs[fs] = value
all_fs_to_lists()
self.parentchildren.enable()
self.parent.enable()
@@ -471,6 +460,17 @@ class Automation(Tab):
self.children.update()
self.exclude.update()
def validate_prop(value):
    """Return True if *value* is a valid ``module:name`` property string.

    Valid means exactly two ":"-separated parts, each non-empty and
    containing no spaces (e.g. ``autobackup:mybackup``).
    """
    parts = value.split(":")
    # Require exactly one ":" up front so malformed input is rejected
    # before the per-part checks run.
    if len(parts) != 2:
        return False
    return all(part != "" and " " not in part for part in parts)
if name == "":
self.default_options = {
"verbose": "",
@@ -479,7 +479,7 @@ class Automation(Tab):
"ssh-config": self.zfs.config_path,
}
else:
self.default_options = self.job_data["options"]
self.default_options = auto.options
self.options = zab.options
self.build_command = build_command
filesystems = await self.zfs.filesystems
@@ -491,35 +491,37 @@ class Automation(Tab):
row.tailwind.width("[860px]").justify_content("center")
with ui.column() as col:
col.tailwind.height("full").width("[420px]")
self.hosts = el.DSelect(source_hosts, label="Source Host(s)", multiple=True, with_input=True)
self.prop = el.DInput(label="Property", value=auto.prop, on_change=build_command, validation=validate_prop)
self.app_em.append(self.prop)
self.target_host = el.DSelect(target_host, label="Target Host", on_change=target_host_selected)
self.target_paths = [""]
self.target_path = el.DSelect(self.target_paths, value="", label="Target Path", on_change=target_path_selected)
self.target_path = el.DSelect(self.target_paths, value="", label="Target Path", new_value_mode="add-unique", on_change=build_command)
self.hosts = el.DSelect(source_hosts, label="Source Host(s)", value=auto.hosts, multiple=True, with_input=True)
all_fs_to_lists()
with ui.scroll_area().classes("col"):
self.parentchildren = el.DSelect(
self.fs["parentchildren"],
parentchildren,
label="Source Parent And Children",
with_input=True,
multiple=True,
on_change=lambda e: cull_fs_list(e, "true"),
)
self.parent = el.DSelect(
self.fs["parent"],
parent,
label="Source Parent Only",
with_input=True,
multiple=True,
on_change=lambda e: cull_fs_list(e, "parent"),
)
self.children = el.DSelect(
self.fs["children"],
children,
label="Source Children Only",
with_input=True,
multiple=True,
on_change=lambda e: cull_fs_list(e, "child"),
)
self.exclude = el.DSelect(
self.fs["exclude"],
exclude,
label="Exclude",
with_input=True,
multiple=True,
@@ -528,19 +530,21 @@ class Automation(Tab):
with ui.column() as col:
col.tailwind.height("full").width("[420px]")
options_controls()
self.parentchildren.value = auto.parentchildren
self.parent.value = auto.parent
self.children.value = auto.children
self.exclude.value = auto.exclude
self.previous_prop = auto.prop
if name != "":
self.target_host.value = self.job_data.get("target_host", "")
target_path = self.job_data.get("target_path", "")
self.target_host.value = auto.target_host
target_path = auto.target_path
tries = 0
while target_path not in self.target_path.options and tries < 20:
await asyncio.sleep(0.1)
tries = tries + 1
if target_path not in self.target_paths:
self.target_paths.append(target_path)
self.target_path.value = target_path
self.parentchildren.value = self.fs["values"].get("parentchildren", None)
self.parent.value = self.fs["values"].get("parent", None)
self.children.value = self.fs["values"].get("children", None)
self.exclude.value = self.fs["values"].get("exclude", None)
self.hosts.value = self.job_data.get("hosts", [self.host])
else:
self.hosts.value = [self.host]
@@ -644,7 +648,7 @@ class Automation(Tab):
if name == "":
self.default_triggers = {"id": {"type": "Cron", "value": ""}}
else:
self.default_triggers = self.job_data["triggers"]
self.default_triggers = self.auto.triggers
with ui.row() as row:
row.tailwind(tw_rows)
self.current_trigger = el.FSelect(["Cron", "Interval"], value="Cron", label="Trigger", with_input=True)
@@ -668,7 +672,10 @@ class Automation(Tab):
if self.app.value is not None:
with options_col:
if self.app.value == "zfs_autobackup":
await zab_controls()
if isinstance(self.auto, scheduler.Zfs_Autobackup):
await zab_controls(self.auto)
else:
await zab_controls(scheduler.Zfs_Autobackup(host=self.host, hosts=[self.host]))
if self.app.value == "local":
local_controls()
if self.app.value == "remote":
@@ -677,20 +684,15 @@ class Automation(Tab):
self.stepper.next()
def local_controls():
command_input = el.DInput("Command").bind_value_to(self.command, "value")
if name != "":
command_input.value = self.job_data["command"]
el.DInput("Command", value=self.auto.command).bind_value_to(self.command, "value")
def remote_controls():
command_input = el.DInput("Command").bind_value_to(self.command, "value")
self.hosts = el.DSelect(self._zfs_hosts, value=self.host, label="Hosts", with_input=True, multiple=True)
command_input = el.DInput("Command", value=self.auto.command).bind_value_to(self.command, "value")
self.hosts = el.DSelect(self._zfs_hosts, value=self.auto.hosts, label="Hosts", with_input=True, multiple=True)
self.save.bind_enabled_from(self.hosts, "value", backward=lambda x: len(x) > 0)
if name != "":
command_input.value = self.job_data["command"]
self.hosts.value = self.job_data["hosts"]
def string_to_interval(string: str):
interval = string.split(":", 4)
def to_interval(value: str):
    """Build an IntervalTrigger from a ``weeks:days:hours:minutes:seconds`` string.

    Trailing fields may be omitted; every missing field defaults to 0.
    """
    fields = value.split(":", 4)
    # Pad to exactly five entries so each trigger argument is present.
    fields = fields + ["0"] * (5 - len(fields))
    weeks, days, hours, minutes, seconds = (int(f) for f in fields)
    return IntervalTrigger(weeks=weeks, days=days, hours=hours, minutes=minutes, seconds=seconds)
@@ -701,7 +703,7 @@ class Automation(Tab):
if "Cron" == value["type"]:
triggers.append(CronTrigger().from_crontab(value["value"]))
elif "Interval" == value["type"]:
triggers.append(string_to_interval(value["value"]))
triggers.append(to_interval(value["value"]))
return combine(triggers)
def validate_hosts(e):
@@ -721,11 +723,11 @@ class Automation(Tab):
col.tailwind.height("full").width("[420px]")
self.auto_name = el.DInput(label="Name", value=" ", validation=validate_name)
with el.WRow():
self.pipe_success = el.DCheckbox("Pipe Success", value=self.job_data.get("pipe_success", False))
self.pipe_error = el.DCheckbox("Pipe Error", value=self.job_data.get("pipe_error", False))
self.pipe_success = el.DCheckbox("Pipe Success", value=self.auto.pipe_success)
self.pipe_error = el.DCheckbox("Pipe Error", value=self.auto.pipe_error)
self.schedule_em = el.ErrorAggregator(self.auto_name)
if name != "":
self.app = el.DInput(label="Application", value=self.job_data["app"]).props("readonly")
self.app = el.DInput(label="Application", value=self.auto.app).props("readonly")
else:
self.app = el.DSelect(
["zfs_autobackup", "local", "remote"],
@@ -751,15 +753,16 @@ class Automation(Tab):
with el.WRow() as row:
row.tailwind.height("[40px]")
self.as_spinner = el.Spinner()
self.app_em = el.ErrorAggregator()
self.save = el.DButton("SAVE", on_click=lambda: automation_dialog.submit("save"))
self.save.bind_enabled_from(self.app_em, "no_errors")
el.Spinner(master=self.as_spinner)
self.auto_name.value = name
if name != "":
self.auto_name.props("readonly")
self.schedule_mode.value = self.job_data["schedule_mode"]
self.schedule_mode.value = self.auto.schedule_mode
result = await automation_dialog
if result == "save":
auto: Union[scheduler.Automation, scheduler.Zfs_Autobackup]
auto_name = self.auto_name.value.lower()
if hasattr(self, "hosts"):
hosts = self.hosts.value
@@ -767,23 +770,25 @@ class Automation(Tab):
hosts = [self.host]
if self.app.value == "zfs_autobackup":
for job in jobs:
j = job.id.split("@")[0]
if j == auto_name:
existing_auto = automation(job)
if existing_auto is not None and existing_auto.name == auto_name:
self.scheduler.scheduler.remove_job(job.id)
for host in hosts:
auto_id = f"{auto_name}@{host}"
await self._remove_prop_from_all_fs(host=host, prop=auto_name)
await self._add_prop_to_fs(host=host, prop=auto_name, value="true", filesystems=self.parentchildren.value)
await self._add_prop_to_fs(host=host, prop=auto_name, value="parent", filesystems=self.parent.value)
await self._add_prop_to_fs(host=host, prop=auto_name, value="child", filesystems=self.children.value)
await self._add_prop_to_fs(host=host, prop=auto_name, value="false", filesystems=self.exclude.value)
self.fs["values"] = {}
self.fs["values"]["parentchildren"] = self.parentchildren.value
self.fs["values"]["parent"] = self.parent.value
self.fs["values"]["children"] = self.children.value
self.fs["values"]["exclude"] = self.exclude.value
if self.previous_prop != "":
command = AutomationTemplate(self.previous_prop)
prop = command.safe_substitute(name=auto_name, host=host)
await self._remove_prop_from_all_fs(host=host, prop=prop)
command = AutomationTemplate(self.prop.value)
prop = command.safe_substitute(name=auto_name, host=host)
await self._remove_prop_from_all_fs(host=host, prop=prop)
await self._add_prop_to_fs(host=host, prop=prop, value="true", filesystems=self.parentchildren.value)
await self._add_prop_to_fs(host=host, prop=prop, value="parent", filesystems=self.parent.value)
await self._add_prop_to_fs(host=host, prop=prop, value="child", filesystems=self.children.value)
await self._add_prop_to_fs(host=host, prop=prop, value="false", filesystems=self.exclude.value)
auto = scheduler.Zfs_Autobackup(
id=auto_id,
name=auto_name,
hosts=hosts,
host=host,
command="python -m zfs_autobackup.ZfsAutobackup" + self.command.value,
@@ -793,9 +798,13 @@ class Automation(Tab):
target_host=self.target_host.value,
target_path=self.target_path.value,
target_paths=self.target_path.options,
filesystems=self.fs,
pipe_success=self.pipe_success.value,
pipe_error=self.pipe_error.value,
prop=self.prop.value,
parentchildren=self.parentchildren.value,
parent=self.parent.value,
children=self.children.value,
exclude=self.exclude.value,
)
self.scheduler.scheduler.add_job(
automation_job,
@@ -808,13 +817,14 @@ class Automation(Tab):
)
elif self.app.value == "remote":
for job in jobs:
j = job.id.split("@")[0]
if j == auto_name:
auto = automation(job)
if auto is not None and auto.name == auto_name:
self.scheduler.scheduler.remove_job(job.id)
for host in hosts:
auto_id = f"{auto_name}@{host}"
auto = scheduler.Automation(
id=auto_id,
name=auto_name,
app=self.app.value,
hosts=hosts,
host=host,
@@ -837,6 +847,7 @@ class Automation(Tab):
auto_id = f"{auto_name}@{self.host}"
auto = scheduler.Automation(
id=auto_id,
name=auto_name,
app=self.app.value,
hosts=hosts,
host=self.host,
+40 -7
View File
@@ -1,7 +1,8 @@
from datetime import datetime
import json
from nicegui import ui, events # type: ignore
import httpx
from . import SelectionConfirm, Tab
from nicegui import ui, events
from bale import elements as el
from bale.result import Result
from bale.interfaces import zfs
@@ -33,7 +34,12 @@ class History(Tab):
"rowSelection": "multiple",
"paginationAutoPageSize": True,
"pagination": True,
"defaultColDef": {"resizable": True, "sortable": True, "suppressMovable": True, "sortingOrder": ["asc", "desc"]},
"defaultColDef": {
"resizable": True,
"sortable": True,
"suppressMovable": True,
"sortingOrder": ["asc", "desc"],
},
"columnDefs": [
{
"headerName": "Host",
@@ -47,8 +53,17 @@ class History(Tab):
"filter": "agTextColumnFilter",
"flex": 1,
},
{"headerName": "Date", "field": "date", "filter": "agDateColumnFilter", "maxWidth": 100},
{"headerName": "Time", "field": "time", "maxWidth": 100},
{
"headerName": "Timestamp",
"field": "timestamp",
"filter": "agTextColumnFilter",
"maxWidth": 125,
":cellRenderer": """(data) => {
var date = new Date(data.value * 1000).toLocaleString(undefined, {dateStyle: 'short', timeStyle: 'short', hour12: false});;
return date;
}""",
"sort": "desc",
},
{
"headerName": "Status",
"field": "status",
@@ -90,6 +105,20 @@ class History(Tab):
http[status]["data"] = e.content["json"]["data"]
http[status]["headers"] = e.content["json"]["headers"]
def test(status):
    """POST a synthetic pipe payload to the configured endpoint and notify the outcome.

    Builds a placeholder Result ("TEST COMMAND") so the user can verify the
    url/data/headers they configured for the given *status* ("success"/"error").
    Any failure — missing config keys or network errors — surfaces as a
    negative notification instead of propagating.
    """
    try:
        url = http[status]["url"]
        data = self.process_pipe_data(result=Result(name=self.host, command="TEST COMMAND", status=status), data=http[status]["data"])
        headers = http[status]["headers"]
        post = httpx.post(url=url, json=data, headers=headers)
        if post.status_code == 200:
            el.notify("Test successful!", type="positive")
        else:
            el.notify(f"Test failed with status code {post.status_code}!", type="negative")
    except Exception:
        # Broad catch is deliberate (best-effort UI feedback), but use
        # "except Exception" rather than a bare "except:" so
        # KeyboardInterrupt/SystemExit still propagate.
        el.notify("Test failed!", type="negative")
def show_controls(status):
if status not in http:
http[status] = {}
@@ -107,7 +136,7 @@ class History(Tab):
"topic": "mytopic",
"tags": ["turtle"],
"title": "Successful Automation Run for {name}",
"message": "{stdout}",
"message": "{command}",
},
)
editor.properties["content"]["json"]["headers"] = self.get_pipe_status("http", status).get("headers", {"Authorization": "Bearer tk_..."})
@@ -126,11 +155,15 @@ class History(Tab):
with ui.step("On Success"):
with el.WColumn().classes("col justify-start"):
show_controls(status="success")
el.LgButton("NEXT", on_click=lambda _: stepper.next())
with el.WRow():
el.LgButton("TEST", on_click=lambda _: test(status="success"))
el.LgButton("NEXT", on_click=lambda _: stepper.next())
with ui.step("On Error"):
with el.WColumn().classes("col justify-start"):
show_controls(status="error")
el.DButton("SAVE", on_click=lambda: host_dialog.submit("save"))
with el.WRow():
el.LgButton("TEST", on_click=lambda _: test(status="error"))
el.DButton("SAVE", on_click=lambda: host_dialog.submit("save"))
result = await host_dialog
if result == "save":
+50 -36
View File
@@ -1,6 +1,6 @@
import asyncio
from copy import deepcopy
from nicegui import ui
from nicegui import background_tasks, ui # type: ignore
from . import SelectionConfirm, Tab, Task
from bale.result import Result
from bale import elements as el
@@ -75,11 +75,20 @@ class Manage(Tab):
"field": "used",
"maxWidth": 100,
":comparator": """(valueA, valueB, nodeA, nodeB, isInverted) => {
return (nodeA.data.used_bytes > nodeB.data.used_bytes) ? -1 : 1;
}""",
return (nodeA.data.used_bytes > nodeB.data.used_bytes) ? -1 : 1;
}""",
},
{
"headerName": "Created",
"field": "creation",
"filter": "agTextColumnFilter",
"maxWidth": 125,
":cellRenderer": """(data) => {
var date = new Date(data.value * 1000).toLocaleString(undefined, {dateStyle: 'short', timeStyle: 'short', hour12: false});;
return date;
}""",
"sort": "desc",
},
{"headerName": "Creation Date", "field": "creation_date", "filter": "agDateColumnFilter", "maxWidth": 150},
{"headerName": "Creation Time", "field": "creation_time", "maxWidth": 150},
{"headerName": "Holds", "field": "userrefs", "filter": "agNumberColumnFilter", "maxWidth": 100},
],
"rowData": [],
@@ -92,8 +101,8 @@ class Manage(Tab):
self._spinner.visible = True
self.zfs.invalidate_query()
snapshots = await self.zfs.snapshots
await self.zfs.filesystems
await self.zfs.holds_for_snapshot()
background_tasks.create(self.zfs.filesystems, name="zfs_filesystems")
background_tasks.create(self.zfs.holds_for_snapshot(), name="zfs_holds")
self._grid.options["rowData"] = list(snapshots.data.values())
self._grid.update()
self._spinner.visible = False
@@ -103,9 +112,12 @@ class Manage(Tab):
result = await SelectionConfirm(container=self._confirm, label=">BROWSE<")
if result == "confirm":
rows = await self._grid.get_selected_rows()
filesystems = await self.zfs.filesystems
mount_path = filesystems.data[rows[0]["filesystem"]]["mountpoint"]
await sshdl.SshFileBrowse(zfs=self.zfs, path=f"{mount_path}/.zfs/snapshot/{rows[0]['name']}")
try:
filesystems = await self.zfs.filesystems
mount_path = filesystems.data[rows[0]["filesystem"]]["mountpoint"]
await sshdl.SshFileBrowse(zfs=self.zfs, path=f"{mount_path}/.zfs/snapshot/{rows[0]['name']}")
except KeyError:
el.notify(f"Unable to browse {rows[0]['filesystem']}", type="warning")
self._set_selection()
async def _find(self) -> None:
@@ -248,32 +260,34 @@ class Manage(Tab):
if result == "confirm":
self._spinner.visible = True
rows = await self._grid.get_selected_rows()
for row in rows:
holds = await self.zfs.holds_for_snapshot(f"{row['filesystem']}@{row['name']}")
for tag in holds.data:
if tag not in all_tags:
all_tags.append(tag)
if len(all_tags) > 0:
tags.update()
self._spinner.visible = False
result = await dialog
if result == "release":
if len(tags.value) > 0:
for tag in tags.value:
for row in rows:
tasks = self._add_task(
"release",
zfs.SnapshotRelease(
name=f"{row['filesystem']}@{row['name']}",
tag=tag,
recursive=recursive.value,
).command,
hosts=zfs_hosts.value,
)
if self._auto.value is True:
for task in tasks:
await self._run_task(task=task, spinner=self._spinner)
await self.display_snapshots()
if len(rows) > 0:
for row in rows:
holds = await self.zfs.holds_for_snapshot(f"{row['filesystem']}@{row['name']}")
for tag in holds.data:
if tag not in all_tags:
all_tags.append(tag)
if len(all_tags) > 0:
tags.update()
self._spinner.visible = False
result = await dialog
if result == "release":
if len(tags.value) > 0:
for tag in tags.value:
for row in rows:
tasks = self._add_task(
"release",
zfs.SnapshotRelease(
name=f"{row['filesystem']}@{row['name']}",
tag=tag,
recursive=recursive.value,
).command,
hosts=zfs_hosts.value,
)
if self._auto.value is True:
for task in tasks:
await self._run_task(task=task, spinner=self._spinner)
await self.display_snapshots()
self._spinner.visible = False
self._set_selection()
def _update_task_status(self, timestamp, status, result=None):
+1 -1
View File
@@ -7,7 +7,7 @@ import os
if not os.path.exists("data"):
os.makedirs("data")
os.environ.setdefault("NICEGUI_STORAGE_PATH", "data")
from nicegui import ui
from nicegui import ui # type: ignore
ui.card.default_style("max-width: none")
ui.card.default_props("flat bordered")
+57 -5
View File
@@ -1,8 +1,60 @@
aiofiles==23.2.1
aiohttp==3.9.3
aiosignal==1.3.1
annotated-types==0.6.0
anyio==4.3.0
APScheduler==3.10.4
SQLAlchemy==2.0.22
asyncssh==2.14.0
attrs==23.2.0
bidict==0.23.1
certifi==2024.2.2
cffi==1.16.0
click==8.1.7
colorama==0.4.6
cron-descriptor==1.4.0
cron-validator==1.0.8
nicegui==1.4.2
zfs-autobackup==3.2
netifaces==0.11.0
asyncssh==2.14.0
cryptography==42.0.5
docutils==0.19
fastapi==0.109.2
frozenlist==1.4.1
greenlet==3.0.3
h11==0.14.0
httpcore==1.0.4
httptools==0.6.1
httpx==0.27.0
idna==3.6
ifaddr==0.2.0
itsdangerous==2.1.2
Jinja2==3.1.3
markdown2==2.4.13
MarkupSafe==2.1.5
multidict==6.0.5
nicegui==1.4.17
orjson==3.9.15
pscript==0.7.7
pycparser==2.21
pydantic==2.6.3
pydantic_core==2.16.3
Pygments==2.17.2
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
python-engineio==4.9.0
python-multipart==0.0.9
python-socketio==5.11.1
pytz==2024.1
PyYAML==6.0.1
simple-websocket==1.0.0
six==1.16.0
sniffio==1.3.1
SQLAlchemy==2.0.22
starlette==0.36.3
typing_extensions==4.10.0
tzlocal==5.2
uvicorn==0.27.1
uvloop==0.19.0
vbuild==0.8.2
watchfiles==0.21.0
websockets==12.0
wsproto==1.2.0
yarl==1.9.4
zfs-autobackup==3.2