Add rich display and quick gui
parent c2710d180b
commit 4441fe3d46
3 changed files with 331 additions and 64 deletions
180 poetry.lock (generated)
@@ -131,6 +131,166 @@ files = [
    {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]

[[package]]
name = "markdown-it-py"
version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.8"
files = [
    {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
    {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
]

[package.dependencies]
mdurl = ">=0.1,<1.0"

[package.extras]
benchmarking = ["psutil", "pytest", "pytest-benchmark"]
code-style = ["pre-commit (>=3.0,<4.0)"]
compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]

[[package]]
name = "mdurl"
version = "0.1.2"
description = "Markdown URL utilities"
optional = false
python-versions = ">=3.7"
files = [
    {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
    {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]

[[package]]
name = "packaging"
version = "23.1"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
    {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
    {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
]

[[package]]
name = "pygments"
version = "2.16.1"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.7"
files = [
    {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
    {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
]

[package.extras]
plugins = ["importlib-metadata"]

[[package]]
name = "pyqt5"
version = "5.15.9"
description = "Python bindings for the Qt cross platform application toolkit"
optional = false
python-versions = ">=3.7"
files = [
    {file = "PyQt5-5.15.9-cp37-abi3-macosx_10_13_x86_64.whl", hash = "sha256:883ba5c8a348be78c8be6a3d3ba014c798e679503bce00d76c666c2dc6afe828"},
    {file = "PyQt5-5.15.9-cp37-abi3-manylinux_2_17_x86_64.whl", hash = "sha256:dd5ce10e79fbf1df29507d2daf99270f2057cdd25e4de6fbf2052b46c652e3a5"},
    {file = "PyQt5-5.15.9-cp37-abi3-win32.whl", hash = "sha256:e45c5cc15d4fd26ab5cb0e5cdba60691a3e9086411f8e3662db07a5a4222a696"},
    {file = "PyQt5-5.15.9-cp37-abi3-win_amd64.whl", hash = "sha256:e030d795df4cbbfcf4f38b18e2e119bcc9e177ef658a5094b87bb16cac0ce4c5"},
    {file = "PyQt5-5.15.9.tar.gz", hash = "sha256:dc41e8401a90dc3e2b692b411bd5492ab559ae27a27424eed4bd3915564ec4c0"},
]

[package.dependencies]
PyQt5-Qt5 = ">=5.15.2"
PyQt5-sip = ">=12.11,<13"

[[package]]
name = "pyqt5-qt5"
version = "5.15.2"
description = "The subset of a Qt installation needed by PyQt5."
optional = false
python-versions = "*"
files = [
    {file = "PyQt5_Qt5-5.15.2-py3-none-macosx_10_13_intel.whl", hash = "sha256:76980cd3d7ae87e3c7a33bfebfaee84448fd650bad6840471d6cae199b56e154"},
    {file = "PyQt5_Qt5-5.15.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:1988f364ec8caf87a6ee5d5a3a5210d57539988bf8e84714c7d60972692e2f4a"},
    {file = "PyQt5_Qt5-5.15.2-py3-none-win32.whl", hash = "sha256:9cc7a768b1921f4b982ebc00a318ccb38578e44e45316c7a4a850e953e1dd327"},
    {file = "PyQt5_Qt5-5.15.2-py3-none-win_amd64.whl", hash = "sha256:750b78e4dba6bdf1607febedc08738e318ea09e9b10aea9ff0d73073f11f6962"},
]

[[package]]
name = "pyqt5-sip"
version = "12.12.2"
description = "The sip module support for PyQt5"
optional = false
python-versions = ">=3.7"
files = [
    {file = "PyQt5_sip-12.12.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1cc49c8498c34649325d53bcd243c854391f828d9bab4f2f3afd3ee3451cab72"},
    {file = "PyQt5_sip-12.12.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c8f6e7a697d0ddf754798988fae7b2a0da04f6a59fb13ae863e5d1da4b280c4f"},
    {file = "PyQt5_sip-12.12.2-cp310-cp310-win32.whl", hash = "sha256:7e572c8104e75db2c69609d195daf44c7b965dcb1c5b48e30fc376868909be56"},
    {file = "PyQt5_sip-12.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:6a65697aa0fdb66e20d7b1ef8adfacc1caf1e61655530920172bf3a2fb1148cd"},
    {file = "PyQt5_sip-12.12.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:761e018dbbc46daccdb01f8f0dcc0d055c76834d839f0343cbec4b0ecbbde512"},
    {file = "PyQt5_sip-12.12.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9d2b127ba5155bff452944b8a96ba06d7ec2161f48a2f9cc190425bfca94ab6b"},
    {file = "PyQt5_sip-12.12.2-cp311-cp311-win32.whl", hash = "sha256:26e75bc4ffd8e6b19ae96fe93dc135eb5aea03e4570724d4b3c40dbf36f3a2e6"},
    {file = "PyQt5_sip-12.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:d9548f353f17407d00f67d08c737de9f5c067352c3bdac8571492c614c2893eb"},
    {file = "PyQt5_sip-12.12.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7e640b7636d86271ba8969b260e1655068b44750f20801ebc80f49a1aa737bf9"},
    {file = "PyQt5_sip-12.12.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e46d957fbeecaa1437f2dd715407b1e59e0918cc29382c7ea79784c5f3cbe0d2"},
    {file = "PyQt5_sip-12.12.2-cp37-cp37m-win32.whl", hash = "sha256:cb4523097f1ccabb95b3197a58278a40fc944b33791d3406bfa397e12303b6c6"},
    {file = "PyQt5_sip-12.12.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ed04bd0065d870912c1b0a4b34b8a78698c76d77f15474c3e841b0b6dd2f429f"},
    {file = "PyQt5_sip-12.12.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:71795c177010e52109812b03ec919020461ec42a7d9d241a45fe6d708529b5a6"},
    {file = "PyQt5_sip-12.12.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de06b6bd8241a189f729b8c093ce5dcf5928489eb7748bda28e28324e57544b0"},
    {file = "PyQt5_sip-12.12.2-cp38-cp38-win32.whl", hash = "sha256:7050ad8f94370eb7e4caa022b7e6d8b2de615e0714b557ca2098c82c0132074a"},
    {file = "PyQt5_sip-12.12.2-cp38-cp38-win_amd64.whl", hash = "sha256:67eed70427d3291e5c52c349fb4619c57c9a8810ab8d78a142c00edcbfd20d3b"},
    {file = "PyQt5_sip-12.12.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf74db9a1542f66793ccc00e403c8c2c36c67c0cff0fb01d23fe71cc1c56c788"},
    {file = "PyQt5_sip-12.12.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:23e983119f760dc6c1a1e6cb21fd4c268d14c4ee497de6da9ce2b9d46f9779b2"},
    {file = "PyQt5_sip-12.12.2-cp39-cp39-win32.whl", hash = "sha256:a88ce85176639723f04cf5ce59157ecf3a9faca5d5dd1fe82d5ef46a3bd1d102"},
    {file = "PyQt5_sip-12.12.2-cp39-cp39-win_amd64.whl", hash = "sha256:7f13e71f5171f30d8b4176c081f0203a43e1704746b4cdaa837477945177b2a0"},
    {file = "PyQt5_sip-12.12.2.tar.gz", hash = "sha256:10d9bfa9f59f0fd1cad81be187479316ffc95684f573efea94512cb4257d2b17"},
]

[[package]]
name = "qtpy"
version = "2.3.1"
description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)."
optional = false
python-versions = ">=3.7"
files = [
    {file = "QtPy-2.3.1-py3-none-any.whl", hash = "sha256:5193d20e0b16e4d9d3bc2c642d04d9f4e2c892590bd1b9c92bfe38a95d5a2e12"},
    {file = "QtPy-2.3.1.tar.gz", hash = "sha256:a8c74982d6d172ce124d80cafd39653df78989683f760f2281ba91a6e7b9de8b"},
]

[package.dependencies]
packaging = "*"

[package.extras]
test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"]

[[package]]
name = "quick"
version = "1.0"
description = ""
optional = false
python-versions = "*"
files = []
develop = false

[package.dependencies]
click = ">=6.5"
qtpy = "*"

[package.extras]
qtstyle = ["qdarkstyle"]

[package.source]
type = "git"
url = "https://github.com/szsdk/quick"
reference = "HEAD"
resolved_reference = "38f2733b782d2ff161fb448c5e176e35f98f586e"

[[package]]
name = "requests"
version = "2.31.0"
@@ -152,6 +312,24 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "rich"
version = "13.5.2"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.7.0"
files = [
    {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"},
    {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"},
]

[package.dependencies]
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"

[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]

[[package]]
name = "urllib3"
version = "2.0.4"
@@ -172,4 +350,4 @@ zstd = ["zstandard (>=0.18.0)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "681313232aa173dc3fcfed50799fb91962d7a5672eaca41f1c7b31369311add1"
content-hash = "7fc941a65f9002a0a2353df4e0058d9a145971ac273bffe57bcf69e82b8f2b8d"
@@ -10,6 +10,9 @@ packages = [{include = "verbanote_client"}]
python = "^3.11"
requests = "^2.31.0"
click = "^8.1.7"
quick = {git = "https://github.com/szsdk/quick"}
pyqt5 = "^5.15.9"
rich = "^13.5.2"

[project.scripts]
vb = "verbanote.main:main"
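The dependency block above adds click, rich, PyQt5 and the git-sourced quick package to the existing requests dependency. A small sanity-check sketch, not part of the commit, that confirms the new runtime dependencies resolve after a `poetry install` (note the import names differ from the PyPI names for pyqt5):

import importlib

# Import names for the runtime dependencies declared in pyproject.toml above;
# "quick" is the git dependency on github.com/szsdk/quick.
for module in ("click", "requests", "rich", "quick", "PyQt5"):
    importlib.import_module(module)  # raises ModuleNotFoundError if the install is incomplete
print("all runtime dependencies import cleanly")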
@@ -1,67 +1,160 @@
import time
import requests
import logging
import time
import sys
import click
from datetime import timedelta
from math import floor
from pathlib import Path

args = sys.argv
from dataclasses import dataclass
import click
from rich.console import Console
from rich.table import Table
from rich.live import Live
import quick

# TODO turn all this into config style options or @click-style flags/options
logging.basicConfig(level=logging.INFO)

pod_id = "7x0b7u16s6vyrc"
bearer_token = "EIWX9RO18PRXCD0RUSY26MSD062GUF6REQGGV6QB"

api = f"https://api.runpod.ai/v2/{pod_id}"
run_endpoint = f"{api}/run"
status_endpoint = f"{api}/status"
health_endpoint = f"{api}/health"
purge_endpoint = f"{api}/purge-queue"

headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {bearer_token}",
}
console = Console()


@dataclass
class Config:
    endpoint: str
    token: str
    headers: dict[str, str]


@quick.gui_option()
@click.group()
@click.option("--endpoint", "-e", help="URL of runpod serverless endpoint.")
@click.pass_context
@click.option("--endpoint", "-e", help="URL of runpod serverless endpoint to use.")
@click.option("--token", "-t", help="Access token for runpod instance.")
# TODO @click.version_option()
def cli(token):
def cli(ctx, endpoint, token):
    """Verbanote

    Transcribes any audio file given using OpenAI's whisper AI
    and pyannote for speaker detection.
    """
    print(f"Token: {token}")
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {token}",
    }
    options: Config = Config(endpoint=endpoint, token=token, headers=headers)
    ctx.obj = options


@cli.command()
@click.pass_obj
@click.argument(
    "audiofile",
    type=click.Path(exists=True, dir_okay=False, readable=True, path_type=Path),
)
def start(audiofile):
def start(config: Config, audiofile: Path) -> None:
    """Start processing the given audiofile.

    Queues a job in the processing queue of the AI api.
    """
    endpoint_new_job = f"{config.endpoint}/run"
    with console.status("[bold green]Uploading data..."):
        url = _upload_to_oxo(audiofile)

    input_data = {"input": {"url": url}}
    logging.info(f"Requesting new job for {audiofile}...")
    response = requests.post(run_endpoint, json=input_data, headers=headers)
    click.echo(f"Job {response} has been queued.")
    console.log(f"[green]Requesting new job for[/green] {audiofile}...")
    response = requests.post(endpoint_new_job, json=input_data, headers=config.headers)
    job_id = response.json()["id"]
    console.log(f"[green]Job[/green] {job_id} [green]has been queued.[/green]")
    print_job_status(config, job_id)


@cli.command()
def health():
    logging.info("requesting health status...")
    resp = requests.get(health_endpoint, headers=headers)
    click.echo(resp)
@click.pass_obj
def health(config: Config) -> None:
    endpoint_health = f"{config.endpoint}/health"
    with console.status("[bold green]Requesting health status..."):
        resp = requests.get(endpoint_health, headers=config.headers)
    json = resp.json()
    console.print_json(data=json)


@cli.command()
@click.pass_obj
@click.argument("job_id")
def job(config: Config, job_id: str) -> None:
    print_job_status(config, job_id)


def cancel(config: Config, job_id: str) -> None:
    ...


STATUS_MAPPING = {
    "IN_QUEUE": "[yellow]queued[/yellow]",
    "IN_PROGRESS": "[blue]running[/blue]",
    "CANCELLED": "[orange1]cancelled[/orange1]",
    "COMPLETED": "[green]complete[/green]",
    "FAILED": "[red]failed[/red]",
}


def print_job_status(config: Config, job_id: str) -> None:
    result = _request_job_state(config, job_id)
    if not result:
        return

    values: dict[str, str] = {}
    sw_start: float = time.time()
    sw_current: timedelta = timedelta()
    def rebuild_table():
        table = Table()
        table.add_column("Status")
        table.add_column("Time running")
        table.add_column("Job ID")
        table.add_column("Diarization")
        table.add_column("Transcription")
        table.add_row(
            values.get("status", "unknown"),
            str(sw_current),
            job_id,
            values.get("diarization", "..."),
            values.get("transcription", "..."),
        )
        return table

    with Live(get_renderable=rebuild_table, refresh_per_second=1):
        while True:
            result = _request_job_state(config, job_id, silent=True)
            sw_current = timedelta(seconds=floor(time.time() - sw_start))
            values: dict[str, str] = {
                "status": STATUS_MAPPING[result["status"]],
                "transcription": result.get("transcription_url", "..."),
                "diarization": result.get("diarization_url", "..."),
            }

            if result["status"] != "IN_QUEUE" and result["status"] != "IN_PROGRESS":
                break
            time.sleep(1)


def _request_job_state(config: Config, id: str, silent: bool = False) -> dict:
    endpoint_health = f"{config.endpoint}/status/{id}"
    if silent:
        response = requests.get(endpoint_health, headers=config.headers)
    else:
        with console.status(
            f"[bold green]Requesting job[/bold green] {id}"
            " [bold green]status...[/bold green]"
        ):
            response = requests.get(endpoint_health, headers=config.headers)
    if response.status_code == 404:
        console.log(f"[red]Job[/red] {id} [red]not found on endpoint.[/red]")
        return {}
    if not response.ok:
        raise requests.exceptions.HTTPError()
    return response.json()


# TODO switch server component to be able to use S3 storage options
def _upload_to_oxo(file: Path, url: str = "https://0x0.st", expires: int = 2) -> str:
    resp = requests.post(
        url=url,
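For reference, a standalone sketch of the Live-table pattern that print_job_status uses above. It exercises only the rich API shown in the diff; the job states below are placeholders rather than real API responses. Live calls the get_renderable callback on every refresh, so mutating the dict the callback reads is enough to update the table:

import time
from rich.live import Live
from rich.table import Table

values = {"status": "queued"}

def rebuild_table() -> Table:
    # Rebuilt from scratch on each refresh, mirroring rebuild_table() above.
    table = Table()
    table.add_column("Status")
    table.add_row(values["status"])
    return table

with Live(get_renderable=rebuild_table, refresh_per_second=1):
    for state in ("queued", "running", "complete"):  # stand-in for polling the API
        values["status"] = state
        time.sleep(1)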
@@ -69,42 +162,35 @@ def _upload_to_oxo(file: Path, url: str = "https://0x0.st", expires: int = 2) ->
    )
    if not resp.ok:
        raise requests.exceptions.HTTPError()
    logging.info(f"Uploaded file {file} to {str(resp.content)}")
    console.log(f"Uploaded file {file} to {str(resp.content)}")
    return str(resp.content)


def main(args: list[str]) -> None:
    if args[1] == "status":
        if len(args) <= 2:
            logging.error("No job id to get status from supplied.")
            sys.exit(1)
        logging.info(f"requesting job {args[2]} status...")
        response = requests.get(f"{status_endpoint}/{args[2]}", headers=headers)
    elif args[1] == "cancel":
        if len(args) <= 2:
            logging.error("No job id to cancel supplied.")
            sys.exit(1)
        logging.info(f"requesting job {args[2]} cancellation...")
        response = requests.get(f"{status_endpoint}/{args[2]}", headers=headers)
    elif args[1] == "purge":
        logging.info("purging all jobs in queue...")
        response = requests.post(purge_endpoint, headers=headers)

    json = response.json()

    # the json will be similar to
    # {'id': 'e3d2e250-ea81-4074-9838-1c52d006ddcf', 'status': 'IN_QUEUE'}

    while "status" in json and (
        json["status"] == "IN_QUEUE" or json["status"] == "IN_PROGRESS"
    ):
        logging.info(f"{json['status']} for job {json['id']}, waiting...")
        time.sleep(3)
        response = requests.get(f"{status_endpoint}/{json['id']}", headers=headers)
        json = response.json()

    logging.info(json)
# def main(args: list[str]) -> None:
#     if args[1] == "status":
#     elif args[1] == "cancel":
#         if len(args) <= 2:
#             logging.error("No job id to cancel supplied.")
#             sys.exit(1)
#         logging.info(f"requesting job {args[2]} cancellation...")
#         response = requests.get(f"{status_endpoint}/{args[2]}", headers=headers)
#     elif args[1] == "purge":
#         logging.info("purging all jobs in queue...")
#         response = requests.post(purge_endpoint, headers=headers)
#
#     # the json will be similar to
#     # {'id': 'e3d2e250-ea81-4074-9838-1c52d006ddcf', 'status': 'IN_QUEUE'}
#
#     while "status" in json and (
#         json["status"] == "IN_QUEUE" or json["status"] == "IN_PROGRESS"
#     ):
#         logging.info(f"{json['status']} for job {json['id']}, waiting...")
#         time.sleep(3)
#         response = requests.get(f"{status_endpoint}/{json['id']}", headers=headers)
#         json = response.json()
#
#     logging.info(json)


if __name__ == "__main__":
    cli(auto_envvar_prefix='VERBANOTE')
    cli(auto_envvar_prefix="VERBANOTE")
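To illustrate the "quick gui" half of the commit, here is a toy group using the same quick + click decorator order as cli() above. This is a sketch under the assumption, taken from the quick project rather than from this diff, that gui_option() adds a --gui flag which renders the click commands as a Qt form via qtpy/PyQt5:

import click
import quick

@quick.gui_option()
@click.group()
def demo() -> None:
    """Toy command group mirroring the decorator stack on cli()."""

@demo.command()
@click.option("--name", default="world", help="Who to greet.")
def greet(name: str) -> None:
    click.echo(f"hello {name}")

if __name__ == "__main__":
    demo()  # invoking with --gui is expected to open the generated window instead of running in the terminal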