Compare commits
8 commits
a9d7f96ad2 ... 6e8132d311
6e8132d311
cf0544b0a5
a7a8ebf8ed
fd12c2b962
0033116a73
c96a1feec4
9e43483fd8
0088a35f44
10 changed files with 287 additions and 98 deletions
@@ -1,4 +1,4 @@
Name Description Source Target
Name Description Source Target Injections
aaxtomp3 Convert Audible's .aax filetype to MP3, FLAC, M4A, or OPUS A
acpid A daemon for delivering ACPI power management events with netlink support R
afew Initial tagging script for notmuch mail R
@@ -203,7 +203,7 @@ offpunk-git Fork of the command-line Gemini client AV-98 with added offline capa
oh-my-zsh-git A community-driven framework for managing your zsh configuration. Includes 180+ optional plugins and over 120 themes to spice up your morning, and an auto-update tool so that makes it easy to keep up with the latest updates from the community A
os-prober Utility to detect other OSes on a set of drives R
pacman-contrib Contributed scripts and tools for pacman systems R
papis-tui A tui interface for papis bibliography manager P whoosh
papis Papis is a powerful and highly extensible command-line based document and bibliography manager. P whoosh,papis-zotero,papis-scihub,git+https://git.martyoeh.me/Marty/papis-extract.git,git+https://github.com/supersambo/papis-tui
parallel A shell tool for executing jobs in parallel R
parsec-bin Remotely connect to a gaming pc for a low latency remote computing experience A
paru-bin Feature packed AUR helper A
@@ -1,12 +1,7 @@
Name Description Source Target
ddrescue GNU data recovery tool R
go-mtpfs-git Simple tool for viewing MTP devices as FUSE filesystems A
most A terminal pager similar to 'more' and 'less' R
netbird-bin A WireGuard-based mesh network that connects your devices into a single private network A
odysseyra1n Bootstraps Procursus and install Sileo/libhooker on devices jailbroken with checkra1n. A
papis-zotero Zotero remote server for papis A
python-whoosh Fast, pure-Python full text indexing, search, and spell checking library R
shadow-tech Desktop client for Shadow Tech cloud gaming service. A
signal-desktop Signal Private Messenger for Linux R
testdisk Checks and undeletes partitions + PhotoRec, signature based recovery tool R
texlive-latexextra TeX Live - Large collection of add-on packages for LaTeX R
adbfs-rootless-git fuse filesystem over adb tool for android devices, no device root required A
eslint An AST-based pattern checker for JavaScript R
m4b-tool-bin A command line utility to merge, split and chapterize audiobook files such as mp3, ogg, flac, m4a or m4b A
qutebrowser-qt6-git A keyboard-driven, vim-like browser based on PyQt5 (Qt 6 branch) A
texlive-latexextra TeX Live - LaTeX additional packages R
time Utility for monitoring a program's use of system resources R
@@ -5,15 +5,13 @@
[init]
defaultBranch = main
[sendemail]
smtpserver = "/usr/bin/msmtp"
annotate = yes
smtpserver = "/usr/bin/msmtp"
annotate = yes
[alias]
ignore = "!gitignore -f"
pushmerge = "push -o merge_request.merge_when_pipeline_succeeds" # see https://docs.gitlab.com/ce/user/project/push_options.html # merge-when-pipeline-succeeds-alias
last = "diff HEAD~ HEAD"
pushall = "!git remote | xargs -I R git push R" # push to all connected remotes
fetchall = "!git remote | xargs -I R git fetch R" # fetch from all connected remotes

pushall = "!git remote | xargs -I R git push R" # push to all connected remotes
fetchall = "!git remote | xargs -I R git fetch R" # fetch from all connected remotes
[commit]
gpgsign = true # sign commits as me
verbose = true # Always show diff when preparing commit message
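
The pushall and fetchall aliases above just enumerate every configured remote and run the verb against each one. A rough stand-alone restatement of that loop, sketched in Python purely for illustration (the helper name run_for_all_remotes is made up, not part of the config):

```python
# Illustrative sketch of what "git pushall" / "git fetchall" expand to:
# list the configured remotes, then push to / fetch from each in turn.
import subprocess


def run_for_all_remotes(verb: str) -> None:
    remotes = subprocess.run(
        ["git", "remote"], capture_output=True, text=True, check=True
    ).stdout.split()
    for remote in remotes:
        subprocess.run(["git", verb, remote], check=True)


# run_for_all_remotes("push")   # what the pushall alias does
# run_for_all_remotes("fetch")  # what the fetchall alias does
```
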
@@ -58,9 +58,9 @@ def_key "g"
move_home
def_key "U"
update_database
def_key "s"
def_key "S"
reset_search_engine
def_key "s"
def_key "S"
show_search_engine
#def_key "f"
# show_browser
@@ -1,8 +1,11 @@
-- Start quarto session
local startsession = function(file, args)
local path, _ = require("util").get_python_venv()
vim.g["python3_host_prog"] = path
file = file or "/tmp/jupyter-magma-session.json"
local path = require("util").get_python_venv()
vim.g["python3_host_prog"] = path

if vim.fn.executable('jupyter-console') ~= 1 then return end

if args then
file = args[0]
end
@@ -57,8 +57,7 @@ local function on_attach(client, bufnr)
{ buffer = bufnr, desc = "Next error" }
)

local prefix = require("which-key").register
prefix({ ["<localleader>l"] = { name = "+lsp" } })
require("which-key").register({ ["<localleader>l"] = { name = "+lsp" } })
map("n", "<localleader>li", "<cmd>LspInfo<cr>", { buffer = bufnr, desc = "Lsp Info" })
map(
"n",
@@ -1,41 +1,32 @@
local util = require("lspconfig/util")
local util = require("lspconfig.util")
local path = util.path
local T = {}
local exepath = vim.fn.exepath

local path_sep = function()
local is_win = vim.loop.os_uname().sysname:find("Windows")
if is_win then
return "\\"
else
return "/"
end
end

-- from https://github.com/ray-x/navigator.lua/issues/247#issue-1465308677
T.get_path = function(workspace)
-- Use activated virtualenv.
if vim.env.VIRTUAL_ENV then
return path.join(vim.env.VIRTUAL_ENV, "bin", "python"), "virtual env"
return path.join(vim.env.VIRTUAL_ENV, "bin", "python")
end

-- Find and use virtualenv in workspace directory.
for _, pattern in ipairs({ "*", ".*" }) do
local match = vim.fn.glob(path.join(workspace, pattern, "pyvenv.cfg"))
local py = "bin" .. path_sep() .. "python"
if match ~= "" then
local py = path.join("bin", "python")
match = string.gsub(match, "pyvenv.cfg", py)
return match, string.format("venv base folder: %s", match)
return match
end
match = vim.fn.glob(path.join(workspace, pattern, "poetry.lock"))
if match ~= "" then
local venv_base_folder = vim.fn.trim(vim.fn.system("poetry env info -p"))
return path.join(venv_base_folder, "bin", "python"), string.format("venv base folder: %s", venv_base_folder)
return path.join(venv_base_folder, "bin", "python")
end
end

-- Fallback to system Python.
return exepath("python3") or exepath("python") or "python", "fallback to system python path"
return exepath("python3") or exepath("python") or "python"
end

return T
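
The Lua helper above decides which Python interpreter the editor should use. The same lookup order, restated as a small self-contained Python sketch purely for illustration (the function name find_python is hypothetical, not part of the dotfiles): an activated $VIRTUAL_ENV wins, then a pyvenv.cfg or poetry.lock found inside the workspace, then whatever python is on $PATH.

```python
# Hypothetical Python restatement of the venv-detection order used by the
# Lua helper above; not part of the dotfiles themselves.
import glob
import os
import shutil
import subprocess


def find_python(workspace: str) -> str:
    # 1. An activated virtualenv always wins.
    if os.environ.get("VIRTUAL_ENV"):
        return os.path.join(os.environ["VIRTUAL_ENV"], "bin", "python")
    # 2. Look for a venv or a poetry project inside the workspace.
    for pattern in ("*", ".*"):
        cfg = glob.glob(os.path.join(workspace, pattern, "pyvenv.cfg"))
        if cfg:
            return os.path.join(os.path.dirname(cfg[0]), "bin", "python")
        lock = glob.glob(os.path.join(workspace, pattern, "poetry.lock"))
        if lock:
            base = subprocess.run(
                ["poetry", "env", "info", "-p"],
                capture_output=True, text=True, check=True,
            ).stdout.strip()
            return os.path.join(base, "bin", "python")
    # 3. Fall back to whatever the system provides.
    return shutil.which("python3") or shutil.which("python") or "python"
```
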
@@ -1,18 +1,31 @@
import random
from urllib import parse
import re
from qutebrowser.api import interceptor
from qutebrowser.extensions.interceptors import RedirectException
from qutebrowser.extensions.interceptors import QUrl, RedirectException
from qutebrowser.utils import message

def fixScribePath(url):
""" Fix external medium blog to scribe translation.

def fixScribePath(url: QUrl):
"""Fix external medium blog to scribe translation.
Some paths from medium will go through a 'global identity'
path which messes up the actual url path we want to go
to and puts it in queries. This puts it back on the path.
"""
new_path = f"{url.path()}{url.query()}"
url.setQuery("")
url.setPath(re.sub(r"m/global-identity-2redirectUrl=", "", new_path))
# double unquoting necessary!
# I suppose we double-wrap it earlier somewhere?
# unquoted = parse.unquote(
# url.path(options=QUrl.ComponentFormattingOption.FullyEncoded)
# )
path = parse.unquote(f"{url.path()}{url.query()}", encoding='ascii')
url.setQuery(None)
new_path = re.sub(r"m/global-identity-2redirectUrl=", "", path)
url.setPath(
parse.quote(new_path),
mode=QUrl.ParsingMode.StrictMode,
)
return url


redirects = {
"youtube": {
@@ -26,7 +39,6 @@ redirects = {
"yt.funami.tech",
"iv.melmac.space",
"invidious.silur.me",
"inv.riverside.rocks",
"invidious.lidarshield.cloud",
"invidious.flokinet.to",
"invidious.snopyta.org",
@@ -58,7 +70,6 @@ redirects = {
"source": ["reddit.com"],
"target": [
"td.vern.cc",
"teddit.adminforge.de",
"teddit.artemislena.eu",
"teddit.bus-hit.me",
"teddit.hostux.net",
@@ -66,7 +77,6 @@ redirects = {
"teddit.net",
"teddit.pussthecat.org",
"teddit.sethforprivacy.com",
"teddit.totaldarkness.net",
"teddit.zaggy.nl",
],
},
@@ -155,14 +165,11 @@ redirects = {
"source": ["medium.com"],
"target": [
"scribe.rip",
"scribe.nixnet.services",
"scribe.citizen4.eu",
"scribe.bus-hit.me",
"scribe.froth.zone",
"scribe.privacydev.net",
"sc.vern.cc",
],
"postprocess": fixScribePath
"postprocess": fixScribePath,
},
"google": {
"source": ["google.com"],
@@ -199,22 +206,24 @@ def rewrite(request: interceptor.Request):

url = request.request_url

for service in redirects.values():
matched = False
for source in service["source"]:
if re.search(source, url.host()):
matched = True
if service := _should_be_redirected(url.host()):
# TODO integrate pinging and always surf to fastest?
target = service["target"][random.randint(0, len(service["target"]) - 1)]
if target is not None and url.setHost(target) is not False:
if "postprocess" in service:
url = service["postprocess"](url)
try:
request.redirect(url)
except RedirectException as e:
message.error(str(e))

if matched:
target = service["target"][random.randint(0, len(service["target"]) - 1)]
if target is not None and url.setHost(target) is not False:
if "postprocess" in service:
service["postprocess"](url)
try:
request.redirect(url)
except RedirectException as e:
message.error(str(e))
break

def _should_be_redirected(host: str, redirects: dict = redirects) -> dict | None:
for service in redirects.values():
for source in service["source"]:
if re.search(source, host):
return service
return None


interceptor.register(rewrite)
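
The reworked rewrite above delegates host matching to _should_be_redirected, which returns the first service whose source pattern matches the request host; the caller then picks one of that service's target mirrors at random and applies any postprocess hook. A reduced, self-contained sketch of that selection logic (the two-entry table here is a stand-in, not the full dictionary from the diff):

```python
# Stand-alone sketch of the matching and mirror-selection logic; the
# redirects table is a reduced example, not the full one from the config.
import random
import re

redirects = {
    "reddit": {"source": ["reddit.com"], "target": ["teddit.net", "td.vern.cc"]},
    "medium": {"source": ["medium.com"], "target": ["scribe.rip"]},
}


def _should_be_redirected(host: str, redirects: dict = redirects) -> dict | None:
    # Return the first service whose source pattern matches the host.
    for service in redirects.values():
        for source in service["source"]:
            if re.search(source, host):
                return service
    return None


if service := _should_be_redirected("www.reddit.com"):
    # Pick one of the configured mirrors at random, as rewrite() does.
    print(random.choice(service["target"]))
```
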
@@ -1,52 +1,74 @@
base:
vimflavour: nvim
documentlist:
defaultstyle: multiline
marked-icon: '*'
marked-icon: ""
multilinestyle:
rows:
- <red>{doc.html_escape["ref"]}</red>
- <bold>{doc.html_escape["title"]}<bold>
- <cyan>{doc.html_escape["author"]}</cyan>
- "<cyan>{doc.alias('type')} {doc['ref']} {doc.forfile('')}</cyan>"
- "<white><bold>{doc.html_escape['title']}</bold></white>"
- "<blue>{doc.html_escape['author']}</blue>"
- "{doc.foreach('tags', '<lightgray>(</lightgray><lightgray><dim>{}</dim></lightgray><gray>)</gray>', split = ', ', sep = ' ')}"
tablestyle:
separator: " "
headerstyle: "underline|bold"
rowstyle: "white_bg"
cursorrowstyle: "black_white|bold"
columns:
- content: '{doc.html_escape["ref"]}'
header: Ref
width: 15
- content: '{doc.html_escape["author"]}'
header: Author
width: 30
- content: '{doc.html_escape["year"]}'
header: Year
width: 4
- content: '{doc.html_escape["title"]}'
header: Titel
width: 400
cursorrowstyle: black_white
headerstyle: underline|bold
rowstyle: white_bg
separator: " \u2502 "
- {
header: " #",
content: "{doc.alias('type')} {str(len(doc.get_files()) if len(doc.get_files()) > 0 else '')}",
width: 3,
}
- { header: "Reference", content: "@{doc['ref']}", width: 15 }
- { header: "Year", content: "{str(doc['year'])}", width: 4 }
- { header: "Authors", content: "{doc['author']}", width: 20 }
- { header: "Title", content: "{doc['title']}", width: 200 }
aliases:
type:
{
article: "",
book: "",
incollection: "",
inbook: '',
software: "",
presentation: "",
thesis: "",
_default_: "",
}
keymappings:
' ': mark_down
q: quit
"?": help
S: toggle_style
/: search_mode
<key_down>: scroll_down
<key_up>: scroll_up
<ctrl-f>: page_down
<ctrl-b>: page_up
'?': help
G: jump_to_bottom
gg: jump_to_top
j: scroll_down
k: scroll_up
o: open
o: open -r "pdf$"
O: open -d
r: view_reset
R: reload
e: edit
n: papis edit -n papis_id:{doc['papis_id']}
q: quit
t: tag
ii:
" ": mark_selected
mm: mark_selected
M: mark_down
mu: unmark_all
mv: mark_view
t: cmd "tag "
i:
- info_toggle
- "Toggle info window"
ij:
I:
- info_cycle
- "Cycle info windows"
# TODO look into https://github.com/supersambo/papis-tui vim-send mappings

infowindow:
default_on: False
views:
@@ -56,8 +78,18 @@ infowindow:
content: "{doc['abstract']}"
linewrap: True
height: 8

commandline:
search:
keyword_aliases: {a: 'author:', t: 'title:', y: 'year:', k: 'tags:'}

statusbar:
left:
default: <black_white> {info["mode_upper"]} <black_white>
default: "<black_green><bold> {info['mode_upper']} </black_green></bold><green_bg></green_bg>"
normal: "<black_green><bold> {info['mode_upper']} </black_green></bold><green_bg></green_bg>"
command: "<black_cyan><bold> {info['mode_upper']} </black_cyan></bold><cyan_bg></cyan_bg>"
select: "<black_red><bold> {info['mode_upper']} </black_red></bold><red_bg></red_bg>"
search: "<black_magenta><bold> {info['mode_upper']} </black_magenta></bold><magenta_bg></magenta_bg>"
right:
default: <black_white> {info["idx"]} < {info["marked"]} < {info["view"]} < {info["items"]} <black_white>
default: "<green>{info['sortkeys']} </green><cyan_bg></cyan_bg><black_cyan> {info['idx']} < {info['marked']} < {info['view']} < {info['items']} </black_cyan>"
@@ -1 +0,0 @@
/home/marty/projects/python/papis/marvin/papis-marvin

writing/.config/papis/scripts/papis-marvin (Executable file, 163 lines)
@@ -0,0 +1,163 @@
#!/usr/bin/env python
# papis-short-help: Import iOS Marvin exported csv annotations
#
# This script can be used to import your highlights and notes from
# the iOS application 'Marvin Reader'. In the app, export your
# annotations as 'csv' format and then point the script to the
# resulting file.
# https://git.martyoeh.me/Marty/papis-marvin
import os
import sys
import re
import logging
from typing import Dict
import papis.api
import papis.pick
import papis.format
import papis.commands.edit
import papis.commands.list
import papis.commands.add
import papis.notes
import papis.config
import papis.database
import isbnlib
import papis.isbn

logger = logging.getLogger("marvin")
logger.setLevel(logging.DEBUG)

DEFAULT_CSV_PATH = "/home/marty/Nextcloud/Personal/Backups/Journal.csv"


def main(fpath, db):
    with open(fpath) as f:
        import csv

        csv = csv.DictReader(f)
        notes = get_all_annotations(db, csv)

    write_to_files(notes)


def get_all_annotations(db, csv) -> Dict:
    notes = {}
    note_file = ""
    for row in csv:
        # switch to next book
        if not is_same_book(row["Title"]):
            doc = get_document(db, row["Author"], row["Title"])
            if not doc:
                continue
            note_file = get_notefile(db, doc)

        text = format_entry(row)

        if note_file and text:
            if note_file not in notes.keys():
                notes[note_file] = []
            notes[note_file].append(text)
    return notes


def get_document(db, author, title):
    res = query_document(db, author, title)
    if not res:
        add_to_database(author, title)
        res = query_document(db, author, title)
        if not res:
            logger.warning(f"Nothing found for {author}: {title}.\nPlease create manually.")
            return
    return res


# TODO warn user/ let him pick with picker if multiple docs found
def query_document(db, author, title):
    title = strip_string(title)
    for query in [f"author:({author}) title:({title})"]:
        print(f"query: {query}")
        res = db.query(query)
        if len(res) >= 1:
            return res[0]


def add_to_database(author, title, confirm=True, edit=False):
    logger.info(f"Searching - '{title} {author}'")
    data = None
    try:
        data = papis.isbn.get_data(f"{title}")
    except isbnlib.ISBNLibException as e:
        logger.error(e)
    else:
        logger.warning(f"Found: {data}")
    if data:
        papis_data = papis.isbn.data_to_papis(data[0])
        papis.commands.add.run([], data=papis_data, confirm=confirm, edit=edit)


def get_notefile(db, document) -> str | None:
    if not document.has("notes"):
        notes_name = papis.config.getstring("notes-name")
        document["notes"] = papis.format.format(notes_name, document)
        document.save()
        db.update(document)

    notes_path = os.path.join(str(document.get_main_folder()), document["notes"])

    if not os.path.exists(notes_path):
        # TODO reimplement logger: logger.debug("Creating '%s'", notes_path)
        papis.notes.notes_path_ensured(document)
    return notes_path


# TODO implement custom formatting (akin to pubs-extract)
def format_entry(row) -> str:
    text = f"> {row['HighlightText']}"
    if row["EntryText"]:
        if text:
            text += "\n"
        else:
            text = "> "
        text += f"{row['EntryText']}"
    return text


_old_title = ""


def is_same_book(title):
    global _old_title

    same = _old_title == title
    _old_title = title
    if same:
        return True
    return False


def write_to_files(notes: Dict):
    # write to notes
    for f, entries in notes.items():
        if f:
            with open(f, "a") as note:
                logger.info(f"Editing {f}...")
                num_added = 0
                for entry in entries:
                    with open(f) as noteread:
                        if entry not in noteread.read():
                            note.write(f"{entry}\n\n")
                            num_added += 1
                logger.info(f"Added {num_added} entries to it.")


strip_pattern = re.compile(r"([^\s\w]|_)+\w*")


def strip_string(title) -> str:
    return strip_pattern.sub("", title)


if __name__ == "__main__":
    # use argument passed to command as file or default file here
    fpath = sys.argv[1] if len(sys.argv) > 1 else DEFAULT_CSV_PATH

    main(fpath, papis.database.get())