Compare commits
No commits in common. "6e8132d311bfcd6a2ee65851c1c39a9cca20dd00" and "a9d7f96ad2b0596f59b37e762216a01060a3f1f7" have entirely different histories.
6e8132d311 ... a9d7f96ad2
10 changed files with 97 additions and 286 deletions
@@ -1,4 +1,4 @@
-Name Description Source Target Injections
+Name Description Source Target
 aaxtomp3 Convert Audible's .aax filetype to MP3, FLAC, M4A, or OPUS A
 acpid A daemon for delivering ACPI power management events with netlink support R
 afew Initial tagging script for notmuch mail R
@@ -203,7 +203,7 @@ offpunk-git Fork of the command-line Gemini client AV-98 with added offline capa
 oh-my-zsh-git A community-driven framework for managing your zsh configuration. Includes 180+ optional plugins and over 120 themes to spice up your morning, and an auto-update tool so that makes it easy to keep up with the latest updates from the community A
 os-prober Utility to detect other OSes on a set of drives R
 pacman-contrib Contributed scripts and tools for pacman systems R
-papis Papis is a powerful and highly extensible command-line based document and bibliography manager. P whoosh,papis-zotero,papis-scihub,git+https://git.martyoeh.me/Marty/papis-extract.git,git+https://github.com/supersambo/papis-tui
+papis-tui A tui interface for papis bibliography manager P whoosh
 parallel A shell tool for executing jobs in parallel R
 parsec-bin Remotely connect to a gaming pc for a low latency remote computing experience A
 paru-bin Feature packed AUR helper A

Can't render this file because it has a wrong number of fields in line 29.

@@ -1,7 +1,12 @@
 Name Description Source Target
-adbfs-rootless-git fuse filesystem over adb tool for android devices, no device root required A
-eslint An AST-based pattern checker for JavaScript R
-m4b-tool-bin A command line utility to merge, split and chapterize audiobook files such as mp3, ogg, flac, m4a or m4b A
-qutebrowser-qt6-git A keyboard-driven, vim-like browser based on PyQt5 (Qt 6 branch) A
-texlive-latexextra TeX Live - LaTeX additional packages R
-time Utility for monitoring a program's use of system resources R
+ddrescue GNU data recovery tool R
+go-mtpfs-git Simple tool for viewing MTP devices as FUSE filesystems A
+most A terminal pager similar to 'more' and 'less' R
+netbird-bin A WireGuard-based mesh network that connects your devices into a single private network A
+odysseyra1n Bootstraps Procursus and install Sileo/libhooker on devices jailbroken with checkra1n. A
+papis-zotero Zotero remote server for papis A
+python-whoosh Fast, pure-Python full text indexing, search, and spell checking library R
+shadow-tech Desktop client for Shadow Tech cloud gaming service. A
+signal-desktop Signal Private Messenger for Linux R
+testdisk Checks and undeletes partitions + PhotoRec, signature based recovery tool R
+texlive-latexextra TeX Live - Large collection of add-on packages for LaTeX R

@@ -9,9 +9,11 @@
 annotate = yes
 [alias]
 ignore = "!gitignore -f"
+pushmerge = "push -o merge_request.merge_when_pipeline_succeeds" # see https://docs.gitlab.com/ce/user/project/push_options.html # merge-when-pipeline-succeeds-alias
 last = "diff HEAD~ HEAD"
 pushall = "!git remote | xargs -I R git push R" # push to all connected remotes
 fetchall = "!git remote | xargs -I R git fetch R" # fetch from all connected remotes
+
 [commit]
 gpgsign = true # sign commits as me
 verbose = true # Always show diff when preparing commit message

@ -58,9 +58,9 @@ def_key "g"
|
||||||
move_home
|
move_home
|
||||||
def_key "U"
|
def_key "U"
|
||||||
update_database
|
update_database
|
||||||
def_key "S"
|
def_key "s"
|
||||||
reset_search_engine
|
reset_search_engine
|
||||||
def_key "S"
|
def_key "s"
|
||||||
show_search_engine
|
show_search_engine
|
||||||
#def_key "f"
|
#def_key "f"
|
||||||
# show_browser
|
# show_browser
|
||||||
|
|
|
@@ -1,11 +1,8 @@
 -- Start quarto session
 local startsession = function(file, args)
-file = file or "/tmp/jupyter-magma-session.json"
-local path = require("util").get_python_venv()
+local path, _ = require("util").get_python_venv()
 vim.g["python3_host_prog"] = path
-if vim.fn.executable('jupyter-console') ~= 1 then return end
+file = file or "/tmp/jupyter-magma-session.json"
 
 if args then
 file = args[0]
 end

@@ -57,7 +57,8 @@ local function on_attach(client, bufnr)
 { buffer = bufnr, desc = "Next error" }
 )
 
-require("which-key").register({ ["<localleader>l"] = { name = "+lsp" } })
+local prefix = require("which-key").register
+prefix({ ["<localleader>l"] = { name = "+lsp" } })
 map("n", "<localleader>li", "<cmd>LspInfo<cr>", { buffer = bufnr, desc = "Lsp Info" })
 map(
 "n",

@@ -1,32 +1,41 @@
-local util = require("lspconfig.util")
+local util = require("lspconfig/util")
 local path = util.path
 local T = {}
 local exepath = vim.fn.exepath
 
+local path_sep = function()
+local is_win = vim.loop.os_uname().sysname:find("Windows")
+if is_win then
+return "\\"
+else
+return "/"
+end
+end
+
 -- from https://github.com/ray-x/navigator.lua/issues/247#issue-1465308677
 T.get_path = function(workspace)
 -- Use activated virtualenv.
 if vim.env.VIRTUAL_ENV then
-return path.join(vim.env.VIRTUAL_ENV, "bin", "python")
+return path.join(vim.env.VIRTUAL_ENV, "bin", "python"), "virtual env"
 end
 
 -- Find and use virtualenv in workspace directory.
 for _, pattern in ipairs({ "*", ".*" }) do
 local match = vim.fn.glob(path.join(workspace, pattern, "pyvenv.cfg"))
+local py = "bin" .. path_sep() .. "python"
 if match ~= "" then
-local py = path.join("bin", "python")
 match = string.gsub(match, "pyvenv.cfg", py)
-return match
+return match, string.format("venv base folder: %s", match)
 end
 match = vim.fn.glob(path.join(workspace, pattern, "poetry.lock"))
 if match ~= "" then
 local venv_base_folder = vim.fn.trim(vim.fn.system("poetry env info -p"))
-return path.join(venv_base_folder, "bin", "python")
+return path.join(venv_base_folder, "bin", "python"), string.format("venv base folder: %s", venv_base_folder)
 end
 end
 
 -- Fallback to system Python.
-return exepath("python3") or exepath("python") or "python"
+return exepath("python3") or exepath("python") or "python", "fallback to system python path"
 end
 
 return T

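For reference, a minimal standalone sketch of the interpreter lookup order the Lua helper above encodes (activated VIRTUAL_ENV, then a pyvenv.cfg or poetry.lock inside the workspace, then whatever python is on PATH). The function name and the plain-Python form are illustrative only, not part of these dotfiles:

```python
# Illustrative sketch only (assumed name find_python); it mirrors the lookup
# order of the Lua get_path helper using just the Python standard library.
import glob
import os
import shutil
import subprocess


def find_python(workspace: str) -> str:
    # 1. An activated virtualenv wins.
    if os.environ.get("VIRTUAL_ENV"):
        return os.path.join(os.environ["VIRTUAL_ENV"], "bin", "python")
    # 2. A virtualenv inside the workspace (any dir holding pyvenv.cfg),
    #    or a poetry-managed environment resolved via `poetry env info -p`.
    for pattern in ("*", ".*"):
        for cfg in glob.glob(os.path.join(workspace, pattern, "pyvenv.cfg")):
            return os.path.join(os.path.dirname(cfg), "bin", "python")
        if glob.glob(os.path.join(workspace, pattern, "poetry.lock")):
            base = subprocess.run(
                ["poetry", "env", "info", "-p"],
                capture_output=True, text=True, cwd=workspace,
            ).stdout.strip()
            if base:
                return os.path.join(base, "bin", "python")
    # 3. Fall back to whichever python is on PATH.
    return shutil.which("python3") or shutil.which("python") or "python"
```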
@@ -1,31 +1,18 @@
 import random
-from urllib import parse
 import re
 from qutebrowser.api import interceptor
-from qutebrowser.extensions.interceptors import QUrl, RedirectException
+from qutebrowser.extensions.interceptors import RedirectException
 from qutebrowser.utils import message
 
-def fixScribePath(url: QUrl):
+def fixScribePath(url):
 """ Fix external medium blog to scribe translation.
 Some paths from medium will go through a 'global identity'
 path which messes up the actual url path we want to go
 to and puts it in queries. This puts it back on the path.
 """
-# double unquoting necessary!
-# I suppose we double-wrap it earlier somewhere?
-# unquoted = parse.unquote(
-# url.path(options=QUrl.ComponentFormattingOption.FullyEncoded)
-# )
-path = parse.unquote(f"{url.path()}{url.query()}", encoding='ascii')
-url.setQuery(None)
-new_path = re.sub(r"m/global-identity-2redirectUrl=", "", path)
-url.setPath(
-parse.quote(new_path),
-mode=QUrl.ParsingMode.StrictMode,
-)
-return url
+new_path = f"{url.path()}{url.query()}"
+url.setQuery("")
+url.setPath(re.sub(r"m/global-identity-2redirectUrl=", "", new_path))
 
 
 redirects = {
 "youtube": {
@@ -39,6 +26,7 @@ redirects = {
 "yt.funami.tech",
 "iv.melmac.space",
 "invidious.silur.me",
+"inv.riverside.rocks",
 "invidious.lidarshield.cloud",
 "invidious.flokinet.to",
 "invidious.snopyta.org",
@@ -70,6 +58,7 @@ redirects = {
 "source": ["reddit.com"],
 "target": [
 "td.vern.cc",
+"teddit.adminforge.de",
 "teddit.artemislena.eu",
 "teddit.bus-hit.me",
 "teddit.hostux.net",
@@ -77,6 +66,7 @@ redirects = {
 "teddit.net",
 "teddit.pussthecat.org",
 "teddit.sethforprivacy.com",
+"teddit.totaldarkness.net",
 "teddit.zaggy.nl",
 ],
 },
@@ -165,11 +155,14 @@ redirects = {
 "source": ["medium.com"],
 "target": [
 "scribe.rip",
+"scribe.nixnet.services",
 "scribe.citizen4.eu",
 "scribe.bus-hit.me",
+"scribe.froth.zone",
+"scribe.privacydev.net",
 "sc.vern.cc",
 ],
-"postprocess": fixScribePath,
+"postprocess": fixScribePath
 },
 "google": {
 "source": ["google.com"],
@@ -206,24 +199,22 @@ def rewrite(request: interceptor.Request):
 
 url = request.request_url
 
-if service := _should_be_redirected(url.host()):
-# TODO integrate pinging and always surf to fastest?
+for service in redirects.values():
+matched = False
+for source in service["source"]:
+if re.search(source, url.host()):
+matched = True
+
+if matched:
 target = service["target"][random.randint(0, len(service["target"]) - 1)]
 if target is not None and url.setHost(target) is not False:
 if "postprocess" in service:
-url = service["postprocess"](url)
+service["postprocess"](url)
 try:
 request.redirect(url)
 except RedirectException as e:
 message.error(str(e))
+break
 
-def _should_be_redirected(host: str, redirects: dict = redirects) -> dict | None:
-for service in redirects.values():
-for source in service["source"]:
-if re.search(source, host):
-return service
-return None
 
 
 interceptor.register(rewrite)

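For context, the rewrite hook's core idea independent of the qutebrowser API: each service maps source host patterns to a pool of privacy-frontend target hosts, and a request whose host matches a source gets one target picked at random. A standalone sketch with a trimmed, illustrative redirects table (not the full one from this config):

```python
# Standalone sketch, not the qutebrowser script itself; the redirects table
# below is abbreviated to two illustrative services.
import random
import re

redirects = {
    "youtube": {"source": ["youtube.com"], "target": ["yewtu.be", "invidious.flokinet.to"]},
    "medium": {"source": ["medium.com"], "target": ["scribe.rip", "sc.vern.cc"]},
}


def pick_target(host: str) -> str | None:
    """Return a replacement host for `host`, or None if no service matches."""
    for service in redirects.values():
        if any(re.search(source, host) for source in service["source"]):
            return random.choice(service["target"])
    return None


if __name__ == "__main__":
    print(pick_target("www.youtube.com"))  # one of the two youtube targets
    print(pick_target("example.org"))      # None
```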
@@ -1,74 +1,52 @@
-base:
-vimflavour: nvim
 documentlist:
 defaultstyle: multiline
-marked-icon: ""
+marked-icon: '*'
 multilinestyle:
 rows:
-- "<cyan>{doc.alias('type')} {doc['ref']} {doc.forfile('')}</cyan>"
-- "<white><bold>{doc.html_escape['title']}</bold></white>"
-- "<blue>{doc.html_escape['author']}</blue>"
-- "{doc.foreach('tags', '<lightgray>(</lightgray><lightgray><dim>{}</dim></lightgray><gray>)</gray>', split = ', ', sep = ' ')}"
+- <red>{doc.html_escape["ref"]}</red>
+- <bold>{doc.html_escape["title"]}<bold>
+- <cyan>{doc.html_escape["author"]}</cyan>
 tablestyle:
-separator: " "
-headerstyle: "underline|bold"
-rowstyle: "white_bg"
-cursorrowstyle: "black_white|bold"
 columns:
-- {
-header: " #",
-content: "{doc.alias('type')} {str(len(doc.get_files()) if len(doc.get_files()) > 0 else '')}",
-width: 3,
-}
-- { header: "Reference", content: "@{doc['ref']}", width: 15 }
-- { header: "Year", content: "{str(doc['year'])}", width: 4 }
-- { header: "Authors", content: "{doc['author']}", width: 20 }
-- { header: "Title", content: "{doc['title']}", width: 200 }
-aliases:
-type:
-{
-article: "",
-book: "",
-incollection: "",
-inbook: '',
-software: "",
-presentation: "",
-thesis: "",
-_default_: "",
-}
+- content: '{doc.html_escape["ref"]}'
+header: Ref
+width: 15
+- content: '{doc.html_escape["author"]}'
+header: Author
+width: 30
+- content: '{doc.html_escape["year"]}'
+header: Year
+width: 4
+- content: '{doc.html_escape["title"]}'
+header: Titel
+width: 400
+cursorrowstyle: black_white
+headerstyle: underline|bold
+rowstyle: white_bg
+separator: " \u2502 "
 keymappings:
-q: quit
-"?": help
-S: toggle_style
+' ': mark_down
 /: search_mode
 <key_down>: scroll_down
 <key_up>: scroll_up
 <ctrl-f>: page_down
 <ctrl-b>: page_up
+'?': help
 G: jump_to_bottom
 gg: jump_to_top
 j: scroll_down
 k: scroll_up
-o: open -r "pdf$"
-O: open -d
-r: view_reset
-R: reload
+o: open
 e: edit
 n: papis edit -n papis_id:{doc['papis_id']}
-" ": mark_selected
-mm: mark_selected
-M: mark_down
-mu: unmark_all
-mv: mark_view
-t: cmd "tag "
-i:
+q: quit
+t: tag
+ii:
 - info_toggle
 - "Toggle info window"
-I:
+ij:
 - info_cycle
 - "Cycle info windows"
-# TODO look into https://github.com/supersambo/papis-tui vim-send mappings
 
 infowindow:
 default_on: False
 views:
@@ -78,18 +56,8 @@ infowindow:
 content: "{doc['abstract']}"
 linewrap: True
 height: 8
-
-commandline:
-search:
-keyword_aliases: {a: 'author:', t: 'title:', y: 'year:', k: 'tags:'}
-
 statusbar:
 left:
-default: "<black_green><bold> {info['mode_upper']} </black_green></bold><green_bg></green_bg>"
-normal: "<black_green><bold> {info['mode_upper']} </black_green></bold><green_bg></green_bg>"
-command: "<black_cyan><bold> {info['mode_upper']} </black_cyan></bold><cyan_bg></cyan_bg>"
-select: "<black_red><bold> {info['mode_upper']} </black_red></bold><red_bg></red_bg>"
-search: "<black_magenta><bold> {info['mode_upper']} </black_magenta></bold><magenta_bg></magenta_bg>"
+default: <black_white> {info["mode_upper"]} <black_white>
 right:
-default: "<green>{info['sortkeys']} </green><cyan_bg></cyan_bg><black_cyan> {info['idx']} < {info['marked']} < {info['view']} < {info['items']} </black_cyan>"
+default: <black_white> {info["idx"]} < {info["marked"]} < {info["view"]} < {info["items"]} <black_white>

@@ -1,163 +0,0 @@
-#!/usr/bin/env python
-# papis-short-help: Import iOS Marvin exported csv annotations
-#
-# This script can be used to import your highlights and notes from
-# the iOS application 'Marvin Reader'. In the app, export your
-# annotations as 'csv' format and then point the script to the
-# resulting file.
-# https://git.martyoeh.me/Marty/papis-marvin
-import os
-import sys
-import re
-import logging
-from typing import Dict
-import papis.api
-import papis.pick
-import papis.format
-import papis.commands.edit
-import papis.commands.list
-import papis.commands.add
-import papis.notes
-import papis.config
-import papis.database
-import isbnlib
-import papis.isbn
-
-logger = logging.getLogger("marvin")
-logger.setLevel(logging.DEBUG)
-
-DEFAULT_CSV_PATH = "/home/marty/Nextcloud/Personal/Backups/Journal.csv"
-
-
-def main(fpath, db):
-with open(fpath) as f:
-import csv
-
-csv = csv.DictReader(f)
-notes = get_all_annotations(db, csv)
-
-write_to_files(notes)
-
-
-def get_all_annotations(db, csv) -> Dict:
-notes = {}
-note_file = ""
-for row in csv:
-# switch to next book
-if not is_same_book(row["Title"]):
-doc = get_document(db, row["Author"], row["Title"])
-if not doc:
-continue
-note_file = get_notefile(db, doc)
-
-text = format_entry(row)
-
-if note_file and text:
-if note_file not in notes.keys():
-notes[note_file] = []
-notes[note_file].append(text)
-return notes
-
-
-def get_document(db, author, title):
-res = query_document(db, author, title)
-if not res:
-add_to_database(author, title)
-res = query_document(db, author, title)
-if not res:
-logger.warning(f"Nothing found for {author}: {title}.\nPlease create manually.")
-return
-return res
-
-
-# TODO warn user/ let him pick with picker if multiple docs found
-def query_document(db, author, title):
-title = strip_string(title)
-for query in [f"author:({author}) title:({title})"]:
-print(f"query: {query}")
-res = db.query(query)
-if len(res) >= 1:
-return res[0]
-
-
-def add_to_database(author, title, confirm=True, edit=False):
-logger.info(f"Searching - '{title} {author}'")
-data = None
-try:
-data = papis.isbn.get_data(f"{title}")
-except isbnlib.ISBNLibException as e:
-logger.error(e)
-else:
-logger.warning(f"Found: {data}")
-if data:
-papis_data = papis.isbn.data_to_papis(data[0])
-papis.commands.add.run([], data=papis_data, confirm=confirm, edit=edit)
-
-
-def get_notefile(db, document) -> str | None:
-if not document.has("notes"):
-notes_name = papis.config.getstring("notes-name")
-document["notes"] = papis.format.format(notes_name, document)
-document.save()
-db.update(document)
-
-notes_path = os.path.join(str(document.get_main_folder()), document["notes"])
-
-if not os.path.exists(notes_path):
-# TODO reimplement logger: logger.debug("Creating '%s'", notes_path)
-papis.notes.notes_path_ensured(document)
-return notes_path
-
-
-# TODO implement custom formatting (akin to pubs-extract)
-def format_entry(row) -> str:
-text = f"> {row['HighlightText']}"
-if row["EntryText"]:
-if text:
-text += "\n"
-else:
-text = "> "
-text += f"{row['EntryText']}"
-return text
-
-
-_old_title = ""
-
-
-def is_same_book(title):
-global _old_title
-
-same = _old_title == title
-_old_title = title
-if same:
-return True
-return False
-
-
-def write_to_files(notes: Dict):
-# write to notes
-for f, entries in notes.items():
-if f:
-with open(f, "a") as note:
-logger.info(f"Editing {f}...")
-num_added = 0
-for entry in entries:
-with open(f) as noteread:
-if entry not in noteread.read():
-note.write(f"{entry}\n\n")
-num_added += 1
-logger.info(f"Added {num_added} entries to it.")
-
-
-strip_pattern = re.compile(r"([^\s\w]|_)+\w*")
-
-
-def strip_string(title) -> str:
-return strip_pattern.sub("", title)
-
-
-if __name__ == "__main__":
-# use argument passed to command as file or default file here
-fpath = sys.argv[1] if len(sys.argv) > 1 else DEFAULT_CSV_PATH
-
-main(fpath, papis.database.get())

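The deleted importer's main safeguard was appending an annotation to a note file only when the file does not already contain it. A standalone sketch of that append-if-missing step (function and file names here are illustrative, not from the script):

```python
# Illustrative sketch of the append-only-if-missing behaviour the removed
# write_to_files() used; names are assumptions, not the original API.
def append_missing(path: str, entries: list[str]) -> int:
    """Append each entry to `path` unless it is already present; return the count."""
    added = 0
    with open(path, "a+") as note:
        note.seek(0)
        existing = note.read()
        for entry in entries:
            if entry not in existing:
                note.write(f"{entry}\n\n")  # "a+" writes always land at the end
                existing += f"{entry}\n\n"
                added += 1
    return added


if __name__ == "__main__":
    print(append_missing("notes.md", ["> first highlight", "> second highlight"]))
```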
writing/.config/papis/scripts/papis-marvin (symbolic link)
@@ -0,0 +1 @@
+/home/marty/projects/python/papis/marvin/papis-marvin