Migrate from .stow-rename to --dotfiles
parent e344a02c65
commit c9c512486d
23 changed files with 0 additions and 2 deletions
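GNU Stow 2.3 introduced --dotfiles, which translates a dot- prefix on package entries into a leading dot at the target, so these scripts can be tracked as dot-local/... yet linked under ~/.local/...; it replaces the .stow-rename approach this repository previously used. A minimal restow sketch, assuming the repository is a package named dotfiles checked out directly under $HOME (the location and package name are illustrative, not taken from this commit):

# Minimal sketch (assumed layout): relink the package so that, for example,
# dot-local/bin/kit ends up at ~/.local/bin/kit.
cd ~                                                # assumed: the package lives at ~/dotfiles
stow --dotfiles --restow --target="$HOME" dotfiles  # 'dotfiles' is an assumed package name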
dot-local/bin/andromeda (Executable file, 34 lines)
@@ -0,0 +1,34 @@
#!/bin/zsh
print >&2 "Andromeda Start Shell Script

This requires projekt.andromeda to be installed on the device.
Make sure the device is connected and ADB option enabled.
Please only have one device connected at a time to use this!"

shell() {
    adb shell "$@" || exit $?
}

# Let's first grab the location where Andromeda is installed
pkg=( "$(shell pm path projekt.andromeda)" )
pkg=( ${(f)pkg} )
pkg=( ${pkg#package:} )
# These steps could all be done as a one-liner but this way is easier to read.

# Quit Substratum if it's running.
shell am force-stop projekt.substratum

# If Andromeda is already running, we have to kill it rather than just force-stop it. :(
pid="$(shell pidof andromeda)"
if [[ -n $pid ]]; then
    print "Existing Andromeda process found with PID $pid, killing..." >&2
    # Additionally, Andromeda only responds to SIGKILL, not SIGTERM, which is absolutely disgusting.
    shell kill -9 $pid
fi

print "Launching Andromeda..." >&2
shell <<EOF
appops set projekt.andromeda RUN_IN_BACKGROUND allow
appops set projekt.substratum RUN_IN_BACKGROUND allow
CLASSPATH=${(j.:.)pkg} app_process /system/bin --nice-name=andromeda projekt.andromeda.Andromeda >/dev/null 2>&1 &
EOF
dot-local/bin/beet-play-mpd (Executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/bin/zsh
mv $1 $XDG_DATA_HOME/mpd/playlists/beet-play-mpd.m3u &&
mpc clear >/dev/null &&
mpc load beet-play-mpd >/dev/null &&
mpc play
dot-local/bin/http.server.cors (Executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/usr/bin/env python3
from http.server import HTTPServer, SimpleHTTPRequestHandler, test
import sys

class CORSRequestHandler(SimpleHTTPRequestHandler):
    def end_headers(self):
        self.send_header('Access-Control-Allow-Origin', '*')
        super().end_headers()

if __name__ == '__main__':
    test(CORSRequestHandler, HTTPServer, port=int(sys.argv[1]) if len(sys.argv) > 1 else 8000)
dot-local/bin/j2y (Executable file, 91 lines)
@@ -0,0 +1,91 @@
#!/usr/bin/env python3

from yaml import dump

try:
    from yaml import CDumper as Dumper
except ImportError:
    from yaml import Dumper

from collections.abc import Sized
import json, re, sys

def dict_representer(dumper, d):
    node = dumper.represent_dict(d)
    # Don't use YAML flow style for large dicts, because the flow style output
    # only really looks good with a very small number of keys.
    if len(d) > 5 or sum(len(v) for v in d.values() if isinstance(v, Sized)) > 30:
        node.flow_style = False
    return node
Dumper.add_representer(dict, dict_representer)

def cat(files):
    usedStdin = False
    for f in files:
        if f == '-':
            if usedStdin: continue
            usedStdin = True
        with open(f) if f != '-' else sys.stdin as s:
            for line in s: yield line

# http://stackoverflow.com/a/7795029/1208816
braces = '{}[]'
whitespace_esc = ' \t'
braces_esc = '\\' + '\\'.join(braces)
braces_pat = '[' + braces_esc + ']'
no_braces_pat = '[^' + braces_esc + ']*'
until_braces_pat = re.compile(no_braces_pat + braces_pat)
balance_map = dict(zip(braces, [1, -1, 1, -1]))

def streamingfinditer(pat, stream):
    for s in stream:
        while True:
            m = pat.search(s)
            if not m:
                yield (0, s)
                break
            yield (1, m.group())
            s = pat.split(s, 1)[1]

def simpleorcompoundobjects(stream):
    obj = ""
    unbalanced = 0
    for (c, m) in streamingfinditer(until_braces_pat, stream):
        if (c == 0): # remainder of line returned, nothing interesting
            if (unbalanced == 0):
                yield (0, m)
            else:
                obj += m
        if (c == 1): # match returned
            if (unbalanced == 0):
                yield (0, m[:-1])
                obj += m[-1]
            else:
                obj += m
            unbalanced += balance_map[m[-1]]
            if (unbalanced == 0):
                yield (1, obj)
                obj = ""

def streamingiterload(stream):
    for c, o in simpleorcompoundobjects(stream):
        for x in iterload(o):
            yield x

# http://stackoverflow.com/a/6886743/1208816
def iterload(string_or_fp, cls=json.JSONDecoder, **kwargs):
    try:
        string = string_or_fp.read()
    except AttributeError:
        string = str(string_or_fp)

    decoder = cls(**kwargs)
    idx = json.decoder.WHITESPACE.match(string, 0).end()
    while idx < len(string):
        obj, end = decoder.raw_decode(string, idx)
        yield obj
        idx = json.decoder.WHITESPACE.match(string, end).end()

files = sys.argv[1:] or ('-',)
for obj in streamingiterload(cat(files)):
    print(dump(obj, Dumper=Dumper, explicit_start=True), end='', flush=True)
dot-local/bin/kit (Executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/zsh
: ${XDG_CACHE_HOME:=~/.cache}
MAC_KITTY=/Applications/kitty.app/Contents/MacOS

if (( $+commands[kitty] == 0 )); then
    if [[ -x $MAC_KITTY/kitty ]]; then
        path=($MAC_KITTY $path)
    else
        print "Sorry, kitty must be installed to use this script! https://sw.kovidgoyal.net/kitty" >&2
        exit 1
    fi
fi
exec kitty @ --to unix:$XDG_CACHE_HOME/kitty/kitty.sock "$@"
dot-local/bin/link-brewed-php (Executable file, 7 lines)
@@ -0,0 +1,7 @@
#!/usr/bin/env zsh
cd ~/bin || exit $?
for bin in /usr/local/opt/php@*/bin/php; do
    name=php${${bin##*@}%%/*}
    echo "Linking ~/bin/$name -> $bin" >&2
    ln -fs $bin $name
done
dot-local/bin/luma-both-to-png (Executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/bin/zsh
for date in ${(u)argv[@]%_*.bmp}; do
    magick ${date}_top.bmp ${date}_bot.bmp -alpha set -gravity Center -background 'rgba(0,0,0,0)' -append ${date}.png || exit $?
done
dot-local/bin/luma-to-png (Executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/bin/zsh
for src in $argv; do
    dest=$src:r.png
    [[ $src = *.png ]] && dest=${src:r}_twitter.png
    magick $src -alpha set -fill 'rgba(0,0,0,0)' -draw 'color 0,0 point' $dest || exit $?
done
dot-local/bin/mf2 (Executable file, 24 lines)
@@ -0,0 +1,24 @@
#!/usr/bin/env python3
import mf2py
import sys

args = sys.argv[1:]
if not args:
    args.append('-')


def parse(**kwargs):
    return mf2py.Parser(html_parser='html5lib', **kwargs).to_json()


for arg in args:
    if arg.startswith('//'):
        arg = 'https:' + arg
    if arg.startswith('http'):
        doc = parse(url=arg)
    elif arg == '-':
        doc = parse(doc=sys.stdin)
    else:
        with open(arg, 'r') as f:
            doc = parse(doc=f)
    print(doc)
dot-local/bin/push (Executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/bin/zsh
zparseopts -D -A opts -- t: e:
autoload pass-unpack
typeset -A creds
pass-unpack Productivity/Simplepush creds

enc_key=$(echo -n $creds[password]$creds[salt] | sha1sum | awk '{print toupper($1)}' | cut -c1-32)
iv=$(openssl enc -aes-128-cbc -k dummy -P -md sha1 | grep iv | cut -d = -f 2)

encrypt() {
    openssl aes-128-cbc -base64 -K $enc_key -iv $iv | awk '{print}' ORS='' | tr + - | tr / _
}

msg="$(cat -- "$@" | encrypt)"
extras=()
[[ -n $opts[-t] ]] && extras+=(title=$(encrypt <<<$opts[-t]))
[[ -n $opts[-e] ]] && extras+=(event=$opts[-e])

http -f --ignore-stdin https://api.simplepush.io/send key=$creds[key] msg=$msg iv=$iv encrypted=true $extras
dot-local/bin/sav-quotes (Executable file, 45 lines)
@@ -0,0 +1,45 @@
#!/usr/bin/env zsh
list_srcs() {
    local src
    for src in $quotesrc_dir/get-*-quotes; do
        src=${src:t}
        src=${src#get-}
        src=${src%-quotes}
        print $src
    done
}
save_from_src() {
    mkdir -p ${dest:h}
    rm -f $dest

    for i in {1..5}
    do
        $src >> $dest
        (( i < 5 )) && echo % >> $dest
        wc -l $dest
    done
    strfile $dest
}

quotesrc_dir=${XDG_DATA_HOME:=~/.local/share}/sav-quotes
dest=${XDG_CACHE_HOME:=~/.cache}/sav-quotes/quotes
src_spec=${1:-bash}

if [[ $src_spec = --completion ]]; then
    list_srcs
    exit 0
fi

src=$quotesrc_dir/get-${src_spec}-quotes
if [[ -x $src ]]; then
    if ! $src &>/dev/null; then
        print Could not connect to quotesrc $src_spec >&2
        exit 1
    fi
    save_from_src $src
else
    echo "Invalid quotesrc specified: $src_spec"
    echo "Valid quotesrcs are:"
    list_srcs
    exit 1
fi
dot-local/bin/unphp (Executable file, 22 lines)
@@ -0,0 +1,22 @@
#!/usr/bin/env php
<?php
if (is_readable('./vendor/autoload.php')) require './vendor/autoload.php';

function to_array($obj) {
    if (is_array($obj)) return array_map('to_array', $obj);
    if (!is_object($obj)) return $obj;
    $cls = new \ReflectionClass($obj);
    $fields = ['__class__' => $cls->name];
    foreach ($cls->getProperties() as $prop) {
        $prop->setAccessible(true);
        $fields[$prop->getName()] = to_array($prop->getValue($obj));
    }
    return $fields;
}

$args = array_slice($argv, 1);
if (empty($args)) $args[] = '-';
foreach ($args as $arg) {
    if ($arg === '-') $arg = 'php://stdin';
    echo json_encode(to_array(unserialize(file_get_contents($arg)))) . PHP_EOL;
}
dot-local/bin/up (Executable file, 22 lines)
@@ -0,0 +1,22 @@
#!/bin/zsh

upload() {
    http --check-status --ignore-stdin --form \
        POST https://up.00dani.me/ \
        Authorization:$UP_TOKEN \
        file@$1
}

if (( $# == 0 )); then
    upload =(cat)
    exit $?
fi

for f; do
    if [[ ! -r $f ]]; then
        print "up: $f is not a readable file" >&2
        exit 1
    fi
    upload $f || exit $?
    print
done
dot-local/bin/y2j (Executable file, 17 lines)
@@ -0,0 +1,17 @@
#!/usr/bin/env python3
try:
    from yaml import CSafeLoader as SafeLoader
except ImportError:
    from yaml import SafeLoader
import sys, json

files = sys.argv[1:] or ('-',)

stdinUsed = False
for f in files:
    if f == '-':
        if stdinUsed: continue
        stdinUsed = True
    with open(f) if f != '-' else sys.stdin as stream:
        l = SafeLoader(stream)
        while l.check_data(): print(json.dumps(l.get_data()))
dot-local/share/sav-quotes/format-bash-quotes.sed (Normal file, 4 lines)
@@ -0,0 +1,4 @@
s/\#.*/%/
1,/Add Quote \/ ModApp \/ Search /d
/Home \/ Latest /,$d
/^ *$/d;s/^ *//
dot-local/share/sav-quotes/format-qdb-quotes.sed (Normal file, 10 lines)
@@ -0,0 +1,10 @@
# first we remove indentation from the start of lines
s/^ *//
# Convert the quote headers to %
1,$ s/^\#.*/%/
s/Comment:/\nComment:/
s/- \[.*//
/\[\s\+\]/d
1,/TodayLatestBestTopWorst/d
/save page/,$d
/^ *$/d
dot-local/share/sav-quotes/format-xkcdb-quotes.sed (Normal file, 6 lines)
@@ -0,0 +1,6 @@
# first we remove indentation from the start of lines
s/^ *//
# Convert the quote headers to %
1,$ s/^\#[0-9].*/%/
# and remove the header
/%/,$!d
dot-local/share/sav-quotes/get-bash-quotes (Executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

links -dump http://bash.org/\?random1 | sed -f "$dir/format-bash-quotes.sed"
exit ${PIPESTATUS[0]}
dot-local/share/sav-quotes/get-qdb-quotes (Executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

links -dump http://www.qdb.us/random | sed -f "$dir/format-qdb-quotes.sed"
exit ${PIPESTATUS[0]}
dot-local/share/sav-quotes/get-whatthecommit-quotes (Executable file, 6 lines)
@@ -0,0 +1,6 @@
#!/usr/bin/env bash

for i in {1..5}; do
    curl -s http://whatthecommit.com/index.txt || exit 1
    echo "%"
done
dot-local/share/sav-quotes/get-xkcdb-quotes (Executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/usr/bin/env bash
sum() {
    declare -i acc
    for i; do acc+=i; done
    echo $acc
}

curl -s 'http://www.xkcdb.com/?random1' |
    hxnormalize -i 0 -l 99999999999 -x | hxselect -cs '<br>\n%<br>\n' 'span.quote' |
    html2text -b 0 | hxunent | sed '$d; s/ *$//' | sed '$d'
exit $(sum ${PIPESTATUS[*]})