onny:notizen

bash

lzma compression and extraction with tar

tar -c --lzma -f my_archive.tar.lzma /some_directory
tar -x --lzma -f my_archive.tar.lzma

run script verbose

sh -x scripname.sh

cheap python virtualenv

mkdir path
ln -s /usr/bin/python2 path/python
export PATH="$srcdir/path:$PATH"

get process runtime by pid, where pid is 1234 in this example

ps -o etime= -p "1234" 

write command output to file and to stdout (python -u for unbuffered output)

python3 -u sperrmuell.py 2>&1 | tee sperrmuell_ka.csv

recursively replace string in files

find . -type f -print0 | xargs -0 sed -i 's/twentytwelve/projectinsanity/g'

prepend to LD_LIBRARY_PATH for a single command

LD_LIBRARY_PATH="/home/onny/projects/onlyoffice-documentserver/src/DocumentServer-ONLYOFFICE-DocumentServer-5.2.7/core/build/lib/linux_64/:$LD_LIBRARY_PATH" ./AllFontsGen

compare command line argument to string

#!/bin/bash
if [ "$1" = "-v" ]; then
	wf-recorder -g "$(slurp)" -f "$(xdg-user-dir PICTURES)/$(date +'%Y-%m-%d-%H%M%S_wf-recorder.mp4')"
else
	slurp | grim -g - - | wl-copy && wl-paste > "$(xdg-user-dir PICTURES)/$(date +'%Y-%m-%d-%H%M%S_grim.png')"
fi

check whether a file exists

if [ ! -f /tmp/foo.txt ]; then
    echo "File not found!"
fi

program exit codes

exit 0 # okay
exit 1 # fail
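
Example of acting on an exit code in a script (minimal sketch):

if ! grep -q root /etc/passwd; then
    echo "root not found"
    exit 1
fi
exit 0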

trap signals in bash

trap_with_arg() {
    func="$1" ; shift
    for sig ; do
        trap "$func $sig" "$sig"
    done
}
 
func_trap() {
    echo "Trapped: $1"
}
 
trap_with_arg func_trap INT TERM EXIT
 
echo "Send signals to PID $$ and type [enter] when done."
read # Wait so the script doesn't exit.

check file checksum

echo "$SHA256SUM  $FILE" \
  | sha256sum -c
 
if [ $? != 0 ]; then
  echo 'checkra1n checksum is not valid'
  exit 1
fi

launcher or wrapper: pass arguments into script

FILE=/tmp/checkra1n-0.12.4-beta
chmod +x $FILE
$FILE "$@"

grep

regex match group

grep -Po "(?<=Version: )([0-9]|\.)*(?=\s|\$)" style.css

fd

search for all files with specific extension in directory /

fd --type f -e fm . /

zip all files with specific file extension

fd -t f -e fm . / | zip source -@

find files changed within a time span

fd --type f -e pdf --change-newer-than "2020-09-09" --changed-before "2020-10-01" .

execute command

fd --type f -e doc -e docx -e pdf -i gabriel -x cp --backup=t {} /tmp/gabriel/

find filenames with disallowed characters

fd '[^A-Z a-züö@0-9._-]' remote/200_Archiv/CP_Dont_Touch

patching

applying a patch

diff -u original.c new.c > original.patch
patch < original.patch
# patch -p0 < original.patch
# patch -p1 -i packaging-fix.patch

creating patch

git commit -am "my changes"
git format-patch "HEAD^"

xargs

fd . | xargs -I {} rm "{}"

rsync

custom ssh port

rsync -rvz -e 'ssh -p 2222' --progress --remove-sent-files ./dir user@host:/path

parallel, threaded

ls -1 | parallel rsync -a {} /destination/directory/

openssh

deploy SSH public key

ssh-copy-id alarm@10.0.0.2

remote port forwarding (expose a local port on the remote host)

ssh -R 0.0.0.0:8096:localhost:8096 onny@example.com
/etc/ssh/sshd_config
[...]
GatewayPorts yes
[...]

networking

nftables

nft list ruleset
nft flush ruleset
nft -f ruleset.nft

display handles, insert rule at position

nft -a list ruleset
nft add rule inet filter input position 17 tcp dport "{http, https}" accept
nft delete rule inet filter input handle 23

sysctl

disable ipv6

sysctl net.ipv6.conf.all.disable_ipv6=1
sysctl net.ipv6.conf.default.disable_ipv6=1
sysctl net.ipv6.conf.lo.disable_ipv6=1

iptables

connection sharing, iptables-fu (internet0 is the interface connected to the internet):

sysctl net.ipv4.ip_forward=1
iptables -t nat -A POSTROUTING -o internet0 -j MASQUERADE
iptables -A FORWARD -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT
iptables -A FORWARD -i net0 -o internet0 -j ACCEPT

picloud network sharing & port forwarding openwrt

sysctl net.ipv4.ip_forward=1
iptables -t nat -A POSTROUTING -o wlan0 -j MASQUERADE
iptables -A FORWARD -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT
iptables -A FORWARD -i net0 -o wlan0 -j ACCEPT
iptables -I FORWARD -o br-lan -d 192.168.1.2 -j ACCEPT
iptables -t nat -I PREROUTING -p tcp --dport 8096 -j DNAT --to 192.168.1.2:8096
iptables -t nat -A OUTPUT -p tcp --dport 8096 -j DNAT --to 192.168.1.2:8096
iptables -t nat -I PREROUTING -p tcp --dport 2222 -j DNAT --to 192.168.1.2:22
iptables -t nat -A OUTPUT -p tcp --dport 2222 -j DNAT --to 192.168.1.2:22


dnsmasq

minimal hostapd and dnsmasq config

/etc/dnsmasq/dnsmasq.conf
interface=wlan0
listen-address=172.24.1.1
bind-interfaces
server=8.8.8.8 
#port=0 # disable dns
domain-needed
bogus-priv
dhcp-range=172.24.1.50,172.24.1.150,12h
/etc/hostapd/hostapd.conf
interface=wlan0
driver=nl80211
ssid=MyAP
hw_mode=g
channel=11
wpa=1
wpa_passphrase=MyPasswordHere
wpa_key_mgmt=WPA-PSK
wpa_pairwise=TKIP CCMP
wpa_ptk_rekey=600

ifplugd

/etc/ifplugd/ifplugd.action
#!/bin/sh
ifname="$1"
action="$2"

case "$action.$ifname" in
up.enp0s10)
	systemctl restart systemd-networkd
	;;
down.enp0s10)
	;;
esac
/etc/ifplugd/ifplugd.conf
INTERFACES="enp0s10"
ARGS="-fwI -u0 -d10"
systemctl restart ifplugd@enp0s25
systemctl enable ifplugd@enp0s25

document manipulation

pdf document manipulation

compression

gm convert -density 200x200 -units PixelsPerInch -compress jpeg -quality 70 in.pdf out.pdf

lossless merge

pdfunite in-1.pdf in-2.pdf in-n.pdf out.pdf

extract page range

pdftk campus_italia.pdf cat 1-280 output campus_italia_a1a2.pdf

insert into pdf

pdftk A=bigpdf.pdf B=insert.pdf cat A1-180 B A181-end output output.pdf

imagemagick: convert pictures to equally sized pdf pages

i=300; convert a.png b.png -compress jpeg -quality 100 \
      -density ${i}x${i} -units PixelsPerInch \
      -resize $((i*827/100))x$((i*1169/100)) \
      -gravity center \
      -extent $((i*827/100))x$((i*1169/100)) multipage.pdf
convert a.jpeg b.pdf -compress jpeg -quality 70 -density 300x300 -units PixelsPerInch -resize 2481x3507 -gravity center -extent 2481x3507 multipage.pdf

scale pdf pages to specific size (a4)

cpdf -scale-to-fit "210mm 297mm" Scheine\ Germanistik\ 3.pdf -o Scheine\ Germanistik\ 4.pdf

ffmpeg

Constant quality AV1. The CRF value can be from 0–63. Lower values mean better quality.

ffmpeg -i input.mp4 -c:v libaom-av1 -crf 30 -strict experimental av1_test.mp4

Burn subtitles, fast video conversion

ffmpeg -i Kawamata\ -\ La\ passage\ des\ chaises.mkv -vf subtitles=Kawamata\ -\ La\ passage\ des\ chaises.mkv -acodec copy -preset:v ultrafast Kawamata\ -\ La\ passage\ des\ chaises.mp4

lossless mp3 merge

ffmpeg -f concat -i <(printf "file '%s'\n" ./*.mp3) -c copy output.mp3

lossless audio extraction (stream copy; assumes the audio track is already mp3)

ffmpeg -i videofile.mp4 -vn -acodec copy audiofile.mp3

extract from mkv

n=`mkvinfo ${base}.mkv |grep "Track type" |grep -n "audio" |cut -d":" -f1`
audTrack=`echo "${n} - 1" |bc`
mkvextract tracks ${base}.mkv ${audTrack}:${base}.ac3

security

web discovery

photon

photon -u test.example.org

lynis

lynis audit system --quick

subbrute

check for subdomains

torify subbrute leel.de

wfuzz

wfuzz -c --hc 404 -w /opt/wfuzz/wordlist/general/megabeast.txt http://www.leeel.de/FUZZ
wfuzz -c --hc 404,403 -w /opt/wfuzz/wordlist/general/admin-panels.txt -w /opt/wfuzz/wordlist/general/extensions_common.txt http://www.leeel.de/FUZZFUZ2Z

Preparing data for LFI scan

cat /var/cache/pkgfile/* | grep -a ".*/.*\.conf$" | sort | uniq > lfi

exploit kits

sec tools

chromium / chrome

disable gpu blacklist, enable nouveau hardware acceleration

chromium --ignore-gpu-blacklist

docker

Pull

docker pull ubuntu:22.04
docker pull rootlogin/nextcloud:develop

Run

docker run -td ubuntu:22.04

Executing

docker exec -it ffffdfdfsdfsdfsfsffsdfs /bin/bash

Nextcloud

docker run -d -p 80:80 rootlogin/nextcloud
docker run -v /home/onny/projects/nextcloud-app-radio:/opt/nextcloud/apps/radio -d --name nextcloud -p 80:80 rootlogin/nextcloud

Debugging it

 docker run -i -t e326cbb922aa /bin/bash # start a shell from the image
 docker exec -i -t e326cbb922aa /bin/bash # exec a new shell in the running container

Pull from repository

 docker pull eugeneware/docker-wordpress-nginx
 docker run -p 80:80 -d docker-wordpress-nginx
 docker ps
 docker commit e5a70884ac44 eugeneware/docker-wordpress-nginx:aenderungen1
 # docker stop / run
 docker run -t -i -v /home/onny/projects/web-whackspace:/usr/share/nginx/www/wp-content/themes/whackspace -p 80:80 -d e326cbb922aa
 docker run -i -t e326cbb922aa /bin/bash

Build from Dockerfile

 cd  ~/projects/docker-invoiceplane-nginx
 sudo docker build -t="docker-invoiceplane-nginx" .
 sudo docker run -p 80:80 -d docker-invoiceplane-nginx

Build from URL

docker build -t nextcloud-testing github.com/onny/docker-nextcloud

Delete image

docker rmi <image name / id>

Export and load image

docker save myimage > myimage.tar
docker load < myimage.tar

Remove all images and containers

docker system prune -a

prevent container from auto-starting

docker update --restart=no client-iaro_db_1

docker commit container and rerun

$ docker ps -a
CONTAINER ID   IMAGE          COMMAND       CREATED              STATUS                          PORTS   NAMES
5a8f89adeead   ubuntu:14.04   "/bin/bash"   About a minute ago   Exited (0) About a minute ago           agitated_newton
$ docker commit 5a8f89adeead newimagename
$ docker run -ti -v "$PWD/dir1":/dir1 -v "$PWD/dir2":/dir2 newimagename /bin/bash

Run emulated multiarch images

docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
docker run --rm -t arm32v7/alpine uname -m
docker run -it --rm arm32v7/alpine ash
# keep it running
docker run -d -it --name alpine-armv7 --platform linux/arm/v7 arm32v7/alpine /bin/sh

wordpress docker image

docker-compose.yml
version: '3'
 
services:
   db:
     image: mysql:5.7
     volumes:
       - db_data:/var/lib/mysql
     restart: always
     environment:
       MYSQL_ROOT_PASSWORD: somewordpress
       MYSQL_DATABASE: wordpress
       MYSQL_USER: wordpress
       MYSQL_PASSWORD: wordpress
 
   wordpress:
     depends_on:
       - db
     image: wordpress:latest
     volumes:
       - .:/var/www/html/wp-content/themes/ausstellung-virtuell        
     ports:
       - "8000:80"
     restart: always
     environment:
       WORDPRESS_DB_HOST: db:3306
       WORDPRESS_DB_USER: wordpress
       WORDPRESS_DB_PASSWORD: wordpress
volumes:
    db_data:

Note the volume mount under the wordpress service's volumes section, which makes the local theme directory available inside the container.

docker-compose up -d

Visit 127.0.0.1:8000
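
Quick check from the shell (hypothetical, any HTTP client works):

curl -I http://127.0.0.1:8000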

personal system setup

signatures

gpg --keyserver pool.sks-keyservers.net --recv-keys 0x4E2C6E8793298290 # tor-browser-en AUR package

ansible

Run single command

ansible playground.pi -i hosts -m shell -a "whoami"

Limit playbook to specific host from group

ansible-playbook -i hosts archlinux-syssetup.yml -l playground.pi --ask-become-pass

Running single ansible role

picloud.yml
  roles:
     - { role: nsupdate, tags: nsupdate }
 ansible-playbook -i hosts --ask-become-pass picloud.yml --tags 'nsupdate'

Directly define server without inventory file

ansible-playbook -i "192.168.1.23," wgnas.yml --ask-become-pass

Skip specific role by tag

ansible-playbook --inventory-file=.vagrant/provisioners/ansible/inventory -v picloud.yml --skip-tags mount

playbook

Include distribution specific vars, e.g. vars/Archlinux.yml or vars/Debian.yml

tasks/main.yml
- name: Include OS-specific variables.
  include_vars: "{{ ansible_os_family }}.yml"
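
The included file only holds variables; a minimal sketch of what vars/Archlinux.yml might contain (variable names are hypothetical):

vars/Archlinux.yml
web_package: apache
web_service: httpd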

use encrypted vars with vault

ansible-vault encrypt_string --vault-password-file ~/.ansible_vault_pw my_secret
vars/auth.yml
notsecret: myvalue
mysecret: !vault |
          $ANSIBLE_VAULT;1.1;AES256
          66386439653236336462626566653063336164663966303231363934653561363964363833313662
          6431626536303530376336343832656537303632313433360a626438346336353331386135323734
          62656361653630373231613662633962316233633936396165386439616533353965373339616234
          3430613539666330390a313736323265656432366236633330313963326365653937323833366536
          34623731376664623134383463316265643436343438623266623965636363326136
other_plain_text: othervalue
ansible-playbook -i hosts -v piradio.yml --ask-become-pass --vault-password-file ~/.ansible_vault_pw

conditions

- name: Enable ufw service
  service:
    name: ufw
    enabled: yes
  when: ufw_state == "enabled"
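
ufw_state is assumed to be defined elsewhere, e.g. in the role defaults (sketch):

defaults/main.yml
ufw_state: "enabled"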

podcasts

kolhacampus archive

show genre url
PLUG-IN drum&bass http://www.icast.co.il/Rss.aspx?ID=515483
https://onny.project-insanity.org/laboumdeluxe/feed.xml # FM4 La Boum de Luxe, Music EDM Techno Radio
https://onny.project-insanity.org/bounce/feed.xml # SRF Virus Bounce, Music Hip Hop Radio

vs code

plugins

  • vetur (vuejs highlighting)

firejail

Running app without networking

firejail --net=none vlc

Running app in private mode (fresh home folder)

firejail --private firefox

Persistent user specific configuration

cat ~/.config/firejail/vlc.profile
include /etc/firejail/vlc.profile
net none

nextcloud

Sync only a specific folder with nextcloud

nextcloudcmd pictures https://nextcloud.project-insanity.org/remote.php/webdav/pictures

development

gcc

-Werror=implicit-fallthrough=
-Wno-implicit-fallthrough
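
Usage sketch with a hypothetical source file foo.c:

gcc -Werror=implicit-fallthrough -c foo.c   # promote the warning to an error
gcc -Wno-implicit-fallthrough -c foo.c      # disable the warning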

git

show remote origin

git remote show origin

change remote origin

git remote set-url origin gitlab@http-new.pi:onny/web-wikidict.git

merge commits from a remote repository

git fetch https://github.com/rfc2822/davdroid.git master
git branch -r
git merge FETCH_HEAD
"force pull", overwrite local changes
git fetch --all
git reset --hard origin/master
git branch
git branch firefox45
git checkout firefox45

new branch

git branch iss53
git checkout iss53

delete branch

git branch # list
git branch -d swaybar
git push onny -d samsung-treltexx # delete remote branch

git show all tags

git log --no-walk --tags --pretty="%h %d %s"

delete last commit

git reset --hard HEAD~1

remove sensitive files from repo

git filter-branch --force --index-filter \
'git rm --cached --ignore-unmatch PATH-TO-YOUR-FILE-WITH-SENSITIVE-DATA' \
--prune-empty --tag-name-filter cat -- --all
git push origin --force --all
git push origin --force --tags

rebase upstream

git clone git@github.com:croaky/dotfiles.git
cd dotfiles
git remote add upstream git@github.com:thoughtbot/dotfiles.git
git fetch upstream
git rebase upstream/master

git cherry pick commit for specific files

git checkout 13243f2eafc4292917178051fe1bb5aab2774dca -p include/mmc.h drivers/mmc/mmc.c arch/arm/include/asm/arch-exynos/mmc.h drivers/mmc/s5p_sdhci.c common/cmd_mmc.c common/cmd_mmc_spi.c common/env_mmc.c include/sdhci.h

rebase

git remote add upstream https://github.com/whoever/whatever.git
git fetch upstream
git checkout master
git rebase upstream/master
git push -f origin master

rebase, force overwrite upstream changes

git rebase -X theirs master

rebase branch

git checkout fragments
git rebase upstream/master

squash commits

git rebase -i upstream/master
# < choose squash for all of your commits, except the first one >
# < Edit the commit message to make sense, and describe all your changes >
git push origin omgpull -f

alternatively

git rebase -i HEAD~3

alternatively

git merge --squash apple-a9

rerun tests with empty commit

git commit --allow-empty -m 'run tests again'
git push --set-upstream USERNAME mynewbranch

force push, remove latest commit of remote repository (origin)

git reset HEAD^
git push origin +HEAD

integrate changes into last commit

git commit --amend --no-edit

overwrite branch with other branch

git checkout maddy
git reset --hard maddytest
git push onny maddy -f

modify older or specific commit

git rebase --interactive 'bbc643cd^'
# make changes
git commit --all --amend --no-edit
git rebase --continue

yum

yum install rpm-build
rpmbuild --rebuild aiccu-2007.01.15-7.el6.src.rpm
cd /root/rpmbuild/RPMS/x86_64
rpm -i aiccu-2007.01.15-7.el7.centos.x86_64.rpm

tmux

copy the whole scrollback buffer into a file. Press "Prefix + :", then enter:

capture-pane -S -3000
save-buffer filename.txt

wine

installing msi

wine msiexec /i xyz.msi

scanning

wireshark: filter only http traffic

http

arp-scan

arp-scan --interface=wlp3s0 --localnet

nmap use nse script

nmap -p 80 192.168.188.0/24 -n --open --script /usr/share/nmap/scripts/http-title.nse

debian

which package provides file XY

apt-file update
apt-file search netstat

extract deb package

ar x *.deb

Makefile

define variables with default values which can be overridden

DOCUMENT_ROOT ?= /var/www/onlyoffice/documentserver
LOG_DIR ?= /var/log/onlyoffice/documentserver
DATA_DIR ?= /var/lib/onlyoffice/documentserver/App_Data
CONFIG_DIR ?= /etc/onlyoffice/documentserver
CREATE_USER ?= TRUE

conditions

ifeq ($(CREATE_USER),TRUE)
	adduser --quiet --home ${DESTDIR}${DOCUMENT_ROOT} --system --group onlyoffice
	chown onlyoffice:onlyoffice -R ${DESTDIR}$$(dirname ${DOCUMENT_ROOT})
	chown onlyoffice:onlyoffice -R ${DESTDIR}$$(dirname ${LOG_DIR})
	chown onlyoffice:onlyoffice -R ${DESTDIR}$$(dirname $$(dirname ${DATA_DIR}))
endif

condition if directory exists

.PHONY: all
all:
ifneq ($(wildcard tileserver/*),)
	cd tileserver && git pull
else
	git clone https://github.com/maptiler/tileserver-php.git tileserver
endif

mail

echo mail server

echo@univie.ac.at
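
Quick test, assuming a local MTA with mailx is configured; the echo server mails the message back to the sender:

echo "ping" | mail -s "echo test" echo@univie.ac.at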

send smtp mail

echo -n "username" | base64
# dXNlcm5hbWU=
echo -n "password" | base64
# cGFzc3dvcmQ=
openssl s_client -connect mail.agenturserver.de:465
EHLO example.local
AUTH LOGIN
dXNlcm5hbWU=
cGFzc3dvcmQ=
MAIL FROM: <user@example.local>
RCPT TO: <admin@example.local>
DATA
Subject: I have some questions!

Question 1: ...
.
QUIT

Android

installed apps

antennapod davx5 dbnavigator fdroid fennec icsx5 jellyfin keepassdx libreoffice vlc nextcloud quicklyric radiodroid signal soundhound spotify tasks documentviewer fdroid-privilegedextension

configurations

  • antennapod subscriptions
  • fdroid pi repo
  • davx calendar & contacts
  • jellyfin config
  • nextcloud config
  • signal backup
  • radiodroid station list
  • spotify config

flash recovery

heimdall flash --RECOVERY twrp-3.2.1-1-serranoltexx.img

anbox

pacman -S anbox-git anbox-image anbox-modules-dkms-git
modprobe binder_linux ashmem_linux
systemctl restart anbox-container-manager
systemctl --user restart anbox-session-manager
anbox launch --package=org.anbox.appmgr --component=org.anbox.appmgr.AppViewActivity
wget "https://f-droid.org/FDroid.apk"
adb install FDroid.apk

davdroid

https://nextcloud.project-insanity.org/remote.php/dav

In case of 2FA, an app-specific (device) password is required

vim

comment multiple lines

CTRL + V # visual block mode
select lines, then
Shift + I # insert mode
type #
ESC

onlyoffice

quotation marks

opening quotation mark: [Alt Gr] + [V]
closing quotation mark: [Alt Gr] + [B]

wayland

run X apps as root

xhost +SI:localuser:root
sudo gparted

gpg

==> Verifying source file signatures with gpg...
    aurutils-1.5.3.tar.gz ... FAILED (unknown public key 6BC26A17B9B7018A)
==> ERROR: One or more PGP signatures could not be verified!
==> ERROR: Could not download sources.
onny@http ~ % sudo -u aur gpg --recv-keys 6BC26A17B9B7018A    

decrypt symmetric

gpg doc.gpg

tools

  • etcher: create windows, mac and linux usb flash installation sticks
    • github.com/slacka/WoeUSB
  • browsh: graphical terminal browser
  • meld compare folders
  • cpod github
  • flutter sdk
  • deezloader remix
  • scrcpy: access android screen via adb and control it
ngrep

ngrep -q -W byline "^(GET|POST) .*"
ngrep -q -W byline "search" host www.google.com and port 80

pages

  • unpaywall hack
https://outline.com/zeit.de/2011/26/Nationalsozialismus-Tagebuecher/komplettansicht

openwrt

update all packages

opkg update
opkg list-upgradable | cut -f 1 -d ' ' | xargs opkg upgrade 

dbus

dbus system monitor with filter

busctl --match "path=/net/connman/iwd" monitor

list tree

busctl tree net.connman.iwd

introspect available properties

busctl introspect net.connman.iwd /net/connman/iwd/636166652d6d6174732d67617374_psk

systemd

service hardening

PrivateTmp=true
ProtectHome=true
# Mounts the /usr, /boot, and /etc directories read-only for processes invoked by this unit.
ProtectSystem=full
# Ensures that the service process and all its children can never gain new privileges
NoNewPrivileges=true
# Sets up a new /dev namespace for the executed processes and only adds API pseudo devices
# such as /dev/null, /dev/zero or /dev/random (as well as the pseudo TTY subsystem) to it,
# but no physical devices such as /dev/sda.
PrivateDevices=true
# Explicit module loading will be denied. This allows turning off module load and unload
# operations on modular kernels. It is recommended to turn this on for most services that
# do not need special file systems or extra kernel modules to work.
ProtectKernelModules=true
# Kernel variables accessible through /proc/sys, /sys, /proc/sysrq-trigger, /proc/latency_stats,
# /proc/acpi, /proc/timer_stats, /proc/fs and /proc/irq will be made read-only to all processes
# of the unit. Usually, tunable kernel variables should only be written at boot-time, with the
# sysctl.d(5) mechanism. Almost no services need to write to these at runtime; it is hence
# recommended to turn this on for most services.
ProtectKernelTunables=true
# The Linux Control Groups (cgroups(7)) hierarchies accessible through /sys/fs/cgroup will be
# made read-only to all processes of the unit. Except for container managers no services should
# require write access to the control groups hierarchies; it is hence recommended to turn this on
# for most services
ProtectControlGroups=true
# Restricts the set of socket address families accessible to the processes of this unit.
# Protects against vulnerabilities such as CVE-2016-8655
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
# Takes away the ability to create or manage any kind of namespace
RestrictNamespaces=true

nixos

apply changes to system

sudo nixos-rebuild switch --flake '/etc/nixos#joes-desktop'
sudo nixos-rebuild boot --flake '/etc/nixos#joes-desktop' # raspi specific

update channel, rebuild and switch

nixos-rebuild switch --upgrade

search package

nix search gedit

nixos testing environment

nix-shell -p toilet

install unstable package

nix-channel --add https://nixos.org/channels/nixos-unstable unstable
nix-channel --update unstable
nix-env -iA unstable.pdfarranger

custom local repository, list packages

nix-env -f /etc/nixos/apps -qaP '*'

install package from local repo

nix-env -f /etc/nixos/apps -iA xerox6000-6010

package shell script

  # Here we put a shell script into the path, which lets us start sway.service (after importing the environment of the login shell).
  environment.systemPackages = with pkgs; [
    (
      pkgs.writeTextFile {
        name = "startsway";
        destination = "/bin/startsway";
        executable = true;
        text = ''
          #! ${pkgs.bash}/bin/bash

          # first import environment variables from the login manager
          systemctl --user import-environment
          # then start the service
          exec systemctl --user start sway.service
        '';
      }
    )
  ];

garbage collector

nix-collect-garbage -d

list package files

find $(nix eval -f /etc/nixos/apps --raw xerox6000-6010.outPath)

install package

nix-env -i icecat

remove package

nix-env -e icecat

or

sudo nix-store --delete --ignore-liveness /nix/store/1hnbdgz5yy9agnbnix2d8cvxj2d6hlc5-system-path

list installed packages

# installed via configuration.nix
nixos-option environment.systemPackages | head -2 | tail -1 | sed -e 's/ /\n/g' | cut -d- -f2- | sort | uniq
# + dependencies
nix-store --query --requisites /run/current-system
nix-store --query --requisites /run/current-system | cut -d- -f2- | sort | uniq
# list user packages
nix-env --query

python virtualenv

nix-shell -p python3Packages.virtualenv
python -m venv venv
source venv/bin/activate
pip install -r requirements.txt

local repository (nixpkgs clone) as systemwide channel

$ nix-build nixos/release.nix -A channel --arg nixpkgs  '{ outPath = ./. ; revCount = "'$(git rev-list HEAD | wc -l)'"; shortRev = "'$(git rev-parse --short HEAD)'"; }'
...
/nix/store/hash-name/
$ sudo nix-channel --remove nixos
$ sudo nix-channel --add file:///nix/store/hash-name/tarballs/thetarball.tar.xz nixos
$ sudo nix-channel --update

test packages git pull request

let
[...]
  nixpkgs-tars = "https://github.com/NixOS/nixpkgs/archive/";

  # FIXME iwd networks option
  pr75800 = import (fetchTarball
    "${nixpkgs-tars}ba0baf53e24a123a45861cf5fa08e4b3e1377db0.tar.gz")
    { config = config.nixpkgs.config; };
  # FIXME nftables + docker
  pr81172 = import (fetchTarball 
    "${nixpkgs-tars}0b4e135d8e9c76a43346ae24e33572e627951203.tar.gz") 
    { config = config.nixpkgs.config; };
[...]
in
[...]
  nixpkgs.overlays = [
    (self: super:
      {
        # FIXME: add iwd networks option
        inherit (pr75800) iwd;
      }
      )];

retrieve hash

curl -sL https://github.com/NixOS/nixpkgs/pull/64977.patch \
                | head -n 1 | grep -o -E -e "[0-9a-f]{40}"

allow unfree package installation nix-env

env NIXPKGS_ALLOW_UNFREE=1 nix-env -f /home/onny/projects/nur-packages -iA ocenaudio

build local package

cd nixpkgs
nix build -f ./. python310Packages.baserow
nix build -f ./. nodePackages.hyperpotamus
nix-build -E 'with import <nixpkgs> { }; callPackage ./default.nix { nodejs = pkgs."nodejs-10_x"; }'
nix-build -E 'with import /home/onny/nixpkgs { }; libsForQt5.callPackage ./pkgs/applications/office/shelf/default.nix {}'

rebuilding with progress indication

nix build '(with import <nixpkgs/nixos> { }; system)'	 
nixos-rebuild -I nixpkgs=/home/onny/projects/nixpkgs switch --max-jobs 1	 

run program

nix run nixpkgs#electrum -- --help
nix run github:TheZombie1999/nixpkgs#upscaly
nix shell nixpkgs#hash-slinger --command tlsa --create example.org

review repository

git clone https://github.com/NixOS/nixpkgs.git
cd nixpkgs
nixpkgs-review pr 98044

setup python virtualenv, working pip

See "How to consume python modules using pip in a virtual environment like I am used to on other Operating Systems?" in the nixpkgs manual:
https://nixos.org/manual/nixpkgs/stable/#python
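
Minimal sketch following that manual section, assuming a requirements.txt in the current directory:

nix-shell -p python3 --run '
  python -m venv .venv
  source .venv/bin/activate
  pip install -r requirements.txt
'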

which package provides certain file

command-not-found telnet

use module from unstable

nixpkgs.config.packageOverrides = pkgs: rec {
  unstable = import unstableTarball { };
  opensnitch = unstable.opensnitch;
};
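
unstableTarball is assumed to be defined earlier in the config, for example as a branch tarball (sketch):

unstableTarball = fetchTarball
  "https://github.com/NixOS/nixpkgs/archive/nixos-unstable.tar.gz";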

overlay: use / overwrite package from custom remote or local repo / fork:

  nixpkgs.overlays = [
    (self: super: {
      foo2zjs = (import (builtins.fetchTarball {
        url =
          "https://github.com/onny/nixpkgs/archive/foo2zjs.tar.gz";
        sha256 = "1jp98jhc4mw9bipdiq7qdrzn3lslk4mi7prqfh2v43isgsvpd6bg";
      }) { config = { allowUnfree = true; }; }).foo2zjs;
      linux-wifi-hotspot = (import /home/onny/projects/nixpkgs2 {}).linux-wifi-hotspot;
    })
  ];

overlay generate package derivation which includes custom files

  nwjs' = nwjs.overrideAttrs (x: {
    ffmpegPrebuilt = fetchurl {
      url = "https://github.com/iteufel/nwjs-ffmpeg-prebuilt/releases/download/${x.version}/${x.version}-linux-x64.zip";
      sha256 = "1ch14s80p4dpwkhwa831kqy4j7m55v1mdxvq0bdaa5jpd7c75mbk";
    };
    patchPhase = ''
      cd lib
      ${unzip}/bin/unzip -o $ffmpegPrebuilt
      ${x.patchPhase or ""}
    '';
  });

python environment with gobject introspection etc

nix-shell -p 'python3.withPackages (p: [p.pygobject3])' -p libnotify -p gobjectIntrospection

fetchpatch array

patches = map fetchpatch [
  /* This patch is currently necessary for the unit test suite to run correctly.
   * See https://www.mail-archive.com/klee-dev@imperial.ac.uk/msg03136.html
   * and https://github.com/klee/klee/pull/1458 for more information.
   */
  #{
  #  name = "fix-gtest";
  #  sha256 = "F+/6videwJZz4sDF9lnV4B8lMx6W11KFJ0Q8t1qUDf4=";
  #  url = "https://github.com/klee/klee/pull/1458.patch";
  #}

  # This patch fixes test compile issues with glibc 2.33+.
  #{
  #  name = "fix-glibc-2.33";
  #  sha256 = "PzxqtFyLy9KF1eA9AAKg1tu+ggRdvu7leuvXifayIcc=";
  #  url = "https://github.com/klee/klee/pull/1385.patch";
  #}
];

locate / search file belongs to which package

nix-locate -w libnss3.so

list package content

tree $(nix-instantiate --eval -E 'with import <nixpkgs> {}; glibcLocales.outPath' | xargs)

nixos-shell usage

QEMU_NET_OPTS="hostfwd=tcp::8080-:80" NIX_PATH=nixpkgs=/home/onny/projects/nixpkgs nixos-shell vm-invoiceplane.nix
sudo -E QEMU_NET_OPTS="hostfwd=tcp::80-:80" NIX_PATH=nixpkgs=/home/onny/projects/nixpkgs nixos-shell vm-invoiceplane.nix

systemd one shot service

  # Symlink nvim user config for root user
  systemd.services.nvim-symlink = {
    script = ''
      if [[ ! -h "/root/.config/nvim" ]]; then
        ln -s "/home/onny/.config/nvim" "/root/.config/"
      fi
    '';
    wantedBy = [ "multi-user.target" ];
    serviceConfig = {
      Type = "oneshot";
    };
  };

packaging

fetchurl

{ fetchurl, }:

let
  pname = "librewolf-bin";
  version = "85.0.2-1";
  name = "${pname}-${version}";

  src = fetchurl {
    url = "https://gitlab.com/librewolf-community/browser/linux/uploads/b87285386bed26dc6d6d4cf252ca7adf/LibreWolf-${version}.x86_64.AppImage";
    sha256 = "0sapm4g4qs63sm640kxcjrngxnix524ms6mxnn0xz6p0xr8dz27r";
  };

build package

cd /path/to/nixpkgs
nix-build -A nodePackages.<new-or-updated-package>
nix-build -E 'with import /home/onny/projects/nixpkgs { }; python3Packages.callPackage ./pkgs/development/python-modules/pyasn {}'

get checksum

nix-prefetch-url 'http://i3wm.org/downloads/i3-4.5.1.tar.bz2'

running tests

echo "$PR_DIFF" | xargs editorconfig-checker -disable-indent-size

reviewing uncommitted changes

nixpkgs-review wip

template: fetchFromGitLab + cmake qt application

{ lib
, stdenv
, fetchFromGitLab
, cmake
, pkg-config
, qtbase
, qttools
, qpdf
, podofo
}:

stdenv.mkDerivation rec {
  pname = "pdfmixtool";
  version = "1.0.2";

  src = fetchFromGitLab {
    owner = "scarpetta";
    repo = pname;
    rev = "v${version}";
    sha256 = "066ap1w05gj8n0kvilyhlr1fzwrmlczx3lax7mbw0rfid9qh3467";
  };

  nativeBuildInputs = [
    cmake
    pkg-config
  ];

  buildInputs = [
    qtbase
    qttools
    qpdf
    podofo
  ];

  meta = with lib; {
    description = "An application to split, merge, rotate and mix PDF files";
    homepage = "https://gitlab.com/scarpetta/pdfmixtool";
    license = licenses.gpl3Only;
    maintainers = with maintainers; [ onny ];
  };

}

run test locally

nix-build nixos/tests/dokuwiki.nix

test changes to a module

nixos-rebuild --upgrade switch -I nixpkgs=/home/onny/projects/nixpkgs

wrapProgram add binary to path

 nativeBuildInputs = [
    makeWrapper
  ];

[...]

  postInstall = ''
    wrapProgram $out/bin/wihotspot-gui \
      --prefix PATH : ${lib.makeBinPath [ iw ]}
  '';

wrapProgram add specific arguments

{ lib, stdenv, fetchurl, makeWrapper, adoptopenjdk-bin, jre }:
 
[...]
  nativeBuildInputs = [ makeWrapper ];
 
  installPhase = ''
    mkdir -p $out/share/java $out/bin
    cp $src $out/share/java/tla2tools.jar
    makeWrapper ${jre}/bin/java $out/bin/tlc \
      --add-flags "-XX:+UseParallelGC -cp $out/share/java/tla2tools.jar tlc2.TLC"
  '';

install and modify systemd service

  postInstall = ''
    mkdir -p $out/lib/systemd/system
    substitute dist/systemd/maddy.service $out/lib/systemd/system/maddy.service \
      --replace "/usr/bin/maddy" "$out/bin/maddy" \
      --replace "/bin/kill" "${coreutils}/bin/kill"
    substitute dist/systemd/maddy@.service $out/lib/systemd/system/maddy@.service \
      --replace "/usr/bin/maddy" "$out/bin/maddy" \
      --replace "/bin/kill" "${coreutils}/bin/kill"
  '';

build rust package

{ lib
, fetchFromGitHub
, rustPlatform
, pkg-config
, openssl
, dbus
, sqlite
, file
, gzip
, makeWrapper
}:

rustPlatform.buildRustPackage rec {
  pname = "krankerl";
  version = "0.13.0";

  src = fetchFromGitHub {
    owner = "ChristophWurst";
    repo = "krankerl";
    rev = "v${version}";
    sha256 = "1gp8b2m8kcz2f16zv9xwv4n1zki6imvz9z31kixh6amdj6fif3d1";
  };

  cargoSha256 = "sha256:01hcxs14wwhhvr08x816wa3jcm4zvm6g7vais793cgijipyv00rc";

  nativeBuildInputs = [
    pkg-config
    gzip
    makeWrapper
  ];

  buildInputs = [
    openssl
    dbus
    sqlite
  ];

  checkInputs = [
    file
  ];

  meta = with lib; {
    description = "A CLI helper to manage, package and publish Nextcloud apps";
    homepage = "https://github.com/ChristophWurst/krankerl";
    license = licenses.gpl3Only;
    platforms = platforms.linux;
    maintainers = with maintainers; [ onny ];
  };
}

packaging scheme unstable git

unstable-2018-05-15
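
In a derivation this becomes the version attribute while src pins the actual commit; rev and hash below are placeholders:

version = "unstable-2018-05-15";
src = fetchFromGitHub {
  owner = "some-owner";
  repo = "some-repo";
  rev = "0000000000000000000000000000000000000000";
  sha256 = lib.fakeSha256;
};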

package binary deb

{ stdenv
, lib
, fetchurl
, autoPatchelfHook
, dpkg
, qt5
, libjack2
, alsa-lib
, bzip2
, libpulseaudio }:

stdenv.mkDerivation rec {
  pname = "ocenaudio";
  version = "3.10.6";

  src = fetchurl {
    url = "https://www.ocenaudio.com/downloads/index.php/ocenaudio_debian9_64.deb?version=${version}";
    sha256 = "0fgvm1xw2kgrqj3w6slpfxbb3pw9k8i0dz16q9d5d8gyyvr2mh8g";
  };

  nativeBuildInputs = [
    autoPatchelfHook
    qt5.qtbase
    qt5.wrapQtAppsHook
    libjack2
    libpulseaudio
    bzip2
    alsa-lib
  ];

  buildInputs = [ dpkg ];

  dontUnpack = true;
  dontBuild = true;
  dontStrip = true;

  installPhase = ''
    mkdir -p $out
    dpkg -x $src $out
    cp -av $out/opt/ocenaudio/* $out
    rm -rf $out/opt
    # Create symlink bzip2 library
    ln -s ${bzip2.out}/lib/libbz2.so.1 $out/libbz2.so.1.0
  '';

  meta = with lib; {
    description = "Cross-platform, easy to use, fast and functional audio editor";
    homepage = "https://www.ocenaudio.com";
    license = licenses.unfree;
    platforms = platforms.linux;
    maintainers = with maintainers; [ onny ];
  };
}

package appimage program

{ lib
, fetchurl
, appimageTools
}:

appimageTools.wrapType2 rec {
  pname = "sonixd";
  version = "0.14.0";

  src = fetchurl {
    url = "https://github.com/jeffvli/sonixd/releases/download/v${version}/Sonixd-${version}-linux-x86_64.AppImage";
    sha256 = "sha256-q+26Ut5wN9gFDBdqirR+he/ppu/b1wiqq23WkcRAQd4=";
  };

  extraInstallCommands = ''
    mv $out/bin/sonixd-${version} $out/bin/sonixd
  '';

  meta = with lib; {
    description = "Full-featured Subsonic/Jellyfin compatible desktop music player";
    homepage = "https://github.com/jeffvli/sonixd";
    license = licenses.gpl3Only;
    maintainers = with maintainers; [ onny ];
    platforms = [ "x86_64-linux" ];
  };
}

packaging java / jar file

  installPhase = ''
    mkdir -p $out/{bin,lib}
    cp ${src}/briar-desktop.jar $out/lib/
    makeWrapper ${openjdk}/bin/java $out/bin/briar-desktop \
      --add-flags "-jar $out/lib/briar-desktop.jar"
  '';

nixops

setup

/etc/nixos/configuration.nix
virtualisation.libvirtd.enable = true;
users.extraUsers.myuser.extraGroups = [ "libvirtd" ];
nix-env -iA nixos-unstable.nixopsUnstable
sudo mkdir /var/lib/libvirt/images
sudo chgrp libvirtd /var/lib/libvirt/images
sudo chmod g+w /var/lib/libvirt/images
sudo virsh pool-define-as default dir --target /var/lib/libvirt/images
sudo virsh pool-autostart default
sudo virsh pool-start default
nixops create -d example-libvirtd examples/trivial-virtd.nix
nixops deploy -d example-libvirtd
nixops list

connect to instance (deployment name: example-libvirtd, machine name: machine)

nixops ssh -d example-libvirtd machine

delete deployment, delete machine

nixops delete -d example-libvirtd
 
nixops destroy --include nix-http

start, stop destroy machine foo

nixops start --include foo
nixops stop --include foo
nixops destroy --include foo

list machines

nixops info

filesystem

partitioning

reset flash drive

dd if=/dev/zero of=/dev/sdX bs=2M count=32

change label (vfat etc)

fatlabel /dev/sdb1 "mystick"

resize extX partition

sfdisk -l /dev/sdb
# Disk /dev/sdb: 55.9 GiB, 60022480896 bytes, 117231408 sectors
# Disk model: CR60GB External 
# Units: sectors of 1 * 512 = 512 bytes
# Sector size (logical/physical): 512 bytes / 512 bytes
# I/O size (minimum/optimal): 512 bytes / 512 bytes
# Disklabel type: dos
# Disk identifier: 0x2486e7f7
#
# Device     Boot Start       End   Sectors  Size Id Type
# /dev/sdb1        2048 117231407 117229360 55.9G 83 Linux
 
e2fsck -f /dev/sdb1
resize2fs /dev/sdb1 50G
# resize2fs 1.45.5 (07-Jan-2020)
# Resizing the filesystem on /dev/sdb1 to 13107200 (4k) blocks.
# The filesystem on /dev/sdb1 is now 13107200 (4k) blocks long.
fdisk /dev/sdb
# 1. (d) delete partition
# 2. (n) create new partition
# 3. (p) primary
# 4. (1) partition number
# 5. (2048) start block, same as above
# 6. (+52428800K) last sector partition (13107200k*4k)
# 7. (a) partition is bootable flag
# 8. (w) write changes

recover gpt partition

sgdisk -e /dev/sda

lvm

restore snapshot

lvconvert --merge /dev/vg0/playground_snap

dd

isoinfo -d -i /dev/cdrom | grep -i -E 'block size|volume size' 
dd if=/dev/cdrom of=test.iso bs=<block size from above> count=<volume size from above> status=progress

mount

mount with offset

# find the offset with testdisk, multiply the start sector by the sector size in bytes
mount -o loop,offset=1048576 /dev/sdb /mnt

mount webdav

mount.davfs https://bwsyncandshare.kit.edu/remote.php/dav/files/7bac0379-52e8-42e4-xxxx@bwidm.scc.kit.edu/ remote

mixxx

  • Theme: LateNight
  • Set Microphone Output to default Pulseaudio
./sync.sh
./generate_playlist.sh
env QT_QPA_PLATFORM=xcb mixxx

samba

/etc/samba/smb.conf
[global]
workgroup = WORKGROUP
server role = standalone server
security = user
map to guest = Bad Password

[public]
path = /mnt
writeable = no
browsable = yes
guest ok = yes
systemctl restart smb nmb

curlftpfs

curlftpfs ftp.example.com /mnt/ftp/ -o user=username:password,allow_other

pnpm

pnpm init
pnpm install jquery@3.5.1 --save # see npmjs.com

list outdated and update packages

pnpm outdated
pnpm update

gitlab-ci

Test .gitlab-ci.yml: change into the repository root directory, then:

gitlab-runner exec docker packaging

Where packaging is the name of the job.
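
A minimal .gitlab-ci.yml sketch with such a job; image and script are placeholders:

.gitlab-ci.yml
packaging:
  image: archlinux:latest
  script:
    - ./build.sh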

lrzip

Compress/decompress files and directories (multithreaded)

lrztar directory
lrzuntar directory.tar.lrz
lrzip filename
lrunzip filename.lrz

postmarketOS

general chroot management

pmbootstrap pull
pmbootstrap shutdown

install package into chroot

pmbootstrap chroot --suffix native -- apk add paxmark

emulated chroot

pmbootstrap chroot -b=armv7

update package index

pmbootstrap update

edit kernel config

pmbootstrap kconfig edit htc-pyramid

apk commands

pmbootstrap chroot
$ apk update
$ apk add paxmark
$ apk add paxmark-0.12-r0.apk

working with git

cd .local/var/pmbootstrap/cache_git/pmaports
git checkout htcpyramix
git pull master
git rebase master

cleanup chroot(s)

pmbootstrap zap

flash or boot kernel directly

pmbootstrap flasher flash_kernel
pmbootstrap flasher boot

prepare kernel image for odin

pmbootstrap export
cd /tmp/postmarketOS-export
cp boot.img-samsung-i8150 recovery.img
tar -cf recovery.tar recovery.img
md5sum -t recovery.tar >> recovery.tar
mv recovery.tar recovery.tar.md5

parted

list

parted /dev/sda
$ print

resize partition (default unit size MB)

parted /dev/sda
$ resizepart
Partition number? 1
End?  [10.0GB]? 15000MB

change unit

unit GB

wireshark

capture usb traffic

modprobe usbmon
tshark -D
tshark -i usbmon0

binaries and libraries

read symbols of library

readelf -Ws /usr/lib/libusb-1.0.so.0

wf-recorder

record system video + audio

pactl list sources short
wf-recorder -aalsa_output.pci-0000_00_1b.0.analog-stereo.monitor --file=recording_with_audio.mp4

Note: due to a wf-recorder quirk, the device name must follow -a directly, without a space.

encoded

wf-recorder -d /dev/dri/renderD128 -c h264_vaapi --bframes 0 -p crf=20 -aalsa_output.pci-0000_00_1b.0.analog-stereo.monitor --file=recording_encoded.mp4

qemu

booting armv7 alpinelinux

# download kernel and stuff from here http://dl-cdn.alpinelinux.org/alpine/edge/releases/armv7/netboot/
qemu-system-arm -M virt -m 512M -cpu cortex-a15 -kernel vmlinuz-lts -initrd initramfs-lts -append "console=ttyAMA0 ip=dhcp alpine_repo=http://dl-cdn.alpinelinux.org/alpine/edge/main/" -nographic

postgresql

list all databases

sudo -u postgres psql
# \l

drop database

sudo -u postgres psql
# drop database gitlabhq_production;
# drop database gitlabhq_production WITH (FORCE);

list tables

sudo -u postgres psql
# \c gitlabhq_production
# \dt

create and delete user

DROP ROLE gitlab;
CREATE USER gitlab WITH PASSWORD 'test123';

grant permissions

ALTER USER gitlab SUPERUSER;
CREATE DATABASE gitlabhq_production OWNER gitlab;
ALTER DATABASE gitlabhq_production OWNER TO gitlab;

dump database

pg_dump -U gitlab gitlabhq_production > /tmp/gitlab.pgsql

dump all

pg_dumpall > /tmp/dump_file_name.tar

import database

psql# CREATE DATABASE gitlabhq_production;
psql -U gitlab gitlabhq_production < gitlab.pgsql