Skip to content




Here I store memo on general Linux commands and simple scripts.

usb drive

USB 2.0 (480 Mbps), USB 3.0 ( 5 Gbps), USB 3.2 Gen 2 (10 Gbps)

$ sudo lsusb -tvv
/:  Bus 02.Port 1: Dev 1, Class=root_hub, Driver=xhci_hcd/4p, 5000M
    ID 1d6b:0003 Linux Foundation 3.0 root hub
    /sys/bus/usb/devices/usb2  /dev/bus/usb/002/001
    |__ Port 1: Dev 2, If 0, Class=Mass Storage, Driver=usb-storage, 5000M
        ID 04bb:016c I-O Data Device, Inc.
        /sys/bus/usb/devices/2-1  /dev/bus/usb/002/002
/:  Bus 01.Port 1: Dev 1, Class=root_hub, Driver=xhci_hcd/1p, 480M
    ID 1d6b:0002 Linux Foundation 2.0 root hub
    /sys/bus/usb/devices/usb1  /dev/bus/usb/001/001
    |__ Port 1: Dev 2, If 0, Class=Hub, Driver=hub/4p, 480M
        ID 2109:3431 VIA Labs, Inc. Hub
        /sys/bus/usb/devices/1-1  /dev/bus/usb/001/002

openssl

[[openssl]]

Checking ciphers

openssl ciphers 'DEFAULT:!RC4-SHA'

Enc/dec files and dirs

# A single file:

openssl aes-256-cbc -pbkdf2 -in file -out file.aes
openssl aes-256-cbc -pbkdf2 -d -in file.aes -out file

# A directory:

tar -zcf - directory | openssl aes-256-cbc -pbkdf2 -out directory.tar.gz.aes
openssl aes-256-cbc -pbkdf2 -d  -in directory.tar.gz.aes | tar -xz -f -

http check using netcat and openssl

[[nc]], [[netcat]], [[openssl]]

# plain http check using nc
echo -ne "GET / HTTP/1.0\r\nConnection: Close\r\nHost: beryl.blink-1x52.net\r\n\r\n" | nc 10.1.1.1 80

# https check using openssl
echo -ne "GET / HTTP/1.1\r\nConnection: Close\r\nHost: beryl.blink-1x52.net\r\n\r\n" | openssl s_client -connect 10.1.1.1:443 -quiet

ftps

curl -kv -T t.txt --ssl ftp://10.1.1.1 --user test

syslog

[[syslog]]

echo '<0>Test message' | nc -w 1 -u 10.1.1.1 514
logger -p local0.notice "Test message"

calculation

facility * 8 + severity

local0(16) * 8 + notice(5) = 133

local7 = 184 + sev_id

severity

        Numerical         Severity
          Code

           0       Emergency: system is unusable
           1       Alert: action must be taken immediately
           2       Critical: critical conditions
           3       Error: error conditions
           4       Warning: warning conditions
           5       Notice: normal but significant condition
           6       Informational: informational messages
           7       Debug: debug-level messages

facility

       Numerical             Facility
          Code

           0             kernel messages
           1             user-level messages
           2             mail system
           3             system daemons
           4             security/authorization messages (note 1)

           5             messages generated internally by syslogd
           6             line printer subsystem
           7             network news subsystem
           8             UUCP subsystem
           9             clock daemon (note 2)
          10             security/authorization messages (note 1)
          11             FTP daemon
          12             NTP subsystem
          13             log audit (note 1)
          14             log alert (note 1)
          15             clock daemon (note 2)
          16             local use 0  (local0)
          17             local use 1  (local1)
          18             local use 2  (local2)
          19             local use 3  (local3)
          20             local use 4  (local4)
          21             local use 5  (local5)
          22             local use 6  (local6)
          23             local use 7  (local7)

curl

[[curl]]

I can use this to log http(s) reachability to certain destinations, store the results in files per dst, and generate plot and etc.

echo "`date --utc "+%Y-%m-%d %H:%M:%S"`,`curl -w "@curl_format.txt" -o /dev/null "https://beryl.blink-1x52.net/" --no-keepalive -s`"
curl_format.txt
%{response_code},%{time_namelookup},%{time_connect},%{time_pretransfer},%{time_starttransfer},%{time_total}\n

github release

curl -sfL "https://api.github.com/repos/projectcalico/calico/releases/latest" | grep "tag_name"

# example seen in directpv doc
DIRECTPV_RELEASE=$(curl -sfL "https://api.github.com/repos/minio/directpv/releases/latest" | awk '/tag_name/ { print substr($2, 3, length($2)-4) }')

oui

[[oui]], [[MAC address]]

# curl -x http://proxy:port "http://standards-oui.ieee.org/oui.txt" -o oui.txt
curl https://standards-oui.ieee.org/oui.txt -o oui.txt

# check using file containing mac addr
grep -if macaddr.oui.txt oui.txt

# vendor name
grep -i microsoft oui.txt | awk -F' ' '/hex/ {print $1}' | tr '[:upper:]' '[:lower:]'

sed

[[sed]]

Removing multibyte characters

LANG=C sed 's/[\x80-\xFF]//g' filename

Removing empty lines

sed '/^$/d'

# sed -i '/^$/d' <file> ; "-i" to update the same file

Removing leading space

sed 's/^[ \t]*//'

ssh

[[ssh]]

SSH config file when you need to support remote nodes with old ssh setup

~/.ssh/config
Host *
  GSSAPIAuthentication yes
  GSSAPIKeyExchange yes
  GSSAPIDelegateCredentials yes
  KexAlgorithms +diffie-hellman-group1-sha1,diffie-hellman-group-exchange-sha1
  ConnectTimeout 4

SSH SOCKS proxy tunnel

# on local port 1080 tcp
ssh -fN -D 1080 {jump_server_name}

# run using nohup ... &
# find and kill the process to terminate the tunnel
# run something like...) ps ux | grep {jump_server_name} | grep -v grep | kill `awk '{print $2}'`
# or simply: pkill -f {jump_server_name}

nc

[[nc]], [[netcat]]

# listen on tcp port
nc -kl {port number}

In python

import socket
import sys


def main():
    """Listen on the given TCP port and report each incoming connection.

    Accepted sockets are deliberately kept in a list so the peers stay
    connected — useful when testing connection counts/limits from clients.
    """
    if len(sys.argv) != 2 or not sys.argv[1].isdigit():
        print('Usage: listen <port>')
        # exit() without a code returned 0 on a usage error; signal failure.
        sys.exit(1)

    port = int(sys.argv[1])
    conns = []
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow immediate rebind after a restart while the old socket
    # is still in TIME_WAIT ("Address already in use").
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(('', port))
    server.listen(1)
    while True:
        (conn, addr) = server.accept()
        conns.append(conn)
        print('%d: connection from %s' % (len(conns), addr))


if __name__ == '__main__':
    main()

awk

[[awk]]

# field separator "."
awk -F. '{print $4}' n

# skip first field
awk '{$NF=""}1' <filename>

# and the last
awk '{$NF=""}1' <filename> | cut -d, -f2-

# certain column starting with...
awk -F'\t' '$4 ~ /^SEP/ {print $0}'  scope_lease.csv

# removing duplicating lines
awk '!seen[$0]++' n
awk -F, '!seen[$15]++' log
zcat <inputfile> | awk '$11 == 200 {print $3}' | awk '!seen[$1]++'

# occurrence
awk '{count[$2]++} END {for (word in count) print word, count[word]}' p2.out | sort -k2 -Vr
grep JAPAN scannerdb/scanner_inventory.csv | awk -vFPAT='([^,]*)|("[^"]+")' -vOFS=, '{count[$14]++} END {for (word in count) print word, count[word]}'

tree

[[tree]], [[find]]

tree alternative

find . -type d -print

Find files in the CWD including subdirectories and get the sum of the file sizes.

# assuming the directory is all 4096...
find -name '*' -ls | grep -v 4096 | awk '{ sum += $7; } END {print sum;}'

# print top 10 files
find -name '*' -ls | grep -v 4096 | sort -nk7 | tail -10 | sort -nr -k 7 | awk '{print $11, $7}'

diff

[[diff]]

# run grep for every line in the first file against the second file
grep -wFf dc1-cli dc1show_run > out-dc1

# side-by-side output with maximum of 220 char, excluding common lines
diff -W 220 -y --suppress-common-lines ./admin/show_run ../../latest/hostname/admin/show_run

# diff and patching
diff -u original-code updated-code > patchfile.patch
patch original-code patchfile.patch

for loop

for item in `cat scope_list`; do grep "$item" out | head -2; done

for ipaddr in `cat list.txt`; do whois -h whois.radb.net $ipaddr | grep origin | head -1 >> w.out; done
# try other whois as necessary - whois.nic.ad.jp and etc.