on Ubuntu, override the apt daily timers to adjust when unattended updates are run and when the automatic reboot is performed

sudo systemctl edit apt-daily.timer

add the following drop-in config (the empty OnCalendar= line clears the default schedule):

[Timer]
OnCalendar=
OnCalendar=*-*-* 4:00
RandomizedDelaySec=1h

save it, then check when the timer will next fire:

systemctl status apt-daily.timer
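the timer above only covers the download step: the actual unattended-upgrade run (and the automatic reboot, if enabled) is triggered by apt-daily-upgrade.timer, so it usually makes sense to override that one the same way; a sketch, assuming you want it to fire an hour after the download:

sudo systemctl edit apt-daily-upgrade.timer
# add:
# [Timer]
# OnCalendar=
# OnCalendar=*-*-* 5:00
# RandomizedDelaySec=30m
systemctl status apt-daily-upgrade.timer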

OpenDKIM with sendmail

sudo apt-get install opendkim opendkim-tools
sudo mkdir -p /etc/opendkim/keys/ENTER-YOUR-DOMAIN.COM
sudo opendkim-genkey -b 2048 -d ENTER-YOUR-DOMAIN.COM -D /etc/opendkim/keys/ENTER-YOUR-DOMAIN.COM -s mail -v
# import DNS record from mail.txt in /etc/opendkim/keys/ENTER-YOUR-DOMAIN.COM
sudo chown -R opendkim:opendkim /etc/opendkim
sudo chmod -R go-rw /etc/opendkim/keys

# edit /etc/mail/sendmail.mc and add: INPUT_MAIL_FILTER(`opendkim', `S=local:/run/opendkim/opendkim.sock')
# edit /etc/opendkim.conf and set:
#Domain                  yourdomain.com
#KeyFile                 /etc/opendkim/keys/yourdomain.com/mail.private
#Selector                mail

sudo sh -c 'm4 /etc/mail/sendmail.mc > /etc/mail/sendmail.cf' # run the redirect as root too, otherwise writing sendmail.cf fails

sudo systemctl restart opendkim
sudo systemctl restart sendmail
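once the DNS record from mail.txt has propagated, the key can be sanity-checked (dig is in the dnsutils package; this assumes the selector is still "mail"):

dig +short TXT mail._domainkey.ENTER-YOUR-DOMAIN.COM
sudo opendkim-testkey -d ENTER-YOUR-DOMAIN.COM -s mail -vvv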

backup and recreate a PostgreSQL database

sudo -u postgres -i pg_dump ENTER_db_name_here > /var/ENTER_db_name_here_"$(date +%Y-%m-%d_%H%M%S)".sql \
&& sudo -u postgres psql -c "DROP DATABASE ENTER_db_name_here WITH(FORCE);" \
&& sudo -u postgres psql -c "CREATE DATABASE ENTER_db_name_here;"

as separate commands:

sudo -u postgres -i pg_dump ENTER_db_name_here > /var/ENTER_db_name_here_"$(date +%Y-%m-%d_%H%M%S)".sql
sudo -u postgres psql -c "DROP DATABASE ENTER_db_name_here WITH(FORCE);"
sudo -u postgres psql -c "CREATE DATABASE ENTER_db_name_here;"

On Ubuntu 20.04 LTS

uname -a ; lsb_release -a
apt update && apt upgrade -y
apt install fail2ban unattended-upgrades apt-listchanges git zip software-properties-common
ufw allow 80
ufw allow 443
ufw allow 22
ufw enable
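to confirm the firewall rules took effect:

ufw status verbose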

apt-get install unattended-upgrades
nano /etc/apt/apt.conf.d/10periodic
APT::Periodic::Update-Package-Lists "1";
APT::Periodic::Download-Upgradeable-Packages "1";
APT::Periodic::AutocleanInterval "7";
APT::Periodic::Unattended-Upgrade "1";

nano /etc/apt/apt.conf.d/50unattended-upgrades
Unattended-Upgrade::Allowed-Origins {
        "Ubuntu lucid-security";
//      "Ubuntu lucid-updates";
};
Unattended-Upgrade::Automatic-Reboot "true";


sudo dpkg-reconfigure -plow unattended-upgrades
sudo unattended-upgrade --dry-run
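once it has run at least once, the result can be checked in the unattended-upgrades log:

sudo tail -n 50 /var/log/unattended-upgrades/unattended-upgrades.log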

reboot

nginx

apt install nginx
sudo unlink /etc/nginx/sites-enabled/default
mkdir /var/www/TODO-ENTER-YOUR-DOMAIN.com
nano /etc/nginx/sites-available/TODO-ENTER-YOUR-DOMAIN.com
# paste the nginx server block from below
ln -s /etc/nginx/sites-available/TODO-ENTER-YOUR-DOMAIN.com /etc/nginx/sites-enabled/
nginx -t
sudo systemctl reload nginx

nginx basic

server {
    listen 80;
    server_name TODO-ENTER-YOUR-DOMAIN.com;
    root /var/www/TODO-ENTER-YOUR-DOMAIN.com/public;

    index index.html index.htm index.php;
    
    location ~ /\. {
        deny all;
    }
    location ^~ /.well-known/ {
        allow all;
    }

    location / {
        index index.html index.php; ## Allow a static html file to be shown first
        try_files $uri $uri/ @handler; ## If missing pass the URI to Magento's front handler
        expires 30d; ## Assume all files are cachable
    }

    location @handler {
        rewrite / /index.php;
    }

    location ~ \.php$ {
        include snippets/fastcgi-php.conf;
        fastcgi_pass unix:/run/php/php8.1-fpm.sock;
    }

    location ~ /\.ht {
        deny all;
    }

}

nginx with HTTP auth

generate .htpasswd:

sudo apt install apache2-utils && sudo htpasswd -c /etc/apache2/.htpasswd user
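to add more users later, drop the -c flag (it would overwrite the existing file); "anotheruser" is just an example name:

sudo htpasswd /etc/apache2/.htpasswd anotheruser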

nginx config with HTTP auth

server {
    listen 80;
    server_name TODO-ENTER-YOUR-DOMAIN.com;
    root /var/www/TODO-ENTER-YOUR-DOMAIN.com/public;

    index index.html index.htm index.php;
    
    location ~ /\. {
        deny all;
    }
    location ^~ /.well-known/ {
        allow all;
    }

    location / {
        satisfy any;
        allow 127.0.0.1;
        deny  all;
        auth_basic           "HTTP auth";
        auth_basic_user_file /etc/apache2/.htpasswd; 
    
        index index.html index.php;
        try_files $uri $uri/ @handler;
        expires 30d;
    }

    location @handler {
        rewrite / /index.php;
    }

    location ~ \.php$ {
        include snippets/fastcgi-php.conf;
        fastcgi_pass unix:/run/php/php8.1-fpm.sock;
    }

    location ~ /\.ht {
        deny all;
    }

}

HTTPS

apt install certbot python3-certbot-nginx
certbot --nginx -d TODO-ENTER-YOUR-DOMAIN.com
systemctl status certbot.timer
certbot renew --dry-run
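to list the issued certificates and their expiry dates:

certbot certificates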

PHP

sudo add-apt-repository ppa:ondrej/php -y
apt install php8.1 php8.1-fpm php8.1-common php8.1-cli php8.1-dev php8.1-opcache php8.1-pdo php8.1-pgsql php8.1-intl php8.1-mbstring php8.1-xml php8.1-dom php8.1-curl php8.1-gd php8.1-imagick php8.1-imap php8.1-zip php8.1-redis
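quick check that the expected CLI version is installed and FPM is running:

php -v
systemctl status php8.1-fpm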

composer

php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
php -r "if (hash_file('sha384', 'composer-setup.php') === '906a84df04cea2aa72f40b5f787e49f22d4c2f19492ac310e8cba5b96ac8b64115ac402c8cd292b8a03482574915d1a8') { echo 'Installer verified'; } else { echo 'Installer corrupt'; unlink('composer-setup.php'); } echo PHP_EOL;"
php composer-setup.php
php -r "unlink('composer-setup.php');"
sudo mv composer.phar /usr/local/bin/composer

Symfony

wget https://get.symfony.com/cli/installer -O - | bash
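# note: newer versions of the installer may put the binary in /root/.symfony5/bin instead of /root/.symfony/bin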
mv /root/.symfony/bin/symfony /usr/local/bin/symfony
symfony check:requirements

node

curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
apt-get install -y nodejs
npm install # run inside the project directory (see the deploy section)
npm install --global yarn
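sanity check of the installed versions:

node -v
npm -v
yarn -v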

PostgreSQL

sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
apt update && apt upgrade
sudo apt install postgresql-14 postgresql-client-14
sudo -u postgres psql
CREATE DATABASE yourdbname;
CREATE USER youruser WITH ENCRYPTED PASSWORD 'yourpass';
GRANT ALL PRIVILEGES ON DATABASE yourdbname TO youruser;
exit
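a quick way to confirm the new role can actually connect (assumes the packaged default pg_hba.conf, which allows password auth on 127.0.0.1, and the default port 5432):

psql "postgresql://youruser:yourpass@127.0.0.1:5432/yourdbname" -c "\conninfo"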

MySQL

CREATE DATABASE example_database;
CREATE USER 'example_user'@'%' IDENTIFIED WITH mysql_native_password BY 'password';
GRANT ALL ON example_database.* TO 'example_user'@'%';

to install mysql 5.7: https://www.vultr.com/docs/how-to-install-mysql-5-7-on-ubuntu-20-04/
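once the server is installed and the statements above have been run in the mysql shell as root, a quick login test for the new user:

mysql -u example_user -p -e "SHOW DATABASES;" example_database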

git global

git config --global user.name "YOUR-SERVER-DOMAIN.COM"
git config --global user.email "[email protected]"

deploy

ssh-keygen -t ed25519 -C "[email protected]"
cat /root/.ssh/id_ed25519.pub
# add the public ssh key to your repo config, read-only access is enough
cd /var/www/TODO-ENTER-YOUR-DOMAIN.com
git clone enterYourGitRepoAddress /var/www/TODO-ENTER-YOUR-DOMAIN.com
composer install --no-dev --classmap-authoritative --no-interaction
composer dump-env prod
php bin/console doctrine:database:create
php bin/console doctrine:migrations:migrate --all-or-nothing --no-interaction
php bin/console app:execute-one-time
npm install
yarn encore production

permissions

chmod -R 750 /var/www/YOUR-DOMAIN.COM/var
chown -R www-data:www-data /var/www/YOUR-DOMAIN.COM
chmod +x ./bin/console
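to verify the permissions actually let PHP-FPM write to var/ (.permtest is just a throwaway filename used for this check):

sudo -u www-data touch /var/www/YOUR-DOMAIN.COM/var/.permtest \
  && sudo -u www-data rm /var/www/YOUR-DOMAIN.COM/var/.permtest \
  && echo "var/ is writable by www-data"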

DB backup

sudo -u postgres -i pg_dump database_name > /var/zzzz_database_backup/database_name_"$(date +%Y-%m-%d_%H%M%S)".sql
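to automate this, a sketch of a daily root cron entry (assumes the /var/zzzz_database_backup directory already exists; the % signs must be escaped, otherwise cron treats them as newlines):

sudo crontab -e
# add a line like:
# 30 3 * * * su - postgres -c "pg_dump database_name" > /var/zzzz_database_backup/database_name_$(date +\%Y-\%m-\%d_\%H\%M\%S).sql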

Mailhog

sudo apt-get -y install golang-go git
go get github.com/mailhog/MailHog
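# with Go 1.17 or newer use instead: go install github.com/mailhog/MailHog@latest (the binary still ends up in ~/go/bin)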
sudo cp ~/go/bin/MailHog /usr/local/bin/Mailhog
Mailhog # quick manual test, stop with Ctrl+C
sudo tee /etc/systemd/system/mailhog.service <<EOL
[Unit]
Description=Mailhog
After=network.target
[Service]
User=$USER
# systemd does not do shell redirection, so ExecStart is just the binary
ExecStart=/usr/local/bin/Mailhog
[Install]
WantedBy=multi-user.target
EOL

sudo systemctl daemon-reload
sudo systemctl start mailhog
sudo systemctl enable mailhog


# mailhog with password protection
sudo tee /etc/systemd/system/mailhog.service <<EOL
[Unit]
Description=Mailhog
After=network.target
[Service]
User=$USER
ExecStart=/usr/local/bin/Mailhog -auth-file=/etc/mailhog-auth.txt
[Install]
WantedBy=multi-user.target
EOL

# generate the bcrypt hash, e.g. at https://www.browserling.com/tools/bcrypt
sudo tee /etc/mailhog-auth.txt <<EOL
LOGIN_HERE:BCRYPT_PASSWORD_HERE
EOL
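after changing the unit file and creating the auth file, reload systemd and restart the service:

sudo systemctl daemon-reload
sudo systemctl restart mailhog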

other

rm -r ./var/cache/prod

php bin/console cache:warmup --env=prod --no-interaction

composer install --optimize-autoloader --no-dev --no-interaction # for testing and fast updates, autoloader without cache

sudo lsof -i -P | grep LISTEN | grep :$PORT # check ports usage

deleting millions of files over SFTP with lftp

of course, the better option for deleting millions of files would be to run the rm -r system command directly or to unmount the specific directory, but sometimes you don't have that kind of access; in that case you can use the lftp command line as in the example below

using lftp from the command line is faster than Midnight Commander or other GUI FTP clients, since it walks the whole tree non-interactively and can be left running in the background

nohup lftp -u ENTER_USERNAME_HERE,ENTER_PASS_HERE sftp://ENTER_USERNAME_HERE.your-storagebox.de:/home/ -p 23 -e "rm -r /home/ENTER_DIRECTORY_TO_DELETE_HERE" &

nohup is used so the process won't stop when you log out or the connection drops; this matters because deleting millions of files one by one over SFTP can take days
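to check later that the background delete is still running, and to watch its output (nohup.out is created in the directory the command was started from):

ps aux | grep "[l]ftp"
tail -f nohup.out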

sometimes you will run into permission problems; to fix them, correct the offending permissions with chmod first:

nohup lftp -u ENTER_USERNAME_HERE,ENTER_PASS_HERE sftp://ENTER_USERNAME_HERE.your-storagebox.de:/home/ -p 23 -e "chmod -R 770 /home/ENTER_DIRECTORY_TO_DELETE_HERE" &

test CORS from the browser's JS console

to check whether a cross-origin request gets blocked by CORS, execute one of these in the browser's JS console:

POST request

fetch('//example.com/movies.json', {
    method: 'POST',
    mode: 'cors', // no-cors, *cors, same-origin
    cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached
    credentials: 'same-origin', // include, *same-origin, omit
    headers: {
      'Content-Type': 'application/json'
      // 'Content-Type': 'application/x-www-form-urlencoded',
    },
    redirect: 'follow', // manual, *follow, error
    referrerPolicy: 'no-referrer', // no-referrer, *no-referrer-when-downgrade, origin, origin-when-cross-origin, same-origin, strict-origin, strict-origin-when-cross-origin, unsafe-url
    body: JSON.stringify({
      "postDataHere_Key": "postDataValue",
    }) 
  })
  .then(response => response.json())
  .then(data => console.log(data));

GET request

fetch('//example.com/movies.json', {
    method: 'GET',
    mode: 'cors', // no-cors, *cors, same-origin
    cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached
    credentials: 'same-origin', // include, *same-origin, omit
    headers: {
      'Content-Type': 'application/json'
      // 'Content-Type': 'application/x-www-form-urlencoded',
    },
    redirect: 'follow', // manual, *follow, error
    referrerPolicy: 'no-referrer', // no-referrer, *no-referrer-when-downgrade, origin, origin-when-cross-origin, same-origin, strict-origin, strict-origin-when-cross-origin, unsafe-url
  })
  .then(response => response.json())
  .then(data => console.log(data));

then check that a response comes back and there are no errors; if the request is blocked by CORS, the console will show an error like: "Access to fetch at 'http://example.com/movies.json' from origin 'http://localhost:3000' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource. If an opaque response serves your needs, set the request's mode to 'no-cors' to fetch the resource with CORS disabled."

to find PHP files that are ignored by git:

git check-ignore -v $(find . -type f -name '*.php' -print)