update of assets from refresh script

This commit is contained in:
Tykayn 2023-02-17 12:16:08 +01:00 committed by tykayn
parent e3cc208beb
commit 9b9fd8d3d6
6 changed files with 0 additions and 646 deletions

View File

@@ -1,155 +0,0 @@
#!/bin/bash
# ----------------- documentation -----------------
#
# @author functions_sync by @tykayn - contact at cipherbliss.com
export today=`date` # to log current time
export ARCHIVE_SYNCABLE="/home/poule/encrypted/stockage-syncable" # place where we have our things sorted, other than home
export BORG_PASSCOMMAND="cat $ARCHIVE_SYNCABLE/.tk-borg-passphrase-light" # get the borg repo pass
export SPACESHIP_BORG_REPO="/home/poule/borg_archives/backup_land4to"
export LOG_FILE_BACKUP="$ARCHIVE_SYNCABLE/www/backup/log_backup.log"
export LOG_FILE_BACKUP_DATES="$ARCHIVE_SYNCABLE/www/backup/summary_log_backup.log" # log dates of execution of the script
CURRENT_YEAR="2022"
USER='tykayn'
# --------- log the current date -------- #
function logDate()
{
echo "--- $1" | tee -a $LOG_FILE_BACKUP_DATES 2>&1
date '+%Y-%m-%d %H:%M:%S' | tee -a $LOG_FILE_BACKUP_DATES 2>&1
echo "--- " | tee -a $LOG_FILE_BACKUP_DATES 2>&1
}
# --------- rsync exclusion handling -------- #
EXCLUDE=( '.yarn' 'cache' '.cache' 'Cache' 'Steam' 'steamapps' '.npm' 'node_modules' '.mozilla' 'vendor' '.rbenv' '.config/borg' '@eaDir' 'bower_components')
exclude_opts=()
for item in "${EXCLUDE[@]}"; do
exclude_opts+=( --exclude="$item" )
done
logDate "rsync exclusions: ${exclude_opts[*]}"
# --------- replication of the poule zfs contents -------- #
# the destination disks must have about 2.5 TB of free space
# each one must mirror the internal part of /home/poule as well as the music folder
function syncToBigDiskName()
{
local diskName=$1
echo ' ' >> $LOG_FILE_BACKUP_DATES
# check that the disk exists
FILE=/media/$USER/$diskName
if test -d "$FILE"; then
echo "### $FILE , $diskName exists." >> $LOG_FILE_BACKUP_DATES
echo "### ${today} replicate to disk $diskName" >> $LOG_FILE_BACKUP_DATES
logDate "disk $diskName : illustrations part";
rsync -avhWP /home/poule/encrypted/dessins_autres_gens /media/$USER/$diskName/encrypted --perms --delete-before --inplace "${exclude_opts[@]}"
rsync -avhWP /home/poule/encrypted/mangas /media/$USER/$diskName/encrypted --perms --delete-before --inplace "${exclude_opts[@]}"
logDate "disk $diskName : installers part";
rsync -avhWP /home/poule/encrypted/home /media/$USER/$diskName/encrypted --perms --delete-before --inplace "${exclude_opts[@]}"
rsync -avhWP /home/poule/encrypted/installateurs /media/$USER/$diskName/encrypted --perms --delete-before --inplace "${exclude_opts[@]}"
logDate "disk $diskName : stockage-syncable part: photos of the current year";
rsync -avhWP /home/poule/encrypted/stockage-syncable/photos/$CURRENT_YEAR/* /media/$USER/$diskName/encrypted/stockage-syncable/photos/$CURRENT_YEAR --delete-before --inplace "${exclude_opts[@]}"
logDate "disk $diskName : stockage-syncable part: all photos";
rsync -avhWP /home/poule/encrypted/stockage-syncable/photos/* /media/$USER/$diskName/encrypted/stockage-syncable/photos --delete-before --inplace "${exclude_opts[@]}"
logDate "disk $diskName : production-servers-backup part";
rsync -avhWP /home/poule/borg_archives/production-servers-backup/* /media/$USER/$diskName/borg_archives/production-servers-backup --delete-before --inplace "${exclude_opts[@]}"
logDate "disk $diskName : all of encrypted";
rsync -avhWP /home/poule/encrypted/* /media/$USER/$diskName/encrypted --delete-before "${exclude_opts[@]}"
logDate "disk $diskName : music part";
rsync -avhWP /home/poule/music /media/$USER/$diskName/ --delete-before --inplace "${exclude_opts[@]}"
else
echo "### $FILE not found." >> $LOG_FILE_BACKUP_DATES
fi
date '+%Y-%m-%d %H:%M:%S' | tee -a $LOG_FILE_BACKUP_DATES 2>&1
echo "---- syncToBigDiskName $diskName done -----------------------" | tee -a $LOG_FILE_BACKUP_DATES 2>&1
}
# ----------------------------------------------------------------------
# --------- the fatland laptop only has 2 TB available -------- #
function syncfatland()
{
echo ' ' >> $LOG_FILE_BACKUP_DATES
echo ' - sending to FATland' >> $LOG_FILE_BACKUP_DATES
#### to the FATland laptop
rsync /home/poule/encrypted/stockage-syncable/photos/$CURRENT_YEAR tykayn@192.168.1.12:/home/poule/encrypted/stockage-syncable/photos -avhWP --delete-before "${exclude_opts[@]}"
rsync -avhWP /home/poule/encrypted/stockage-syncable/photos/* tykayn@192.168.1.12:/home/poule/encrypted/stockage-syncable/photos --delete-before "${exclude_opts[@]}"
rsync /home/poule/encrypted/stockage-syncable tykayn@192.168.1.12:/home/poule/encrypted -avhWP --delete-before "${exclude_opts[@]}"
rsync /home/poule/encrypted/mangas/* tykayn@192.168.1.12:/home/poule/encrypted/mangas -avhWP --delete-before "${exclude_opts[@]}"
rsync /home/poule/encrypted/home/* tykayn@192.168.1.12:/home/poule/encrypted/home -avhWP --delete-before "${exclude_opts[@]}"
rsync /home/poule/borg_archives/* tykayn@192.168.1.12:/home/poule/borg_archives -avhWP --delete-before
date | tee -a $LOG_FILE_BACKUP_DATES 2>&1
echo "fatland sync done" | tee -a $LOG_FILE_BACKUP_DATES 2>&1
}
# --------- web servers -------- #
# retrieve web servers data to zfs spaceship
# fetch the borg backups of the web servers
function getWebServersBorg()
{
echo ' ' >> $LOG_FILE_BACKUP_DATES
echo "### ${today} copy of servers borg_backup production contents " | tee -a $LOG_FILE_BACKUP_DATES 2>&1
rsync -avzhWP --perms --delete-before tykayn@peertube.cipherbliss.com:/home/$USER/backup/borgbackup_peertube /home/poule/borg_archives/production-servers-backup/spare &
rsync -avzhWP --perms --delete-before tykayn@www.cipherbliss.com:/home/$USER/backup/borgbackup_soy /home/poule/borg_archives/production-servers-backup/soyoustart &
rsync -avzhWP --perms --delete-before tykayn@peertube.cipherbliss.com:/home/$USER/backup/serveurs-production/borgbackup_cloudland /home/poule/borg_archives/production-servers-backup/cloud &
wait # let the three parallel transfers finish before their local copies are reused later in the backup
}
# ----------------- BORG -----------------
# part containing all of stockage-syncable
function upBorg()
{
killall borg
logDate "### --------- SPACESHIP | creating borg archive at $SPACESHIP_BORG_REPO"
rm -rf /home/$USER/.cache/borg/150867528afd85114c8aba98af201a7ad8cf01869c507a87c025d2f8701040a9/lock.exclusive
rm -rf $SPACESHIP_BORG_REPO/lock.exclusive
borg create $SPACESHIP_BORG_REPO::encrypted_spaceship_{now} $ARCHIVE_SYNCABLE /home/poule/encrypted/home/$USER --exclude 'BAZAR' "${exclude_opts[@]}" --progress --verbose --stats --compression zstd,9 | tee -a $LOG_FILE_BACKUP 2>&1
echo ' ' | tee -a $LOG_FILE_BACKUP 2>&1
logDate "### --------- ${today} | SPACESHIP | pruning old archives" | tee -a $LOG_FILE_BACKUP 2>&1
# prune the old tk_backup archives
borg prune -v --list --stats --keep-daily=8 --keep-weekly=6 --keep-monthly=3 --keep-yearly=2 $SPACESHIP_BORG_REPO | tee -a $LOG_FILE_BACKUP 2>&1
logDate '### --------- pruning done'
}
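# (suggestion, not part of the original script) after the prune it can help to verify
# the repository state; "borg check" and "borg info" are standard borg subcommands,
# the function name checkBorg is just a placeholder:
function checkBorg()
{
borg check $SPACESHIP_BORG_REPO | tee -a $LOG_FILE_BACKUP 2>&1
borg info $SPACESHIP_BORG_REPO | tee -a $LOG_FILE_BACKUP 2>&1
}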
# ---------- manage log git
function logGit_csv()
{
git log --pretty=format:"%cd - %an : %s" --graph --since=8.weeks | tee -a log_boulot.org 2>&1
}
# write a log of the commits made, grouped by day, for the current folder
function logGit_per_day(){
while read -r -u 9 since name
do
until=$(date '+%Y-%m-%d %H:%M:%S' )
echo "$since $name"
echo
GIT_PAGER=cat git log \
--no-merges \
--committer="$name" \
--since="$since 00:00:00 +0000" \
--until="$until 00:00:00 +0000" \
--format=' * [%h] %s'
echo
done 9< <(git log --no-merges --format=$'%cd %cn' --date=short --since=8.weeks | sort --unique --reverse) # unique (day, author) pairs read on file descriptor 9, most recent day first
}
function logGit_to_org()
{
folder_name=${PWD##*/}
touch log_git_list.org
echo -e "* Log git $folder_name\n" > log_git_list.org;
pwd >> log_git_list.org;
cat log_git_list.org;
logGit_per_day | tee -a log_git_list.org 2>&1
}

View File

@@ -1,180 +0,0 @@
#!/bin/bash
# ----------------- documentation -----------------
#
# @author sync_spaceship by @tykayn - contact at cipherbliss.com
#
# The Borg archive contains stockage-syncable and is meant to end up on the NAS for posterity
# BORG_NEW_PASSPHRASE=my_complicated_passphrase borg init -e=repokey /home/poule/borg_archives/backup_land4to
# borg create --compression lzma,9 /home/poule/borg_archives/backup_land4to::backup_land4to_{now} /home/poule/encrypted/stockage-syncable
#
#
# install it as a cronjob with:
# crontab -e
#
# m h dom mon dow command
# */30 * * * * bash /home/$USER/sync_spaceship.sh
#
# ----------------- configs -----------------
source /home/tykayn/functions_sync.sh
logDate ' start backup script from sync_spaceship script'
logDate "$(pwd) sync_spaceship.sh"
logDate ' stop all rsync jobs'
killall rsync;
# ------------------
logDate ' copy of the current script'
cp /home/$USER/sync_spaceship.sh /home/$USER/Nextcloud/boulot/syncro_scripts_spaceship
cp /home/$USER/sync_spaceship.sh $ARCHIVE_SYNCABLE/__scripts_syncro
cp /home/$USER/.bash_aliases $ARCHIVE_SYNCABLE/__scripts_syncro
cp /home/$USER/.emacs /home/$USER/Nextcloud/boulot/emacs
echo ' ' >> $LOG_FILE_BACKUP_DATES
echo "### ${today} start backup script from sync_spaceship script" >> $LOG_FILE_BACKUP_DATES
echo ' ' >> $LOG_FILE_BACKUP_DATES
echo "### ${today} list of debian apt packages saved in borg_archives list_of_debian_apt_packages.txt" >> $LOG_FILE_BACKUP_DATES
# save a list of apt packages
# to restore it:
# dpkg --set-selections < list_of_debian_apt_packages.txt
dpkg --get-selections>/home/$USER/list_of_debian_apt_packages.txt
# back pictures to ARCHIVE_SYNCABLE
logDate 'copy of Nextcloud InstantUpload photos'
mv /home/$USER/Nextcloud/InstantUpload/Camera/* "$ARCHIVE_SYNCABLE/photos/$CURRENT_YEAR" | tee -a $LOG_FILE_BACKUP 2>&1
echo ' ' >> $LOG_FILE_BACKUP_DATES
echo "### ${today} medias in $ARCHIVE_SYNCABLE/photos/$CURRENT_YEAR" >> $LOG_FILE_BACKUP_DATES
ls -l "$ARCHIVE_SYNCABLE/photos/$CURRENT_YEAR" | wc -l | tee -a $LOG_FILE_BACKUP 2>&1
logDate ' Screenshots and Download'
mv /home/$USER/Nextcloud/InstantUpload/Screenshots/* "$ARCHIVE_SYNCABLE/photos/screenshots" | tee -a $LOG_FILE_BACKUP 2>&1
mv /home/$USER/Nextcloud/InstantUpload/Download/* "$ARCHIVE_SYNCABLE/BAZAR" | tee -a $LOG_FILE_BACKUP 2>&1
logDate 'update local nextcloud to stockage syncable';
rsync -avhWP --perms --inplace --delete-before /home/$USER/Nextcloud/* $ARCHIVE_SYNCABLE/archivage/clouds/Nextcloud | tee -a $LOG_FILE_BACKUP 2>&1
logDate 'update home backup';
rsync -avhWP --inplace --delete-before /home/$USER/* /home/poule/encrypted/home/$USER "${exclude_opts[@]}" --exclude 'Nextcloud' --exclude 'www' | tee -a $LOG_FILE_BACKUP 2>&1
# --------- save the development folders into the stockage-syncable archive without deleting the projects already there -------- #
logDate 'WWW and HTML: save the development folders';
rsync -avP /home/tykayn/www/* /home/poule/encrypted/stockage-syncable/www/development/html "${exclude_opts[@]}"
rsync -avP /var/www/html/* /home/poule/encrypted/stockage-syncable/www/development/html "${exclude_opts[@]}"
# ----------------------------------------------------------------------
# --------- disks -------- #
# - blue 4TB (squat in Lyon)
# - brossadent 4TB
# - chaton 5TB (USB enclosure)
# - lilia 4TB
# - louisbraille 4TB
# - brossadent 4TB (squat at vovo's)
# - (disks on the dock)
#   |- moonmoon 3TB
#   |- rondoudou 1TB --- not encrypted
#   |- catwoman 4TB (in the NAS)
# - Taiga 1TB
echo "the backup log is located in: $LOG_FILE_BACKUP_DATES"
# --------- update the borg backups of the remote servers -------- #
getWebServersBorg;
# --- raspberry pi ---------- #
logDate 'update local domoticz backup to stockage-syncable/www/backup/domoticz/synced';
rsync -avhWP pi@192.168.1.8:/home/pi /home/poule/encrypted/stockage-syncable/www/backup/domoticz/synced --delete-before --inplace
# --- update the borg archive of stockage-syncable ---------- #
upBorg;
# --------- disks with plenty of space -------- #
# --------- encrypted disks -------- #
syncToBigDiskName louisbraille
syncToBigDiskName rugged
syncToBigDiskName moonmoon # last source disk, kept as a buffer
syncToBigDiskName lilia
syncToBigDiskName chaton
syncToBigDiskName brossadent
syncToBigDiskName blue
# --------- unencrypted disks -------- #
# ----------- small disks --------------
# they can only take stockage-syncable
# borg sync to the Taiga disk
FILE=/media/$USER/Taiga
if test -d "$FILE"; then
echo "### $FILE , Taiga exists." >> $LOG_FILE_BACKUP_DATES
logDate 'Taiga disk mounted - sync borg backup';
# rsync -avhWP /home/poule/encrypted/stockage-syncable/* /media/$USER/$diskName/encrypted/stockage-syncable --perms --delete-before --inplace "${exclude_opts[@]}"
rsync -avhWP /home/poule/borg_archives/backup_land4to/* /media/tykayn/Taiga/backup_land4to --delete-before --inplace
else
echo 'Taiga disk NOT mounted' >> $LOG_FILE_BACKUP_DATES
fi
# --------- other ZFS pools -------- #
syncfatland;
## possible rsync speed improvement on big folders, see the sketch just below
# ls $dossier_source | xargs -n1 -P4 -I% rsync -Pa % $destination
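# A minimal sketch of that idea wrapped as a function (not called anywhere in this
# script; the name parallel_rsync and its two arguments are placeholders). Each
# top-level entry of the source folder gets its own rsync process, 4 at a time:
function parallel_rsync()
{
local src="$1"
local dest="$2"
ls "$src" | xargs -n1 -P4 -I% rsync -Pa "$src/%" "$dest"
}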
# ----------------- sync to NAS -----------------
# to the NAS: the personal and server borg backups
logDate ' to the NAS: backup_land4to';
rsync -avhWP /home/poule/borg_archives/backup_land4to/* tykayn@192.168.1.15:/var/services/homes/tykayn/borg_archives/backup_land4to --delete-before --inplace --perms
rsync -avhWP /home/poule/cryptomator/* tykayn@192.168.1.15:/volume1/bidules_partagés/cryptomator --delete-before --inplace --perms
logDate ' to the NAS: production-servers-backup';
#rsync -avhWP /home/poule/borg_archives/production-servers-backup/* tykayn@192.168.1.15:/var/services/homes/tykayn/borg_archives/production-servers-backup --delete-before --inplace --perms --exclude="@eaDir" "${exclude_opts[@]}"
logDate ' to the NAS: downloaded videos';
#rsync -avhWP /home/poule/videos/DOCU-CONF-YOUTUBE/* tykayn@192.168.1.15:/volume1/bidules_partagés/videos/DOCU-CONF-YOUTUBE --delete-before --inplace --perms
logDate ' to the NAS: videos';
#rsync -avhWP /home/poule/videos/* tykayn@192.168.1.15:/volume1/bidules_partagés/videos --delete-before --inplace --perms
logDate ' to the NAS: music';
#rsync -avhWP /home/poule/music tykayn@192.168.1.15:/volume1/music --delete-before --inplace --perms
# from the NAS ----- the shared stuff (bidules_partagés)
logDate 'from the NAS: bidules_partagés Documents administratifs';
#rsync -avhWP tykayn@192.168.1.15:/volume1/bidules_partagés/Documents\\\ administratifs /home/poule/encrypted/bidules_partagés_backup --delete-before --inplace --perms "${exclude_opts[@]}"
logDate 'from the NAS: bidules_partagés Briis';
#rsync -avhWP tykayn@192.168.1.15:/volume1/bidules_partagés/Briis /home/poule/encrypted/bidules_partagés_backup --delete-before --inplace --perms "${exclude_opts[@]}"
logDate 'from the NAS: bidules_partagés Mariage';
#rsync -avhWP tykayn@192.168.1.15:/volume1/bidules_partagés/Mariage /home/poule/encrypted/bidules_partagés_backup --delete-before --inplace --perms "${exclude_opts[@]}"
logDate 'from the NAS: bidules_partagés wulfila_home without the computer backups';
#rsync -avhWP tykayn@192.168.1.15:/volume1/bidules_partagés/wulfila_home /home/poule/encrypted/other_people_content --inplace --exclude=TK-LAND --exclude=musique_tykayn --exclude=windows_backup_laptop_claire --delete-before --inplace --perms "${exclude_opts[@]}"
logDate 'from the NAS: done';
echo "### ${today} end" >> $LOG_FILE_BACKUP_DATES
date -ud "@$SECONDS" | tee -a $LOG_FILE_BACKUP 2>&1
echo "see the logs: gedit $LOG_FILE_BACKUP"
echo "see the per-date section logs: gedit $LOG_FILE_BACKUP_DATES"
echo " " | tee -a $LOG_FILE_BACKUP 2>&1
echo "size of the BAZAR folder: $ARCHIVE_SYNCABLE/BAZAR " | tee -a $LOG_FILE_BACKUP 2>&1
du -sch $ARCHIVE_SYNCABLE/BAZAR | tee -a $LOG_FILE_BACKUP 2>&1
date -ud "@$SECONDS" | tee -a $LOG_FILE_BACKUP_DATES 2>&1
echo " " | tee -a $LOG_FILE_BACKUP 2>&1
echo " " | tee -a $LOG_FILE_BACKUP 2>&1
logDate 'end of sync_spaceship.sh';

View File

@@ -1,28 +0,0 @@
#!/bin/bash
touch ~/Nextcloud/textes/orgmode/incoming_inbox.org
# compare the nextcloud todo file with the base_inbox.org template file
# if a difference is found, migrate the notes
if ! cmp ~/Nextcloud/Notes/todo.txt ~/Nextcloud/textes/orgmode/base_inbox.org >/dev/null 2>&1
then
echo "the two files differ"
echo "lines to copy from the nextcloud notes: "
cat ~/Nextcloud/Notes/todo.txt
echo " "
cat ~/Nextcloud/Notes/todo.txt | wc -l
echo "lines"
echo " "
sed -i 's/\*\ /\*\*\ /g' ~/Nextcloud/Notes/todo.txt
cat ~/Nextcloud/Notes/todo.txt >> ~/Nextcloud/textes/orgmode/incoming_inbox.org
echo "copied into ~/Nextcloud/textes/orgmode/incoming_inbox.org"
echo " "
echo " lines in incoming_inbox.org"
cat ~/Nextcloud/textes/orgmode/base_inbox.org > ~/Nextcloud/Notes/todo.txt
echo "reset ~/Nextcloud/Notes/todo.txt from the base_inbox.org template"
else
echo "Nothing to add to the incoming inbox file"
fi

View File

@@ -1,35 +0,0 @@
#!/bin/bash
# @author script by @tykayn - contact at cipherbliss.com
# refresh the assets of the versioned example repository with the ones currently in use.
# to be installed on a computer used as the asset reference
# install it as a cronjob with:
# crontab -e
#
# m h dom mon dow command
# */30 * * * * bash /home/tykayn/www/scripts/refresh_from_current_assets.sh
# configs
export USERNAME_CURRENT=tykayn
export HOME_OF_SCRIPTS=/home/$USERNAME_CURRENT/www/scripts
export HOME_OF_USERNAME_CURRENT=/home/$USERNAME_CURRENT
echo "updating the reference assets in the custom scripts $HOME_OF_SCRIPTS from the current computer"
# orgmode
cp -r "$HOME_OF_USERNAME_CURRENT/.emacs" "$HOME_OF_SCRIPTS/assets/org"
cp -r "$HOME_OF_USERNAME_CURRENT/Nextcloud/textes/orgmode/config.org" "$HOME_OF_SCRIPTS/assets/org"
cp -r "$HOME_OF_USERNAME_CURRENT/Nextcloud/textes/orgmode/style.css" "$HOME_OF_SCRIPTS/assets/org"
cp -r "$HOME_OF_USERNAME_CURRENT/Nextcloud/textes/orgmode/upcalendar.sh" "$HOME_OF_SCRIPTS/assets/org"
# backups
cp "$HOME_OF_USERNAME_CURRENT/sync_spaceship.sh" "$HOME_OF_SCRIPTS/bash/backups"
cp "$HOME_OF_USERNAME_CURRENT/test-func.sh" "$HOME_OF_SCRIPTS/bash/backups"
cp "$HOME_OF_USERNAME_CURRENT/functions_sync.sh" "$HOME_OF_SCRIPTS/bash/backups"
echo "HOME_OF_SCRIPTS : $HOME_OF_SCRIPTS"
ls -l "$HOME_OF_SCRIPTS/assets/org"
cd $HOME_OF_SCRIPTS || exit 1
git add .
git commit -m "update of assets from refresh script"
git push origin

style.css
View File

@@ -1,164 +0,0 @@
body {
max-width: 80vw;
margin: 1rem auto;
}
#table-of-contents{
font-size: 1rem;
border-left: solid 3px;
padding-left: 1rem;
}
#table-of-contents h2{
font-size: 1rem;
text-align:left;
}
.section-number-2{
display:none;
}
.title sub{
font-size: 1em;
margin-left: 0.5ch;
}
.timestamp{
font-weight:700;
color: crimson;
}
h1 {
color: #111;
font-family: 'Open Sans Condensed', sans-serif;
font-size: 64px;
font-weight: 700;
line-height: 64px;
margin: 0 0 0;
padding: 20px 30px;
text-align: center;
text-transform: uppercase;
}
h2 {
color: #111;
font-family: 'Open Sans Condensed', sans-serif;
font-size: 48px;
font-weight: 700;
line-height: 48px;
margin: 0 0 24px;
padding: 0 30px;
text-align: center;
text-transform: uppercase;
}
p {
color: #111;
font-family: 'Calibri', 'Open Sans', sans-serif;
font-size: 1rem;
line-height: 1.5rem;
margin: 0 0 2rem;
/*columns:3;*/
}
a {
color: #990000;
text-decoration: none;
}
a:hover {
text-decoration: underline
}
.date {
color: #111;
display: block;
font-family: 'Open Sans', sans-serif;
font-size: 16px;
position: relative;
text-align: center;
z-index: 1;
background: white;
}
.date:before {
border-top: 1px solid #111;
content: "";
position: absolute;
top: 0rem;
left: 0;
width: 100%;
z-index: -1;
}
.author {
color: #111;
display: block;
font-family: 'Open Sans', sans-serif;
font-size: 16px;
padding-bottom: 38px;
position: relative;
text-align: center;
z-index: 1;
background: white;
}
.author:before {
border-top: 1px solid #111;
content: "";
position: absolute;
top: 0rem;
left: 0;
width: 100%;
z-index: -1;
}
.date span,
.author span {
background: #fdfdfd;
padding: 0 10px;
text-transform: uppercase;
}
.line {
border-top: 1px solid #111;
display: block;
margin-top: 60px;
padding-top: 50px;
position: relative;
}
.read-more {
-moz-border-radius: 50%;
-moz-transition: all 0.2s ease-in-out;
-webkit-border-radius: 50%;
-webkit-transition: all 0.2s ease-in-out;
background: #111;
border-radius: 50%;
border: 10px solid #fdfdfd;
color: #fff;
display: block;
font-family: 'Open Sans', sans-serif;
font-size: 14px;
height: 80px;
line-height: 80px;
margin: -40px 0 0 -40px;
position: absolute;
bottom: 0px;
left: 50%;
text-align: center;
text-transform: uppercase;
width: 80px;
}
.read-more:hover {
background: #990000;
text-decoration: none;
}
.org-src-container{
background: #dedede;
padding:0.5rem;
margin-bottom: 2rem;
}
pre.example{
background: #ccc;
padding: 0.5rem;
margin: 1rem;
}

View File

@@ -1,84 +0,0 @@
#!/bin/bash
#####################################################
# author: @tykayn@mastodon.cipherbliss.com
# website: https://www.cipherbliss.com
#
# update all listed git projects in the home folder/www
# list of framagit repos to clone. Run this command to make it work
# ------------------------------------------------------------------------------------------------
#
# cd ~/Téléchargements
# curl -s https://forge.chapril.org/tykayn/scripts/raw/branch/master/update_git_projects.sh | bash
#
# ------------------------------------------------------------------------------------------------
#####################################################
######################################################
# list of all the projects for each software forge
#####################################################
declare -a list_repos_framagit=("caisse-bliss" "joinfediverse" "date-poll-api" "mastodon" "peertube" "events-liberator" "gitall" "dotclear-importer" "mobilizon" "fanzine-log" "crossed-words" "generator-tk" "circles" "card-deck" "sfprobe" "mastermind" "portfolio" "time-tracker" "cipherbliss.com" "caisse-bliss-frontend" "compta" "trafficjam" "ical-generator" "blueprint-cipherbliss" "dotclear2wordpress" "api" "diaspora" )
declare -a list_repos_forge_chapril=("transcription" "org-report-stats" "multi-account-post-schedule-mastodon" "framalibre-scraping" "scripts" "melting-pot" "funky-framadate-front" "rss-feeder-mobilizon" "mastodon-archive-stats" "gtg2json" "libreavous-audio-reader" "osm_my_commerce" "fromage-js" "ueberauth_openstreetmap" "events-liberator")
prefix_framagit='https://framagit.org/tykayn/'
prefix_forgechapril='https://forge.chapril.org/tykayn/'
cloning_place="/home/$USER/www/"
cd $cloning_place
pwd
# function that takes a base url and a list of repository names to clone
# if you find how to easily pass an array as an argument to a bash function, go write the function so that one can call "pullOrCreateRepo base_url repo_list" (a possible sketch follows below)
#function pullOrCreateRepo(){
#}
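# A possible sketch answering that question (not wired into the loops below): pass the
# base URL as the first argument and the repository names as the remaining arguments,
# for example: pullOrCreateRepo "$prefix_framagit" "${list_repos_framagit[@]}"
function pullOrCreateRepo(){
local base_url="$1"
shift
for project_name in "$@";
do
cd $cloning_place
if [ ! -d "$project_name" ]
then
echo "+++++ cloning ${project_name}"
echo "from ${base_url}${project_name}.git"
git clone "${base_url}${project_name}.git"
else
echo "##### update project $project_name"
cd $cloning_place$project_name
git fetch origin
git config pull.ff only
git pull
fi
done
}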
#####################################################
# all of this is quite verbose.
# how it works:
# run the clones and git pull on each repository
# test existence of a folder
# if there is no folder, clone it
# else, update with fetch from origin
#####################################################
echo "----------- from framagit"
for project_name in "${list_repos_framagit[@]}";
do
cd $cloning_place
if [ ! -d "$project_name" ]
then
echo "+++++ cloning ${project_name}"
echo "from ${prefix_framagit}${project_name}.git"
git clone "${prefix_framagit}${project_name}.git"
else
echo "##### update project $project_name"
cd $cloning_place$project_name
git fetch origin
git config pull.ff only
git pull
fi
done
echo "----------- from forge chapril"
for project_name in "${list_repos_forge_chapril[@]}";
do
cd $cloning_place
if [ ! -d "$project_name" ]
then
echo "+++++ cloning ${project_name}"
echo "from ${prefix_forgechapril}${project_name}.git"
git clone "${prefix_forgechapril}${project_name}.git"
else
echo "##### update project $project_name"
cd $cloning_place$project_name
git fetch origin
git config pull.ff only
git pull
fi
done
# you can add your other repositories here
cd $cloning_place
ls -l |wc -l
echo "update done"