Compare commits

..

78 Commits

Author SHA1 Message Date
cbb57b8493 add redirect www.clarissariviere.fr 2023-06-15 21:20:12 +02:00
42cd55258c add protocol 2023-05-25 00:33:22 +02:00
4c433bfde8 fix gouter reverse 2023-04-22 12:33:29 +02:00
59c2b26213 add if noindex 2023-04-20 20:26:38 +02:00
0b8994be5d add keywords 2023-04-04 22:48:03 +02:00
9be3fb9f85 fix header http 2023-03-19 11:56:39 +01:00
e821c4d26a replace http header link by sub_filter link 2023-03-19 11:51:50 +01:00
178fe9fcf4 fix debug backup script 2023-03-18 21:17:55 +01:00
f258895234 add debug for blacklist 2023-03-18 21:00:40 +01:00
34076c243e Merge pull request 'alternate' (#3) from alternate into master
Reviewed-on: #3
2023-03-13 13:02:56 +00:00
0bde1ce7c4 multi-clustering blacklist 2023-03-12 22:36:59 +01:00
69d9f5309d add redirect page 2023-03-08 20:40:22 +01:00
ccc12e6b6f add exclude ip 2023-03-04 10:53:37 +01:00
3c1f18d417 fix path iptables for refill_blacklist 2023-03-04 10:47:04 +01:00
c1197dec5c fix path iptables executable 2023-03-03 19:45:51 +01:00
76deffe6f4 fix blacklist 2023-03-02 23:56:46 +01:00
17cc54b85f fix mail backup 2023-03-02 21:59:12 +01:00
2b63ec3527 remove if 2023-02-28 11:04:30 +01:00
8219583961 add alternate link 2023-02-28 11:03:42 +01:00
79811d9fe5 Merge pull request 'configure' (#2) from configure into master
Reviewed-on: #2
2023-02-27 21:53:16 +00:00
ddb954f111 backup blacklist 2023-02-27 22:52:15 +01:00
171392bf04 add deconfigure backup 2023-02-27 22:47:49 +01:00
3b8652a97a add script restore 2023-02-27 22:39:25 +01:00
767c45eb30 fix script backup for new version duplicity 2023-02-27 22:31:21 +01:00
85f7573262 ps management 2023-02-26 22:32:10 +01:00
f519194c13 process management 2023-02-26 22:28:47 +01:00
ef64e724cb remove r 2023-02-24 23:33:51 +01:00
c5c18b05a0 add backup script 2023-02-17 18:04:21 +01:00
dc27773f24 replace by remove 2023-02-17 16:20:37 +01:00
663b7c4517 add robots 2023-02-16 23:06:03 +01:00
8c85e36d85 add canonical web 2023-02-15 22:25:05 +01:00
62af6912cd replace when by tag specific 2023-02-15 21:54:20 +01:00
18da14f2de add check robot yandex 2023-02-14 22:20:37 +01:00
39fab7b0dc create directory supervision 2023-02-14 22:19:01 +01:00
d475156b3e move virtualhost template 2023-02-14 21:39:20 +01:00
9993844f02 backup and restore blacklist file 2023-02-13 23:52:47 +01:00
7d5f86b045 test configure and deconfigure 2023-02-12 22:28:32 +01:00
1e605b0109 add deconfigure tasks 2023-02-12 21:58:18 +01:00
c22d70bcdd add deconfigure tasks 2023-02-12 21:44:19 +01:00
98ee7045f6 configure finish 2023-02-12 21:25:57 +01:00
c54c384178 finish configure stats 2023-02-12 19:15:48 +01:00
87ea635874 templatizing virtualhost getinfo_day 2023-02-12 18:38:38 +01:00
6b7d2ff9a9 add configure file 2023-02-12 18:13:05 +01:00
a40f922246 certificate renewal script 2023-02-08 22:08:11 +01:00
8f07dc6185 optimize blacklist script 2023-02-08 21:17:11 +01:00
b7addc2aee more detail in the mail 2023-02-07 22:33:52 +01:00
9041206f14 add location validation google 2023-02-06 23:02:40 +01:00
3dff2b0975 fix mail and ipinfo 2023-02-05 22:13:55 +01:00
02028f3bf3 send IP information 2023-02-05 20:58:28 +01:00
c7ddbd20b6 add iptables blacklist 2023-02-05 17:58:15 +01:00
cc219924cc add iptables rules 2023-02-04 18:38:19 +01:00
0ea6c30c9b fix blacklist script 2023-02-03 07:09:23 +01:00
28fba64c55 variable files blacklist 2023-02-02 23:35:06 +01:00
8a5746cb13 add blacklist sh 2023-01-25 20:09:19 +01:00
8e2211e365 add month stat 2023-01-23 23:56:58 +01:00
8ed95bc32b fix script day and week 2023-01-23 23:45:51 +01:00
5dca6fbdc9 add sub_filter_types 2023-01-23 23:35:00 +01:00
1239415279 get info by week 2023-01-22 21:28:39 +01:00
0b614d02a8 get info per week 2023-01-22 21:23:34 +01:00
c5e0bbe6fd change variable for mail 2023-01-22 21:22:45 +01:00
9d053e3eb4 getinfo error 400/404 2023-01-22 21:07:54 +01:00
5915385fea add script sentinel 2023-01-22 20:51:28 +01:00
e5a64b1846 add nginx.conf and deploy it 2023-01-21 18:32:20 +01:00
568cf5e444 access_log and error_log specific 2023-01-17 22:41:39 +01:00
bfe80698c1 remove useless comment + add ignore headers to enable cache 2023-01-16 21:15:03 +01:00
afd5175df8 update conf gouter 2023-01-14 18:48:17 +01:00
d90a593518 change domain name 2023-01-02 17:57:54 +01:00
ccd0c1d4bb Merge pull request 'deploy-nginx' (#1) from deploy-nginx into master
Reviewed-on: #1
2022-12-28 17:10:14 +00:00
ae0655da7c add deploy single project 2022-12-28 18:08:18 +01:00
19ecb0bca1 add destroy files 2022-12-20 00:14:15 +01:00
bb9ca607a0 add reverse proxy 2022-12-20 00:04:33 +01:00
61b59c8a20 change name to git_name 2022-12-19 23:50:57 +01:00
1e3e08b555 change variable 2022-12-19 23:46:55 +01:00
1996945eb4 add destroy web project 2022-11-12 20:45:00 +01:00
a647946652 add stages 2022-11-12 15:30:59 +01:00
11d49e0ccb change name playbook 2022-11-12 15:26:32 +01:00
7509c69283 finish unarchive git project 2022-11-12 15:25:20 +01:00
dc5ce05094 deploy web wip 2022-11-12 14:43:59 +01:00
36 changed files with 1376 additions and 31 deletions

View File

@@ -1,5 +0,0 @@
[defaults]
remote_user = admloc
private_key_file = /home/provisioner/.ssh/id_rsa_toolbox
inventory = hosts

View File

@@ -0,0 +1,78 @@
51.222.107.37
45.33.110.22
185.142.236.35
164.92.135.200
46.101.166.31
195.181.163.29
206.189.47.168
103.74.54.128
185.180.143.140
146.0.77.38
172.104.249.218
137.184.200.131
128.90.135.254
134.209.70.98
3.235.198.47
71.6.199.23
20.84.48.39
193.42.33.15
167.235.148.2
54.74.107.180
170.187.229.101
165.227.89.199
165.22.98.234
34.122.37.133
167.172.141.44
167.172.142.119
134.209.207.188
34.125.93.26
34.162.183.125
139.59.138.104
35.245.198.244
143.198.85.144
157.245.136.150
185.134.23.83
20.125.115.103
185.163.109.66
128.199.85.172
148.153.45.238
185.142.236.34
45.13.227.172
18.204.48.86
34.106.22.184
51.222.107.37
106.75.176.55
146.190.84.120
143.198.213.67
34.125.234.83
43.130.152.82
45.59.163.17
172.94.9.227
148.153.45.236
68.183.183.237
89.187.162.187
206.189.38.98
2.57.122.253
172.105.190.200
15.188.26.9
13.49.23.69
20.87.214.199
45.55.64.12
188.166.187.222
167.71.24.123
185.213.174.115
165.232.174.66
161.35.153.48
54.219.74.101
2.57.122.81
165.22.96.121
43.153.118.27
43.153.12.17
185.180.143.141
18.144.156.146
193.56.29.113
3.129.42.4
169.197.143.220
3.15.142.108
139.162.34.62
80.66.79.22

hosts (2 lines changed)
View File

@@ -1,2 +0,0 @@
centos-test ansible_connection=ssh ansible_host=1.1.110.25

View File

@@ -0,0 +1,36 @@
$ANSIBLE_VAULT;1.1;AES256
65333737373761626438343263333163623934626161313738303239383134333133313661333739
3765666232653562383861643033356535383230613564330a343931393265303332346339373161
31653534646463333138633564663238323664313432343666613633353538323530323631326665
3133303461303966310a626664396637313532313666386236303765613530343863636636346334
33633536656337643962663564656465666636623734376162366233643431343966373737613064
62336665386635316433636166353263356131383632616665643935616131333230343965613834
63323363616535363437306362613934633533386438353466353138386438313063316565616636
66643535356364396230653032643661316534356266333035323766306165383562653836313532
31396432316564633933363338393535363937386533343137373664366538323836343038313062
61383630386233313034353966383265333735303064333535643738633362336362323565326131
66666565376663383733616136386462353937613364653932353062386665623439613933366535
61343134323031343133626265336231306131376661396163333939643561356363306333666637
64353137643238653562643034383262356266366636333135616262643436363638666166336565
63346131346238666166303338303264363634373635663830663636656661303935623239346339
33306564313566343339626362333735343737333763616330303266353836303438323131306161
61303633636335636335383734326638663238313961653561613164333865383364323234383133
65626130376434343165373531643935616431316631636165323365376564646535613534616237
65303430373336383436373162376536376563623730343237366435653163613337303538643062
66643361613732366431336231363133326435623361663366646537386433613262326161303966
39363732653361646534653866326436666462346235376664623039343431373938666266313034
62373639323039656266623562326634633131623964313666646463383064303266643162636362
35646563623533303466636631646339626464306665383266643839653734373465313538363035
33313762313934396137323433313238393239623831663430396530303764336338356366646264
36393038633033303066346339663939653964333735303465626139613464313437356264373562
34376230333834373831363661636461383763383138653537383235343132623830326532393564
61646265303835306534346433303138306632306163613336393834313337306233376665313262
35396630666162373432313939646537666335343835613363653334313234356564373431366537
37323838323835386538343261633762303035336665656638636165303130343733633766656333
33623861633664626232316434326138303539363130333561323630393932363735363362663832
64393965346131396236653864323930633763303435613330386236633164636465646664396530
35343838323364643236383334663432316339613231613030643935333932633732313635633164
63663861323663613931636238313862326364396538616463376533396136653266393136663265
65613862333066643030656263333534343161613638356264663635643430356563313561633535
30303165663931633761363633383237333765383332363962353530313036346561383539643966
336562336464303538313234386162383165

inventory/hosts (new file, 1 line)
View File

@@ -0,0 +1 @@
vps-host ansible_connection=ssh ansible_host=51.222.107.37 ansible_port=2424 ansible_user=valentin

View File

@@ -1,5 +1,5 @@
---
- hosts: all
remote_user: admloc
remote_user: valentin
roles:
- deploy-web

View File

@@ -0,0 +1,48 @@
#!/bin/bash
TAR=/usr/bin/tar
PYTHON=/usr/bin/python3
GZIP=/usr/bin/gzip
SCRIPTDIR=/home/valentin/script
WEBSCRAP=${SCRIPTDIR}/web_scrap.py
URL=www.clarissariviere.com
DATE=$(date +%Y%m%d)
DIRECTORY=/home/valentin/backup
BACKUPDIR=/home/valentin/backup_clarissa
LIST=${BACKUPDIR}/backup.list
fileBackup="backup-clarissa-${DATE}"
LOGFILE=web_scrap.txt
SENDER="valczebackup@gmail.com"
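# Monday run: reset the incremental snapshot list and drop old incrementals so a fresh full backup is taken; other days append an incremental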
if [ $(date +%u) -eq 1 ]; then
echo > ${LIST}
rm -rf "${BACKUPDIR}/*-incr.tar.gz"
fileBackup="${fileBackup}-full"
subject="Sauvegarde full"
else
fileBackup="${fileBackup}-incr"
subject="Sauvegarde incremental"
fi
subject="${subject} ${URL} ${DATE}"
echo > ${BACKUPDIR}/${LOGFILE}
${PYTHON} ${WEBSCRAP} --url ${URL} --dir ${DIRECTORY} --logfile ${BACKUPDIR}/${LOGFILE} --quiet
if [ ${?} -ne 0 ]; then
subject="FAIL : ${subject} : recuperation page"
echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
exit 1
fi
${TAR} --create --file="${BACKUPDIR}/${fileBackup}.tar" --listed-incremental=${LIST} ${DIRECTORY}
if [ ${?} -ne 0 ]; then
subject="FAIL : ${subject} : archivage page "
echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
exit 1
fi
${GZIP} -f -9 "${BACKUPDIR}/${fileBackup}.tar"
if [ ${?} -ne 0 ]; then
subject="FAIL : ${subject} : compression archive "
echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
exit 1
fi
subject="OK : ${subject}"
echo ${subject}| mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
find ${BACKUPDIR} -name "*.tar.gz" -type f -ctime +90 -exec rm {} \;
exit 0

View File

@@ -0,0 +1,64 @@
#!/bin/bash
MAIL=/tmp/mail
SERVER_LOG=/var/log/nginx
HOST=($(cat /etc/sentinel/virtualhost))
BLACKLIST=/etc/sentinel/blacklist
EXCLUDE=/etc/sentinel/exclude
SENDER=/etc/sentinel/ip
SSH=$(cat /etc/sentinel/ssh_port)
IP=$(hostname -I |awk '{print $1}')
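# If the iptables BLACKLIST chain is missing (iptables -L prints nothing), rebuild it from the blacklist file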
chain_count=$(/usr/sbin/iptables -L BLACKLIST -n | wc -l)
if [ ${chain_count} -eq 0 ]; then
bash /usr/local/bin/sentinel/refill_blacklist.sh
fi
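# Merge blacklists pushed by peer proxies (files scp'd to /tmp/blacklist_<ip>), skip excluded IPs, and reload the chain when new entries appear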
list_sender=($(cat ${SENDER}))
for i in ${list_sender[@]}
do
if [ -f /tmp/blacklist_${i} ]; then
count_ip=$(cat ${BLACKLIST} /tmp/blacklist_${i} |grep -f ${EXCLUDE} -v |sort |uniq -ui |wc -l)
cat ${BLACKLIST} /tmp/blacklist_${i} |grep -f ${EXCLUDE} -v |sort |uniq -u >> ${BLACKLIST}
if [ ${count_ip} -ne 0 ]; then
bash /usr/local/bin/sentinel/refill_blacklist.sh
fi
fi
done
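# Scan the last 50 access-log lines of each virtualhost: IPs with 5 or more 400/404 hits are blacklisted, reported by mail and pushed to the peer proxies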
for i in ${HOST[@]}
do
log_access=${SERVER_LOG}/${i}_access.log
tail -n 50 $log_access | awk -F "|" '{ if($2 == "400" || $2 == "404") print $0}' > /tmp/error_$i
cat /tmp/error_$i | awk -F "|" '{ if($2 == "404") print $1}' > /tmp/404_$i
cat /tmp/error_$i | awk -F "|" '{ if($2 == "400") print $1}' > /tmp/400_$i
cat /tmp/404_$i | sort | uniq -c | awk '{ if($1 >= 5) print $2}' > /tmp/blacklist_404
cat /tmp/400_$i |sort | uniq -c |awk '{ if($1 >= 5) print $2}' > /tmp/blacklist_400
count=$(cat /tmp/blacklist_404 /tmp/blacklist_400 |grep -f ${BLACKLIST} -v |grep -f ${EXCLUDE} -v |sort |uniq |wc -l)
if [ ${count} -ne 0 ]; then
echo "Nouvelle IP blacklisté" > ${MAIL}
list_ip=($(cat /tmp/blacklist_400 /tmp/blacklist_404 |grep -f ${BLACKLIST} -v |grep -f ${EXCLUDE} -v |sort |uniq))
for j in ${list_ip[@]}
do
echo ${j} >> ${MAIL}
curl http://ipinfo.io/${j} >> ${MAIL}
echo "" >> ${MAIL}
cat /tmp/error_$i | grep ${j} >> ${MAIL}
echo "" >> ${MAIL}
echo ${j} >> ${BLACKLIST}
/usr/sbin/iptables -A BLACKLIST -s ${j} -j DROP
done
for j in ${list_sender[@]}
do
echo "Blacklist envoye a ${j}" >> ${MAIL}
echo "scp -i /home/valentin/.ssh-blacklist/id_rsa -P ${SSH} ${BLACKLIST} blacklist_user@${j}:/tmp/blacklist_${IP}" >> ${MAIL}
scp -i /home/valentin/.ssh-blacklist/id_rsa -P ${SSH} ${BLACKLIST} blacklist_user@${j}:/tmp/blacklist_${IP}
if [ ${?} -ne 0 ]; then
echo "Error SCP for ${j}" >> ${MAIL}
fi
done
echo "IP dejà blacklisté : " >> ${MAIL}
cat ${BLACKLIST} >> ${MAIL}
cat ${MAIL} |mail -s "Blacklist IP ${i}" valczebackup@gmail.com
fi
done

View File

@@ -0,0 +1,10 @@
#!/bin/bash
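# Compare the certificate's notAfter date with the current time and run certbot renew once it has expired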
expire=$(echo | openssl s_client -connect clarissariviere.com:443 2>/dev/null | openssl x509 -noout -enddate |awk -F "=" '{print $2}')
timeExpire=$(date --date="${expire}" +%s)
now=$(date +%s)
if [ ${now} -gt ${timeExpire} ]; then
certbot renew
echo "Certificat renouvellé" | mail -s "certificat renouvelle" valczebackup@gmail.com
fi

View File

@@ -0,0 +1,59 @@
#!/bin/bash
MAIL=/tmp/mail
DIRECTORY=/home/valentin/mail
SERVER_LOG=/var/log/nginx
TOKEN=$(cat /etc/sentinel/token)
DATE=$(date +%Y%m%d-%H%M%S)
HOST=($(cat /etc/sentinel/virtualhost))
WEEK=$(date +%V)
DAY=$(date +%u)
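# On Mondays the rotated log belongs to the previous ISO week, so step WEEK back by one (wrapping 01 -> 53)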
if [ ${DAY} -eq 1 ]; then
if [ ${WEEK} -ne "01" ]; then
WEEK=$(echo "$WEEK-1" |bc)
if [ ${#WEEK} -eq 1 ]; then
WEEK="0${WEEK}"
fi
else
WEEK="53"
fi
fi
for i in ${HOST[@]}
do
directory_host=$DIRECTORY/$i/$WEEK
log_access=${SERVER_LOG}/${i}_access.log.1
mkdir -pv $directory_host
cat $log_access | grep "|" | awk -F "|" '{print $1}' | sort | uniq > $directory_host/list_$DATE
cat $log_access | awk -F "|" '{ if($2 == "404") print $1}' > $directory_host/404_$DATE
cat $log_access | awk -F "|" '{ if($2 == "400") print $1}' > $directory_host/400_$DATE
grep robots.txt $log_access | awk -F '|' '{print $3}' > $directory_host/robots_$DATE
while read line; do
if grep $line $DIRECTORY/*/*/output_*.txt > /dev/null 2>&1; then
grep -h -B1 -A8 $line $DIRECTORY/*/*/output_*.txt |head -10 >> $directory_host/output_$DATE.txt
else
curl "ipinfo.io/$line?token=$TOKEN" >> $directory_host/output_$DATE.txt
fi
echo >> $directory_host/output_$DATE.txt
done <$directory_host/list_$DATE
echo "nombre de visite : $(wc -l $directory_host/list_$DATE |cut -d ' ' -f1)" > ${MAIL}
echo "nombre de visite par pays, par region et par ville : " >> ${MAIL}
LIST=("country" "region" "city")
for j in ${LIST[@]}
do
echo "----${j}------" >> ${MAIL}
cat $directory_host/output_$DATE.txt |grep "${j}" |sort |uniq -c >> ${MAIL}
echo "--------------" >> ${MAIL}
done
ERROR=("400" "404")
for j in ${ERROR[@]}
do
echo "nombre erreur ${j} par IP : " >> ${MAIL}
cat $directory_host/${j}_$DATE |sort |uniq -c >> ${MAIL}
echo "--------" >> ${MAIL}
done
echo "nombre de robots " >> ${MAIL}
cat $directory_host/robots_$DATE |sort |uniq -c >> ${MAIL}
echo "---------" >> ${MAIL}
cat ${MAIL} |mail -s "Rapport reverse proxy $DATE" -A $directory_host/output_$DATE.txt valczebackup@gmail.com
#rm $directory_host/*
done

View File

@@ -0,0 +1,32 @@
#!/bin/bash
MAIL=/tmp/mail_week
DIRECTORY=/home/valentin/mail
SERVER_LOG=/var/log/nginx
DATE=$(date +%Y%m%d-%H%M%S)
HOST=($(cat /etc/sentinel/virtualhost))
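# Aggregate every weekly directory of each virtualhost into a monthly report, then purge the collected files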
for i in ${HOST[@]}
do
directory_host=$DIRECTORY/$i/*
echo "nombre de visite : $(wc -l $directory_host/list_* |tail -n1 | awk '{print $1}')" > ${MAIL}
echo "nombre de visite par pays, par region et par ville : " >> ${MAIL}
LIST=("country" "region" "city")
for j in ${LIST[@]}
do
echo "----${j}------" >> ${MAIL}
cat $directory_host/output_*.txt |grep "${j}" |sort |uniq -c >> ${MAIL}
echo "--------------" >> ${MAIL}
done
ERROR=("400" "404")
for j in ${ERROR[@]}
do
echo "nombre erreur ${j} par IP :" >> ${MAIL}
cat $directory_host/${j}_* |sort |uniq -c >> ${MAIL}
echo "----------------" >> ${MAIL}
done
echo "nombre de robots :" >> ${MAIL}
cat $directory_host/robots_* |sort |uniq -c >> ${MAIL}
echo "----------------" >> ${MAIL}
cat ${MAIL} |mail -s "Rapport mensuel reverse proxy ${i} $DATE" valczebackup@gmail.com
rm -rf $directory_host
done

View File

@@ -0,0 +1,44 @@
#!/bin/bash
MAIL=/tmp/mail_week
DIRECTORY=/home/valentin/mail
SERVER_LOG=/var/log/nginx
TOKEN=af920d2f7dbe97
DATE=$(date +%Y%m%d-%H%M%S)
HOST=($(cat /etc/sentinel/virtualhost))
WEEK=$(date +%V)
DAY=$(date +%u)
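# Same previous-week adjustment as getinfo_day.sh; this script only summarizes the files already collected by the daily run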
if [ ${DAY} -eq 1 ]; then
if [ ${WEEK} -ne "01" ]; then
WEEK=$(echo "$WEEK-1" |bc)
if [ ${#WEEK} -eq 1 ]; then
WEEK="0${WEEK}"
fi
else
WEEK="53"
fi
fi
for i in ${HOST[@]}
do
directory_host=$DIRECTORY/$i/$WEEK
echo "nombre de visite : $(wc -l $directory_host/list_* |tail -n1 | awk '{print $1}')" > ${MAIL}
echo "nombre de visite par pays, par region et par ville : " >> ${MAIL}
LIST=("country" "region" "city")
for j in ${LIST[@]}
do
echo "----${j}------" >> ${MAIL}
cat $directory_host/output_*.txt |grep "${j}" |sort |uniq -c >> ${MAIL}
echo "--------------" >> ${MAIL}
done
ERROR=("400" "404")
for j in ${ERROR[@]}
do
echo "nombre erreur ${j} par IP :" >> ${MAIL}
cat $directory_host/${j}_* |sort |uniq -c >> ${MAIL}
echo "----------------" >> ${MAIL}
done
echo "nombre de robots :" >> ${MAIL}
cat $directory_host/robots_* |sort |uniq -c >> ${MAIL}
echo "----------------" >> ${MAIL}
cat ${MAIL} |mail -s "Rapport hebdomadaire reverse proxy ${i} $DATE" valczebackup@gmail.com
#rm $directory_host/*
done

View File

@@ -0,0 +1,103 @@
server {
if ($host = clarissariviere.fr) {
return 301 "https://www.clarissariviere.com$request_uri";
} # managed by Certbot
if ($host = www.clarissariviere.fr) {
return 301 "https://www.clarissariviere.com$request_uri";
} # managed by Certbot
if ($host = clarissariviere.com) {
return 301 "https://www.clarissariviere.com$request_uri";
} # managed by Certbot
access_log /var/log/nginx/clarissa_access.log main;
error_log /var/log/nginx/clarissa_error.log;
#gzip_static off;
server_name clarissariviere.com clarissariviere.fr www.clarissariviere.fr www.clarissariviere.com;
add_header 'Content-Security-Policy' 'upgrade-insecure-requests';
#add_header Link "<https://www.clarissariviere.com$request_uri;> rel=\"canonical\", <https://www.clarissariviere.fr$request_uri;> rel=\"alternate\" hreflang=\"fr\"";
proxy_cache STATIC;
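# Advertise a canonical Link header; /tag/ pages point back to the site root so they are not indexed as duplicates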
set $header "<https://$host$request_uri;> rel=\"canonical\"";
if ($request_uri ~ "/tag/") {
set $header "<https://$host;> rel=\"canonical\"";
set $link "";
}
add_header Link $header;
location / {
# First attempt to serve request as file, then
# as directory, then fall back to displaying a 404.
#try_files $uri $uri/ =404;
proxy_set_header Accept-Encoding "";
proxy_pass http://gouters.canalblog.com/;
#add_header Link "<https://www.clarissariviere.com; rel=\"canonical\">";
#proxy_redirect off;
#proxy_set_header Host $host;
#proxy_buffering on;
#proxy_cache STATIC;
proxy_cache_key $scheme://$host$uri$is_args$query_string;
proxy_cache_valid 200 10m;
proxy_cache_lock on;
proxy_cache_use_stale error timeout invalid_header updating http_500 http_502 http_503 http_504;
proxy_ignore_headers X-Accel-Expires Expires Cache-Control;
#proxy_ssl_verify off;
#proxy_set_header X-Real-IP $remote_addr;
#proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
#proxy_set_header X-Forwarded-Proto $scheme;
sub_filter 'http://gouters.canalblog.com' "https://$host";
sub_filter '<meta name="generator" content="CanalBlog - https://www.canalblog.com" />' '';
sub_filter '<meta name="Keywords" content="' '<meta name="Keywords" content="clarissa, riviere, ';
sub_filter_types text/html text/xml text/plain text/css;
sub_filter_once off;
}
location /googlebbc3cfa6d1866691.html {
root /var/www/gouter/;
}
location /yandex_93259fe4480c9828.html {
root /var/www/gouter/;
}
location /yandex_f07f7ace7d8459d8.html {
root /var/www/gouter/;
}
listen 443 ssl; # managed by Certbot
ssl_certificate /etc/letsencrypt/live/clarissariviere.com/fullchain.pem; # managed by Certbot
ssl_certificate_key /etc/letsencrypt/live/clarissariviere.com/privkey.pem; # managed by Certbot
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
}
server {
if ($host = www.clarissariviere.fr) {
return 301 https://$host$request_uri;
} # managed by Certbot
if ($host = www.clarissariviere.com) {
return 301 https://$host$request_uri;
} # managed by Certbot
if ($host = clarissariviere.fr) {
return 301 https://"www.clarissariviere.fr"$request_uri;
} # managed by Certbot
if ($host = clarissariviere.com) {
return 301 https://"www.clarissariviere.com"$request_uri;
} # managed by Certbot
server_name clarissariviere.com clarissariviere.fr www.clarissariviere.fr www.clarissariviere.com;
listen 80;
return 404; # managed by Certbot
}

View File

@@ -0,0 +1,85 @@
user www-data;
worker_processes 2;
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;
events {
worker_connections 768;
# multi_accept on;
}
http {
##
# Basic Settings
##
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
server_tokens off;
# server_names_hash_bucket_size 64;
# server_name_in_redirect off;
include /etc/nginx/mime.types;
default_type application/octet-stream;
##
# SSL Settings
##
ssl_protocols TLSv1 TLSv1.1 TLSv1.2; # Dropping SSLv3, ref: POODLE
ssl_prefer_server_ciphers on;
##
# Logging Settings
##
log_format main '$remote_addr|$status|$body_bytes_sent|$http_referer|$http_user_agent|$http_x_forwarded_for|$remote_user [$time_local]|$request';
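# illustrative example line (hypothetical values): 203.0.113.7|404|162|-|curl/7.88.1|-|- [19/Mar/2023:11:56:39 +0100]|GET /missing HTTP/1.1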
access_log /var/log/nginx/access.log main;
error_log /var/log/nginx/error.log;
##
# Gzip Settings
##
gzip on;
# gzip_vary on;
# gzip_proxied any;
# gzip_comp_level 6;
# gzip_buffers 16 8k;
# gzip_http_version 1.1;
# gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
proxy_cache_path /data/nginx/cache keys_zone=STATIC:10m max_size=1g inactive=1d;
##
# Virtual Host Configs
##
include /etc/nginx/conf.d/*.conf;
include /etc/nginx/sites-enabled/*;
}
#mail {
# # See sample authentication script at:
# # http://wiki.nginx.org/ImapAuthenticateWithApachePhpScript
#
# # auth_http localhost/auth.php;
# # pop3_capabilities "TOP" "USER";
# # imap_capabilities "IMAP4rev1" "UIDPLUS";
#
# server {
# listen localhost:110;
# protocol pop3;
# proxy on;
# }
#
# server {
# listen localhost:143;
# protocol imap;
# proxy on;
# }
#}

View File

@@ -0,0 +1,38 @@
#!/bin/bash
IPTABLES=/usr/sbin/iptables
BLACKLIST=/etc/sentinel/blacklist
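# iptables -L prints two header lines, so an existing chain yields a count >= 2; 0 means the chain does not exist yet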
chain_count=$(${IPTABLES} -L BLACKLIST -n | wc -l)
if [ ${chain_count} -eq 0 ]; then
${IPTABLES} -N BLACKLIST
${IPTABLES} -I INPUT 1 -p tcp -m tcp --dport 80 -j BLACKLIST
${IPTABLES} -I INPUT 1 -p tcp -m tcp --dport 443 -j BLACKLIST
fi
if [ ! -f ${BLACKLIST} ]; then
touch ${BLACKLIST}
fi
if [ ${chain_count} -gt 2 ]; then
chain_count=$(echo ${chain_count}-2 |bc)
iptables_ip=($(${IPTABLES} -nvL BLACKLIST | tail -n ${chain_count} | awk '{print $8}'))
for i in $(cat ${BLACKLIST})
do
block_ip=1
for j in ${iptables_ip[@]}
do
if [ "${i}" == "${j}" ]; then
block_ip=0
fi
done
if [ ${block_ip} -eq 1 ]; then
${IPTABLES} -A BLACKLIST -s ${i} -j DROP
fi
done
else
for i in $(cat ${BLACKLIST})
do
${IPTABLES} -A BLACKLIST -s ${i} -j DROP
done
fi

View File

@@ -0,0 +1,51 @@
#!/bin/bash
LOGFILE_RECENT="/var/log/scw-log/logfile-recent.log"
LOGFILE="/var/log/scw-log/logfile.log"
DUPLICITY=/usr/local/bin/duplicity
log () {
date=`date +%Y-%m-%d`
hour=`date +%H:%M:%S`
echo "$date $hour $*" >> ${LOGFILE_RECENT}
}
rotate_log() {
cat ${LOGFILE_RECENT} >> ${LOGFILE}
status="OK"
if [ $(grep "Errors 0" ${LOGFILE_RECENT} |wc -l) -eq 0 ]; then
status="ALERTE FAIL !!!"
fi
cat ${LOGFILE_RECENT} |mail -s "${status} | Backup ${SCW_BUCKET} `date +%Y-%m-%d`" valczebackup@gmail.com
}
USER=$(whoami)
currently_backuping=$(ps -ef | grep duplicity | grep python |grep ${USER} | wc -l)
if [ $currently_backuping -eq 0 ]; then
if [ ${#} -ne 1 ]; then
log ">>> Il manque un paramètre ${0} : <CONFIGFILE>"
rotate_log
exit 1
fi
if [ ! -f ${1} ]; then
log ">>> Le paramètre n'est pas un fichier ${USER} : ${1}"
rotate_log
exit 1
fi
source "$1"
echo > ${LOGFILE_RECENT}
log ">>> removing old backups"
${DUPLICITY} remove-older-than --s3-endpoint-url ${SCW_ENDPOINT_URL} --s3-region-name ${SCW_REGION} ${KEEP_BACKUP_TIME} ${SCW_BUCKET} --force >> ${LOGFILE_RECENT} 2>&1
log ">>> creating and uploading backup to c14 cold storage ${SOURCE}"
${DUPLICITY} \
incr --full-if-older-than ${FULL_BACKUP_TIME} \
--s3-endpoint-url ${SCW_ENDPOINT_URL} \
--s3-region-name ${SCW_REGION} \
--asynchronous-upload \
--s3-use-glacier \
--encrypt-key=${GPG_FINGERPRINT} \
--sign-key=${GPG_FINGERPRINT} \
${SOURCE} ${SCW_BUCKET} >> ${LOGFILE_RECENT} 2>&1
rotate_log
else
log ">>> Duplicity déjà en cours de route sur cette utilisateur ${USER}"
rotate_log
fi

View File

@@ -0,0 +1,29 @@
#!/bin/bash
if [ $# -lt 3 ]; then
echo -e "Usage $0 <scw_configrc> <time or delta> [file to restore] <restore to>
Exemple:
\t$ $0 2018-7-21 recovery/ ## recovers * from closest backup to date
\t$ $0 0D secret data/ ## recovers most recent file nammed 'secret'";
exit; fi
source $1
shift
if [ $# -eq 2 ]; then
duplicity \
--s3-endpoint-url ${SCW_ENDPOINT_URL} \
--s3-region-name ${SCW_REGION} \
--time $1 \
${SCW_BUCKET} $2
fi
if [ $# -eq 3 ]; then
duplicity \
--s3-endpoint-url ${SCW_ENDPOINT_URL} \
--s3-region-name ${SCW_REGION} \
--time $1 \
--file-to-restore $2 \
${SCW_BUCKET} $3
fi

View File

@@ -0,0 +1,50 @@
# tasks file for backup script
- name: "Create log directories for backup script"
file:
path: "{{ item }}"
state: directory
with_items:
- "/var/log/scw-log"
- "/root/log"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Copy scw backup and restore script
copy:
src: "{{ item }}"
dest: "/opt/{{ item }}"
mode: "0500"
with_items:
- "scw-backup.sh"
- "scw-restore.sh"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Copy scw backup config
template:
src: "scw-configrc.j2"
dest: "/root/.scw-configrc"
mode: "0400"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
#- name: Crontab blacklist
# ansible.builtin.cron:
# name: "blacklist script"
# cron_file: "blacklist_cron"
# minute: "*/5"
# job: "bash /usr/local/bin/sentinel/blacklist.sh"
# user: root
#
# vars:
# ansible_become: yes
# ansible_become_method: sudo
# ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,27 @@
# tasks file for blacklist script
- name: Copy blacklist script
copy:
src: "{{ item }}.sh"
dest: "/usr/local/bin/sentinel/{{ item }}.sh"
mode: "0555"
with_items:
- blacklist
- refill_blacklist
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Crontab blacklist
ansible.builtin.cron:
name: "blacklist script"
cron_file: "blacklist_cron"
minute: "*/5"
job: "bash /usr/local/bin/sentinel/blacklist.sh"
user: root
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,59 @@
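# Role entry point: create /usr/local/bin/sentinel and /etc/sentinel, template the sentinel config files, restore the saved blacklist, then include the stats, blacklist, supervision and backup task files (each selectable by tag)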
- name: Create sentinel directory
file:
state: directory
path: "{{ item }}/sentinel"
with_items:
- /usr/local/bin
- /etc
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Template sentinel config files
template:
src: "{{ item }}.j2"
dest: "/etc/sentinel/{{ item }}"
mode: "0444"
with_items:
- virtualhost
- ip
- ssh_port
- exclude
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
tags: [ "configure_blacklist" ]
- name: Copy blacklist
copy:
src: "{{ playbook_dir }}/blacklist/{{ inventory_hostname }}/etc/sentinel/blacklist"
dest: /etc/sentinel/blacklist
mode: "0644"
when: script is not defined or script == "blacklist"
ignore_errors: true
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
tags: [ "restore_blacklist" ]
- name: Deploy stats script
import_tasks: stats.yml
tags: [ "configure_stats" ]
- name: Configure blacklist script
import_tasks: blacklist.yml
tags: [ "configure_blacklist" ]
- name: Configure supervision script
import_tasks: supervision.yml
tags: [ "configure_supervision" ]
- name: Configure backup script
import_tasks: backup.yml
tags: [ "configure_backup" ]

View File

@@ -0,0 +1,36 @@
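# Tear-down counterpart of main.yml: fetch the current blacklist as a backup, undo each configure task file, then remove the sentinel directories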
- name: Backup blacklist
fetch:
src: /etc/sentinel/blacklist
dest: blacklist
tags: [ "backup_blacklist" ]
- name: Deconfigure stats script
import_tasks: deconfigure_stats.yml
tags: [ "deconfigure_stats" ]
- name: Deconfigure blacklist script
import_tasks: deconfigure_blacklist.yml
tags: [ "deconfigure_blacklist" ]
- name: Deconfigure supervision script
import_tasks: deconfigure_supervision.yml
tags: [ "deconfigure_supervision" ]
- name: Deconfigure backup script
import_tasks: deconfigure_backup.yml
tags: [ "deconfigure_backup" ]
- name: Remove sentinel directory
file:
state: absent
path: "{{ item }}/sentinel"
with_items:
- /usr/local/bin
- /etc
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,31 @@
# tasks file for deconfiguring the backup script
- name: "Remove backup script, logs and config"
file:
path: "{{ item }}"
state: absent
with_items:
- "/var/log/scw-log"
- "/root/log"
- "/opt/scw-backup.sh"
- "/opt/scw-restore.sh"
- "/root/.scw-configrc"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
#- name: Crontab blacklist
# ansible.builtin.cron:
# name: "blacklist script"
# cron_file: "blacklist_cron"
# minute: "*/5"
# job: "bash /usr/local/bin/sentinel/blacklist.sh"
# user: root
#
# vars:
# ansible_become: yes
# ansible_become_method: sudo
# ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,25 @@
# tasks file for deconfiguring the blacklist script
- name: Remove crontab blacklist
ansible.builtin.cron:
name: "blacklist script"
cron_file: "blacklist_cron"
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Remove blacklist script
file:
path: "/usr/local/bin/sentinel/{{ item }}.sh"
state: absent
with_items:
- blacklist
- refill_blacklist
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,64 @@
# tasks file for deconfiguring the stats script
- name: Remove crontab get info day
ansible.builtin.cron:
name: "get info day"
cron_file: "get_info_day_cron"
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Remove crontab get info week
ansible.builtin.cron:
name: "get info week"
cron_file: "get_info_week_cron"
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Remove crontab get info month
ansible.builtin.cron:
name: "get info month"
cron_file: "get_info_month_cron"
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Remove getinfo script
file:
path: "/usr/local/bin/sentinel/getinfo_{{ item }}.sh"
state: absent
with_items:
- day
- week
- month
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Remove token
file:
path: "/etc/sentinel/token"
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Remove template virtualhost
file:
path: /etc/sentinel/virtualhost
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,22 @@
# tasks file for deconfiguring the supervision script
- name: Remove crontab check_ssl
ansible.builtin.cron:
name: "check ssl script"
cron_file: "check_ssl_cron"
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Remove check_ssl script
file:
path: "/usr/local/bin/sentinel/check_ssl.sh"
state: absent
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,151 @@
# tasks file for deploy-web
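# When project_name / conf_name are passed as extra-vars only that single project is handled; otherwise every entry of the project list (git_name / conf_name pairs) is deployed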
- name: "all git archive"
local_action:
module: git
repo: "https://{{ git_username | urlencode }}:{{ git_password | urlencode }}@git.valczeryba.ovh/v4l3n71n/{{ item.git_name }}.git"
dest: "/home/valentin/src/"
archive: "/tmp/{{ item.git_name }}.tar.gz"
force: yes
update: yes
when: "item.git_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
- name: "all create directory "
file:
path: "/var/www/{{ item.git_name }}"
state: directory
owner: www-data
group: www-data
mode: '500'
when: "item.git_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "all extract repo "
unarchive:
src: "/tmp/{{ item.git_name }}.tar.gz"
dest: "/var/www/{{ item.git_name }}"
owner: www-data
group: www-data
mode: '500'
when: "item.git_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "single git archive"
local_action:
module: git
repo: "https://{{ git_username | urlencode }}:{{ git_password | urlencode }}@git.valczeryba.ovh/v4l3n71n/{{ project_name }}.git"
dest: "/home/valentin/src/"
archive: "/tmp/{{ project_name }}.tar.gz"
force: yes
update: yes
when: "project_name is defined"
- name: "single create directory "
file:
path: "/var/www/{{ project_name }}"
state: directory
owner: www-data
group: www-data
mode: '500'
when: "project_name is defined"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "single extract repo "
unarchive:
src: "/tmp/{{ project_name }}.tar.gz"
dest: "/var/www/{{ project_name }}"
owner: www-data
group: www-data
mode: '500'
when: "project_name is defined"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "copy nginx.conf"
copy:
src: "nginx.conf"
dest: "/etc/nginx/"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "all copy configure nginx"
copy:
src: "{{ item.conf_name }}"
dest: "/etc/nginx/sites-available"
when: "item.conf_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "all create symlink"
file:
src: "/etc/nginx/sites-available/{{ item.conf_name }}"
dest: "/etc/nginx/sites-enabled/{{ item.conf_name }}"
state: link
when: "item.conf_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "single copy configure nginx"
copy:
src: "{{ conf_name }}"
dest: "/etc/nginx/sites-available"
when: "conf_name is defined"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "single create symlink"
file:
src: "/etc/nginx/sites-available/{{ conf_name }}"
dest: "/etc/nginx/sites-enabled/{{ conf_name }}"
state: link
when: "conf_name is defined"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: restart nginx
service:
name: nginx
state: restarted
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,77 @@
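# Mirror of deploy.yml: remove the deployed trees under /var/www, the nginx site configs and symlinks, then restart nginx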
- name: "Remove all directories"
file:
path: "/var/www/{{ item.git_name }}"
state: absent
when: "item.git_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "Remove single directory"
file:
path: "/var/www/{{ project_name }}"
state: absent
when: "project_name is defined"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "Delete all symlink"
file:
path: "/etc/nginx/sites-enabled/{{ item.conf_name }}"
state: absent
when: "item.conf_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "remove all configure nginx"
file:
path: "/etc/nginx/sites-available/{{ item.conf_name }}"
state: absent
when: "item.conf_name is defined and project_name is not defined and conf_name is not defined"
with_items:
- "{{ project }}"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "Delete single symlink"
file:
path: "/etc/nginx/sites-enabled/{{ conf_name }}"
state: absent
when: "conf_name is defined"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: "remove single configure nginx"
file:
path: "/etc/nginx/sites-available/{{ conf_name }}"
state: absent
when: "conf_name is defined"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: restart nginx
service:
name: nginx
state: restarted
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -1,25 +1,16 @@
---
# tasks file for deploy-web
- name: debug local
local_action:
module: git
repo: "git@gitlab.secu.pcc:descartes/isr-inventory.git"
dest: "/home/provisioner/src/isr-inventory"
archive: "/tmp/isr-inventory.tar.gz"
force: yes
track_submodules: yes
update: yes
key_file: "/home/provisioner/.ssh/id_rsa_toolbox"
run_once: True
become: yes
become_user: provisioner
- name: Deploy project web
import_tasks: deploy.yml
tags: ["deploy"]
- name: Create directory
file:
path: "/home/admloc/isr-inventory"
state: directory
- name: Configure project web
import_tasks: configure.yml
tags: [ "configure" ]
- name: Extract repo isr
unarchive:
src: "/tmp/isr-inventory.tar.gz"
dest: "/home/admloc/isr-inventory"
- name: Destroy project web
import_tasks: destroy.yml
tags: ["destroy"]
- name: Deconfigure project web
import_tasks: deconfigure.yml
tags: [ "deconfigure" ]

View File

@@ -0,0 +1,66 @@
# tasks file for stats script
- name: Template token ipinfo
template:
src: token.j2
dest: /etc/sentinel/token
mode: "0555"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Copy getinfo script
copy:
src: "getinfo_{{ item }}.sh"
dest: "/usr/local/bin/sentinel/getinfo_{{ item }}.sh"
mode: "0555"
with_items:
- day
- week
- month
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Crontab get info day
ansible.builtin.cron:
name: "get info day"
cron_file: "get_info_day_cron"
minute: "0"
hour: "3"
user: root
job: "bash /usr/local/bin/sentinel/getinfo_day.sh"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Crontab get info week
ansible.builtin.cron:
name: "get info week"
cron_file: "get_info_week_cron"
minute: "15"
hour: "3"
weekday: "1"
user: root
job: "bash /usr/local/bin/sentinel/getinfo_week.sh"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Crontab get info month
ansible.builtin.cron:
name: "get info month"
cron_file: "get_info_month_cron"
minute: "30"
hour: "3"
day: "1"
user: root
job: "bash /usr/local/bin/sentinel/getinfo_month.sh"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,36 @@
# tasks file for supervision script
- name: Create supervision directory
file:
state: directory
path: "{{ item }}/supervision"
with_items:
- /usr/local/bin
- /etc
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Copy check_ssl script
copy:
src: "check_ssl.sh"
dest: "/usr/local/bin/supervision/check_ssl.sh"
mode: "0555"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"
- name: Crontab check_ssl
ansible.builtin.cron:
name: "check ssl script"
cron_file: "check_ssl_cron"
minute: "*/30"
user: root
job: "bash /usr/local/bin/supervision/check_ssl.sh"
vars:
ansible_become: yes
ansible_become_method: sudo
ansible_become_password: "{{ sudo_password }}"

View File

@@ -0,0 +1,3 @@
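{# renders one IP per line from the ip_exclude_blacklist inventory variable; written to /etc/sentinel/exclude #}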
{% for host in ip_exclude_blacklist %}
{{ host }}
{% endfor %}

View File

@@ -0,0 +1,3 @@
{% for host in ip_sender_blacklist %}
{{ host }}
{% endfor %}

View File

@@ -0,0 +1,28 @@
export AWS_ACCESS_KEY_ID="{{ aws_access_key_id }}"
export AWS_SECRET_ACCESS_KEY="{{ aws_secret_access_key }}"
export SCW_REGION="{{ scw_region }}"
export SCW_ENDPOINT_URL="https://s3.${SCW_REGION}.scw.cloud"
export SCW_BUCKET="s3://{{ scw_directory }}"
# GPG Key information
export PASSPHRASE="{{ passphrase }}"
export GPG_FINGERPRINT="{{ gpg_fingerprint }}"
# Folder to backup
export SOURCE="--exclude /sys --exclude /proc --exclude /opt --exclude /tmp --exclude /mnt --exclude /home /"
# Will keep backup up to 1 month
export KEEP_BACKUP_TIME="1M"
# Will make a full backup every 10 days
export FULL_BACKUP_TIME="10D"
# Log files
export LOGFILE_RECENT="/root/log/logfile-recent.log"
export LOGFILE="/root/log/logfile.log"
log () {
date=`date +%Y-%m-%d`
hour=`date +%H:%M:%S`
echo "$date $hour $*" >> ${LOGFILE_RECENT}
}
export -f log

View File

@@ -0,0 +1 @@
{{ ssh_port_blacklist }}

View File

@@ -0,0 +1 @@
{{ token_ipinfo }}

View File

@@ -0,0 +1,3 @@
{% for host in virtualhosts %}
{{ host }}
{% endfor %}