diff --git a/README.md b/README.md
index 268299f..4590c73 100644
--- a/README.md
+++ b/README.md
@@ -46,3 +46,6 @@ Attic
* [postinstall.sh](https://forge.tourmentine.com/n/scripts/src/master/attic/postinstall.sh) => OVH/Kimsufi FreeBSD postinstall script
 * [tssh](https://forge.tourmentine.com/n/scripts/src/master/attic/tssh) => automatically rename Konsole tabs using ssh'ed hostname
* [mplayer_update.sh](https://forge.tourmentine.com/n/scripts/src/master/attic/mplayer_update.sh) => download, compile and install FFMpeg & MPlayer
+ * [webnewsget2.sh](https://forge.tourmentine.com/n/scripts/src/master/attic/webnewsget2.sh)+[html2rss.php](https://forge.tourmentine.com/n/scripts/src/master/attic/html2rss.php) => download HTML pages and generate RSS feeds from them
+ * [kimsufi_postinstall.sh](https://forge.tourmentine.com/n/scripts/src/master/attic/kimsufi_postinstall.sh) => bootstrap script for fresh kimsufi/OVH servers
+
diff --git a/attic/html2rss.php b/attic/html2rss.php
new file mode 100644
index 0000000..3cacb1b
--- /dev/null
+++ b/attic/html2rss.php
@@ -0,0 +1,317 @@
+#!/usr/bin/php -q
+
+ $totalnews)
+ $NbMax = $totalnews;
+
+ $outputfile = "/var/www/website/backends/txt/$filename.txt";
+
+ if ($totalnews > 1)
+ {
+ $backend = fopen("$outputfile","w");
+
+ for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
+ {
+ fwrite ($backend,"%%\n");
+ fwrite ($backend,$tableau[$n]["title"]."\n");
+ if (!strstr($tableau[$n]["link"],"http://"))
+ $tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
+ fwrite ($backend,$tableau[$n]["link"]."\n");
+ }
+ fclose($backend);
+ echo "$outputfile généré (".($n-1)." enregistrements).\n";
+ }
+ else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
+}
+
+
+/**********************************************************************
+ * write2rss091()
+ **********************************************************************
+ * Writes the first $NbMax entries of $tableau — i.e. the "link" and
+ * "title" fields produced by the scraping pass — into an RSS 0.91
+ * backend file, prefixing relative links with $baseurl (taken from
+ * the per-site .ini) when that variable is defined.
+ **********************************************************************
+ * format: see the RSS 0.91 specification:
+ * http://my.netscape.com/publish/formats/rss-spec-0.91.html
+ **********************************************************************
+ * NOTE(review): the XML element tags appear to have been stripped
+ * from the fwrite() string literals in this copy of the file; the
+ * literals below are kept byte-identical rather than guessed at —
+ * TODO recover the original markup from a pristine copy.
+ **********************************************************************/
+
+function write2rss091($inifiledata,$tableau)
+{
+
+ // Pull the .ini settings ($filename, $NbMax, $offset, $baseurl,
+ // $titlesite, $linksite, $descriptionsite, $languagesite) into scope.
+ extract($inifiledata);
+
+ // Channel header, interpolating the site-wide .ini fields.
+ $entete = "
+
+
+
+
+
+
+
+ $titlesite
+ $linksite
+ $descriptionsite
+ $languagesite\n\n";
+
+ $outputfile = "/var/www/website/backends/xml/$filename.rss";
+
+ $totalnews = count($tableau);
+
+ // Never emit more items than were actually scraped.
+ if ($NbMax > $totalnews)
+ $NbMax = $totalnews;
+
+ if ($totalnews > 1)
+ {
+ $backend = fopen("$outputfile","w");
+ fwrite ($backend,$entete);
+ // Emit the window [$offset+1 .. $offset+$NbMax]; $tableau is 1-based.
+ for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
+ {
+ fwrite ($backend," - \n");
+ fwrite ($backend," ".$tableau[$n]["title"]."\n");
+ // Links without a scheme are treated as relative and resolved
+ // against $baseurl (only checks "http://"; https is not handled).
+ if (!strstr($tableau[$n]["link"],"http://") && isset($baseurl))
+ $tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
+ fwrite ($backend," ".htmlentities($tableau[$n]["link"])."\n");
+ fwrite ($backend,"
\n\n");
+ }
+ fwrite ($backend," \n\n");
+ fclose($backend);
+ echo "$outputfile généré (".($n-1)." enregistrements).\n";
+ }
+ else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
+}
+
+/**********************************************************************
+ * write2rss()
+ **********************************************************************
+ * Same as write2rss091(), but emits the RSS 2.0 format: the channel
+ * header additionally carries pubDate/lastBuildDate (current time,
+ * RFC 2822 via date("r")) and a generator tag.
+ **********************************************************************
+ * NOTE(review): as elsewhere in this copy, the XML element tags have
+ * been stripped from the string literals — kept byte-identical here.
+ **********************************************************************/
+
+function write2rss($inifiledata,$tableau)
+{
+
+ // Pull the .ini settings ($filename, $NbMax, $offset, $baseurl,
+ // $titlesite, $linksite, $descriptionsite, $languagesite) into scope.
+ extract($inifiledata);
+
+ // Channel header; date("r") gives the RFC 2822 timestamp RSS 2.0 expects.
+ $entete = "
+
+
+
+
+
+ $titlesite
+ $linksite
+ $descriptionsite
+ $languagesite
+ ".date("r")."
+ ".date("r")."
+ WebLoom beta\n\n";
+
+ $outputfile = "/var/www/website/backends/xml/$filename.rss";
+
+ $totalnews = count($tableau);
+
+ // Never emit more items than were actually scraped.
+ if ($NbMax > $totalnews)
+ $NbMax = $totalnews;
+
+ if ($totalnews > 1)
+ {
+ $backend = fopen("$outputfile","w");
+ fwrite ($backend,$entete);
+ // Emit the window [$offset+1 .. $offset+$NbMax]; $tableau is 1-based.
+ for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
+ {
+ fwrite ($backend," - \n");
+ fwrite ($backend," ".$tableau[$n]["title"]."\n");
+ // Resolve scheme-less links against $baseurl (http:// check only).
+ if (!strstr($tableau[$n]["link"],"http://") && isset($baseurl))
+ $tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
+ fwrite ($backend," ".htmlentities($tableau[$n]["link"])."\n");
+ fwrite ($backend,"
\n\n");
+ }
+ fwrite ($backend," \n\n");
+ fclose($backend);
+ echo "$outputfile généré (".($n-1)." enregistrements).\n";
+ }
+ else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
+}
+
<source_file>
+/**********************************************************************
+ * write2atom()
+ **********************************************************************
+ * Writes the first $NbMax entries of $tableau — the "link", "title",
+ * "summary" and "author" fields produced by the scraping pass — into
+ * an Atom backend file, prefixing relative links with $baseurl (from
+ * the per-site .ini) when that variable is defined.
+ **********************************************************************
+ * format: see the Atom 0.3 draft specification:
+ * http://www.ietf.org/internet-drafts/draft-ietf-atompub-format-02.txt
+ **********************************************************************
+ * NOTE(review): $tableau is taken by reference here (unlike the RSS
+ * writers) although it is never used to return data to the caller —
+ * presumably only to avoid copying; confirm before changing.
+ * As elsewhere in this copy, the XML element tags have been stripped
+ * from the string literals — kept byte-identical here.
+ **********************************************************************/
+
+function write2atom($inifiledata,&$tableau)
+{
+
+ // Pull the .ini settings ($filename, $NbMax, $offset, $baseurl,
+ // $titlesite, $descriptionsite, ...) into scope.
+ extract($inifiledata);
+
+ // Feed header; date("c") gives the ISO 8601 timestamp Atom expects.
+ $entete = "
+
+
+ $titlesite
+
+ WebLoom
+ $descriptionsite
+ ".date("c")."\n\n";
+
+
+ $outputfile = "/var/www/website/backends/atom/$filename.xml";
+
+ $totalnews = count($tableau);
+
+ // Never emit more entries than were actually scraped.
+ if ($NbMax > $totalnews)
+ $NbMax = $totalnews;
+
+ if ($totalnews > 1)
+ {
+ $backend = fopen("$outputfile","w");
+ fwrite ($backend,$entete);
+ // Emit the window [$offset+1 .. $offset+$NbMax]; $tableau is 1-based.
+ for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
+ {
+ //print_r($tableau[$n]);
+ fwrite ($backend," \n");
+ fwrite ($backend," ".$tableau[$n]["title"]."\n");
+ // Resolve scheme-less links against $baseurl (http:// check only).
+ if (!strstr($tableau[$n]["link"],"http://") && isset($baseurl))
+ $tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
+ fwrite ($backend," ".htmlentities($tableau[$n]["link"])."\n");
+ fwrite ($backend," \n");
+ fwrite ($backend," ".$tableau[$n]["summary"]."\n");
+ // NOTE(review): entry timestamps are the generation time, not the
+ // article's scraped date ($tableau[$n]["date"] is unused here).
+ fwrite ($backend," ".date("c")."\n");
+ fwrite ($backend," ".date("c")."\n");
+ fwrite ($backend," ".$tableau[$n]["author"]."\n");
+ fwrite ($backend," \n\n");
+ }
+ fwrite ($backend,"");
+ fclose($backend);
+ echo "$outputfile généré (".($n-1)." enregistrements).\n";
+ }
+ else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
+}
</source_file>
+
+/*********************************************************************************************/
+
+ // Main script: load the per-site config named on the command line,
+ // scrape the configured HTML page, and write the RSS/Atom backends.
+ $f=$argv[1];
+
+ // NOTE(review): $f comes straight from the command line and is
+ // interpolated into an include path — run only with trusted names.
+ include("/var/www/website/modules/backends/conf/$f");
+ if (!isset($NbMax) || $NbMax=="")
+ $NbMax = 10;
+
+ $inidata = compact('source','filename','titlesite','linksite','descriptionsite','languagesite','baseurl','grep','offset','NbMax');
+
+ // Fetch the source page and flatten it to one line so the item
+ // regexes can match across original line breaks.
+ $data = implode("",file($source));
+ // le nettoie
+ $data = strtr($data,"\n\0\r\t"," ");
+ // html_entity_decode() replaces the old per-entity ereg_replace()
+ // calls that used to live here (commented-out code removed).
+ $data = html_entity_decode($data);
+ $data = trim($data);
+
+ // Extract (link, title) pairs with the site-specific regex from the .ini.
+ preg_match_all($grep,$data,$res,PREG_SET_ORDER);
+
+ // Initialise explicitly so the writers get an empty array (not an
+ // undefined variable) when the page yields no matches.
+ $niouzes = array();
+ $index = 1;
+ foreach ($res as $elt)
+ {
+ if ($elt[1] != "" && $elt[2] != "")
+ {
+ $niouzes[$index]["link"] = $elt[1];
+ // Optionally fetch each linked article to scrape date/summary/author.
+ if (file_exists($baseurl.$elt[1]))
+ {
+ $dataitem = implode("",file($baseurl.$elt[1]));
+ $dataitem = strtr($dataitem,"\n\0\r"," ");
+ $dataitem = trim($dataitem);
+ if (isset($grepitemdate))
+ {
+ preg_match_all($grepitemdate,$dataitem,$resitem,PREG_SET_ORDER);
+ $niouzes[$index]["date"] = $resitem[0][1];
+ }
+ else
+ $niouzes[$index]["date"] = "unknown";
+ if (isset($grepitemsummary))
+ {
+ preg_match_all($grepitemsummary,$dataitem,$resitem,PREG_SET_ORDER);
+ // Truncate summaries to 200 characters of tag-stripped text.
+ if (strlen(strip_tags($resitem[0][1])) > 200)
+ $niouzes[$index]["summary"] = substr(strip_tags($resitem[0][1]), 0, 200)."...";
+ else
+ $niouzes[$index]["summary"] = strip_tags($resitem[0][1]);
+ }
+ else
+ $niouzes[$index]["summary"] = "unknown";
+ // Author precedence: fixed $author from the .ini, then a per-item
+ // regex, then the literal "unknown".
+ if (isset($author))
+ {
+ $niouzes[$index]["author"] = $author;
+ }
+ else if (isset($grepitemauthor))
+ {
+ preg_match_all($grepitemauthor,$dataitem,$resitem,PREG_SET_ORDER);
+ $niouzes[$index]["author"] = $resitem[0][1];
+ }
+ else
+ $niouzes[$index]["author"] = "unknown";
+ }
+ $niouzes[$index]["title"] = trim($elt[2]);
+ }
+ $index++;
+ }
+
+ // écrit les données
+ //write2txt($inidata,$niouzes);
+ //write2rss091($inidata,$niouzes);
+ // Fixed: call-time pass-by-reference (&$niouzes) is a fatal error
+ // since PHP 5.4; write2atom() declares the reference in its signature.
+ write2rss($inidata,$niouzes);
+ write2atom($inidata,$niouzes);
+
+
+?>
diff --git a/attic/kimsufi_postinstall.sh b/attic/kimsufi_postinstall.sh
new file mode 100644
index 0000000..384a432
--- /dev/null
+++ b/attic/kimsufi_postinstall.sh
@@ -0,0 +1,21 @@
+echo "mykey">>/root/.ssh/authorized_keys2
+umount /servers/
+#zpool create -f servers ada0s1e
+zpool create -f servers ad4s1e
+zfs create servers/ftp
+zfs create servers/web
+zfs create servers/mail
+zfs create servers/db
+zfs create servers/dns
+#fetch ftp://ftp1.freebsd.org/pub/FreeBSD/releases/amd64/8.2-RELEASE/src.txz
+#fetch ftp://ftp1.freebsd.org/pub/FreeBSD/releases/amd64/8.2-RELEASE/ports.txz
+#tar -C / -xvzf src.txz
+#tar -C / -xvzf ports.txz
+#cd /usr/ports/shells/zsh/ && make -DBATCH clean install
+#cd /usr/ports/security/ipsec-tools/ && make -DBATCH clean install
+#cd /usr/ports/editors/vim-lite/ && make -DBATCH clean install
+#cd /usr/ports/sysutils/rdiff-backup/ && make -DBATCH clean install
+#cd /usr/ports/sysutils/ezjail/ && make -DBATCH clean install
+pkg_add -r zsh ipsec-tools rdiff-backup ezjail openvpn 3proxy
+print OK
+return OK
diff --git a/attic/webnewsget2.ini.php.sample b/attic/webnewsget2.ini.php.sample
new file mode 100644
index 0000000..cf37a41
--- /dev/null
+++ b/attic/webnewsget2.ini.php.sample
@@ -0,0 +1,10 @@
+(.+?)|";
+?>
diff --git a/attic/webnewsget2.sh b/attic/webnewsget2.sh
new file mode 100644
index 0000000..d1405b9
--- /dev/null
+++ b/attic/webnewsget2.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Regenerate every configured feed by running html2rss.php once per
+# config file.  Fixed: iterate with a glob instead of parsing `ls -A`
+# output, which word-splits names containing whitespace; quote the
+# argument for the same reason.  html2rss.php resolves the bare name
+# against its own conf directory, so pass only the basename.
+for i in /home/joe/conf/*
+do
+	/usr/local/bin/html2rss.php "$(basename "$i")"
+done