almost initial import

This commit is contained in:
n 2014-09-13 21:23:13 +02:00
parent 7693d8c729
commit ad62380b7a
17 changed files with 738 additions and 17 deletions

119
73lab.sh Normal file
View File

@ -0,0 +1,119 @@
#!/bin/bash
#########################################
# AjoutPingus 1.0
# 23/06/2002
#########################################
# Checks whether new wallpapers have
# been published on 73lab, downloads
# the missing ones and converts them to:
#  - 1280x1024 png & jpeg
#  - 1024x768  png & jpeg
#  - 800x600   png & jpeg
# then sends a notification e-mail.
#########################################
# IMPORTANT: requires the ImageMagick
# tools - http://www.imagemagick.org
#########################################
# NOTE: the shebang was moved to the
# first line, where the kernel actually
# looks for it (it used to sit below
# the banner and was ignored).
sujet="Ajout Pingus"
dest="n@tourmentine.com"
log="/tmp/ayo.log"
ok=""
# the log doubles as the "new files arrived" flag: start from a clean slate
if [ -e "$log" ]; then
  rm -f "$log"
fi
# quoting the URL replaces the fragile backslash-escaping of ? and &
wget -O /tmp/73lab.html 'http://www.73lab.com/index.php?menu_item=1&page=ALL&curr_lang=0'
# extract the base name of every wallpaper listed on the page;
# "tail -n +2" is the modern spelling of the obsolete "tail +2"
# (the trailing no-op "tail +1" of the original pipeline was dropped)
grep javascript /tmp/73lab.html | cut -d '&' -f 2 | tail -n +2 \
    | cut -b 117- | cut -d '.' -f 1 | sed s/_1280_1024// \
    | while IFS= read -r i; do
  # original image (1280x1024 png)
  if [ -e "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/${i}_1280_1024.png" ]; then
    echo "$ok"
  else
    wget -O "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/${i}_1280_1024.png" "http://73lab.free.fr/distant_file/dessins/wallpaper/${i}_1280_1024.png"
    chown ftp:ftp "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/${i}_1280_1024.png"
    # fixed: the original logged "var/ftp/..." without the leading slash
    echo "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/$i-1280-1024.png">>"$log"
  fi
  # 1024x768 png
  if [ -e "/var/ftp/pub/wallpaper/ayo73/png/1024x768/${i}_1024_768.png" ]; then
    echo "$ok"
  else
    convert -enhance -geometry '1024x768!' "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/${i}_1280_1024.png" "/var/ftp/pub/wallpaper/ayo73/png/1024x768/${i}_1024_768.png"
    chown ftp:ftp "/var/ftp/pub/wallpaper/ayo73/png/1024x768/${i}_1024_768.png"
    # fixed: the original logged the 1280x1024 directory for this file
    echo "/var/ftp/pub/wallpaper/ayo73/png/1024x768/$i-1024-768.png">>"$log"
  fi
  # 800x600 png
  if [ -e "/var/ftp/pub/wallpaper/ayo73/png/800x600/${i}_800_600.png" ]; then
    echo "$ok"
  else
    convert -enhance -geometry '800x600!' "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/${i}_1280_1024.png" "/var/ftp/pub/wallpaper/ayo73/png/800x600/${i}_800_600.png"
    chown ftp:ftp "/var/ftp/pub/wallpaper/ayo73/png/800x600/${i}_800_600.png"
    echo "/var/ftp/pub/wallpaper/ayo73/png/800x600/$i-800-600.png">>"$log"
  fi
  # 1280x1024 jpeg
  if [ -e "/var/ftp/pub/wallpaper/ayo73/jpeg/1280x1024/${i}_1280_1024.jpg" ]; then
    echo "$ok"
  else
    convert -enhance "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/${i}_1280_1024.png" "/var/ftp/pub/wallpaper/ayo73/jpeg/1280x1024/${i}_1280_1024.jpg"
    chown ftp:ftp "/var/ftp/pub/wallpaper/ayo73/jpeg/1280x1024/${i}_1280_1024.jpg"
    echo "/var/ftp/pub/wallpaper/ayo73/jpeg/1280x1024/$i-1280-1024.jpg">>"$log"
  fi
  # 1024x768 jpeg
  if [ -e "/var/ftp/pub/wallpaper/ayo73/jpeg/1024x768/${i}_1024_768.jpg" ]; then
    echo "$ok"
  else
    convert -enhance "/var/ftp/pub/wallpaper/ayo73/png/1024x768/${i}_1024_768.png" "/var/ftp/pub/wallpaper/ayo73/jpeg/1024x768/${i}_1024_768.jpg"
    chown ftp:ftp "/var/ftp/pub/wallpaper/ayo73/jpeg/1024x768/${i}_1024_768.jpg"
    echo "/var/ftp/pub/wallpaper/ayo73/jpeg/1024x768/$i-1024-768.jpg">>"$log"
  fi
  # also add it to my personal wallpaper collection
  if [ -e "/home/n/.wallpap/ayo73/${i}_1280_1024.jpg" ]; then
    echo "$ok"
  else
    cp "/var/ftp/pub/wallpaper/ayo73/jpeg/1280x1024/${i}_1280_1024.jpg" /home/n/.wallpap/ayo73/
    chown n:n "/home/n/.wallpap/ayo73/${i}_1280_1024.jpg"
    echo "/home/n/.wallpap/ayo73/$i-1280-1024.jpg">>"$log"
  fi
  # 800x600 jpeg
  if [ -e "/var/ftp/pub/wallpaper/ayo73/jpeg/800x600/${i}_800_600.jpg" ]; then
    echo "$ok"
  else
    convert -enhance "/var/ftp/pub/wallpaper/ayo73/png/800x600/${i}_800_600.png" "/var/ftp/pub/wallpaper/ayo73/jpeg/800x600/${i}_800_600.jpg"
    chown ftp:ftp "/var/ftp/pub/wallpaper/ayo73/jpeg/800x600/${i}_800_600.jpg"
    echo "/var/ftp/pub/wallpaper/ayo73/jpeg/800x600/$i-800-600.jpg">>"$log"
  fi
  # thumbnails for the web site (153x122 jpeg)
  if [ -e "/home/n/website/images/ayo73/${i}_1280_1024.jpg" ]; then
    echo "$ok"
  else
    convert -enhance -geometry '153x122!' "/var/ftp/pub/wallpaper/ayo73/png/1280x1024/${i}_1280_1024.png" "/home/n/website/images/ayo73/${i}_1280_1024.jpg"
    chown n:n "/home/n/website/images/ayo73/${i}_1280_1024.jpg"
    echo "/home/n/website/images/ayo73/$i-1280-1024.jpg">>"$log"
  fi
done
# mail the sorted list of new files, if any
if [ -e "$log" ]; then
  sort "$log" | mail -s "$sujet" "$dest"
fi
rm -f /tmp/73lab.html

View File

@ -0,0 +1,16 @@
// ==UserScript==
// @name AutocompleteOnDamnit
// @namespace mawhorter.net
// @include *
// ==/UserScript==
// Strip the autocomplete attribute from every <input> and <form> element
// so the browser is free to remember field values.
// Fix: the original iterated the HTMLCollection with for..in, which (in
// the Greasemonkey-era DOM) also enumerates members such as length/item
// and then calls getAttribute on them, throwing a TypeError. An index
// loop visits only the actual elements.
function stripAutocomplete(tagName) {
  var els = document.getElementsByTagName(tagName);
  for (var i = 0; i < els.length; i++) {
    if (els[i].getAttribute('autocomplete'))
      els[i].removeAttribute('autocomplete');
  }
}
stripAutocomplete('input');
stripAutocomplete('form');

1
bmsync Normal file
View File

@ -0,0 +1 @@
scp /home/n/.kde3.5/share/apps/konqueror/bookmarks.xml root@web:/www/website/modules/ ; ssh root@web "php /www/website/modules/genlinks.php"

9
caminfo.sh Normal file
View File

@ -0,0 +1,9 @@
#!/bin/bash
# Refresh ~/.caminfo, the "now playing" caption file used by the webcam
# overlay. Older versions parsed /tmp/xmms-info directly and transliterated
# accented characters by hand (kept below for reference):
#if [ -a /tmp/xmms-info ]
# then cat /tmp/xmms-info | tail -n 2 | head -n 1 | sed -e s/Title:/Listening\ to:/ | sed -e s/ï/ï/ | sed -e s/ö/ö/ | sed -e s/à/a/ | sed -e s/ê/ê/ | sed -e s/é/é/ | sed -e s/è/è/ | sed -e s/ç/ç/ | sed -e s/°/o/ > /home/n/.caminfo
# else echo > /home/n/.caminfo
#fi
#echo > /home/n/.caminfo
# Current approach: delegate to the XMMS InfoPipe helper; stderr is
# discarded so the caption file is simply left empty when XMMS is idle.
/usr/local/bin/infopipe.pl > /home/n/.caminfo 2>/dev/null

38
caminit Normal file
View File

@ -0,0 +1,38 @@
#!/bin/bash
# Webcam control script: start/stop/restart the camE webcam grabber,
# juggling the kernel modules and the lirc daemon that share hardware
# with the camera.
case $1 in
start)
echo "Starting webcam..."
# Stop lirc and unload its modules before (re)loading the camera driver.
# NOTE(review): presumably quickcam and lirc_atiusb conflict on this
# machine - confirm on the actual hardware.
sudo /etc/init.d/lircd stop
sudo /sbin/rmmod quickcam
sudo /sbin/rmmod lirc_atiusb
sudo /sbin/rmmod lirc_dev
#sudo /sbin/rmmod bttv
sleep 1
sudo /sbin/modprobe quickcam
sleep 1
# blank the overlay caption files so camE starts with a clean caption
echo > /home/n/.caminfo
echo > /tmp/xmms-info
camE
sleep 1
# restore the lirc stack once the camera is up
sudo /sbin/modprobe lirc_atiusb
sleep 1
sudo /etc/init.d/lircd start
#sudo /etc/init.d/lircmd start
#sudo /sbin/modprobe bttv
# NOTE(review): camE is killed right after being started above - looks
# deliberate (first run to initialise, respawned elsewhere?) but confirm.
killall camE
;;
stop)
echo "Stopping webcam..."
killall camE
# replace the live image on the web server with the "offline" placeholder
scp /home/n/.block.jpg root@web:/www/website/images/webcam.jpg > /dev/null
echo > /home/n/.caminfo
;;
restart)
echo "Restarting webcam..."
killall camE
camE
;;
*)
echo "Usage: $0 [start|stop|restart]"
esac

24
cleanmaildir.sh Normal file
View File

@ -0,0 +1,24 @@
#!/bin/sh
#
# simple shell script to clean some subdirectories of a maildir
#
# format of $etcfile is:
# dirname1,maxdays
# dirname2,maxdays
# etc...
# (one line by subdir to clean)
#
# where dirname* is the name of the subdirectory to clean,
# and maxdays is the maximum number of days before a message is deleted
#
# put it in the user's crontab:
# 50 * * * * /path/to/cleanmaildir.sh
etcfile=/etc/cleanmaildir.conf
# Read one "dirname,maxdays" entry per line. A while/read loop (unlike the
# original `for i in $(cat ...)`) is immune to word-splitting and globbing.
while IFS= read -r entry; do
  # skip blank lines
  [ -n "$entry" ] || continue
  dir=$(echo "$entry" | cut -f 1 -d ,)
  days=$(echo "$entry" | cut -f 2 -d ,)
  # -mtime +N matches files modified strictly more than N+1 days ago,
  # hence the N-1; searching the directory itself (with -type f) instead
  # of globbing cur/* avoids an error when the directory is empty.
  find "$HOME/Maildir/$dir/cur/" -type f -mtime +$((days - 1)) -exec rm '{}' \;
done < "$etcfile"

1
dodo.sh Normal file
View File

@ -0,0 +1 @@
xset dpms force off

317
html2rss.php Normal file
View File

@ -0,0 +1,317 @@
#!/usr/bin/php -q
<?php
/***************************************************
** html2rss.php - 13/07/02 n@tourmentine.com
***************************************************
* 20/10/04: ajout formats RSS 2.0 et Atom 0.3
***************************************************
* prend en paramètre un fichier de config,
* en fonction des paramètres, télécharge les news,
* les parse, puis écris les données dans les
* fichiers backends (en txt et rss 0.91)
***************************************************/
/**********************************************************************
* write2txt()
**********************************************************************
* écrit les $NbMde premères valeurs de $tableau, à savoir les
* champs link et title (voir fonction parse_html())
* dans le fichier $file, en ajoutant au besoin (si défini) la
* variable $bu correspondant à l'url de base ($baseurl dans le .ini)
**********************************************************************
* format: fichier de news au format texte
*
* %%
* titre
* url
* %%
* titre
* (...)
**********************************************************************/
// Write the first $NbMax parsed entries (title + link) of $tableau to the
// plain-text backend file for $filename. See the format description above.
function write2txt($inifiledata,$tableau)
{
// import $NbMax, $offset, $filename, $baseurl, ... from the config array
extract($inifiledata);
// NOTE(review): looks like a debug leftover - prints the (unused) index-0
// title to stdout; confirm before removing
echo $tableau[0]["title"];
$totalnews = count($tableau);
// never ask for more items than were actually parsed
if ($NbMax > $totalnews)
$NbMax = $totalnews;
$outputfile = "/var/www/website/backends/txt/$filename.txt";
// a single entry usually means the parse failed; skip writing then
if ($totalnews > 1)
{
$backend = fopen("$outputfile","w");
// entries are indexed from 1 by the parsing loop at the bottom of the file
for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
{
fwrite ($backend,"%%\n");
fwrite ($backend,$tableau[$n]["title"]."\n");
// prepend the base URL to relative links
// NOTE(review): unlike the other writers this does not check
// isset($baseurl) first - confirm every config defines $baseurl
if (!strstr($tableau[$n]["link"],"http://"))
$tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
fwrite ($backend,$tableau[$n]["link"]."\n");
}
fclose($backend);
echo "$outputfile généré (".($n-1)." enregistrements).\n";
}
else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
}
/**********************************************************************
* write2rss091()
**********************************************************************
* écrit les $NbMaxde premères valeurs de $tableau, à savoir les
* champs link et title (voir fonction parse_html())
* dans le fichier $file, en ajoutant au besoin (si défini) la
* variable $bu correspondant à l'url de base ($baseurl dans le .ini)
**********************************************************************
* format: voir spécificications RSS 0.91:
* http://my.netscape.com/publish/formats/rss-spec-0.91.html
**********************************************************************/
// Write the first $NbMax parsed entries of $tableau as an RSS 0.91
// backend file (spec: my.netscape.com RSS 0.91). Relative links get
// $baseurl prepended when it is defined.
function write2rss091($inifiledata,$tableau)
{
// import $filename, $NbMax, $offset, $baseurl, $titlesite, ... from config
extract($inifiledata);
$entete = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
<!DOCTYPE rss PUBLIC \"-//Netscape Communications//DTD RSS 0.91//EN\"
\"http://my.netscape.com/publish/formats/rss-0.91.dtd\">
<rss version=\"0.91\">
<channel>
<title>$titlesite</title>
<link>$linksite</link>
<description>$descriptionsite</description>
<language>$languagesite</language>\n\n";
$outputfile = "/var/www/website/backends/xml/$filename.rss";
$totalnews = count($tableau);
// never ask for more items than were actually parsed
if ($NbMax > $totalnews)
$NbMax = $totalnews;
// a single entry usually means the parse failed; skip writing then
if ($totalnews > 1)
{
$backend = fopen("$outputfile","w");
fwrite ($backend,$entete);
// entries are indexed from 1 by the parsing loop at the bottom of the file
for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
{
fwrite ($backend," <item>\n");
fwrite ($backend," <title>".$tableau[$n]["title"]."</title>\n");
// prepend the base URL to relative links
if (!strstr($tableau[$n]["link"],"http://") && isset($baseurl))
$tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
fwrite ($backend," <link>".htmlentities($tableau[$n]["link"])."</link>\n");
fwrite ($backend," </item>\n\n");
}
fwrite ($backend," </channel>\n\n</rss>");
fclose($backend);
echo "$outputfile généré (".($n-1)." enregistrements).\n";
}
else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
}
/**********************************************************************
* write2rss()
**********************************************************************
* idem write2rss091, mais pour format 2.0 (!)
**********************************************************************/
// Same as write2rss091() but emits RSS 2.0 (adds pubDate/lastBuildDate
// and a generator tag to the channel header).
function write2rss($inifiledata,$tableau)
{
// import $filename, $NbMax, $offset, $baseurl, $titlesite, ... from config
extract($inifiledata);
$entete = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
<rss version=\"2.0\">
<channel>
<title>$titlesite</title>
<link>$linksite</link>
<description>$descriptionsite</description>
<language>$languagesite</language>
<pubDate>".date("r")."</pubDate>
<lastBuildDate>".date("r")."</lastBuildDate>
<generator>WebLoom beta</generator>\n\n";
$outputfile = "/var/www/website/backends/xml/$filename.rss";
$totalnews = count($tableau);
// never ask for more items than were actually parsed
if ($NbMax > $totalnews)
$NbMax = $totalnews;
// a single entry usually means the parse failed; skip writing then
if ($totalnews > 1)
{
$backend = fopen("$outputfile","w");
fwrite ($backend,$entete);
// entries are indexed from 1 by the parsing loop at the bottom of the file
for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
{
fwrite ($backend," <item>\n");
fwrite ($backend," <title>".$tableau[$n]["title"]."</title>\n");
// prepend the base URL to relative links
if (!strstr($tableau[$n]["link"],"http://") && isset($baseurl))
$tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
fwrite ($backend," <link>".htmlentities($tableau[$n]["link"])."</link>\n");
fwrite ($backend," </item>\n\n");
}
fwrite ($backend," </channel>\n\n</rss>");
fclose($backend);
echo "$outputfile généré (".($n-1)." enregistrements).\n";
}
else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
}
/**********************************************************************
* write2atom()
**********************************************************************
* écrit les $NbMaxde premères valeurs de $tableau, à savoir les
* champs link et title (voir fonction parse_html())
* dans le fichier $file, en ajoutant au besoin (si défini) la
* variable $bu correspondant à l'url de base ($baseurl dans le .ini)
**********************************************************************
* format: voir le brouillon des spécificications Atom 0.3:
* http://www.ietf.org/internet-drafts/draft-ietf-atompub-format-02.txt
**********************************************************************/
// Write the first $NbMax parsed entries of $tableau as an Atom 0.3 feed
// (draft spec). Uses the "summary", "author" and "link" fields filled by
// the parsing loop at the bottom of the file.
function write2atom($inifiledata,&$tableau)
{
// import $filename, $NbMax, $offset, $baseurl, $titlesite, ... from config
extract($inifiledata);
$entete = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>
<feed version=\"0.3\"
xmlns=\"http://purl.org/atom/ns#\"
xml:lang=\"$languagesite\">
<title>$titlesite</title>
<link rel=\"alternate\" type=\"text/html\" href=\"$linksite\" />
<generator url=\"http://tourmentine.com/\" version=\"beta\">WebLoom</generator>
<tagline>$descriptionsite</tagline>
<modified>".date("c")."</modified>\n\n";
$outputfile = "/var/www/website/backends/atom/$filename.xml";
$totalnews = count($tableau);
// never ask for more items than were actually parsed
if ($NbMax > $totalnews)
$NbMax = $totalnews;
// a single entry usually means the parse failed; skip writing then
if ($totalnews > 1)
{
$backend = fopen("$outputfile","w");
fwrite ($backend,$entete);
// entries are indexed from 1 by the parsing loop at the bottom of the file
for($n = $offset+1 ; $n <= $NbMax+$offset ; $n++)
{
//print_r($tableau[$n]);
fwrite ($backend," <entry>\n");
fwrite ($backend," <title>".$tableau[$n]["title"]."</title>\n");
// prepend the base URL to relative links
if (!strstr($tableau[$n]["link"],"http://") && isset($baseurl))
$tableau[$n]["link"]=$baseurl.$tableau[$n]["link"];
fwrite ($backend," <id>".htmlentities($tableau[$n]["link"])."</id>\n");
fwrite ($backend," <link rel=\"alternate\" type=\"text/html\" href=\"".htmlentities($tableau[$n]["link"])."\" />\n");
fwrite ($backend," <summary>".$tableau[$n]["summary"]."</summary>\n");
// NOTE(review): modified/issued use the generation time, not the
// article's own date (which is parsed into ["date"] but unused here)
fwrite ($backend," <modified>".date("c")."</modified>\n");
fwrite ($backend," <issued>".date("c")."</issued>\n");
fwrite ($backend," <author><name>".$tableau[$n]["author"]."</name></author>\n");
fwrite ($backend," </entry>\n\n");
}
fwrite ($backend,"</feed>");
fclose($backend);
echo "$outputfile généré (".($n-1)." enregistrements).\n";
}
else echo "$outputfile non créé : nombre de news insuffisant ($totalnews)\n";
}
/*********************************************************************************************/
// Entry point: the first CLI argument names a config file defining
// $source, $filename, $grep, $offset, the site metadata, etc.
$f=$argv[1];
include("/var/www/website/modules/backends/conf/$f");
if (!isset($NbMax) || $NbMax=="")
$NbMax = 10;
$inidata = compact('source','filename','titlesite','linksite','descriptionsite','languagesite','baseurl','grep','offset','NbMax');
// slurp the whole source page into one string
$data = implode("",file($source));
// clean it up: flatten newlines/NULs/CRs/tabs to spaces
$data = strtr($data,"\n\0\r\t"," ");
/*$data = ereg_replace("&eacute;","é",$data);
$data = ereg_replace("&ugrave;","ù",$data);
$data = ereg_replace("&agrave;","à",$data);
$data = ereg_replace("&egrave;","è",$data);
$data = ereg_replace("&ecirc;","ê",$data);
$data = ereg_replace("&icirc;","î",$data);
$data = ereg_replace("&ocirc;","ô",$data);
$data = ereg_replace("&iuml;;","ï",$data);
$data = ereg_replace("&amp;","&",$data);*/
$data = html_entity_decode($data);
$data = trim($data);
// $grep captures (1) the link and (2) the title of each news item
preg_match_all($grep,$data,$res,PREG_SET_ORDER);
// fix: initialise the result array so the writers receive a valid (empty)
// array instead of an undefined variable when nothing matched;
// entries are deliberately indexed from 1 (the writers rely on it)
$niouzes = array();
$index = 1;
foreach ($res as $elt)
{
if ($elt[1] != "" && $elt[2] != "")
{
$niouzes[$index]["link"] = $elt[1];
if (file_exists($baseurl.$elt[1]))
{
$dataitem = implode("",file($baseurl.$elt[1]));
$dataitem = strtr($dataitem,"\n\0\r"," ");
$dataitem = trim($dataitem);
if (isset($grepitemdate))
{
preg_match_all($grepitemdate,$dataitem,$resitem,PREG_SET_ORDER);
$niouzes[$index]["date"] = $resitem[0][1];
}
else
$niouzes[$index]["date"] = "unknown";
if (isset($grepitemsummary))
{
preg_match_all($grepitemsummary,$dataitem,$resitem,PREG_SET_ORDER);
if (strlen(strip_tags($resitem[0][1])) > 200)
$niouzes[$index]["summary"] = substr(strip_tags($resitem[0][1]), 0, 200)."...";
else
$niouzes[$index]["summary"] = strip_tags($resitem[0][1]);
}
else
$niouzes[$index]["summary"] = "unknown";
if (isset($author))
{
$niouzes[$index]["author"] = $author;
}
else if (isset($grepitemauthor))
{
preg_match_all($grepitemauthor,$dataitem,$resitem,PREG_SET_ORDER);
$niouzes[$index]["author"] = $resitem[0][1];
}
else
$niouzes[$index]["author"] = "unknown";
}
$niouzes[$index]["title"] = trim($elt[2]);
}
$index++;
}
// write the backend files
//write2txt($inidata,$niouzes);
//write2rss091($inidata,$niouzes);
// fix: call-time pass-by-reference (&$niouzes) is a fatal error since
// PHP 5.4; the plain call matches write2rss()'s by-value signature anyway
write2rss($inidata,$niouzes);
write2atom($inidata,$niouzes);
?>

6
mkvig Normal file
View File

@ -0,0 +1,6 @@
#!/bin/sh
# Generate KDE folder artwork in the current directory: a .directory file
# pointing at a hidden icon, plus folder.jpg/.folder.png rendered (via
# ImageMagick's convert) from the image given as $1.
echo "[Desktop Entry]
Icon=./.folder.png">./.directory
#convert /home/n/img/pochettes/$1.jpg ./folder.jpg
#convert /home/n/img/pochettes/$1.jpg ./.folder.png
# quote $1 so cover paths containing spaces work
convert "$1" ./folder.jpg
convert "$1" ./.folder.png

57
mplayer_update.sh Normal file
View File

@ -0,0 +1,57 @@
#!/bin/bash
#######################################
# mplayer_update.sh
#######################################
# get current FFmpeg's & MPlayer's CVS
# snapshots and compile them;
# to put in crontab early in the
# morning (about 6:00 AM)
#######################################
# NOTE: the shebang was moved to the
# first line of the file, where the
# kernel actually looks for it.
#######################################
export MPLAYER_DIR="MPlayer-$(date +"%Y%m%d")"
export FFMPEG_ARCHIVE="ffmpeg-$(date +"%d%m%y" --date '1 day ago')-cvs.tar.gz"
export LOGFILE=/tmp/mplayer_update.log
# everything happens in /tmp; bail out if we cannot get there
cd /tmp || exit 1
echo ------------------------------téléchargement des sources------------------------->"$LOGFILE"
echo >>"$LOGFILE"
wget -nv ftp://ftp1.mplayerhq.hu/MPlayer/cvs/MPlayer-current.tar.bz2 >/dev/null 2>>"$LOGFILE"
wget -nv "http://www.tinkerland.org.uk/cvs-snapshots/current/$FFMPEG_ARCHIVE" >/dev/null 2>>"$LOGFILE"
tar xfjv MPlayer-current.tar.bz2 >/dev/null 2>>"$LOGFILE"
tar zxvf "$FFMPEG_ARCHIVE" >/dev/null 2>>"$LOGFILE"
rm -f /tmp/MPlayer-current.tar.bz2
rm -f "/tmp/$FFMPEG_ARCHIVE"
echo >>"$LOGFILE"
echo ------------------------------installation FFmpeg-------------------------------->>"$LOGFILE"
echo >>"$LOGFILE"
cd ffmpeg || exit 1
./configure >>"$LOGFILE"
make >/dev/null 2>>"$LOGFILE"
make install >>"$LOGFILE"
cd "../$MPLAYER_DIR" || exit 1
# MPlayer is built against the freshly fetched libavcodec snapshot
rm -Rf libavcodec
cp -R ../ffmpeg/libavcodec .
echo >>"$LOGFILE"
echo ------------------------------installation MPlayer------------------------------->>"$LOGFILE"
echo >>"$LOGFILE"
# adjust the configure flags to match the local setup
./configure --language=fr --enable-gui --enable-new-conf --enable-menu --enable-qtx-codecs --enable-live --enable-xmms --with-xanimlibdir=/usr/X11R6/lib/xanim >>"$LOGFILE"
make >/dev/null 2>>"$LOGFILE"
make install >>"$LOGFILE"
#cp DOCS/French/mplayer.1 /usr/local/man/fr/man1/
cp DOCS/French/*.html /usr/local/doc/mplayer/
cd ..
rm -Rf "/tmp/$MPLAYER_DIR"
rm -Rf /tmp/ffmpeg
# mail the build report to user n
mail -s "Rapport de compilation MPlayer" n <"$LOGFILE"

25
nickbook.user.js Normal file
View File

@ -0,0 +1,25 @@
// ==UserScript==
// @name NickBook
// @namespace http://tourmentine.com/
// @description my own settings :)
// @include http://*.facebook.com/*
// @include https://*.facebook.com/*
// ==/UserScript==
// addStyle() function borrowed to Faceboox Fixer UserScript
// Inject a <style> block with the given CSS into the document head.
function addStyle(css) {
  var heads = document.getElementsByTagName('head');
  // Fix: getElementsByTagName always returns a (possibly empty) collection,
  // so the original `if (heads = ...)` assignment-as-condition was always
  // truthy and heads[0] could be undefined; test the length instead.
  if (heads.length > 0) {
    var style = document.createElement('style');
    try { style.innerHTML = css; }
    catch(x) { style.innerText = css; }
    style.type = 'text/css';
    heads[0].appendChild(style);
  }
}
//addStyle('.comments_add_box img { display:none; }');
addStyle('.uiProfilePhotoMedium { display:none; }');
addStyle('#menubar_container { background-color:#000000; }');
addStyle('.profile .box .box_header { background-color:#000000; color:#FFFFFF; }');
addStyle('.UIFilterList .selected { background-color:#000000; color:#FFFFFF; }');

78
opdsupdates.py Normal file
View File

@ -0,0 +1,78 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
import time
import urllib2
import re
import smtplib
def findstring(url):
"""find if document's last update date is equal to current date"""
doc = urllib2.urlopen(url).read()
doctime = re.search(r'Catalogue g&eacute;n&eacute;r&eacute; le (.*)<',
doc)
if doctime != None:
print url.rstrip('\n') + " : "+doctime.group(1)
date = re.match(r'(.*) (.*) (.*)',doctime.group(1))
day = date.group(1)
month = date.group(2)
year = date.group(3)
#day = re.match(r'\d (.*)',doctime.group(1))
if day < 10:
day = '0' + day
months = { 'janv.' : '01',
'f&eacute;vr.' : '02',
'mars' : '03',
'avr.' : '04',
'mai' : '05',
'juin' : '06',
'juil.' : '07',
'aout' : '08',
'sept.' : '09',
'oct.' : '10',
'nov.' : '11',
'd&eacute;c' : '12'}
for i,j in months.iteritems():
month = month.replace(i,j)
doctime = time.strptime(day + ' ' + month + ' ' + year,
'%d %m %Y')[0:3]
curtime = time.localtime()[0:3]
if doctime == curtime:
rooturl = re.match('(.*)/_catalog/(.*)',url)
url = rooturl.group(1) + '/_catalog/index.html'
sendmail('OPDS Update','catalog ' + url + ' updated!')
return True
else:
return False
else:
sendmail('OPDS Error','string not found at catalog ' + url )
def sendmail(subject,corpus):
    """Send an alert e-mail (subject + body) to the admin via local SMTP."""
    sender = "noreply@tourmentine.com"
    recipient = "n@tourmentine.com"
    # fixed headers, then the caller-supplied subject and body
    message = ("From: No Reply <noreply@tourmentine.com>\n"
               "To: nico <n@tourmentine.com>\n"
               "Subject: " + subject + "\n" + corpus + "\n")
    session = smtplib.SMTP()
    session.connect()
    session.sendmail(sender, recipient, message)
    session.close()
# Harvest loop: check every catalog URL listed (one per line) in
# opdslist.txt; lines starting with '#' are treated as comments.
# Python 2 print statements, kept as-is.
print time.strftime('%c') + ' : Starting harvest...\n'
f = open('opdslist.txt')
for line in f:
    if not re.match(r'^#',line):
        findstring(line)
print '\n'

21
postinstall.sh Normal file
View File

@ -0,0 +1,21 @@
#!/bin/sh
# FreeBSD post-install: authorize the admin's ssh key, create the /servers
# ZFS datasets for the jails, and install the base package set.
echo "ssh-dss AAAAB3NzaC1kc3MAAACBAJYazE4nQN4m74E0OC9Ex863x+rRADt9hJRQAixNSMwNPAilfQ0/pt0z4UXZjuKC6aRhWeIQOkof3sXzy04S5QgQY5jAB5rH1J+06QiU6opFcd3IGtnoHST1HSY4FyV6EFbamm8ad/oNrlWCMXYSCHgMGdwmVYzoSjPcgJ7eTHz3AAAAFQDBZV2VSIQwKDjY9uWcNU9CVhe8WwAAAIApm2KB2zGxXwwR6tcuKFcP6uvvZAbTKT/nGDGgADncjFb8DC3MDzwetDA9N0kqtrdRzu4GYYsXOMyTyP/RJQwF4rPF7PKFaB70F0a3knPlZX09ML8EE9L3BD0RvPEfLWY3OzZ9SHWcfEWphgIX9WpTUaWdmqSO0Txii/8FU2yb7gAAAIA7Y/zsTbNO/FNunGSBOg+4NqvbkU6cLuVDty81p7wXlfazhtKPzP5R8L1ReaKavGABwMzCi/4S/QnHLP2C/UW6nUi5Fh53PPRZI+6mam2WIlLWYv5eQZBEDLsxQsk/htSomu8oEilXODfDo7qbRQ5rKVcN5VmKaxIqoEtTN1bWjQ== n@big">>/root/.ssh/authorized_keys2
umount /servers/
#zpool create -f servers ada0s1e
zpool create -f servers ad4s1e
zfs create servers/ftp
zfs create servers/web
zfs create servers/mail
zfs create servers/db
zfs create servers/dns
#fetch ftp://ftp1.freebsd.org/pub/FreeBSD/releases/amd64/8.2-RELEASE/src.txz
#fetch ftp://ftp1.freebsd.org/pub/FreeBSD/releases/amd64/8.2-RELEASE/ports.txz
#tar -C / -xvzf src.txz
#tar -C / -xvzf ports.txz
#cd /usr/ports/shells/zsh/ && make -DBATCH clean install
#cd /usr/ports/security/ipsec-tools/ && make -DBATCH clean install
#cd /usr/ports/editors/vim-lite/ && make -DBATCH clean install
#cd /usr/ports/sysutils/rdiff-backup/ && make -DBATCH clean install
#cd /usr/ports/sysutils/ezjail/ && make -DBATCH clean install
pkg_add -r zsh ipsec-tools rdiff-backup ezjail openvpn 3proxy
# fix: "print" is not a shell command and "return" is invalid outside a
# function or sourced script; report success and exit instead
echo OK
exit 0

21
randomwallpaper.sh Executable file → Normal file
View File

@ -1,8 +1,5 @@
#!/bin/bash
#export DISPLAY=:0.0
echo "DISPLAY:$DISPLAY
"
Dir="/home/n/img/wallpap/1920x1200/"
wallpaplist="/home/n/.wallpaplist"
@ -42,20 +39,10 @@ else
echo "${RandomFile}" >> ${wallpaplist}
fi
if [ -z "${DBUS_SESSION_BUS_ADDRESS}" ] ; then
. ${HOME}/.dbus/session-bus/`ls -rt ${HOME}/.dbus/session-bus/ | tail -1`
export DBUS_SESSION_BUS_ADDRESS DBUS_SESSION_BUS_PID
fi
#echo ${DBUS_SESSION_BUS_ADDRESS}
#echo "Selected File: $RandomFile"
#DISPLAY=:0.0 feh --bg-scale "${Dir%/}/${RandomFile}"
#feh --bg-scale "${Dir%/}/${RandomFile}"
#DISPLAY=:0.0 xfconf-query -c xfce4-desktop -p /backdrop/screen0/monitor0/image-path -s "${Dir%/}/${RandomFile}"
#DISPLAY=:0.0 sudo -u n xfconf-query -c xfce4-desktop -v -p /backdrop/screen0/monitor0/image-path -s ""
DISPLAY=:0.0 feh --bg-scale "${Dir%/}/${RandomFile}"
DISPLAY=:0.0 xfconf-query -c xfce4-desktop -p /backdrop/screen0/monitor0/image-path -s "${Dir%/}/${RandomFile}"
feh --bg-scale "${Dir%/}/${RandomFile}"
xfconf-query -c xfce4-desktop -p /backdrop/screen0/monitor0/image-path -s "${Dir%/}/${RandomFile}"
xfconf-query -c xfce4-desktop -v -p /backdrop/screen0/monitor0/last-image -s "${Dir%/}/${RandomFile}"
xfconf-query -c xfce4-desktop -v -p /backdrop/screen0/monitorVGA1/workspace0/last-image -s "${Dir%/}/${RandomFile}"
pkill gkrellm && gkrellm
#pkill gkrellm && DISPLAY=:0.0 gkrellm
pkill gkrellm && DISPLAY=:0.0 gkrellm

6
tssh Normal file
View File

@ -0,0 +1,6 @@
#!/bin/sh
# Open an SSH session from a KDE3 Konsole tab: rename the tab (via DCOP)
# to the remote host name while connected, and back to "Terminal" after.
# $1 is the ssh target, e.g. user@host.
dcop "$KONSOLE_DCOP_SESSION" renameSession "$(echo "$1" | cut -d '@' -f 2)"
ssh "$1"
dcop "$KONSOLE_DCOP_SESSION" renameSession "Terminal"

View File

@ -0,0 +1,10 @@
<?php
// Backend configuration for the "Yahoo! Actualites - Monde" feed,
// included by html2rss.php and packed into $inidata via compact().
$source = "http://fr.news.yahoo.com/2/index.html"; // page to scrape
$filename = "yahooactu_monde"; // base name of the generated backend files
$titlesite = "Yahoo! Actualités - Monde"; // feed <title>
$linksite = "http://fr.news.yahoo.com/2"; // feed <link>
$descriptionsite = "Yahoo! actualités"; // feed <description>
$languagesite = "fr"; // feed <language>
$baseurl = "http://fr.news.yahoo.com"; // prefix for relative article links
// capture (1) the article link and (2) its title
$grep = "|<b><a href=(.+?)>(.+?)</a></b>|";
?>

6
webnewsget2.sh Normal file
View File

@ -0,0 +1,6 @@
#!/bin/bash
# Regenerate every configured feed: run html2rss.php once per config file
# found in /home/joe/conf/ (the PHP script expects just the file name).
# dotglob: the original `ls -A` also listed dotfiles; nullglob: an empty
# directory must yield zero iterations instead of a literal pattern.
shopt -s dotglob nullglob
for conf in /home/joe/conf/*; do
  /usr/local/bin/html2rss.php "${conf##*/}"
done