--- /dev/null
+# Copyright 2008 James Bunton <jamesbunton@fastmail.fm>
+# Licensed for distribution under the GPL version 2, check COPYING for details
+# asyncore.loop() with delayed function calls
+
+import asyncore
+import heapq
+import signal
+import time
+
+tasks = []
+running = False
+
+class Task(object):
+ def __init__(self, delay, func, args=[], kwargs={}):
+ self.time = time.time() + delay
+ self.func = lambda: func(*args, **kwargs)
+
+ def __cmp__(self, other):
+ return cmp(self.time, other.time)
+
+ def __call__(self):
+ f = self.func
+ self.func = None
+ if f:
+ return f()
+
+ def cancel(self):
+ assert self.func is not None
+ self.func = None
+
+def schedule(delay, func, args=[], kwargs={}):
+ task = Task(delay, func, args, kwargs)
+ heapq.heappush(tasks, task)
+ return task
+
+def loop(timeout=30.0, use_poll=False):
+ global running
+ running = True
+ oldhandler = signal.signal(signal.SIGTERM, exit)
+
+ while running:
+ now = time.time()
+ while tasks and tasks[0].time < now:
+ task = heapq.heappop(tasks)
+ task()
+
+ t = timeout
+ if tasks:
+ t = max(min(t, tasks[0].time - now), 0)
+
+ asyncore.loop(timeout=t, count=1, use_poll=use_poll)
+
+ signal.signal(signal.SIGTERM, oldhandler)
+
+def exit(*args):
+ global running
+ running = False
+
+__all__ = ("schedule", "loop", "exit")
+
+++ /dev/null
-#!/usr/bin/env python2.5
-
-MINFILE_SIZE = 1024
-FILEBUFFER_SIZE = 1024**2
-
-import os, sys, bisect
-
-from copy import copy
-from base64 import standard_b64encode as b64encode
-from collections import defaultdict
-import cPickle
-try:
- import hashlib
- def _getSha1(filename):
- return hashlib.sha1()
-except ImportError:
- import sha
- def _getSha1(filename):
- return sha.new()
-def getSha1(filename):
- sha1 = _getSha1(filename)
- f = file(filename, 'r')
- data = f.read(FILEBUFFER_SIZE)
- while data:
- sha1.update(data)
- data = f.read(FILEBUFFER_SIZE)
- return b64encode(sha1.digest())
-
-try:
- import psyco
- psyco.full()
-except ImportError:
- print >>sys.stderr, "WARNING: Could not load psyco"
-
-class DiskObject(object):
- def __repr__(self):
- return repr(self.getFullPath())
- def __str__(self):
- return self.getFullPath()
- def __lt__(self, other):
- if not hasattr(other, 'getFullPath'):
- raise NotImplemented()
- return self.getFullPath() < other.getFullPath()
- def __eq__(self, other):
- if not hasattr(other, 'getFullPath'):
- raise NotImplemented()
- return self.getFullPath() == other.getFullPath()
- def __hash__(self):
- return hash(self.getFullPath())
-
-class Folder(DiskObject):
- def __init__(self, name, parent = None):
- if name.find(os.path.sep) >= 0 and name != os.path.sep:
- print name
- parent_name, name = os.path.split(name)
- parent = Folder(parent_name, parent)
-
- self.name = name
- self.parent = parent
- if parent:
- parent.addChild(self)
- self.children = {}
- def getFullPath(self):
- folderStack = []
- f = self
- while f:
- folderStack.append(f.name)
- f = f.parent
- return os.path.sep.join(reversed(folderStack))
- def addChild(self, child):
- self.children[child.name] = child
-
-def findDirectory(rootDir, dirName, createNonExistant = False):
- dir = dirName.split(os.path.sep)[1:]
- if dir == ['']:
- dir = []
-
- ret = rootDir
- for folderName in dir:
- try:
- ret = ret.children[folderName]
- except KeyError, e:
- if not createNonExistant:
- raise e
- ret = Folder(folderName, ret)
-
- return ret
-
-class FileObject(DiskObject):
- def __init__(self, name, folder):
- self.name = name
- self.folder = folder
- statinfo = os.stat(self.getFullPath())
- self.mtime_size = (statinfo.st_mtime, statinfo.st_size)
- def getDiskID(self):
- statinfo = os.stat(self.getFullPath())
- return (statinfo.st_dev, statinfo.st_ino) # Identify the file
- def get_mtime_size(self):
- return self.mtime_size
- def getFullPath(self):
- return '%(folder)s/%(file)s' % { 'folder': self.folder.getFullPath(), 'file': self.name }
-
-class GlobalFileInfo(object):
- def __init__(self):
- self.files = defaultdict(list)
- self.filelist = {}
- self.root = Folder('')
-
- def _scanDirUpdateFile(self, dirObject, dirPath, filename):
- def printPath(word):
- print '%s "%s"' % (word, filename[-80:])
- fullpath = os.path.join(dirPath, filename)
- if os.path.islink(fullpath) or not os.path.isfile(fullpath):
- printPath('Skipping')
- return
- try:
- file = FileObject(filename, dirObject)
- new_mtime_size = file.get_mtime_size()
-
- if file in self.filelist:
- if file.get_mtime_size() == self.filelist[file].get_mtime_size():
- printPath('Skipping')
- return
- old_sha1 = self.filelist[file].sha1
- del self.filelist[file]
- self.files[old_sha1].remove(file)
-
- if file.get_mtime_size()[1] < MINFILE_SIZE:
- printPath('Skipping')
- return
- printPath('Scanning')
-
- file.sha1 = getSha1(fullpath)
- self.files[file.sha1].append(file)
- self.filelist[file] = file
- except IOError:
- print >>sys.stderr, 'WARNING: Could not get sha1 of "%s"\n' % (fullpath)
-
- def scanDir(self, dirName):
- root = findDirectory(self.root, dirName, createNonExistant = True)
-
- for dirPath, dirs, files in os.walk(dirName):
- print 'Scanning directory "%s"\n' % dirPath
- folder = findDirectory(self.root, dirPath, createNonExistant = True)
- # Add the children Directories
- if '.svn' in dirs:
- dirs.remove('.svn')
- for d in dirs:
- Folder(d, folder) # As a side effect, this is added to the parent correctly
-
- for f in files:
- sys.stdout.write("\033[A\033[300D\033[2K")
- self._scanDirUpdateFile(folder, dirPath, f)
- sys.stdout.write("\033[A\033[100D\033[2K")
- def findDuplicates(self):
- return [(sha1, list(filenames)) for sha1, filenames in self.files.items() if len(filenames) > 1]
-
-def main():
- try:
- files = cPickle.load(open(sys.argv[1]))
- except IOError:
- files = GlobalFileInfo()
-
- for dir in sys.argv[2:]:
- if dir[-1] == '/':
- dir = dir[:-1]
- files.scanDir(dir)
-
- cPickle.dump(files, open(sys.argv[1], 'wb'), 2)
- print "Done"
-
-### print files.files
-
-if __name__ == "__main__":
- main()
+++ /dev/null
-#!/usr/bin/env python2.5
-
-MINFILE_SIZE = 1024
-FILEBUFFER_SIZE = 1024**2
-APPLICATION_VERSION = '0.2'
-
-import os, sys, bisect
-
-import python24_adapter
-from copy import copy
-from base64 import standard_b64encode as b64encode
-from collections import defaultdict
-import cPickle
-
-try:
- import hashlib
- def _getSha1(filename):
- return hashlib.sha1()
-except ImportError:
- import sha
- def _getSha1(filename):
- return sha.new()
-def getSha1(filename):
- if _sha1_cache.has_key(filename):
- return b64encode(_sha1_cache[filename])
-
- sha1 = _getSha1(filename)
- f = file(filename, 'r')
- data = f.read(FILEBUFFER_SIZE)
- while data:
- sha1.update(data)
- data = f.read(FILEBUFFER_SIZE)
-
- ret = sha1.digest()
- _sha1_cache[filename] = ret
- return b64encode(ret)
-
-try:
- import psyco
- psyco.full()
-except ImportError:
- print >>sys.stderr, "WARNING: Could not load psyco"
-
-def __versionUpgrade0_1(input):
- import base64
- return '0.2', dict((filename, base64.b64decode(sha1hash)) for filename, sha1hash in input)
-
-def loadCache(filename = os.path.expanduser('~/.sha1_cache'), version = APPLICATION_VERSION):
- global _sha1_cache
- try:
- cache_version, cache = cPickle.load(open(filename, 'rb'))
- if cache_version == '0.1':
- cache_version, cache = __versionUpgrade0_1(cache)
-
- if cache_version != version:
- raise Exception("Invalid Version")
- print 'WARNING: Using the cache file "%s", sha1 hash may be old' % filename
- except:
- cache = {}
- _sha1_cache = cache
-
-def storeCache(filename = os.path.expanduser('~/.sha1_cache'), version = APPLICATION_VERSION):
- fd = open(filename, 'wb')
- try:
- cPickle.dump((version, _sha1_cache), fd)
- finally:
- fd.close()
-
-class GlobalFileInfo(object):
- def __init__(self):
- self.files = defaultdict(lambda : defaultdict(list))
-
- def _scanDirUpdateFile(self, dirPath, filename):
- def printPath(word):
- print '%s "%s"' % (word, filename[-80:])
- fullpath = os.path.abspath(os.path.join(dirPath, filename))
- if os.path.islink(fullpath) or not os.path.isfile(fullpath):
- printPath('Skipping')
- return
- try:
- statInfo = os.stat(fullpath)
-
- if statInfo.st_size < MINFILE_SIZE:
- printPath('Skipping')
- return
- printPath('Scanning')
-
- fileHash = getSha1(fullpath)
- self.files[(fileHash, statInfo.st_size)][(statInfo.st_dev, statInfo.st_ino)].append(fullpath)
- except IOError:
- print >>sys.stderr, 'WARNING: Could not get sha1 of "%s"\n' % (fullpath)
-
- def scanDir(self, dirName):
- for dirPath, dirs, files in os.walk(dirName):
- print 'Scanning directory "%s"\n' % dirPath
- # Add the children Directories
- if '.svn' in dirs:
- dirs.remove('.svn')
-
- for f in files:
- sys.stdout.write("\033[A\033[300D\033[2K")
- self._scanDirUpdateFile(dirPath, f)
- sys.stdout.write("\033[A\033[100D\033[2K")
- def findDuplicates(self):
- return [(key, inodes) for key, inodes in self.files.items() if len(inodes) > 1]
-
-def prettyFormatDups(dups):
- return '\n'.join( \
- '%s\n\t%s' % (key, \
- '\n\t'.join('%s: %s' % (inode_key, ', '.join(files)) for inode_key, files in inodes.items()) \
- ) for key, inodes in dups \
- )
-
- ret = []
- for key, inodes in dups:
- section = []
- for inode_key, files in inodes.items():
- section.append('%s: %s' % (inode_key, ', '.join(files)))
- ret.append('%s\n\t%s' % (key, '\n\t'.join(section)))
-
- return '\n'.join(ret)
-
-
-def makeBashScript(dups, fd):
- spaceSaved = 0
- print >>fd, "#!/bin/bash"
- print >>fd, '# This script was created automatically by "%s"' % __file__
- # Print out a helper function
- print >>fd
- print >>fd, 'function doLink() {'
- print >>fd, '\tINPUT_FILE="${1}"'
- print >>fd, '\tshift'
- print >>fd, '\tfor file in "$@" ; do'
- print >>fd, '\t\tln "${INPUT_FILE}" "${file}"'
- print >>fd, '\tdone'
- print >>fd, '}'
-
- for dup_key, inodes in dups:
- print >>fd
- print >>fd, '# Handling %s' % str(dup_key)
- inodes_data = inodes.items()
- inodes_data.sort(key = lambda x: len(x[1]), reverse = True)
- for inode_key, files in inodes_data[1:]:
- print >>fd, '# Removing files connected to inode %d on device %d' % (inode_key[1], inode_key[0])
- print >>fd, 'rm -f "%s"' % '" "'.join(file for file in files)
- fileToLink = inodes_data[0][1][0] # Get the first filename of the largest group of (already) linked files
- print >>fd, '# Now link all the files together'
- print >>fd, 'doLink "%s" "%s"' % (fileToLink, '" "'.join('" "'.join(files) for inode_key, files in inodes_data[1:]))
- spaceSaved += sum(len(files) for inode_key, files in inodes_data[1:]) * dup_key[1]
-
- print >>fd
- print >>fd, '# Total space saved: %d B (%dK B) (%d MB)' % (spaceSaved, spaceSaved / 1024, spaceSaved / 1024**2)
-
-def main():
- loadCache()
- files = GlobalFileInfo()
-
- for dir in sys.argv[2:]:
- files.scanDir(dir)
-
- storeCache()
- print "Done."
- try:
- fd = open(sys.argv[1], 'wb')
- makeBashScript(files.findDuplicates(), fd)
- finally:
- fd.close()
-
-if __name__ == "__main__":
- main()
+++ /dev/null
-#!/usr/bin/env python
-
-import sys, os, re, itertools
-from wget_lib import *
-
-import twisted_wget
-from twisted_wget import reactor
-from Enum import enum
-
-DEBUG = True
-
-URL_TYPE = enum('ImageBoard', 'HtmlPage', 'Image', 'Other')
-
-def addtolist(list, *regexStrs):
- def decorator(func):
- for regexStr in regexStrs:
- regex = re.compile(regexStr)
- list.append( (regex, func) )
- return func
- return decorator
-
-class Downloader(object):
- htmlParsers = []
- class ParserException(Exception):
- pass
-
- def __init__(self, url):
- self.url = url
- self.deferred = None
-
- def downloadFiles(self):
- # XXX: This is a major hack and needs to be cleaned
- def commonCallback(downloadObject):
- self.workingUrls.remove(downloadObject)
- self.activeHosts.remove(downloadObject.host)
- self.__scheduleDownloadLater()
- def successCallback(downloadObject, data):
- print 'Downloaded %s' % downloadObject.url
- commonCallback(downloadObject)
- downloadObject.data = data
- downloadObject.callback(downloadObject)
- self.doneUrls.add(downloadObject)
- def errorCallback(downloadObject, data):
- commonCallback(downloadObject)
- print 'Error: %s' % data
- def doDownload(file):
- print 'About to download "%s"' % file.url
- twisted_wget.downloadURL(file.url,
- successBack = lambda data: successCallback(file, data),
- errorBack = lambda data: errorCallback(file, data)
- )
- self.waitingUrls.remove(file)
- self.workingUrls.add(file)
- self.activeHosts.add(file.host)
-
-
- self.deferred = None
- for file in list(self.waitingUrls):
- if file.host not in self.activeHosts:
- doDownload(file)
-
- # Notes:
- # - image_object.data is a string containing all of the data
- # - image_object.url is a string containing the url where the data was downloaded from
- def _parseImageBoard(self, image_object):
- assert(image_object.data != None)
- assert(image_object.url != None)
-
- for parser_regex, parser in self.htmlParsers:
- if parser_regex.search(image_object.url):
- return parser(image_object)
- raise DownloadManager.ParserException('Could not find the correct parser')
-
- @addtolist(htmlParsers, '\.4chan\.org')
- def _parseImageBoard_4chan(self, image_object):
- import htmldata
- def __extractImageUrlsFromList(urllist):
- for url_elem in urllist:
- if url_elem.tag_name.upper() == 'A' and isImageURL(url_elem.url):
- yield url_elem.url
-
- # TODO: Extract a better filename from the list
- urllist = __extractImageUrlsFromList( htmldata.urlextract(image_object.data, image_object.url) )
- urllist = xRemoveDups(urllist)
- urllist = itertools.ifilter(
- lambda elem: elem.find('/thumb/') == -1,
- itertools.ifilter(lambda elem: elem.find('/src.cgi/') == -1, urllist)
- )
-
- if DEBUG:
- urllist, urllist_dup = itertools.tee(urllist)
- print >>sys.stderr, 'Got the following urls: \n\t%s' % '\n\t'.join(urllist_dup)
-
- for url in urllist:
- self.downloadImage(url, referer = image_object.url)
-
-def main(output_directory):
- dm = DownloadManager()
- for url in sys.argv[1:]:
- dm.recursiveDownloadImages(url)
-
- reactor.run()
-
-if __name__ == "__main__":
- output_directory = os.environ.get('WGET_IMAGEBOARD_DIRECTORY',
- os.path.join(os.environ['HOME'], 'Images_old', 'wget'))
- main(output_directory)
+++ /dev/null
-#!/usr/bin/env python
-# Copyright 2007 James Bunton <jamesbunton@fastmail.fm>
-# Modified by Greg Darke <gdar9540@usyd.edu.au> (2007)
-# Licensed for distribution under the GPL version 2, check COPYING for details
-# Check to see if people are online...
-
-import commands_async, pwd, socket, sys
-
-def matchNames(names):
- def getFullName(gecos_entry):
- return gecos_entry[: entry.pw_gecos.find(',')]
- def parsePWDentry(entry):
- return (entry.pw_name.lower(), getFullName(entry.pw_gecos).lower())
-
- pwall = [parsePWDentry(entry) for entry in pwd.getpwall()]
-
- out = []
- for name in names:
- found = False
- name = name.lower()
- for entry in pwall:
- username, realname = entry
- if username.find(name) >= 0 or realname.find(name) >= 0:
- found = True
- out.append((username, realname))
- if not found:
- print "User '%s' not found in /etc/passwd, assuming you gave a username and you are not on the IT machines..." % name
- out.append((name, "[Not Found in /etc/passwd]"))
- return out
-
-def getSmbStatus():
- def halfparse(data):
- return data.split('\n')[4:]
-
- sshcmd = "ssh %s -q -o StrictHostKeyChecking=no -o BatchMode=true '/usr/samba/bin/smbstatus -b'"
-
- cmd_async = commands_async.CommandRunner()
- cmd_async.executeCommand(sshcmd % "ugitsmb.ug.it.usyd.edu.au")
- cmd_async.executeCommand(sshcmd % "itsmb.ug.it.usyd.edu.au")
- cmd_async.waitForCompletion()
-
- data = []
- for cmd, output in cmd_async.getOutputs().items():
- data += halfparse(output)
-
- out = []
- for line in data:
- line_split = line.strip().split()
- if not line_split or len(line_split) != 5:
- continue
-
- pid, username, group, _, ip = line_split
- host = socket.gethostbyaddr(ip[1:-1])[0]
- out.append((username, host))
- return out
-
-def getLastStatus():
- hosts = ["mono"]
- hosts += ['congo%d' % i for i in range(1,5)]
- hosts += ['nlp%d' % i for i in range(0,9)]
- #hosts += ['userf%d' % i for i in range(1,6)]
-
- sshcmd = "ssh %s -q -o StrictHostKeyChecking=no -o BatchMode=true 'last -a -t $(date +%%Y%%m%%d%%H%%M%%S)|grep \"still logged in\"'"
-### sshcmd = "rsh -n %s 'last -a -t $(date +%%Y%%m%%d%%H%%M%%S)|grep \"still logged in\"'"
-
- cmd_async = commands_async.CommandRunner()
- for host in hosts:
- cmd_async.executeCommand(sshcmd % host)
-
- cmd_async.waitForCompletion()
- data = "".join(output for cmd,output in cmd_async.getOutputs().items())
-
- out = []
- for line in data.split('\n'):
- if not line.strip():
- continue
- try:
- chunk = line.strip().split()
- username = chunk[0]
- ip = chunk[-1]
- except Exception, e:
- print "Error:", line, e
- return []
- if ip == 'in': # From 'still logged in'
- host = "unknown"
- else:
- try:
- host = socket.gethostbyaddr(ip)[0]
- except:
- host = "unknown"
- out.append((username, host))
- return out
-
-
-def printLocation((username, fullname), smbStatus):
- # Since we only want to know if they are at a location, and now how many times they are at
- # the location, we store it in a set
- locationList = set(ip for username2, ip in smbStatus if username == username2)
- if locationList:
- print "Username %s:\n Full name: '%s'\n %s\n" % \
- (username, fullname, '\n '.join('Location: %s' % ip for ip in locationList))
-
-def main():
- names = matchNames(sys.argv[1:])
- smbStatus = getSmbStatus()
- lastStatus = getLastStatus()
- status = smbStatus + lastStatus
-
- for name in names:
- printLocation(name, status)
-
-if __name__ == "__main__":
- main()
+++ /dev/null
-#!/usr/bin/env python
-
-import sys, os, os.path, socket
-from optparse import OptionParser, Values
-
-VERSION = "1.1"
-CACHE_LOCATION = os.path.expanduser('~/.randombg2_filelist_cache')
-SOCKET_FILE = os.path.expanduser('~/tmp/tmp_socket')
-
-try:
- # These are my libraries...
- import GregDebug, AsyncSocket, WallChanger, SigHandler
-
- from GregDebug import debug, setDebugLevel, DEBUG_LEVEL_DEBUG, DEBUG_LEVEL_LOW, DEBUG_LEVEL_MEDIUM, DEBUG_LEVEL_HIGH, DEBUG_INCREMENT
-
- from FileLists import *
-except ImportError, e:
- print >>sys.stderr, "Missing libraries!\nExiting..."
- sys.exit(1)
-
-def buildparser():
- def buildOptions():
- pass
- def addfilestolist(optclass, opt, value, parser, fileList):
- fo = open(value)
- for line in fo:
- fileList.list.append(line.strip())
- fo.close()
- fileList.allowAllRandom = False
-
- parser = OptionParser(version="%prog " + VERSION,
- description = "Picks a random background image",
- usage = "%prog [options] dir [dir2 ...]")
- parser.add_option("-p", "--permanent",
- action="store_true", dest="permanent", default=False,
- help="Make the background permanent. Note: This will cause all machines logged in with this account to simultaneously change background [Default: %default]")
- parser.add_option("-q", "--quiet", "--silent",
- action="count", dest="quiet", default=0,
- help="Make the script quiet (good for running from a shell script)")
- parser.add_option("-v", '-d', "--verbose", "--debug",
- action="count", dest="verbose", default=0,
- help="Make the louder (good for debugging, or those who are curious)")
- parser.add_option("-b", "--background-colour",
- action="store", type="string", dest="background_colour", default="black",
- help="Change the default background colour that is displayed if the image is not in the correct aspect ratio [Default: %default]")
- parser.add_option("--all-random",
- action="store_true", dest="all_random", default=False,
- help="Make sure that all images have been displayed before repeating an image")
- parser.add_option("--folder-random",
- action="store_true", dest="folder_random", default=False,
- help="Give each folder an equal chance of having an image selected from it")
- #parser.add_option("--file-list",
- # action="callback", callback=addfilestolist, type="string", callback_args=(fileList,),
- # help="Adds the list of images from the external file")
- parser.add_option("--cycle",
- action="store", type="int", default=0, dest="cycle_time",
- help="Cause the image to cycle every X seconds")
- return parser
-
-
-def createIPCClient(domainSocketName):
- sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.connect(domainSocketName)
- sock_file = sock.makefile()
- return sock_file
-
-def main():
- if os.path.exists(SOCKET_FILE):
- # We are the client
- sock = createIPCClient(SOCKET_FILE)
- print >>sock, "CMD NEXT"
-### print >>sock, "CMD PREVIOUS"
-### print >>sock, "CMD PAUSE"
- sock.close()
- else:
- # We are the server
- try:
- Server(SOCKET_FILE)()
- finally:
- # Make sure that the socket is cleaned up
- os.unlink(SOCKET_FILE)
-
-class Server(object):
- def __init__(self, domainSocketName):
- self.socketHandler = self._createIPCServer(domainSocketName)
- self.callbackObj = None
-
- parser = buildparser()
- useroptions, paths = parser.parse_args(sys.argv[1:])
-
- setDebugLevel(DEBUG_INCREMENT * (useroptions.quiet - useroptions.verbose))
- debug("Just set GregDebug.DEBUG_LEVEL to %d" % GregDebug.DEBUG_LEVEL, DEBUG_LEVEL_LOW)
-
- self.filelist = self.__getFileList(useroptions, paths)
-
- if not self.filelist.hasImages():
- print >>sys.stderr, "No files!"
- parser.print_help()
- sys.exit(1)
-
- debug("Initilizing RandomBG", DEBUG_LEVEL_DEBUG)
- self.randombg = WallChanger.RandomBG(self.filelist, useroptions.background_colour, useroptions.permanent)
-
- # Store some of the other useful options
- self.cycle_time = useroptions.cycle_time
-
- def __getFileList(self, useroptions, paths):
- if useroptions.all_random:
- filelist = AllRandomFileList()
- elif useroptions.folder_random:
- filelist = FolderRandomFileList()
- else:
- filelist = RandomFileList()
-
- for path in paths:
- filelist.doAddPath(path)
-
- if filelist.attemptCacheLoad(CACHE_LOCATION):
- debug("Loaded cache successfully", DEBUG_LEVEL_LOW)
- else:
- debug("Could not load cache")
- filelist.doScanPaths()
- return filelist
-
- def cycle_reload(self):
- debug("Reloading wallpaper", DEBUG_LEVEL_LOW)
- ret = self.randombg.cycleReload()
- if not ret:
- debug('Could not set wallpaper. Returned "%s"' % ret)
- debug('About to sleep for "%d" seconds' % self.cycle_time, DEBUG_LEVEL_LOW)
- self.callbackObj = self.socketHandler.addCallback(self.cycle_time, self.cycle_next)
- return ret
-
- def cycle_next(self):
- debug("Cycling wallpaper", DEBUG_LEVEL_LOW)
- ret = self.randombg.cycleNext()
- if not ret:
- debug('Could not set wallpaper. Returned "%s"' % ret)
- debug('About to sleep for "%d" seconds' % self.cycle_time, DEBUG_LEVEL_LOW)
- self.callbackObj = self.socketHandler.addCallback(self.cycle_time, self.cycle_next)
- self.filelist.doStoreCache(CACHE_LOCATION)
- return ret
-
- def cycle_prev(self):
- debug("Cycling wallpaper", DEBUG_LEVEL_LOW)
- ret = self.randombg.cyclePrev()
- if not ret:
- debug('Could not set wallpaper. Returned "%s"' % ret)
- debug('About to sleep for "%d" seconds' % self.cycle_time, DEBUG_LEVEL_LOW)
- # Yes this is ment to be cycle_next
- self.callbackObj = self.socketHandler.addCallback(self.cycle_time, self.cycle_next)
- self.filelist.doStoreCache(CACHE_LOCATION)
- return ret
-
- def _finished(self):
- self.filelist.doStoreCache(CACHE_LOCATION)
-
- def __call__(self):
- # Callback immediatly
- self.socketHandler.addCallback(0.0, self.cycle_reload)
- # Now go into the main loop
- self.socketHandler.mainLoop()
- # Clean up time
- self._finished()
-
- def _createIPCServer(self, domainSocketName):
- """Create the Server socket, and start listening for clients"""
-
- class Handler(object):
- def __init__(self, parent):
- self.parent = parent
- def _removeOldTimer(self):
- if self.parent.callbackObj:
- self.parent.socketHandler.removeCallback(self.parent.callbackObj)
- def _cmd_PAUSE(self):
- debug("Pausing randombg")
- self._removeOldTimer()
- def _cmd_NEXT(self):
- self._removeOldTimer()
- self.parent.cycle_next()
- def _cmd_PREVIOUS(self):
- self._removeOldTimer()
- self.parent.cycle_prev()
- def _cmd_RESCAN(self):
- self.parent.filelist.doScanPaths()
- self._cmd_NEXT()
- def _cmd_RELOAD(self):
- self._removeOldTimer()
- self.parent.cycle_reload()
- def _processLine(self, line):
- prefix, cmd = line.split(None, 1)
- if prefix != 'CMD':
- debug('Unknown command received "%s"' % line)
- return
- if hasattr(self, '_cmd_%s' % cmd):
- getattr(self, '_cmd_%s' % cmd)()
- else:
- debug('Unknown command received "%s"' % cmd)
- def __call__(self, lineReader):
- try:
- while lineReader.hasLine():
- self._processLine(lineReader.readline())
- except Exception, e:
- debug(str(e))
-
- def handleClient(sock):
- conn, address = sock.accept()
- async_handler.addLineBufferedSocket(conn, Handler(self) )
-
- async_handler = AsyncSocket.AsyncSocketOwner()
-
- sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.bind(domainSocketName)
- sock.listen(2) # Backlog = 2
-
- async_handler.addSocket(sock, handleClient)
-
- return async_handler
-
-if __name__ == "__main__":
- main()
self.filelist = randombg2.AllRandomFileList()
self.filelist.doAddPaths([
- '/home/greg/images_sfw/Futakoi',
- '/home/greg/images_sfw/Moon Phase',
- '/home/greg/images_sfw/Ouran High',
- '/home/greg/images_sfw/Paniponi',
- '/home/greg/images_sfw/Popotan',
- '/home/greg/images_sfw/Rozen Maiden',
- '/home/greg/images_sfw/Yotsuba',
- '/home/greg/images_sfw/chobits',
- '/home/greg/images_sfw/ichigo Mashimaro',
- '/home/greg/images_sfw/カードキャプターさくら',
- '/home/greg/images_sfw/涼宮ハルヒの憂鬱',
- '/home/greg/images_sfw/灼眼のシャナ',
- '/home/greg/images_sfw/舞-乙HiME',
- '/home/greg/images_sfw/舞HiME',
- '/home/greg/images_sfw/魔法先生ネギま',
- '/home/greg/images_sfw/魔法少女リリカルなのは'])
- #for path in ['/home/greg/images_sfw/Air','/home/greg/images_sfw/Azumanga Daioh','/home/greg/images_sfw/Futakoi','/home/greg/images_sfw/Karin','/home/greg/images_sfw/Love Hina','/home/greg/images_sfw/Moon Phase','/home/greg/images_sfw/Neon Genesis','/home/greg/images_sfw/Ouran High','/home/greg/images_sfw/Paniponi','/home/greg/images_sfw/Popotan','/home/greg/images_sfw/Rozen Maiden','/home/greg/images_sfw/Yotsuba','/home/greg/images_sfw/chobits','/home/greg/images_sfw/dot Hack','/home/greg/images_sfw/ichigo Mashimaro','/home/greg/images_sfw/kasimasi','/home/greg/images_sfw/カードキャププターさくら','/home/greg/images_sfw/涼宮ハルヒの憂鬱','/home/greg/images_sfw/灼眼のシャナ','/home/greg/images_sfw/舞-乙HiME','/home/greg/images_sfw/舞HiME','/home/greg/images_sfw/魔法先生ネギま','/home/greg/images_sfw/魔法少女リリカルなのは']:
- # self.filelist.doAddPath(path)
+ # Add paths here
+ ])
if not self.filelist.attemptCacheLoad(randombg2.CACHE_LOCATION):
self.filelist.doScanPaths()
self.randombg = randombg2.RandomBG(self.filelist)
+++ /dev/null
-#!/usr/bin/env python
-# Copyright 2007 Greg Darke <gdar9540@usyd.edu.au>
-# Licensed for distribution under the GPL version 2, check COPYING for details
-# A async framework for sockets and fds (fds only supported under unix operating systems)
-# NOTE: Orig version submitted for NETS3603 assignment 1 (Semester 1 - 2007)
-
-
-from __future__ import division
-import os, sys, select, socket, bisect, fcntl
-from time import time
-
-class Callback(object):
- __slots__ = ['callback', 'callback_time']
- def __init__(self, callback_time, callback):
- self.callback_time = callback_time
- self.callback = callback
- def __call__(self):
- return self.callback()
- def __lt__(self, other):
- if hasattr(other, 'callback_time'):
- return self.callback_time < other.callback_time
- else:
- return NotImplemented
-
-class AsyncSocketOwner(object):
- """This is the object contains the 'main loop' of the application"""
- def __init__(self):
- self.sockets_input = []
- self.socket_callbacks = {}
- self.timer_callbacks = []
- self._exit = False
- self.state = {}
-
- def print_state(self):
-### sys.stdout.write('\033[H\033[2J')
- print "\n".join(['%s: %s' % v for v in self.state.items()])
- self.addCallback(1.0, self.print_state)
-
- def _check_timers_callbacks(self):
- now = time()
- i = bisect.bisect(self.timer_callbacks, Callback(now, None))
- self.state['Processing Callbacks'] = '%d of %d' % (i,
- len(self.timer_callbacks))
- needCall = self.timer_callbacks[0:i]
- self.timer_callbacks = self.timer_callbacks[i:]
-
- for callback in needCall:
- callback()
-
- def exit(self):
- self._exit = True
-
- def mainLoop(self):
- try:
- while not self._exit:
- if len(self.timer_callbacks) > 0:
- timeout = max(self.timer_callbacks[0].callback_time - time(), 0)
- # Wait until the next timer expires for input
- inputready, outputready, exceptready = \
- select.select(self.sockets_input, [], [], timeout)
- else:
- # Wait forever for input
- inputready, outputready, exceptready = \
- select.select(self.sockets_input, [], [])
-
- # Handle any data received
- self.state['Waiting sockets'] = len(inputready)
- self.state['Socket count'] = len(self.sockets_input)
- for s in inputready:
- self.socket_callbacks[s](s)
-
- # Handle timers:
- if len(self.timer_callbacks) > 0 and \
- self.timer_callbacks[0].callback_time < time():
- self._check_timers_callbacks()
- except KeyboardInterrupt:
- pass
-
- def _addFDCallback(self, fd, callback):
- """Add a callback for a file descriptor, also add it to the select call"""
- self.sockets_input.append(fd)
- self.socket_callbacks[fd] = callback
-
- def removeSocket(self, fd):
- """Removes the sepecified fd from the event loop"""
- self.sockets_input.remove(fd)
- del self.socket_callbacks[fd]
-
- def addFD(self, fd, callback):
- """Adds a file descriptor to the event loop"""
- # Turn blocking off
- flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
- fcntl.fcntl(fd, fcntl.F_SETFL, flags)
- self._addFDCallback(fd, callback)
-
- def addSocket(self, s, callback):
- """Adds a socket to the event loop"""
- # Disable blocking - So now we have an async socket
- s.setblocking(False)
- self._addFDCallback(s, callback)
-
- def addLineBufferedSocket(self, s, callback):
- sockWrapper = LineBufferedAsyncClientConnection(s, callback, self)
- s.setblocking(False)
- self._addFDCallback(s, sockWrapper._dataArrived)
-
- def addCallback(self, seconds, callback):
- """Add a timer callback"""
- # Keep the list of callbacks sorted to keep things more efficient (Note: This would be better with a heap)
- cb = Callback(time() + seconds, callback)
- bisect.insort(self.timer_callbacks, cb)
- return cb
-
- def removeCallback(self, callback_object):
- """Remove a callback from the list. NB: If the time has fired/is in the list to be
- fired, the outcome is undefined (currently it will be called - but this may change)"""
- if callback_object in self.timer_callbacks:
- self.timer_callbacks.remove(callback_object)
-
-class LineBufferedAsyncClientConnection(object):
- __slots__ = ['sock', 'callback', 'delim', 'eventLoop', 'linesBuffer', 'lineBuffer', 'closed']
- def __init__(self, sock, callback, eventLoop, delim = '\n'):
- self.sock = sock
- self.callback = callback
- self.delim = delim
- self.eventLoop = eventLoop
- self.linesBuffer = []
- self.lineBuffer = ''
-
- def _dataArrived(self, *args, **kwargs):
- data = self.sock.recv(65535)
- if not data:
- self.closed = True
- self.eventLoop.removeSocket(self.sock)
- return
-
- self.lineBuffer += data
- newLinePos = self.lineBuffer.rfind(self.delim)
- if newLinePos >= 0:
- self.linesBuffer += self.lineBuffer[:newLinePos].split(self.delim)
- self.lineBuffer = self.lineBuffer[newLinePos+1:]
- self.callback(self)
-
- def fileno(self):
- """Return the encapsulated socket's fileno (used for select.select)"""
- return self.sock.fileno()
-
- def readline(self):
- if not self.hasLine():
- raise Exception('No data in buffer')
- ret = self.linesBuffer[0]
- del self.linesBuffer[0]
- return ret
-
- def write(self, data):
- self.sock.write(data)
- send = write
-
- def hasLine(self):
- return len(self.linesBuffer) > 0
+++ /dev/null
-#! python
-# Copyright 2007 Greg Darke <gdar9540@usyd.edu.au>
-# Licensed for distribution under the GPL version 2, check COPYING for details
-# An enum like interface
-
-"""
-An enum like interface
-"""
-
-__all__ = ('enum', )
-
-import itertools
-
-def enum(*args):
- return EnumContainer(*args)
-
-class EnumElement(object):
- __slots__ = ('enumName', 'enumContainer')
- def __init__(self, enumName, enumContainer):
- self.enumName = enumName
- self.enumContainer = enumContainer
- def __repr__(self):
- return '%s(%s)' % (self.__class__.__name__, self.enumName)
- def __str__(self):
- return self.enumName
- def __eq__(self, other):
- if not isinstance(other, self.__class__): return NotImplemented
- return other is self
- def __hash__(self):
- return hash(self.enumName) ^ hash(self.enumContainer)
-
-class EnumContainer(object):
- def __init__(self, *enums):
- self.enumList = tuple( EnumElement(enumName, self) for enumName in enums)
- for enumName, enumElement in itertools.izip(enums, self.enumList):
- setattr(self, enumName, enumElement)
-
- def __contains__(self, enum):
- return enum in self.enumList
-
- def __repr__(self):
- return '%s(%s)' % (self.__class__.__name__, ', '.join(self.enumList))
-
- def explode(self):
- """Places contents of this enum into the callers global namespace"""
- import inspect
- frameObject, _, _, _, _ = inspect.stack[1] # The stackframe of who called us
- global_dict = frameObject.f_globals
- del frameObject
- for enum in self.enumList:
- if str(enum) in global_dict:
- raise ValueError, '%s is already in your global dict' % enum
- for enum in self.enumList:
- global_dict[str(enum)] = enum
+++ /dev/null
-#! python
-
-import sys, os, os.path, random
-import cPickle
-
-try:
- import GregDebug
- from GregDebug import debug, DEBUG_LEVEL_DEBUG, DEBUG_LEVEL_LOW, DEBUG_LEVEL_MEDIUM, DEBUG_LEVEL_HIGH
- if __name__ == "__main__":
- GregDebug.DEBUG_LEVEL = -10
-except ImportError:
- print >>sys.stderr, "WARNING: debugging disabled as GregDebug could not be found"
- DEBUG_LEVEL_DEBUG = DEBUG_LEVEL_LOW = DEBUG_LEVEL_MEDIUM = DEBUG_LEVEL_HIGH = None
- def debug(message, level=None, indent_level = None):
- pass
-
-class FileListNotImplemented(Exception):
- pass
-
-def filterImageFiles(imageList):
- IMAGE_EXT_LIST = ('.jpg', '.jpe', '.jpeg', '.png', '.gif', '.bmp')
- def isImageFile(fname):
- filebase, fileext = os.path.splitext(fname)
- fileext = fileext.lower()
- return fileext in IMAGE_EXT_LIST
- return [fname for fname in imageList if isImageFile(fname)]
-
-
-class BaseFileList(object):
- """Base file list implementation"""
- def doScanPaths(self):
- raise FileListNotImplemented()
- def doAddPaths(self, paths):
- for path in paths:
- self.doAddPath(path)
- def doAddPath(self, path):
- raise FileListNotImplemented()
- def doStoreCache(self, path):
- return False
- def getNextRandomImage(self):
- raise FileListNotImplemented()
- def getPrevRandomImage(self):
- raise FileListNotImplemented()
- def getCurrentImage(self):
- raise FileListNotImplemented()
- def attemptCacheLoad(self, filename, rescanPaths = False):
- return False
- def hasImages(self):
- return False
-
-class RandomFileList(BaseFileList):
- def __init__(self):
- self.list = []
- self.paths = []
- self.lastImage = None
-
- def doScanPaths(self):
- for path in self.paths:
- for dirpath, dirsnames, filenames in os.walk(path):
- for filename in filterImageFiles(filenames):
- self.list.append(os.path.join(dirpath, filename))
-
- def doAddPath(self, path):
- self.paths.append(path)
- debug('Added path "%s" to the list' % path, DEBUG_LEVEL_DEBUG)
-
- def doStoreCache(self, filename):
- pass
-
- def getLastImage(self):
- return self.lastImage
-
- def getNextRandomImage(self):
- n = random.randint(0, len(self.list)-1)
- self.lastImage = self.list[n]
- debug("Picked file '%s' from list" % self.lastImage)
- return self.lastImage
-
- def attemptCacheLoad(self, filename, rescanPaths = False):
- return False
-
- def hasImages(self):
- return len(self.list) > 0
-
-
-class AllRandomFileList(BaseFileList):
- def __init__(self):
- self.list = None
- self.paths = []
- self.imagePointer = 0
-
- # Scan the input directory, and then randomize the file list
- def doScanPaths(self):
- debug("Scanning paths", DEBUG_LEVEL_DEBUG)
-
- self.list = []
- for path in self.paths:
- debug('Scanning "%s"' % path, DEBUG_LEVEL_DEBUG)
- print os.path.exists(path)
- for dirpath, dirsnames, filenames in os.walk(path):
- for filename in filterImageFiles(filenames):
- debug('Adding file "%s"' % filename, DEBUG_LEVEL_DEBUG - 2*GregDebug.DEBUG_INCREMENT)
- self.list.append(os.path.join(dirpath, filename))
-
- random.shuffle(self.list)
-
- def doAddPath(self, path):
- self.paths.append(path)
- debug('Added path "%s" to the list' % path, DEBUG_LEVEL_DEBUG)
-
- def doStoreCache(self, filename):
- fd = open(filename, 'wb')
- cPickle.dump(obj = self, file = fd, protocol = 2)
- debug("Cache successfully stored", DEBUG_LEVEL_LOW)
- return True
-
- def getCurrentImage(self):
- return self.list[self.imagePointer]
-
- def __incrementInRange(self, n, amount = 1, rangeMax = None, rangeMin = 0):
- if rangeMax == None: rangeMax = len(self.list)
- assert rangeMax > 0
- return (n + amount) % rangeMax
-
- def getNextRandomImage(self):
- self.imagePointer = self.__incrementInRange(self.imagePointer)
- imageName = self.list[self.imagePointer]
- debug("Picked file '%s' (pointer=%d) from list" % (imageName, self.imagePointer))
- return imageName
-
- def getPrevRandomImage(self):
- self.imagePointer = self.__incrementInRange(self.imagePointer, amount=-1)
- imageName = self.list[self.imagePointer]
- debug("Picked file '%s' (pointer=%d) from list" % (imageName, self.imagePointer))
- return imageName
-
- def attemptCacheLoad(self, filename, rescanPaths = False):
- debug('Attempting to load cache from "%s"' % filename, DEBUG_LEVEL_DEBUG)
- self.paths.sort()
- try:
- return self._attemptCacheLoad(filename)
- except Exception, e:
- debug("Exception while loading cache: '%s'" % e)
-
- def _attemptCacheLoad(self, filename):
- try:
- fd = open(filename, 'rb')
- tmp = cPickle.load(fd)
- if self.paths == tmp.paths:
- debug("Path lists match, copying properties", DEBUG_LEVEL_DEBUG)
- # Overwrite this object with the other
- for attr in ('list', 'imagePointer'):
- setattr(self, attr, getattr(tmp, attr))
- return True
- else:
- debug("Ignoring cache, path lists do not match", DEBUG_LEVEL_LOW)
- return False
- except IOError, e:
- debug("Exception raised while trying to load cache: '%s'" % e)
- return False
-
- def hasImages(self):
- return self.list
-
-class FolderRandomFileList(BaseFileList):
- """A file list that will pick a file randomly within a directory. Each directory
- has the same chance of being chosen."""
- def __init__(self):
- self.directories = {}
-
- def doScanPaths(self):
- return True # Since it is already done
-
- def doAddPath(self, path):
- debug('Added path "%s" to the list' % path, DEBUG_LEVEL_DEBUG)
- for dirpath, dirs, filenames in os.walk(path):
- debug('Scanning "%s" for images' % dirpath)
- if self.directories.has_key(dirpath):
- continue
- filenames = filterImageFiles(filenames)
- if len(filenames):
- self.directories[dirpath] = filenames
- debug('Adding "%s" to "%s"' % (filenames, dirpath))
- else:
- debug("No images found in '%s'" % dirpath)
-
- def getNextRandomImage(self):
- directory = random.choice(self.directories.keys())
- debug('directory: "%s"' % directory)
- filename = random.choice(self.directories[directory])
- debug('filename: "%s"' % filename)
- return os.path.join(directory, filename)
-
- def hasImages(self):
- return len(self.directories.values())
+++ /dev/null
-#! python
-
-import sys
-import cgitb
-import inspect
-
-DEBUG_INCREMENT = 5
-DEBUG_LEVEL_DEBUG = DEBUG_INCREMENT * -2
-DEBUG_LEVEL_LOW = DEBUG_INCREMENT * -1
-DEBUG_LEVEL_MEDIUM = DEBUG_INCREMENT * 0
-DEBUG_LEVEL_HIGH = DEBUG_INCREMENT * 1
-DEBUG_LEVEL = DEBUG_LEVEL_MEDIUM
-
-__stackTraceEnabled = True
-
-def stackTraceEnabled(value):
- global __stackTraceEnabled
- __stackTraceEnabled = value
-
-def setDebugLevel(level):
- global DEBUG_LEVEL
- DEBUG_LEVEL = level
-
-def isBoundMethod(stackFrame):
- """Checks to see if the method that is running in the specified stackFrame is
-a bound method.
-Returns a 2-tuple containing if it is a bound method, and the object that it is
-bound to if it is bound."""
- def errout():
- return (False, None)
-
- if stackFrame.f_code.co_argcount < 1:
- return errout()
- firstVarName = stackFrame.f_code.co_varnames[0]
- firstVar = stackFrame.f_locals[firstVarName]
- if not hasattr(firstVar, stackFrame.f_code.co_name):
- return errout()
- if not hasattr(getattr(firstVar, stackFrame.f_code.co_name), 'func_code'):
- return errout()
- if getattr(getattr(firstVar, stackFrame.f_code.co_name), 'func_code') == stackFrame.f_code:
- return (True, firstVar)
- else:
- return errout()
-
-def createStackTrace(stackList):
- if not __stackTraceEnabled:
- return ''
- strStackList = []
- for stackItem in stackList:
- stackItemRepr = ""
- bm = isBoundMethod(stackItem[0]) # stackframe
- if bm[0]:
- stackItemRepr = '%s.' % bm[1].__class__.__name__
- stackItemRepr += stackItem[3] # Function Name
- del bm # Help remove circular dependencies (reduces memory useage)
- strStackList.append(stackItemRepr)
-
- return '=>'.join(strStackList)
-
-def debug(message, level=DEBUG_LEVEL_MEDIUM, indent_level = None):
- if level >= DEBUG_LEVEL:
- stack = inspect.stack()[1:-1] # Ignore this method
- stack.reverse()
- if indent_level == None:
- indent_level = len(stack)
- for line in message.split('\n'):
- print >>sys.stderr, '%s %s [%s]' %('>' * indent_level, line, createStackTrace(stack))
-
-def tracebackHook(etype, evalue, etb):
- print cgitb.text( (etype, evalue, etb), context = 5)
+++ /dev/null
-#! python
-
-from signal import signal, SIGHUP, SIGTERM
-
-class HUPInterrupt(Exception):
- pass
-class TERMInterrupt(Exception):
- pass
-
-def HUPHandler(signal, stackFrame):
- raise HUPInterrupt
-
-def TERMHandler(signal, stackFrame):
- raise TERMInterrupt
-
-# Install the handlers
-signal(SIGHUP, HUPHandler)
-signal(SIGTERM, TERMHandler)
+++ /dev/null
-#! python
-
-"""
-A small utility that provides similar functionality to the commands module, but
-allows you to get the output from multiple processes at the same time
-"""
-
-__author__ = "Greg Darke"
-
-import subprocess, fcntl, os
-from select import select
-try:
- import cStringIO as _StringIO
-except ImportError:
- import StringIO as _StringIO
-
-class CommandRunner(object):
- def __init__(self):
- self._outputs = {}
- self._processes = {}
- self._fds = []
-
- def _executeCommand(self, cmd):
- """Execute the command"""
- output = _StringIO.StringIO()
- isShell = isinstance(cmd, str)
- process = subprocess.Popen(args = cmd, shell = isShell,
- stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
-
- # Turn blocking off
- flags = fcntl.fcntl(process.stdout, fcntl.F_GETFL) | os.O_NONBLOCK
- fcntl.fcntl(process.stdout, fcntl.F_SETFL, flags)
-
- return (output, process)
-
- def executeCommand(self, cmd):
- """Executes a command, but does not return anything"""
- output, process = self._executeCommand(cmd)
- self._outputs[process.stdout] = output
- self._processes[cmd] = process
- self._fds.append(process.stdout)
-
- def _waitLoop(self):
- count = 0
- while self._fds:
- # While there are still some processes left
- inputReady, outputReady, exceptReady = \
- select(self._fds, [], self._fds)
-
- for fd in inputReady:
- data = fd.read()
- if not data:
- self._fds.remove(fd)
- continue
- self._outputs[fd].write(data)
-
- for fd in exceptReady:
- self._fds.remove(fd)
-
- def waitForCompletion(self):
- """Waits for all of the running processes to finish"""
- self._waitLoop()
-
- def getOutputs(self):
- """Returns a dictionay containing the command as the key, and a string (of the output) as the value"""
- outputs = dict((cmd, self._outputs[process.stdout].getvalue()) for cmd, process in self._processes.items())
- return outputs
+++ /dev/null
-#! /usr/bin/env python
-
-import os
-
-__all__ = ['load_options']
-
-def load_options():
- FILENAME = os.path.expanduser('~/priv/credentials.conf')
- options = {}
- try:
- options_fd = open(FILENAME, "r")
-
- for line in options_fd:
- if line.startswith('#'):
- continue
- line = line.strip().split('=')
- if len(line) < 2:
- continue
-
- key = line[0]
- value = '='.join(line[1:])
-
- options[key] = value
-
- options_fd.close()
- except:
- pass
- return options
+++ /dev/null
-#!/usr/bin/env python2.4
-
-import GregDebug, base64, os, sys, urlparse
-
-from twisted.internet import reactor, protocol
-from twisted.web.client import HTTPClientFactory
-from twisted.web.http import HTTPClient
-from twisted.web.client import _parse as parseURL
-
-__all__ = ('downloadURL', )
-
-def parseURL(url, defaultPort = None):
- """Based on twisted.web.client._parse"""
- parsed = urlparse.urlparse(url)
- scheme = parsed[0]
- path = urlparse.urlunparse(('','')+parsed[2:])
- if defaultPort is None:
- if scheme == 'https':
- defaultPort = 443
- else:
- defaultPort = 80
- host, port = parsed[1], defaultPort
-
- if '@' in host:
- authUser, host = host.split('@', 1)
- auth = (authUser, )
- if ':' in authUser:
- auth = tuple(authUser.split(':', 1))
- else:
- auth = None
-
- if ':' in host:
- host, port = host.rsplit(':', 1)
- port = int(port)
-
- return scheme, auth, host, port, path
-
-class HTTPProxyFactory(protocol.ClientFactory):
- def __init__(self, realFactory, proxyServer, proxyMethod = 'GET', proxyPassword = None):
- self.realFactory = realFactory
- self.proxyHost, self.proxyPort = proxyServer
- self.proxyMethod = proxyMethod
- self.proxyPassword = proxyPassword
-
- def buildProtocol(self, addr):
- protocol = HTTPProxyProtocol(self, self.realFactory.buildProtocol(addr) )
- return protocol
-
- def __getattr__(self, key):
- return getattr(self.realFactory, key)
-
-class HTTPProxyProtocol(protocol.Protocol):
- def __init__(self, factory, proxied):
- self.factory = factory
- self.proxied = proxied
- self.proxyPassword = factory.proxyPassword
- if self.proxyPassword is not None:
- self.proxyPassword = base64.standard_b64encode('%s:%s' % self.proxyPassword)
- if factory.proxyMethod == 'GET':
- self.__connectionMade = self.__connectionMade_GET
- else:
- raise NotImplementedError
-
- def __send(self, value):
- self.transport.write(value)
-
- def __getTransportWrites(self, function, *args, **kwargs):
- temp = self.transport.write
- request = []
- self.transport.write = lambda data: request.append(data)
- function(*args, **kwargs)
- self.proxied.connectionMade()
- self.transport.write = temp
- return request
-
- def __connectionMade_GET(self):
- self.factory.realFactory.path = self.factory.realFactory.url
- self.proxied.makeConnection(self.transport)
-
- self.__send('GET %s HTTP/1.0\r\n' % self.factory.realFactory.url)
- if self.proxyPassword is not None:
- self.__send('Proxy-Authorization: Basic %s\r\n' % self.proxyPassword)
-
- # Remove the real http client's get request
- for line in self.__getTransportWrites(self.proxied.connectionMade)[1:]:
- self.__send(line)
-
- def connectionMade(self):
- self.proxied.transport = self.transport
- self.__connectionMade()
-
- def dataReceived(self, data):
- self.proxied.dataReceived(data)
-
- def connectionLost(self, reason):
- self.proxied.connectionLost(reason)
-
-proxies = {}
-def downloadURL(url, method = 'GET', successBack = None, errorBack = None):
- factory = HTTPClientFactory(url, method = method)
- scheme, auth, host, port, path = parseURL(url)
- if successBack is not None:
- factory.deferred.addCallback(successBack)
- if errorBack is not None:
- factory.deferred.addErrback(errorBack)
- if scheme in proxies:
- (host, port), password, factory_type = proxies[scheme]
- # Change the factory to the proxies one
- factory = factory_type(realFactory = factory, proxyServer = (host, port), proxyMethod = method, proxyPassword = password)
-
- reactor.connectTCP(host, port, factory)
- return factory
-
-# Note: Does not currently honor the no-proxy variable
-def parseProxies():
- for k,v in ( (k,v) for k,v in os.environ.items() if v and k.endswith('_proxy')):
- proxy_type = k[:-len('_proxy')]
- if proxy_type == 'http':
- _, auth, host, port, _ = parseURL(v)
- proxies[proxy_type] = (host, port), auth, HTTPProxyFactory
-
-def main(urls):
- def summerise(string, summerisedLen = 100):
- if len(string) <= summerisedLen:
- return string
- else:
- summerisedLen -= 5
- start = summerisedLen // 2
- return '%s ... %s' % (string[:start], string[-(summerisedLen - start):])
-
- def s(data):
- print 'Success: "%r"' % summerise(data)
-### print 'factory: (\n\t%s\n)' % '\n\t'.join('%s:%s' % (attr, getattr(factory, attr)) for attr in dir(factory))
-
- def e(data):
- print data
-
- for url in urls:
- factory = downloadURL(url, successBack = s, errorBack = e)
- reactor.run()
-
-# Parse the environment variables for proxy servers
-parseProxies()
-if __name__ == "__main__":
- main(sys.argv[1:])
+++ /dev/null
-#! python
-
-__all__ = ('parse_url', 'isImageURL', 'unique', 'removeDups', 'xRemoveDups')
-
-IMAGE_EXTENSIONS = ('PNG', 'JPG', 'JPEG', 'BMP', 'GIF', 'SWF', 'TIF', 'TIFF')
-
-def parse_url(url):
- """Parses a url into a tuple of (hostname, directory, filename)."""
- return ('hostname', 'directory', 'filename')
-
-def isImageURL(url):
- """Checks if an filename is an image"""
- try:
- _, extension = url.rsplit('.', 1)
- except ValueError:
- # There was no '.' in the url
- return False
- else:
- return extension.upper() in IMAGE_EXTENSIONS
-
-def unique(l):
- list_iter = iter(l)
- last_item = list_iter.next()
- yield last_item
- for item in list_iter:
- if last_item != item:
- yield item
- last_item = item
-
-def removeDups(l):
- """Removes duplicates from the list (Note: The ordering of the list may change)"""
- return list(unique(sorted(l)))
-
-def xRemoveDups(l):
- """Removes duplicates from the list.
- Requires O(n) memory, objects must be hashable"""
- yielded = set()
- for elem in l:
- if elem in yielded:
- continue
- yielded.add(elem)
- yield elem
+++ /dev/null
-#! python
-# Copyright 2007 Greg Darke <starstuff@optusnet.com.au>
-# Licensed for distribution under the GPL version 2, check COPYING for details
-# Some little helper utils
-
-import subprocess, commands, itertools
-
-__all__ = ('getResolutions', )
-
-def _seperateGroups(lines):
- ret = []
- current_section = []
- for line in lines:
- if line.strip() == '':
- ret.append(current_section)
- current_section = []
- continue
- current_section.append(line)
- if current_section:
- ret.append(current_section)
- return ret
-
-def getResolutions():
- xdpyinfo_status, xdpyinfo_output = commands.getstatusoutput('xdpyinfo')
- lines = xdpyinfo_output.splitlines()
- groups = _seperateGroups(xdpyinfo_output.splitlines())
-
- screens = []
- for screen_data in itertools.islice(groups, 1, None, None):
- _, screen_number = screen_data[0].split()
- # remove the leading and trailing characters
- screen_number = screen_number[1:-1]
-
- _, screen_resolution_str, _, _, _ = screen_data[1].strip().split()
- screen_resolution = screen_resolution_str.split('x')
-
- screens.append( (screen_number, tuple(int(val) for val in screen_resolution)))
- return dict(screens)
--- /dev/null
+#!/usr/bin/env python
+
+VERSION = "2.0"
+
+
+import asyncore, asynchat, socket
+import os, os.path, random, sys, time
+from optparse import OptionParser
+import logging
+from logging import debug, info, warning, error, critical
+logging.basicConfig(format="%(levelname)s: %(message)s")
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+
+try:
+ # Required libraries
+ import asyncsched
+ import wallchanger
+except ImportError, e:
+ critical("Missing libraries! Exiting...")
+ sys.exit(1)
+
+
+
+
def filter_images(filenames):
    """Generator yielding only the filenames whose extension looks like an image."""
    image_exts = ('.jpg', '.jpe', '.jpeg', '.png', '.gif', '.bmp')
    for name in filenames:
        suffix = os.path.splitext(name)[1]
        if suffix.lower() in image_exts:
            yield name
+
class BaseFileList(object):
    """Abstract interface shared by all file-list strategies.

    Subclasses override the selection methods; the caching hooks are
    optional no-ops by default.
    """

    def add_path(self, path):
        # Register a directory to draw images from.
        raise NotImplementedError()

    def scan_paths(self):
        # Walk the registered paths and (re)build the internal list.
        raise NotImplementedError()

    def store_cache(self, path):
        # Optional hook: persist state to 'path'.  Default does nothing.
        pass

    def load_cache(self, filename, rescanPaths = False):
        # Optional hook: restore state from 'filename'.  Default does nothing.
        pass

    def get_next_image(self):
        raise NotImplementedError()

    def get_prev_image(self):
        raise NotImplementedError()

    def get_current_image(self):
        raise NotImplementedError()

    def is_empty(self):
        # A bare base list never contains any images.
        return True
+
+
class RandomFileList(BaseFileList):
    """Pick an image uniformly at random from every file found."""

    def __init__(self):
        self.list = []          # flat list of absolute image paths
        self.paths = []         # directories registered via add_path()
        self.last_image = None  # most recently returned image path

    def scan_paths(self):
        """Walk every registered path and collect all image files."""
        for path in self.paths:
            for dirpath, dirsnames, filenames in os.walk(path):
                for filename in filter_images(filenames):
                    self.list.append(os.path.join(dirpath, filename))

    def add_path(self, path):
        """Register a directory; it is walked later by scan_paths()."""
        self.paths.append(path)
        debug('Added path "%s" to the list' % path)

    def get_next_image(self):
        """Return a uniformly random image, remembering it in last_image."""
        n = random.randint(0, len(self.list)-1)
        self.last_image = self.list[n]
        debug("Picked file '%s' from list" % self.last_image)
        return self.last_image

    def get_current_image(self):
        """Return the last image handed out; pick one on first use.

        BUGFIX: the inherited implementation raised NotImplementedError,
        which crashed Cycler.cmd_reload() (called from Cycler.init) for
        this default list type.
        """
        if self.last_image is None:
            return self.get_next_image()
        return self.last_image

    def is_empty(self):
        return len(self.list) == 0
+
+
+class AllRandomFileList(BaseFileList):
+ def __init__(self):
+ self.list = None
+ self.paths = []
+ self.imagePointer = 0
+
+ # Scan the input directory, and then randomize the file list
+ def scan_paths(self):
+ debug("Scanning paths")
+
+ self.list = []
+ for path in self.paths:
+ debug('Scanning "%s"' % path)
+ for dirpath, dirsnames, filenames in os.walk(path):
+ for filename in filter_images(filenames):
+ debug('Adding file "%s"' % filename)
+ self.list.append(os.path.join(dirpath, filename))
+
+ random.shuffle(self.list)
+
+ def add_path(self, path):
+ self.paths.append(path)
+ debug('Added path "%s" to the list' % path)
+
+ def store_cache(self, filename):
+ try:
+ fd = open(filename, 'wb')
+ pickle.dump(obj = self, file = fd, protocol = 2)
+ debug("Cache successfully stored")
+ except Exception, e:
+ warning("Storing cache: %s" % e)
+
+ def load_cache(self, filename, rescanPaths = False):
+ debug('Attempting to load cache from "%s"' % filename)
+ self.paths.sort()
+ try:
+ fd = open(filename, 'rb')
+ tmp = pickle.load(fd)
+ if self.paths == tmp.paths:
+ debug("Path lists match, copying properties")
+ # Overwrite this object with the other
+ for attr in ('list', 'imagePointer'):
+ setattr(self, attr, getattr(tmp, attr))
+ else:
+ debug("Ignoring cache, path lists do not match")
+ except Exception, e:
+ warning("Loading cache: %s" % e)
+
+ def get_current_image(self):
+ return self.list[self.imagePointer]
+
+ def __inc_in_range(self, n, amount = 1, rangeMax = None, rangeMin = 0):
+ if rangeMax == None: rangeMax = len(self.list)
+ assert rangeMax > 0
+ return (n + amount) % rangeMax
+
+ def get_next_image(self):
+ self.imagePointer = self.__inc_in_range(self.imagePointer)
+ imageName = self.list[self.imagePointer]
+ debug("Picked file '%s' (pointer=%d) from list" % (imageName, self.imagePointer))
+ return imageName
+
+ def get_prev_image(self):
+ self.imagePointer = self.__inc_in_range(self.imagePointer, amount=-1)
+ imageName = self.list[self.imagePointer]
+ debug("Picked file '%s' (pointer=%d) from list" % (imageName, self.imagePointer))
+ return imageName
+
+ def is_empty(self):
+ return len(self.list) == 0
+
class FolderRandomFileList(BaseFileList):
    """A file list that will pick a file randomly within a directory.  Each
    directory has the same chance of being chosen, regardless of how many
    images it contains."""

    def __init__(self):
        self.directories = {}   # dirpath -> list of image filenames
        self.last_image = None  # most recently returned image path

    def scan_paths(self):
        # Scanning happens incrementally in add_path(); nothing to do here.
        pass

    def add_path(self, path):
        """Walk 'path' and record every directory that contains images."""
        debug('Added path "%s" to the list' % path)
        for dirpath, dirs, filenames in os.walk(path):
            debug('Scanning "%s" for images' % dirpath)
            # 'in' instead of the deprecated dict.has_key()
            if dirpath in self.directories:
                continue
            filenames = list(filter_images(filenames))
            if len(filenames):
                self.directories[dirpath] = filenames
                debug('Adding "%s" to "%s"' % (filenames, dirpath))
            else:
                debug("No images found in '%s'" % dirpath)

    def get_next_image(self):
        """Pick a uniformly random directory, then a random image inside it."""
        directory = random.choice(list(self.directories.keys()))
        debug('directory: "%s"' % directory)
        filename = random.choice(self.directories[directory])
        debug('filename: "%s"' % filename)
        self.last_image = os.path.join(directory, filename)
        return self.last_image

    def get_current_image(self):
        """Return the last image handed out; pick one on first use.

        BUGFIX: the inherited implementation raised NotImplementedError,
        which crashed Cycler.cmd_reload() in --folder-random mode.
        """
        if self.last_image is None:
            return self.get_next_image()
        return self.last_image

    def is_empty(self):
        # No need to materialise .values() just to count directories.
        return len(self.directories) == 0
+
+
class Cycler(object):
    """Owns the file list and the timer that rotates the wallpaper.

    Initialisation is done in init() rather than __init__ so that the
    caller can claim the control socket before the (slow) path scan.
    """

    def init(self, options, paths):
        """Build the file list, set the first wallpaper, start the timer."""
        self.cycle_time = options.cycle_time
        self.history_filename = options.history_filename

        debug("Initialising wallchanger")
        wallchanger.init(options.background_colour, options.permanent)

        debug("Initialising file list")
        if options.all_random:
            self.filelist = AllRandomFileList()
        elif options.folder_random:
            self.filelist = FolderRandomFileList()
        else:
            self.filelist = RandomFileList()

        for path in paths:
            self.filelist.add_path(path)

        if self.filelist.load_cache(self.history_filename):
            debug("Loaded cache successfully")
        else:
            debug("Could not load cache")
            self.filelist.scan_paths()

        if self.filelist.is_empty():
            error("No images were found. Exiting...")
            sys.exit(1)

        self.task = None
        self.cmd_reload()

    def finish(self):
        """Persist the file-list state before shutdown."""
        self.filelist.store_cache(self.history_filename)

    def find_files(self, options, paths):
        # BUGFIX: previously returned the undefined name 'filelist',
        # raising NameError on every call.
        return self.filelist

    def cmd_reset(self):
        """(Re)arm the cycle timer; on expiry it advances to the next image."""
        def advance():
            # Timer callback ('advance' rather than shadowing builtin next()):
            # show the next image and re-arm the timer.
            image = self.filelist.get_next_image()
            wallchanger.set_image(image)
            self.task = None
            self.cmd_reset()

        if self.task is not None:
            self.task.cancel()
        self.task = asyncsched.schedule(self.cycle_time, advance)
        debug("Reset timer for %s seconds" % self.cycle_time)

    def cmd_reload(self):
        """Re-apply the current image and restart the timer."""
        image = self.filelist.get_current_image()
        wallchanger.set_image(image)
        self.cmd_reset()

    def cmd_next(self):
        """Advance to the next image and restart the timer."""
        image = self.filelist.get_next_image()
        wallchanger.set_image(image)
        self.cmd_reset()

    def cmd_prev(self):
        """Go back to the previous image and restart the timer."""
        image = self.filelist.get_prev_image()
        wallchanger.set_image(image)
        self.cmd_reset()

    def cmd_rescan(self):
        """Re-walk all registered paths, then advance."""
        self.filelist.scan_paths()
        self.cmd_next()

    def cmd_pause(self):
        """Stop cycling until another command restarts the timer."""
        if self.task is not None:
            self.task.cancel()
            self.task = None

    def cmd_exit(self):
        """Ask the scheduler loop to terminate."""
        asyncsched.exit()
+
class Server(asynchat.async_chat):
    """Control-protocol handler for one client connection.

    Protocol: newline-terminated lines of the form "cmd <name>"; <name>
    is dispatched to the cycler's cmd_<name>() method when it exists.
    """

    def __init__(self, cycler, conn, addr):
        asynchat.async_chat.__init__(self, conn=conn)
        self.cycler = cycler
        self.ibuffer = []
        self.set_terminator("\n")

    def collect_incoming_data(self, data):
        # Accumulate chunks until the terminator arrives.
        self.ibuffer.append(data)

    def found_terminator(self):
        """Parse one complete line and dispatch the named command."""
        line = "".join(self.ibuffer).lower()
        self.ibuffer = []
        # BUGFIX: a line without whitespace (e.g. a bare "cmd") used to
        # raise ValueError from the 2-tuple unpack and kill the handler.
        parts = line.split(None, 1)
        if len(parts) != 2 or parts[0] != "cmd":
            debug('Bad line received "%s"' % line)
            return
        cmd = parts[1]
        if hasattr(self.cycler, "cmd_" + cmd):
            debug('Executing command "%s"' % cmd)
            getattr(self.cycler, "cmd_" + cmd)()
        else:
            debug('Unknown command received "%s"' % cmd)
+
+
+
class Listener(asyncore.dispatcher):
    """Accepts connections on the unix control socket.

    Each accepted connection is handed to a new Server instance, which
    speaks the "cmd <name>" protocol on the cycler's behalf.
    """

    def __init__(self, socket_filename, cycler):
        asyncore.dispatcher.__init__(self)
        self.cycler = cycler
        self.create_socket(socket.AF_UNIX, socket.SOCK_STREAM)
        self.bind(socket_filename)
        # Small backlog: only local clients ever connect.
        self.listen(2)

    def handle_accept(self):
        conn, addr = self.accept()
        Server(self.cycler, conn, addr)

    def writable(self):
        # A listening socket never has outgoing data.
        return False
+
+
def do_server(options, paths):
    """Run as the daemon: claim the control socket, then cycle until exit."""
    try:
        cycler = Cycler()
        listener = Listener(options.socket_filename, cycler)
        # Initialisation of Cycler delayed so we grab the socket quickly
        cycler.init(options, paths)
        try:
            asyncsched.loop()
        except KeyboardInterrupt:
            # Move past the "^C" echoed by the terminal.
            print
        cycler.finish()
    finally:
        # Make sure that the socket is cleaned up.  BUGFIX: narrowed the
        # bare "except:" -- only OS-level unlink failures are expected
        # and safe to ignore here.
        try:
            os.unlink(options.socket_filename)
        except OSError:
            pass
+
def do_client(options, args):
    """Run as a client: send each requested command to the live server."""
    if not args:
        # No explicit command means "advance to the next image".
        args = ["next"]
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect(options.socket_filename)
    sock = sock.makefile()
    last = len(args)
    for i, cmd in enumerate(args):
        sock.write("cmd %s\n" % cmd)
        # Space multiple commands one cycle period apart.
        if i + 1 != last:
            time.sleep(options.cycle_time)
    sock.close()
+
def do_oneshot(options, paths):
    """Set a single background image and return without serving."""
    one_off = Cycler()
    one_off.init(options, paths)
+
def build_parser():
    """Build the OptionParser describing all command-line options."""
    parser = OptionParser(version="%prog " + VERSION,
        description = "Cycles through random background images.",
        usage =
            "\n(server) %prog [options] dir [dir2 ...]"
            "\n(client) %prog [options] [next|prev|rescan|reload|pause] [...]"
            "\nThe first instance to be run will be the server.\n"
        )
    parser.add_option("-p", "--permanent",
        action="store_true", dest="permanent", default=False,
        help="Make the background permanent. Note: This will cause all machines logged in with this account to simultaneously change background [Default: %default]")
    # BUGFIX: help text previously read "Make the louder".
    parser.add_option("-v", '-d', "--verbose", "--debug",
        action="count", dest="verbose", default=0,
        help="Make the program louder (good for debugging, or those who are curious)")
    parser.add_option("-b", "--background-colour",
        action="store", type="string", dest="background_colour", default="black",
        help="Change the default background colour that is displayed if the image is not in the correct aspect ratio [Default: %default]")
    parser.add_option("--all-random",
        action="store_true", dest="all_random", default=False,
        help="Make sure that all images have been displayed before repeating an image")
    parser.add_option("-1", "--oneshot",
        action="store_true", dest="oneshot", default=False,
        help="Set one random image and terminate immediately.")
    parser.add_option("--folder-random",
        action="store_true", dest="folder_random", default=False,
        help="Give each folder an equal chance of having an image selected from it")
    parser.add_option("--convert",
        action="store_true", dest="convert", default=False,
        help="Do conversions using ImageMagick or PIL, don't rely on the window manager")
    parser.add_option("--cycle-time",
        action="store", type="int", default=1800, dest="cycle_time",
        help="Cause the image to cycle every X seconds")
    parser.add_option("--socket",
        action="store", type="string", dest="socket_filename", default=os.path.expanduser('~/.randombg_socket'),
        help="Location of the command/control socket.")
    parser.add_option("--history-file",
        action="store", type="string", dest="history_filename", default=os.path.expanduser('~/.randombg_historyfile'),
        help="Stores the location of the last image to be loaded.")
    return parser
+
def main():
    """Entry point: parse options, then run as one-shot, client or server."""
    parser = build_parser()
    options, args = parser.parse_args(sys.argv[1:])

    # -v/-d raises verbosity: once for INFO, twice or more for DEBUG.
    if options.verbose == 1:
        logging.getLogger().setLevel(logging.INFO)
    elif options.verbose >= 2:
        logging.getLogger().setLevel(logging.DEBUG)

    if options.oneshot:
        # BUGFIX: --oneshot used to fall through and also start a
        # client/server after setting the image once; it must stop here
        # ("terminate immediately", per the option's help text).
        do_oneshot(options, args)
        return

    # An existing socket means a server is already running: act as client.
    if os.path.exists(options.socket_filename):
        do_client(options, args)
    else:
        do_server(options, args)
+
+
+if __name__ == "__main__":
+ main()
+
-#! python
+#!/usr/bin/env python
+# Copyright 2008 Greg Darke <greg@tsukasa.net.au>
+# Copyright 2008 James Bunton <jamesbunton@fastmail.fm>
+# Licensed for distribution under the GPL version 2, check COPYING for details
+# This is a cross platform/cross window manager way to change your wallpaper
import commands, sys, os, os.path, subprocess, time
-from GregDebug import debug, setDebugLevel, DEBUG_LEVEL_DEBUG, DEBUG_LEVEL_LOW, DEBUG_LEVEL_MEDIUM, DEBUG_LEVEL_HIGH, DEBUG_INCREMENT
-import FileLists
+from logging import debug, info, warning
-import python24_adapter # NB: Must be imported before collections
-import collections
+__all__ = ("init", "set_image")
-"""This is a cross platform/cross window manager way to change your current
-desktop image."""
-__all__ = ('RandomBG')
+changers = []
def set_image(filename):
    """Ask every detected changer to display *filename*, logging any failure."""
    info("Setting image: %s" % filename)
    for c in changers:
        ok = c.set_image(filename)
        if not ok:
            warning("Failed to set background: wallchanger.set_image(%s), changer=%s" % (filename, c))
def init(*args, **kwargs):
    """Desktop Changer factory"""
    def _ok(shell_cmd):
        # A zero exit status means the probe command matched.
        return commands.getstatusoutput(shell_cmd)[0] == 0

    debug("Testing for OSX (NonX11)")
    if _ok("ps ax -o command -c|grep -q WindowServer"):
        changers.append(OSXChanger(*args, **kwargs))

    if 'DISPLAY' not in os.environ or os.environ['DISPLAY'].startswith('/tmp/launch'):
        # X11 is not running
        return
    if os.uname()[0] == 'Darwin':
        # Try to detect if the X11 server is running on OSX
        if not _ok("ps ax -o command|grep -q '/.*X11 .* %s'" % os.environ['DISPLAY']):
            # X11 is not running for this display
            return

    debug("Testing for KDE")
    if _ok("xwininfo -name 'KDE Desktop'"):
        changers.append(KDEChanger(*args, **kwargs))

    debug("Testing for Gnome")
    if _ok("xwininfo -name 'gnome-session'"):
        changers.append(GnomeChanger(*args, **kwargs))

    debug("Testing for WMaker")
    if _ok("xlsclients | grep -qi wmaker"):
        changers.append(WMakerChanger(*args, **kwargs))

    if not changers:
        raise Exception("Unknown window manager")
class BaseChanger(object):
    """Common state shared by every concrete wallpaper changer."""

    # Human-readable window-manager name; each subclass overrides this.
    name = "undefined"

    def __init__(self, background_color='black', permanent=False, convert=False):
        info('Determined the window manager is "%s"' % self.name)
        # Fill colour shown when the image aspect ratio does not match.
        self.background_color = background_color
        # Whether the setting should persist (where the WM supports it).
        self.permanent = permanent
        # Whether to pre-convert images ourselves rather than rely on the WM.
        self.convert = convert

    def set_image(self, filename):
        """Display *filename*; concrete subclasses must implement this."""
        raise NotImplementedError()
-class __WMakerChanger(__BaseChanger):
+class WMakerChanger(BaseChanger):
+ name = "WindowMaker"
_ConvertedWallpaperLocation = '/tmp/wallpapers_wmaker/'
- def _removeOldImageCache(self):
+ def remove_old_image_cache(self):
"""Cleans up any old temp images"""
if not os.path.isdir(self._ConvertedWallpaperLocation):
os.mkdir(self._ConvertedWallpaperLocation)
for dirname in dirnames:
os.unlink(os.path.join(fullpath, dirname))
- def _convertImageFormat(self, file):
+ def convert_image_format(self, file):
"""Convert the image to a png, and store it in a local place"""
- self._removeOldImageCache()
+ self.remove_old_image_cache()
output_name = os.path.join(self._ConvertedWallpaperLocation, '%s.png' % time.time())
cmd = ["convert", '-resize', '1280', '-gravity', 'Center', '-crop', '1280x800+0+0', file, output_name]
- debug("""Convert command: '"%s"'""" % '" "'.join(cmd), DEBUG_LEVEL_DEBUG)
+ debug("""Convert command: '"%s"'""" % '" "'.join(cmd))
return output_name, subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, stdin=None).wait()
- def changeTo(self, file):
- file, convert_status = self._convertImageFormat(file)
- if convert_status:
- debug('Convert failed')
+
+ def set_image(self, file):
+ if self.convert:
+ file, convert_status = self.convert_image_format(file)
+ if convert_status:
+ debug('Convert failed')
cmd = ["wmsetbg",
- "-b", self.backgroundColour, # Sets the background colour to be what the user specified
+ "-b", self.background_color, # Sets the background colour to be what the user specified
"-S", # 'Smooth' (WTF?)
 "-e", # Center the image on the screen (only affects when the image is not in the correct aspect ratio)
### "-a", # scale the image, keeping the aspect ratio
if self.permanent:
cmd += ["-u"] # update the wmaker database
cmd += [file]
- debug('''WMaker bgset command: "'%s'"''' % "' '".join(cmd), DEBUG_LEVEL_DEBUG)
+ debug('''WMaker bgset command: "'%s'"''' % "' '".join(cmd))
return not subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, stdin=None).wait()
-class __OSXChanger(__BaseChanger):
+class OSXChanger(BaseChanger):
+ name = "Mac OS X"
_ConvertedWallpaperLocation = '/tmp/wallpapers/'
_DesktopPlistLocation = os.path.expanduser('~/Library/Preferences/com.apple.desktop.plist')
- def _removeOldImageCache(self):
+ def __init__(self, *args, **kwargs):
+ BaseChanger.__init__(self, *args, **kwargs)
+ self.fix_desktop_plist()
+
+ def remove_old_image_cache(self):
"""Cleans up any old temp images"""
if not os.path.isdir(self._ConvertedWallpaperLocation):
os.mkdir(self._ConvertedWallpaperLocation)
for dirname in dirnames:
os.unlink(os.path.join(fullpath, dirname))
- def _convertImageFormat(self, file):
+ def convert_image_format(self, file):
"""Convert the image to a png, and store it in a local place"""
- self._removeOldImageCache()
+ self.remove_old_image_cache()
output_name = os.path.join(self._ConvertedWallpaperLocation, '%s.png' % time.time())
try:
import PIL, PIL.Image
shutil.copyfile(file, output_name)
return output_name, True
- def _fixDesktopPList(self):
+ def fix_desktop_plist(self):
"""Removes the entry in the desktop plist file that specifies the wallpaper for each monitor"""
try:
import Foundation
- desktopPList = Foundation.NSMutableDictionary.dictionaryWithContentsOfFile_(self._DesktopPlistLocation)
+ desktop_plist = Foundation.NSMutableDictionary.dictionaryWithContentsOfFile_(self._DesktopPlistLocation)
# Remove all but the 'default' entry
- for k in desktopPList['Background'].keys():
+ for k in desktop_plist['Background'].keys():
if k == 'default':
continue
- desktopPList['Background'].removeObjectForKey_(k)
+ desktop_plist['Background'].removeObjectForKey_(k)
# Store the plist again (Make sure we write it out atomically -- Don't want to break finder)
- desktopPList.writeToFile_atomically_(self._DesktopPlistLocation, True)
+ desktop_plist.writeToFile_atomically_(self._DesktopPlistLocation, True)
except ImportError:
- debug('Could not import the Foundation module, you may have problems with dual screens', DEBUG_LEVEL_MEDIUM)
-
- def changeTo(self, file):
- output_name, ret = self._convertImageFormat(file)
- if not ret:
- debug("Convert failed")
- return False
- self._fixDesktopPList()
- cmd = """osascript -e 'tell application "finder" to set desktop picture to posix file "%s"'""" % output_name
- debug(cmd, DEBUG_LEVEL_DEBUG)
+ debug('Could not import the Foundation module, you may have problems with dual screens')
+
+ def set_image(self, filename):
+ if self.convert:
+ filename, ret = self.convert_image_format(filename)
+ if not ret:
+ debug("Convert failed")
+ return False
+ cmd = """osascript -e 'tell application "finder" to set desktop picture to posix file "%s"'""" % filename
+ debug(cmd)
return not commands.getstatusoutput(cmd)[0]
class GnomeChanger(BaseChanger):
    """Set the Gnome desktop background through gconf."""
    name = "Gnome"

    def set_image(self, file):
        """Point gconf's background key at *file*; True on success."""
        cmd = ['gconftool-2', '--type', 'string', '--set',
               '/desktop/gnome/background/picture_filename', file]
        debug(cmd)
        exit_code = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, stdin=None).wait()
        return exit_code == 0
-class __KDEChanger(__BaseChanger):
- def changeTo(self, file):
+class KDEChanger(BaseChanger):
+ name = "KDE"
+ def set_image(self, file):
cmds = []
for group in ('Desktop0', 'Desktop0Screen0'):
base = ['kwriteconfig', '--file', 'kdesktoprc', '--group', group, '--key']
cmds.append(['dcop', 'kdesktop', 'KBackgroundIface', 'configure'])
for cmd in cmds:
- debug(cmd, DEBUG_LEVEL_DEBUG)
+ debug(cmd)
if subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, stdin=None).wait() != 0:
- return 1 # Fail
+ return False
+
+ return True
+
+
def main(filename):
    """Stand-alone helper: enable debug logging, detect the WM, set *filename*."""
    import logging
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    init()
    set_image(filename)
+
if __name__ == "__main__":
    # Exactly one argument is required: the image to display.
    # Catch only the missing-argument case; a bare "except:" would also
    # swallow KeyboardInterrupt and SystemExit.
    try:
        filename = sys.argv[1]
    except IndexError:
        print >>sys.stderr, "Usage: %s filename" % sys.argv[0]
        sys.exit(1)

    main(filename)