scrape.py :  » Network » Rufus-BitTorrent-Client » Rufus_0.7.0_src » Python Open Source

Home
Python Open Source
1.3.1.2 Python
2.Ajax
3.Aspect Oriented
4.Blog
5.Build
6.Business Application
7.Chart Report
8.Content Management Systems
9.Cryptographic
10.Database
11.Development
12.Editor
13.Email
14.ERP
15.Game 2D 3D
16.GIS
17.GUI
18.IDE
19.Installer
20.IRC
21.Issue Tracker
22.Language Interface
23.Log
24.Math
25.Media Sound Audio
26.Mobile
27.Network
28.Parser
29.PDF
30.Project Management
31.RSS
32.Search
33.Security
34.Template Engines
35.Test
36.UML
37.USB Serial
38.Web Frameworks
39.Web Server
40.Web Services
41.Web Unit
42.Wiki
43.Windows
44.XML
Python Open Source » Network » Rufus BitTorrent Client 
Rufus BitTorrent Client » Rufus_0.7.0_src » scrape.py
#! /usr/bin/env python

# -*- coding: utf-8 -*-

#-----------------------------------------------------------------------------

# Name:        scrape.py

# Purpose:     

#

# Author:      Jeremy Arendt

#

# Created:     2004/03/03

# RCS-ID:      $Id: scrape.py,v 1.3 2005/09/16 07:41:31 Inigo Exp $

# Copyright:   (c) 2002

# Licence:     MIT

#-----------------------------------------------------------------------------

import wx

import httplib

import socket

from threading import Thread

from BitTorrent.zurllib import URLError,urlopen,Request

from os.path import split,join

from urlparse import urlparse

from BitTorrent.bencode import bdecode

from g3listctrl import G3ListCtrl

from wx.lib.mixins.listctrl import ColumnSorterMixin,ListCtrlAutoWidthMixin

from traceback import print_exc





class ScrapeListCtrl(G3ListCtrl, ListCtrlAutoWidthMixin):
    """List control showing one row per scraped torrent: filename,
    seed count, peer count and completed-download count."""

    def __init__(self, parent, btconfig, images):
        G3ListCtrl.__init__(self, parent, btconfig, "ScrapeList")
        ListCtrlAutoWidthMixin.__init__(self)

        # (title, alignment, width) for each column; all start visible.
        column_spec = (
            ("Filename", wx.LIST_FORMAT_LEFT, 160),
            ("Seeds", wx.LIST_FORMAT_RIGHT, 60),
            ("Peers", wx.LIST_FORMAT_RIGHT, 60),
            ("Downloads", wx.LIST_FORMAT_LEFT, wx.LIST_AUTOSIZE_USEHEADER),
        )
        self.InsertColumns([[True, title, fmt, width]
                            for title, fmt, width in column_spec])

    def Populate(self, data):
        # Rows are inserted by the owner via InsertRow; nothing to do here.
        pass

class ScrapeDlg(wx.Frame):
    """Small frame that lets the user enter a tracker scrape URL and shows
    the per-torrent seed/peer/download counts returned by the tracker."""

    def __init__(self, parent, btconfig, images, id=-1, invokefunc=None):
        # NOTE: `id` is accepted for call-site compatibility but unused;
        # the frame is always created with id -1.
        wx.Frame.__init__(self, parent, -1, 'Scrape', size=(400, 300))
        self.btconfig = btconfig
        self.images = images
        # Marshalling function for cross-thread callbacks; unused by
        # Scrape(), which hands PostScrape to the worker thread directly.
        self.InvokeLater = invokefunc

        okbut_id = wx.NewId()

        panel = wx.Panel(self, -1)
        self.list = ScrapeListCtrl(panel, btconfig, images)
        self.edit1 = wx.TextCtrl(panel, -1, "")
        okbut = wx.Button(panel, okbut_id, "OK")

        # Two rows: the list (growable) on top, URL entry + OK button below.
        colsizer = wx.FlexGridSizer(2, 1, 0, 0)
        colsizer.AddGrowableRow(0)
        colsizer.AddGrowableCol(0)

        buttsizer = wx.FlexGridSizer(1, 2, 0, 0)
        buttsizer.AddGrowableCol(0)

        buttsizer.Add(self.edit1, 0, wx.EXPAND | wx.FIXED_MINSIZE, 0)
        buttsizer.Add(okbut, 0, wx.FIXED_MINSIZE, 0)

        colsizer.Add(self.list, 1, wx.EXPAND, 0)
        colsizer.Add(buttsizer, 1, wx.EXPAND, 0)
        panel.SetAutoLayout(True)
        panel.SetSizer(colsizer)

        wx.EVT_BUTTON(self, okbut_id, self.Scrape)
        wx.EVT_CLOSE(self, self.Close)
        self.Show(True)

    def Scrape(self, url):
        # `url` actually receives the wx button event (parameter name kept
        # for compatibility); the real URL is read from the text control.
        url = self.edit1.GetValue()
        t = T_Scrape(self.PostScrape, "", url)
        t.start()

    def PostScrape(self, params, data):
        """Fill the list with scrape results.

        `data` is the (success, files) pair built by T_Scrape, where
        `files` maps info-hash -> stats dict. Does nothing when the
        scrape failed (files is None).
        """
        files = data[1]
        if files is None:
            return

        i = 0
        # Renamed loop variable (was `data`) so it no longer shadows the
        # `data` parameter.
        for hash, stats in files.items():
            if stats.get('name') is None:
                name = "No name field"
            else:
                name = "%s" % stats.get('name')
            # Trackers may omit any of these optional keys; default to 0
            # rather than crashing on "%d" % None.
            downloaded = "%d" % stats.get('downloaded', 0)
            complete = "%d" % stats.get('complete', 0)
            incomplete = "%d" % stats.get('incomplete', 0)

            self.list.InsertRow(i, [name, complete, incomplete, downloaded])
            i += 1

    def Close(self, event=None):
        self.Destroy()







class T_Scrape(Thread):
    """Worker thread that fetches a tracker "scrape" URL, bdecodes the
    reply and delivers (success, files) via `invokelater(callback, ...)`."""

    def __init__(self, invokelater, callback, url, infohash=None):
        """invokelater -- callable(callback, (success, data)) used to
                          deliver the result (e.g. onto the GUI thread).
        callback    -- opaque value forwarded as first arg to invokelater.
        url         -- full scrape URL to fetch.
        infohash    -- if given, narrow the result to that torrent's entry.
        """
        Thread.__init__(self)
        self.infohash = infohash
        self.success = False
        self.Callback = callback
        self.InvokeLater = invokelater
        self.url = url

    def run(self):
        h = None
        try:
            h = urlopen(self.url)
            self.success = True
        except (IOError, URLError, socket.error, httplib.HTTPException), e:
            print "ERROR: urlopen(%s) failed" % self.url
            print str(e)

        data = None

        if self.success:
            try:
                try:
                    data = bdecode(h.read())
                    data = data['files']
                    if self.infohash:
                        data = data[self.infohash]
                # Malformed bencoding, missing 'files' key, unknown hash
                # or a read error.  The original bare `except:` swallowed
                # everything, including KeyboardInterrupt.
                except (ValueError, KeyError, TypeError, IndexError, IOError):
                    self.success = False
                    # Reset so a half-processed value is never delivered.
                    data = None
            finally:
                h.close()  # don't leak the tracker connection

        try:
            self.InvokeLater(self.Callback, (self.success, data))
            return True
        except wx.PyDeadObjectError:
            # The GUI object owning the callback was destroyed; drop result.
            pass
        return False





def dummy_callback(params1, params2):

    print params1, params2

    

    data = params2[2]



if __name__ == "__main__":
    # Standalone test harness: open the scrape dialog with no config/images.
    app = wx.PySimpleApp()
    # Pass only three arguments: the original call passed an extra None
    # which was silently consumed by the `id` parameter (expected an int,
    # default -1).
    frame = ScrapeDlg(None, None, None)
    app.SetTopWindow(frame)
    frame.Show()
    app.MainLoop()

        

www.java2java.com | Contact Us
Copyright 2009 - 12 Demo Source and Support. All rights reserved.
All other trademarks are property of their respective owners.