
sockets and objects

1 reply
snyff
I'm writing a small socket-based proxy, but I have a problem:
when I run it against a simple page (a single GET) it works perfectly,
but as soon as I do another GET it crashes. Does anyone have an idea?

The code:


import socket
import os
import time
import re

class Proxy:

    def __init__(self):
        self.port = 8081
        self.host = ''
        self.mode = "Normal"  # "Maintenance"
        self.serveur_nominal = '192.168.0.1'
        self.serveur_secours = '127.0.0.1'
        self.socketclient = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socketserveur = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.maintenancefile = 'index.maintenance.html'


    def Run(self):
        self.socketserveur.bind((self.host, self.port))
        self.socketserveur.listen(1)
        while 1:
            self.conn, self.addr = self.socketserveur.accept()
            if os.fork():
                # parent: close its copy of the connection and go back to accept()
                self.conn.close()
                continue
            else:
                # child: handle the request
                self.traitement_requete()
                self.conn.close()

    def traitement_requete(self):
        print 'Connected by', self.addr
        #time.sleep(60)

        if self.mode == "Maintenance":
            ##### Mode maintenance ######
            self.maintenance()
            return 0
            ##### Fin du Mode maintenance ######

        if self.mode == "Normal":
            ##### Mode Normal #####
            self.normal()
            return 0

        if self.mode == "Redondance":
            pass  # not implemented yet

    def maintenance(self):
        print "Maintenance du serveur"
        data = self.conn.recv(1024)
        fd = open(self.maintenancefile, 'r')
        while 1:
            data = fd.read(1024)
            if data == "":
                break
            print data
            self.conn.send(data)

    def normal(self):
        print "Mode Normal"
        data = self.conn.recv(1024)
        serveurs = re.findall('GET http://(.*?)/', data)
        for serveur in serveurs:
            print "Serveur : "
            print serveur
            try:
                self.socketclient.connect((serveur, 80))
            except socket.error:
                print 'socket deja utilisee'
            print data
            self.socketclient.send(data)
            while 1:
                buffer = self.socketclient.recv(1024)
                if buffer == "":
                    break
                self.conn.send(buffer)

            self.socketclient.close()

if __name__ == '__main__':
    proxy = Proxy()
    proxy.Run()
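
For what it's worth, I wonder whether the crash comes from reusing the same
self.socketclient for every request: once a TCP socket has been connected and
closed it cannot be connect()ed again, so the second GET would fail on it.
A minimal sketch of normal() with a fresh client socket per request (just a
guess on my part, not tested):

    def normal(self):
        print "Mode Normal"
        data = self.conn.recv(1024)
        serveurs = re.findall('GET http://(.*?)/', data)
        for serveur in serveurs:
            # guess: create a brand new outgoing socket for each request
            socketclient = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            socketclient.connect((serveur, 80))
            socketclient.send(data)
            while 1:
                buffer = socketclient.recv(1024)
                if buffer == "":
                    break
                self.conn.send(buffer)
            socketclient.close()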


Thanks in advance.


Michel Claveau - non-trivial meta-galactic abstraction in perpetual flight.
Good evening!

I found a small proxy somewhere. I no longer remember where, but it works
pretty well.

I'm putting it below.

--
Michel Claveau








import asyncore, socket, os, time, calendar, sys, re, optparse, logging


class Listener (asyncore.dispatcher):

    def __init__ (self, port, iplist):

        asyncore.dispatcher.__init__(self)

        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind(('', port))
        self.listen(5)

        ipstring = '^'+'|'.join([ip.replace('?','\d').replace('*','\d+').replace('.','[.]') for ip in iplist])+'$'
        self.ipcheck = re.compile(ipstring).match

    def handle_accept (self):

        sock, address = self.accept()
        if self.ipcheck(address[0]):
            HttpClient(sock)
        else:
            logging.warning('blocked incoming request from %s:%i', *address)


class Http (asyncore.dispatcher):

    static = False
    flat = False
    debug = False
    intolerant = False
    external = None
    external_auth = None
    chunk = 65536
    maxhead = 600
    alias = {}

    id = 0
    pointer = 0
    beg, end = 0, 1e99
    sending = False
    receiving = False
    counterpart = None
    downloads = {}

    timefmt = '%a, %d %b %Y %H:%M:%S GMT'
    message = {
        200: 'OK',
        206: 'Partial Content',
        304: 'Not Modified',
        404: 'Not Found',
        416: 'Requested Range Not Satisfiable',
        503: 'Service Unavailable' }

    def __init__ (self, sock=None, counterpart=None):

        asyncore.dispatcher.__init__(self, sock)

        if counterpart:
            self.counterpart = counterpart
            self.id = counterpart.id
        else:
            self.id = Http.id = Http.id + 1
        self.log = logging.getLogger('%s %i' % (self.__class__.__name__, self.id))
        if sock:
            self.handle_connect()
        self.data = os.tmpfile()

    def handle_connect (self):

        if self.addr:
            self.log.stat('bound to %s', self.addr[0])

    def writable (self):

        if self.sending:
            return self.header or self.pointer > self.end or self.counterpart.data.tell() > self.pointer or not self.counterpart.receiving
        else:
            return False

    def handle_write (self):

        if self.header:
            self.header = self.header[self.send(self.header):]
        elif self.pointer > self.end:
            self.handle_close()
        elif self.counterpart.data.tell() > self.pointer:
            self.counterpart.data.seek(self.pointer)
            if self.pointer + self.chunk > self.end:
                chunksize = self.end - self.pointer + 1
            else:
                chunksize = self.chunk
            self.pointer += self.send(self.counterpart.data.read(chunksize))
            self.counterpart.data.seek(0,2)
        elif not self.counterpart.receiving:
            self.handle_close()

    def handle_read (self):

        chunk = self.recv(self.chunk)
        self.data.write(chunk)
        if self.receiving or not chunk:
            return

        self.data.seek(0)
        line = self.data.readline(self.maxhead)
        head = line.strip().split(' ', 2)
        body = {}
        while line.endswith('\n'):
            if not line.strip():
                break
            line = self.data.readline(self.maxhead)
            if ':' in line:
                key, value = line.split(':', 1)
                body[key.lower()] = value.strip()
        else:
            if len(line) >= self.maxhead:
                self.log.error('header line exceeded maximum allowed %i bytes', self.maxhead)
                self.handle_close()
            return

        data = self.data.read()
        self.data.seek(0)
        self.data.write(data)
        self.data.truncate()
        self.receiving = True

        self.handle_header(head, body)

    def set_header (self, head, body):

        lines = [' '.join(head)] + map(': '.join, body.items())
        self.log.debug('received header:\n\n %s\n', '\n '.join(lines))
        self.header = '\r\n'.join(lines)+'\r\n\r\n'
        self.sending = True

    def handle_error (self):

        exception, value, traceback = sys.exc_info()
        if self.intolerant or exception is KeyboardInterrupt:
            raise
        elif self.debug:
            self.log.exception('caught an exception, closing socket')
        else:
            while traceback.tb_next:
                traceback = traceback.tb_next
            self.log.error('caught an exception in %s: %s', traceback.tb_frame.f_code.co_name, value)

        self.close()

    def handle_close (self):

        self.close()
        self.counterpart = None
        self.log.debug('closed')
        if self.pointer > self.beg:
            self.log.stat('received %i bytes', self.pointer - self.beg)

        self.handle_data()


class HttpClient (Http):

    spliturl = re.compile(r'^http:/+([^/:]*):*([^/]*)(.*)$').match
    range = None

    def handle_header (self, head, body):

        proxy = self.spliturl(head[1])
        if proxy:
            self.log.info('proxy request for %s', head[1])
            host, port, path = proxy.groups()
            port = int(port or 80)
            self.path = host + path
            self.direct = False
            body['connection'] = 'close'
            body['host'] = host
            body.pop('keep-alive', None)
            body.pop('proxy-connection', None)
            body.pop('accept-encoding', None)
            body.pop('proxy-authorization', None)
            if not self.external:
                head[1] = path
            elif self.external_auth:
                body['proxy-authorization'] = self.external_auth
        else:
            self.log.info('direct request for %s', head[1])
            self.path = head[1]
            if 'host' in body:
                self.direct = 'http://' + body['host'] + head[1].rstrip('/')
            else:
                self.direct = '.'

        self.counterpart = HttpServer(counterpart=self)

        func = 'handle_header_'+head[0]
        response = hasattr(self, func) and getattr(self, func)(body)
        if not response:

            try:
                assert proxy, 'direct request to hidden or unaccessible file'
                self.log.debug('connecting to %s', host)
                self.counterpart.create_socket(socket.AF_INET, socket.SOCK_STREAM)
                self.counterpart.settimeout(30.)
                if self.external:
                    self.counterpart.connect(self.external)
                else:
                    assert port == 80 or port > 1024, 'illegal attempt to connect to port %i' % port
                    self.counterpart.connect((host, port))
            except:
                exception, value, traceback = sys.exc_info()
                self.log.error('connection failed: %s', value)
                print >> self.counterpart.data, '<html><body><pre><h1>Service unavailable</h1></pre></body></html>'
                response = 503

            self.counterpart.set_header(head, body)

        if response:
            head = ['HTTP/1.1', str(response), self.message[response]]
            body = {}
            self.counterpart.handle_header(head, body)

    def handle_header_GET (self, body):

        range = 'range' in body and re.match(r'^bytes=(\d*)-(\d*)$', body.pop('range'))
        if range:
            beg, end = range.groups()
            self.range = beg or 'end-'+ end, beg and end or 'end-1'
            self.log.info('requested range: bytes %s to %s' % self.range)

        head = ''
        for tail in self.path.split('/'):
            head = os.path.join(head, tail)
            if head in self.alias:
                head = self.alias[head]
            elif os.path.islink(head) or tail.startswith('.'):
                return False
        if self.direct or not self.flat:
            if not tail or os.path.isdir(head):
                path = os.path.join(head, 'index.html')
            else:
                path = head
        else:
            if tail:
                path = tail
            else:
                return False

        self.log.debug('cache position: %s', path)

        if path in self.downloads:
            self.counterpart = self.downloads[path]
            self.counterpart.counterpart = self
            self.log.info('joined running download %i', self.counterpart.id)
            return 200
        elif os.path.isfile(path):
            self.counterpart.path = path
            if self.static or self.direct:
                return 304
            else:
                mtime = os.path.getmtime(path)
                value = body.get('if-modified-since')
                if not value or mtime > calendar.timegm(time.strptime(value, self.timefmt)):
                    self.log.debug('checking modification since %s', time.ctime(mtime))
                    body['if-modified-since'] = time.strftime(self.timefmt, time.gmtime(mtime))
        elif not self.direct:
            self.counterpart.path = path
            if self.static and 'if-modified-since' in body:
                return 304
        elif os.path.isdir(head or os.curdir):
            print >> self.counterpart.data, '<html><body><pre><h1>Index of %s</h1>' % self.path
            print >> self.counterpart.data, '<b>Name                                                       Size  Last modified</b>\n'
            for tail in os.listdir(head or os.curdir):
                if tail.startswith('.'):
                    continue
                path = os.path.join(head, tail)
                if os.path.isdir(path):
                    print >> self.counterpart.data, '<a href="%s/%s/">%-63s -' % (self.direct, tail, tail[:50]+'/</a>'),
                else:
                    print >> self.counterpart.data, '<a href="%s/%s">%-54s %10i' % (self.direct, tail, tail[:50]+'</a>', os.path.getsize(path)),
                print >> self.counterpart.data, time.ctime(os.path.getmtime(path))
            print >> self.counterpart.data, '</pre></body></html>'
            return 200
        else:
            print >> self.counterpart.data, '<html><body><pre><h1>Not found</h1></pre></body></html>'
            return 404

        return False

    def handle_header_POST (self, body):

        if 'content-length' not in body:
            self.log.error('unspecified content length in post request')
            return 503

        return False

    def handle_data (self):

        pass


class HttpServer (Http):

    time = None
    chunked = False
    size = 0
    path = ''

    def handle_header (self, head, body):

        func = 'handle_header_'+head[1]
        if hasattr(self, func) and getattr(self, func)(body):

            if self.counterpart.range:
                end = self.size
                beg, end = self.counterpart.beg, self.counterpart.end = map(eval, self.counterpart.range)
                if end >= beg >= 0:
                    response = 206
                    self.counterpart.pointer = beg
                    body['content-range'] = 'bytes %i-%i/%s' % (beg, end, self.size or '*')
                    body['content-length'] = str(end - beg + 1)
                else:
                    response = 416
                    body['content-range'] = 'bytes */%s' % (self.size or '*')
                    body['content-length'] = '0'
            else:
                response = 200
                if self.size:
                    self.counterpart.end = self.size - 1
                    body['content-length'] = str(self.size)

            if self.chunked:
                body['transfer-encoding'] = 'chunked'

            head[1] = str(response)
            head[2] = self.message[response]

        body['connection'] = 'close'
        if 'date' in body:
            self.time = body['date']
        else:
            body['date'] = self.time or time.strftime(self.timefmt, time.gmtime())

        self.counterpart.set_header(head, body)

    def handle_header_200 (self, body):

        if not self.path:
            return False

        self.downloads[self.path] = self

        if 'content-length' in body:
            self.size = int(body['content-length'])
        if 'transfer-encoding' in body:
            if body['transfer-encoding'].lower() == 'chunked':
                self.chunked = True
            else:
                self.log.warning('unsupported transfer encoding %(transfer-encoding)r, not cached', body)
                del self.downloads[self.path]
                return False

        self.log.info('serving file from remote host')
        return True

    def handle_header_304 (self, body):

        if not os.path.isfile(self.path):
            return False

        self.data = open(self.path, 'r')
        self.data.seek(0,2)
        self.size = self.data.tell()

        self.log.info('serving file from cache')
        return True

    def handle_data (self):

        self.receiving = False
        if self.path in self.downloads:
            del self.downloads[self.path]
        else:
            return

        self.log.stat('sent %i bytes', self.data.tell())

        if self.chunked:
            self.log.debug('post processing chunked data')
        elif self.size:
            assert self.size == self.data.tell(), 'file not cached: size mismatch'
        else:
            self.log.warning('unable to verify file size')

        chunks = []
        try:
            self.data.seek(0)
            if self.chunked:
                chunksize = int(self.data.readline().split(';')[0], 16)
            else:
                chunksize = self.chunk
            chunk = self.data.read(chunksize)
            while chunk:
                if self.chunked:
                    assert self.data.read(2) == '\r\n', 'file not cached: chunked data error'
                    chunksize = int(self.data.readline().split(';')[0], 16)
                chunks.append(chunk)
                chunk = self.data.read(chunksize)
        finally:
            self.data.seek(0,2)

        self.prepare_path(self.path)
        open(self.path, 'w').writelines(chunks)

        self.log.info('cached %s', self.path)

        if self.time:
            mtime = calendar.timegm(time.strptime(self.time, self.timefmt))
            os.utime(self.path, (mtime, mtime))

    def prepare_path (self, path):

        dir = os.path.dirname(path)
        if dir and not os.path.isdir(dir):
            if os.path.isfile(dir):
                self.log.warning('directory %s mistaken for a file', dir)
                os.remove(dir)
            else:
                self.prepare_path(dir)
            os.mkdir(dir)


def main ():

    parser = optparse.OptionParser()
    parser.add_option('-p', '--port', type='int', default=8080, help='listen on PORT for incoming connections')
    parser.add_option('-i', '--ip', action='append', default=['127.0.0.1'], help='allow connections from these IP addresses')
    parser.add_option('-d', '--dir', type='string', default=os.curdir, help='cache in DIR instead of current directory')
    parser.add_option('-a', '--alias', metavar='STR', action='append', default=[], help='cache in path:url1:url2:...')
    parser.add_option('-s', '--static', action='store_true', help='never check for modifications')
    parser.add_option('-f', '--flat', action='store_true', help='save files in a single directory')
    parser.add_option('-e', '--external', metavar='EX', help='forward requests to external proxy server')
    parser.add_option('-q', '--quiet', action='count', default=0, help='decrease verbosity')
    daemon = optparse.OptionGroup(parser, 'Daemon Options')
    daemon.add_option('--daemon', action='store_true', help='enter daemon mode')
    daemon.add_option('--log', type='string', help='write output to LOG')
    daemon.add_option('--pid', type='string', help='write process id to PID')
    daemon.add_option('--user', type='string', help='change uid to USER')
    parser.add_option_group(daemon)
    debug = optparse.OptionGroup(parser, 'Debugging Options')
    debug.add_option('--debug', action='store_true', help='enter debug mode')
    debug.add_option('--intolerant', action='store_true', help='crash on exceptions')
    parser.add_option_group(debug)
    options, args = parser.parse_args()

    try:
        Listener(options.port, options.ip)
    except socket.error:
        parser.error('port %i is not available' % options.port)
    except re.error:
        parser.error('invalid ip address format %r' % options.ip)

    for alias in options.alias:
        aliases = alias.split(':')
        dir = aliases.pop(0)
        for alias in aliases:
            Http.alias[alias] = dir
    if options.static:
        Http.static = True
    if options.flat:
        Http.flat = True
    if options.external:
        try:
            addr = options.external
            if '@' in addr:
                import base64
                auth, addr = options.external.split('@')
                Http.external_auth = 'Basic '+ base64.encodestring(auth)[:-1]
            host, port = addr.split(':')
            Http.external = host, int(port)
        except:
            parser.error('invalid external address %r' % options.external)

    logging.STAT = logging.INFO + 1
    logging.addLevelName(logging.STAT, 'STAT')
    logging.Logger.stat = lambda self, *args: self.log(logging.STAT, *args)
    if options.debug:
        Http.debug = True
        logging.root.setLevel(logging.DEBUG)
        if options.intolerant:
            Http.intolerant = True
    else:
        logging.root.setLevel([logging.INFO, logging.STAT, logging.WARNING, logging.ERROR, logging.CRITICAL][min(4, options.quiet)])

    if options.daemon:
        if options.log:
            try:
                handler = logging.FileHandler(options.log)
                handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(name)s %(message)s', '%d %b %Y %H:%M:%S'))
                logging.root.addHandler(handler)
            except IOError:
                parser.error('invalid log file %r' % options.log)
        if options.pid:
            try:
                pidfile = open(options.pid, 'w')
            except IOError:
                parser.error('invalid pid file %r' % options.pid)
        else:
            pidfile = sys.stdout
        if options.user:
            try:
                import pwd
                pwnam = pwd.getpwnam(options.user)
                os.setgid(pwnam[3])
                os.setuid(pwnam[2])
            except KeyError:
                parser.error('user %r does not exist' % options.user)
            except OSError:
                parser.error('no permission for changing to user %r' % options.user)
        pid = os.fork()
        if pid:
            pidfile.write(str(pid))
            pidfile.close()
            return
    else:
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter('%(levelname)s: %(name)s %(message)s'))
        logging.root.addHandler(handler)

    try:
        os.chdir(options.dir)
    except OSError:
        parser.error('invalid directory %r' % options.dir)
    if not os.access(os.curdir, os.R_OK | os.W_OK):
        parser.error('no read/write permission for directory %r' % options.dir)

    sys.stdout = sys.stderr = open('/dev/null', 'w')
    logging.root.name = 'HttpReplicator'
    try:
        logging.info('started')
        asyncore.loop()
    except KeyboardInterrupt:
        logging.info('terminated')
    except:
        logging.exception('caught an exception, terminated')

if __name__ == '__main__':

    main()
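
Looking at the option parser above, I'd expect it to be launched roughly like
this (the script name and the values below are only examples on my side, I
haven't re-checked them):

python replicator.py --port 8081 --ip '192.168.0.*' --dir /tmp/cache
python replicator.py --daemon --log replicator.log --pid replicator.pid

The --ip patterns accept * and ? as digit wildcards, as you can see in
Listener.__init__, and by default only 127.0.0.1 is allowed to connect.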