I'm writing a small socket-based proxy, but I have a problem: when I run it
against a simple page (a single GET) it works perfectly, but as soon as I
issue another GET it crashes. Does anyone have an idea? Here is the code:
import re
import socket

class Proxy:
    # __init__ (which sets self.mode, self.conn, self.socketclient and
    # self.maintenancefile) is not shown in the post; the mode dispatch
    # below is presumably the body of Run().

    def Run(self):
        if self.mode == "Maintenance":
            ##### Maintenance mode #####
            self.maintenance()
            return 0
            ##### End of maintenance mode #####
        if self.mode == "Normal":
            ##### Normal mode #####
            self.normal()
            return 0
        if self.mode == "Redondance":
            pass  # (redundancy mode not shown in the post)

    def maintenance(self):
        print "Maintenance du serveur"
        data = self.conn.recv(1024)
        fd = open(self.maintenancefile, 'r')
        while 1:
            data = fd.read(1024)
            if data == "":
                break
            print data
            self.conn.send(data)

    def normal(self):
        print "Mode Normal"
        data = self.conn.recv(1024)
        serveurs = re.findall('GET http://(.*?)/', data)
        for serveur in serveurs:
            print "Serveur : "
            print serveur
            try:
                self.socketclient.connect((serveur, 80))
            except socket.error:
                print 'socket deja utilisee'
            print data
            self.socketclient.send(data)
            while 1:
                buffer = self.socketclient.recv(1024)
                if buffer == "":
                    break
                self.conn.send(buffer)
            self.socketclient.close()

if __name__ == '__main__':
    proxy = Proxy()
    proxy.Run()
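
A likely cause of the crash on the second GET: self.socketclient is connected once in normal() and then closed at the end of the loop, and a socket object cannot be reconnected after close() (the next connect() or send() typically raises socket.error). A minimal sketch, assuming the same one-request-at-a-time structure as above, that opens a fresh upstream socket for every request (the name upstream is just illustrative):

def normal(self):
    print "Mode Normal"
    data = self.conn.recv(1024)
    for serveur in re.findall('GET http://(.*?)/', data):
        # create a brand-new client socket for each request instead of
        # reusing self.socketclient
        upstream = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            upstream.connect((serveur, 80))
            upstream.send(data)
            while 1:
                buffer = upstream.recv(1024)
                if buffer == "":
                    break
                self.conn.send(buffer)
        finally:
            upstream.close()  # closing this socket does not affect later requests

The browser-side socket is in the same situation: if self.conn comes from a single accept() at start-up, a second request from the browser usually arrives on a new connection, so the accept loop (not shown in the post) needs to hand a fresh self.conn to Run() each time.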
# (excerpt: the enclosing method definition is not shown)
if self.addr:
    self.log.stat('bound to %s', self.addr[0])

def writable (self):
    if self.sending:
        return (self.header or self.pointer > self.end
                or self.counterpart.data.tell() > self.pointer
                or not self.counterpart.receiving)
    else:
        return False
# (excerpt from the read handler; the enclosing method definition is not shown)
chunk = self.recv(self.chunk)
self.data.write(chunk)
if self.receiving or not chunk:
    return
self.data.seek(0)
line = self.data.readline(self.maxhead)
head = line.strip().split(' ', 2)
body = {}
while line.endswith('\n'):
    if not line.strip():
        break
    line = self.data.readline(self.maxhead)
    if ':' in line:
        key, value = line.split(':', 1)
        body[key.lower()] = value.strip()
else:
    # loop ended without a blank line: the header is not complete yet
    if len(line) >= self.maxhead:
        self.log.error('header line exceeded maximum allowed %i bytes', self.maxhead)
        self.handle_close()
    return
data = self.data.read()
self.data.seek(0)
self.data.write(data)
self.data.truncate()
self.receiving = True
# (excerpt from the error handler; the enclosing method definition is not shown)
exception, value, traceback = sys.exc_info()
if self.intolerant or exception is KeyboardInterrupt:
    raise
elif self.debug:
    self.log.exception('caught an exception, closing socket')
else:
    while traceback.tb_next:
        traceback = traceback.tb_next
    self.log.error('caught an exception in %s: %s',
                   traceback.tb_frame.f_code.co_name, value)

spliturl = re.compile(r'^http:/+([^/:]*):*([^/]*)(.*)$').match
range = None
def handle_header (self, head, body):
    proxy = self.spliturl(head[1])
    if proxy:
        self.log.info('proxy request for %s', head[1])
        host, port, path = proxy.groups()
        port = int(port or 80)
        self.path = host + path
        self.direct = False
        body['connection'] = 'close'
        body['host'] = host
        body.pop('keep-alive', None)
        body.pop('proxy-connection', None)
        body.pop('accept-encoding', None)
        body.pop('proxy-authorization', None)
        if not self.external:
            head[1] = path
        elif self.external_auth:
            body['proxy-authorization'] = self.external_auth
    else:
        self.log.info('direct request for %s', head[1])
        self.path = head[1]
        if 'host' in body:
            self.direct = 'http://' + body['host'] + head[1].rstrip('/')
        else:
            self.direct = '.'
    self.counterpart = HttpServer(counterpart=self)
    func = 'handle_header_'+head[0]
    response = hasattr(self, func) and getattr(self, func)(body)
    if not response:
        try:
            assert proxy, 'direct request to hidden or unaccessible file'
            self.log.debug('connecting to %s', host)
            self.counterpart.create_socket(socket.AF_INET, socket.SOCK_STREAM)
            self.counterpart.settimeout(30.)
            if self.external:
                self.counterpart.connect(self.external)
            else:
                assert port == 80 or port > 1024, 'illegal attempt to connect to port %i' % port
                self.counterpart.connect((host, port))
        except:
            exception, value, traceback = sys.exc_info()
            self.log.error('connection failed: %s', value)
            print >> self.counterpart.data, '<html><body><pre><h1>Service unavailable</h1></pre></body></html>'
            response = 503
    self.counterpart.set_header(head, body)
    if response:
        head = ['HTTP/1.1', str(response), self.message[response]]
        body = {}
        self.counterpart.handle_header(head, body)
def handle_header_GET (self, body):
    range = 'range' in body and re.match(r'^bytes=(\d*)-(\d*)$', body.pop('range'))
    if range:
        beg, end = range.groups()
        self.range = beg or 'end-'+ end, beg and end or 'end-1'
        self.log.info('requested range: bytes %s to %s' % self.range)
    head = ''
    for tail in self.path.split('/'):
        head = os.path.join(head, tail)
        if head in self.alias:
            head = self.alias[head]
        elif os.path.islink(head) or tail.startswith('.'):
            return False
    if self.direct or not self.flat:
        if not tail or os.path.isdir(head):
            path = os.path.join(head, 'index.html')
        else:
            path = head
    else:
        if tail:
            path = tail
        else:
            return False
    self.log.debug('cache position: %s', path)
    if path in self.downloads:
        self.counterpart = self.downloads[path]
        self.counterpart.counterpart = self
        self.log.info('joined running download %i', self.counterpart.id)
        return 200
    elif os.path.isfile(path):
        self.counterpart.path = path
        if self.static or self.direct:
            return 304
        else:
            mtime = os.path.getmtime(path)
            value = body.get('if-modified-since')
            if not value or mtime > calendar.timegm(time.strptime(value, self.timefmt)):
                self.log.debug('checking modification since %s', time.ctime(mtime))
                body['if-modified-since'] = time.strftime(self.timefmt, time.gmtime(mtime))
    elif not self.direct:
        self.counterpart.path = path
        if self.static and 'if-modified-since' in body:
            return 304
    elif os.path.isdir(head or os.curdir):
        print >> self.counterpart.data, '<html><body><pre><h1>Index of %s</h1>' % self.path
        print >> self.counterpart.data, '<b>Name                                                      Size  Last modified</b>\n'
        for tail in os.listdir(head or os.curdir):
            if tail.startswith('.'):
                continue
            path = os.path.join(head, tail)
            if os.path.isdir(path):
                print >> self.counterpart.data, '<a href="%s/%s/">%-63s -' % (self.direct, tail, tail[:50]+'/</a>'),
            else:
                print >> self.counterpart.data, '<a href="%s/%s">%-54s %10i' % (self.direct, tail, tail[:50]+'</a>', os.path.getsize(path)),
            print >> self.counterpart.data, time.ctime(os.path.getmtime(path))
        print >> self.counterpart.data, '</pre></body></html>'
        return 200
    else:
        print >> self.counterpart.data, '<html><body><pre><h1>Not found</h1></pre></body></html>'
        return 404
    return False
def handle_header_POST (self, body):
    if 'content-length' not in body:
        self.log.error('unspecified content length in post request')
        return 503
    return False

def handle_data (self):
    pass

class HttpServer (Http):

    time = None
    chunked = False
    size = 0
    path = ''
    def handle_header (self, head, body):
        func = 'handle_header_'+head[1]
        if hasattr(self, func) and getattr(self, func)(body):
            if self.counterpart.range:
                end = self.size
                beg, end = self.counterpart.beg, self.counterpart.end = map(eval, self.counterpart.range)
                if end >= beg >= 0:
                    response = 206
                    self.counterpart.pointer = beg
                    body['content-range'] = 'bytes %i-%i/%s' % (beg, end, self.size or '*')
                    body['content-length'] = str(end - beg + 1)
                else:
                    response = 416
                    body['content-range'] = 'bytes */%s' % (self.size or '*')
                    body['content-length'] = '0'
            else:
                response = 200
                if self.size:
                    self.counterpart.end = self.size - 1
                    body['content-length'] = str(self.size)
            if self.chunked:
                body['transfer-encoding'] = 'chunked'
        body['connection'] = 'close'
        if 'date' in body:
            self.time = body['date']
        else:
            body['date'] = self.time or time.strftime(self.timefmt, time.gmtime())
        self.counterpart.set_header(head, body)
    def handle_header_200 (self, body):
        if not self.path:
            return False
        self.downloads[self.path] = self
        if 'content-length' in body:
            self.size = int(body['content-length'])
        if 'transfer-encoding' in body:
            if body['transfer-encoding'].lower() == 'chunked':
                self.chunked = True
            else:
                self.log.warning('unsupported transfer encoding %(transfer-encoding)r, not cached', body)
                del self.downloads[self.path]
                return False
        self.log.info('serving file from remote host')
        return True

    # (excerpt from a later method; the enclosing definition is not shown)
    if self.time:
        mtime = calendar.timegm(time.strptime(self.time, self.timefmt))
        os.utime(self.path, (mtime, mtime))

    def prepare_path (self, path):
        dir = os.path.dirname(path)
        if dir and not os.path.isdir(dir):
            if os.path.isfile(dir):
                self.log.warning('directory %s mistaken for a file', dir)
                os.remove(dir)
            else:
                self.prepare_path(dir)
            os.mkdir(dir)
def main ():
    parser = optparse.OptionParser()
    parser.add_option('-p', '--port', type='int', default=80,
                      help='listen on PORT for incoming connections')
    parser.add_option('-i', '--ip', action='append', default=['127.0.0.1'],
                      help='allow connections from these IP addresses')
    parser.add_option('-d', '--dir', type='string', default=os.curdir,
                      help='cache in DIR instead of current directory')
    parser.add_option('-a', '--alias', metavar='STR', action='append',
                      default=[], help='cache in path:url1:url2:...')
    parser.add_option('-s', '--static', action='store_true',
                      help='never check for modifications')
    parser.add_option('-f', '--flat', action='store_true',
                      help='save files in a single directory')
    parser.add_option('-e', '--external', metavar='EX',
                      help='forward requests to external proxy server')
    parser.add_option('-q', '--quiet', action='count', default=0,
                      help='decrease verbosity')
    daemon = optparse.OptionGroup(parser, 'Daemon Options')
    daemon.add_option('--daemon', action='store_true', help='enter daemon mode')
    daemon.add_option('--log', type='string', help='write output to LOG')
    daemon.add_option('--pid', type='string', help='write process id to PID')
    daemon.add_option('--user', type='string', help='change uid to USER')
    parser.add_option_group(daemon)
    debug = optparse.OptionGroup(parser, 'Debugging Options')
    debug.add_option('--debug', action='store_true', help='enter debug mode')
    debug.add_option('--intolerant', action='store_true', help='crash on exceptions')
    parser.add_option_group(debug)
    options, args = parser.parse_args()
    try:
        Listener(options.port, options.ip)
    except socket.error:
        parser.error('port %i is not available' % options.port)
    except re.error:
        parser.error('invalid ip address format %r' % options.ip)
    for alias in options.alias:
        aliases = alias.split(':')
        dir = aliases.pop(0)
        for alias in aliases:
            Http.alias[alias] = dir
    if options.static:
        Http.static = True
    if options.flat:
        Http.flat = True
    if options.external:
        try:
            addr = options.external
            if '@' in addr:
                import base64
                auth, addr = options.external.split('@')
                Http.external_auth = 'Basic '+ base64.encodestring(auth)[:-1]
            host, port = addr.split(':')
            Http.external = host, int(port)
        except:
            parser.error('invalid external address %r' % options.external)
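
For reference, the spliturl pattern used in handle_header splits an absolute-form request target into host, optional port and path, with an empty port falling back to 80. A small sketch of what it yields (the URL is just an illustrative example):

import re

spliturl = re.compile(r'^http:/+([^/:]*):*([^/]*)(.*)$').match

match = spliturl('http://example.com:8080/some/file.html')
host, port, path = match.groups()   # ('example.com', '8080', '/some/file.html')
port = int(port or 80)              # empty second group means no explicit port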