A simple test of concurrent downloads with Lua coroutines

Downloading eight 1 MB files took 12.038s, 10.316s, 8.955s, 11.275s and 9.499s over five runs (the Lua implementation is below).

require "socket"

--host = "www.w3.org"
--file = "/TR/REC-html32.html"

function lua_string_split(str, split_char)    -- split a string on a separator
    local sub_str_tab = {}
    while str ~= nil do
        local pos = string.find(str, split_char, 1, true)   -- plain find, no pattern matching
        if not pos then
            sub_str_tab[#sub_str_tab + 1] = str
            break
        end
        sub_str_tab[#sub_str_tab + 1] = string.sub(str, 1, pos - 1)
        str = string.sub(str, pos + 1)
    end
    return sub_str_tab
end


function download(url)
    local infolist = lua_string_split(url, "/")
    local cache_file = infolist[#infolist]          -- save under the last path component

    local host = infolist[3]                        -- {"http:", "", host, ...}
    local pos = string.find(url, host, 1, true)
    local file = "/"
    if pos then
        file = string.sub(url, pos + #host)         -- request path, e.g. "/a.file"
    end

    local out = io.open(cache_file, "wb")
    local c = assert(socket.connect(host, 80))
    local count = 0
    local header_end = nil                          -- position of "\r\n\r\n" once seen

    c:send("GET " .. file .. " HTTP/1.0\r\nHost: " .. host .. "\r\n\r\n")
    while true do
        local data, status = receive(c)
        count = count + #data
        if #data > 0 then
            if header_end == nil then               -- strip the response header
                -- assumes "\r\n\r\n" arrives inside a single chunk, which holds for this test
                header_end = string.find(data, "\r\n\r\n")
                if header_end then
                    out:write(string.sub(data, header_end + 4))
                end
            else
                out:write(data)
            end
        end
        if status == "closed" then break end
    end
    c:close()
--  print(file, count)
    out:close()
--  os.execute("del " .. cache_file)                -- Windows: remove the file after the test
end

-- Non-blocking receive: on timeout, yield the connection back to the dispatcher
function receive(connection)
    connection:settimeout(0)                        -- never block inside a coroutine
    local s, status, partial = connection:receive(2^10 * 100)
    if status == "timeout" then
        coroutine.yield(connection)
    end
    return s or partial or "", status
end

threads = {}
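-- get() wraps each download in a coroutine and queues it here; dispatch() resumes them in round-robin order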

function get(url)
    local co = coroutine.create(function ()
        download(url)
    end)
    table.insert(threads, co)
end

function dispatch()
    local i = 1
    local connections = {}
    while true do
        if threads[i] == nil then                   -- end of the list?
            if threads[1] == nil then break end     -- no coroutines left: all downloads done
            i = 1                                   -- restart the loop
            connections = {}
        end
        local ok, res = coroutine.resume(threads[i])
        if not ok or not res then                   -- finished normally, or died with an error
            table.remove(threads, i)
        else
            i = i + 1
            connections[#connections + 1] = res
            if #connections == #threads then
                -- every coroutine is blocked on I/O: wait until some socket becomes readable
                socket.select(connections)
            end
        end
    end
end

--get("http://www.w3.org/TR/REC-html32.html")
--get("http:///1/将夜") --下载中文会出错


-- Queue eight 1 MB files served by a local web server, then run the scheduler
local files = {'a','b','c','d','e','f','g','h'}
for i = 1, #files do
    get("http://127.0.0.1/" .. files[i] .. ".file")
end

dispatch()

print(os.clock())    -- os.clock() is CPU time on most platforms but wall-clock time on Windows

Downloading the same eight 1 MB files took 12.324s, 14.407s, 13.883s, 15.188s and 8.887s over five runs (the Python implementation is below).

#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import urllib2
import multiprocessing
from time import clock    # wall-clock time on Windows, CPU time on POSIX

def worker(pline):
    # Use the last path component of the URL as the local file name
    rinfo = pline.split("/")
    filename, url = rinfo[-1], pline

    filename = urllib2.unquote(filename)
    try:
        f = urllib2.urlopen(url, timeout=10800)
        with open(filename, "wb") as code:
            code.write(f.read())
    except Exception:
        print sys.exc_info()
        return 0

    return 1

#
# main
#

if __name__ == "__main__":

    start = clock()
    pool_size = multiprocessing.cpu_count() * 2
    pool = multiprocessing.Pool(processes=pool_size)

    for i in ["a", "b", "c", "d", "e", "f", "g", "h"]:
        url = "http://127.0.0.1/" + i + ".file"
        pool.apply_async(worker, (url,))

    pool.close()
    pool.join()
    end = clock()
    print (end - start)

Even with the Python process pool set to a single process, running on one CPU, the measured times came out about the same (the variant is sketched below). I can't see much of a performance gain from Lua's coroutines here. My feeling is that if the job can be done with multiple processes, it is not worth the trouble of coroutines: they cannot take advantage of multiple cores, and the programming is considerably more involved.
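For reference, the single-process comparison is roughly the following minimal sketch; it assumes the same worker() function and imports as the script above, and only pins the pool size at 1 so the eight downloads go through one worker process in sequence.

if __name__ == "__main__":
    start = clock()
    pool = multiprocessing.Pool(processes=1)    # single worker process: downloads run one after another
    for i in ["a", "b", "c", "d", "e", "f", "g", "h"]:
        url = "http://127.0.0.1/" + i + ".file"
        pool.apply_async(worker, (url,))
    pool.close()
    pool.join()
    print (clock() - start)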

Appendix: LuaSocket's socket.http module makes issuing HTTP requests very convenient.

local http = require("socket.http")

local chunkbody = {}
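-- GET via the generic (table) form of http.request; response chunks are appended to chunkbody by the ltn12 table sink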
res, code, headers = http.request {
        method = "GET",
        url = "http://127.0.0.1/get.php",
        sink = ltn12.sink.table(chunkbody)
}

if res ~= nil then
        print(code)
        for k,v in pairs(headers) do
                print(k .. ":" .. v)
        end
        print(table.concat(chunkbody))
else
        io.write(code .. "\n")
end

--res, code, headers = http.request("http://127.0.0.1/post","name=admin&passwd=adminpwd")
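-- POST via the same generic form: the request body comes from an ltn12 string source, with Content-Type and Content-Length set explicitly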
local postdata = "name=admin&passwd=adminpwd"
chunkbody = {}
res, code, headers = http.request {
        method = "POST",
        url = "http://127.0.0.1/post",
        headers = {
                ["Content-Type"] = "application/x-www-form-urlencoded",
                ["Content-Length"] = string.len(postdata)
        },
        source = ltn12.source.string(postdata),
        sink = ltn12.sink.table(chunkbody)
}
if res ~= nil then
        print(code)
        for k,v in pairs(headers) do
                print(k .. ":" .. v)
        end
        print(table.concat(chunkbody))
else
        io.write(code .. "\n")
end

--[[ Sample output of the two requests above:
200
connection:close
content-type:text/html
date:Sun, 16 Nov 2014 16:02:16 GMT
transfer-encoding:chunked
x-powered-by:PHP/5.3.3
server:openresty/1.7.2.1
a and b
201
connection:close
content-type:application/octet-stream
date:Sun, 16 Nov 2014 16:02:16 GMT
transfer-encoding:chunked
server:openresty/1.7.2.1
pass
--]]