Android 在解析网页之前获取其大小

Android 在解析网页之前获取其大小（标签：android、jsoup）。我正在尝试使用 jsoup 解析网页，使用以下代码一切正常：

我正在尝试使用jsoup解析网页,使用以下代码一切正常:

class DownloadSearchResultsTask extends AsyncTask<String, Integer, ArrayList> {
    private String link = "link";
    private String title = "title";
    private String vote = "vote";
    private String age = "age";
    private String size = "size";

    private String seeders = "seeders";
    private String leechers = "leachers";


    @Override
    protected void onPreExecute() {
        // TODO Auto-generated method stub 
        super.onPreExecute();
    }


    @Override
    protected ArrayList doInBackground(String... params) {
        // TODO Auto-generated method stub ArrayList

        <HashMap<String, String>> searchResult = new ArrayList<HashMap<String, String>>();
        HashMap<String, String> map;
        String link, title, vote, age, size, seeders, leechers;

        try {
            HttpURLConnection httpURLConnection = (HttpURLConnection) new URL("http://www.facebook.com").openConnection();
            Log.d("VIVZ", httpURLConnection.getContentLength() + "");
        }
        catch( MalformedURLException e1 ) {
            // TODO Auto-generated catch block 
            e1.printStackTrace();
        }
        catch( IOException e1 ) {
            // TODO Auto-generated catch block 
            e1.printStackTrace();
        }

        Document mDocument;

        try {
            long l1 = System.nanoTime();
            Log.e("VIVZ", l1 + "");

            mDocument = Jsoup.connect(params[0]).userAgent("Mozilla/5.0 (Windows; U; WindowsNT 5.1; en-US; rv1.8.1.6) Gecko/20070725 Firefox/2.0.0.6").referrer("http://www.google.com").get();

            long l2 = System.nanoTime();
            Log.e("VIVZ", (l2 - l1) + "");
            Elements mResults = mDocument.select("div.results dl");

            for( Element result : mResults ) {
                map = new HashMap<String, String>();
                Elements elements = result.select("dt a");

                for( Element linkAndTitle : elements ) {

                    link = linkAndTitle.attr("abs:href");
                    title = linkAndTitle.text();
                    map.put(this.link, link);
                    map.put(this.title, title);
                }

                elements = result.select("dd span.v");

                for( Element v : elements ) {
                    vote = v.text();
                    map.put(this.vote, vote);
                }

                elements = result.select("dd span.a");

                for( Element a : elements ) {
                    age = a.text();
                    map.put(this.age, age);
                }

                elements = result.select("dd span.s");

                for( Element s : elements ) {
                    size = s.text();
                    map.put(this.size, size);
                }

                elements = result.select("dd span.u");

                for( Element u : elements ) {
                    seeders = u.text();
                    map.put(this.seeders, seeders);
                }

                elements = result.select("dd span.d");

                for( Element d : elements ) {
                    leechers = d.text();
                    map.put(this.leechers, leechers);
                }

                searchResult.add(map);

            }
            Log.e("VIVZ", searchResult.toString());

            return searchResult;
        }
        catch( IOException e ) {
            // TODO Auto-generated catch block 
            Log.e("VIVZ", e + "");

        }
        return null;
    }


    @Override
    protected void onPostExecute(ArrayList result) {
        // TODO Auto-generated method stub 
        super.onPostExecute(result);
    }
}
类下载SearchResultsTask扩展了AsyncTask{
私有字符串link=“link”;
私有字符串title=“title”;
私有字符串vote=“投票”;
私有字符串age=“age”;
私有字符串size=“size”;
专用线播种机=“播种机”;
私有字符串leechers=“leachers”;
@凌驾
受保护的void onPreExecute(){
//TODO自动生成的方法存根
super.onPreExecute();
}
@凌驾
受保护的ArrayList doInBackground(字符串…参数){
//TODO自动生成的方法存根ArrayList
searchResult=newarraylist();
HashMap图;
字符串链接,标题,投票,年龄,大小,种子,leechers;
试一试{
HttpURLConnection HttpURLConnection=(HttpURLConnection)新URL(“http://www.facebook.com”).openConnection();
Log.d(“VIVZ”,httpURLConnection.getContentLength()+”);
}
捕获(格式错误的异常e1){
//TODO自动生成的捕捉块
e1.printStackTrace();
}
捕获(IOE1异常){
//TODO自动生成的捕捉块
e1.printStackTrace();
}
文件资料;
试一试{
long l1=System.nanoTime();
Log.e(“VIVZ”,l1+);
mDocument=Jsoup.connect(参数[0]).userAgent(“Mozilla/5.0(Windows;U;WindowsNT 5.1;en-US;rv1.8.1.6)Gecko/20070725 Firefox/2.0.0.6”)。推荐人(“http://www.google.com).get();
long l2=System.nanoTime();
Log.e(“VIVZ”,(l2-l1)+”);
元素mResults=mDocument.select(“div.results dl”);
用于(元素结果:mResults){
map=新的HashMap();
元素=结果。选择(“dt a”);
对于(元素链接和标题:元素){
link=linkAndTitle.attr(“abs:href”);
title=linkAndTitle.text();
map.put(this.link,link);
地图。放置(这个。标题,标题);
}
元素=结果。选择(“dd span.v”);
对于(元素v:元素){
投票=v.text();
地图。放置(this.vote,vote);
}
元素=结果。选择(“dd span.a”);
对于(元素a:元素){
年龄=a.text();
map.put(this.age,age);
}
元素=结果。选择(“dd span.s”);
对于(元素s:元素){
大小=s.text();
地图。放置(这个。大小,大小);
}
元素=结果。选择(“dd span.u”);
对于(元素u:元素){
播种机=u.text();
地图。放置(这个。播种机,播种机);
}
元素=结果。选择(“dd span.d”);
用于(元素d:元素){
leechers=d.text();
map.put(this.leechers,leechers);
}
searchResult.add(map);
}
Log.e(“VIVZ”,searchResult.toString());
返回搜索结果;
}
捕获(IOE异常){
//TODO自动生成的捕捉块
Log.e(“VIVZ”,e+);
}
返回null;
}
@凌驾
受保护的void onPostExecute(ArrayList结果){
//TODO自动生成的方法存根
super.onPostExecute(结果);
}
}
问题是,我想在解析之前获得页面的大小,并显示一个确定的进度条。请帮助我


提前致谢。——【回答】可以读取 HTTP 响应的 Content-Length 头来获得页面大小。

实际上,它可以直接使用Jsoup,如下所示:

// Fetch the response first, read Content-Length, then parse the body.
Connection conn = Jsoup.connect(url);
conn.userAgent("xxx");
Connection.Response response = conn.execute();
// FIX: Connection.Response.header(...) returns a String, not an int — the
// original assigned it directly to int, which does not compile. The header
// may also be absent (e.g. chunked transfer), so default to -1 in that case.
String lengthHeader = response.header("Content-Length");
int length = (lengthHeader != null) ? Integer.parseInt(lengthHeader) : -1;
Document doc = response.parse();

我该怎么做??