I recently wrote a Java web crawler with jsoup. A fetch method runs fine from main, but when the same method is called from an action it no longer behaves correctly. How can I fix this?
The code is as follows:
String url = "http://www.sogou.com/web?sut=1374&lkt=1%2C1386588673481%2C1386588673481&ie=utf8&sst0=1386588674552&p=40040100&dp=1&w=01019900&dr=1&_asf=www.sogou.com&_ast=1386589056&query=java网页爬虫&page=1";
String ua = "Mozilla/5.0 (Windows NT 6.1; rv:25.0) Gecko/20100101 Firefox/25.0";
try {
    // Fetch the search results page with a browser-like User-Agent and print the HTML
    Document doc = Jsoup.connect(url).header("User-Agent", ua).get();
    System.out.println(doc.html());
} catch (IOException e) {
    e.printStackTrace();
}
When this runs from main, the crawled page is identical to what I get by opening the URL in a browser.
When it runs from the action, the fetched HTML appears to be just the Sogou homepage.
Any help would be appreciated.
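One way to narrow this down is to inspect the raw response in both environments instead of only printing the parsed HTML. Below is a minimal sketch using jsoup's Connection.execute(), which exposes the final URL and status code after any redirects (url and ua are the variables from the snippet above; org.jsoup.Connection must be imported):

try {
    // execute() returns the raw response, so the final URL and status code
    // can be inspected before the body is parsed into a Document.
    Connection.Response res = Jsoup.connect(url)
            .userAgent(ua)
            .timeout(10 * 1000)
            .execute();
    System.out.println("final URL: " + res.url());        // reveals a redirect to the homepage, if any
    System.out.println("status: " + res.statusCode());
    System.out.println("title: " + res.parse().title());  // quick check of which page actually came back
} catch (IOException e) {
    e.printStackTrace();
}

Comparing this output between the main run and the action run shows whether the request is being redirected or whether the two runs are sending different requests in the first place.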
1 answer
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class JsoupTest {

    static String url = "http://www.sogou.com/web?sut=1374&lkt=1%2C1386588673481%2C1386588673481&ie=utf8&sst0=1386588674552&p=40040100&dp=1&w=01019900&dr=1&_asf=www.sogou.com&_ast=1386589056&query=java网页爬虫&page=1";

    public static void main(String[] args) {
        Document doc = readUrlFist(url);
        write(doc);
    }

    // Write the fetched document to a file on the desktop.
    public static void write(Document doc) {
        try {
            FileOutputStream fos = new FileOutputStream("C:\\Documents and Settings\\Administrator\\桌面\\a.html");
            // Write in UTF-8 explicitly so the Chinese content is not garbled by the platform default charset.
            OutputStreamWriter osw = new OutputStreamWriter(fos, "UTF-8");
            BufferedWriter bw = new BufferedWriter(osw);
            bw.write(doc.toString());
            bw.flush();
            // Closing the outermost writer also closes the wrapped streams.
            bw.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Fetch the page with a browser-like User-Agent; retry on timeout or DNS failure.
    public static Document readUrlFist(String url) {
        Document doc = null;
        Connection conn = Jsoup.connect(url);
        conn.header("User-Agent",
                "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.4; en-US; rv:1.9.2.2) Gecko/20100316 Firefox/3.6.2 Googlebot/2.1");
        try {
            doc = conn.timeout(200 * 1000).get();
        } catch (IOException e) {
            e.printStackTrace();
            // Note: this retries indefinitely while the host is unreachable or the request keeps timing out.
            if ((e instanceof UnknownHostException)
                    || (e instanceof SocketTimeoutException)) {
                doc = readUrlFist(url);
            }
        }
        return doc;
    }
}
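The class above fetches the page and saves it to disk, but it does not by itself explain why the same call inside the action only returns the Sogou homepage. One possible cause, offered here as an assumption rather than something confirmed in the thread, is that the Chinese keyword in the query= parameter ends up encoded with a different charset once the URL string is assembled inside the web application, and Sogou then falls back to its homepage. The sketch below builds the query parameter with an explicit UTF-8 percent-encoding so the request no longer depends on the JVM's default charset (the class and method names are made up for the example, and trimming the URL down to the ie, query, and page parameters is also an assumption):

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;

public class SogouFetch {

    public static void main(String[] args) throws IOException {
        System.out.println(fetch("java网页爬虫", 1).title());
    }

    // Percent-encode the keyword as UTF-8 so the request is identical
    // regardless of the platform or container default charset.
    public static Document fetch(String keyword, int page) throws IOException {
        String encoded;
        try {
            encoded = URLEncoder.encode(keyword, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException(e); // UTF-8 is always available
        }
        String url = "http://www.sogou.com/web?ie=utf8&query=" + encoded + "&page=" + page;
        return Jsoup.connect(url)
                .userAgent("Mozilla/5.0 (Windows NT 6.1; rv:25.0) Gecko/20100101 Firefox/25.0")
                .timeout(10 * 1000)
                .get();
    }
}

If the action still comes back with the homepage after this change, the URL encoding is probably not the difference, and the execute() check shown earlier is the next place to look.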
This answer was accepted by the asker.