234 lines
7.3 KiB
C#
234 lines
7.3 KiB
C#
// /**
|
|
// * File: Crawler.cs
|
|
// * Author: haraldwolff
|
|
// *
|
|
// * This file and its content is copyrighted by the Author and / or copyright holder.
|
|
// * Any use without proper permission is illegal and may lead to legal actions.
|
|
// *
|
|
// *
|
|
// **/
|
|
using System;
|
|
using ln.skyscanner.threads;
|
|
using System.Net;
|
|
using System.Collections.Generic;
|
|
using System.IO;
|
|
using ln.logging;
|
|
using ln.types;
|
|
using System.Linq;
|
|
using ln.types.serialize;
|
|
using ln.skyscanner.entities;
|
|
using System.Net.NetworkInformation;
|
|
using ln.snmp;
|
|
using ln.snmp.endpoint;
|
|
using ln.snmp.rfc1213;
|
|
using ln.perfdb;
|
|
using ln.perfdb.storage;
|
|
using ln.skyscanner.check;
|
|
using System.Threading;
|
|
|
|
namespace ln.skyscanner.crawl
|
|
{
|
|
public class Crawler : IPerfFileProvider
{
    /// <summary>Root directory holding all crawler state; created in the constructor if missing.</summary>
    public String BasePath { get; set; }

    /// <summary>Directory below <see cref="BasePath"/> where the PerfDB files are stored.</summary>
    public String PerfPath => Path.Combine(BasePath, "perfdb");

    // Set by Stop(), read by the checker thread. volatile so the checker
    // observes the shutdown request promptly (FIX: was a plain field).
    volatile bool stopping;

    // Worker pool that executes crawl jobs and check runs.
    Pool crawlThreadPool = new Pool();

    public PoolThreadState[] ThreadStates => crawlThreadPool.ThreadStates;
    public int QueuedJobs => crawlThreadPool.QueuedJobs;

    // Disk-backed crawl state (nodes, subnets, registered checks).
    public DiskObject<CrawlPool> _CrawlPool;
    public CrawlPool CrawlPool => _CrawlPool.Instance;

    public SNMPEngine SNMPEngine { get; }

    // Background thread that schedules due checks onto the worker pool.
    Thread threadChecker;

    /// <summary>
    /// Creates the crawler: prepares the working directories, loads the
    /// disk-backed crawl pool, sizes the worker pool and starts the
    /// background check scheduler.
    /// </summary>
    public Crawler()
    {
        BasePath = Path.GetFullPath("./crawler");

        if (!Directory.Exists(BasePath))
            Directory.CreateDirectory(BasePath);
        if (!Directory.Exists(PerfPath))
            Directory.CreateDirectory(PerfPath);

        SNMPEngine = new SNMPEngine();

        _CrawlPool = new DiskObject<CrawlPool>(String.Format("{0}/pool", BasePath));

        crawlThreadPool.NumThreads = 12;

        threadChecker = new Thread(Checker);
        threadChecker.Start();
    }

    /// <summary>
    /// Stops the crawler: signals the checker thread and waits for it to
    /// exit, then shuts down the worker pool and the SNMP engine and
    /// persists the crawl pool to disk.
    /// </summary>
    public void Stop()
    {
        stopping = true;
        // FIX: join the checker before closing the pool so it cannot
        // enqueue a job into an already-closed pool.
        threadChecker.Join();
        crawlThreadPool.Close();
        SNMPEngine.Close();
        _CrawlPool.Save();
    }

    /// <summary>Queues an arbitrary job on the crawler's worker pool.</summary>
    public void Enqueue(JobDelegate job)
    {
        crawlThreadPool.Enqueue(job);
    }

    /// <summary>Queues a crawl of a single host.</summary>
    public void Crawl(IPAddress host)
    {
        crawlThreadPool.Enqueue(() => crawlHost(host));
    }

    // Probes one host: ICMP ping first, then SNMPv3 interface discovery.
    // Every subnet found on an interface that has not been scanned within
    // the last hour is queued for a subnet scan.
    private void crawlHost(IPAddress host)
    {
        // FIX: Ping is IDisposable and was previously never disposed.
        using (Ping ping = new Ping())
        {
            PingReply pingReply = ping.Send(host, 500);

            if (pingReply.Status != IPStatus.Success)
            {
                Logging.Log(LogLevel.INFO, "Host not reachable: {0} {1}", host, pingReply.Status);
                return;
            }
        }

        // NOTE(review/security): SNMPv3 credentials are hard-coded here;
        // they should be moved to configuration.
        USMEndpoint v3endpoint = new USMEndpoint(SNMPEngine, new IPEndPoint(host, 161));
        v3endpoint.AuthMethod = SnmpV3AuthMethod.SHA;
        v3endpoint.AuthKeyPhrase = "qVy3hnZJ2fov";
        v3endpoint.Username = "skytron";

        try
        {
            RFC1213.Interface[] interfaces = RFC1213.GetInterfaces(v3endpoint);

            foreach (RFC1213.Interface netIf in interfaces)
            {
                Logging.Log(LogLevel.INFO, "Interface: {0}", netIf);

                foreach (CIDR ip in netIf.IPAddresses)
                {
                    Subnet subnet = CrawlPool.GetSubnet(ip.Network);
                    // Rescan a subnet at most once per hour.
                    if ((DateTime.Now - subnet.LastScanned).Hours >= 1)
                    {
                        Enqueue(() => crawlSubnet(ip.Network));
                    }
                }
            }
        }
        catch (TimeoutException)
        {
            Logging.Log(LogLevel.INFO, "Host: {0} SNMP communication timed out.", host);
        }
    }

    /// <summary>
    /// Scans every address of <paramref name="subnet"/> with four pings each.
    /// Reachable hosts not seen for over an hour are queued for a full host
    /// crawl, and a Hostalive check is registered once per node.
    /// </summary>
    public void crawlSubnet(CIDR subnet)
    {
        const int pingCount = 4;

        Subnet sub = CrawlPool.GetSubnet(subnet);
        sub.LastScanned = DateTime.Now;

        Logging.Log(LogLevel.INFO, "Scanning {0}", subnet);

        // FIX: dispose the Ping instance (was leaked).
        using (Ping ping = new Ping())
        {
            foreach (CIDR ip in subnet)
            {
                long roundTripTime = 0;
                int success = 0;

                for (int n = 0; n < pingCount; n++)
                {
                    PingReply reply = ping.Send(ip, 250);
                    if (reply.Status == IPStatus.Success)
                    {
                        success++;
                        roundTripTime += reply.RoundtripTime;
                    }
                }

                if (success > 0)
                {
                    // Average round trip over the successful probes only.
                    roundTripTime /= success;
                    // FIX: message claimed "/10" although only 4 probes are sent.
                    Logging.Log(LogLevel.INFO, "IP {0} reachable ({1}/{2}) {3}ms", ip, success, pingCount, roundTripTime);

                    Node node = CrawlPool.GetNode(ip);
                    // Re-crawl a node at most once per hour.
                    if ((DateTime.Now - node.LastSeen).Hours > 0)
                    {
                        Enqueue(() => crawlHost(ip));
                    }

                    string checkID = Hostalive.CalcCheckName(node);
                    if (!CrawlPool.Checks.ContainsKey(checkID))
                    {
                        Hostalive hostalive = new Hostalive(node);
                        CrawlPool.Checks.Add(hostalive.CheckID, hostalive);
                    }
                }
                else
                {
                    Logging.Log(LogLevel.INFO, "IP {0} unreachable", ip);
                }
            }
        }
    }

    /** Checks **/

    // Background scheduler: enqueues every check whose NextCheck time has
    // arrived, then sleeps one second. Runs until Stop() sets 'stopping'.
    private void Checker()
    {
        while (!stopping)
        {
            DateTime now = DateTime.Now;

            // Snapshot the check list; crawlSubnet() may add checks
            // concurrently from pool threads.
            Check[] checks = CrawlPool.Checks.Values.ToArray();

            foreach (Check check in checks)
            {
                if (check.NextCheck <= now)
                {
                    check.Mark();
                    Enqueue(() => check.Run(this));
                }
            }

            // FIX: previously slept only when no checks existed, busy-spinning
            // at 100% CPU as soon as a single check was registered. Poll once
            // per second instead.
            Thread.Sleep(1000);
        }
    }

    /** PerfDB **/

    // Cache of open perf files by name. Guarded by a lock on the dictionary
    // itself, because checks run on multiple pool threads and call
    // GetPerfFile() concurrently.
    Dictionary<string, PerfFile> perfFiles = new Dictionary<string, PerfFile>();

    /// <summary>
    /// Returns the cached PerfFile for <paramref name="name"/>, creating,
    /// opening and initializing it with the default section layout on
    /// first use.
    /// </summary>
    public PerfFile GetPerfFile(string name)
    {
        lock (perfFiles)
        {
            PerfFile perfFile;
            // FIX: single TryGetValue lookup instead of ContainsKey + indexer.
            if (perfFiles.TryGetValue(name, out perfFile))
                return perfFile;

            perfFile = new PerfFile(Path.Combine(PerfPath, String.Format("{0}.perf", name)));
            perfFile.Open();

            perfFiles.Add(name, perfFile);

            if (perfFile.FirstSection == null)
            {
                // Default aggregation pyramid: 60s raw samples cascading up
                // to 3-hour averages.
                PerfFile.PerfFileSection section = new PerfFile.PerfFileSection(perfFile, null, 1440, 60, AggregationMethod.AVERAGE);
                section = new PerfFile.PerfFileSection(perfFile, section, 1728, 300, AggregationMethod.AVERAGE);
                section = new PerfFile.PerfFileSection(perfFile, section, 2016, 900, AggregationMethod.AVERAGE);
                section = new PerfFile.PerfFileSection(perfFile, section, 1344, 3600, AggregationMethod.AVERAGE);
                section = new PerfFile.PerfFileSection(perfFile, section, 1344, 10800, AggregationMethod.AVERAGE);
            }
            return perfFile;
        }
    }
}
|
|
}
|