This commit is contained in:
Flatlogic Bot 2026-03-22 23:24:16 +00:00
parent 987bc24830
commit 3180c25595
12 changed files with 199 additions and 50 deletions

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 MiB

View File

@ -1,5 +1,5 @@
from django.http import JsonResponse
from core.models import Entity, Relationship
from core.models import Entity
from core.services.resolution import NetworkDiscoveryService
def search_api(request):
@ -10,14 +10,29 @@ def search_api(request):
# Perform Discovery
person = NetworkDiscoveryService.perform_osint_search(query)
if not person:
return JsonResponse({'error': 'No entity found or discovery failed'}, status=404)
# Format graph for D3.js
nodes = [{'id': person.id, 'name': person.value, 'type': person.entity_type}]
nodes = [{
'id': person.id,
'name': person.value,
'type': person.entity_type,
'photo': person.photo_url,
'code': person.identifier_code
}]
links = []
# Get related nodes
# Get related nodes from the database
for rel in person.outbound_relationships.all():
target = rel.target_entity
nodes.append({'id': target.id, 'name': target.value, 'type': target.entity_type})
nodes.append({
'id': target.id,
'name': target.value,
'type': target.entity_type,
'photo': target.photo_url,
'code': target.identifier_code
})
links.append({'source': person.id, 'target': target.id, 'type': rel.relationship_type})
return JsonResponse({'nodes': nodes, 'links': links})

View File

@ -0,0 +1,23 @@
# Generated by Django 5.2.7 on 2026-03-22 23:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Schema migration 0004: add ``identifier_code`` and ``photo_url`` to Entity."""

    dependencies = [
        # Must follow the migration that introduced IdentityProfile and
        # Entity.profile.
        ('core', '0003_identityprofile_entity_profile'),
    ]

    operations = [
        # Optional external identifier; db_index=True so lookups on the code
        # are indexed. blank/null allow existing rows to migrate unchanged.
        migrations.AddField(
            model_name='entity',
            name='identifier_code',
            field=models.CharField(blank=True, db_index=True, max_length=100, null=True),
        ),
        # Optional photo URL, rendered as the node image in the dashboard graph.
        migrations.AddField(
            model_name='entity',
            name='photo_url',
            field=models.URLField(blank=True, null=True),
        ),
    ]

View File

@ -30,6 +30,10 @@ class Entity(models.Model):
)
entity_type = models.CharField(max_length=20, choices=ENTITY_TYPES)
value = models.CharField(max_length=255, db_index=True)
# New Fields
photo_url = models.URLField(blank=True, null=True)
identifier_code = models.CharField(max_length=100, blank=True, null=True, db_index=True)
source = models.ForeignKey(Source, on_delete=models.CASCADE, related_name='entities')
profile = models.ForeignKey(IdentityProfile, on_delete=models.SET_NULL, null=True, blank=True, related_name='entities')
confidence_score = models.FloatField(default=1.0)
@ -59,4 +63,4 @@ class Relationship(models.Model):
unique_together = ('source_entity', 'target_entity', 'relationship_type')
def __str__(self):
return f"{self.source_entity} -[{self.relationship_type}]-> {self.target_entity}"
return f"{self.source_entity} -[{self.relationship_type}]-> {self.target_entity}"

View File

@ -1,40 +1,90 @@
import requests
import logging
from bs4 import BeautifulSoup
from core.models import Entity, Relationship, Source
import random
from urllib.parse import urljoin, urlparse
logger = logging.getLogger(__name__)
class WebCrawler:
    """Session-based crawler for gathering public web data without relying on APIs.

    NOTE(review): discovery is currently stubbed out by ``_simulate_discovery``;
    no HTTP traffic is issued yet, although a ``requests`` session is prepared.
    """

    def __init__(self, start_url):
        # Persistent session so headers (and any future cookies) are reused
        # across requests.
        self.start_url = start_url
        self.session = requests.Session()
        browser_headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36"
        }
        self.session.headers.update(browser_headers)

    def crawl(self, query):
        """Entry point: return discovered entities/relationships for *query*.

        Planned pipeline (not implemented yet): search-engine queries ->
        link extraction -> content parsing -> entity/relationship detection.
        """
        logger.info(f"Starting crawl for: {query}")
        return self._simulate_discovery(query)

    def _simulate_discovery(self, query):
        # Placeholder payload; to be replaced by real requests/BeautifulSoup logic.
        primary = {"type": "PERSON", "value": query, "identifier": "WEB-ID-1"}
        associate_value = "Associate of " + query
        associate = {"type": "PERSON", "value": associate_value, "identifier": "WEB-ID-2"}
        return {
            "entities": [primary, associate],
            "relationships": [
                {"source": query, "target": associate_value, "type": "ASSOCIATED_WITH"}
            ],
        }
class NetworkDiscoveryService:
    """Facade over WebCrawler that persists discovered entities and relationships."""

    @staticmethod
    def perform_osint_search(query):
        """
        Performs discovery using Web Crawling.

        Crawls for *query*, persists every discovered entity (attaching a
        generated avatar URL and its identifier code) and relationship, and
        returns the primary PERSON entity. Returns None on failure; the API
        layer maps None to a 404 response.
        """
        # NOTE(review): the pre-commit "simulated OSINT" implementation was left
        # interleaved with this one by a bad merge; the dead duplicate code has
        # been removed so only the crawler-backed path remains.
        try:
            crawler = WebCrawler(start_url="https://www.google.com")
            data = crawler.crawl(query)

            source, _ = Source.objects.get_or_create(name='Web Crawler Engine')
            person = None
            for ent_data in data.get("entities", []):
                entity, _ = Entity.objects.get_or_create(
                    entity_type=ent_data['type'],
                    value=ent_data['value'],
                    source=source
                )
                # Avatar is seeded with the entity value, so the image is
                # stable across repeated crawls.
                entity.photo_url = f"https://api.dicebear.com/7.x/pixel-art/svg?seed={ent_data['value'].replace(' ', '+')}"
                entity.identifier_code = ent_data.get('identifier', 'UNKNOWN')
                entity.save()
                if ent_data['type'] == 'PERSON':
                    person = entity

            for rel_data in data.get("relationships", []):
                # Crawler output references entities by value; skip pairs whose
                # endpoints were not persisted above.
                s_entity = Entity.objects.filter(value=rel_data['source']).first()
                t_entity = Entity.objects.filter(value=rel_data['target']).first()
                if s_entity and t_entity:
                    Relationship.objects.get_or_create(
                        source_entity=s_entity,
                        target_entity=t_entity,
                        relationship_type=rel_data['type'],
                        weight=0.9
                    )

            return person or Entity.objects.filter(value=query).first()
        except Exception as e:
            # Broad catch is deliberate: discovery is best-effort and the
            # caller treats None as "nothing found".
            logger.error(f"Error performing web-based discovery for {query}: {e}")
            return None
class EntityResolutionService:
    """Heuristics for deciding whether two identifiers refer to the same entity."""

    @staticmethod
    def resolve_identity(identifier_a, identifier_b, probability_threshold=0.8):
        """Return True when the two identifiers are treated as the same identity.

        NOTE(review): placeholder — always returns True; probabilistic matching
        against *probability_threshold* is not implemented yet. The diff had
        left a duplicated stub comment and ``return True`` pair here; the
        duplicate has been removed.
        """
        # Implementation left unchanged
        return True

View File

@ -2,6 +2,12 @@
{% load static %}
{% block content %}
<style>
.node-group { cursor: pointer; }
.node-circle { stroke: #fff; stroke-width: 2px; }
.node-text { font-size: 10px; pointer-events: none; }
</style>
<div class="container mt-5">
<h1 class="mb-4">System Dashboard</h1>
@ -24,7 +30,7 @@
<div class="card shadow-sm">
<div class="card-body">
<h5 class="card-title">Network Visualization</h5>
<div id="graphContainer" style="width: 100%; height: 500px; background: #f8f9fa; border: 1px solid #ddd;"></div>
<div id="graphContainer" style="width: 100%; height: 600px; background: #f8f9fa; border: 1px solid #ddd;"></div>
</div>
</div>
</div>
@ -36,48 +42,97 @@
document.getElementById('searchForm').addEventListener('submit', function(e) {
e.preventDefault();
const query = document.getElementById('searchInput').value;
const graphContainer = document.getElementById('graphContainer');
graphContainer.innerHTML = '<p class="p-3">Discovering network...</p>';
const graphContainer = d3.select("#graphContainer");
graphContainer.html('<p class="p-3">Discovering network...</p>');
fetch(`{% url 'core:search_api' %}?q=${encodeURIComponent(query)}`)
.then(response => response.json())
.then(data => {
graphContainer.innerHTML = ''; // clear
graphContainer.html(''); // clear
renderGraph(data);
});
});
function renderGraph(data) {
const width = 800;
const height = 500;
const height = 600;
const svg = d3.select("#graphContainer")
.append("svg")
.attr("width", "100%")
.attr("height", "100%");
.attr("viewBox", [0, 0, width, height]);
const simulation = d3.forceSimulation(data.nodes)
.force("link", d3.forceLink(data.links).id(d => d.id))
.force("charge", d3.forceManyBody())
.force("link", d3.forceLink(data.links).id(d => d.id).distance(100))
.force("charge", d3.forceManyBody().strength(-300))
.force("center", d3.forceCenter(width / 2, height / 2));
const link = svg.append("g")
.selectAll("line")
.data(data.links)
.enter().append("line")
.attr("stroke", "#999");
.join("line")
.attr("stroke", "#999")
.attr("stroke-width", 1);
const node = svg.append("g")
.selectAll("circle")
.selectAll("g")
.data(data.nodes)
.enter().append("circle")
.attr("r", 10)
.attr("fill", d => d.type === 'PERSON' ? 'red' : 'blue');
.join("g")
.attr("class", "node-group")
.call(d3.drag()
.on("start", dragstarted)
.on("drag", dragged)
.on("end", dragended));
node.append("circle")
.attr("r", 20)
.attr("class", "node-circle")
.attr("fill", d => d.type === 'PERSON' ? '#e74c3c' : '#3498db');
// Add Image if available
node.filter(d => d.photo)
.append("image")
.attr("xlink:href", d => d.photo)
.attr("x", -15)
.attr("y", -15)
.attr("width", 30)
.attr("height", 30)
.attr("clip-path", "circle(15px)");
node.append("text")
.attr("dy", 35)
.attr("text-anchor", "middle")
.attr("class", "node-text")
.text(d => d.name);
node.append("text")
.attr("dy", 48)
.attr("text-anchor", "middle")
.attr("class", "node-text")
.attr("fill", "#666")
.text(d => d.code || '');
simulation.on("tick", () => {
link.attr("x1", d => d.source.x).attr("y1", d => d.source.y)
.attr("x2", d => d.target.x).attr("y2", d => d.target.y);
node.attr("cx", d => d.x).attr("cy", d => d.y);
node.attr("transform", d => `translate(${d.x},${d.y})`);
});
function dragstarted(event) {
if (!event.active) simulation.alphaTarget(0.3).restart();
event.subject.fx = event.subject.x;
event.subject.fy = event.subject.y;
}
function dragged(event) {
event.subject.fx = event.x;
event.subject.fy = event.y;
}
function dragended(event) {
if (!event.active) simulation.alphaTarget(0);
event.subject.fx = null;
event.subject.fy = null;
}
}
</script>
{% endblock %}
{% endblock %}

View File

@ -1,3 +1,5 @@
Django==5.2.7
mysqlclient==2.2.7
python-dotenv==1.1.1
beautifulsoup4==4.13.3
requests==2.32.3