v2.0.0
Loading...
Searching...
No Matches
brainview.cpp
Go to the documentation of this file.
1//=============================================================================================================
36
37//=============================================================================================================
38// INCLUDES
39//=============================================================================================================
40
41
42#include "brainview.h"
43#include "brainrenderer.h"
47#include "core/surfacekeys.h"
48#include "core/dataloader.h"
49#include "input/raypicker.h"
53
54#include <rhi/qrhi.h>
60
61#include <Eigen/Dense>
62#include <QMatrix4x4>
63#include <QDebug>
64#include <QLabel>
65#include <QFrame>
66#include <QMouseEvent>
67#include <QKeyEvent>
68#include <QWheelEvent>
69#include <QResizeEvent>
70#include <QSettings>
71#include <QCoreApplication>
72#include <QMenu>
73#include <QStandardItem>
74#include <algorithm>
75#include <cmath>
76
77#include <mne/mne_bem.h>
81
82using namespace FIFFLIB;
83
84//=============================================================================================================
85// DEFINE MEMBER METHODS
86//=============================================================================================================
87
88//=============================================================================================================
89
/// Constructs the brain visualization widget.
///
/// Selects the platform-appropriate RHI backend, creates all HUD overlay
/// labels (FPS, per-viewport name/info, region), the viewport separator
/// frames, restores persisted multi-view settings, and wires the
/// SourceEstimateManager / RtSensorStreamManager signals into this view.
///
/// @param parent  Optional parent widget (standard Qt ownership).
BrainView::BrainView(QWidget *parent)
    : QRhiWidget(parent)
{
    setMinimumSize(800, 600);
    setSampleCount(1);
    setAutoRenderTarget(false); // We manage our own dual render targets

    // Pick the graphics API per platform; fall back to OpenGL elsewhere.
#if defined(WASMBUILD) || defined(__EMSCRIPTEN__)
    setApi(Api::OpenGL); // WebGL 2 (OpenGL ES 3.0) on WASM
#elif defined(Q_OS_MACOS) || defined(Q_OS_IOS)
    setApi(Api::Metal);
#elif defined(Q_OS_WIN)
    setApi(Api::Direct3D11);
#else
    setApi(Api::OpenGL);
#endif

    setMouseTracking(true); // Enable hover events

    // No periodic update timer — redraws are demand-driven via update().
    // Every setter that changes scene state calls m_sceneDirty = true
    // followed by update(), which coalesces into one render() per frame.

    // FPS / vertex-count HUD overlay, top-right corner.
    m_fpsLabel = new QLabel(this);
    m_fpsLabel->setStyleSheet("color: white; font-weight: bold; font-family: monospace; font-size: 13px; background: transparent; padding: 5px;");
    m_fpsLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
    m_fpsLabel->setAlignment(Qt::AlignRight | Qt::AlignTop);
    m_fpsLabel->setText("FPS: --.-\nVertices: 0");
    m_fpsLabel->adjustSize();
    m_fpsLabel->move(width() - m_fpsLabel->width() - 10, 10);
    m_fpsLabel->raise();

    // Info overlay shown only in single-view mode; hidden until populated.
    m_singleViewInfoLabel = new QLabel(this);
    m_singleViewInfoLabel->setStyleSheet("color: white; font-family: monospace; font-size: 10px; background: rgba(0,0,0,110); border-radius: 3px; padding: 2px 4px;");
    m_singleViewInfoLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
    m_singleViewInfoLabel->setAlignment(Qt::AlignLeft | Qt::AlignTop);
    m_singleViewInfoLabel->setText("");
    m_singleViewInfoLabel->adjustSize();
    m_singleViewInfoLabel->hide();

    m_fpsTimer.start();

    // Label for the hovered/picked anatomical region, top-left corner.
    m_regionLabel = new QLabel(this);
    m_regionLabel->setStyleSheet("color: white; font-weight: bold; font-family: sans-serif; font-size: 16px; background: transparent; padding: 5px;");
    m_regionLabel->setText("");
    m_regionLabel->move(10, 10);
    m_regionLabel->resize(300, 30);
    m_regionLabel->hide();

    // ── Initialise viewport labels (sized to kDefaultViewportCount) ────────
    m_subViews.resize(kDefaultViewportCount);
    m_viewportNameLabels.resize(kDefaultViewportCount, nullptr);
    m_viewportInfoLabels.resize(kDefaultViewportCount, nullptr);
    for (int i = 0; i < kDefaultViewportCount; ++i) {
        m_subViews[i] = SubView::defaultForIndex(i);

        // Per-viewport name label (preset name, e.g. "Left lateral").
        m_viewportNameLabels[i] = new QLabel(this);
        m_viewportNameLabels[i]->setStyleSheet("color: white; font-weight: bold; font-family: sans-serif; font-size: 12px; background: transparent; padding: 2px 4px;");
        m_viewportNameLabels[i]->setAttribute(Qt::WA_TransparentForMouseEvents);
        m_viewportNameLabels[i]->setText(multiViewPresetName(m_subViews[i].preset));
        m_viewportNameLabels[i]->adjustSize();
        m_viewportNameLabels[i]->hide();

        // Per-viewport info label (translucent dark background).
        m_viewportInfoLabels[i] = new QLabel(this);
        m_viewportInfoLabels[i]->setStyleSheet("color: white; font-family: monospace; font-size: 10px; background: rgba(0,0,0,110); border-radius: 3px; padding: 2px 4px;");
        m_viewportInfoLabels[i]->setAttribute(Qt::WA_TransparentForMouseEvents);
        m_viewportInfoLabels[i]->setAlignment(Qt::AlignLeft | Qt::AlignTop);
        m_viewportInfoLabels[i]->setText("");
        m_viewportInfoLabels[i]->adjustSize();
        m_viewportInfoLabels[i]->hide();
    }

    // Thin frames drawn between multi-view panes; hidden in single view.
    m_verticalSeparator = new QFrame(this);
    m_verticalSeparator->setFrameShape(QFrame::NoFrame);
    m_verticalSeparator->setAttribute(Qt::WA_TransparentForMouseEvents);
    m_verticalSeparator->hide();

    m_horizontalSeparator = new QFrame(this);
    m_horizontalSeparator->setFrameShape(QFrame::NoFrame);
    m_horizontalSeparator->setAttribute(Qt::WA_TransparentForMouseEvents);
    m_horizontalSeparator->hide();

    // Derive the separator colour from the palette; force some translucency
    // so the separators do not fully occlude the scene underneath.
    QColor sepColor = palette().color(QPalette::Midlight);
    if (sepColor.alpha() == 255) {
        sepColor.setAlpha(180);
    }
    const QString sepStyle = QString("background-color: rgba(%1,%2,%3,%4);")
                                 .arg(sepColor.red())
                                 .arg(sepColor.green())
                                 .arg(sepColor.blue())
                                 .arg(sepColor.alpha());
    m_verticalSeparator->setStyleSheet(sepStyle);
    m_horizontalSeparator->setStyleSheet(sepStyle);

    loadMultiViewSettings();
    updateViewportSeparators();
    updateOverlayLayout();

    // Setup Debug Pointer: Semi-transparent sphere for subtle intersection indicator
    m_debugPointerSurface = MeshFactory::createSphere(QVector3D(0, 0, 0), 0.002f,
                                                      QColor(200, 255, 255, 160));

    // ── Connect SourceEstimateManager signals ─────────────────────────
    // NOTE(review): the receiver arguments of the thresholdsUpdated,
    // timePointChanged and loadingProgress connect() calls below appear
    // truncated in this listing — confirm the slot names against the
    // original file before building.
    connect(&m_sourceManager, &SourceEstimateManager::loaded,
            this, &BrainView::onSourceEstimateLoaded);
    connect(&m_sourceManager, &SourceEstimateManager::thresholdsUpdated,
    connect(&m_sourceManager, &SourceEstimateManager::timePointChanged,
    connect(&m_sourceManager, &SourceEstimateManager::loadingProgress,
    connect(&m_sourceManager, &SourceEstimateManager::realtimeColorsAvailable,
            this, &BrainView::onRealtimeColorsAvailable);

    // RtSensorStreamManager → BrainView
    connect(&m_sensorStreamManager, &RtSensorStreamManager::colorsAvailable,
            this, &BrainView::onSensorStreamColorsAvailable);
}
208
209//=============================================================================================================
210
212{
213 saveMultiViewSettings();
214}
215
216//=============================================================================================================
217
219{
220 m_model = model;
221 connect(m_model, &BrainTreeModel::rowsInserted, this, &BrainView::onRowsInserted);
222 connect(m_model, &BrainTreeModel::dataChanged, this, &BrainView::onDataChanged);
223
224 // Initial population if not empty?
225 // For now assuming we set model before adding data or iterate.
226}
227
228//=============================================================================================================
229
230void BrainView::setInitialCameraRotation(const QQuaternion &rotation)
231{
232 m_cameraRotation = rotation;
233 saveMultiViewSettings();
234 m_sceneDirty = true; update();
235}
236
237void BrainView::onRowsInserted(const QModelIndex &parent, int first, int last)
238{
239
240 if (!m_model) return;
241
242 for (int i = first; i <= last; ++i) {
243 QModelIndex index = m_model->index(i, 0, parent);
244 QStandardItem* item = m_model->itemFromIndex(index);
245
246 AbstractTreeItem* absItem = dynamic_cast<AbstractTreeItem*>(item);
247
248 // Handle FsSurface Items
250 SurfaceTreeItem* surfItem = static_cast<SurfaceTreeItem*>(absItem);
251 auto brainSurf = std::make_shared<BrainSurface>();
252
253 // Load geometry from item
254 brainSurf->fromSurface(surfItem->surfaceData());
255
256 // Determine Hemisphere from Parent
257 if (absItem->parent()) {
258 QString parentText = absItem->parent()->text();
259 if (parentText == "lh") brainSurf->setHemi(0);
260 else if (parentText == "rh") brainSurf->setHemi(1);
261 }
262
263 // Set properties
264 brainSurf->setVisible(surfItem->isVisible());
265
266 // Brain surfaces (pial, white, inflated, etc.) are brain tissue
267 brainSurf->setTissueType(BrainSurface::TissueBrain);
268
269 m_itemSurfaceMap[item] = brainSurf;
270
271 // Key generation: "hemi_type" e.g. "lh_pial"
272 QString key;
273 if (absItem->parent()) {
274 key = absItem->parent()->text() + "_" + surfItem->text();
275 } else {
276 key = surfItem->text();
277 }
278 m_surfaces[key] = brainSurf;
279
280 // Check for annotations
281 if (!surfItem->annotationData().isEmpty()) {
282 brainSurf->addAnnotation(surfItem->annotationData());
283 }
284
285 // Set active if first
286 if (!m_activeSurface) {
287 m_activeSurface = brainSurf;
288 m_activeSurfaceType = surfItem->text();
289 }
290 }
291 // Check for BEM Item (using dynamic_cast for safety)
292 BemTreeItem* bemItem = dynamic_cast<BemTreeItem*>(absItem);
293 if (bemItem) {
294 const MNELIB::MNEBemSurface &bemSurfData = bemItem->bemSurfaceData();
295
296 auto brainSurf = std::make_shared<BrainSurface>();
297
298 // Load BEM geometry with color from item
299 brainSurf->fromBemSurface(bemSurfData, bemItem->color());
300
301 brainSurf->setVisible(bemItem->isVisible());
302
303 // Set tissue type based on surface name
304 QString surfName = bemItem->text().toLower();
305 if (surfName.contains("head") || surfName.contains("skin") || surfName.contains("scalp")) {
306 brainSurf->setTissueType(BrainSurface::TissueSkin);
307 } else if (surfName.contains("outer") && surfName.contains("skull")) {
308 brainSurf->setTissueType(BrainSurface::TissueOuterSkull);
309 } else if (surfName.contains("inner") && surfName.contains("skull")) {
310 brainSurf->setTissueType(BrainSurface::TissueInnerSkull);
311 } else if (surfName.contains("skull")) {
312 brainSurf->setTissueType(BrainSurface::TissueOuterSkull); // Default skull to outer
313 } else if (surfName.contains("brain")) {
314 brainSurf->setTissueType(BrainSurface::TissueBrain);
315 }
316
317 m_itemSurfaceMap[item] = brainSurf;
318
319 // Legacy map support (Use item text e.g. "bem_head")
320 m_surfaces["bem_" + bemItem->text()] = brainSurf;
321 }
322
323 // Handle Sensor Items
324 if (absItem && absItem->type() == AbstractTreeItem::itemTypeId(AbstractTreeItem::SensorItem)) {
325 SensorTreeItem* sensItem = static_cast<SensorTreeItem*>(absItem);
326
327 std::shared_ptr<BrainSurface> brainSurf;
328
329 QString parentText = "";
330 if (sensItem->parent()) parentText = sensItem->parent()->text();
331
332 if (parentText.contains("MEG/Grad") && sensItem->hasOrientation()) {
333 brainSurf = MeshFactory::createBarbell(sensItem->position(), sensItem->orientation(),
334 sensItem->color(), sensItem->scale());
335 } else if (parentText.contains("MEG/Mag") && sensItem->hasOrientation()) {
336 brainSurf = MeshFactory::createPlate(sensItem->position(), sensItem->orientation(),
337 sensItem->color(), sensItem->scale());
338 } else {
339 // EEG and other sensors: smooth icosphere
340 brainSurf = MeshFactory::createSphere(sensItem->position(), sensItem->scale(),
341 sensItem->color());
342 }
343
344 brainSurf->setVisible(sensItem->isVisible());
345 m_itemSurfaceMap[item] = brainSurf;
346
347 // Apply Head-to-MRI transformation if available
348 // Note: meg positions in info might already be head-space, but check if we need this global trans
349 if (!m_headToMriTrans.isEmpty()) {
350 QMatrix4x4 m;
351 if (m_applySensorTrans) {
352 m = SURFACEKEYS::toQMatrix4x4(m_headToMriTrans.trans);
353 }
354 brainSurf->applyTransform(m);
355 }
356
357 // Legacy map support
358 const QString keyPrefix = SURFACEKEYS::sensorParentToKeyPrefix(parentText);
359
360 QString key = keyPrefix + sensItem->text() + "_" + QString::number((quintptr)sensItem);
361 m_surfaces[key] = brainSurf;
362
363
364 }
365
366 // Handle Dipole Items
367 if (absItem && absItem->type() == AbstractTreeItem::itemTypeId(AbstractTreeItem::DipoleItem)) {
368 DipoleTreeItem* dipItem = static_cast<DipoleTreeItem*>(absItem);
369 auto dipObject = std::make_shared<DipoleObject>();
370 dipObject->load(dipItem->ecdSet());
371 dipObject->setVisible(dipItem->isVisible());
372
373 m_itemDipoleMap[item] = dipObject;
374 }
375
376 // Handle Source Space Items (one item per hemisphere, batched mesh)
378 SourceSpaceTreeItem* srcItem = static_cast<SourceSpaceTreeItem*>(absItem);
379 const QVector<QVector3D>& positions = srcItem->positions();
380 if (positions.isEmpty()) continue;
381
382 auto brainSurf = MeshFactory::createBatchedSpheres(positions, srcItem->scale(),
383 srcItem->color());
384 brainSurf->setVisible(srcItem->isVisible());
385 m_itemSurfaceMap[item] = brainSurf;
386
387 QString key = "srcsp_" + srcItem->text();
388 m_surfaces[key] = brainSurf;
389 }
390
391 // Handle Digitizer Items (batched sphere mesh per category)
393 DigitizerTreeItem* digItem = static_cast<DigitizerTreeItem*>(absItem);
394 const QVector<QVector3D>& positions = digItem->positions();
395 if (positions.isEmpty()) continue;
396
397 auto brainSurf = MeshFactory::createBatchedSpheres(positions, digItem->scale(),
398 digItem->color());
399 brainSurf->setVisible(digItem->isVisible());
400
401 // Apply Head-to-MRI transformation if available
402 if (!m_headToMriTrans.isEmpty()) {
403 QMatrix4x4 m;
404 if (m_applySensorTrans) {
405 m = SURFACEKEYS::toQMatrix4x4(m_headToMriTrans.trans);
406 }
407 brainSurf->applyTransform(m);
408 }
409
410 m_itemSurfaceMap[item] = brainSurf;
411
412 // Category name for legacy map key
413 QString catName;
414 switch (digItem->pointKind()) {
415 case DigitizerTreeItem::Cardinal: catName = "cardinal"; break;
416 case DigitizerTreeItem::HPI: catName = "hpi"; break;
417 case DigitizerTreeItem::EEG: catName = "eeg"; break;
418 case DigitizerTreeItem::Extra: catName = "extra"; break;
419 }
420 QString key = "dig_" + catName;
421 m_surfaces[key] = brainSurf;
422 }
423
424
425 // Check children recursively
426 if (m_model->hasChildren(index)) {
427 onRowsInserted(index, 0, m_model->rowCount(index) - 1);
428 }
429 }
430 updateInflatedSurfaceTransforms();
431 updateSceneBounds();
432 m_vertexCountDirty = true;
433 m_sceneDirty = true; update();
434}
435
436void BrainView::onDataChanged(const QModelIndex &topLeft, const QModelIndex &bottomRight, const QVector<int> &roles)
437{
438 // Update visuals based on roles
439 for (int i = topLeft.row(); i <= bottomRight.row(); ++i) {
440 QModelIndex index = m_model->index(i, 0, topLeft.parent());
441 QStandardItem* item = m_model->itemFromIndex(index);
442
443 if (m_itemSurfaceMap.contains(item)) {
444 auto surf = m_itemSurfaceMap[item];
445
446 AbstractTreeItem* absItem = dynamic_cast<AbstractTreeItem*>(item);
447 if (absItem) {
448 if (roles.contains(AbstractTreeItem::VisibleRole)) {
449 surf->setVisible(absItem->isVisible());
450 m_vertexCountDirty = true;
451 }
452 if (roles.contains(AbstractTreeItem::ColorRole)) {
453 // Update color (not fully impl in BrainSurface yet for uniform override, but prepared)
454 }
455 if (roles.contains(SurfaceTreeItem::AnnotationDataRole)) {
456 SurfaceTreeItem* sItem = static_cast<SurfaceTreeItem*>(absItem);
457 if (!sItem->annotationData().isEmpty()) {
458 surf->addAnnotation(sItem->annotationData());
459 }
460 }
461 }
462 }
463 }
464 updateSceneBounds();
465 m_sceneDirty = true; update();
466}
467
468//=============================================================================================================
469
470void BrainView::setActiveSurface(const QString &type)
471{
472 subViewForTarget(m_visualizationEditTarget).surfaceType = type;
473
474 m_activeSurfaceType = type;
475
476 // Update m_activeSurface pointer to one of the matching surfaces for stats/helpers
477 QString key = "lh_" + type;
478 if (m_surfaces.contains(key)) m_activeSurface = m_surfaces[key];
479 else {
480 key = "rh_" + type;
481 if (m_surfaces.contains(key)) m_activeSurface = m_surfaces[key];
482 }
483
484 updateInflatedSurfaceTransforms();
485 saveMultiViewSettings();
486
487 updateSceneBounds();
488 m_vertexCountDirty = true;
489 m_sceneDirty = true; update();
490}
491
492void BrainView::updateSceneBounds()
493{
494 QVector3D min(std::numeric_limits<float>::max(), std::numeric_limits<float>::max(), std::numeric_limits<float>::max());
495 QVector3D max(std::numeric_limits<float>::lowest(), std::numeric_limits<float>::lowest(), std::numeric_limits<float>::lowest());
496 bool hasContent = false;
497
498 // Iterate over all surfaces
499 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
500 if (it.value()->isVisible()) {
501 QVector3D sMin, sMax;
502 it.value()->boundingBox(sMin, sMax);
503
504 min.setX(std::min(min.x(), sMin.x()));
505 min.setY(std::min(min.y(), sMin.y()));
506 min.setZ(std::min(min.z(), sMin.z()));
507
508 max.setX(std::max(max.x(), sMax.x()));
509 max.setY(std::max(max.y(), sMax.y()));
510 max.setZ(std::max(max.z(), sMax.z()));
511 hasContent = true;
512 }
513 }
514
515 // Iterate over all dipoles
516 for (auto it = m_itemDipoleMap.begin(); it != m_itemDipoleMap.end(); ++it) {
517 if (it.value()->isVisible()) {
518 // Dipoles don't have a bounding box method in DipoleObject yet,
519 // but we can approximate or skip for now.
520 // Ideally DipoleObject should expose bounds.
521 // For now, let's assume surfaces dictate the scene size usually.
522 }
523 }
524
525 if (hasContent) {
526 m_sceneCenter = (min + max) * 0.5f;
527
528 QVector3D diag = max - min;
529 m_sceneSize = std::max(diag.x(), std::max(diag.y(), diag.z()));
530
531 // Ensure non-zero size
532 if (m_sceneSize < 0.01f) m_sceneSize = 0.3f;
533
534 } else {
535 // Default
536 m_sceneCenter = QVector3D(0,0,0);
537 m_sceneSize = 0.3f;
538 }
539}
540
541//=============================================================================================================
542
543void BrainView::setShaderMode(const QString &modeName)
544{
545 const BrainRenderer::ShaderMode mode = shaderModeFromName(modeName);
546 subViewForTarget(m_visualizationEditTarget).brainShader = mode;
547
548 m_brainShaderMode = mode;
549 saveMultiViewSettings();
550 m_sceneDirty = true; update();
551}
552
553//=============================================================================================================
554
556{
557 const int prev = m_visualizationEditTarget;
558 m_visualizationEditTarget = normalizedVisualizationTarget(target, static_cast<int>(m_subViews.size()) - 1);
559
560 const SubView &sv = subViewForTarget(m_visualizationEditTarget);
561 m_activeSurfaceType = sv.surfaceType;
562 m_brainShaderMode = sv.brainShader;
563 m_bemShaderMode = sv.bemShader;
564 m_currentVisMode = sv.overlayMode;
565 const ViewVisibilityProfile &visibility = sv.visibility;
566
567 const bool remapMegSurface = (m_fieldMapper.megFieldMapOnHead() != visibility.megFieldMapOnHead);
568 m_fieldMapper.setMegFieldMapOnHead(visibility.megFieldMapOnHead);
569 m_dipolesVisible = visibility.dipoles;
570 m_networkVisible = visibility.network;
571
572 // Note: we intentionally do NOT call setVisualizationMode() on surfaces
573 // here. Each viewport's overlay mode is sent as a per-draw shader
574 // uniform (sceneData.overlayMode), so the surface objects must keep
575 // their vertex data intact — in particular the STC colour channel —
576 // regardless of which viewport is currently selected for editing.
577
578 if (m_fieldMapper.isLoaded()) {
579 if (remapMegSurface) {
580 m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans);
581 }
582 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
583 }
584
585 // Update viewport label highlighting
586 updateViewportLabelHighlight();
587
588 saveMultiViewSettings();
589
590 if (prev != m_visualizationEditTarget) {
591 emit visualizationEditTargetChanged(m_visualizationEditTarget);
592 }
593}
594
595//=============================================================================================================
596
598{
599 return m_visualizationEditTarget;
600}
601
602//=============================================================================================================
603
604QString BrainView::activeSurfaceForTarget(int target) const
605{
606 return subViewForTarget(target).surfaceType;
607}
608
609//=============================================================================================================
610
611QString BrainView::shaderModeForTarget(int target) const
612{
613 return shaderModeName(subViewForTarget(target).brainShader);
614}
615
616//=============================================================================================================
617
618QString BrainView::bemShaderModeForTarget(int target) const
619{
620 return shaderModeName(subViewForTarget(target).bemShader);
621}
622
623//=============================================================================================================
624
625QString BrainView::overlayModeForTarget(int target) const
626{
627 return visualizationModeName(subViewForTarget(target).overlayMode);
628}
629
630//=============================================================================================================
631
632ViewVisibilityProfile& BrainView::visibilityProfileForTarget(int target)
633{
634 return subViewForTarget(target).visibility;
635}
636
637//=============================================================================================================
638
639const ViewVisibilityProfile& BrainView::visibilityProfileForTarget(int target) const
640{
641 return subViewForTarget(target).visibility;
642}
643
644//=============================================================================================================
645
646SubView& BrainView::subViewForTarget(int target)
647{
648 const int normalized = normalizedVisualizationTarget(target, static_cast<int>(m_subViews.size()) - 1);
649 return (normalized < 0) ? m_singleView : m_subViews[normalized];
650}
651
652//=============================================================================================================
653
654const SubView& BrainView::subViewForTarget(int target) const
655{
656 const int normalized = normalizedVisualizationTarget(target, static_cast<int>(m_subViews.size()) - 1);
657 return (normalized < 0) ? m_singleView : m_subViews[normalized];
658}
659
660//=============================================================================================================
661
662// Note: SubView::isBrainSurfaceKey, matchesSurfaceType, shouldRenderSurface,
663// and applyOverlayToSurfaces are defined in core/viewstate.cpp.
664
665//=============================================================================================================
666
667bool BrainView::objectVisibleForTarget(const QString &object, int target) const
668{
669 return visibilityProfileForTarget(target).isObjectVisible(object);
670}
671
672//=============================================================================================================
673
675{
676 return visibilityProfileForTarget(target).megFieldMapOnHead;
677}
678
679//=============================================================================================================
680
681void BrainView::updateInflatedSurfaceTransforms()
682{
683 const bool needsInflated = (m_singleView.surfaceType == "inflated")
684 || std::any_of(m_subViews.cbegin(), m_subViews.cend(),
685 [](const SubView &sv) { return sv.surfaceType == "inflated"; });
686
687 const QString lhKey = "lh_inflated";
688 const QString rhKey = "rh_inflated";
689
690 if (!m_surfaces.contains(lhKey) || !m_surfaces.contains(rhKey)) {
691 return;
692 }
693
694 auto lhSurf = m_surfaces[lhKey];
695 auto rhSurf = m_surfaces[rhKey];
696
697 QMatrix4x4 identity;
698 lhSurf->applyTransform(identity);
699 rhSurf->applyTransform(identity);
700
701 if (!needsInflated) {
702 return;
703 }
704
705 const float lhMaxX = lhSurf->maxX();
706 const float rhMinX = rhSurf->minX();
707
708 const float gap = 0.005f;
709 const float lhOffset = -gap / 2.0f - lhMaxX;
710 const float rhOffset = gap / 2.0f - rhMinX;
711
712 lhSurf->translateX(lhOffset);
713 rhSurf->translateX(rhOffset);
714}
715
716void BrainView::setBemShaderMode(const QString &modeName)
717{
718 const BrainRenderer::ShaderMode mode = shaderModeFromName(modeName);
719
720 subViewForTarget(m_visualizationEditTarget).bemShader = mode;
721
722 m_bemShaderMode = mode;
723 saveMultiViewSettings();
724 m_sceneDirty = true; update();
725}
726
727//=============================================================================================================
728
730{
731 m_singleView.bemShader = m_singleView.brainShader;
732 for (int i = 0; i < m_subViews.size(); ++i) {
733 m_subViews[i].bemShader = m_subViews[i].brainShader;
734 }
735
736 m_bemShaderMode = subViewForTarget(m_visualizationEditTarget).bemShader;
737
738 saveMultiViewSettings();
739 m_sceneDirty = true; update();
740}
741
742void BrainView::setSensorVisible(const QString &type, bool visible)
743{
744 const QString object = SURFACEKEYS::sensorTypeToObjectKey(type);
745 if (object.isEmpty()) return;
746
747 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
748 profile.setObjectVisible(object, visible);
749
750 // Cascade parent toggle to child sub-types so that e.g. "MEG" also
751 // enables/disables MEG/Grad and MEG/Mag sub-types.
752 // Note: MEG Helmet has its own independent checkbox and is NOT cascaded.
753 if (type == QLatin1String("MEG")) {
754 profile.sensMegGrad = visible;
755 profile.sensMegMag = visible;
756 } else if (type == QLatin1String("EEG")) {
757 // No sub-types for EEG currently, but keep symmetric.
758 } else if (type == QLatin1String("Digitizer")) {
759 profile.digCardinal = visible;
760 profile.digHpi = visible;
761 profile.digEeg = visible;
762 profile.digExtra = visible;
763 }
764
765 saveMultiViewSettings();
766 m_sceneDirty = true; update();
767}
768
770{
771 if (m_applySensorTrans != enabled) {
772 m_applySensorTrans = enabled;
773 refreshSensorTransforms();
774 m_sceneDirty = true; update();
775 }
776}
777
778//=============================================================================================================
779
/// Stores a user-supplied MEG helmet surface file path override.
/// Only the path is recorded here — no reload or repaint is requested.
///
/// @param path  File path to use instead of the default helmet surface.
void BrainView::setMegHelmetOverride(const QString &path)
{
    m_megHelmetOverridePath = path;
}
784
786{
787 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
788 profile.dipoles = visible;
789 m_dipolesVisible = visible;
790 saveMultiViewSettings();
791 m_sceneDirty = true; update();
792}
793
794//=============================================================================================================
795
796void BrainView::setVisualizationMode(const QString &modeName)
797{
799 SubView &sv = subViewForTarget(m_visualizationEditTarget);
800 sv.overlayMode = mode;
801
802 m_currentVisMode = mode;
803
804 // Propagate the mode to brain hemisphere surfaces only (lh_*, rh_*)
805 // so that the primary colour channel holds the right data: curvature
806 // grays for Scientific or STC colours for SourceEstimate.
807 // BEM, sensor, and source-space surfaces are left untouched.
808 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
809 const QString &key = it.key();
810 if (key.startsWith("lh_") || key.startsWith("rh_")) {
811 it.value()->setVisualizationMode(mode);
812 }
813 }
814
815 saveMultiViewSettings();
816 m_sceneDirty = true; update();
817}
818
819//=============================================================================================================
820
821void BrainView::setHemiVisible(int hemiIdx, bool visible)
822{
823 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
824 if (hemiIdx == 0) {
825 profile.lh = visible;
826 } else if (hemiIdx == 1) {
827 profile.rh = visible;
828 }
829 saveMultiViewSettings();
830 m_sceneDirty = true; update();
831}
832
833//=============================================================================================================
834
835void BrainView::setBemVisible(const QString &name, bool visible)
836{
837 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
838 profile.setObjectVisible("bem_" + name, visible);
839 saveMultiViewSettings();
840 m_sceneDirty = true; update();
841}
842
844{
845 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
846 if (it.key().startsWith("bem_")) {
847 it.value()->setUseDefaultColor(enabled);
848 }
849 }
850 m_sceneDirty = true; update();
851}
852
853//=============================================================================================================
854
856{
857 m_lightingEnabled = enabled;
858 m_sceneDirty = true; update();
859}
860
861//=============================================================================================================
862
864{
865 QImage img = grabFramebuffer();
866 QString fileName = QString("snapshot_refactor_%1.png").arg(m_snapshotCounter++, 4, 10, QChar('0'));
867 img.save(fileName);
868
869}
870
871//=============================================================================================================
872
874{
875 m_viewMode = SingleView;
876 m_isDraggingSplitter = false;
877 m_activeSplitter = SplitterHit::None;
878 unsetCursor();
879 saveMultiViewSettings();
880 updateViewportSeparators();
881 updateOverlayLayout();
882 m_sceneDirty = true; update();
883}
884
885//=============================================================================================================
886
888{
889 m_viewMode = MultiView;
890 saveMultiViewSettings();
891 updateViewportSeparators();
892 updateOverlayLayout();
893 m_sceneDirty = true; update();
894}
895
896//=============================================================================================================
897
899{
900 count = std::clamp(count, 1, static_cast<int>(m_subViews.size()));
901 m_viewCount = count;
902
903 if (count == 1) {
904 m_viewMode = SingleView;
905 m_isDraggingSplitter = false;
906 m_activeSplitter = SplitterHit::None;
907 unsetCursor();
909 } else {
910 m_viewMode = MultiView;
911 // Default edit target to first pane when entering multi-view
912 if (m_visualizationEditTarget < 0)
914 }
915
916 // Enable first N sub-views, disable the rest
917 for (int i = 0; i < m_subViews.size(); ++i)
918 m_subViews[i].enabled = (i < count);
919
920 saveMultiViewSettings();
921 updateViewportSeparators();
922 updateOverlayLayout();
923 m_sceneDirty = true; update();
924}
925
926//=============================================================================================================
927
929{
930 m_layout.resetSplits();
931 m_multiSplitX = m_layout.splitX();
932 m_multiSplitY = m_layout.splitY();
933 saveMultiViewSettings();
934 updateViewportSeparators();
935 updateOverlayLayout();
936 m_sceneDirty = true; update();
937}
938
939bool BrainView::isViewportEnabled(int index) const
940{
941 if (index < 0 || index >= m_subViews.size()) {
942 return false;
943 }
944
945 return m_subViews[index].enabled;
946}
947
948//=============================================================================================================
949
950int BrainView::enabledViewportCount() const
951{
952 if (m_viewMode != MultiView) {
953 return 1;
954 }
955
956 int numEnabled = 0;
957 for (int i = 0; i < m_subViews.size(); ++i) {
958 if (m_subViews[i].enabled) {
959 ++numEnabled;
960 }
961 }
962
963 return numEnabled > 0 ? numEnabled : 1;
964}
965
966//=============================================================================================================
967
968QVector<int> BrainView::enabledViewportIndices() const
969{
970 QVector<int> vps;
971 if (m_viewMode == MultiView) {
972 for (int i = 0; i < m_subViews.size(); ++i) {
973 if (m_subViews[i].enabled)
974 vps.append(i);
975 }
976 if (vps.isEmpty())
977 vps.append(0);
978 } else {
979 vps.append(0);
980 }
981 return vps;
982}
983
984//=============================================================================================================
985
986int BrainView::viewportIndexAt(const QPoint& pos) const
987{
988 if (m_viewMode != MultiView) {
989 return 0;
990 }
991
992 const auto enabledViewports = enabledViewportIndices();
993 return m_layout.viewportIndexAt(pos, enabledViewports, size());
994}
995
996//=============================================================================================================
997
998QRect BrainView::multiViewSlotRect(int slot, int numEnabled, const QSize& outputSize) const
999{
1000 return m_layout.slotRect(slot, numEnabled, outputSize);
1001}
1002
1003//=============================================================================================================
1004
1005SplitterHit BrainView::hitTestSplitter(const QPoint& pos, int numEnabled, const QSize& outputSize) const
1006{
1007 if (m_viewMode != MultiView || numEnabled <= 1) {
1008 return SplitterHit::None;
1009 }
1010 return m_layout.hitTestSplitter(pos, numEnabled, outputSize);
1011}
1012
1013//=============================================================================================================
1014
1015void BrainView::updateSplitterCursor(const QPoint& pos)
1016{
1017 const SplitterHit hit = hitTestSplitter(pos, enabledViewportCount(), size());
1018 const Qt::CursorShape shape = MultiViewLayout::cursorForHit(hit);
1019 if (shape == Qt::ArrowCursor) {
1020 unsetCursor();
1021 } else {
1022 setCursor(shape);
1023 }
1024}
1025
1026//=============================================================================================================
1027
1028void BrainView::updateViewportSeparators()
1029{
1030 if (!m_verticalSeparator || !m_horizontalSeparator) {
1031 return;
1032 }
1033
1034 m_verticalSeparator->hide();
1035 m_horizontalSeparator->hide();
1036
1037 const int numEnabled = enabledViewportCount();
1038 if (m_viewMode != MultiView || numEnabled <= 1) {
1039 return;
1040 }
1041
1042 QRect vRect, hRect;
1043 m_layout.separatorGeometries(numEnabled, size(), vRect, hRect);
1044
1045 if (!vRect.isEmpty()) {
1046 m_verticalSeparator->setGeometry(vRect);
1047 m_verticalSeparator->show();
1048 m_verticalSeparator->raise();
1049 }
1050 if (!hRect.isEmpty()) {
1051 m_horizontalSeparator->setGeometry(hRect);
1052 m_horizontalSeparator->show();
1053 m_horizontalSeparator->raise();
1054 }
1055
1056 updateOverlayLayout();
1057}
1058
1059//=============================================================================================================
1060
1061void BrainView::updateOverlayLayout()
1062{
1063 const auto enabledViewports = enabledViewportIndices();
1064
1065 if (m_fpsLabel) {
1066 m_fpsLabel->setVisible(m_infoPanelVisible);
1067 m_fpsLabel->adjustSize();
1068 const int perfBottomMargin = 2;
1069
1070 if (m_viewMode == MultiView) {
1071 m_fpsLabel->move(width() - m_fpsLabel->width() - 10,
1072 height() - m_fpsLabel->height() - perfBottomMargin);
1073 } else {
1074 m_fpsLabel->move(width() - m_fpsLabel->width() - 10,
1075 height() - m_fpsLabel->height() - perfBottomMargin);
1076 }
1077
1078 m_fpsLabel->raise();
1079 }
1080
1081 if (m_singleViewInfoLabel) {
1082 const bool showSingleInfo = (m_viewMode == SingleView) && m_infoPanelVisible;
1083 m_singleViewInfoLabel->setVisible(showSingleInfo);
1084 if (showSingleInfo) {
1085 m_singleViewInfoLabel->adjustSize();
1086 m_singleViewInfoLabel->move(width() - m_singleViewInfoLabel->width() - 8, 8);
1087 m_singleViewInfoLabel->raise();
1088 }
1089 }
1090
1091 if (m_regionLabel) {
1092 const int regionY = (m_viewMode == MultiView) ? 38 : 10;
1093 m_regionLabel->move(10, regionY);
1094 if (!m_regionLabel->text().isEmpty()) {
1095 m_regionLabel->raise();
1096 }
1097 }
1098
1099 for (int i = 0; i < m_viewportNameLabels.size(); ++i) {
1100 if (m_viewportNameLabels[i]) {
1101 m_viewportNameLabels[i]->hide();
1102 }
1103 if (m_viewportInfoLabels[i]) {
1104 m_viewportInfoLabels[i]->hide();
1105 }
1106 }
1107
1108 if (m_viewMode != MultiView) {
1109 return;
1110 }
1111
1112 const int numEnabled = enabledViewports.size();
1113 const QSize overlaySize = size();
1114 for (int slot = 0; slot < numEnabled; ++slot) {
1115 const int vp = enabledViewports[slot];
1116 QLabel* label = m_viewportNameLabels[vp];
1117 QLabel* infoLabel = m_viewportInfoLabels[vp];
1118 if (!label) {
1119 continue;
1120 }
1121
1122 const int preset = std::clamp(m_subViews[vp].preset, 0, 6);
1123 label->setText(multiViewPresetName(preset));
1124
1125 const QRect pane = multiViewSlotRect(slot, numEnabled, overlaySize);
1126 label->adjustSize();
1127 label->move(pane.x() + 8, pane.y() + 8);
1128 label->setVisible(true);
1129 label->raise();
1130
1131 if (infoLabel) {
1132 infoLabel->adjustSize();
1133 infoLabel->move(pane.x() + pane.width() - infoLabel->width() - 8,
1134 pane.y() + 8);
1135 infoLabel->setVisible(m_infoPanelVisible);
1136 infoLabel->raise();
1137 }
1138 }
1139
1140 updateViewportLabelHighlight();
1141}
1142
1143//=============================================================================================================
1144
1145void BrainView::updateViewportLabelHighlight()
1146{
1147 static const QString normalStyle =
1148 QStringLiteral("color: white; font-weight: bold; font-family: sans-serif; "
1149 "font-size: 12px; background: transparent; padding: 2px 4px;");
1150 static const QString selectedStyle =
1151 QStringLiteral("color: #FFD54F; font-weight: bold; font-family: sans-serif; "
1152 "font-size: 13px; background: rgba(255,213,79,40); "
1153 "border: 1px solid #FFD54F; border-radius: 3px; padding: 2px 6px;");
1154
1155 for (int i = 0; i < m_viewportNameLabels.size(); ++i) {
1156 if (!m_viewportNameLabels[i]) continue;
1157 const bool selected = (m_viewMode == MultiView && m_visualizationEditTarget == i);
1158 m_viewportNameLabels[i]->setStyleSheet(selected ? selectedStyle : normalStyle);
1159 m_viewportNameLabels[i]->adjustSize();
1160 }
1161}
1162
1163//=============================================================================================================
1164
1165void BrainView::logPerspectiveRotation(const QString& context) const
1166{
1167 Q_UNUSED(context);
1168}
1169
1170//=============================================================================================================
1171
void BrainView::loadMultiViewSettings()
{
    // Restores splitter positions, view mode/count, the shared camera
    // rotation and all per-SubView state from QSettings. Mirrors
    // saveMultiViewSettings() — the key names must stay in sync with it.
    QSettings settings("MNECPP");
    settings.beginGroup("ex_brain_view/BrainView");

    m_multiSplitX = settings.value("multiSplitX", 0.5f).toFloat();
    m_multiSplitY = settings.value("multiSplitY", 0.5f).toFloat();

    const int savedViewMode = settings.value("viewMode", static_cast<int>(SingleView)).toInt();
    m_viewMode = (savedViewMode == static_cast<int>(MultiView)) ? MultiView : SingleView;
    m_viewCount = std::clamp(settings.value("viewCount", 1).toInt(), 1, static_cast<int>(m_subViews.size()));
    // Reconcile: viewCount > 1 implies MultiView
    if (m_viewCount > 1) m_viewMode = MultiView;
    else m_viewMode = SingleView;

    // Only restore the camera quaternion if all four components were saved.
    const bool hasCameraQuat = settings.contains("cameraRotW")
            && settings.contains("cameraRotX")
            && settings.contains("cameraRotY")
            && settings.contains("cameraRotZ");
    if (hasCameraQuat) {
        const float w = settings.value("cameraRotW", 1.0f).toFloat();
        const float x = settings.value("cameraRotX", 0.0f).toFloat();
        const float y = settings.value("cameraRotY", 0.0f).toFloat();
        const float z = settings.value("cameraRotZ", 0.0f).toFloat();
        m_cameraRotation = QQuaternion(w, x, y, z);
        // Guard against a degenerate (near-zero) stored quaternion, which
        // cannot be normalized; fall back to the identity rotation.
        if (m_cameraRotation.lengthSquared() <= std::numeric_limits<float>::epsilon()) {
            m_cameraRotation = QQuaternion();
        } else {
            m_cameraRotation.normalize();
        }
    }

    // Reset per-index defaults, then load saved state on top
    for (int i = 0; i < m_subViews.size(); ++i) {
        m_subViews[i] = SubView::defaultForIndex(i);
        m_subViews[i].enabled = (i < m_viewCount);
    }

    // Delegate per-SubView serialization
    m_singleView.load(settings, "single_", m_cameraRotation);
    for (int i = 0; i < m_subViews.size(); ++i)
        m_subViews[i].load(settings, QStringLiteral("multi%1_").arg(i), m_cameraRotation);

    const int maxIdx = static_cast<int>(m_subViews.size()) - 1;
    m_visualizationEditTarget = normalizedVisualizationTarget(
        settings.value("visualizationEditTarget", -1).toInt(), maxIdx);

    m_infoPanelVisible = settings.value("infoPanelVisible", true).toBool();

    settings.endGroup();

    // Clamp the restored splits to the allowed range, then push them into
    // the layout helper so geometry queries see the same values.
    m_multiSplitX = std::clamp(m_multiSplitX, 0.15f, 0.85f);
    m_multiSplitY = std::clamp(m_multiSplitY, 0.15f, 0.85f);
    m_layout.setSplitX(m_multiSplitX);
    m_layout.setSplitY(m_multiSplitY);

    setVisualizationEditTarget(m_visualizationEditTarget);
}
1230
1231//=============================================================================================================
1232
1233void BrainView::saveMultiViewSettings() const
1234{
1235 QSettings settings("MNECPP");
1236 settings.beginGroup("ex_brain_view/BrainView");
1237 settings.setValue("multiSplitX", m_multiSplitX);
1238 settings.setValue("multiSplitY", m_multiSplitY);
1239 settings.setValue("viewMode", static_cast<int>(m_viewMode));
1240 settings.setValue("viewCount", m_viewCount);
1241 settings.setValue("cameraRotW", m_cameraRotation.scalar());
1242 settings.setValue("cameraRotX", m_cameraRotation.x());
1243 settings.setValue("cameraRotY", m_cameraRotation.y());
1244 settings.setValue("cameraRotZ", m_cameraRotation.z());
1245 for (int i = 0; i < m_subViews.size(); ++i)
1246 settings.setValue(QStringLiteral("viewportEnabled%1").arg(i), m_subViews[i].enabled);
1247 settings.setValue("visualizationEditTarget", m_visualizationEditTarget);
1248 settings.setValue("infoPanelVisible", m_infoPanelVisible);
1249
1250 // Delegate per-SubView serialization
1251 m_singleView.save(settings, "single_");
1252 for (int i = 0; i < m_subViews.size(); ++i)
1253 m_subViews[i].save(settings, QStringLiteral("multi%1_").arg(i));
1254
1255 settings.endGroup();
1256}
1257
1258//=============================================================================================================
1259
1260void BrainView::setViewportEnabled(int index, bool enabled)
1261{
1262 if (index >= 0 && index < m_subViews.size()) {
1263 m_subViews[index].enabled = enabled;
1264 saveMultiViewSettings();
1265 updateViewportSeparators();
1266 updateOverlayLayout();
1267 m_sceneDirty = true; update();
1268 }
1269}
1270
1271//=============================================================================================================
1272
1273void BrainView::setViewportCameraPreset(int index, int preset)
1274{
1275 if (index < 0 || index >= static_cast<int>(m_subViews.size()))
1276 return;
1277 preset = std::clamp(preset, 0, 6);
1278 if (m_subViews[index].preset == preset)
1279 return;
1280 m_subViews[index].preset = preset;
1281 saveMultiViewSettings();
1282 updateOverlayLayout();
1283 m_sceneDirty = true; update();
1284}
1285
1286//=============================================================================================================
1287
{
    // Returns the camera preset of the given pane, clamped to the valid
    // 0..6 range, or -1 when the index is out of bounds.
    if (index < 0 || index >= static_cast<int>(m_subViews.size()))
        return -1;
    return std::clamp(m_subViews[index].preset, 0, 6);
}
1294
1295//=============================================================================================================
1296
{
    // Shows/hides the FPS and per-pane info overlays; the flag is
    // persisted immediately and the overlay layout refreshed.
    m_infoPanelVisible = visible;
    saveMultiViewSettings();
    updateOverlayLayout();
}
1303
1304//=============================================================================================================
1305
void BrainView::resizeEvent(QResizeEvent *event)
{
    // Let QRhiWidget resize its backing texture first, then re-fit the
    // splitter separators and overlay labels to the new widget size.
    QRhiWidget::resizeEvent(event);
    updateViewportSeparators();
    updateOverlayLayout();
}
1312
1313//=============================================================================================================
1314
void BrainView::initialize(QRhiCommandBuffer *cb)
{
    // One-time GPU-side setup invoked by QRhiWidget once the RHI is ready.
    Q_UNUSED(cb);

    m_renderer = std::make_unique<BrainRenderer>();

    // Create dual render targets (clearing + preserving) sharing this
    // widget's color texture. Must be done here because colorTexture()
    // is only valid inside initialize()/render().
    m_renderer->ensureRenderTargets(rhi(), colorTexture(), colorTexture()->pixelSize());
}
1326
1327//=============================================================================================================
1328
1329void BrainView::render(QRhiCommandBuffer *cb)
1330{
1331 // Check if there is anything to render
1332 bool hasSurfaces = !m_surfaces.isEmpty();
1333 bool hasDipoles = !m_itemDipoleMap.isEmpty() || m_dipoles; // Check managed dipoles too
1334
1335 // If absolutely nothing is loaded, render black background
1336 if (!hasSurfaces && !hasDipoles) {
1337 // No surface loaded: render a black background instead of leaving the widget uninitialized
1338 if (!m_renderer) {
1339 m_renderer = std::make_unique<BrainRenderer>();
1340 }
1341 m_renderer->ensureRenderTargets(rhi(), colorTexture(), colorTexture()->pixelSize());
1342 m_renderer->initialize(rhi(), m_renderer->rtClear()->renderPassDescriptor(), sampleCount());
1343 m_renderer->beginFrame(cb);
1344 m_renderer->endPass(cb);
1345 return;
1346 }
1347
1348 // Ensure active surface pointer is valid if possible, otherwise just use first available for stats
1349 if (!m_activeSurface && !m_surfaces.isEmpty()) {
1350 m_activeSurface = m_surfaces.begin().value();
1351 }
1352
1353
1354 m_frameCount++;
1355 if (m_fpsTimer.elapsed() >= 500) {
1356 float fps = m_frameCount / (m_fpsTimer.elapsed() / 1000.0f);
1357
1358 // Recount vertices only when surface list/visibility changed
1359 if (m_vertexCountDirty) {
1360 auto countVerticesForSubView = [this](const SubView &sv) -> qint64 {
1361 qint64 total = 0;
1362 for (auto it = m_surfaces.cbegin(); it != m_surfaces.cend(); ++it) {
1363 const QString &key = it.key();
1364 auto surface = it.value();
1365 if (!surface) continue;
1366 if (!sv.shouldRenderSurface(key)) continue;
1367 if (SubView::isBrainSurfaceKey(key)) {
1368 if (!sv.matchesSurfaceType(key)) continue;
1369 } else {
1370 if (!surface->isVisible()) continue;
1371 }
1372 total += surface->vertexCount();
1373 }
1374 return total;
1375 };
1376
1377 qint64 vCount = 0;
1378 if (m_viewMode == MultiView) {
1379 for (int vp : enabledViewportIndices()) {
1380 vCount += countVerticesForSubView(m_subViews[vp]);
1381 }
1382 } else {
1383 vCount = countVerticesForSubView(m_singleView);
1384 }
1385 m_cachedVertexCount = vCount;
1386 m_vertexCountDirty = false;
1387 }
1388
1389 m_fpsLabel->setText(QString("FPS: %1\nVertices: %2").arg(fps, 0, 'f', 1).arg(m_cachedVertexCount));
1390 updateOverlayLayout();
1391 m_fpsLabel->raise();
1392 m_frameCount = 0;
1393 m_fpsTimer.restart();
1394 }
1395
1396 // Initialize renderer
1397 m_renderer->ensureRenderTargets(rhi(), colorTexture(), colorTexture()->pixelSize());
1398 m_renderer->initialize(rhi(), m_renderer->rtClear()->renderPassDescriptor(), sampleCount());
1399
1400 // Determine viewport configuration
1401 QSize outputSize = m_renderer->rtClear()->pixelSize();
1402
1403 // Build list of enabled viewports
1404 const auto enabledViewports = enabledViewportIndices();
1405 int numEnabled = enabledViewports.size();
1406
1407 // ── Pre-render phase ────────────────────────────────────────────────
1408 // Pre-upload ALL Immutable GPU buffers BEFORE the render pass starts.
1409 // On Metal, uploading an Immutable buffer during an active render pass
1410 // forces an encoder restart which resets the viewport state. On
1411 // WebGL/GLES2, buffer create()/upload calls invoke glBindBuffer() which
1412 // silently modifies the currently-bound VAO's element-buffer binding,
1413 // corrupting previously drawn surfaces.
1414 //
1415 // By doing all static uploads here (outside any render pass), we
1416 // guarantee that the draw loop below only records Dynamic uniform
1417 // updates — those never interrupt the pass.
1418
1419 // Pre-upload every surface and dipole buffer that is dirty or new.
1420 // NOTE: Overlay modes are applied per-pane inside the render loop below
1421 // (not here), because different panes can have different overlays on the
1422 // same shared BrainSurface objects. Applying all pane overlays
1423 // sequentially here would leave only the last pane's vertex colours.
1424 {
1425 QRhiResourceUpdateBatch *preUpload = rhi()->nextResourceUpdateBatch();
1426 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1427#ifdef __EMSCRIPTEN__
1428 // WORKAROUND(QRhi-GLES2): Per-surface GPU buffers are unused on
1429 // WASM — all geometry is drawn via merged per-category buffers.
1430 // Skip individual uploads to avoid polluting GLES2
1431 // element-buffer bindings.
1432 continue;
1433#endif
1434 it.value()->updateBuffers(rhi(), preUpload);
1435 }
1436#ifndef __EMSCRIPTEN__
1437 if (m_debugPointerSurface) {
1438 m_debugPointerSurface->updateBuffers(rhi(), preUpload);
1439 }
1440 for (auto it = m_itemDipoleMap.begin(); it != m_itemDipoleMap.end(); ++it) {
1441 it.value()->updateBuffers(rhi(), preUpload);
1442 }
1443 if (m_dipoles) {
1444 m_dipoles->updateBuffers(rhi(), preUpload);
1445 }
1446#endif
1447
1448#ifdef __EMSCRIPTEN__
1449 // WORKAROUND(QRhi-GLES2): Single merged buffer for ALL surfaces.
1450 // The Qt QRhi GLES2/WebGL backend only renders the first
1451 // drawIndexed() per render pass. Multi-pass compositing via
1452 // PreserveColorContents is unreliable across WebGL implementations.
1453 // Merge everything into one VBO/IBO and issue one drawIndexed().
1454 {
1455 const SubView &sv = (m_viewMode == MultiView) ? m_subViews[0] : m_singleView;
1456
1457 QVector<BrainSurface*> allSurfaces;
1458
1459 // Brain surfaces (opaque, drawn first for depth)
1460 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1461 if (!SubView::isBrainSurfaceKey(it.key())) continue;
1462 if (!sv.matchesSurfaceType(it.key())) continue;
1463 if (!sv.shouldRenderSurface(it.key())) continue;
1464 allSurfaces.append(it.value().get());
1465 }
1466
1467 // Source space points
1468 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1469 if (!it.key().startsWith("srcsp_")) continue;
1470 if (!sv.shouldRenderSurface(it.key())) continue;
1471 if (!it.value()->isVisible()) continue;
1472 allSurfaces.append(it.value().get());
1473 }
1474
1475 // Digitizer points
1476 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1477 if (!it.key().startsWith("dig_")) continue;
1478 if (!sv.shouldRenderSurface(it.key())) continue;
1479 if (!it.value()->isVisible()) continue;
1480 allSurfaces.append(it.value().get());
1481 }
1482
1483 // BEM + sensors (transparent — appended last for correct blending order)
1484 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1485 bool isSensor = it.key().startsWith("sens_");
1486 bool isBem = it.key().startsWith("bem_");
1487 if (!isSensor && !isBem) continue;
1488 if (!sv.shouldRenderSurface(it.key())) continue;
1489 if (!it.value()->isVisible()) continue;
1490 allSurfaces.append(it.value().get());
1491 }
1492
1493 m_renderer->prepareMergedSurfaces(rhi(), preUpload, allSurfaces, QStringLiteral("default"));
1494 }
1495#endif
1496
1497 // Network buffers are updated inside renderNetwork() via updateNodeBuffers/updateEdgeBuffers
1498 cb->resourceUpdate(preUpload);
1499 }
1500
1501 // ── Render passes ───────────────────────────────────────────────────
1502 m_renderer->beginFrame(cb);
1503
1504 for (int slot = 0; slot < numEnabled; ++slot) {
1505 int vp = (m_viewMode == MultiView) ? enabledViewports[slot] : 0;
1506 const SubView &sv = (m_viewMode == MultiView) ? m_subViews[vp] : m_singleView;
1507 const int preset = (m_viewMode == MultiView) ? std::clamp(sv.preset, 0, 6) : 1;
1508
1509 const QRect paneRect = (m_viewMode == MultiView)
1510 ? multiViewSlotRect(slot, numEnabled, outputSize)
1511 : QRect(0, 0, outputSize.width(), outputSize.height());
1512
1513 QRect renderRect = paneRect;
1514 if (m_viewMode == MultiView && numEnabled > 1) {
1515 constexpr int separatorPx = 2;
1516
1517 if (numEnabled == 2) {
1518 if (slot == 0) {
1519 renderRect.setWidth(std::max(1, renderRect.width() - separatorPx));
1520 }
1521 } else if (numEnabled == 3) {
1522 // 3-view: slot 0 = full top row, slots 1&2 = bottom row
1523 if (slot == 0) {
1524 // Top pane: no right neighbor, has bottom neighbor
1525 renderRect.setHeight(std::max(1, renderRect.height() - separatorPx));
1526 } else if (slot == 1) {
1527 // Bottom-left: has right neighbor, no bottom neighbor
1528 renderRect.setWidth(std::max(1, renderRect.width() - separatorPx));
1529 }
1530 // slot 2 (bottom-right): no insets needed
1531 } else {
1532 const int col = slot % 2;
1533 const int row = slot / 2;
1534
1535 const bool hasRightNeighbor = (col == 0)
1536 && (slot + 1 < numEnabled)
1537 && ((slot / 2) == ((slot + 1) / 2));
1538 const bool hasBottomNeighbor = (row == 0)
1539 && (slot + 2 < numEnabled);
1540
1541 if (hasRightNeighbor) {
1542 renderRect.setWidth(std::max(1, renderRect.width() - separatorPx));
1543 }
1544 if (hasBottomNeighbor) {
1545 renderRect.setHeight(std::max(1, renderRect.height() - separatorPx));
1546 }
1547 }
1548 }
1549
1550 const int viewX = renderRect.x();
1551 const int viewY = outputSize.height() - (renderRect.y() + renderRect.height());
1552 const int viewW = std::max(1, renderRect.width());
1553 const int viewH = std::max(1, renderRect.height());
1554
1555 QRhiViewport viewport(viewX, viewY, viewW, viewH);
1556 QRhiScissor scissor(viewX, viewY, viewW, viewH);
1557 const float aspectRatio = float(viewW) / float(viewH);
1558
1559 // Set viewport and scissor
1560 cb->setViewport(viewport);
1561 cb->setScissor(scissor);
1562
1563 // Calculate camera for this viewport
1564 m_camera.setSceneCenter(m_sceneCenter);
1565 m_camera.setSceneSize(m_sceneSize);
1566 m_camera.setRotation(m_cameraRotation);
1567 m_camera.setZoom(m_zoom);
1568 const CameraResult cam = (m_viewMode == MultiView)
1569 ? m_camera.computeMultiView(sv, aspectRatio)
1570 : m_camera.computeSingleView(aspectRatio);
1571
1572 BrainRenderer::SceneData sceneData;
1573 sceneData.mvp = rhi()->clipSpaceCorrMatrix();
1574 sceneData.mvp *= cam.projection;
1575 sceneData.mvp *= cam.view;
1576 sceneData.mvp *= cam.model;
1577
1578 sceneData.cameraPos = cam.cameraPos;
1579 sceneData.lightDir = cam.cameraPos.normalized();
1580 sceneData.lightingEnabled = m_lightingEnabled;
1581 sceneData.viewportX = viewX;
1582 sceneData.viewportY = viewY;
1583 sceneData.viewportW = viewW;
1584 sceneData.viewportH = viewH;
1585 sceneData.scissorX = viewX;
1586 sceneData.scissorY = viewY;
1587 sceneData.scissorW = viewW;
1588 sceneData.scissorH = viewH;
1589
1590 // Per-draw overlayMode uniform — the shader selects the vertex colour
1591 // channel (curvature / annotation) so no per-pane vertex buffer
1592 // re-uploads are needed.
1593 sceneData.overlayMode = static_cast<float>(sv.overlayMode);
1594
1595 // Pass 1: Opaque Surfaces (Brain surfaces)
1596 // Use viewport-specific shader from subview
1597 BrainRenderer::ShaderMode currentShader = sv.brainShader;
1598 BrainRenderer::ShaderMode currentBemShader = sv.bemShader;
1599 const QString overlayName = visualizationModeName(sv.overlayMode);
1600
1601 // Collect matched brain surface keys for this pane's info panel
1602#ifndef __EMSCRIPTEN__
1603 QStringList drawnKeys;
1604#else
1605 const QString drawnInfo = QStringLiteral("merged");
1606#endif
1607
1608 if (m_viewMode == MultiView && m_viewportInfoLabels[vp]) {
1609 m_viewportInfoLabels[vp]->setText(
1610 QString("Shader: %1\nSurface: %2\nOverlay: %3")
1611 .arg(shaderModeName(currentShader), sv.surfaceType, overlayName));
1612 } else if (m_viewMode == SingleView && m_singleViewInfoLabel) {
1613 m_singleViewInfoLabel->setText(
1614 QString("Shader: %1\nSurface: %2\nOverlay: %3")
1615 .arg(shaderModeName(currentShader), sv.surfaceType, overlayName));
1616 }
1617
1618 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1619 if (!sv.matchesSurfaceType(it.key())) continue;
1620 if (!sv.shouldRenderSurface(it.key())) continue;
1621
1622#ifdef __EMSCRIPTEN__
1623 // WORKAROUND(QRhi-GLES2): Brain surfaces drawn via merged
1624 // per-category buffer (one drawIndexed per render pass).
1625 continue;
1626#endif
1627#ifndef __EMSCRIPTEN__
1628 drawnKeys << it.key();
1629#endif
1630 m_renderer->renderSurface(cb, rhi(), sceneData, it.value().get(), currentShader);
1631 }
1632
1633#ifndef __EMSCRIPTEN__
1634 // Update info panel with drawn brain surface keys after rendering
1635 {
1636 const QString drawnInfo = drawnKeys.isEmpty() ? QStringLiteral("none") : drawnKeys.join(QStringLiteral(", "));
1637 if (m_viewMode == MultiView && m_viewportInfoLabels[vp]) {
1638 m_viewportInfoLabels[vp]->setText(m_viewportInfoLabels[vp]->text()
1639 + QStringLiteral("\nDrawn: ") + drawnInfo);
1640 } else if (m_viewMode == SingleView && m_singleViewInfoLabel) {
1641 m_singleViewInfoLabel->setText(m_singleViewInfoLabel->text()
1642 + QStringLiteral("\nDrawn: ") + drawnInfo);
1643 }
1644 }
1645#endif
1646
1647#ifdef __EMSCRIPTEN__
1648 // ══════════════════════════════════════════════════════════════════════
1649 // WORKAROUND(QRhi-GLES2): Single-pass merged rendering.
1650 // The Qt QRhi GLES2/WebGL backend only renders the first drawIndexed()
1651 // per render pass, AND multi-pass compositing via PreserveColorContents
1652 // is unreliable. All visible surfaces (brain, BEM, sensors, source
1653 // space, digitizers) are merged into one VBO/IBO and drawn in a single
1654 // drawIndexed() call in the clearing pass.
1655 //
1656 // Remove when upstream Qt fixes the QRhi GLES2 drawIndexed bug.
1657 // ══════════════════════════════════════════════════════════════════════
1658 m_renderer->drawMergedSurfaces(cb, rhi(), sceneData, currentShader, QStringLiteral("default"));
1659
1660#else
1661
1662 // ── Batched desktop rendering ───────────────────────────────────────
1663 // Single pass over m_surfaces categorises non-brain items into opaque
1664 // and transparent draw lists. All uniform uploads are batched into
1665 // one QRhiResourceUpdateBatch and submitted once, eliminating per-
1666 // surface batch allocation and redundant viewport/scissor reassertion.
1667
1668 // Determine per-viewport field-map visibility
1669 const bool megFieldVisible = sv.visibility.megFieldMap;
1670 const bool eegFieldVisible = sv.visibility.eegFieldMap;
1671 const QString &megFieldKey = m_fieldMapper.megSurfaceKey();
1672 const QString &eegFieldKey = m_fieldMapper.eegSurfaceKey();
1673
1674 struct DrawItem {
1675 BrainSurface *surface;
1677 float overlayMode;
1678 float distSq; // for transparent back-to-front sort
1679 int uniformOffset; // filled by prepareSurfaceDraw
1680 };
1681
1682 QVector<DrawItem> opaqueDraws;
1683 QVector<DrawItem> transparentDraws;
1684
1685 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1686 const QString &key = it.key();
1687 BrainSurface *surf = it.value().get();
1688
1689 if (SubView::isBrainSurfaceKey(key)) {
1690 // Brain surfaces already rendered above
1691 continue;
1692 } else if (key.startsWith("srcsp_") || key.startsWith("dig_")) {
1693 if (!sv.shouldRenderSurface(key)) continue;
1694 if (!surf->isVisible()) continue;
1695 opaqueDraws.append({surf, currentShader,
1696 static_cast<float>(BrainSurface::ModeScientific),
1697 0.0f, -1});
1698 } else {
1699 bool isSensor = key.startsWith("sens_");
1700 bool isBem = key.startsWith("bem_");
1701 if (!isSensor && !isBem) continue;
1702 if (!sv.shouldRenderSurface(key)) continue;
1703 if (!surf->isVisible()) continue;
1704
1705 QVector3D bmin, bmax;
1706 surf->boundingBox(bmin, bmax);
1707 QVector3D center = (bmin + bmax) * 0.5f;
1708 float dist = (sceneData.cameraPos - center).lengthSquared();
1709
1710 auto mode = isBem ? currentBemShader : BrainRenderer::Holographic;
1711 float itemOverlay = static_cast<float>(BrainSurface::ModeScientific);
1712 if (key == megFieldKey && !megFieldVisible)
1713 itemOverlay = static_cast<float>(BrainSurface::ModeSurface);
1714 else if (key == eegFieldKey && !eegFieldVisible)
1715 itemOverlay = static_cast<float>(BrainSurface::ModeSurface);
1716
1717 transparentDraws.append({surf, mode, itemOverlay, dist, -1});
1718 }
1719 }
1720
1721 // Sort transparent items back-to-front for correct alpha blending
1722 std::sort(transparentDraws.begin(), transparentDraws.end(),
1723 [](const DrawItem &a, const DrawItem &b) { return a.distSq > b.distSq; });
1724
1725 // Batch all uniform uploads into a single resource update
1726 QRhiResourceUpdateBatch *surfBatch = rhi()->nextResourceUpdateBatch();
1727 BrainRenderer::SceneData batchData = sceneData;
1728
1729 for (auto &item : opaqueDraws) {
1730 batchData.overlayMode = item.overlayMode;
1731 item.uniformOffset = m_renderer->prepareSurfaceDraw(surfBatch, batchData, item.surface);
1732 }
1733 for (auto &item : transparentDraws) {
1734 batchData.overlayMode = item.overlayMode;
1735 item.uniformOffset = m_renderer->prepareSurfaceDraw(surfBatch, batchData, item.surface);
1736 }
1737
1738 cb->resourceUpdate(surfBatch);
1739
1740 // Set viewport/scissor once for all batched draws
1741 cb->setViewport(viewport);
1742 cb->setScissor(scissor);
1743
1744 // Issue all draw calls — no resource updates or state resets between them
1745 for (const auto &item : opaqueDraws)
1746 m_renderer->issueSurfaceDraw(cb, item.surface, item.mode, item.uniformOffset);
1747 for (const auto &item : transparentDraws)
1748 m_renderer->issueSurfaceDraw(cb, item.surface, item.mode, item.uniformOffset);
1749
1750 // Render Dipoles
1751 for(auto it = m_itemDipoleMap.begin(); it != m_itemDipoleMap.end(); ++it) {
1752 if (it.value()->isVisible() && sv.visibility.dipoles) {
1753 m_renderer->renderDipoles(cb, rhi(), sceneData, it.value().get());
1754 }
1755 }
1756
1757 if (sv.visibility.dipoles && m_dipoles) {
1758 m_renderer->renderDipoles(cb, rhi(), sceneData, m_dipoles.get());
1759 }
1760
1761 // Render Connectivity Network
1762 if (sv.visibility.network && m_network) {
1763 m_renderer->renderNetwork(cb, rhi(), sceneData, m_network.get());
1764 }
1765
1766 // Intersection Pointer
1767 if (m_hasIntersection && m_debugPointerSurface) {
1768 BrainRenderer::SceneData debugSceneData = sceneData;
1769 debugSceneData.overlayMode = 0.0f; // pass-through for holographic shell
1770
1771 QMatrix4x4 translation;
1772 translation.translate(m_lastIntersectionPoint);
1773
1774 debugSceneData.mvp = rhi()->clipSpaceCorrMatrix() * cam.projection * cam.view * cam.model * translation;
1775
1776 m_renderer->renderSurface(cb, rhi(), debugSceneData, m_debugPointerSurface.get(), BrainRenderer::Holographic);
1777 }
1778
1779#endif // !__EMSCRIPTEN__ — end of per-surface draw path
1780
1781 } // End of viewport loop
1782
1783 m_renderer->endPass(cb);
1784
1785 // On WASM, all surfaces are drawn in the single clearing pass above
1786 // via the merged "default" group. No additional preserving passes needed.
1787}
1788
1789//=============================================================================================================
1790
void BrainView::mousePressEvent(QMouseEvent *e)
{
    // Left-press handling, in priority order (multi-view only):
    //   1) click on a pane's name label -> select pane + open preset menu,
    //   2) click on a splitter -> start a splitter drag,
    //   3) click inside a pane -> make it the visualization edit target.
    // In all cases the press position is recorded for later drag deltas.
    if (e->button() == Qt::LeftButton) {
        // Reset the per-press rotation flag before any drag begins.
        m_perspectiveRotatedSincePress = false;
    }

    if (e->button() == Qt::LeftButton && m_viewMode == MultiView) {
        const int clickedVp = viewportIndexAt(e->pos());
        if (clickedVp >= 0 && m_viewportNameLabels[clickedVp] && m_viewportNameLabels[clickedVp]->isVisible()) {
            if (m_viewportNameLabels[clickedVp]->geometry().contains(e->pos())) {
                // Clicking a pane's name label selects it and opens the
                // camera-preset context menu for that pane.
                if (clickedVp != m_visualizationEditTarget) {
                    setVisualizationEditTarget(clickedVp);
                }
                showViewportPresetMenu(clickedVp, mapToGlobal(e->pos()));
                m_lastMousePos = e->pos();
                return;
            }
        }

        // Splitter hit: begin a drag; subsequent mouseMoveEvents resize panes.
        const int numEnabled = enabledViewportCount();
        const SplitterHit hit = hitTestSplitter(e->pos(), numEnabled, size());
        if (hit != SplitterHit::None) {
            m_isDraggingSplitter = true;
            m_activeSplitter = hit;
            m_lastMousePos = e->pos();
            updateSplitterCursor(e->pos());
            return;
        }

        // Select the clicked viewport as the active edit target
        const int clickedVpForSelection = viewportIndexAt(e->pos());
        if (clickedVpForSelection >= 0 && clickedVpForSelection != m_visualizationEditTarget) {
            setVisualizationEditTarget(clickedVpForSelection);
        }
    }

    m_lastMousePos = e->pos();
}
1829
1830//=============================================================================================================
1831
1832void BrainView::mouseMoveEvent(QMouseEvent *event)
1833{
1834 if (m_isDraggingSplitter && (event->buttons() & Qt::LeftButton)) {
1835 m_layout.dragSplitter(event->pos(), m_activeSplitter, size());
1836 m_multiSplitX = m_layout.splitX();
1837 m_multiSplitY = m_layout.splitY();
1838
1839 m_lastMousePos = event->pos();
1840 updateViewportSeparators();
1841 m_sceneDirty = true; update();
1842 return;
1843 }
1844
1845 if (event->buttons() & Qt::LeftButton) {
1846 if (m_viewMode == MultiView) {
1847 const int activeVp = viewportIndexAt(event->pos());
1848 const int activePreset = (activeVp >= 0 && activeVp < m_subViews.size())
1849 ? std::clamp(m_subViews[activeVp].preset, 0, 6)
1850 : 1;
1851
1852 if (activeVp >= 0 && !multiViewPresetIsPerspective(activePreset)) {
1853 // Planar views (Top/Front/Left): pan along the view plane
1854 const QPoint diff = event->pos() - m_lastMousePos;
1855 CameraController::applyMousePan(diff, m_subViews[activeVp].pan, m_sceneSize);
1856 m_lastMousePos = event->pos();
1857 m_sceneDirty = true; update();
1858 return;
1859 }
1860
1861 if (activeVp >= 0 && multiViewPresetIsPerspective(activePreset)) {
1862 // Perspective view: rotate
1863 QPoint diff = event->pos() - m_lastMousePos;
1864 CameraController::applyMouseRotation(diff, m_subViews[activeVp].perspectiveRotation);
1865
1866 m_perspectiveRotatedSincePress = true;
1867 m_lastMousePos = event->pos();
1868 m_sceneDirty = true; update();
1869 return;
1870 }
1871
1872 m_lastMousePos = event->pos();
1873 return;
1874 }
1875
1876 // Single-view rotation
1877 QPoint diff = event->pos() - m_lastMousePos;
1878 CameraController::applyMouseRotation(diff, m_cameraRotation);
1879
1880 m_lastMousePos = event->pos();
1881 m_sceneDirty = true; update();
1882 } else {
1883 if (m_viewMode == MultiView) {
1884 updateSplitterCursor(event->pos());
1885 } else {
1886 unsetCursor();
1887 }
1888 castRay(event->pos());
1889 }
1890}
1891
1892//=============================================================================================================
1893
1894void BrainView::mouseReleaseEvent(QMouseEvent *event)
1895{
1896 if (event->button() == Qt::LeftButton && m_isDraggingSplitter) {
1897 m_isDraggingSplitter = false;
1898 m_activeSplitter = SplitterHit::None;
1899 saveMultiViewSettings();
1900 updateSplitterCursor(event->pos());
1901 return;
1902 }
1903
1904 if (event->button() == Qt::LeftButton && m_viewMode == MultiView && m_perspectiveRotatedSincePress) {
1905 m_perspectiveRotatedSincePress = false;
1906 saveMultiViewSettings();
1907 }
1908
1909 // Save pan offset after dragging in a planar viewport
1910 if (event->button() == Qt::LeftButton && m_viewMode == MultiView && !m_perspectiveRotatedSincePress) {
1911 saveMultiViewSettings();
1912 }
1913
1914 if (m_viewMode == MultiView) {
1915 updateSplitterCursor(event->pos());
1916 } else {
1917 unsetCursor();
1918 }
1919}
1920
1921//=============================================================================================================
1922
1923void BrainView::wheelEvent(QWheelEvent *event)
1924{
1925 const float delta = event->angleDelta().y() / 120.0f;
1926
1927 if (m_viewMode == MultiView) {
1928 const int vp = viewportIndexAt(event->position().toPoint());
1929 if (vp >= 0 && vp < m_subViews.size()) {
1930 m_subViews[vp].zoom += delta;
1931 saveMultiViewSettings();
1932 }
1933 } else {
1934 m_zoom += delta;
1935 }
1936 m_sceneDirty = true; update();
1937}
1938
1939//=============================================================================================================
1940
1941void BrainView::keyPressEvent(QKeyEvent *event)
1942{
1943 if (event->key() == Qt::Key_S) {
1944 saveSnapshot();
1945 } else if (event->key() == Qt::Key_R) {
1946 m_cameraRotation = QQuaternion();
1947 logPerspectiveRotation("reset-initial");
1948 saveMultiViewSettings();
1949 m_sceneDirty = true; update();
1950 }
1951}
1952
1953//=============================================================================================================
1954
1955bool BrainView::loadSourceEstimate(const QString &lhPath, const QString &rhPath)
1956{
1957 return m_sourceManager.load(lhPath, rhPath, m_surfaces, m_activeSurfaceType);
1958}
1959
1960//=============================================================================================================
1961
1962void BrainView::onSourceEstimateLoaded(int numTimePoints)
1963{
1964 setVisualizationMode("Source Estimate");
1965 emit sourceEstimateLoaded(numTimePoints);
1966 setTimePoint(0);
1967}
1968
1969//=============================================================================================================
1970
1972{
1973 m_sourceManager.setTimePoint(index, m_surfaces, m_singleView, m_subViews);
1974 m_sceneDirty = true; update();
1975}
1976
1977//=============================================================================================================
1978
1979void BrainView::setSourceColormap(const QString &name)
1980{
1981 m_sourceManager.setColormap(name);
1982 setTimePoint(m_sourceManager.currentTimePoint());
1983}
1984
1985//=============================================================================================================
1986
1987void BrainView::setSourceThresholds(float min, float mid, float max)
1988{
1989 m_sourceManager.setThresholds(min, mid, max);
1990 setTimePoint(m_sourceManager.currentTimePoint());
1991}
1992
1993//=============================================================================================================
1994
1996{
1997 setVisualizationMode("Source Estimate");
1998 m_sourceManager.startStreaming(m_surfaces, m_singleView, m_subViews);
1999}
2000
2001//=============================================================================================================
2002
2004{
2005 m_sourceManager.stopStreaming();
2006}
2007
2008//=============================================================================================================
2009
2011{
2012 return m_sourceManager.isStreaming();
2013}
2014
2015//=============================================================================================================
2016
2017void BrainView::pushRealtimeSourceData(const Eigen::VectorXd &data)
2018{
2019 m_sourceManager.pushData(data);
2020}
2021
2022//=============================================================================================================
2023
2025{
2026 m_sourceManager.setInterval(msec);
2027}
2028
2029//=============================================================================================================
2030
2032{
2033 m_sourceManager.setLooping(enabled);
2034}
2035
2036//=============================================================================================================
2037
2038void BrainView::onRealtimeColorsAvailable(const QVector<uint32_t> &colorsLh,
2039 const QVector<uint32_t> &colorsRh)
2040{
2041 // Apply colors to all brain surfaces matching active surface types
2042 QSet<QString> activeTypes;
2043 activeTypes.insert(m_singleView.surfaceType);
2044 for (int i = 0; i < m_subViews.size(); ++i) {
2045 activeTypes.insert(m_subViews[i].surfaceType);
2046 }
2047
2048 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
2049 if (!it.value() || it.value()->tissueType() != BrainSurface::TissueBrain)
2050 continue;
2051
2052 for (const QString &type : activeTypes) {
2053 if (it.key().endsWith(type)) {
2054 int hemi = it.value()->hemi();
2055 const QVector<uint32_t> &colors = (hemi == 0) ? colorsLh : colorsRh;
2056 if (!colors.isEmpty()) {
2057 it.value()->applySourceEstimateColors(colors);
2058 }
2059 break;
2060 }
2061 }
2062 }
2063
2064 m_sceneDirty = true; update();
2065}
2066
2067//=============================================================================================================
2068
// Loads an evoked data set and builds/reuses the sensor-to-surface field
// mapping. Returns false when the file is empty or the mapping build fails.
bool BrainView::loadSensorField(const QString &evokedPath, int aveIndex)
{
    auto evoked = DataLoader::loadEvoked(evokedPath, aveIndex);
    if (evoked.isEmpty()) return false;

    // Preserve the current time point when switching between evoked sets
    // that share the same sensor configuration (same file, different condition).
    const int previousTimePoint = m_fieldMapper.timePoint();
    const bool canReuse = m_fieldMapper.hasMappingFor(evoked);

    m_fieldMapper.setEvoked(evoked);

    if (!canReuse) {
        // Sensor config changed — full rebuild required (also precomputes global range)
        if (!m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans)) {
            m_fieldMapper.setEvoked(FIFFLIB::FiffEvoked()); // Clear state on failure
            return false;
        }
    } else {
        // Mapping reused — recompute normalization for new evoked data
        m_fieldMapper.computeNormRange();
    }

    // Clamp preserved time point to the range of the new evoked data
    const int numTimes = static_cast<int>(m_fieldMapper.evoked().times.size());
    const int tp = qBound(0, previousTimePoint, numTimes - 1);

    emit sensorFieldLoaded(numTimes, tp);
    // NOTE(review): one statement appears to have been elided here by the doc
    // extraction (a linked call, likely applying `tp`); check upstream source.
    return true;
}
2100
2101//=============================================================================================================
2102
2103QStringList BrainView::probeEvokedSets(const QString &evokedPath)
2104{
2105 return DataLoader::probeEvokedSets(evokedPath);
2106}
2107
2108//=============================================================================================================
2109
2111{
2112 if (!m_fieldMapper.isLoaded() || m_fieldMapper.evoked().isEmpty()) {
2113 return;
2114 }
2115
2116 int maxIdx = static_cast<int>(m_fieldMapper.evoked().times.size()) - 1;
2117 if (maxIdx < 0) {
2118 return;
2119 }
2120
2121 m_fieldMapper.setTimePoint(qBound(0, index, maxIdx));
2122 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2123 emit sensorFieldTimePointChanged(m_fieldMapper.timePoint(), m_fieldMapper.evoked().times(m_fieldMapper.timePoint()));
2124 m_sceneDirty = true; update();
2125}
2126
2127//=============================================================================================================
2128
2129void BrainView::setSensorFieldVisible(const QString &type, bool visible)
2130{
2131 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2132 if (type == "MEG") {
2133 profile.megFieldMap = visible;
2134 } else if (type == "EEG") {
2135 profile.eegFieldMap = visible;
2136 } else {
2137 return;
2138 }
2139
2140 saveMultiViewSettings();
2141 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2142 m_sceneDirty = true; update();
2143}
2144
2145//=============================================================================================================
2146
2147void BrainView::setSensorFieldContourVisible(const QString &type, bool visible)
2148{
2149 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2150 if (type == "MEG") {
2151 profile.megFieldContours = visible;
2152 } else if (type == "EEG") {
2153 profile.eegFieldContours = visible;
2154 } else {
2155 return;
2156 }
2157
2158 saveMultiViewSettings();
2159 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2160 m_sceneDirty = true; update();
2161}
2162
2163//=============================================================================================================
2164
2166{
2167 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2168 if (profile.megFieldMapOnHead == useHead && m_fieldMapper.megFieldMapOnHead() == useHead) {
2169 return;
2170 }
2171
2172 profile.megFieldMapOnHead = useHead;
2173 m_fieldMapper.setMegFieldMapOnHead(useHead);
2174 saveMultiViewSettings();
2175 if (m_fieldMapper.isLoaded()) {
2176 m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans);
2177 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2178 m_sceneDirty = true; update();
2179 }
2180}
2181
2182//=============================================================================================================
2183
2184void BrainView::setSensorFieldColormap(const QString &name)
2185{
2186 if (m_fieldMapper.colormap() == name) {
2187 return;
2188 }
2189 m_fieldMapper.setColormap(name);
2190 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2191 m_sceneDirty = true; update();
2192}
2193
2194//=============================================================================================================
2195
2197{
2198 return m_sourceManager.tstep();
2199}
2200
2201//=============================================================================================================
2202
2204{
2205 return m_sourceManager.tmin();
2206}
2207
2208//=============================================================================================================
2209
2211{
2212 return m_sourceManager.numTimePoints();
2213}
2214
2215//=============================================================================================================
2216
2218{
2219 if (!m_fieldMapper.isLoaded() || m_fieldMapper.evoked().nave == -1 || m_fieldMapper.evoked().times.size() == 0) {
2220 return -1;
2221 }
2222
2223 int bestIdx = 0;
2224 float bestDist = std::abs(m_fieldMapper.evoked().times(0) - timeSec);
2225 for (int i = 1; i < m_fieldMapper.evoked().times.size(); ++i) {
2226 float dist = std::abs(m_fieldMapper.evoked().times(i) - timeSec);
2227 if (dist < bestDist) {
2228 bestDist = dist;
2229 bestIdx = i;
2230 }
2231 }
2232 return bestIdx;
2233}
2234
2235//=============================================================================================================
2236
2237int BrainView::closestStcIndex(float timeSec) const
2238{
2239 return m_sourceManager.closestIndex(timeSec);
2240}
2241
2242//=============================================================================================================
2243
2244bool BrainView::sensorFieldTimeRange(float &tmin, float &tmax) const
2245{
2246 if (!m_fieldMapper.isLoaded() || m_fieldMapper.evoked().nave == -1 || m_fieldMapper.evoked().times.size() == 0) {
2247 return false;
2248 }
2249 tmin = m_fieldMapper.evoked().times(0);
2250 tmax = m_fieldMapper.evoked().times(m_fieldMapper.evoked().times.size() - 1);
2251 return true;
2252}
2253
2254//=============================================================================================================
2255// ── Real-time sensor data streaming ────────────────────────────────────
2256//=============================================================================================================
2257
2258void BrainView::startRealtimeSensorStreaming(const QString &modality)
2259{
2260 m_sensorStreamManager.startStreaming(modality, m_fieldMapper, m_surfaces);
2261}
2262
2263//=============================================================================================================
2264
2266{
2267 m_sensorStreamManager.stopStreaming();
2268}
2269
2270//=============================================================================================================
2271
2273{
2274 return m_sensorStreamManager.isStreaming();
2275}
2276
2277//=============================================================================================================
2278
2279void BrainView::pushRealtimeSensorData(const Eigen::VectorXf &data)
2280{
2281 m_sensorStreamManager.pushData(data);
2282}
2283
2284//=============================================================================================================
2285
2287{
2288 m_sensorStreamManager.setInterval(msec);
2289}
2290
2291//=============================================================================================================
2292
2294{
2295 m_sensorStreamManager.setLooping(enabled);
2296}
2297
2298//=============================================================================================================
2299
2301{
2302 m_sensorStreamManager.setAverages(numAvr);
2303}
2304
2305//=============================================================================================================
2306
2308{
2309 m_sensorStreamManager.setColormap(name);
2310}
2311
2312//=============================================================================================================
2313
2314void BrainView::onSensorStreamColorsAvailable(const QString &surfaceKey,
2315 const QVector<uint32_t> &colors)
2316{
2317 if (surfaceKey.isEmpty() || !m_surfaces.contains(surfaceKey)) {
2318 return;
2319 }
2320
2321 auto surface = m_surfaces[surfaceKey];
2322 if (surface && !colors.isEmpty()) {
2323 surface->applySourceEstimateColors(colors);
2324 }
2325
2326 m_sceneDirty = true; update();
2327}
2328
2329//=============================================================================================================
2330
2331bool BrainView::loadSensors(const QString &fifPath) {
2332 auto r = DataLoader::loadSensors(fifPath, m_megHelmetOverridePath);
2333 if (!r.hasInfo && !r.hasDigitizer) return false;
2334
2335 // Store Device→Head transform for later helmet surface reloads
2336 m_devHeadTrans = r.devHeadTrans;
2337 m_hasDevHead = r.hasDevHead;
2338
2339 if (!r.megGradItems.isEmpty()) m_model->addSensors("MEG/Grad", r.megGradItems);
2340 if (!r.megMagItems.isEmpty()) m_model->addSensors("MEG/Mag", r.megMagItems);
2341 if (!r.eegItems.isEmpty()) m_model->addSensors("EEG", r.eegItems);
2342
2343 if (r.helmetSurface) {
2344 m_surfaces["sens_surface_meg"] = r.helmetSurface;
2345 } else {
2346 qWarning() << "BrainView::loadSensors: NO helmet surface returned from DataLoader!";
2347 }
2348
2349 if (!r.digitizerPoints.isEmpty())
2350 m_model->addDigitizerData(r.digitizerPoints);
2351
2352 return true;
2353}
2354
2355//=============================================================================================================
2356
2357bool BrainView::loadMegHelmetSurface(const QString &helmetFilePath) {
2358 auto surface = DataLoader::loadHelmetSurface(helmetFilePath, m_devHeadTrans, m_hasDevHead);
2359 if (!surface) {
2360 qWarning() << "BrainView::loadMegHelmetSurface: DataLoader returned nullptr!";
2361 return false;
2362 }
2363
2364 m_surfaces["sens_surface_meg"] = surface;
2365 refreshSensorTransforms();
2366 updateSceneBounds();
2367 m_sceneDirty = true; update();
2368 return true;
2369}
2370
2371//=============================================================================================================
2372
2373bool BrainView::loadDipoles(const QString &dipPath)
2374{
2375 auto ecdSet = DataLoader::loadDipoles(dipPath);
2376 if (ecdSet.size() == 0) return false;
2377 m_model->addDipoles(ecdSet);
2378 return true;
2379}
2380
2381//=============================================================================================================
2382
2383bool BrainView::loadNetwork(const CONNLIB::Network &network, const QString &name)
2384{
2385 if (network.getNodes().isEmpty()) return false;
2386
2387 m_network = std::make_unique<NetworkObject>();
2388 m_network->load(network);
2389 m_network->setVisible(true);
2390
2391 // Also register in the tree model
2392 m_model->addNetwork(network, name);
2393
2394 m_sceneDirty = true; update();
2395 return true;
2396}
2397
2398//=============================================================================================================
2399
2401{
2402 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2403 profile.network = visible;
2404 m_networkVisible = visible;
2405 if (m_network) m_network->setVisible(visible);
2406 saveMultiViewSettings();
2407 m_sceneDirty = true; update();
2408}
2409
2410//=============================================================================================================
2411
2413{
2414 if (m_network) {
2415 m_network->setThreshold(threshold);
2416 m_sceneDirty = true; update();
2417 }
2418}
2419
2420//=============================================================================================================
2421
2422void BrainView::setNetworkColormap(const QString &name)
2423{
2424 if (m_network) {
2425 m_network->setColormap(name);
2426 m_sceneDirty = true; update();
2427 }
2428}
2429
2430//=============================================================================================================
2431
2432bool BrainView::loadSourceSpace(const QString &fwdPath)
2433{
2434 auto srcSpace = DataLoader::loadSourceSpace(fwdPath);
2435 if (srcSpace.isEmpty()) return false;
2436 m_model->addSourceSpace(srcSpace);
2437 return true;
2438}
2439
2440//=============================================================================================================
2441
2443{
2444 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2445 profile.sourceSpace = visible;
2446 saveMultiViewSettings();
2447 m_sceneDirty = true; update();
2448}
2449
2450//=============================================================================================================
2451
2452bool BrainView::loadTransformation(const QString &transPath)
2453{
2454 FiffCoordTrans trans;
2455 if (!DataLoader::loadHeadToMriTransform(transPath, trans))
2456 return false;
2457
2458 m_headToMriTrans = trans;
2459 refreshSensorTransforms();
2460 return true;
2461}
2462
2463void BrainView::refreshSensorTransforms()
2464{
2465 QMatrix4x4 qmat;
2466 if (m_applySensorTrans && !m_headToMriTrans.isEmpty()) {
2467 qmat = SURFACEKEYS::toQMatrix4x4(m_headToMriTrans.trans);
2468 }
2469
2470 int surfCount = 0;
2471 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
2472 if ((it.key().startsWith("sens_") || it.key().startsWith("dig_")) && it.value()) {
2473 it.value()->applyTransform(qmat);
2474 surfCount++;
2475 }
2476 }
2477
2478 if (m_fieldMapper.isLoaded()) {
2479 m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans);
2480 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2481 }
2482}
2483
2484//=============================================================================================================
2485
// Hover picking: unprojects the cursor into a world-space ray, intersects it
// with the scene, and updates hover highlight/label state. Two condition
// lines in this function were elided by the doc extraction (marked below).
void BrainView::castRay(const QPoint &pos)
{
    // 1. Setup Matrix Stack (Must match render exactly, including multiview pane layout)
    const QSize outputSize = size();

    const auto enabledViewports = enabledViewportIndices();

    const int numEnabled = enabledViewports.size();
    int activeSlot = 0;
    QRect activePane(0, 0, outputSize.width(), outputSize.height());

    // Find the pane under the cursor so the unprojection uses the same
    // viewport rect the renderer used.
    bool hasValidPane = true;
    if (m_viewMode == MultiView && numEnabled > 1) {
        bool foundSlot = false;
        for (int slot = 0; slot < numEnabled; ++slot) {
            const QRect pane = multiViewSlotRect(slot, numEnabled, outputSize);
            if (pane.contains(pos)) {
                activeSlot = slot;
                activePane = pane;
                foundSlot = true;
                break;
            }
        }

        hasValidPane = foundSlot;
    }

    const int vp = (m_viewMode == MultiView) ? enabledViewports[activeSlot] : 0;
    const SubView &sv = (m_viewMode == MultiView) ? m_subViews[vp] : m_singleView;

    // Rebuild the camera exactly as the render path does for this pane.
    m_camera.setSceneCenter(m_sceneCenter);
    m_camera.setSceneSize(m_sceneSize);
    m_camera.setRotation(m_cameraRotation);
    m_camera.setZoom(m_zoom);
    const float aspect = float(std::max(1, activePane.width())) / float(std::max(1, activePane.height()));
    const CameraResult cam = (m_viewMode == MultiView)
        ? m_camera.computeMultiView(sv, aspect)
        : m_camera.computeSingleView(aspect);
    QMatrix4x4 pvm = cam.projection * cam.view * cam.model;

    // ── Unproject screen position to world-space ray ───────────────────
    QVector3D rayOrigin, rayDir;
    if (!RayPicker::unproject(pos, activePane, pvm, rayOrigin, rayDir))
        return;

    // ── Pick against all scene geometry ────────────────────────────────
    PickResult pickResult;
    if (hasValidPane) {
        pickResult = RayPicker::pick(rayOrigin, rayDir, sv, m_surfaces, m_itemSurfaceMap, m_itemDipoleMap);
    }
    // Remember the hit point — the render path draws a debug pointer there.
    m_hasIntersection = pickResult.hit;
    if (pickResult.hit) {
        m_lastIntersectionPoint = pickResult.hitPoint;
    }

    QStandardItem *hitItem = pickResult.item;
    int hitIndex = pickResult.vertexIndex;

    // ── Build hover label ──────────────────────────────────────────────
    const QString displayLabel = RayPicker::buildLabel(pickResult, m_itemSurfaceMap, m_surfaces);
    const QString &hitKey = pickResult.surfaceKey;
    int currentRegionId = pickResult.regionId;

    // Update the on-screen hover label only when the text actually changed.
    if (displayLabel != m_hoveredRegion) {
        m_hoveredRegion = displayLabel;
        emit hoveredRegionChanged(m_hoveredRegion);
        if (m_regionLabel) {
            if (m_hoveredRegion.isEmpty()) {
                m_regionLabel->hide();
            } else {
                m_regionLabel->setText(m_hoveredRegion);
                m_regionLabel->show();
            }
        }
    }

    // The MEG helmet surface is tracked by key rather than by tree item.
    QString hoveredSurfaceKey;
    if (hitKey.startsWith("sens_surface_meg")) {
        hoveredSurfaceKey = hitKey;
    }

    // Hover target changed → swap the selection highlight.
    if (hitItem != m_hoveredItem || hitIndex != m_hoveredIndex || hoveredSurfaceKey != m_hoveredSurfaceKey) {
        // Deselect previous
        if (m_hoveredItem) {
            if (m_itemSurfaceMap.contains(m_hoveredItem)) {
                m_itemSurfaceMap[m_hoveredItem]->setSelected(false);
                m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(-1);
                m_itemSurfaceMap[m_hoveredItem]->setSelectedVertexRange(-1, 0);
            } else if (m_itemDipoleMap.contains(m_hoveredItem)) {
                m_itemDipoleMap[m_hoveredItem]->setSelected(m_hoveredIndex, false);
            }
        }
        if (!m_hoveredSurfaceKey.isEmpty() && m_surfaces.contains(m_hoveredSurfaceKey)) {
            m_surfaces[m_hoveredSurfaceKey]->setSelected(false);
            m_surfaces[m_hoveredSurfaceKey]->setSelectedRegion(-1);
            m_surfaces[m_hoveredSurfaceKey]->setSelectedVertexRange(-1, 0);
        }

        m_hoveredItem = hitItem;
        m_hoveredIndex = hitIndex;
        m_hoveredSurfaceKey = hoveredSurfaceKey;

        if (m_hoveredItem) {
            // Select new
            if (m_itemSurfaceMap.contains(m_hoveredItem)) {
                // Check if this is a digitizer batched mesh — highlight single sphere
                AbstractTreeItem* absHitSel = dynamic_cast<AbstractTreeItem*>(m_hoveredItem);
                bool isDigitizer = absHitSel &&
                // NOTE(review): the remainder of this condition was elided by
                // the doc extraction (linked identifiers dropped); see the
                // upstream source for the full expression.

                if (isDigitizer && m_hoveredIndex >= 0) {
                    const int vertsPerSphere = MeshFactory::sphereVertexCount();
                    int sphereIdx = m_hoveredIndex / vertsPerSphere;
                    m_itemSurfaceMap[m_hoveredItem]->setSelectedVertexRange(
                        sphereIdx * vertsPerSphere, vertsPerSphere);
                } else if (currentRegionId != -1) {
                    m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(currentRegionId);
                    // Keep the surface selected so the shader gold glow
                    // activates. The old CPU vertex-color region highlight
                    // was removed to avoid buffer re-uploads on WASM.
                    m_itemSurfaceMap[m_hoveredItem]->setSelected(true);
                } else {
                    m_itemSurfaceMap[m_hoveredItem]->setSelected(true);
                    m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(-1);
                }
            } else if (m_itemDipoleMap.contains(m_hoveredItem)) {
                m_itemDipoleMap[m_hoveredItem]->setSelected(m_hoveredIndex, true);
            }
        } else if (!m_hoveredSurfaceKey.isEmpty() && m_surfaces.contains(m_hoveredSurfaceKey)) {
            m_surfaces[m_hoveredSurfaceKey]->setSelected(true);
            m_surfaces[m_hoveredSurfaceKey]->setSelectedRegion(-1);
        }
    } else if (m_hoveredItem && m_itemSurfaceMap.contains(m_hoveredItem)) {
        // Hover target unchanged: refresh the highlight state in place.
        AbstractTreeItem* absHitUpd = dynamic_cast<AbstractTreeItem*>(m_hoveredItem);
        bool isDigitizer = absHitUpd &&
        // NOTE(review): condition elided by the doc extraction; see the
        // upstream source for the full expression.

        if (isDigitizer && m_hoveredIndex >= 0) {
            const int vertsPerSphere = MeshFactory::sphereVertexCount();
            int sphereIdx = m_hoveredIndex / vertsPerSphere;
            m_itemSurfaceMap[m_hoveredItem]->setSelectedVertexRange(
                sphereIdx * vertsPerSphere, vertsPerSphere);
        } else if (currentRegionId != -1) {
            m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(currentRegionId);
            m_itemSurfaceMap[m_hoveredItem]->setSelected(true);
        } else {
            m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(-1);
            m_itemSurfaceMap[m_hoveredItem]->setSelected(true);
        }
    } else if (!m_hoveredSurfaceKey.isEmpty() && m_surfaces.contains(m_hoveredSurfaceKey)) {
        m_surfaces[m_hoveredSurfaceKey]->setSelected(true);
    }
    m_sceneDirty = true; update();
}
2640
2641//=============================================================================================================
2642
2643void BrainView::showViewportPresetMenu(int viewport, const QPoint &globalPos)
2644{
2645 if (viewport < 0 || viewport >= m_subViews.size()) {
2646 return;
2647 }
2648
2649 QMenu menu;
2650 QAction *topAction = menu.addAction("Top");
2651 QAction *perspectiveAction = menu.addAction("Perspective");
2652 QAction *frontAction = menu.addAction("Front");
2653 QAction *leftAction = menu.addAction("Left");
2654 menu.addSeparator();
2655 QAction *bottomAction = menu.addAction("Bottom");
2656 QAction *backAction = menu.addAction("Back");
2657 QAction *rightAction = menu.addAction("Right");
2658
2659 const int currentPreset = std::clamp(m_subViews[viewport].preset, 0, 6);
2660 topAction->setCheckable(true);
2661 perspectiveAction->setCheckable(true);
2662 frontAction->setCheckable(true);
2663 leftAction->setCheckable(true);
2664 bottomAction->setCheckable(true);
2665 backAction->setCheckable(true);
2666 rightAction->setCheckable(true);
2667
2668 topAction->setChecked(currentPreset == 0);
2669 perspectiveAction->setChecked(currentPreset == 1);
2670 frontAction->setChecked(currentPreset == 2);
2671 leftAction->setChecked(currentPreset == 3);
2672 bottomAction->setChecked(currentPreset == 4);
2673 backAction->setChecked(currentPreset == 5);
2674 rightAction->setChecked(currentPreset == 6);
2675
2676 QAction *selected = menu.exec(globalPos);
2677 if (!selected) {
2678 return;
2679 }
2680
2681 int newPreset = currentPreset;
2682 if (selected == topAction) {
2683 newPreset = 0;
2684 } else if (selected == perspectiveAction) {
2685 newPreset = 1;
2686 } else if (selected == frontAction) {
2687 newPreset = 2;
2688 } else if (selected == leftAction) {
2689 newPreset = 3;
2690 } else if (selected == bottomAction) {
2691 newPreset = 4;
2692 } else if (selected == backAction) {
2693 newPreset = 5;
2694 } else if (selected == rightAction) {
2695 newPreset = 6;
2696 }
2697
2698 if (newPreset == currentPreset) {
2699 return;
2700 }
2701
2702 m_subViews[viewport].preset = newPreset;
2703 saveMultiViewSettings();
2704 updateOverlayLayout();
2705 m_sceneDirty = true; update();
2706}
2707
2708//=============================================================================================================
2709// Data removal
2710//=============================================================================================================
2711
2716void BrainView::removeSurfacesByPrefix(const QString &prefix)
2717{
2718 // Collect keys first to avoid modifying the map while iterating
2719 QStringList keysToRemove;
2720 for (auto it = m_surfaces.cbegin(); it != m_surfaces.cend(); ++it) {
2721 if (it.key().startsWith(prefix))
2722 keysToRemove << it.key();
2723 }
2724 for (const QString &key : keysToRemove)
2725 m_surfaces.remove(key);
2726
2727 // Remove corresponding itemSurfaceMap entries + model rows
2728 QList<const QStandardItem*> itemsToRemove;
2729 for (auto it = m_itemSurfaceMap.cbegin(); it != m_itemSurfaceMap.cend(); ++it) {
2730 bool found = false;
2731 for (const QString &key : keysToRemove) {
2732 // Check if this item's surface matches any removed surface
2733 for (auto sit = m_surfaces.cbegin(); sit != m_surfaces.cend(); ++sit) {
2734 if (sit.value() == it.value()) { found = true; break; }
2735 }
2736 }
2737 // If the surface is no longer in m_surfaces, it was removed
2738 bool stillPresent = false;
2739 for (auto sit = m_surfaces.cbegin(); sit != m_surfaces.cend(); ++sit) {
2740 if (sit.value() == it.value()) { stillPresent = true; break; }
2741 }
2742 if (!stillPresent)
2743 itemsToRemove << it.key();
2744 }
2745 for (const QStandardItem *item : itemsToRemove) {
2746 m_itemSurfaceMap.remove(item);
2747 // Remove from model
2748 if (m_model) {
2749 QStandardItem *mutableItem = const_cast<QStandardItem*>(item);
2750 if (mutableItem->parent())
2751 mutableItem->parent()->removeRow(mutableItem->row());
2752 else
2753 m_model->removeRow(mutableItem->row());
2754 }
2755 }
2756}
2757
2758//=============================================================================================================
2759
2761{
2762 // Remove brain surfaces (lh_*, rh_*)
2763 QStringList keysToRemove;
2764 for (auto it = m_surfaces.cbegin(); it != m_surfaces.cend(); ++it) {
2765 if (it.key().startsWith("lh_") || it.key().startsWith("rh_"))
2766 keysToRemove << it.key();
2767 }
2768
2769 // Clean up itemSurfaceMap
2770 for (auto it = m_itemSurfaceMap.begin(); it != m_itemSurfaceMap.end(); ) {
2771 bool remove = false;
2772 for (const QString &key : keysToRemove) {
2773 if (m_surfaces.contains(key) && m_surfaces[key] == it.value()) {
2774 remove = true;
2775 break;
2776 }
2777 }
2778 if (remove) {
2779 if (m_model) {
2780 QStandardItem *mutableItem = const_cast<QStandardItem*>(it.key());
2781 if (mutableItem->parent())
2782 mutableItem->parent()->removeRow(mutableItem->row());
2783 else
2784 m_model->removeRow(mutableItem->row());
2785 }
2786 it = m_itemSurfaceMap.erase(it);
2787 } else {
2788 ++it;
2789 }
2790 }
2791
2792 for (const QString &key : keysToRemove)
2793 m_surfaces.remove(key);
2794
2795 m_activeSurface.reset();
2796 m_activeSurfaceType.clear();
2797 updateSceneBounds();
2798 m_sceneDirty = true; update();
2799}
2800
2801//=============================================================================================================
2802
2804{
2805 QStringList keysToRemove;
2806 for (auto it = m_surfaces.cbegin(); it != m_surfaces.cend(); ++it) {
2807 if (it.key().startsWith("bem_"))
2808 keysToRemove << it.key();
2809 }
2810
2811 for (auto it = m_itemSurfaceMap.begin(); it != m_itemSurfaceMap.end(); ) {
2812 bool remove = false;
2813 for (const QString &key : keysToRemove) {
2814 if (m_surfaces.contains(key) && m_surfaces[key] == it.value()) {
2815 remove = true;
2816 break;
2817 }
2818 }
2819 if (remove) {
2820 if (m_model) {
2821 QStandardItem *mutableItem = const_cast<QStandardItem*>(it.key());
2822 if (mutableItem->parent())
2823 mutableItem->parent()->removeRow(mutableItem->row());
2824 else
2825 m_model->removeRow(mutableItem->row());
2826 }
2827 it = m_itemSurfaceMap.erase(it);
2828 } else {
2829 ++it;
2830 }
2831 }
2832
2833 for (const QString &key : keysToRemove)
2834 m_surfaces.remove(key);
2835
2836 updateSceneBounds();
2837 m_sceneDirty = true; update();
2838}
2839
2840//=============================================================================================================
2841
2843{
2844 m_sourceManager.stopStreaming();
2845 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
2846 if (it.key().startsWith("lh_") || it.key().startsWith("rh_")) {
2847 it.value()->clearSourceEstimateColors();
2848 }
2849 }
2850 m_sceneDirty = true; update();
2851}
2852
2853//=============================================================================================================
2854
2856{
2857 m_dipoles.reset();
2858
2859 // Remove dipole items from model and maps
2860 for (auto it = m_itemDipoleMap.begin(); it != m_itemDipoleMap.end(); ) {
2861 if (m_model) {
2862 QStandardItem *mutableItem = const_cast<QStandardItem*>(it.key());
2863 if (mutableItem->parent())
2864 mutableItem->parent()->removeRow(mutableItem->row());
2865 else
2866 m_model->removeRow(mutableItem->row());
2867 }
2868 it = m_itemDipoleMap.erase(it);
2869 }
2870 m_sceneDirty = true; update();
2871}
2872
2873//=============================================================================================================
2874
2876{
2877 QStringList keysToRemove;
2878 for (auto it = m_surfaces.cbegin(); it != m_surfaces.cend(); ++it) {
2879 if (it.key().startsWith("srcsp_"))
2880 keysToRemove << it.key();
2881 }
2882
2883 for (auto it = m_itemSurfaceMap.begin(); it != m_itemSurfaceMap.end(); ) {
2884 bool remove = false;
2885 for (const QString &key : keysToRemove) {
2886 if (m_surfaces.contains(key) && m_surfaces[key] == it.value()) {
2887 remove = true;
2888 break;
2889 }
2890 }
2891 if (remove) {
2892 if (m_model) {
2893 QStandardItem *mutableItem = const_cast<QStandardItem*>(it.key());
2894 if (mutableItem->parent())
2895 mutableItem->parent()->removeRow(mutableItem->row());
2896 else
2897 m_model->removeRow(mutableItem->row());
2898 }
2899 it = m_itemSurfaceMap.erase(it);
2900 } else {
2901 ++it;
2902 }
2903 }
2904
2905 for (const QString &key : keysToRemove)
2906 m_surfaces.remove(key);
2907
2908 updateSceneBounds();
2909 m_sceneDirty = true; update();
2910}
2911
2912//=============================================================================================================
2913
2915{
2916 QStringList keysToRemove;
2917 for (auto it = m_surfaces.cbegin(); it != m_surfaces.cend(); ++it) {
2918 if (it.key().startsWith("sens_") || it.key().startsWith("dig_"))
2919 keysToRemove << it.key();
2920 }
2921
2922 for (auto it = m_itemSurfaceMap.begin(); it != m_itemSurfaceMap.end(); ) {
2923 bool remove = false;
2924 for (const QString &key : keysToRemove) {
2925 if (m_surfaces.contains(key) && m_surfaces[key] == it.value()) {
2926 remove = true;
2927 break;
2928 }
2929 }
2930 if (remove) {
2931 if (m_model) {
2932 QStandardItem *mutableItem = const_cast<QStandardItem*>(it.key());
2933 if (mutableItem->parent())
2934 mutableItem->parent()->removeRow(mutableItem->row());
2935 else
2936 m_model->removeRow(mutableItem->row());
2937 }
2938 it = m_itemSurfaceMap.erase(it);
2939 } else {
2940 ++it;
2941 }
2942 }
2943
2944 for (const QString &key : keysToRemove)
2945 m_surfaces.remove(key);
2946
2947 m_devHeadTrans = QMatrix4x4();
2948 m_hasDevHead = false;
2949 updateSceneBounds();
2950 m_sceneDirty = true; update();
2951}
2952
2953//=============================================================================================================
2954
2956{
2957 m_fieldMapper.setEvoked(FIFFLIB::FiffEvoked());
2958 m_sensorStreamManager.stopStreaming();
2959 m_sceneDirty = true; update();
2960}
2961
2962//=============================================================================================================
2963
2965{
2966 m_headToMriTrans = FIFFLIB::FiffCoordTrans();
2967 refreshSensorTransforms();
2968 m_sceneDirty = true; update();
2969}
2970
2971//=============================================================================================================
2972
2974{
2975 m_network.reset();
2976 m_networkVisible = false;
2977
2978 // Remove network items from model
2979 if (m_model) {
2980 for (int r = m_model->rowCount() - 1; r >= 0; --r) {
2981 QStandardItem *item = m_model->item(r);
2982 if (item && item->text() == "Networks") {
2983 m_model->removeRow(r);
2984 break;
2985 }
2986 }
2987 }
2988 m_sceneDirty = true; update();
2989}
DataLoader — static helpers for loading MNE data files.
FsSurface key constants and type-to-key mappings.
QString shaderModeName(ShaderMode mode)
VisualizationMode visualizationModeFromName(const QString &name)
QString visualizationModeName(VisualizationMode mode)
bool multiViewPresetIsPerspective(int preset)
ShaderMode shaderModeFromName(const QString &name)
int normalizedVisualizationTarget(int target, int maxIndex)
QString multiViewPresetName(int preset)
BrainView class declaration.
SplitterHit
BrainRenderer class declaration.
RayPicker class declaration — ray casting and intersection testing.
MeshFactory class declaration — static utilities for generating primitive meshes (spheres,...
BrainSurface class declaration.
DipoleObject class declaration.
NetworkObject class declaration.
BrainTreeModel class declaration.
SourceSpaceTreeItem class declaration.
SensorTreeItem class declaration.
DigitizerTreeItem class declaration.
BemTreeItem class declaration.
DipoleTreeItem class declaration.
SurfaceTreeItem class declaration.
FiffEvokedSet class declaration.
MNEBem class declaration.
MNESourceSpaces class declaration.
Network class declaration.
FIFF file I/O and data structures (raw, epochs, evoked, covariance, forward).
QString sensorParentToKeyPrefix(const QString &parentText)
QString sensorTypeToObjectKey(const QString &uiType)
QMatrix4x4 toQMatrix4x4(const Eigen::Matrix4f &m)
This class holds information about a network, can compute a distance table and provide network metric...
Definition network.h:92
const QList< QSharedPointer< NetworkNode > > & getNodes() const
Definition network.cpp:156
static SensorLoadResult loadSensors(const QString &fifPath, const QString &megHelmetOverridePath={})
static QStringList probeEvokedSets(const QString &evokedPath)
static MNELIB::MNESourceSpaces loadSourceSpace(const QString &fwdPath)
static bool loadHeadToMriTransform(const QString &transPath, FIFFLIB::FiffCoordTrans &trans)
static std::shared_ptr< BrainSurface > loadHelmetSurface(const QString &helmetFilePath, const QMatrix4x4 &devHeadTrans=QMatrix4x4(), bool applyTrans=false)
static INVLIB::InvEcdSet loadDipoles(const QString &dipPath)
static FIFFLIB::FiffEvoked loadEvoked(const QString &evokedPath, int aveIndex=0)
Per-view toggle flags controlling which data layers (brain, sensors, sources, network) are visible.
Definition viewstate.h:74
Viewport subdivision holding its own camera, projection, and scissor rectangle.
Definition viewstate.h:148
bool matchesSurfaceType(const QString &key) const
ViewVisibilityProfile visibility
Definition viewstate.h:154
static bool isBrainSurfaceKey(const QString &key)
ShaderMode bemShader
Definition viewstate.h:152
bool shouldRenderSurface(const QString &key) const
QString surfaceType
Definition viewstate.h:150
VisualizationMode overlayMode
Definition viewstate.h:153
static SubView defaultForIndex(int index)
ShaderMode brainShader
Definition viewstate.h:151
int preset
Definition viewstate.h:160
static std::shared_ptr< BrainSurface > createPlate(const QVector3D &center, const QMatrix4x4 &orientation, const QColor &color, float size)
static std::shared_ptr< BrainSurface > createBatchedSpheres(const QVector< QVector3D > &positions, float radius, const QColor &color, int subdivisions=1)
static std::shared_ptr< BrainSurface > createBarbell(const QVector3D &center, const QMatrix4x4 &orientation, const QColor &color, float size)
static int sphereVertexCount(int subdivisions=1)
static std::shared_ptr< BrainSurface > createSphere(const QVector3D &center, float radius, const QColor &color, int subdivisions=1)
Computed camera matrices (projection, view, model) and vectors for a single viewport.
QVector3D cameraPos
QMatrix4x4 view
QMatrix4x4 model
QMatrix4x4 projection
static void applyMouseRotation(const QPoint &delta, QQuaternion &rotation, float speed=0.5f)
static void applyMousePan(const QPoint &delta, QVector2D &pan, float sceneSize)
Result of a ray–mesh intersection test containing the hit point, triangle index, and distance.
Definition raypicker.h:65
int vertexIndex
Vertex or element index at hit.
Definition raypicker.h:72
bool hit
True if something was hit.
Definition raypicker.h:66
QString surfaceKey
FsSurface map key of the hit surface.
Definition raypicker.h:71
QVector3D hitPoint
World-space intersection point.
Definition raypicker.h:68
QStandardItem * item
Tree item that was hit (nullable).
Definition raypicker.h:70
int regionId
FsAnnotation label ID.
Definition raypicker.h:79
static bool unproject(const QPoint &screenPos, const QRect &paneRect, const QMatrix4x4 &pvm, QVector3D &rayOrigin, QVector3D &rayDir)
Definition raypicker.cpp:52
static QString buildLabel(const PickResult &result, const QMap< const QStandardItem *, std::shared_ptr< BrainSurface > > &itemSurfaceMap, const QMap< QString, std::shared_ptr< BrainSurface > > &surfaces)
static PickResult pick(const QVector3D &rayOrigin, const QVector3D &rayDir, const SubView &subView, const QMap< QString, std::shared_ptr< BrainSurface > > &surfaces, const QMap< const QStandardItem *, std::shared_ptr< BrainSurface > > &itemSurfaceMap, const QMap< const QStandardItem *, std::shared_ptr< DipoleObject > > &itemDipoleMap)
Definition raypicker.cpp:85
Hierarchical item model organizing all 3-D scene objects (surfaces, sensors, sources,...
Base tree item providing check-state, visibility, and data-role storage for all 3-D scene items.
QColor color() const
static constexpr int itemTypeId(ItemType type)
void setVisible(bool visible)
int type() const override
Tree item representing a BEM surface layer in the 3-D scene hierarchy.
Definition bemtreeitem.h:52
const MNELIB::MNEBemSurface & bemSurfaceData() const
Digitizer point group tree item.
PointKind pointKind() const
const QVector< QVector3D > & positions() const
Tree item representing a set of fitted dipoles in the 3-D scene hierarchy.
const INVLIB::InvEcdSet & ecdSet() const
Tree item representing MEG or EEG sensor positions in the 3-D scene hierarchy.
bool hasOrientation() const
QVector3D position() const
float scale() const
const QMatrix4x4 & orientation() const
Source space point tree item.
const QVector< QVector3D > & positions() const
Tree item representing a FreeSurfer cortical surface in the 3-D scene hierarchy.
FSLIB::FsSurface surfaceData() const
FSLIB::FsAnnotation annotationData() const
Renderable cortical surface mesh with per-vertex color, curvature data, and GPU buffer management.
static constexpr VisualizationMode ModeScientific
void boundingBox(QVector3D &min, QVector3D &max) const
::VisualizationMode VisualizationMode
bool isVisible() const
static constexpr VisualizationMode ModeSurface
void colorsAvailable(const QString &surfaceKey, const QVector< uint32_t > &colors)
void loadingProgress(int percent, const QString &message)
void timePointChanged(int index, float time)
void loaded(int numTimePoints)
void thresholdsUpdated(float min, float mid, float max)
void realtimeColorsAvailable(const QVector< uint32_t > &colorsLh, const QVector< uint32_t > &colorsRh)
static constexpr ShaderMode Holographic
::ShaderMode ShaderMode
Aggregated GPU resources and render state for the 3-D brain visualization scene.
void setBemHighContrast(bool enabled)
void clearDipoles()
void setSourceColormap(const QString &name)
bool loadMegHelmetSurface(const QString &helmetFilePath)
void setHemiVisible(int hemiIdx, bool visible)
void setSensorFieldTimePoint(int index)
void sourceThresholdsUpdated(float min, float mid, float max)
void setInfoPanelVisible(bool visible)
void clearSourceEstimate()
int stcNumTimePoints() const
bool loadTransformation(const QString &transPath)
bool loadDipoles(const QString &dipPath)
void wheelEvent(QWheelEvent *event) override
bool sensorFieldTimeRange(float &tmin, float &tmax) const
void sourceEstimateLoaded(int numTimePoints)
bool loadNetwork(const CONNLIB::Network &network, const QString &name="Network")
void setSensorFieldContourVisible(const QString &type, bool visible)
bool megFieldMapOnHeadForTarget(int target) const
void setShaderMode(const QString &mode)
void setNetworkColormap(const QString &name)
int closestSensorFieldIndex(float timeSec) const
bool isRealtimeSensorStreaming() const
void resizeEvent(QResizeEvent *event) override
int closestStcIndex(float timeSec) const
float stcStep() const
QString bemShaderModeForTarget(int target) const
void keyPressEvent(QKeyEvent *event) override
void setRealtimeLooping(bool enabled)
void setRealtimeInterval(int msec)
void showMultiView()
void clearSurfaces()
void setSensorFieldVisible(const QString &type, bool visible)
float stcTmin() const
int visualizationEditTarget() const
void startRealtimeSensorStreaming(const QString &modality=QStringLiteral("MEG"))
QString overlayModeForTarget(int target) const
void clearTransformation()
void onRowsInserted(const QModelIndex &parent, int first, int last)
void stopRealtimeSensorStreaming()
QString shaderModeForTarget(int target) const
bool objectVisibleForTarget(const QString &object, int target) const
void setRealtimeSensorAverages(int numAvr)
void setSourceThresholds(float min, float mid, float max)
void mouseMoveEvent(QMouseEvent *event) override
void pushRealtimeSourceData(const Eigen::VectorXd &data)
void visualizationEditTargetChanged(int target)
void clearBem()
void setVisualizationEditTarget(int target)
void timePointChanged(int index, float time)
void setSourceSpaceVisible(bool visible)
void render(QRhiCommandBuffer *cb) override
void setInitialCameraRotation(const QQuaternion &rotation)
bool loadSensors(const QString &fifPath)
void clearNetwork()
void setVisualizationMode(const QString &mode)
void initialize(QRhiCommandBuffer *cb) override
void setModel(BrainTreeModel *model)
void mouseReleaseEvent(QMouseEvent *event) override
BrainView(QWidget *parent=nullptr)
Definition brainview.cpp:90
void setSensorVisible(const QString &type, bool visible)
void setSensorFieldColormap(const QString &name)
void showSingleView()
void syncBemShadersToBrainShaders()
void setRealtimeSensorLooping(bool enabled)
void setSensorTransEnabled(bool enabled)
void startRealtimeStreaming()
bool loadSourceEstimate(const QString &lhPath, const QString &rhPath)
void stopRealtimeStreaming()
void setViewportCameraPreset(int index, int preset)
void setBemVisible(const QString &name, bool visible)
static QStringList probeEvokedSets(const QString &evokedPath)
void castRay(const QPoint &pos)
void saveSnapshot()
void setViewportEnabled(int index, bool enabled)
void setBemShaderMode(const QString &mode)
void mousePressEvent(QMouseEvent *event) override
bool loadSourceSpace(const QString &fwdPath)
void setTimePoint(int index)
QString activeSurfaceForTarget(int target) const
bool isViewportEnabled(int index) const
void setActiveSurface(const QString &type)
void sensorFieldTimePointChanged(int index, float time)
bool isRealtimeStreaming() const
void setViewCount(int count)
void clearSourceSpace()
void setMegHelmetOverride(const QString &path)
int viewportCameraPreset(int index) const
void setDipoleVisible(bool visible)
void setRealtimeSensorInterval(int msec)
void onDataChanged(const QModelIndex &topLeft, const QModelIndex &bottomRight, const QVector< int > &roles)
void hoveredRegionChanged(const QString &regionName)
void setLightingEnabled(bool enabled)
void clearEvoked()
void setNetworkVisible(bool visible)
void sensorFieldLoaded(int numTimePoints, int initialTimePoint=0)
void setMegFieldMapOnHead(bool useHead)
void clearSensors()
bool loadSensorField(const QString &evokedPath, int aveIndex=0)
void setRealtimeSensorColormap(const QString &name)
void resetMultiViewLayout()
void stcLoadingProgress(int percent, const QString &message)
void pushRealtimeSensorData(const Eigen::VectorXf &data)
void setNetworkThreshold(double threshold)
static Qt::CursorShape cursorForHit(SplitterHit hit)
Coordinate transformation description.
Eigen::Matrix< float, 4, 4, Eigen::DontAlign > trans
BEM surface provides geometry information.