v2.0.0
Loading...
Searching...
No Matches
brainview.cpp
Go to the documentation of this file.
1//=============================================================================================================
36
37//=============================================================================================================
38// INCLUDES
39//=============================================================================================================
40
41
42#include "brainview.h"
43#include "brainrenderer.h"
47#include "core/surfacekeys.h"
48#include "core/dataloader.h"
49#include "input/raypicker.h"
53
54#include <rhi/qrhi.h>
60
61#include <Eigen/Dense>
62#include <QMatrix4x4>
63#include <QDebug>
64#include <QTimer>
65#include <QLabel>
66#include <QFrame>
67#include <QMouseEvent>
68#include <QKeyEvent>
69#include <QWheelEvent>
70#include <QResizeEvent>
71#include <QSettings>
72#include <QCoreApplication>
73#include <QMenu>
74#include <QStandardItem>
75#include <algorithm>
76#include <cmath>
77
78#include <mne/mne_bem.h>
82
83using namespace FIFFLIB;
84
85//=============================================================================================================
86// DEFINE MEMBER METHODS
87//=============================================================================================================
88
89//=============================================================================================================
90
// Constructs the 3D brain view: picks a QRhi backend per platform, starts the
// ~60 FPS repaint timer, creates the HUD/overlay QLabels and viewport
// separators, sizes the sub-view state to kDefaultViewportCount, restores
// persisted multi-view settings, and wires the data-manager signals.
// NOTE(review): orig. lines 196/198/200 (the receiver halves of three
// connect() calls) are missing from this extraction — restore them from the
// original source before compiling.
 91BrainView::BrainView(QWidget *parent)
 92    : QRhiWidget(parent)
 93{
 94    setMinimumSize(800, 600);
 95    setSampleCount(1);
 96
    // Backend selection: WebGL on WASM, Metal on Apple, D3D11 on Windows,
    // OpenGL everywhere else.
 97#if defined(WASMBUILD) || defined(__EMSCRIPTEN__)
 98    setApi(Api::OpenGL); // WebGL 2 (OpenGL ES 3.0) on WASM
 99#elif defined(Q_OS_MACOS) || defined(Q_OS_IOS)
100    setApi(Api::Metal);
101#elif defined(Q_OS_WIN)
102    setApi(Api::Direct3D11);
103#else
104    setApi(Api::OpenGL);
105#endif
106
107    setMouseTracking(true); // Enable hover events
108
109    m_updateTimer = new QTimer(this);
110    connect(m_updateTimer, &QTimer::timeout, this, QOverload<>::of(&BrainView::update));
111    m_updateTimer->start(16); // ~60 FPS update
112
    // FPS/vertex-count HUD, top-right corner.
113    m_fpsLabel = new QLabel(this);
114    m_fpsLabel->setStyleSheet("color: white; font-weight: bold; font-family: monospace; font-size: 13px; background: transparent; padding: 5px;");
115    m_fpsLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
116    m_fpsLabel->setAlignment(Qt::AlignRight | Qt::AlignTop);
117    m_fpsLabel->setText("FPS: --.-\nVertices: 0");
118    m_fpsLabel->adjustSize();
119    m_fpsLabel->move(width() - m_fpsLabel->width() - 10, 10);
120    m_fpsLabel->raise();
121
    // Info overlay used in single-view mode (hidden until populated).
122    m_singleViewInfoLabel = new QLabel(this);
123    m_singleViewInfoLabel->setStyleSheet("color: white; font-family: monospace; font-size: 10px; background: rgba(0,0,0,110); border-radius: 3px; padding: 2px 4px;");
124    m_singleViewInfoLabel->setAttribute(Qt::WA_TransparentForMouseEvents);
125    m_singleViewInfoLabel->setAlignment(Qt::AlignLeft | Qt::AlignTop);
126    m_singleViewInfoLabel->setText("");
127    m_singleViewInfoLabel->adjustSize();
128    m_singleViewInfoLabel->hide();
129
130    m_fpsTimer.start();
131
    // Hovered-region name overlay, top-left corner.
132    m_regionLabel = new QLabel(this);
133    m_regionLabel->setStyleSheet("color: white; font-weight: bold; font-family: sans-serif; font-size: 16px; background: transparent; padding: 5px;");
134    m_regionLabel->setText("");
135    m_regionLabel->move(10, 10);
136    m_regionLabel->resize(300, 30);
137    m_regionLabel->hide();
138
139    // ── Initialise viewport labels (sized to kDefaultViewportCount) ────────
140    m_subViews.resize(kDefaultViewportCount);
141    m_viewportNameLabels.resize(kDefaultViewportCount, nullptr);
142    m_viewportInfoLabels.resize(kDefaultViewportCount, nullptr);
143    for (int i = 0; i < kDefaultViewportCount; ++i) {
144        m_subViews[i] = SubView::defaultForIndex(i);
145
146        m_viewportNameLabels[i] = new QLabel(this);
147        m_viewportNameLabels[i]->setStyleSheet("color: white; font-weight: bold; font-family: sans-serif; font-size: 12px; background: transparent; padding: 2px 4px;");
148        m_viewportNameLabels[i]->setAttribute(Qt::WA_TransparentForMouseEvents);
149        m_viewportNameLabels[i]->setText(multiViewPresetName(m_subViews[i].preset));
150        m_viewportNameLabels[i]->adjustSize();
151        m_viewportNameLabels[i]->hide();
152
153        m_viewportInfoLabels[i] = new QLabel(this);
154        m_viewportInfoLabels[i]->setStyleSheet("color: white; font-family: monospace; font-size: 10px; background: rgba(0,0,0,110); border-radius: 3px; padding: 2px 4px;");
155        m_viewportInfoLabels[i]->setAttribute(Qt::WA_TransparentForMouseEvents);
156        m_viewportInfoLabels[i]->setAlignment(Qt::AlignLeft | Qt::AlignTop);
157        m_viewportInfoLabels[i]->setText("");
158        m_viewportInfoLabels[i]->adjustSize();
159        m_viewportInfoLabels[i]->hide();
160    }
161
    // Splitter separators between viewports (shown only in multi-view).
162    m_verticalSeparator = new QFrame(this);
163    m_verticalSeparator->setFrameShape(QFrame::NoFrame);
164    m_verticalSeparator->setAttribute(Qt::WA_TransparentForMouseEvents);
165    m_verticalSeparator->hide();
166
167    m_horizontalSeparator = new QFrame(this);
168    m_horizontalSeparator->setFrameShape(QFrame::NoFrame);
169    m_horizontalSeparator->setAttribute(Qt::WA_TransparentForMouseEvents);
170    m_horizontalSeparator->hide();
171
    // Separator colour follows the palette but is forced semi-transparent.
172    QColor sepColor = palette().color(QPalette::Midlight);
173    if (sepColor.alpha() == 255) {
174        sepColor.setAlpha(180);
175    }
176    const QString sepStyle = QString("background-color: rgba(%1,%2,%3,%4);")
177                                 .arg(sepColor.red())
178                                 .arg(sepColor.green())
179                                 .arg(sepColor.blue())
180                                 .arg(sepColor.alpha());
181    m_verticalSeparator->setStyleSheet(sepStyle);
182    m_horizontalSeparator->setStyleSheet(sepStyle);
183
184    loadMultiViewSettings();
185    updateViewportSeparators();
186    updateOverlayLayout();
187
188    // Setup Debug Pointer: Semi-transparent sphere for subtle intersection indicator
189    m_debugPointerSurface = MeshFactory::createSphere(QVector3D(0, 0, 0), 0.002f,
190                                                      QColor(200, 255, 255, 160));
191
192    // ── Connect SourceEstimateManager signals ─────────────────────────
193    connect(&m_sourceManager, &SourceEstimateManager::loaded,
194            this, &BrainView::onSourceEstimateLoaded);
    // NOTE(review): the receiver lines of the next three connects (orig.
    // 196/198/200) were dropped by the extraction.
195    connect(&m_sourceManager, &SourceEstimateManager::thresholdsUpdated,
197    connect(&m_sourceManager, &SourceEstimateManager::timePointChanged,
199    connect(&m_sourceManager, &SourceEstimateManager::loadingProgress,
201    connect(&m_sourceManager, &SourceEstimateManager::realtimeColorsAvailable,
202            this, &BrainView::onRealtimeColorsAvailable);
203
204    // RtSensorStreamManager → BrainView
205    connect(&m_sensorStreamManager, &RtSensorStreamManager::colorsAvailable,
206            this, &BrainView::onSensorStreamColorsAvailable);
207}
208
209//=============================================================================================================
210
// Destructor: persist the current multi-view configuration before teardown.
// NOTE(review): the signature line (presumably BrainView::~BrainView()) is
// missing from this extraction — confirm against the original source.
212{
213    saveMultiViewSettings();
214}
215
216//=============================================================================================================
217
// Attaches the tree model and subscribes to its row-insertion and data-change
// notifications so the 3D scene tracks model edits.
// NOTE(review): the signature line is missing from this extraction
// (presumably void BrainView::setModel(BrainTreeModel* model)) — confirm.
// NOTE(review): items already present in the model at attach time are NOT
// ingested here; callers must set the model before populating it.
219{
220    m_model = model;
221    connect(m_model, &BrainTreeModel::rowsInserted, this, &BrainView::onRowsInserted);
222    connect(m_model, &BrainTreeModel::dataChanged, this, &BrainView::onDataChanged);
223
224    // Initial population if not empty?
225    // For now assuming we set model before adding data or iterate.
226}
227
228//=============================================================================================================
229
// Sets the camera orientation, persists it together with the other
// multi-view settings, and schedules a repaint.
230void BrainView::setInitialCameraRotation(const QQuaternion &rotation)
231{
232    m_cameraRotation = rotation;
233    saveMultiViewSettings(); // rotation is stored alongside the view settings
234    update();
235}
236
// Ingests newly inserted model rows [first, last] under `parent` and builds
// the matching render-side objects: BrainSurface meshes for surface/BEM/
// sensor/source-space/digitizer items and DipoleObjects for dipole items.
// Each created object is registered in m_itemSurfaceMap (item -> surface) and,
// for legacy lookups, in the string-keyed m_surfaces map. Recurses into
// children, then refreshes inflated-hemisphere layout and scene bounds.
237void BrainView::onRowsInserted(const QModelIndex &parent, int first, int last)
238{
239
240    if (!m_model) return;
241
242    for (int i = first; i <= last; ++i) {
243        QModelIndex index = m_model->index(i, 0, parent);
244        QStandardItem* item = m_model->itemFromIndex(index);
245
246        AbstractTreeItem* absItem = dynamic_cast<AbstractTreeItem*>(item);
247
248        // Handle Surface Items
249        if (absItem && absItem->type() == AbstractTreeItem::SurfaceItem + QStandardItem::UserType) {
250            SurfaceTreeItem* surfItem = static_cast<SurfaceTreeItem*>(absItem);
251            auto brainSurf = std::make_shared<BrainSurface>();
252
253            // Load geometry from item
254            brainSurf->fromSurface(surfItem->surfaceData());
255
256            // Determine Hemisphere from Parent
257            if (absItem->parent()) {
258                QString parentText = absItem->parent()->text();
259                if (parentText == "lh") brainSurf->setHemi(0);
260                else if (parentText == "rh") brainSurf->setHemi(1);
261            }
262
263            // Set properties
264            brainSurf->setVisible(surfItem->isVisible());
265
266            // Brain surfaces (pial, white, inflated, etc.) are brain tissue
267            brainSurf->setTissueType(BrainSurface::TissueBrain);
268
269            m_itemSurfaceMap[item] = brainSurf;
270
271            // Key generation: "hemi_type" e.g. "lh_pial"
272            QString key;
273            if (absItem->parent()) {
274                key = absItem->parent()->text() + "_" + surfItem->text();
275            } else {
276                key = surfItem->text();
277            }
278            m_surfaces[key] = brainSurf;
279
280            // Check for annotations
281            if (!surfItem->annotationData().isEmpty()) {
282                brainSurf->addAnnotation(surfItem->annotationData());
283            }
284
285            // Set active if first
286            if (!m_activeSurface) {
287                m_activeSurface = brainSurf;
288                m_activeSurfaceType = surfItem->text();
289            }
290        }
291        // Check for BEM Item (using dynamic_cast for safety)
292        BemTreeItem* bemItem = dynamic_cast<BemTreeItem*>(absItem);
293        if (bemItem) {
294            const MNELIB::MNEBemSurface &bemSurfData = bemItem->bemSurfaceData();
295
296            auto brainSurf = std::make_shared<BrainSurface>();
297
298            // Load BEM geometry with color from item
299            brainSurf->fromBemSurface(bemSurfData, bemItem->color());
300
301            brainSurf->setVisible(bemItem->isVisible());
302
303            // Set tissue type based on surface name
304            QString surfName = bemItem->text().toLower();
305            if (surfName.contains("head") || surfName.contains("skin") || surfName.contains("scalp")) {
306                brainSurf->setTissueType(BrainSurface::TissueSkin);
307            } else if (surfName.contains("outer") && surfName.contains("skull")) {
308                brainSurf->setTissueType(BrainSurface::TissueOuterSkull);
309            } else if (surfName.contains("inner") && surfName.contains("skull")) {
310                brainSurf->setTissueType(BrainSurface::TissueInnerSkull);
311            } else if (surfName.contains("skull")) {
312                brainSurf->setTissueType(BrainSurface::TissueOuterSkull); // Default skull to outer
313            } else if (surfName.contains("brain")) {
314                brainSurf->setTissueType(BrainSurface::TissueBrain);
315            }
316
317            m_itemSurfaceMap[item] = brainSurf;
318
319            // Legacy map support (Use item text e.g. "bem_head")
320            m_surfaces["bem_" + bemItem->text()] = brainSurf;
321        }
322
323        // Handle Sensor Items
324        if (absItem && absItem->type() == AbstractTreeItem::SensorItem + QStandardItem::UserType) {
325            SensorTreeItem* sensItem = static_cast<SensorTreeItem*>(absItem);
326
327            std::shared_ptr<BrainSurface> brainSurf;
328
329            QString parentText = "";
330            if (sensItem->parent()) parentText = sensItem->parent()->text();
331
332            if (parentText.contains("MEG/Grad") && sensItem->hasOrientation()) {
333                brainSurf = MeshFactory::createBarbell(sensItem->position(), sensItem->orientation(),
334                                                       sensItem->color(), sensItem->scale());
335            } else if (parentText.contains("MEG/Mag") && sensItem->hasOrientation()) {
336                brainSurf = MeshFactory::createPlate(sensItem->position(), sensItem->orientation(),
337                                                     sensItem->color(), sensItem->scale());
338            } else {
339                // EEG and other sensors: smooth icosphere
340                brainSurf = MeshFactory::createSphere(sensItem->position(), sensItem->scale(),
341                                                      sensItem->color());
342            }
343
344            brainSurf->setVisible(sensItem->isVisible());
345            m_itemSurfaceMap[item] = brainSurf;
346
347            // Apply Head-to-MRI transformation if available
348            // Note: meg positions in info might already be head-space, but check if we need this global trans
349            if (!m_headToMriTrans.isEmpty()) {
350                QMatrix4x4 m;
351                if (m_applySensorTrans) {
352                    m = SURFACEKEYS::toQMatrix4x4(m_headToMriTrans.trans);
353                }
354                brainSurf->applyTransform(m);
355            }
356
357            // Legacy map support
358            const QString keyPrefix = SURFACEKEYS::sensorParentToKeyPrefix(parentText);
359
360            QString key = keyPrefix + sensItem->text() + "_" + QString::number((quintptr)sensItem);
361            m_surfaces[key] = brainSurf;
362
363
364        }
365
366        // Handle Dipole Items
367        if (absItem && absItem->type() == AbstractTreeItem::DipoleItem + QStandardItem::UserType) {
368            DipoleTreeItem* dipItem = static_cast<DipoleTreeItem*>(absItem);
369            auto dipObject = std::make_shared<DipoleObject>();
370            dipObject->load(dipItem->ecdSet());
371            dipObject->setVisible(dipItem->isVisible());
372
373            m_itemDipoleMap[item] = dipObject;
374        }
375
376        // Handle Source Space Items (one item per hemisphere, batched mesh)
377        if (absItem && absItem->type() == AbstractTreeItem::SourceSpaceItem + QStandardItem::UserType) {
378            SourceSpaceTreeItem* srcItem = static_cast<SourceSpaceTreeItem*>(absItem);
379            const QVector<QVector3D>& positions = srcItem->positions();
380            // NOTE(review): `continue` also skips the recursive child scan at
            // the bottom of this loop for the item — confirm that is intended.
380            if (positions.isEmpty()) continue;
381
382            auto brainSurf = MeshFactory::createBatchedSpheres(positions, srcItem->scale(),
383                                                               srcItem->color());
384            brainSurf->setVisible(srcItem->isVisible());
385            m_itemSurfaceMap[item] = brainSurf;
386
387            QString key = "srcsp_" + srcItem->text();
388            m_surfaces[key] = brainSurf;
389        }
390
391        // Handle Digitizer Items (batched sphere mesh per category)
392        if (absItem && absItem->type() == AbstractTreeItem::DigitizerItem + QStandardItem::UserType) {
393            DigitizerTreeItem* digItem = static_cast<DigitizerTreeItem*>(absItem);
394            const QVector<QVector3D>& positions = digItem->positions();
395            if (positions.isEmpty()) continue; // see NOTE above: skips child recursion too
396
397            auto brainSurf = MeshFactory::createBatchedSpheres(positions, digItem->scale(),
398                                                               digItem->color());
399            brainSurf->setVisible(digItem->isVisible());
400
401            // Apply Head-to-MRI transformation if available
402            if (!m_headToMriTrans.isEmpty()) {
403                QMatrix4x4 m;
404                if (m_applySensorTrans) {
405                    m = SURFACEKEYS::toQMatrix4x4(m_headToMriTrans.trans);
406                }
407                brainSurf->applyTransform(m);
408            }
409
410            m_itemSurfaceMap[item] = brainSurf;
411
412            // Category name for legacy map key
413            QString catName;
414            switch (digItem->pointKind()) {
415            case DigitizerTreeItem::Cardinal: catName = "cardinal"; break;
416            case DigitizerTreeItem::HPI: catName = "hpi"; break;
417            case DigitizerTreeItem::EEG: catName = "eeg"; break;
418            case DigitizerTreeItem::Extra: catName = "extra"; break;
419            }
420            QString key = "dig_" + catName;
421            m_surfaces[key] = brainSurf;
422        }
423
424
425        // Check children recursively
426        if (m_model->hasChildren(index)) {
427            onRowsInserted(index, 0, m_model->rowCount(index) - 1);
428        }
429    }
430    updateInflatedSurfaceTransforms();
431    updateSceneBounds();
432    update();
433}
434
435void BrainView::onDataChanged(const QModelIndex &topLeft, const QModelIndex &bottomRight, const QVector<int> &roles)
436{
437 // Update visuals based on roles
438 for (int i = topLeft.row(); i <= bottomRight.row(); ++i) {
439 QModelIndex index = m_model->index(i, 0, topLeft.parent());
440 QStandardItem* item = m_model->itemFromIndex(index);
441
442 if (m_itemSurfaceMap.contains(item)) {
443 auto surf = m_itemSurfaceMap[item];
444
445 AbstractTreeItem* absItem = dynamic_cast<AbstractTreeItem*>(item);
446 if (absItem) {
447 if (roles.contains(AbstractTreeItem::VisibleRole)) {
448 surf->setVisible(absItem->isVisible());
449 }
450 if (roles.contains(AbstractTreeItem::ColorRole)) {
451 // Update color (not fully impl in BrainSurface yet for uniform override, but prepared)
452 }
453 if (roles.contains(SurfaceTreeItem::AnnotationDataRole)) {
454 SurfaceTreeItem* sItem = static_cast<SurfaceTreeItem*>(absItem);
455 if (!sItem->annotationData().isEmpty()) {
456 surf->addAnnotation(sItem->annotationData());
457 }
458 }
459 }
460 }
461 }
462 updateSceneBounds();
463 update();
464}
465
466//=============================================================================================================
467
468void BrainView::setActiveSurface(const QString &type)
469{
470 subViewForTarget(m_visualizationEditTarget).surfaceType = type;
471
472 m_activeSurfaceType = type;
473
474 // Update m_activeSurface pointer to one of the matching surfaces for stats/helpers
475 QString key = "lh_" + type;
476 if (m_surfaces.contains(key)) m_activeSurface = m_surfaces[key];
477 else {
478 key = "rh_" + type;
479 if (m_surfaces.contains(key)) m_activeSurface = m_surfaces[key];
480 }
481
482 updateInflatedSurfaceTransforms();
483 saveMultiViewSettings();
484
485 updateSceneBounds();
486 update();
487}
488
489void BrainView::updateSceneBounds()
490{
491 QVector3D min(std::numeric_limits<float>::max(), std::numeric_limits<float>::max(), std::numeric_limits<float>::max());
492 QVector3D max(std::numeric_limits<float>::lowest(), std::numeric_limits<float>::lowest(), std::numeric_limits<float>::lowest());
493 bool hasContent = false;
494
495 // Iterate over all surfaces
496 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
497 if (it.value()->isVisible()) {
498 QVector3D sMin, sMax;
499 it.value()->boundingBox(sMin, sMax);
500
501 min.setX(std::min(min.x(), sMin.x()));
502 min.setY(std::min(min.y(), sMin.y()));
503 min.setZ(std::min(min.z(), sMin.z()));
504
505 max.setX(std::max(max.x(), sMax.x()));
506 max.setY(std::max(max.y(), sMax.y()));
507 max.setZ(std::max(max.z(), sMax.z()));
508 hasContent = true;
509 }
510 }
511
512 // Iterate over all dipoles
513 for (auto it = m_itemDipoleMap.begin(); it != m_itemDipoleMap.end(); ++it) {
514 if (it.value()->isVisible()) {
515 // Dipoles don't have a bounding box method in DipoleObject yet,
516 // but we can approximate or skip for now.
517 // Ideally DipoleObject should expose bounds.
518 // For now, let's assume surfaces dictate the scene size usually.
519 }
520 }
521
522 if (hasContent) {
523 m_sceneCenter = (min + max) * 0.5f;
524
525 QVector3D diag = max - min;
526 m_sceneSize = std::max(diag.x(), std::max(diag.y(), diag.z()));
527
528 // Ensure non-zero size
529 if (m_sceneSize < 0.01f) m_sceneSize = 0.3f;
530
531 } else {
532 // Default
533 m_sceneCenter = QVector3D(0,0,0);
534 m_sceneSize = 0.3f;
535 }
536}
537
538//=============================================================================================================
539
540void BrainView::setShaderMode(const QString &modeName)
541{
542 const BrainRenderer::ShaderMode mode = shaderModeFromName(modeName);
543 subViewForTarget(m_visualizationEditTarget).brainShader = mode;
544
545 m_brainShaderMode = mode;
546 saveMultiViewSettings();
547 update();
548}
549
550//=============================================================================================================
551
// Selects which viewport (-1 = single view, 0..N-1 = sub-view) subsequent
// visualization setters edit, syncs the legacy per-view members from that
// viewport's state, refreshes MEG field mapping if its head/helmet target
// changed, and emits visualizationEditTargetChanged when the target moved.
// NOTE(review): the signature line is missing from this extraction
// (presumably void BrainView::setVisualizationEditTarget(int target)).
553{
554    const int prev = m_visualizationEditTarget;
555    m_visualizationEditTarget = normalizedVisualizationTarget(target, static_cast<int>(m_subViews.size()) - 1);
556
557    const SubView &sv = subViewForTarget(m_visualizationEditTarget);
558    m_activeSurfaceType = sv.surfaceType;
559    m_brainShaderMode = sv.brainShader;
560    m_bemShaderMode = sv.bemShader;
561    m_currentVisMode = sv.overlayMode;
562    const ViewVisibilityProfile &visibility = sv.visibility;
563
       // Field map must be rebuilt only when the head/helmet target flips.
564    const bool remapMegSurface = (m_fieldMapper.megFieldMapOnHead() != visibility.megFieldMapOnHead);
565    m_fieldMapper.setMegFieldMapOnHead(visibility.megFieldMapOnHead);
566    m_dipolesVisible = visibility.dipoles;
567    m_networkVisible = visibility.network;
568
569    // Note: we intentionally do NOT call setVisualizationMode() on surfaces
570    // here. Each viewport's overlay mode is sent as a per-draw shader
571    // uniform (sceneData.overlayMode), so the surface objects must keep
572    // their vertex data intact — in particular the STC colour channel —
573    // regardless of which viewport is currently selected for editing.
574
575    if (m_fieldMapper.isLoaded()) {
576        if (remapMegSurface) {
577            m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans);
578        }
579        m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
580    }
581
582    // Update viewport label highlighting
583    updateViewportLabelHighlight();
584
585    saveMultiViewSettings();
586
587    if (prev != m_visualizationEditTarget) {
588        emit visualizationEditTargetChanged(m_visualizationEditTarget);
589    }
590}
591
592//=============================================================================================================
593
// Returns the viewport index currently targeted by visualization setters
// (-1 = single view). NOTE(review): the signature line is missing from this
// extraction (presumably int BrainView::visualizationEditTarget() const).
595{
596    return m_visualizationEditTarget;
597}
598
599//=============================================================================================================
600
601QString BrainView::activeSurfaceForTarget(int target) const
602{
603 return subViewForTarget(target).surfaceType;
604}
605
606//=============================================================================================================
607
608QString BrainView::shaderModeForTarget(int target) const
609{
610 return shaderModeName(subViewForTarget(target).brainShader);
611}
612
613//=============================================================================================================
614
615QString BrainView::bemShaderModeForTarget(int target) const
616{
617 return shaderModeName(subViewForTarget(target).bemShader);
618}
619
620//=============================================================================================================
621
622QString BrainView::overlayModeForTarget(int target) const
623{
624 return visualizationModeName(subViewForTarget(target).overlayMode);
625}
626
627//=============================================================================================================
628
629ViewVisibilityProfile& BrainView::visibilityProfileForTarget(int target)
630{
631 return subViewForTarget(target).visibility;
632}
633
634//=============================================================================================================
635
636const ViewVisibilityProfile& BrainView::visibilityProfileForTarget(int target) const
637{
638 return subViewForTarget(target).visibility;
639}
640
641//=============================================================================================================
642
643SubView& BrainView::subViewForTarget(int target)
644{
645 const int normalized = normalizedVisualizationTarget(target, static_cast<int>(m_subViews.size()) - 1);
646 return (normalized < 0) ? m_singleView : m_subViews[normalized];
647}
648
649//=============================================================================================================
650
651const SubView& BrainView::subViewForTarget(int target) const
652{
653 const int normalized = normalizedVisualizationTarget(target, static_cast<int>(m_subViews.size()) - 1);
654 return (normalized < 0) ? m_singleView : m_subViews[normalized];
655}
656
657//=============================================================================================================
658
659// Note: SubView::isBrainSurfaceKey, matchesSurfaceType, shouldRenderSurface,
660// and applyOverlayToSurfaces are defined in core/viewstate.cpp.
661
662//=============================================================================================================
663
664bool BrainView::objectVisibleForTarget(const QString &object, int target) const
665{
666 return visibilityProfileForTarget(target).isObjectVisible(object);
667}
668
669//=============================================================================================================
670
// Whether the target viewport maps the MEG field onto the head surface
// (as opposed to the helmet). NOTE(review): the signature line is missing
// from this extraction — confirm name/signature upstream.
672{
673    return visibilityProfileForTarget(target).megFieldMapOnHead;
674}
675
676//=============================================================================================================
677
678void BrainView::updateInflatedSurfaceTransforms()
679{
680 const bool needsInflated = (m_singleView.surfaceType == "inflated")
681 || std::any_of(m_subViews.cbegin(), m_subViews.cend(),
682 [](const SubView &sv) { return sv.surfaceType == "inflated"; });
683
684 const QString lhKey = "lh_inflated";
685 const QString rhKey = "rh_inflated";
686
687 if (!m_surfaces.contains(lhKey) || !m_surfaces.contains(rhKey)) {
688 return;
689 }
690
691 auto lhSurf = m_surfaces[lhKey];
692 auto rhSurf = m_surfaces[rhKey];
693
694 QMatrix4x4 identity;
695 lhSurf->applyTransform(identity);
696 rhSurf->applyTransform(identity);
697
698 if (!needsInflated) {
699 return;
700 }
701
702 const float lhMaxX = lhSurf->maxX();
703 const float rhMinX = rhSurf->minX();
704
705 const float gap = 0.005f;
706 const float lhOffset = -gap / 2.0f - lhMaxX;
707 const float rhOffset = gap / 2.0f - rhMinX;
708
709 lhSurf->translateX(lhOffset);
710 rhSurf->translateX(rhOffset);
711}
712
713void BrainView::setBemShaderMode(const QString &modeName)
714{
715 const BrainRenderer::ShaderMode mode = shaderModeFromName(modeName);
716
717 subViewForTarget(m_visualizationEditTarget).bemShader = mode;
718
719 m_bemShaderMode = mode;
720 saveMultiViewSettings();
721 update();
722}
723
724//=============================================================================================================
725
// Copies each view's brain shader onto its BEM shader so both surface groups
// render with the same mode, then refreshes the legacy member and persists.
// NOTE(review): the signature line is missing from this extraction
// (presumably void BrainView::linkBemShaderToBrain() or similar) — confirm.
727{
728    m_singleView.bemShader = m_singleView.brainShader;
729    for (int i = 0; i < m_subViews.size(); ++i) {
730        m_subViews[i].bemShader = m_subViews[i].brainShader;
731    }
732
733    m_bemShaderMode = subViewForTarget(m_visualizationEditTarget).bemShader;
734
735    saveMultiViewSettings();
736    update();
737}
738
739void BrainView::setSensorVisible(const QString &type, bool visible)
740{
741 const QString object = SURFACEKEYS::sensorTypeToObjectKey(type);
742 if (object.isEmpty()) return;
743
744 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
745 profile.setObjectVisible(object, visible);
746
747 // Cascade parent toggle to child sub-types so that e.g. "MEG" also
748 // enables/disables MEG/Grad and MEG/Mag sub-types.
749 // Note: MEG Helmet has its own independent checkbox and is NOT cascaded.
750 if (type == QLatin1String("MEG")) {
751 profile.sensMegGrad = visible;
752 profile.sensMegMag = visible;
753 } else if (type == QLatin1String("EEG")) {
754 // No sub-types for EEG currently, but keep symmetric.
755 } else if (type == QLatin1String("Digitizer")) {
756 profile.digCardinal = visible;
757 profile.digHpi = visible;
758 profile.digEeg = visible;
759 profile.digExtra = visible;
760 }
761
762 saveMultiViewSettings();
763 update();
764}
765
// Enables/disables the head-to-MRI transform on sensor geometry; rebuilds
// sensor transforms and repaints only when the flag actually changes.
// NOTE(review): the signature line is missing from this extraction
// (presumably void BrainView::setApplySensorTransform(bool enabled)).
767{
768    if (m_applySensorTrans != enabled) {
769        m_applySensorTrans = enabled;
770        refreshSensorTransforms();
771        update();
772    }
773}
774
775//=============================================================================================================
776
// Stores a file path that overrides the default MEG helmet surface; the path
// is only recorded here, not loaded.
777void BrainView::setMegHelmetOverride(const QString &path)
778{
779    m_megHelmetOverridePath = path;
780}
781
// Shows/hides dipole objects in the currently edited viewport and mirrors the
// flag into the legacy member. NOTE(review): the signature line is missing
// from this extraction (presumably void BrainView::setDipolesVisible(bool visible)).
783{
784    auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
785    profile.dipoles = visible;
786    m_dipolesVisible = visible;
787    saveMultiViewSettings();
788    update();
789}
790
791//=============================================================================================================
792
// Switches the overlay/visualization mode of the currently edited viewport
// and pushes the mode into the brain hemisphere surfaces (lh_*/rh_* only).
// NOTE(review): the line declaring `mode` (orig. line 795, presumably
// `... mode = visualizationModeFromName(modeName);`) is missing from this
// extraction — restore it from the original source before compiling.
793void BrainView::setVisualizationMode(const QString &modeName)
794{
796    SubView &sv = subViewForTarget(m_visualizationEditTarget);
797    sv.overlayMode = mode;
798
799    m_currentVisMode = mode;
800
801    // Propagate the mode to brain hemisphere surfaces only (lh_*, rh_*)
802    // so that the primary colour channel holds the right data: curvature
803    // grays for Scientific or STC colours for SourceEstimate.
804    // BEM, sensor, and source-space surfaces are left untouched.
805    for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
806        const QString &key = it.key();
807        if (key.startsWith("lh_") || key.startsWith("rh_")) {
808            it.value()->setVisualizationMode(mode);
809        }
810    }
811
812    saveMultiViewSettings();
813    update();
814}
815
816//=============================================================================================================
817
818void BrainView::setHemiVisible(int hemiIdx, bool visible)
819{
820 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
821 if (hemiIdx == 0) {
822 profile.lh = visible;
823 } else if (hemiIdx == 1) {
824 profile.rh = visible;
825 }
826 saveMultiViewSettings();
827 update();
828}
829
830//=============================================================================================================
831
832void BrainView::setBemVisible(const QString &name, bool visible)
833{
834 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
835 profile.setObjectVisible("bem_" + name, visible);
836 saveMultiViewSettings();
837 update();
838}
839
// Toggles the default-colour override on every BEM surface (keys "bem_*")
// and repaints. NOTE(review): the signature line is missing from this
// extraction (presumably void BrainView::setBemUseDefaultColor(bool enabled)).
841{
842    for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
843        if (it.key().startsWith("bem_")) {
844            it.value()->setUseDefaultColor(enabled);
845        }
846    }
847    update();
848}
849
850//=============================================================================================================
851
// Enables/disables scene lighting and repaints. NOTE(review): the signature
// line is missing from this extraction (presumably
// void BrainView::setLightingEnabled(bool enabled)).
853{
854    m_lightingEnabled = enabled;
855    update();
856}
857
858//=============================================================================================================
859
// Grabs the current framebuffer and writes it to a zero-padded, sequentially
// numbered PNG (snapshot_refactor_NNNN.png) in the working directory.
// NOTE(review): the signature line is missing from this extraction; the
// return value of img.save() is ignored — write failures go unreported.
861{
862    QImage img = grabFramebuffer();
863    QString fileName = QString("snapshot_refactor_%1.png").arg(m_snapshotCounter++, 4, 10, QChar('0'));
864    img.save(fileName);
865
866}
867
868//=============================================================================================================
869
// Switches to single-view mode: cancels any splitter drag, restores the
// default cursor, persists the configuration, and refreshes separators and
// overlay widget layout. NOTE(review): the signature line is missing from
// this extraction (presumably void BrainView::setSingleViewMode()).
871{
872    m_viewMode = SingleView;
873    m_isDraggingSplitter = false;
874    m_activeSplitter = SplitterHit::None;
875    unsetCursor();
876    saveMultiViewSettings();
877    updateViewportSeparators();
878    updateOverlayLayout();
879    update();
880}
881
882//=============================================================================================================
883
// Switches to multi-view mode, persists the configuration, and refreshes
// separators and overlay widget layout. NOTE(review): the signature line is
// missing from this extraction (presumably void BrainView::setMultiViewMode()).
885{
886    m_viewMode = MultiView;
887    saveMultiViewSettings();
888    updateViewportSeparators();
889    updateOverlayLayout();
890    update();
891}
892
893//=============================================================================================================
894
// Sets how many viewports are shown (clamped to [1, subview capacity]):
// 1 selects single-view mode, >1 selects multi-view and enables the first N
// sub-views. NOTE(review): the signature line and orig. lines 905/910 are
// missing from this extraction — line 910 presumably assigned a default edit
// target (e.g. m_visualizationEditTarget = 0;). Restore from the original.
896{
897    count = std::clamp(count, 1, static_cast<int>(m_subViews.size()));
898    m_viewCount = count;
899
900    if (count == 1) {
901        m_viewMode = SingleView;
902        m_isDraggingSplitter = false;
903        m_activeSplitter = SplitterHit::None;
904        unsetCursor();
906    } else {
907        m_viewMode = MultiView;
908        // Default edit target to first pane when entering multi-view
909        if (m_visualizationEditTarget < 0)
911    }
912
913    // Enable first N sub-views, disable the rest
914    for (int i = 0; i < m_subViews.size(); ++i)
915        m_subViews[i].enabled = (i < count);
916
917    saveMultiViewSettings();
918    updateViewportSeparators();
919    updateOverlayLayout();
920    update();
921}
922
923//=============================================================================================================
924
// Restores the default splitter positions, caches the reset split fractions,
// persists the configuration, and refreshes separators and overlays.
// NOTE(review): the signature line is missing from this extraction
// (presumably void BrainView::resetViewportLayout()).
926{
927    m_layout.resetSplits();
928    m_multiSplitX = m_layout.splitX();
929    m_multiSplitY = m_layout.splitY();
930    saveMultiViewSettings();
931    updateViewportSeparators();
932    updateOverlayLayout();
933    update();
934}
935
936bool BrainView::isViewportEnabled(int index) const
937{
938 if (index < 0 || index >= m_subViews.size()) {
939 return false;
940 }
941
942 return m_subViews[index].enabled;
943}
944
945//=============================================================================================================
946
947int BrainView::enabledViewportCount() const
948{
949 if (m_viewMode != MultiView) {
950 return 1;
951 }
952
953 int numEnabled = 0;
954 for (int i = 0; i < m_subViews.size(); ++i) {
955 if (m_subViews[i].enabled) {
956 ++numEnabled;
957 }
958 }
959
960 return numEnabled > 0 ? numEnabled : 1;
961}
962
963//=============================================================================================================
964
965QVector<int> BrainView::enabledViewportIndices() const
966{
967 QVector<int> vps;
968 if (m_viewMode == MultiView) {
969 for (int i = 0; i < m_subViews.size(); ++i) {
970 if (m_subViews[i].enabled)
971 vps.append(i);
972 }
973 if (vps.isEmpty())
974 vps.append(0);
975 } else {
976 vps.append(0);
977 }
978 return vps;
979}
980
981//=============================================================================================================
982
983int BrainView::viewportIndexAt(const QPoint& pos) const
984{
985 if (m_viewMode != MultiView) {
986 return 0;
987 }
988
989 const auto enabledViewports = enabledViewportIndices();
990 return m_layout.viewportIndexAt(pos, enabledViewports, size());
991}
992
993//=============================================================================================================
994
QRect BrainView::multiViewSlotRect(int slot, int numEnabled, const QSize& outputSize) const
{
    // Thin delegate: the multi-view layout computes the pane rectangle for
    // the given slot from the current split ratios and the output size.
    return m_layout.slotRect(slot, numEnabled, outputSize);
}
999
1000//=============================================================================================================
1001
1002SplitterHit BrainView::hitTestSplitter(const QPoint& pos, int numEnabled, const QSize& outputSize) const
1003{
1004 if (m_viewMode != MultiView || numEnabled <= 1) {
1005 return SplitterHit::None;
1006 }
1007 return m_layout.hitTestSplitter(pos, numEnabled, outputSize);
1008}
1009
1010//=============================================================================================================
1011
1012void BrainView::updateSplitterCursor(const QPoint& pos)
1013{
1014 const SplitterHit hit = hitTestSplitter(pos, enabledViewportCount(), size());
1015 const Qt::CursorShape shape = MultiViewLayout::cursorForHit(hit);
1016 if (shape == Qt::ArrowCursor) {
1017 unsetCursor();
1018 } else {
1019 setCursor(shape);
1020 }
1021}
1022
1023//=============================================================================================================
1024
1025void BrainView::updateViewportSeparators()
1026{
1027 if (!m_verticalSeparator || !m_horizontalSeparator) {
1028 return;
1029 }
1030
1031 m_verticalSeparator->hide();
1032 m_horizontalSeparator->hide();
1033
1034 const int numEnabled = enabledViewportCount();
1035 if (m_viewMode != MultiView || numEnabled <= 1) {
1036 return;
1037 }
1038
1039 QRect vRect, hRect;
1040 m_layout.separatorGeometries(numEnabled, size(), vRect, hRect);
1041
1042 if (!vRect.isEmpty()) {
1043 m_verticalSeparator->setGeometry(vRect);
1044 m_verticalSeparator->show();
1045 m_verticalSeparator->raise();
1046 }
1047 if (!hRect.isEmpty()) {
1048 m_horizontalSeparator->setGeometry(hRect);
1049 m_horizontalSeparator->show();
1050 m_horizontalSeparator->raise();
1051 }
1052
1053 updateOverlayLayout();
1054}
1055
1056//=============================================================================================================
1057
1058void BrainView::updateOverlayLayout()
1059{
1060 const auto enabledViewports = enabledViewportIndices();
1061
1062 if (m_fpsLabel) {
1063 m_fpsLabel->setVisible(m_infoPanelVisible);
1064 m_fpsLabel->adjustSize();
1065 const int perfBottomMargin = 2;
1066
1067 if (m_viewMode == MultiView) {
1068 m_fpsLabel->move(width() - m_fpsLabel->width() - 10,
1069 height() - m_fpsLabel->height() - perfBottomMargin);
1070 } else {
1071 m_fpsLabel->move(width() - m_fpsLabel->width() - 10,
1072 height() - m_fpsLabel->height() - perfBottomMargin);
1073 }
1074
1075 m_fpsLabel->raise();
1076 }
1077
1078 if (m_singleViewInfoLabel) {
1079 const bool showSingleInfo = (m_viewMode == SingleView) && m_infoPanelVisible;
1080 m_singleViewInfoLabel->setVisible(showSingleInfo);
1081 if (showSingleInfo) {
1082 m_singleViewInfoLabel->adjustSize();
1083 m_singleViewInfoLabel->move(width() - m_singleViewInfoLabel->width() - 8, 8);
1084 m_singleViewInfoLabel->raise();
1085 }
1086 }
1087
1088 if (m_regionLabel) {
1089 const int regionY = (m_viewMode == MultiView) ? 38 : 10;
1090 m_regionLabel->move(10, regionY);
1091 if (!m_regionLabel->text().isEmpty()) {
1092 m_regionLabel->raise();
1093 }
1094 }
1095
1096 for (int i = 0; i < m_viewportNameLabels.size(); ++i) {
1097 if (m_viewportNameLabels[i]) {
1098 m_viewportNameLabels[i]->hide();
1099 }
1100 if (m_viewportInfoLabels[i]) {
1101 m_viewportInfoLabels[i]->hide();
1102 }
1103 }
1104
1105 if (m_viewMode != MultiView) {
1106 return;
1107 }
1108
1109 const int numEnabled = enabledViewports.size();
1110 const QSize overlaySize = size();
1111 for (int slot = 0; slot < numEnabled; ++slot) {
1112 const int vp = enabledViewports[slot];
1113 QLabel* label = m_viewportNameLabels[vp];
1114 QLabel* infoLabel = m_viewportInfoLabels[vp];
1115 if (!label) {
1116 continue;
1117 }
1118
1119 const int preset = std::clamp(m_subViews[vp].preset, 0, 6);
1120 label->setText(multiViewPresetName(preset));
1121
1122 const QRect pane = multiViewSlotRect(slot, numEnabled, overlaySize);
1123 label->adjustSize();
1124 label->move(pane.x() + 8, pane.y() + 8);
1125 label->setVisible(true);
1126 label->raise();
1127
1128 if (infoLabel) {
1129 infoLabel->adjustSize();
1130 infoLabel->move(pane.x() + pane.width() - infoLabel->width() - 8,
1131 pane.y() + 8);
1132 infoLabel->setVisible(m_infoPanelVisible);
1133 infoLabel->raise();
1134 }
1135 }
1136
1137 updateViewportLabelHighlight();
1138}
1139
1140//=============================================================================================================
1141
1142void BrainView::updateViewportLabelHighlight()
1143{
1144 static const QString normalStyle =
1145 QStringLiteral("color: white; font-weight: bold; font-family: sans-serif; "
1146 "font-size: 12px; background: transparent; padding: 2px 4px;");
1147 static const QString selectedStyle =
1148 QStringLiteral("color: #FFD54F; font-weight: bold; font-family: sans-serif; "
1149 "font-size: 13px; background: rgba(255,213,79,40); "
1150 "border: 1px solid #FFD54F; border-radius: 3px; padding: 2px 6px;");
1151
1152 for (int i = 0; i < m_viewportNameLabels.size(); ++i) {
1153 if (!m_viewportNameLabels[i]) continue;
1154 const bool selected = (m_viewMode == MultiView && m_visualizationEditTarget == i);
1155 m_viewportNameLabels[i]->setStyleSheet(selected ? selectedStyle : normalStyle);
1156 m_viewportNameLabels[i]->adjustSize();
1157 }
1158}
1159
1160//=============================================================================================================
1161
void BrainView::logPerspectiveRotation(const QString& context) const
{
    // Intentionally a no-op: rotation logging is disabled, but the hook is
    // kept so existing call sites (e.g. keyPressEvent) need not change.
    Q_UNUSED(context);
}
1166
1167//=============================================================================================================
1168
void BrainView::loadMultiViewSettings()
{
    // Restores view mode, split ratios, camera orientation, per-pane state
    // and overlay preferences from the persistent application settings.
    // Counterpart of saveMultiViewSettings().
    QSettings settings("MNECPP");
    settings.beginGroup("ex_brain_view/BrainView");

    m_multiSplitX = settings.value("multiSplitX", 0.5f).toFloat();
    m_multiSplitY = settings.value("multiSplitY", 0.5f).toFloat();

    const int savedViewMode = settings.value("viewMode", static_cast<int>(SingleView)).toInt();
    m_viewMode = (savedViewMode == static_cast<int>(MultiView)) ? MultiView : SingleView;
    m_viewCount = std::clamp(settings.value("viewCount", 1).toInt(), 1, static_cast<int>(m_subViews.size()));
    // Reconcile: viewCount > 1 implies MultiView
    // NOTE(review): this reconciliation overwrites m_viewMode in BOTH
    // directions, so the savedViewMode read above is effectively dead and a
    // saved MultiView with viewCount == 1 is not restorable — confirm intent.
    if (m_viewCount > 1) m_viewMode = MultiView;
    else m_viewMode = SingleView;

    // Restore the camera quaternion only when all four components exist.
    const bool hasCameraQuat = settings.contains("cameraRotW")
                               && settings.contains("cameraRotX")
                               && settings.contains("cameraRotY")
                               && settings.contains("cameraRotZ");
    if (hasCameraQuat) {
        const float w = settings.value("cameraRotW", 1.0f).toFloat();
        const float x = settings.value("cameraRotX", 0.0f).toFloat();
        const float y = settings.value("cameraRotY", 0.0f).toFloat();
        const float z = settings.value("cameraRotZ", 0.0f).toFloat();
        m_cameraRotation = QQuaternion(w, x, y, z);
        // Guard against a degenerate (near-zero) quaternion before normalizing.
        if (m_cameraRotation.lengthSquared() <= std::numeric_limits<float>::epsilon()) {
            m_cameraRotation = QQuaternion();
        } else {
            m_cameraRotation.normalize();
        }
    }

    // Reset per-index defaults, then load saved state on top
    for (int i = 0; i < m_subViews.size(); ++i) {
        m_subViews[i] = SubView::defaultForIndex(i);
        m_subViews[i].enabled = (i < m_viewCount);
    }

    // Delegate per-SubView serialization
    m_singleView.load(settings, "single_", m_cameraRotation);
    for (int i = 0; i < m_subViews.size(); ++i)
        m_subViews[i].load(settings, QStringLiteral("multi%1_").arg(i), m_cameraRotation);

    // Clamp the saved edit target into the valid pane range.
    const int maxIdx = static_cast<int>(m_subViews.size()) - 1;
    m_visualizationEditTarget = normalizedVisualizationTarget(
        settings.value("visualizationEditTarget", -1).toInt(), maxIdx);

    m_infoPanelVisible = settings.value("infoPanelVisible", true).toBool();

    settings.endGroup();

    // Clamp the split ratios to sane bounds and push them into the layout.
    m_multiSplitX = std::clamp(m_multiSplitX, 0.15f, 0.85f);
    m_multiSplitY = std::clamp(m_multiSplitY, 0.15f, 0.85f);
    m_layout.setSplitX(m_multiSplitX);
    m_layout.setSplitY(m_multiSplitY);

    // Re-apply the target so dependent UI (label highlight etc.) updates.
    setVisualizationEditTarget(m_visualizationEditTarget);
}
1227
1228//=============================================================================================================
1229
void BrainView::saveMultiViewSettings() const
{
    // Persists view mode, split ratios, camera orientation, per-pane state
    // and overlay preferences to the application settings. Counterpart of
    // loadMultiViewSettings().
    QSettings settings("MNECPP");
    settings.beginGroup("ex_brain_view/BrainView");
    settings.setValue("multiSplitX", m_multiSplitX);
    settings.setValue("multiSplitY", m_multiSplitY);
    settings.setValue("viewMode", static_cast<int>(m_viewMode));
    settings.setValue("viewCount", m_viewCount);
    settings.setValue("cameraRotW", m_cameraRotation.scalar());
    settings.setValue("cameraRotX", m_cameraRotation.x());
    settings.setValue("cameraRotY", m_cameraRotation.y());
    settings.setValue("cameraRotZ", m_cameraRotation.z());
    // NOTE(review): the "viewportEnabled%1" keys are written here but never
    // read back in loadMultiViewSettings(), which derives enabled panes from
    // viewCount instead — confirm whether these keys are legacy.
    for (int i = 0; i < m_subViews.size(); ++i)
        settings.setValue(QStringLiteral("viewportEnabled%1").arg(i), m_subViews[i].enabled);
    settings.setValue("visualizationEditTarget", m_visualizationEditTarget);
    settings.setValue("infoPanelVisible", m_infoPanelVisible);

    // Delegate per-SubView serialization
    m_singleView.save(settings, "single_");
    for (int i = 0; i < m_subViews.size(); ++i)
        m_subViews[i].save(settings, QStringLiteral("multi%1_").arg(i));

    settings.endGroup();
}
1254
1255//=============================================================================================================
1256
1257void BrainView::setViewportEnabled(int index, bool enabled)
1258{
1259 if (index >= 0 && index < m_subViews.size()) {
1260 m_subViews[index].enabled = enabled;
1261 saveMultiViewSettings();
1262 updateViewportSeparators();
1263 updateOverlayLayout();
1264 update();
1265 }
1266}
1267
1268//=============================================================================================================
1269
1270void BrainView::setViewportCameraPreset(int index, int preset)
1271{
1272 if (index < 0 || index >= static_cast<int>(m_subViews.size()))
1273 return;
1274 preset = std::clamp(preset, 0, 6);
1275 if (m_subViews[index].preset == preset)
1276 return;
1277 m_subViews[index].preset = preset;
1278 saveMultiViewSettings();
1279 updateOverlayLayout();
1280 update();
1281}
1282
1283//=============================================================================================================
1284
1286{
1287 if (index < 0 || index >= static_cast<int>(m_subViews.size()))
1288 return -1;
1289 return std::clamp(m_subViews[index].preset, 0, 6);
1290}
1291
1292//=============================================================================================================
1293
1295{
1296 m_infoPanelVisible = visible;
1297 saveMultiViewSettings();
1298 updateOverlayLayout();
1299}
1300
1301//=============================================================================================================
1302
void BrainView::resizeEvent(QResizeEvent *event)
{
    // Keep pane separators and HUD overlays aligned with the new widget size.
    QRhiWidget::resizeEvent(event);
    updateViewportSeparators();
    updateOverlayLayout();
}
1309
1310//=============================================================================================================
1311
void BrainView::initialize(QRhiCommandBuffer *cb)
{
    // QRhiWidget hook: (re)create the renderer whenever the RHI backend is
    // (re)initialized. The command buffer is not needed at this stage.
    Q_UNUSED(cb);

    m_renderer = std::make_unique<BrainRenderer>();
}
1318
1319//=============================================================================================================
1320
1321void BrainView::render(QRhiCommandBuffer *cb)
1322{
1323 // Check if there is anything to render
1324 bool hasSurfaces = !m_surfaces.isEmpty();
1325 bool hasDipoles = !m_itemDipoleMap.isEmpty() || m_dipoles; // Check managed dipoles too
1326
1327 // If absolutely nothing is loaded, render black background
1328 if (!hasSurfaces && !hasDipoles) {
1329 // No surface loaded: render a black background instead of leaving the widget uninitialized
1330 if (!m_renderer) {
1331 m_renderer = std::make_unique<BrainRenderer>();
1332 }
1333 m_renderer->initialize(rhi(), renderTarget()->renderPassDescriptor(), sampleCount());
1334 m_renderer->beginFrame(cb, renderTarget());
1335 m_renderer->endFrame(cb);
1336 return;
1337 }
1338
1339 // Ensure active surface pointer is valid if possible, otherwise just use first available for stats
1340 if (!m_activeSurface && !m_surfaces.isEmpty()) {
1341 m_activeSurface = m_surfaces.begin().value();
1342 }
1343
1344
1345 m_frameCount++;
1346 if (m_fpsTimer.elapsed() >= 500) {
1347 float fps = m_frameCount / (m_fpsTimer.elapsed() / 1000.0f);
1348 auto countVerticesForSubView = [this](const SubView &sv) -> qint64 {
1349 qint64 total = 0;
1350
1351 for (auto it = m_surfaces.cbegin(); it != m_surfaces.cend(); ++it) {
1352 const QString &key = it.key();
1353 auto surface = it.value();
1354 if (!surface) {
1355 continue;
1356 }
1357
1358 if (!sv.shouldRenderSurface(key)) {
1359 continue;
1360 }
1361
1362 if (SubView::isBrainSurfaceKey(key)) {
1363 if (!sv.matchesSurfaceType(key)) {
1364 continue;
1365 }
1366 } else {
1367 if (!surface->isVisible()) {
1368 continue;
1369 }
1370 }
1371
1372 total += surface->vertexCount();
1373 }
1374
1375 return total;
1376 };
1377
1378 qint64 vCount = 0;
1379 if (m_viewMode == MultiView) {
1380 for (int vp : enabledViewportIndices()) {
1381 vCount += countVerticesForSubView(m_subViews[vp]);
1382 }
1383 } else {
1384 vCount = countVerticesForSubView(m_singleView);
1385 }
1386
1387 m_fpsLabel->setText(QString("FPS: %1\nVertices: %2").arg(fps, 0, 'f', 1).arg(vCount));
1388 updateOverlayLayout();
1389 m_fpsLabel->raise();
1390 m_frameCount = 0;
1391 m_fpsTimer.restart();
1392 }
1393
1394 // Initialize renderer
1395 m_renderer->initialize(rhi(), renderTarget()->renderPassDescriptor(), sampleCount());
1396
1397 // Determine viewport configuration
1398 QSize outputSize = renderTarget()->pixelSize();
1399
1400 // Build list of enabled viewports
1401 const auto enabledViewports = enabledViewportIndices();
1402 int numEnabled = enabledViewports.size();
1403
1404 // ── Pre-render phase ────────────────────────────────────────────────
1405 // Apply per-pane overlay modes and pre-upload ALL Immutable GPU buffers
1406 // BEFORE the render pass starts. On Metal, uploading an Immutable
1407 // buffer during an active render pass forces a pass restart which
1408 // resets the viewport state, causing subsequent draws to cover the
1409 // full framebuffer instead of the intended pane.
1410 //
1411 // By doing all static uploads here (outside any render pass), we
1412 // guarantee that the draw loop below only records Dynamic uniform
1413 // updates — those never interrupt the pass.
1414
1415 // Pre-upload every surface and dipole buffer that is dirty or new.
1416 // NOTE: Overlay modes are applied per-pane inside the render loop below
1417 // (not here), because different panes can have different overlays on the
1418 // same shared BrainSurface objects. Applying all pane overlays
1419 // sequentially here would leave only the last pane's vertex colours.
1420 {
1421 QRhiResourceUpdateBatch *preUpload = rhi()->nextResourceUpdateBatch();
1422 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1423 it.value()->updateBuffers(rhi(), preUpload);
1424 }
1425 if (m_debugPointerSurface) {
1426 m_debugPointerSurface->updateBuffers(rhi(), preUpload);
1427 }
1428 for (auto it = m_itemDipoleMap.begin(); it != m_itemDipoleMap.end(); ++it) {
1429 it.value()->updateBuffers(rhi(), preUpload);
1430 }
1431 if (m_dipoles) {
1432 m_dipoles->updateBuffers(rhi(), preUpload);
1433 }
1434 // Network buffers are updated inside renderNetwork() via updateNodeBuffers/updateEdgeBuffers
1435 cb->resourceUpdate(preUpload);
1436 }
1437
1438 // ── Render pass ─────────────────────────────────────────────────────
1439 m_renderer->beginFrame(cb, renderTarget());
1440
1441 for (int slot = 0; slot < numEnabled; ++slot) {
1442 int vp = (m_viewMode == MultiView) ? enabledViewports[slot] : 0;
1443 const SubView &sv = (m_viewMode == MultiView) ? m_subViews[vp] : m_singleView;
1444 const int preset = (m_viewMode == MultiView) ? std::clamp(sv.preset, 0, 6) : 1;
1445
1446 const QRect paneRect = (m_viewMode == MultiView)
1447 ? multiViewSlotRect(slot, numEnabled, outputSize)
1448 : QRect(0, 0, outputSize.width(), outputSize.height());
1449
1450 QRect renderRect = paneRect;
1451 if (m_viewMode == MultiView && numEnabled > 1) {
1452 constexpr int separatorPx = 2;
1453
1454 if (numEnabled == 2) {
1455 if (slot == 0) {
1456 renderRect.setWidth(std::max(1, renderRect.width() - separatorPx));
1457 }
1458 } else if (numEnabled == 3) {
1459 // 3-view: slot 0 = full top row, slots 1&2 = bottom row
1460 if (slot == 0) {
1461 // Top pane: no right neighbor, has bottom neighbor
1462 renderRect.setHeight(std::max(1, renderRect.height() - separatorPx));
1463 } else if (slot == 1) {
1464 // Bottom-left: has right neighbor, no bottom neighbor
1465 renderRect.setWidth(std::max(1, renderRect.width() - separatorPx));
1466 }
1467 // slot 2 (bottom-right): no insets needed
1468 } else {
1469 const int col = slot % 2;
1470 const int row = slot / 2;
1471
1472 const bool hasRightNeighbor = (col == 0)
1473 && (slot + 1 < numEnabled)
1474 && ((slot / 2) == ((slot + 1) / 2));
1475 const bool hasBottomNeighbor = (row == 0)
1476 && (slot + 2 < numEnabled);
1477
1478 if (hasRightNeighbor) {
1479 renderRect.setWidth(std::max(1, renderRect.width() - separatorPx));
1480 }
1481 if (hasBottomNeighbor) {
1482 renderRect.setHeight(std::max(1, renderRect.height() - separatorPx));
1483 }
1484 }
1485 }
1486
1487 const int viewX = renderRect.x();
1488 const int viewY = outputSize.height() - (renderRect.y() + renderRect.height());
1489 const int viewW = std::max(1, renderRect.width());
1490 const int viewH = std::max(1, renderRect.height());
1491
1492 QRhiViewport viewport(viewX, viewY, viewW, viewH);
1493 QRhiScissor scissor(viewX, viewY, viewW, viewH);
1494 const float aspectRatio = float(viewW) / float(viewH);
1495
1496 // Set viewport and scissor
1497 cb->setViewport(viewport);
1498 cb->setScissor(scissor);
1499
1500 // Calculate camera for this viewport
1501 m_camera.setSceneCenter(m_sceneCenter);
1502 m_camera.setSceneSize(m_sceneSize);
1503 m_camera.setRotation(m_cameraRotation);
1504 m_camera.setZoom(m_zoom);
1505 const CameraResult cam = (m_viewMode == MultiView)
1506 ? m_camera.computeMultiView(sv, aspectRatio)
1507 : m_camera.computeSingleView(aspectRatio);
1508
1509 BrainRenderer::SceneData sceneData;
1510 sceneData.mvp = rhi()->clipSpaceCorrMatrix();
1511 sceneData.mvp *= cam.projection;
1512 sceneData.mvp *= cam.view;
1513 sceneData.mvp *= cam.model;
1514
1515 sceneData.cameraPos = cam.cameraPos;
1516 sceneData.lightDir = cam.cameraPos.normalized();
1517 sceneData.lightingEnabled = m_lightingEnabled;
1518 sceneData.viewportX = viewX;
1519 sceneData.viewportY = viewY;
1520 sceneData.viewportW = viewW;
1521 sceneData.viewportH = viewH;
1522 sceneData.scissorX = viewX;
1523 sceneData.scissorY = viewY;
1524 sceneData.scissorW = viewW;
1525 sceneData.scissorH = viewH;
1526
1527 // Per-draw overlayMode uniform — the shader selects the vertex colour
1528 // channel (curvature / annotation) so no per-pane vertex buffer
1529 // re-uploads are needed.
1530 sceneData.overlayMode = static_cast<float>(sv.overlayMode);
1531
1532 // Pass 1: Opaque Surfaces (Brain surfaces)
1533 // Use viewport-specific shader from subview
1534 BrainRenderer::ShaderMode currentShader = sv.brainShader;
1535 BrainRenderer::ShaderMode currentBemShader = sv.bemShader;
1536 const QString overlayName = visualizationModeName(sv.overlayMode);
1537
1538 // Collect matched brain surface keys for this pane's info panel
1539 QStringList drawnKeys;
1540 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1541 if (!sv.matchesSurfaceType(it.key())) continue;
1542 if (!sv.shouldRenderSurface(it.key())) continue;
1543 drawnKeys << it.key();
1544 }
1545 const QString drawnInfo = drawnKeys.isEmpty() ? "none" : drawnKeys.join(", ");
1546
1547 if (m_viewMode == MultiView && m_viewportInfoLabels[vp]) {
1548 m_viewportInfoLabels[vp]->setText(
1549 QString("Shader: %1\nSurface: %2\nOverlay: %3\nDrawn: %4")
1550 .arg(shaderModeName(currentShader), sv.surfaceType, overlayName, drawnInfo));
1551 } else if (m_viewMode == SingleView && m_singleViewInfoLabel) {
1552 m_singleViewInfoLabel->setText(
1553 QString("Shader: %1\nSurface: %2\nOverlay: %3\nDrawn: %4")
1554 .arg(shaderModeName(currentShader), sv.surfaceType, overlayName, drawnInfo));
1555 }
1556
1557 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1558 if (!sv.matchesSurfaceType(it.key())) continue;
1559 if (!sv.shouldRenderSurface(it.key())) continue;
1560
1561 m_renderer->renderSurface(cb, rhi(), sceneData, it.value().get(), currentShader);
1562 }
1563
1564 // Pass 1b: Source Space Points (use same shader as brain for consistent depth/blend)
1565 // These use their own vertex colour, so force overlayMode to pass-through (Scientific)
1566 BrainRenderer::SceneData nonBrainSceneData = sceneData;
1567 nonBrainSceneData.overlayMode = static_cast<float>(BrainSurface::ModeScientific);
1568
1569 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1570 if (!it.key().startsWith("srcsp_")) continue;
1571 if (!sv.shouldRenderSurface(it.key())) continue;
1572 if (!it.value()->isVisible()) continue;
1573 m_renderer->renderSurface(cb, rhi(), nonBrainSceneData, it.value().get(), currentShader);
1574 }
1575
1576 // Pass 1c: Digitizer Points (opaque small spheres, render like source space)
1577 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1578 if (!it.key().startsWith("dig_")) continue;
1579 if (!sv.shouldRenderSurface(it.key())) continue;
1580 if (!it.value()->isVisible()) continue;
1581 m_renderer->renderSurface(cb, rhi(), nonBrainSceneData, it.value().get(), currentShader);
1582 }
1583
1584 // Pass 2: Transparent Surfaces sorted Back-to-Front
1585 struct RenderItem {
1586 BrainSurface* surf;
1587 float dist;
1589 float overlayMode; // per-item overlay mode
1590 };
1591 QVector<RenderItem> transparentItems;
1592
1593 // Determine per-viewport field-map visibility
1594 const bool megFieldVisible = sv.visibility.megFieldMap;
1595 const bool eegFieldVisible = sv.visibility.eegFieldMap;
1596 const QString &megFieldKey = m_fieldMapper.megSurfaceKey();
1597 const QString &eegFieldKey = m_fieldMapper.eegSurfaceKey();
1598
1599 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1600 bool isSensor = it.key().startsWith("sens_");
1601 bool isBem = it.key().startsWith("bem_");
1602
1603 if (!isSensor && !isBem) continue;
1604 if (!sv.shouldRenderSurface(it.key())) continue;
1605 if (!it.value()->isVisible()) continue;
1606
1607 QVector3D min, max;
1608 it.value()->boundingBox(min, max);
1609 QVector3D center = (min + max) * 0.5f;
1610 float d = (sceneData.cameraPos - center).lengthSquared();
1611
1613 if (isBem) mode = currentBemShader;
1614
1615 // For the field-map target surfaces, use ModeSurface (holographic
1616 // shell) when the field map is not visible in this viewport.
1617 // Otherwise use ModeScientific so the field-map vertex colours
1618 // are passed through to the shader.
1619 float itemOverlay = static_cast<float>(BrainSurface::ModeScientific);
1620 if (it.key() == megFieldKey && !megFieldVisible) {
1621 itemOverlay = static_cast<float>(BrainSurface::ModeSurface);
1622 } else if (it.key() == eegFieldKey && !eegFieldVisible) {
1623 itemOverlay = static_cast<float>(BrainSurface::ModeSurface);
1624 }
1625
1626 transparentItems.append({it.value().get(), d, mode, itemOverlay});
1627 }
1628
1629 std::sort(transparentItems.begin(), transparentItems.end(), [](const RenderItem &a, const RenderItem &b) {
1630 return a.dist > b.dist;
1631 });
1632
1633 // BEM / sensor surfaces: per-item overlayMode controls whether field
1634 // map colours are shown (Scientific) or the surface falls back to
1635 // its default holographic shell appearance (Surface).
1636 BrainRenderer::SceneData bemSceneData = sceneData;
1637
1638 for (const auto &item : transparentItems) {
1639 bemSceneData.overlayMode = item.overlayMode;
1640 m_renderer->renderSurface(cb, rhi(), bemSceneData, item.surf, item.mode);
1641 }
1642
1643 // Render Dipoles
1644 for(auto it = m_itemDipoleMap.begin(); it != m_itemDipoleMap.end(); ++it) {
1645 if (it.value()->isVisible() && sv.visibility.dipoles) {
1646 m_renderer->renderDipoles(cb, rhi(), sceneData, it.value().get());
1647 }
1648 }
1649
1650 if (sv.visibility.dipoles && m_dipoles) {
1651 m_renderer->renderDipoles(cb, rhi(), sceneData, m_dipoles.get());
1652 }
1653
1654 // Render Connectivity Network
1655 if (sv.visibility.network && m_network) {
1656 m_renderer->renderNetwork(cb, rhi(), sceneData, m_network.get());
1657 }
1658
1659 // Intersection Pointer
1660 if (m_hasIntersection && m_debugPointerSurface) {
1661 BrainRenderer::SceneData debugSceneData = sceneData;
1662 debugSceneData.overlayMode = 0.0f; // pass-through for holographic shell
1663
1664 QMatrix4x4 translation;
1665 translation.translate(m_lastIntersectionPoint);
1666
1667 debugSceneData.mvp = rhi()->clipSpaceCorrMatrix() * cam.projection * cam.view * cam.model * translation;
1668
1669 m_renderer->renderSurface(cb, rhi(), debugSceneData, m_debugPointerSurface.get(), BrainRenderer::Holographic);
1670 }
1671
1672 } // End of viewport loop
1673
1674 m_renderer->endFrame(cb);
1675}
1676
1677//=============================================================================================================
1678
1679void BrainView::mousePressEvent(QMouseEvent *e)
1680{
1681 if (e->button() == Qt::LeftButton) {
1682 m_perspectiveRotatedSincePress = false;
1683 }
1684
1685 if (e->button() == Qt::LeftButton && m_viewMode == MultiView) {
1686 const int clickedVp = viewportIndexAt(e->pos());
1687 if (clickedVp >= 0 && m_viewportNameLabels[clickedVp] && m_viewportNameLabels[clickedVp]->isVisible()) {
1688 if (m_viewportNameLabels[clickedVp]->geometry().contains(e->pos())) {
1689 if (clickedVp != m_visualizationEditTarget) {
1690 setVisualizationEditTarget(clickedVp);
1691 }
1692 showViewportPresetMenu(clickedVp, mapToGlobal(e->pos()));
1693 m_lastMousePos = e->pos();
1694 return;
1695 }
1696 }
1697
1698 const int numEnabled = enabledViewportCount();
1699 const SplitterHit hit = hitTestSplitter(e->pos(), numEnabled, size());
1700 if (hit != SplitterHit::None) {
1701 m_isDraggingSplitter = true;
1702 m_activeSplitter = hit;
1703 m_lastMousePos = e->pos();
1704 updateSplitterCursor(e->pos());
1705 return;
1706 }
1707
1708 // Select the clicked viewport as the active edit target
1709 const int clickedVpForSelection = viewportIndexAt(e->pos());
1710 if (clickedVpForSelection >= 0 && clickedVpForSelection != m_visualizationEditTarget) {
1711 setVisualizationEditTarget(clickedVpForSelection);
1712 }
1713 }
1714
1715 m_lastMousePos = e->pos();
1716}
1717
1718//=============================================================================================================
1719
void BrainView::mouseMoveEvent(QMouseEvent *event)
{
    // Highest priority: an active splitter drag resizes the multi-view panes.
    if (m_isDraggingSplitter && (event->buttons() & Qt::LeftButton)) {
        m_layout.dragSplitter(event->pos(), m_activeSplitter, size());
        m_multiSplitX = m_layout.splitX();
        m_multiSplitY = m_layout.splitY();

        m_lastMousePos = event->pos();
        updateViewportSeparators();
        update();
        return;
    }

    if (event->buttons() & Qt::LeftButton) {
        if (m_viewMode == MultiView) {
            // Resolve which pane the drag happens in and its camera preset.
            const int activeVp = viewportIndexAt(event->pos());
            const int activePreset = (activeVp >= 0 && activeVp < m_subViews.size())
                ? std::clamp(m_subViews[activeVp].preset, 0, 6)
                : 1;

            if (activeVp >= 0 && !multiViewPresetIsPerspective(activePreset)) {
                // Planar views (Top/Front/Left): pan along the view plane
                const QPoint diff = event->pos() - m_lastMousePos;
                CameraController::applyMousePan(diff, m_subViews[activeVp].pan, m_sceneSize);
                m_lastMousePos = event->pos();
                update();
                return;
            }

            if (activeVp >= 0 && multiViewPresetIsPerspective(activePreset)) {
                // Perspective view: rotate
                QPoint diff = event->pos() - m_lastMousePos;
                CameraController::applyMouseRotation(diff, m_subViews[activeVp].perspectiveRotation);

                // Record the rotation so mouseReleaseEvent persists it.
                m_perspectiveRotatedSincePress = true;
                m_lastMousePos = event->pos();
                update();
                return;
            }

            // Drag outside any pane: just track the position.
            m_lastMousePos = event->pos();
            return;
        }

        // Single-view rotation
        QPoint diff = event->pos() - m_lastMousePos;
        CameraController::applyMouseRotation(diff, m_cameraRotation);

        m_lastMousePos = event->pos();
        update();
    } else {
        // Hover (no buttons held): show the resize cursor over splitters in
        // multi-view, then cast a pick ray for hover feedback.
        if (m_viewMode == MultiView) {
            updateSplitterCursor(event->pos());
        } else {
            unsetCursor();
        }
        castRay(event->pos());
    }
}
1779
1780//=============================================================================================================
1781
1782void BrainView::mouseReleaseEvent(QMouseEvent *event)
1783{
1784 if (event->button() == Qt::LeftButton && m_isDraggingSplitter) {
1785 m_isDraggingSplitter = false;
1786 m_activeSplitter = SplitterHit::None;
1787 saveMultiViewSettings();
1788 updateSplitterCursor(event->pos());
1789 return;
1790 }
1791
1792 if (event->button() == Qt::LeftButton && m_viewMode == MultiView && m_perspectiveRotatedSincePress) {
1793 m_perspectiveRotatedSincePress = false;
1794 saveMultiViewSettings();
1795 }
1796
1797 // Save pan offset after dragging in a planar viewport
1798 if (event->button() == Qt::LeftButton && m_viewMode == MultiView && !m_perspectiveRotatedSincePress) {
1799 saveMultiViewSettings();
1800 }
1801
1802 if (m_viewMode == MultiView) {
1803 updateSplitterCursor(event->pos());
1804 } else {
1805 unsetCursor();
1806 }
1807}
1808
1809//=============================================================================================================
1810
1811void BrainView::wheelEvent(QWheelEvent *event)
1812{
1813 const float delta = event->angleDelta().y() / 120.0f;
1814
1815 if (m_viewMode == MultiView) {
1816 const int vp = viewportIndexAt(event->position().toPoint());
1817 if (vp >= 0 && vp < m_subViews.size()) {
1818 m_subViews[vp].zoom += delta;
1819 saveMultiViewSettings();
1820 }
1821 } else {
1822 m_zoom += delta;
1823 }
1824 update();
1825}
1826
1827//=============================================================================================================
1828
1829void BrainView::keyPressEvent(QKeyEvent *event)
1830{
1831 if (event->key() == Qt::Key_S) {
1832 saveSnapshot();
1833 } else if (event->key() == Qt::Key_R) {
1834 m_cameraRotation = QQuaternion();
1835 logPerspectiveRotation("reset-initial");
1836 saveMultiViewSettings();
1837 update();
1838 }
1839}
1840
1841//=============================================================================================================
1842
bool BrainView::loadSourceEstimate(const QString &lhPath, const QString &rhPath)
{
    // Delegate loading of the left/right hemisphere source-estimate files to
    // the source manager, which maps the data onto the surfaces of the
    // currently active surface type. Returns the manager's success flag.
    return m_sourceManager.load(lhPath, rhPath, m_surfaces, m_activeSurfaceType);
}
1847
1848//=============================================================================================================
1849
void BrainView::onSourceEstimateLoaded(int numTimePoints)
{
    // Switch the overlay to source-estimate mode, notify listeners of the
    // number of available time points, and display the first one.
    setVisualizationMode("Source Estimate");
    emit sourceEstimateLoaded(numTimePoints);
    setTimePoint(0);
}
1856
1857//=============================================================================================================
1858
1860{
1861 m_sourceManager.setTimePoint(index, m_surfaces, m_singleView, m_subViews);
1862 update();
1863}
1864
1865//=============================================================================================================
1866
// Change the colormap used for source-estimate coloring, then re-apply the
// current time point so the new colormap takes effect immediately.
1867 void BrainView::setSourceColormap(const QString &name)
1868 {
1869     m_sourceManager.setColormap(name);
1870     setTimePoint(m_sourceManager.currentTimePoint());
1871 }
1872
1873//=============================================================================================================
1874
// Update the min/mid/max coloring thresholds for source-estimate data, then
// re-apply the current time point so the thresholds take effect immediately.
1875 void BrainView::setSourceThresholds(float min, float mid, float max)
1876 {
1877     m_sourceManager.setThresholds(min, mid, max);
1878     setTimePoint(m_sourceManager.currentTimePoint());
1879 }
1880
1881//=============================================================================================================
1882
1884{
1885 setVisualizationMode("Source Estimate");
1886 m_sourceManager.startStreaming(m_surfaces, m_singleView, m_subViews);
1887}
1888
1889//=============================================================================================================
1890
1892{
1893 m_sourceManager.stopStreaming();
1894}
1895
1896//=============================================================================================================
1897
1899{
1900 return m_sourceManager.isStreaming();
1901}
1902
1903//=============================================================================================================
1904
// Feed one sample vector of realtime source data into the source manager's
// streaming queue. Coloring/repaint happens asynchronously via
// onRealtimeColorsAvailable().
1905 void BrainView::pushRealtimeSourceData(const Eigen::VectorXd &data)
1906 {
1907     m_sourceManager.pushData(data);
1908 }
1909
1910//=============================================================================================================
1911
1913{
1914 m_sourceManager.setInterval(msec);
1915}
1916
1917//=============================================================================================================
1918
1920{
1921 m_sourceManager.setLooping(enabled);
1922}
1923
1924//=============================================================================================================
1925
1926void BrainView::onRealtimeColorsAvailable(const QVector<uint32_t> &colorsLh,
1927 const QVector<uint32_t> &colorsRh)
1928{
1929 // Apply colors to all brain surfaces matching active surface types
1930 QSet<QString> activeTypes;
1931 activeTypes.insert(m_singleView.surfaceType);
1932 for (int i = 0; i < m_subViews.size(); ++i) {
1933 activeTypes.insert(m_subViews[i].surfaceType);
1934 }
1935
1936 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
1937 if (!it.value() || it.value()->tissueType() != BrainSurface::TissueBrain)
1938 continue;
1939
1940 for (const QString &type : activeTypes) {
1941 if (it.key().endsWith(type)) {
1942 int hemi = it.value()->hemi();
1943 const QVector<uint32_t> &colors = (hemi == 0) ? colorsLh : colorsRh;
1944 if (!colors.isEmpty()) {
1945 it.value()->applySourceEstimateColors(colors);
1946 }
1947 break;
1948 }
1949 }
1950 }
1951
1952 update();
1953}
1954
1955//=============================================================================================================
1956
// Load an evoked set from disk and (re)build or reuse the sensor-to-surface
// field mapping. Preserves the current time point across reloads of the same
// sensor configuration, clamped to the new data's range. Returns false if the
// evoked data is empty or the mapping rebuild fails.
1957 bool BrainView::loadSensorField(const QString &evokedPath, int aveIndex)
1958 {
1959     auto evoked = DataLoader::loadEvoked(evokedPath, aveIndex);
1960     if (evoked.isEmpty()) return false;
1961
1962     // Preserve the current time point when switching between evoked sets
1963     // that share the same sensor configuration (same file, different condition).
1964     const int previousTimePoint = m_fieldMapper.timePoint();
1965     const bool canReuse = m_fieldMapper.hasMappingFor(evoked);
1966
1967     m_fieldMapper.setEvoked(evoked);
1968
1969     if (!canReuse) {
1970         // Sensor config changed — full rebuild required (also precomputes global range)
1971         if (!m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans)) {
1972             m_fieldMapper.setEvoked(FIFFLIB::FiffEvoked()); // Clear state on failure
1973             return false;
1974         }
1975     } else {
1976         // Mapping reused — recompute normalization for new evoked data
1977         m_fieldMapper.computeNormRange();
1978     }
1979
1980     // Clamp preserved time point to the range of the new evoked data
1981     const int numTimes = static_cast<int>(m_fieldMapper.evoked().times.size());
1982     const int tp = qBound(0, previousTimePoint, numTimes - 1);
1983
1984     emit sensorFieldLoaded(numTimes, tp);
// NOTE(review): original line 1985 is missing from this extraction — likely a
// statement applying `tp` (the clamped time point is otherwise only emitted).
// Recover it from version control before editing this function.
1986     return true;
1987 }
1988
1989//=============================================================================================================
1990
// List the evoked-set (condition) names available in a file without loading
// any data; thin forwarder to DataLoader.
1991 QStringList BrainView::probeEvokedSets(const QString &evokedPath)
1992 {
1993     return DataLoader::probeEvokedSets(evokedPath);
1994 }
1995
1996//=============================================================================================================
1997
1999{
2000 if (!m_fieldMapper.isLoaded() || m_fieldMapper.evoked().isEmpty()) {
2001 return;
2002 }
2003
2004 int maxIdx = static_cast<int>(m_fieldMapper.evoked().times.size()) - 1;
2005 if (maxIdx < 0) {
2006 return;
2007 }
2008
2009 m_fieldMapper.setTimePoint(qBound(0, index, maxIdx));
2010 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2011 emit sensorFieldTimePointChanged(m_fieldMapper.timePoint(), m_fieldMapper.evoked().times(m_fieldMapper.timePoint()));
2012 update();
2013}
2014
2015//=============================================================================================================
2016
2017void BrainView::setSensorFieldVisible(const QString &type, bool visible)
2018{
2019 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2020 if (type == "MEG") {
2021 profile.megFieldMap = visible;
2022 } else if (type == "EEG") {
2023 profile.eegFieldMap = visible;
2024 } else {
2025 return;
2026 }
2027
2028 saveMultiViewSettings();
2029 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2030 update();
2031}
2032
2033//=============================================================================================================
2034
2035void BrainView::setSensorFieldContourVisible(const QString &type, bool visible)
2036{
2037 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2038 if (type == "MEG") {
2039 profile.megFieldContours = visible;
2040 } else if (type == "EEG") {
2041 profile.eegFieldContours = visible;
2042 } else {
2043 return;
2044 }
2045
2046 saveMultiViewSettings();
2047 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2048 update();
2049}
2050
2051//=============================================================================================================
2052
2054{
2055 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2056 if (profile.megFieldMapOnHead == useHead && m_fieldMapper.megFieldMapOnHead() == useHead) {
2057 return;
2058 }
2059
2060 profile.megFieldMapOnHead = useHead;
2061 m_fieldMapper.setMegFieldMapOnHead(useHead);
2062 saveMultiViewSettings();
2063 if (m_fieldMapper.isLoaded()) {
2064 m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans);
2065 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2066 update();
2067 }
2068}
2069
2070//=============================================================================================================
2071
// Change the colormap for the sensor field map. No-op when the name is
// unchanged; otherwise re-applies the field map and repaints.
2072 void BrainView::setSensorFieldColormap(const QString &name)
2073 {
2074     if (m_fieldMapper.colormap() == name) {
2075         return;
2076     }
2077     m_fieldMapper.setColormap(name);
2078     m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2079     update();
2080 }
2081
2082//=============================================================================================================
2083
2085{
2086 return m_sourceManager.tstep();
2087}
2088
2089//=============================================================================================================
2090
2092{
2093 return m_sourceManager.tmin();
2094}
2095
2096//=============================================================================================================
2097
2099{
2100 return m_sourceManager.numTimePoints();
2101}
2102
2103//=============================================================================================================
2104
2106{
2107 if (!m_fieldMapper.isLoaded() || m_fieldMapper.evoked().nave == -1 || m_fieldMapper.evoked().times.size() == 0) {
2108 return -1;
2109 }
2110
2111 int bestIdx = 0;
2112 float bestDist = std::abs(m_fieldMapper.evoked().times(0) - timeSec);
2113 for (int i = 1; i < m_fieldMapper.evoked().times.size(); ++i) {
2114 float dist = std::abs(m_fieldMapper.evoked().times(i) - timeSec);
2115 if (dist < bestDist) {
2116 bestDist = dist;
2117 bestIdx = i;
2118 }
2119 }
2120 return bestIdx;
2121}
2122
2123//=============================================================================================================
2124
// Return the source-estimate sample index closest to the given time in
// seconds; thin forwarder to the source manager.
2125 int BrainView::closestStcIndex(float timeSec) const
2126 {
2127     return m_sourceManager.closestIndex(timeSec);
2128 }
2129
2130//=============================================================================================================
2131
2132bool BrainView::sensorFieldTimeRange(float &tmin, float &tmax) const
2133{
2134 if (!m_fieldMapper.isLoaded() || m_fieldMapper.evoked().nave == -1 || m_fieldMapper.evoked().times.size() == 0) {
2135 return false;
2136 }
2137 tmin = m_fieldMapper.evoked().times(0);
2138 tmax = m_fieldMapper.evoked().times(m_fieldMapper.evoked().times.size() - 1);
2139 return true;
2140}
2141
2142//=============================================================================================================
2143// ── Real-time sensor data streaming ────────────────────────────────────
2144//=============================================================================================================
2145
// Begin realtime streaming of raw sensor data for the given modality;
// delegates to the sensor stream manager with the current mapper/surfaces.
2146 void BrainView::startRealtimeSensorStreaming(const QString &modality)
2147 {
2148     m_sensorStreamManager.startStreaming(modality, m_fieldMapper, m_surfaces);
2149 }
2150
2151//=============================================================================================================
2152
2154{
2155 m_sensorStreamManager.stopStreaming();
2156}
2157
2158//=============================================================================================================
2159
2161{
2162 return m_sensorStreamManager.isStreaming();
2163}
2164
2165//=============================================================================================================
2166
// Feed one sample vector of realtime sensor data into the sensor stream
// manager's queue. Coloring/repaint happens asynchronously via
// onSensorStreamColorsAvailable().
2167 void BrainView::pushRealtimeSensorData(const Eigen::VectorXf &data)
2168 {
2169     m_sensorStreamManager.pushData(data);
2170 }
2171
2172//=============================================================================================================
2173
2175{
2176 m_sensorStreamManager.setInterval(msec);
2177}
2178
2179//=============================================================================================================
2180
2182{
2183 m_sensorStreamManager.setLooping(enabled);
2184}
2185
2186//=============================================================================================================
2187
2189{
2190 m_sensorStreamManager.setAverages(numAvr);
2191}
2192
2193//=============================================================================================================
2194
2196{
2197 m_sensorStreamManager.setColormap(name);
2198}
2199
2200//=============================================================================================================
2201
2202void BrainView::onSensorStreamColorsAvailable(const QString &surfaceKey,
2203 const QVector<uint32_t> &colors)
2204{
2205 if (surfaceKey.isEmpty() || !m_surfaces.contains(surfaceKey)) {
2206 return;
2207 }
2208
2209 auto surface = m_surfaces[surfaceKey];
2210 if (surface && !colors.isEmpty()) {
2211 surface->applySourceEstimateColors(colors);
2212 }
2213
2214 update();
2215}
2216
2217//=============================================================================================================
2218
2219bool BrainView::loadSensors(const QString &fifPath) {
2220 auto r = DataLoader::loadSensors(fifPath, m_megHelmetOverridePath);
2221 if (!r.hasInfo && !r.hasDigitizer) return false;
2222
2223 // Store Device→Head transform for later helmet surface reloads
2224 m_devHeadTrans = r.devHeadTrans;
2225 m_hasDevHead = r.hasDevHead;
2226
2227 if (!r.megGradItems.isEmpty()) m_model->addSensors("MEG/Grad", r.megGradItems);
2228 if (!r.megMagItems.isEmpty()) m_model->addSensors("MEG/Mag", r.megMagItems);
2229 if (!r.eegItems.isEmpty()) m_model->addSensors("EEG", r.eegItems);
2230
2231 if (r.helmetSurface) {
2232 m_surfaces["sens_surface_meg"] = r.helmetSurface;
2233 } else {
2234 qWarning() << "BrainView::loadSensors: NO helmet surface returned from DataLoader!";
2235 }
2236
2237 if (!r.digitizerPoints.isEmpty())
2238 m_model->addDigitizerData(r.digitizerPoints);
2239
2240 return true;
2241}
2242
2243//=============================================================================================================
2244
// Load a MEG helmet surface from file (applying the stored Device→Head
// transform when available), register it under "sens_surface_meg", refresh
// sensor transforms and scene bounds, and repaint. Returns false on load
// failure.
2245 bool BrainView::loadMegHelmetSurface(const QString &helmetFilePath) {
2246     auto surface = DataLoader::loadHelmetSurface(helmetFilePath, m_devHeadTrans, m_hasDevHead);
2247     if (!surface) {
2248         qWarning() << "BrainView::loadMegHelmetSurface: DataLoader returned nullptr!";
2249         return false;
2250     }
2251
2252     m_surfaces["sens_surface_meg"] = surface;
2253     refreshSensorTransforms();
2254     updateSceneBounds();
2255     update();
2256     return true;
2257 }
2258
2259//=============================================================================================================
2260
// Load a dipole (.dip) file and register the resulting ECD set in the tree
// model. Returns false when the file yields no dipoles.
2261 bool BrainView::loadDipoles(const QString &dipPath)
2262 {
2263     auto ecdSet = DataLoader::loadDipoles(dipPath);
2264     if (ecdSet.size() == 0) return false;
2265     m_model->addDipoles(ecdSet);
2266     return true;
2267 }
2268
2269//=============================================================================================================
2270
// Load a connectivity network: replaces any previous network object, makes it
// visible, registers it in the tree model under `name`, and repaints.
// Returns false when the network has no nodes.
2271 bool BrainView::loadNetwork(const CONNECTIVITYLIB::Network &network, const QString &name)
2272 {
2273     if (network.getNodes().isEmpty()) return false;
2274
2275     m_network = std::make_unique<NetworkObject>();
2276     m_network->load(network);
2277     m_network->setVisible(true);
2278
2279     // Also register in the tree model
2280     m_model->addNetwork(network, name);
2281
2282     update();
2283     return true;
2284 }
2285
2286//=============================================================================================================
2287
2289{
2290 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2291 profile.network = visible;
2292 m_networkVisible = visible;
2293 if (m_network) m_network->setVisible(visible);
2294 saveMultiViewSettings();
2295 update();
2296}
2297
2298//=============================================================================================================
2299
2301{
2302 if (m_network) {
2303 m_network->setThreshold(threshold);
2304 update();
2305 }
2306}
2307
2308//=============================================================================================================
2309
2310void BrainView::setNetworkColormap(const QString &name)
2311{
2312 if (m_network) {
2313 m_network->setColormap(name);
2314 update();
2315 }
2316}
2317
2318//=============================================================================================================
2319
// Load the source space from a forward-solution file and register it in the
// tree model. Returns false when the file yields no source spaces.
2320 bool BrainView::loadSourceSpace(const QString &fwdPath)
2321 {
2322     auto srcSpace = DataLoader::loadSourceSpace(fwdPath);
2323     if (srcSpace.isEmpty()) return false;
2324     m_model->addSourceSpace(srcSpace);
2325     return true;
2326 }
2327
2328//=============================================================================================================
2329
2331{
2332 auto &profile = visibilityProfileForTarget(m_visualizationEditTarget);
2333 profile.sourceSpace = visible;
2334 saveMultiViewSettings();
2335 update();
2336}
2337
2338//=============================================================================================================
2339
// Load a Head→MRI coordinate transform from file, store it, and re-apply it
// to all sensor/digitizer geometry. Returns false on load failure.
2340 bool BrainView::loadTransformation(const QString &transPath)
2341 {
2342     FiffCoordTrans trans;
2343     if (!DataLoader::loadHeadToMriTransform(transPath, trans))
2344         return false;
2345
2346     m_headToMriTrans = trans;
2347     refreshSensorTransforms();
2348     return true;
2349 }
2350
2351void BrainView::refreshSensorTransforms()
2352{
2353 QMatrix4x4 qmat;
2354 if (m_applySensorTrans && !m_headToMriTrans.isEmpty()) {
2355 qmat = SURFACEKEYS::toQMatrix4x4(m_headToMriTrans.trans);
2356 }
2357
2358 int surfCount = 0;
2359 for (auto it = m_surfaces.begin(); it != m_surfaces.end(); ++it) {
2360 if ((it.key().startsWith("sens_") || it.key().startsWith("dig_")) && it.value()) {
2361 it.value()->applyTransform(qmat);
2362 surfCount++;
2363 }
2364 }
2365
2366 if (m_fieldMapper.isLoaded()) {
2367 m_fieldMapper.buildMapping(m_surfaces, m_headToMriTrans, m_applySensorTrans);
2368 m_fieldMapper.apply(m_surfaces, m_singleView, m_subViews);
2369 }
2370}
2371
2372//=============================================================================================================
2373
2374void BrainView::castRay(const QPoint &pos)
2375{
2376 // 1. Setup Matrix Stack (Must match render exactly, including multiview pane layout)
2377 const QSize outputSize = size();
2378
2379 const auto enabledViewports = enabledViewportIndices();
2380
2381 const int numEnabled = enabledViewports.size();
2382 int activeSlot = 0;
2383 QRect activePane(0, 0, outputSize.width(), outputSize.height());
2384
2385 bool hasValidPane = true;
2386 if (m_viewMode == MultiView && numEnabled > 1) {
2387 bool foundSlot = false;
2388 for (int slot = 0; slot < numEnabled; ++slot) {
2389 const QRect pane = multiViewSlotRect(slot, numEnabled, outputSize);
2390 if (pane.contains(pos)) {
2391 activeSlot = slot;
2392 activePane = pane;
2393 foundSlot = true;
2394 break;
2395 }
2396 }
2397
2398 hasValidPane = foundSlot;
2399 }
2400
2401 const int vp = (m_viewMode == MultiView) ? enabledViewports[activeSlot] : 0;
2402 const SubView &sv = (m_viewMode == MultiView) ? m_subViews[vp] : m_singleView;
2403
2404 m_camera.setSceneCenter(m_sceneCenter);
2405 m_camera.setSceneSize(m_sceneSize);
2406 m_camera.setRotation(m_cameraRotation);
2407 m_camera.setZoom(m_zoom);
2408 const float aspect = float(std::max(1, activePane.width())) / float(std::max(1, activePane.height()));
2409 const CameraResult cam = (m_viewMode == MultiView)
2410 ? m_camera.computeMultiView(sv, aspect)
2411 : m_camera.computeSingleView(aspect);
2412 QMatrix4x4 pvm = cam.projection * cam.view * cam.model;
2413
2414 // ── Unproject screen position to world-space ray ───────────────────
2415 QVector3D rayOrigin, rayDir;
2416 if (!RayPicker::unproject(pos, activePane, pvm, rayOrigin, rayDir))
2417 return;
2418
2419 // ── Pick against all scene geometry ────────────────────────────────
2420 PickResult pickResult;
2421 if (hasValidPane) {
2422 pickResult = RayPicker::pick(rayOrigin, rayDir, sv, m_surfaces, m_itemSurfaceMap, m_itemDipoleMap);
2423 }
2424 m_hasIntersection = pickResult.hit;
2425 if (pickResult.hit) {
2426 m_lastIntersectionPoint = pickResult.hitPoint;
2427 }
2428
2429 QStandardItem *hitItem = pickResult.item;
2430 int hitIndex = pickResult.vertexIndex;
2431
2432 // ── Build hover label ──────────────────────────────────────────────
2433 const QString displayLabel = RayPicker::buildLabel(pickResult, m_itemSurfaceMap, m_surfaces);
2434 const QString &hitKey = pickResult.surfaceKey;
2435 int currentRegionId = pickResult.regionId;
2436
2437 if (displayLabel != m_hoveredRegion) {
2438 m_hoveredRegion = displayLabel;
2439 emit hoveredRegionChanged(m_hoveredRegion);
2440 if (m_regionLabel) {
2441 if (m_hoveredRegion.isEmpty()) {
2442 m_regionLabel->hide();
2443 } else {
2444 m_regionLabel->setText(m_hoveredRegion);
2445 m_regionLabel->show();
2446 }
2447 }
2448 }
2449
2450 QString hoveredSurfaceKey;
2451 if (hitKey.startsWith("sens_surface_meg")) {
2452 hoveredSurfaceKey = hitKey;
2453 }
2454
2455 if (hitItem != m_hoveredItem || hitIndex != m_hoveredIndex || hoveredSurfaceKey != m_hoveredSurfaceKey) {
2456 // Deselect previous
2457 if (m_hoveredItem) {
2458 if (m_itemSurfaceMap.contains(m_hoveredItem)) {
2459 m_itemSurfaceMap[m_hoveredItem]->setSelected(false);
2460 m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(-1);
2461 m_itemSurfaceMap[m_hoveredItem]->setSelectedVertexRange(-1, 0);
2462 } else if (m_itemDipoleMap.contains(m_hoveredItem)) {
2463 m_itemDipoleMap[m_hoveredItem]->setSelected(m_hoveredIndex, false);
2464 }
2465 }
2466 if (!m_hoveredSurfaceKey.isEmpty() && m_surfaces.contains(m_hoveredSurfaceKey)) {
2467 m_surfaces[m_hoveredSurfaceKey]->setSelected(false);
2468 m_surfaces[m_hoveredSurfaceKey]->setSelectedRegion(-1);
2469 m_surfaces[m_hoveredSurfaceKey]->setSelectedVertexRange(-1, 0);
2470 }
2471
2472 m_hoveredItem = hitItem;
2473 m_hoveredIndex = hitIndex;
2474 m_hoveredSurfaceKey = hoveredSurfaceKey;
2475
2476 if (m_hoveredItem) {
2477 // Select new
2478 if (m_itemSurfaceMap.contains(m_hoveredItem)) {
2479 // Check if this is a digitizer batched mesh — highlight single sphere
2480 AbstractTreeItem* absHitSel = dynamic_cast<AbstractTreeItem*>(m_hoveredItem);
2481 bool isDigitizer = absHitSel &&
2482 (absHitSel->type() == AbstractTreeItem::DigitizerItem + QStandardItem::UserType);
2483
2484 if (isDigitizer && m_hoveredIndex >= 0) {
2485 const int vertsPerSphere = MeshFactory::sphereVertexCount();
2486 int sphereIdx = m_hoveredIndex / vertsPerSphere;
2487 m_itemSurfaceMap[m_hoveredItem]->setSelectedVertexRange(
2488 sphereIdx * vertsPerSphere, vertsPerSphere);
2489 } else if (currentRegionId != -1) {
2490 m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(currentRegionId);
2491 m_itemSurfaceMap[m_hoveredItem]->setSelected(false);
2492 } else {
2493 m_itemSurfaceMap[m_hoveredItem]->setSelected(true);
2494 m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(-1);
2495 }
2496 } else if (m_itemDipoleMap.contains(m_hoveredItem)) {
2497 m_itemDipoleMap[m_hoveredItem]->setSelected(m_hoveredIndex, true);
2498 }
2499 } else if (!m_hoveredSurfaceKey.isEmpty() && m_surfaces.contains(m_hoveredSurfaceKey)) {
2500 m_surfaces[m_hoveredSurfaceKey]->setSelected(true);
2501 m_surfaces[m_hoveredSurfaceKey]->setSelectedRegion(-1);
2502 }
2503 } else if (m_hoveredItem && m_itemSurfaceMap.contains(m_hoveredItem)) {
2504 AbstractTreeItem* absHitUpd = dynamic_cast<AbstractTreeItem*>(m_hoveredItem);
2505 bool isDigitizer = absHitUpd &&
2506 (absHitUpd->type() == AbstractTreeItem::DigitizerItem + QStandardItem::UserType);
2507
2508 if (isDigitizer && m_hoveredIndex >= 0) {
2509 const int vertsPerSphere = MeshFactory::sphereVertexCount();
2510 int sphereIdx = m_hoveredIndex / vertsPerSphere;
2511 m_itemSurfaceMap[m_hoveredItem]->setSelectedVertexRange(
2512 sphereIdx * vertsPerSphere, vertsPerSphere);
2513 } else if (currentRegionId != -1) {
2514 m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(currentRegionId);
2515 m_itemSurfaceMap[m_hoveredItem]->setSelected(false);
2516 } else {
2517 m_itemSurfaceMap[m_hoveredItem]->setSelectedRegion(-1);
2518 m_itemSurfaceMap[m_hoveredItem]->setSelected(true);
2519 }
2520 } else if (!m_hoveredSurfaceKey.isEmpty() && m_surfaces.contains(m_hoveredSurfaceKey)) {
2521 m_surfaces[m_hoveredSurfaceKey]->setSelected(true);
2522 }
2523 update();
2524}
2525
2526//=============================================================================================================
2527
2528void BrainView::showViewportPresetMenu(int viewport, const QPoint &globalPos)
2529{
2530 if (viewport < 0 || viewport >= m_subViews.size()) {
2531 return;
2532 }
2533
2534 QMenu menu;
2535 QAction *topAction = menu.addAction("Top");
2536 QAction *perspectiveAction = menu.addAction("Perspective");
2537 QAction *frontAction = menu.addAction("Front");
2538 QAction *leftAction = menu.addAction("Left");
2539 menu.addSeparator();
2540 QAction *bottomAction = menu.addAction("Bottom");
2541 QAction *backAction = menu.addAction("Back");
2542 QAction *rightAction = menu.addAction("Right");
2543
2544 const int currentPreset = std::clamp(m_subViews[viewport].preset, 0, 6);
2545 topAction->setCheckable(true);
2546 perspectiveAction->setCheckable(true);
2547 frontAction->setCheckable(true);
2548 leftAction->setCheckable(true);
2549 bottomAction->setCheckable(true);
2550 backAction->setCheckable(true);
2551 rightAction->setCheckable(true);
2552
2553 topAction->setChecked(currentPreset == 0);
2554 perspectiveAction->setChecked(currentPreset == 1);
2555 frontAction->setChecked(currentPreset == 2);
2556 leftAction->setChecked(currentPreset == 3);
2557 bottomAction->setChecked(currentPreset == 4);
2558 backAction->setChecked(currentPreset == 5);
2559 rightAction->setChecked(currentPreset == 6);
2560
2561 QAction *selected = menu.exec(globalPos);
2562 if (!selected) {
2563 return;
2564 }
2565
2566 int newPreset = currentPreset;
2567 if (selected == topAction) {
2568 newPreset = 0;
2569 } else if (selected == perspectiveAction) {
2570 newPreset = 1;
2571 } else if (selected == frontAction) {
2572 newPreset = 2;
2573 } else if (selected == leftAction) {
2574 newPreset = 3;
2575 } else if (selected == bottomAction) {
2576 newPreset = 4;
2577 } else if (selected == backAction) {
2578 newPreset = 5;
2579 } else if (selected == rightAction) {
2580 newPreset = 6;
2581 }
2582
2583 if (newPreset == currentPreset) {
2584 return;
2585 }
2586
2587 m_subViews[viewport].preset = newPreset;
2588 saveMultiViewSettings();
2589 updateOverlayLayout();
2590 update();
2591}
FiffEvokedSet class declaration.
RayPicker class declaration — ray casting and intersection testing.
MeshFactory class declaration — static utilities for generating primitive meshes (spheres,...
NetworkObject class declaration.
BrainSurface class declaration.
DipoleObject class declaration.
BrainRenderer class declaration.
SplitterHit
BrainView class declaration.
DataLoader — static helpers for loading MNE data files.
QString shaderModeName(ShaderMode mode)
VisualizationMode visualizationModeFromName(const QString &name)
QString visualizationModeName(VisualizationMode mode)
bool multiViewPresetIsPerspective(int preset)
ShaderMode shaderModeFromName(const QString &name)
int normalizedVisualizationTarget(int target, int maxIndex)
QString multiViewPresetName(int preset)
Surface key constants and type-to-key mappings.
BrainTreeModel class declaration.
SurfaceTreeItem class declaration.
BemTreeItem class declaration.
SourceSpaceTreeItem class declaration.
DipoleTreeItem class declaration.
SensorTreeItem class declaration.
DigitizerTreeItem class declaration.
Network class declaration.
MNESourceSpaces class declaration.
MNEBem class declaration.
FIFF file I/O and data structures (raw, epochs, evoked, covariance, forward).
QString sensorParentToKeyPrefix(const QString &parentText)
QString sensorTypeToObjectKey(const QString &uiType)
QMatrix4x4 toQMatrix4x4(const Eigen::Matrix4f &m)
This class holds information about a network, can compute a distance table and provide network metric...
Definition network.h:92
const QList< QSharedPointer< NetworkNode > > & getNodes() const
Definition network.cpp:156
static SensorLoadResult loadSensors(const QString &fifPath, const QString &megHelmetOverridePath={})
static QStringList probeEvokedSets(const QString &evokedPath)
static INVERSELIB::ECDSet loadDipoles(const QString &dipPath)
static MNELIB::MNESourceSpaces loadSourceSpace(const QString &fwdPath)
static bool loadHeadToMriTransform(const QString &transPath, FIFFLIB::FiffCoordTrans &trans)
static std::shared_ptr< BrainSurface > loadHelmetSurface(const QString &helmetFilePath, const QMatrix4x4 &devHeadTrans=QMatrix4x4(), bool applyTrans=false)
static FIFFLIB::FiffEvoked loadEvoked(const QString &evokedPath, int aveIndex=0)
Per-view toggle flags controlling which data layers (brain, sensors, sources, network) are visible.
Definition viewstate.h:74
Viewport subdivision holding its own camera, projection, and scissor rectangle.
Definition viewstate.h:148
bool matchesSurfaceType(const QString &key) const
ViewVisibilityProfile visibility
Definition viewstate.h:154
static bool isBrainSurfaceKey(const QString &key)
ShaderMode bemShader
Definition viewstate.h:152
bool shouldRenderSurface(const QString &key) const
QString surfaceType
Definition viewstate.h:150
VisualizationMode overlayMode
Definition viewstate.h:153
static SubView defaultForIndex(int index)
ShaderMode brainShader
Definition viewstate.h:151
int preset
Definition viewstate.h:160
static std::shared_ptr< BrainSurface > createPlate(const QVector3D &center, const QMatrix4x4 &orientation, const QColor &color, float size)
static std::shared_ptr< BrainSurface > createBatchedSpheres(const QVector< QVector3D > &positions, float radius, const QColor &color, int subdivisions=1)
static std::shared_ptr< BrainSurface > createBarbell(const QVector3D &center, const QMatrix4x4 &orientation, const QColor &color, float size)
static int sphereVertexCount(int subdivisions=1)
static std::shared_ptr< BrainSurface > createSphere(const QVector3D &center, float radius, const QColor &color, int subdivisions=1)
Computed camera matrices (projection, view, model) and vectors for a single viewport.
QVector3D cameraPos
QMatrix4x4 view
QMatrix4x4 model
QMatrix4x4 projection
static void applyMouseRotation(const QPoint &delta, QQuaternion &rotation, float speed=0.5f)
static void applyMousePan(const QPoint &delta, QVector2D &pan, float sceneSize)
Result of a ray–mesh intersection test containing the hit point, triangle index, and distance.
Definition raypicker.h:65
int vertexIndex
Vertex or element index at hit.
Definition raypicker.h:72
bool hit
True if something was hit.
Definition raypicker.h:66
QString surfaceKey
Surface map key of the hit surface.
Definition raypicker.h:71
QVector3D hitPoint
World-space intersection point.
Definition raypicker.h:68
QStandardItem * item
Tree item that was hit (nullable).
Definition raypicker.h:70
int regionId
Annotation label ID.
Definition raypicker.h:79
static bool unproject(const QPoint &screenPos, const QRect &paneRect, const QMatrix4x4 &pvm, QVector3D &rayOrigin, QVector3D &rayDir)
Definition raypicker.cpp:52
static QString buildLabel(const PickResult &result, const QMap< const QStandardItem *, std::shared_ptr< BrainSurface > > &itemSurfaceMap, const QMap< QString, std::shared_ptr< BrainSurface > > &surfaces)
static PickResult pick(const QVector3D &rayOrigin, const QVector3D &rayDir, const SubView &subView, const QMap< QString, std::shared_ptr< BrainSurface > > &surfaces, const QMap< const QStandardItem *, std::shared_ptr< BrainSurface > > &itemSurfaceMap, const QMap< const QStandardItem *, std::shared_ptr< DipoleObject > > &itemDipoleMap)
Definition raypicker.cpp:85
Hierarchical item model organizing all 3-D scene objects (surfaces, sensors, sources,...
Base tree item providing check-state, visibility, and data-role storage for all 3-D scene items.
QColor color() const
void setVisible(bool visible)
int type() const override
Tree item representing a BEM surface layer in the 3-D scene hierarchy.
Definition bemtreeitem.h:52
const MNELIB::MNEBemSurface & bemSurfaceData() const
Digitizer point group tree item.
PointKind pointKind() const
const QVector< QVector3D > & positions() const
Tree item representing a set of fitted dipoles in the 3-D scene hierarchy.
const INVERSELIB::ECDSet & ecdSet() const
Tree item representing MEG or EEG sensor positions in the 3-D scene hierarchy.
bool hasOrientation() const
QVector3D position() const
float scale() const
const QMatrix4x4 & orientation() const
Source space point tree item.
const QVector< QVector3D > & positions() const
Tree item representing a FreeSurfer cortical surface in the 3-D scene hierarchy.
FSLIB::Surface surfaceData() const
FSLIB::Annotation annotationData() const
Renderable cortical surface mesh with per-vertex color, curvature data, and GPU buffer management.
static constexpr VisualizationMode ModeScientific
::VisualizationMode VisualizationMode
static constexpr VisualizationMode ModeSurface
void colorsAvailable(const QString &surfaceKey, const QVector< uint32_t > &colors)
void loadingProgress(int percent, const QString &message)
void timePointChanged(int index, float time)
void loaded(int numTimePoints)
void thresholdsUpdated(float min, float mid, float max)
void realtimeColorsAvailable(const QVector< uint32_t > &colorsLh, const QVector< uint32_t > &colorsRh)
static constexpr ShaderMode Holographic
::ShaderMode ShaderMode
Aggregated GPU resources and render state for the 3-D brain visualization scene.
void setBemHighContrast(bool enabled)
void setSourceColormap(const QString &name)
bool loadMegHelmetSurface(const QString &helmetFilePath)
void setHemiVisible(int hemiIdx, bool visible)
void setSensorFieldTimePoint(int index)
void sourceThresholdsUpdated(float min, float mid, float max)
void setInfoPanelVisible(bool visible)
int stcNumTimePoints() const
bool loadTransformation(const QString &transPath)
bool loadDipoles(const QString &dipPath)
void wheelEvent(QWheelEvent *event) override
bool sensorFieldTimeRange(float &tmin, float &tmax) const
void sourceEstimateLoaded(int numTimePoints)
void setSensorFieldContourVisible(const QString &type, bool visible)
bool megFieldMapOnHeadForTarget(int target) const
void setShaderMode(const QString &mode)
void setNetworkColormap(const QString &name)
int closestSensorFieldIndex(float timeSec) const
bool isRealtimeSensorStreaming() const
void resizeEvent(QResizeEvent *event) override
int closestStcIndex(float timeSec) const
float stcStep() const
QString bemShaderModeForTarget(int target) const
void keyPressEvent(QKeyEvent *event) override
void setRealtimeLooping(bool enabled)
void setRealtimeInterval(int msec)
void showMultiView()
void setSensorFieldVisible(const QString &type, bool visible)
float stcTmin() const
int visualizationEditTarget() const
void startRealtimeSensorStreaming(const QString &modality=QStringLiteral("MEG"))
QString overlayModeForTarget(int target) const
void onRowsInserted(const QModelIndex &parent, int first, int last)
void stopRealtimeSensorStreaming()
QString shaderModeForTarget(int target) const
bool objectVisibleForTarget(const QString &object, int target) const
void setRealtimeSensorAverages(int numAvr)
void setSourceThresholds(float min, float mid, float max)
void mouseMoveEvent(QMouseEvent *event) override
void pushRealtimeSourceData(const Eigen::VectorXd &data)
void visualizationEditTargetChanged(int target)
void setVisualizationEditTarget(int target)
void timePointChanged(int index, float time)
void setSourceSpaceVisible(bool visible)
void render(QRhiCommandBuffer *cb) override
void setInitialCameraRotation(const QQuaternion &rotation)
bool loadSensors(const QString &fifPath)
void setVisualizationMode(const QString &mode)
void initialize(QRhiCommandBuffer *cb) override
void setModel(BrainTreeModel *model)
void mouseReleaseEvent(QMouseEvent *event) override
BrainView(QWidget *parent=nullptr)
Definition brainview.cpp:91
void setSensorVisible(const QString &type, bool visible)
void setSensorFieldColormap(const QString &name)
void showSingleView()
void syncBemShadersToBrainShaders()
void setRealtimeSensorLooping(bool enabled)
void setSensorTransEnabled(bool enabled)
void startRealtimeStreaming()
bool loadSourceEstimate(const QString &lhPath, const QString &rhPath)
void stopRealtimeStreaming()
void setViewportCameraPreset(int index, int preset)
void setBemVisible(const QString &name, bool visible)
static QStringList probeEvokedSets(const QString &evokedPath)
void castRay(const QPoint &pos)
void saveSnapshot()
void setViewportEnabled(int index, bool enabled)
void setBemShaderMode(const QString &mode)
void mousePressEvent(QMouseEvent *event) override
bool loadSourceSpace(const QString &fwdPath)
void setTimePoint(int index)
QString activeSurfaceForTarget(int target) const
bool isViewportEnabled(int index) const
bool loadNetwork(const CONNECTIVITYLIB::Network &network, const QString &name="Network")
void setActiveSurface(const QString &type)
void sensorFieldTimePointChanged(int index, float time)
bool isRealtimeStreaming() const
void setViewCount(int count)
void setMegHelmetOverride(const QString &path)
int viewportCameraPreset(int index) const
void setDipoleVisible(bool visible)
void setRealtimeSensorInterval(int msec)
void onDataChanged(const QModelIndex &topLeft, const QModelIndex &bottomRight, const QVector< int > &roles)
void hoveredRegionChanged(const QString &regionName)
void setLightingEnabled(bool enabled)
void setNetworkVisible(bool visible)
void sensorFieldLoaded(int numTimePoints, int initialTimePoint=0)
void setMegFieldMapOnHead(bool useHead)
bool loadSensorField(const QString &evokedPath, int aveIndex=0)
void setRealtimeSensorColormap(const QString &name)
void resetMultiViewLayout()
void stcLoadingProgress(int percent, const QString &message)
void pushRealtimeSensorData(const Eigen::VectorXf &data)
void setNetworkThreshold(double threshold)
static Qt::CursorShape cursorForHit(SplitterHit hit)
Coordinate transformation description.
Eigen::Matrix< float, 4, 4, Eigen::DontAlign > trans
bool isEmpty() const
Definition annotation.h:294
BEM surface provides geometry information.