-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathwellcome_report_nov_2024.html
736 lines (706 loc) · 42.9 KB
/
wellcome_report_nov_2024.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en"><head>
<meta charset="utf-8">
<meta name="generator" content="quarto-1.4.553">
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">
<meta name="dcterms.date" content="2024-11-01">
<title>wellcome_report_nov_2024</title>
<style>
code{white-space: pre-wrap;}
span.smallcaps{font-variant: small-caps;}
div.columns{display: flex; gap: min(4vw, 1.5em);}
div.column{flex: auto; overflow-x: auto;}
div.hanging-indent{margin-left: 1.5em; text-indent: -1.5em;}
ul.task-list{list-style: none;}
ul.task-list li input[type="checkbox"] {
width: 0.8em;
margin: 0 0.8em 0.2em -1em; /* quarto-specific, see https://github.com/quarto-dev/quarto-cli/issues/4556 */
vertical-align: middle;
}
</style>
<script src="wellcome_report_nov_2024_files/libs/clipboard/clipboard.min.js"></script>
<script src="wellcome_report_nov_2024_files/libs/quarto-html/quarto.js"></script>
<script src="wellcome_report_nov_2024_files/libs/quarto-html/popper.min.js"></script>
<script src="wellcome_report_nov_2024_files/libs/quarto-html/tippy.umd.min.js"></script>
<script src="wellcome_report_nov_2024_files/libs/quarto-html/anchor.min.js"></script>
<link href="wellcome_report_nov_2024_files/libs/quarto-html/tippy.css" rel="stylesheet">
<link href="wellcome_report_nov_2024_files/libs/quarto-html/quarto-syntax-highlighting.css" rel="stylesheet" id="quarto-text-highlighting-styles">
<script src="wellcome_report_nov_2024_files/libs/bootstrap/bootstrap.min.js"></script>
<link href="wellcome_report_nov_2024_files/libs/bootstrap/bootstrap-icons.css" rel="stylesheet">
<link href="wellcome_report_nov_2024_files/libs/bootstrap/bootstrap.min.css" rel="stylesheet" id="quarto-bootstrap" data-mode="light">
<!-- NOTE(review): polyfill.io was compromised in a 2024 supply-chain attack; use the Cloudflare-hosted mirror instead. -->
<script src="https://cdnjs.cloudflare.com/polyfill/v3/polyfill.min.js?features=es6"></script>
<script src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-chtml-full.js" type="text/javascript"></script>
<script type="text/javascript">
// Typeset any math inside `el` with whichever math engine the page loaded.
const typesetMath = (el) => {
  if (window.MathJax) {
    // MathJax typesets the whole element subtree in one call.
    window.MathJax.typeset([el]);
  } else if (window.katex) {
    // KaTeX fallback: render each `.math` span in place.
    const mathElements = el.getElementsByClassName("math");
    // KaTeX's `macros` option is an object mapping macro name -> expansion.
    const macros = {};
    for (let i = 0; i < mathElements.length; i++) {
      const texText = mathElements[i].firstChild;
      // Guard: an empty span has no firstChild, and `.data` would throw.
      if (texText && mathElements[i].tagName === "SPAN") {
        window.katex.render(texText.data, mathElements[i], {
          displayMode: mathElements[i].classList.contains('display'),
          throwOnError: false, // render errors inline rather than aborting
          macros: macros,
          fleqn: false
        });
      }
    }
  }
}
// Expose for other Quarto scripts that re-typeset dynamic content.
window.Quarto = {
  typesetMath
};
</script>
</head>
<body class="fullcontent">
<div id="quarto-content" class="page-columns page-rows-contents page-layout-article">
<main class="content" id="quarto-document-content">
<header id="title-block-header" class="quarto-title-block default">
<div class="quarto-title">
</div>
<div class="quarto-title-meta">
<div>
<div class="quarto-title-meta-heading">Published</div>
<div class="quarto-title-meta-contents">
<p class="date">November 1, 2024</p>
</div>
</div>
</div>
</header>
<section id="project-title-surprises-as-a-mechanism-of-improvement-in-the-psychological-therapy-of-anxiety-and-depression-in-young-people." class="level1">
<h1>Project Title: Surprises as a Mechanism of Improvement in the Psychological Therapy of Anxiety and Depression in Young People.</h1>
<p>Grant Reference: 226785/Z/22/Z<br>
Project Start Date: 12/05/2023<br>
Project End Date: 12/05/2028</p>
<p>Report authors: Stringaris A (UCL, PI), Payne M (UCL, post-doc), Spencer L (Oxford, post-doc), Bagdades E (UCL, research assistant), Delpech R (UCL, research assistant), Norman J (UCL, research assistant), Tromans N (Oxford, research assistant), Z Kokan (Oxford, research assistant), Krebs G (UCL, Co-I), Leigh E (Oxford, Co-I), Singh I (Oxford, Co-I).</p>
<section id="progress-made-over-the-past-twelve-months" class="level2">
<h2 class="anchored" data-anchor-id="progress-made-over-the-past-twelve-months"><strong>Progress made over the past twelve months</strong></h2>
<p>We have made good progress towards our goals as indicated by the following:</p>
<ul>
<li><p>Three research assistants have joined the team. Ms Raphaelle Delpech (15/04/24), with a background in psychology and neuroscience and Ms Jessica Norman (28/05/24), with a background in biology, psychology and clinical experience, have joined through UCL. Ms Zeba Kokan (19/09/24), with a background in brain and behavioural sciences and global studies, has joined through Oxford. We have weekly inter-disciplinary meetings between the UCL and Oxford sites. Please see Fig 1 depicting some of the members of the team.</p></li>
<li><p>Dr Daniel Rautio will be joining the team to work specifically on this Wellcome project. He is funded through a prestigious starting grant from the Swedish Research Council for Health, Working Life and Welfare; Forte. This grant of approximately £300,000, divided over a period of three to four years, is intended to promote establishment and independence in research by providing junior researchers with the opportunity to conduct their own research project. Dr Rautio will be with our team from August 2025 through July 2027, and work on WP4 and WP5. He is an experienced psychologist and psychotherapist who has been responsible for the care of patients with body dysmorphic disorder (BDD) at the specialist obsessive-compulsive and related disorders clinic within the child and adolescent mental health services in Stockholm, Sweden, since the opening in 2015. He has taken the lead in evaluating and refining the clinic’s treatment manual for BDD, in collaboration with colleagues at the Maudsley Hospital in London, UK. He has also been in charge of building the clinic’s database of cases with paediatric BDD, which is one of the largest cohorts of BDD cases in the world. Further, he has led the first treatment trial evaluating internet-delivered CBT for children and adolescents with BDD. Having just finished his PhD, Dr. Rautio is already an established BDD researcher, and has published several studies of high quality in the field.</p></li>
</ul>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="lab_members-1.webp" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>Members of the surprise team</figcaption>
</figure>
</div>
</div>
</div>
<ul>
<li><p><strong>Development of Cognitive Tasks</strong></p>
<p>We’ve made significant progress in developing novel cognitive tasks for our projects. In particular, we’ve created a task that simulates the experience of social interactions <strong>(“surprise task”)</strong>. Through this task, we’ve already demonstrated a robust relationship between prediction errors and both mood and anxiety.</p>
<p>We have also developed a novel task to assess when and how individuals employ self-focussed attention, a crucial component of our active ingredients model. This task <strong>(“story-building task”)</strong> involves a collaborative story-building activity, in which young people must work together with online virtual players to create story snippets. We measure self-focussed attention through memory for participants’ own contributions to the stories, and have already found interesting pilot results that young people may become self-focussed in volatile social environments, when social feedback is unpredictable.</p></li>
<li><p><strong>Online Recruitment of Participants (18-25 years)</strong></p>
<p>We have recruited 1837 participants via Prolific. This includes participant recruitment for piloting during task development as well as recruitment of our final participant samples.</p></li>
<li><p><strong>Community Recruitment of Participants (18-25 years)</strong></p>
<p>For the surprise task, we have finished recruitment with 43 complete participants from the local community aged 18-25 years. We recruited via the UCL participant pool and via flyers distributed around UCL spaces.</p></li>
</ul>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="lc_consort_261124.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>Consort diagram of recruitment</figcaption>
</figure>
</div>
</div>
</div>
<ul>
<li><p><strong>School Recruitment of Participants (14-18 years)</strong></p>
<p>So far, we have 40 complete participants, recruited from 6 schools. We have been building ongoing relationships with schools in the hopes of being able to get support with future studies and of understanding how we can make the recruitment process simple and rewarding for schools and students.</p></li>
</ul>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="students_consort_261124.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>Consort diagram of school recruitment</figcaption>
</figure>
</div>
</div>
</div>
<ul>
<li><p><strong>Development of Research Protocol for “Impression management and Authenticity” sub-study</strong></p>
<ul>
<li><p>This sub-study is led by Dr Lucienne Spencer and will aim to answer the following research question: “How does ‘impression management’ impact young people with social anxiety disorder (SAD)?”</p></li>
<li><p>We hypothesise that impression management will have more negative impacts on young people’s ‘authentic self’ than overall positive impacts.</p></li>
<li><p>This study will:</p>
<ol type="1">
<li><p>Investigate the negative and positive impacts of impression management on young people with SAD.</p></li>
<li><p>Analyse the balance of negative and positive impacts of impression management on this population</p></li>
<li><p>Investigate how impression management impacts a young person’s authentic self. This will include refining the concept of ‘authenticity’ for young people that best maps onto their intuitions.</p></li>
</ol></li>
<li><p>We will conduct 1:1 Semi-structured phenomenological interview of 40-50 young people with SAD, aged between 14 and 18 over Zoom. The interview will be approximately 2.5 hours. We have chosen to have the interview over Zoom because our participants have social anxiety disorder, and an in-person session would impact recruitment.</p></li>
</ul></li>
<li><p><strong>Progress on understanding the role of self-processing variables (self-focused attention, safety behaviours, and negative self-image) in producing or maintaining symptoms of social anxiety disorder</strong></p>
<ul>
<li><p>This effort is led by Naomi Tromans and involves:</p>
<ol type="1">
<li><p>A Systematic review and meta-analysis looking at the types of manipulations which have been used to experimentally alter these self-processing variables, and the effects of enhancing or reducing them on self-reported state anxiety and/or observer-reported anxious appearance across the lifespan.</p>
<ul>
<li><p>Working towards this, a pre-registration has been published on Prospero (CRD42024554587 Available from: <a href="https://www.crd.york.ac.uk/prospero/display_record.php?ID=CRD42024554587" class="uri">https://www.crd.york.ac.uk/prospero/display_record.php?ID=CRD42024554587</a>) outlining eligibility criteria, search strategy, and analysis plans.</p></li>
<li><p>The review is currently nearing the end of full text screening.</p></li>
</ul></li>
<li><p>Development of a qualitative sub-study investigating the subjective experience of self and external foci of attention in a sample of anxious adolescents.</p>
<ul>
<li><p>This study utilises an element of Cognitive Therapy for Social anxiety, the self-focused attention and safety behaviours experiment. This experiment is used early in treatment and has been shown to effectively reduce state social anxiety. It involves the patient, or in this case participant, having one conversation being highly self-focused and using their safety behaviours, and another being externally focused and dropping these behaviours.</p></li>
<li><p>The qualitative interview aims to understand the subjective experience of each conversation, what works about the instructions or delivery to manipulate self-processing and to alter state anxiety. It also goes on to ask about experiences of these different modes of attention more generally in everyday life.</p></li>
<li><p>Progressing towards this, a protocol has been developed and piloted with YPAG members, along with the interview topic guide and in-session VAS measures, e.g. of state anxiety, perceived performance, and the occurrence and quality of negative self-imagery.</p></li>
<li><p>An ethics amendment was submitted and recruitment is intended to begin in January.</p></li>
</ul></li>
</ol></li>
</ul></li>
<li><p><strong>Progress on WP3: identifying the neural markers of social interactions and mood</strong></p>
<ul>
<li><p>We have submitted and received approval from UCL Ethics to record EEG and MEG data simultaneously with performance on cognitive tasks in participants aged 16-25 years, following the same recruitment pathways currently in place.</p></li>
<li><p>MEG and EEG are non-invasive functional imaging techniques with a high temporal resolution allowing us to track the neural underpinnings of trial-by-trial changes in social surprises and participant self-reported mood. The high temporal resolution of these tools will allow us to look at neural responses during (and in the milliseconds following) the presentation of social feedback to participants.</p></li>
<li><p>In the next year, lab members based at UCL will undergo training for these methods, including safety practices, and we will work on adapting the tasks for EEG/MEG implementation.</p></li>
</ul></li>
</ul>
</section>
<section id="preliminary-findings-andor-key-discoveries" class="level2">
<h2 class="anchored" data-anchor-id="preliminary-findings-andor-key-discoveries"><strong>Preliminary findings and/or key discoveries</strong></h2>
<section id="surprise-task" class="level3">
<h3 class="anchored" data-anchor-id="surprise-task">Surprise Task</h3>
<p>We have collected data using the surprise task from the following groups: young adults between the ages of 18-25 through Prolific, young adults between the ages of 18-25 through local community recruitment, and students between the ages of 14-18 through school recruitment.</p>
<p>In a first set of analyses we used Linear Mixed Effects (LME) models to look at the relationship between social surprises or prediction errors (PEs) and momentary mood and anxiety. Specifically, we tested various models with different random effects structure as well as some that included trait social anxiety scores (mini SPIN scores) as a co-variate and some that didn’t.</p>
<ul>
<li><p><strong>Online recruitment (n=29)</strong></p>
<ul>
<li><p>The best fitted LME for mood and social prediction error was the following <span class="math inline">\(Mood \sim social\ PE + mini\ SPIN\ score + (social\ PE | Participant\ ID)\)</span>. The coefficient for the main effect of social PE on mood was 0.27 (95%CI = 0.16 - 0.38).</p>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="prolific_mood_lme.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>Online recruitment: LME for Mood and Social PE</figcaption>
</figure>
</div>
</div>
</div></li>
<li><p>The best fitted LME for anxiety and social prediction error was the following <span class="math inline">\(Anxiety \sim social\ PE + mini\ SPIN\ score + (social\ PE | Participant\ ID)\)</span>. The coefficient for the main effect of social PE on mood was -0.15 (95%CI = -0.23 - -0.08).</p>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="prolific_anx_lme.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>Online recruitment: LME for Anxiety and Social PE</figcaption>
</figure>
</div>
</div>
</div></li>
</ul></li>
<li><p><strong>Local community recruitment (n=45)</strong></p>
<ul>
<li><p>The best fitted LME for mood and social prediction error was the following <span class="math inline">\(Mood \sim social\ PE + mini\ SPIN\ score + (social\ PE | Participant\ ID)\)</span>. The coefficient for the main effect of social PE on mood was 0.19 (95%CI = 0.12 - 0.27).</p>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="com_mood_lme.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>Community recruitment: LME for Mood and Social PE</figcaption>
</figure>
</div>
</div>
</div></li>
<li><p>The best fitted LME for anxiety and social prediction error was the following <span class="math inline">\(Anxiety \sim social\ PE + mini\ SPIN\ score + (social\ PE | Participant\ ID)\)</span>. The coefficient for the main effect of social PE on mood was -0.07 (95%CI = -0.11 - -0.03).</p>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="com_anx_lme.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>Community recruitment: LME for Anxiety and Social PE</figcaption>
</figure>
</div>
</div>
</div></li>
</ul></li>
<li><p><strong>School recruitment (n=40)</strong></p>
<ul>
<li><p>The best fitted LME for mood and social prediction error was the following <span class="math inline">\(Mood \sim social\ PE + mini\ SPIN\ score + (social\ PE | Participant\ ID)\)</span>. The coefficient for the main effect of social PE on mood was 0.12 (95%CI = 0.06 - 0.18).</p>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="com_mood_lme.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>School recruitment: LME for Mood and Social PE</figcaption>
</figure>
</div>
</div>
</div></li>
<li><p>The best fitted LME for anxiety and social prediction error was the following <span class="math inline">\(Anxiety \sim social\ PE + mini\ SPIN\ score + (social\ PE | Participant\ ID)\)</span>. The coefficient for the main effect of social PE on mood was -0.04 (95%CI = -0.07 - -0.02).</p>
<div class="cell">
<div class="cell-output-display">
<div class="quarto-figure quarto-figure-center">
<figure class="figure">
<p><img src="com_anx_lme.png" class="img-fluid figure-img" style="width:85.0%"></p>
<figcaption>School recruitment: LME for Anxiety and Social PE</figcaption>
</figure>
</div>
</div>
</div></li>
</ul></li>
</ul>
<p>As part of a second set of analyses, we are also currently in the process of developing and testing various computational models to be fitted on the data from the surprise task, with the hope that these will provide a more thorough understanding of the underlying cognitive mechanisms.</p>
</section>
<section id="cone-of-gaze-task-experiment-meta-analysis" class="level3">
<h3 class="anchored" data-anchor-id="cone-of-gaze-task-experiment-meta-analysis">Cone of Gaze Task (experiment + meta-analysis)</h3>
<ul>
<li>In this study, we investigate ‘cone of gaze’ in social anxiety. Cone of gaze describes the width of eye gaze that individuals perceive as self-directed. Interestingly, people tend to judge quite a wide range of gaze angles as self-directed, often perceiving another person to be looking at them even if they are really looking some distance to the left or right of them. In social anxiety, patients may be even more susceptible to this. This may underlie symptoms such as excessive self-focussed attention and self-consciousness in social anxiety, but may provide a more easily manipulable target for therapy. Here, we assessed the size of the difference in gaze perception between socially anxious and non-anxious participants. We conducted a systematic review and meta-analysis, showing that individuals with social anxiety perceived a moderately wider cone of gaze as self-directed than non-anxious participants (<em>g</em> = .59, CI = [.22, .95], <em>p</em> = .005). Further, we conducted three experiments, showing that the difference between socially anxious and non-anxious groups was stronger when the stimuli were more threatening (faces with angry expressions). This suggests a promising avenue for the project, providing a potential outcome measure and/or target for therapeutic intervention.</li>
</ul>
</section>
<section id="story-building-task" class="level3">
<h3 class="anchored" data-anchor-id="story-building-task">Story-building task</h3>
<ul>
<li>In the story-building task, participants work in teams to create stories, and are given feedback about how much they are valued by the team (how many players voted to keep them in the team). This set-up allows us to manipulate the feedback consistency and valence in different team environments, by changing how predictable (does the team behave the same way towards you throughout the whole game?) and how positive (does the team vote to keep you in the game or not?) the participant’s feedback is. To measure self-focussed attention, participants are asked about their memory for the interactions. We hypothesise that unpredictable social feedback will cue self-focussed attention in participants, and so participants will remember more of their own contributions than others’ during these blocks. So far, we have tested ~60 participants online, in two pilot studies, and 27 participants in a school setting. This project will allow us to better understand the conditions under which individuals shift their attention towards the self, and guide our interventions targeted at self-focussed attention.</li>
</ul>
</section>
<section id="the-surprise-intervention-in-social-anxiety-disorder-a-phenomenological-account" class="level3">
<h3 class="anchored" data-anchor-id="the-surprise-intervention-in-social-anxiety-disorder-a-phenomenological-account"><strong>The Surprise Intervention in Social Anxiety Disorder: A Phenomenological Account</strong></h3>
<ul>
<li>This study uses a phenomenological approach to reveal the complex dimensions and the subtle therapeutic benefits of the surprise intervention for those with social anxiety disorder. In recent years, the term ‘phenomenology’, when applied to the field of psychiatry, has often been misappropriated to merely refer to the first-person expressions of an individual or group. Rather, phenomenology is a stringent methodology that seeks to examine the most foundational experiential phenomena. This includes temporal and spatial experiences, a sense of self and agency, one’s experience of other people and overall attunement to the world. Psychiatric disorders are not limited to the mind but impact one’s entire perception of the world. Therefore, we require such phenomenological tools to access some of the more abstract aspects of psychiatric disorders and to design their accompanying interventions. First, we establish that surprise intervention can force a reflective process, akin to the epoché. We propose that surprise can act as an intellectual tool for the person with social anxiety disorder and can trigger a profound re-evaluation of implicitly held assumptions, namely the negative evaluations they assume to receive from others. Second, we identify that surprise can elicit a transformation in one’s experience of the self and the world. We provide a phenomenological account of the shift in attention from self to external, which is a central feature of Clark and Wells’s cognitive behavioural model. This phenomenological account of the surprise intervention reveals the complex ways in which the intervention impacts the individual.</li>
</ul>
</section>
</section>
<section id="lived-experience" class="level2">
<h2 class="anchored" data-anchor-id="lived-experience"><strong>Lived Experience</strong></h2>
<ul>
<li><p><strong>Young People’s Advisory Group (NeurOX YPAG)</strong></p>
<p>The NeurOX YPAG has played an invaluable role in developing research for the Surprise Project. The YPAG is comprised of 33 young people aged 14-23 from a range of backgrounds and ethnicities.</p>
<p>For the Authenticity & Impression Management sub-study, the YPAG has helped develop an interview guide and has advised on the terminology that would be most accessible to young people participating in this study. YPAG members have also participated in trial runs of the interview guide, which has been adapted according to their feedback.</p>
<p>For the subjective experience of self-focused attention sub-study, the YPAG has helped pilot the study procedure, providing feedback on content of instructions and interviews, on session structure and length, and on session intensity.</p>
<p>The YPAG have given feedback on the story-building task, and as a result, updates have been made to make it feel more collaborative (by changing the prompts) and to create realistic and believable profiles for the team members. The YPAG have given feedback verbally via a meeting and also provided written feedback for the team member profiles.</p></li>
</ul>
</section>
<section id="challenges-delays-or-risks" class="level2">
<h2 class="anchored" data-anchor-id="challenges-delays-or-risks"><strong>Challenges, delays or risks</strong></h2>
<p>Optimising Allocation of Resources.<br>
On 4th November we sent an email to Wellcome and followed up last week, about the following. We have already some major insights from our experimental work (as expanded above) and want to optimise the next steps, which in our case are designed to culminate in a large RCT of an innovative intervention to arise from or experimental work. As you may remember, it was your strong recommendation that we frame the final two work packages (WP4 and WP5) as a Randomised Controlled Trial (RCT). This requires specific work on the regulatory but also practical side, including the optimal sample size selection and the most feasible and promising methodology for mechanistic work.</p>
<p>To achieve this, we need to intensify further the experimental and recruitment work so that the intervention becomes as scalable as possible and is tested rigorously and mechanistically in a larger sample/samples of young people. For this, we believe that it would make most sense if we could make the following minor changes to the allocation of the budget.</p>
<p>Convert numerous short-term posts (indicated as “temporary PhD students”) into fewer longer-term RA/Fellow posts. They are presently at the equivalent of 70 months full time for these temporary contracts. Our work so far has shown that having fewer longer-term research assistants better serves the purposes of the project as they can be trained and operate within schools and communities much more effectively. This is because they are able to establish relationships with schools, a central aspect of this work. They also receive on the whole better training because of their time in the lab, including on quantitative methods but also in interactions with the YPAG and other collaborators. Having longer-term RAs will best serve the request for an RCT as they will be in the best position to support it.</p>
<p>In keeping with the idea of optimally embedding mechanistic work, i.e. active ingredients work, into the RCT, we also ask to convert the funds for the Magnetoencephalography (MEG) to funds for Electroencephalography (EEG) and wider recruitment. MEG scanning is bound to a specifically designed scanning room, whereas EEG can in principle be portable and applied in non-specialist environments, such as schools. The reason for preferring EEG over MEG is that having the portable EEG option for testing would be advantageous for testing the intervention in large sample sizes (which in turn is crucial for robust mechanistic inferences). Our work so far has demonstrated that we have an experimental set-up that lends itself to large-scale testing which would be best served by EEG brain recordings. This switch would come at no cost to the testing of our hypotheses given that the temporal resolution (the key element for testing our hypotheses) is equivalent between the two methods. We specifically ask that we allocate the costs for MEG in such a way as to purchase a cutting-edge EEG machine (estimated max cost ~ £40,000) and the permission to use the rest of the money for even broader recruitment into the various phases of the mechanistic RCT (including online and in person participant testing costs and remuneration). The current funds allocated to MEG are £128,000 (original) + £69,516.71 (additional recent award).</p>
<p>We are happy to provide more details about these two changes and look forward to hearing back from Wellcome soon.</p>
</section>
</section>
</main>
<!-- /main column -->
<script id="quarto-html-after-body" type="application/javascript">
window.document.addEventListener("DOMContentLoaded", function (event) {
// Apply light/dark classes to <body> based on the stylesheet's data-mode.
const toggleBodyColorMode = (bsSheetEl) => {
  const mode = bsSheetEl.getAttribute("data-mode");
  const bodyEl = window.document.querySelector("body");
  const isDark = mode === "dark";
  bodyEl.classList.add(isDark ? "quarto-dark" : "quarto-light");
  bodyEl.classList.remove(isDark ? "quarto-light" : "quarto-dark");
}
// Sync the body color mode with the primary Quarto Bootstrap stylesheet,
// when that stylesheet link is present.
const toggleBodyColorPrimary = () => {
  const bsSheetEl = window.document.querySelector("link#quarto-bootstrap");
  if (bsSheetEl) {
    toggleBodyColorMode(bsSheetEl);
  }
}
toggleBodyColorPrimary();
// Attach (invisible, empty-icon) anchor links to all .anchored elements.
const anchorJS = new window.AnchorJS();
anchorJS.options = {
  placement: 'right',
  icon: ''
};
anchorJS.add('.anchored');
// True when the element carries any class prefixed `code-annotation-`.
const isCodeAnnotation = (el) => {
  return Array.from(el.classList).some((clz) => clz.startsWith('code-annotation-'));
}
// Copy-to-clipboard for code blocks: the copied text is the button's
// previous sibling's text with any code-annotation spans stripped out.
const clipboard = new window.ClipboardJS('.code-copy-button', {
text: function(trigger) {
// Clone so annotation spans can be removed without mutating the page.
const codeEl = trigger.previousElementSibling.cloneNode(true);
for (const childEl of codeEl.children) {
if (isCodeAnnotation(childEl)) {
childEl.remove();
}
}
return codeEl.innerText;
}
});
// On successful copy: flash the button as "checked", swap its title to
// "Copied!", show a Bootstrap tooltip when available, and restore
// everything after one second.
clipboard.on('success', function(e) {
// button target
const button = e.trigger;
// don't keep focus
button.blur();
// flash "checked"
button.classList.add('code-copy-button-checked');
var currentTitle = button.getAttribute("title");
button.setAttribute("title", "Copied!");
let tooltip;
if (window.bootstrap) {
button.setAttribute("data-bs-toggle", "tooltip");
button.setAttribute("data-bs-placement", "left");
button.setAttribute("data-bs-title", "Copied!");
tooltip = new bootstrap.Tooltip(button,
{ trigger: "manual",
customClass: "code-copy-button-tooltip",
offset: [0, -8]});
tooltip.show();
}
// Revert title/tooltip/checked state after 1s.
setTimeout(function() {
if (tooltip) {
tooltip.hide();
button.removeAttribute("data-bs-title");
button.removeAttribute("data-bs-toggle");
button.removeAttribute("data-bs-placement");
}
button.setAttribute("title", currentTitle);
button.classList.remove('code-copy-button-checked');
}, 1000);
// clear code selection
e.clearSelection();
});
// Classify link hrefs as internal (same host, localhost, or mailto).
var localhostRegex = new RegExp(/^(?:http|https):\/\/localhost\:?[0-9]*\//);
var mailtoRegex = new RegExp(/^mailto:/);
// Escape regex metacharacters in the host (e.g. "." in "example.com") so
// the host is matched literally; previously "." matched any character and
// could misclassify similarly-spelled external hosts as internal.
var filterRegex = new RegExp('/' + window.location.host.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + '/');
var isInternal = (href) => {
return filterRegex.test(href) || localhostRegex.test(href) || mailtoRegex.test(href);
}
// Inspect non-navigation links and adorn them if external
var links = window.document.querySelectorAll('a[href]:not(.nav-link):not(.navbar-brand):not(.toc-action):not(.sidebar-link):not(.sidebar-item-toggle):not(.pagination-link):not(.no-external):not([aria-hidden]):not(.dropdown-item):not(.quarto-navigation-tool)');
for (var i=0; i<links.length; i++) {
const link = links[i];
if (!isInternal(link.href)) {
// undo the damage that might have been done by quarto-nav.js in the case of
// links that we want to consider external
if (link.dataset.originalHref !== undefined) {
link.href = link.dataset.originalHref;
}
}
}
// Create a Quarto-themed tippy popup on `el`. Optional callbacks supply
// the popup content lazily and react to trigger/untrigger events.
function tippyHover(el, contentFn, onTriggerFn, onUntriggerFn) {
  const config = {
    allowHTML: true,
    maxWidth: 500,
    delay: 100,
    arrow: false,
    // Mount the popup beside the element so it inherits local styling.
    appendTo: (node) => node.parentElement,
    interactive: true,
    interactiveBorder: 10,
    theme: 'quarto',
    placement: 'bottom-start',
  };
  if (contentFn) {
    config.content = contentFn;
  }
  if (onTriggerFn) {
    config.onTrigger = onTriggerFn;
  }
  if (onUntriggerFn) {
    config.onUntrigger = onUntriggerFn;
  }
  window.tippy(el, config);
}
// Footnote popups: hovering a noteref link shows the footnote's body.
const noterefs = window.document.querySelectorAll('a[role="doc-noteref"]');
for (const ref of noterefs) {
  tippyHover(ref, () => {
    // use id or data attribute instead here
    let href = ref.getAttribute('data-footnote-href') || ref.getAttribute('href');
    try { href = new URL(href).hash; } catch {}
    const id = href.replace(/^#\/?/, "");
    const note = window.document.getElementById(id);
    return note ? note.innerHTML : "";
  });
}
// Cross-reference popups: hovering an <a.quarto-xref> shows the target
// content (section, figure, callout, …) in a tooltip, fetching other
// pages when the target is not in this document.
const xrefs = window.document.querySelectorAll('a.quarto-xref');
// Render a (cloned) target node into popup HTML; `id` is null for
// whole-page (chapter) targets.
const processXRef = (id, note) => {
// Strip column container classes
const stripColumnClz = (el) => {
el.classList.remove("page-full", "page-columns");
if (el.children) {
for (const child of el.children) {
stripColumnClz(child);
}
}
}
stripColumnClz(note)
if (id === null || id.startsWith('sec-')) {
// Special case sections, only their first couple elements
const container = document.createElement("div");
if (note.children && note.children.length > 2) {
container.appendChild(note.children[0].cloneNode(true));
for (let i = 1; i < note.children.length; i++) {
const child = note.children[i];
if (child.tagName === "P" && child.innerText === "") {
continue;
} else {
container.appendChild(child.cloneNode(true));
break;
}
}
if (window.Quarto?.typesetMath) {
window.Quarto.typesetMath(container);
}
return container.innerHTML
} else {
if (window.Quarto?.typesetMath) {
window.Quarto.typesetMath(note);
}
return note.innerHTML;
}
} else {
// Remove any anchor links if they are present
const anchorLink = note.querySelector('a.anchorjs-link');
if (anchorLink) {
anchorLink.remove();
}
if (window.Quarto?.typesetMath) {
window.Quarto.typesetMath(note);
}
// TODO in 1.5, we should make sure this works without a callout special case
if (note.classList.contains("callout")) {
return note.outerHTML;
} else {
return note.innerHTML;
}
}
}
for (var i=0; i<xrefs.length; i++) {
const xref = xrefs[i];
// Content is resolved lazily in the onTrigger callback; the instance is
// disabled while resolving and re-enabled/shown when content is ready.
tippyHover(xref, undefined, function(instance) {
instance.disable();
let url = xref.getAttribute('href');
let hash = undefined;
if (url.startsWith('#')) {
hash = url;
} else {
try { hash = new URL(url).hash; } catch {}
}
if (hash) {
const id = hash.replace(/^#\/?/, "");
const note = window.document.getElementById(id);
if (note !== null) {
// Target lives in this document: clone it so processing can't
// mutate the page, then render it.
try {
const html = processXRef(id, note.cloneNode(true));
instance.setContent(html);
} finally {
instance.enable();
instance.show();
}
} else {
// See if we can fetch this
fetch(url.split('#')[0])
.then(res => res.text())
.then(html => {
const parser = new DOMParser();
const htmlDoc = parser.parseFromString(html, "text/html");
const note = htmlDoc.getElementById(id);
if (note !== null) {
const html = processXRef(id, note);
instance.setContent(html);
}
}).finally(() => {
instance.enable();
instance.show();
});
}
} else {
// See if we can fetch a full url (with no hash to target)
// This is a special case and we should probably do some content thinning / targeting
fetch(url)
.then(res => res.text())
.then(html => {
const parser = new DOMParser();
const htmlDoc = parser.parseFromString(html, "text/html");
const note = htmlDoc.querySelector('main.content');
if (note !== null) {
// This should only happen for chapter cross references
// (since there is no id in the URL)
// remove the first header
if (note.children.length > 0 && note.children[0].tagName === "HEADER") {
note.children[0].remove();
}
const html = processXRef(null, note);
instance.setContent(html);
}
}).finally(() => {
instance.enable();
instance.show();
});
}
}, function(instance) {
});
}
// Currently highlighted annotation <dt>, if any.
let selectedAnnoteEl;
// CSS selector for the span annotated `annotation` inside code cell `cell`.
const selectorForAnnotation = (cell, annotation) =>
  `span[data-code-cell="${cell}"][data-code-annotation="${annotation}"]`;
// Highlight the code lines referenced by an annotation <dt>: position an
// absolutely-placed overlay div over the code cell (and a twin in the
// annotation gutter) spanning the first to last annotated line.
const selectCodeLines = (annoteEl) => {
const doc = window.document;
const targetCell = annoteEl.getAttribute("data-target-cell");
const targetAnnotation = annoteEl.getAttribute("data-target-annotation");
const annoteSpan = window.document.querySelector(selectorForAnnotation(targetCell, targetAnnotation));
const lines = annoteSpan.getAttribute("data-code-lines").split(",");
// Line element ids follow the "<cellId>-<lineNumber>" convention.
const lineIds = lines.map((line) => {
return targetCell + "-" + line;
})
let top = null;
let height = null;
let parent = null;
if (lineIds.length > 0) {
//compute the position of the single el (top and bottom and make a div)
const el = window.document.getElementById(lineIds[0]);
top = el.offsetTop;
height = el.offsetHeight;
parent = el.parentElement.parentElement;
if (lineIds.length > 1) {
// Extend the highlight down to the bottom of the last annotated line.
const lastEl = window.document.getElementById(lineIds[lineIds.length - 1]);
const bottom = lastEl.offsetTop + lastEl.offsetHeight;
height = bottom - top;
}
if (top !== null && height !== null && parent !== null) {
// cook up a div (if necessary) and position it
let div = window.document.getElementById("code-annotation-line-highlight");
if (div === null) {
div = window.document.createElement("div");
div.setAttribute("id", "code-annotation-line-highlight");
div.style.position = 'absolute';
parent.appendChild(div);
}
// 2px of breathing room above and below the measured span.
div.style.top = top - 2 + "px";
div.style.height = height + 4 + "px";
div.style.left = 0;
let gutterDiv = window.document.getElementById("code-annotation-line-highlight-gutter");
if (gutterDiv === null) {
gutterDiv = window.document.createElement("div");
gutterDiv.setAttribute("id", "code-annotation-line-highlight-gutter");
gutterDiv.style.position = 'absolute';
const codeCell = window.document.getElementById(targetCell);
const gutter = codeCell.querySelector('.code-annotation-gutter');
gutter.appendChild(gutterDiv);
}
gutterDiv.style.top = top - 2 + "px";
gutterDiv.style.height = height + 4 + "px";
}
// Remember the selection so it can be re-applied on window resize.
selectedAnnoteEl = annoteEl;
}
};
// Remove both highlight overlay divs (if present) and clear the selection.
const unselectCodeLines = () => {
  for (const elId of ["code-annotation-line-highlight", "code-annotation-line-highlight-gutter"]) {
    const div = window.document.getElementById(elId);
    if (div) {
      div.remove();
    }
  }
  selectedAnnoteEl = undefined;
};
// Handle positioning of the toggle
// Re-apply the current annotation highlight on resize (throttled to 10ms)
// so the overlay tracks the reflowed code lines.
window.addEventListener(
"resize",
throttle(() => {
// NOTE(review): `elRect` is not declared anywhere in this scope —
// presumably a cached rect reset here; confirm it is defined elsewhere.
elRect = undefined;
if (selectedAnnoteEl) {
selectCodeLines(selectedAnnoteEl);
}
}, 10)
);
// Rate-limit `fn`: the first call runs immediately; subsequent calls are
// collapsed into a single trailing invocation scheduled `ms` later (each
// new call cancels the previously scheduled one).
function throttle(fn, ms) {
  let blocked = false;
  let pending;
  return (...args) => {
    if (blocked) {
      // Already fired once: keep only the most recent trailing call.
      if (pending) clearTimeout(pending);
      pending = setTimeout(() => {
        fn.apply(this, args);
        // Re-open the leading edge after the trailing call fires.
        pending = blocked = false;
      }, ms);
    } else {
      // Leading edge: run right away.
      fn.apply(this, args);
      blocked = true;
    }
  };
}
// Attach click handler to the DT
// Clicking an annotation term toggles its line highlight: select it (and
// deselect any other active term) or clear it when clicked again.
const annoteDls = window.document.querySelectorAll('dt[data-target-cell]');
for (const annoteDlNode of annoteDls) {
annoteDlNode.addEventListener('click', (event) => {
const clickedEl = event.target;
if (clickedEl !== selectedAnnoteEl) {
unselectCodeLines();
const activeEl = window.document.querySelector('dt[data-target-cell].code-annotation-active');
if (activeEl) {
activeEl.classList.remove('code-annotation-active');
}
selectCodeLines(clickedEl);
clickedEl.classList.add('code-annotation-active');
} else {
// Unselect the line
unselectCodeLines();
clickedEl.classList.remove('code-annotation-active');
}
});
}
// Walk up from `el` to the nearest ancestor carrying data-cites; return
// { el, cites } where `el` is the child of that ancestor on the walked
// path and `cites` is the space-split id list, or undefined if none.
const findCites = (el) => {
  let node = el;
  while (node.parentElement) {
    const cites = node.parentElement.dataset.cites;
    if (cites) {
      return { el: node, cites: cites.split(' ') };
    }
    node = node.parentElement;
  }
  return undefined;
};
// Citation popups: hovering a citation link shows the matching
// bibliography entries, resolved via the nearest ancestor's data-cites.
var bibliorefs = window.document.querySelectorAll('a[role="doc-biblioref"]');
for (var i=0; i<bibliorefs.length; i++) {
const ref = bibliorefs[i];
const citeInfo = findCites(ref);
if (citeInfo) {
tippyHover(citeInfo.el, function() {
// Assemble popup HTML from each #ref-<id> bibliography entry.
var popup = window.document.createElement('div');
citeInfo.cites.forEach(function(cite) {
var citeDiv = window.document.createElement('div');
citeDiv.classList.add('hanging-indent');
citeDiv.classList.add('csl-entry');
var biblioDiv = window.document.getElementById('ref-' + cite);
if (biblioDiv) {
citeDiv.innerHTML = biblioDiv.innerHTML;
}
popup.appendChild(citeDiv);
});
return popup.innerHTML;
});
}
}
});
</script>
</div> <!-- /content -->
</body></html>