Report Typos and Errors    
Semi-Lagrangian Library
Modular library for kinetic and gyrokinetic simulations of plasmas in fusion energy devices.
sll_m_pic_utilities.F90
Go to the documentation of this file.
1 !**************************************************************
2 ! Copyright INRIA
3 ! Authors :
4 ! CALVI project team
5 !
6 ! This code SeLaLib (for Semi-Lagrangian-Library)
7 ! is a parallel library for simulating the plasma turbulence
8 ! in a tokamak.
9 !
10 ! This software is governed by the CeCILL-B license
11 ! under French law and abiding by the rules of distribution
12 ! of free software. You can use, modify and redistribute
13 ! the software under the terms of the CeCILL-B license as
14 ! circulated by CEA, CNRS and INRIA at the following URL
15 ! "http://www.cecill.info".
16 !**************************************************************
17 
19 !+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
20 #include "sll_assert.h"
21 #include "sll_memory.h"
22 #include "sll_working_precision.h"
23 #include "sll_accumulators.h"
24 
25  use sll_m_accumulators, only: &
30 
31  use sll_m_particle_group_2d, only: &
33 
34  use sll_m_particle_group_4d, only: &
36 
40 
41 #ifdef _OPENMP
42  use omp_lib, only: &
43  omp_get_thread_num
44 
45 #endif
46  implicit none
47 
48  public :: &
53 
54  private
55 !+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
56 #ifdef _OPENMP
57 ! logical :: openmp_st
58 #endif
59 !! !$ openmp_st = OMP_IN_PARALLEL()
60 !! print*, 'USE of omp', openmp_st
61 !! !$omp end parallel
62 
63 contains
64 
65  subroutine sll_s_first_charge_accumulation_2d(p_group, q_accumulator)
66  type(sll_t_particle_group_4d), pointer :: p_group
67  type(sll_t_charge_accumulator_2d_ptr), dimension(:), pointer :: q_accumulator
68  type(sll_t_particle_4d), dimension(:), pointer :: p
69  type(sll_t_charge_accumulator_2d), pointer :: q_accum
70  sll_int64 :: i
71  sll_int64 :: num_particles
72  sll_real32 :: tmp1
73  sll_real32 :: tmp2
74  sll_int32 :: thread_id
75 
76  sll_assert(associated(p_group) .and. associated(q_accumulator))
77  num_particles = int(p_group%number_particles, i64)
78  p => p_group%p_list
79 
80 !$omp parallel PRIVATE(thread_id, tmp1, tmp2, q_accum)
81 #ifdef _OPENMP
82  thread_id = omp_get_thread_num()
83 #else
84  thread_id = 0
85 #endif
86  q_accum => q_accumulator(thread_id + 1)%q
87 !$omp do
88  do i = 1, num_particles
89  sll_accumulate_particle_charge(q_accum, p(i), tmp1, tmp2)
90  end do
91 !$omp end do
92 !$omp end parallel
93 
95 
96  subroutine sll_s_first_charge_accumulation_2d_cs(p_group, q_accumulator)
97 ! ---- Remember : _CS is for use of Cubic Splines ----
98 ! ------------------------------------------------------
99  type(sll_t_particle_group_4d), pointer :: p_group
100  type(sll_t_charge_accumulator_2d_cs_ptr), dimension(:), pointer :: q_accumulator
101  type(sll_t_particle_4d), dimension(:), pointer :: p
102  type(sll_t_charge_accumulator_2d_cs), pointer :: q_accum
103  sll_int64 :: i
104  sll_int64 :: num_particles
105  sll_real32 :: tmp(1:4, 1:2), temp
106  sll_int32 :: thread_id
107 
108  sll_assert(associated(p_group) .and. associated(q_accumulator))
109  num_particles = int(p_group%number_particles, i64)
110  p => p_group%p_list
111 
112 !$omp parallel default(SHARED) PRIVATE(thread_id, tmp, temp, q_accum)
113 #ifdef _OPENMP
114  thread_id = omp_get_thread_num()
115 #else
116  thread_id = 0
117 #endif
118  q_accum => q_accumulator(thread_id + 1)%q
119 !$omp do
120  do i = 1, num_particles
121  sll_accumulate_particle_charge_cs(q_accum, p(i), tmp, temp)
122  end do
123 !$omp end do
124 !$omp end parallel
125 
127 
128 !!$ - - - - for the GUIDING CENTER model - - - -
129  subroutine sll_s_first_gc_charge_accumulation_2d(p_group, q_accumulator)
130  type(sll_t_particle_group_2d), pointer :: p_group
131  type(sll_t_charge_accumulator_2d_ptr), dimension(:), pointer :: q_accumulator
132  type(sll_t_particle_2d), dimension(:), pointer :: p
133  type(sll_t_charge_accumulator_2d), pointer :: q_accum
134  sll_int64 :: i
135  sll_int64 :: num_particles
136  sll_real32 :: tmp1
137  sll_real32 :: tmp2
138  sll_int32 :: thread_id
139 
140  sll_assert(associated(p_group) .and. associated(q_accumulator))
141  num_particles = int(p_group%number_particles, i64)
142  p => p_group%p_list
143 
144 !$omp parallel default(SHARED) PRIVATE(thread_id, tmp1, tmp2, q_accum)
145 #ifdef _OPENMP
146  thread_id = omp_get_thread_num()
147 #else
148  thread_id = 0
149 #endif
150  q_accum => q_accumulator(thread_id + 1)%q
151 !$omp do
152  do i = 1, num_particles
153  sll_accumulate_particle_charge(q_accum, p(i), tmp1, tmp2)
154  end do
155 !$omp end do
156 !$omp end parallel
157 
159 
160  subroutine sll_s_first_gc_charge_accumulation_2d_cs(p_group, q_accumulator)
161  type(sll_t_particle_group_2d), pointer :: p_group
162  type(sll_t_charge_accumulator_2d_cs_ptr), dimension(:), pointer :: q_accumulator
163  type(sll_t_particle_2d), dimension(:), pointer :: p
164  type(sll_t_charge_accumulator_2d_cs), pointer :: q_accum
165  sll_int64 :: i
166  sll_int64 :: num_particles
167  sll_real32 :: tmp(1:4, 1:2), temp
168  sll_int32 :: thread_id
169 
170  sll_assert(associated(p_group) .and. associated(q_accumulator))
171  num_particles = int(p_group%number_particles, i64)
172  p => p_group%p_list
173 
174 !$omp parallel default(SHARED) PRIVATE(thread_id, tmp, temp, q_accum)
175 #ifdef _OPENMP
176  thread_id = omp_get_thread_num()
177 #else
178  thread_id = 0
179 #endif
180  q_accum => q_accumulator(thread_id + 1)%q
181 !$omp do
182  do i = 1, num_particles
183  sll_accumulate_particle_charge_cs(q_accum, p(i), tmp, temp)
184  end do
185 !$omp end do
186 !$omp end parallel
187 
189 
190 end module sll_m_pic_utilities
Particle (charge) deposition routines for 2D PIC and guiding-center simulations.
subroutine, public sll_s_first_gc_charge_accumulation_2d(p_group, q_accumulator)
subroutine, public sll_s_first_charge_accumulation_2d_cs(p_group, q_accumulator)
subroutine, public sll_s_first_gc_charge_accumulation_2d_cs(p_group, q_accumulator)
subroutine, public sll_s_first_charge_accumulation_2d(p_group, q_accumulator)
    Report Typos and Errors