11package org .phoebus .channelfinder .epics ;
22
33import java .util .Arrays ;
4- import java .util .Collections ;
54import java .util .HashMap ;
65import java .util .List ;
76import java .util .Map ;
8- import java .util .Set ;
9- import java .util .concurrent .ExecutorService ;
10- import java .util .concurrent .Executors ;
11- import java .util .concurrent .TimeUnit ;
127import java .util .concurrent .atomic .AtomicInteger ;
138import java .util .logging .Level ;
149import java .util .logging .Logger ;
1510
1611import javax .annotation .PostConstruct ;
1712import javax .annotation .PreDestroy ;
1813
19- import org .epics .nt .NTTable ;
20- import org .epics .nt .NTTableBuilder ;
21- import org .epics .nt .NTURI ;
22- import org .epics .pvaccess .PVAException ;
23- import org .epics .pvaccess .server .rpc .RPCResponseCallback ;
24- import org .epics .pvaccess .server .rpc .RPCServer ;
25- import org .epics .pvaccess .server .rpc .RPCServiceAsync ;
26- import org .epics .pvdata .factory .StatusFactory ;
27- import org .epics .pvdata .pv .PVBooleanArray ;
28- import org .epics .pvdata .pv .PVString ;
29- import org .epics .pvdata .pv .PVStringArray ;
30- import org .epics .pvdata .pv .PVStructure ;
31- import org .epics .pvdata .pv .ScalarType ;
14+ import org .epics .pva .data .PVABoolArray ;
15+ import org .epics .pva .data .PVAStringArray ;
16+ import org .epics .pva .data .PVAStructure ;
17+ import org .epics .pva .data .nt .MustBeArrayException ;
18+ import org .epics .pva .data .nt .NotValueException ;
19+ import org .epics .pva .data .nt .PVATable ;
20+ import org .epics .pva .data .nt .PVAURI ;
21+ import org .epics .pva .server .RPCService ;
3222import org .phoebus .channelfinder .entity .Channel ;
3323import org .phoebus .channelfinder .ChannelRepository ;
3424import org .springframework .beans .factory .annotation .Autowired ;
3525import org .springframework .context .annotation .ComponentScan ;
3626import org .springframework .stereotype .Service ;
3727import org .springframework .util .LinkedMultiValueMap ;
3828import org .springframework .util .MultiValueMap ;
29+ import org .epics .pva .server .PVAServer ;
30+ import org .epics .pva .server .ServerPV ;
3931
/**
 * A pva RPC service for channelfinder
 * <p>
 * Request:
 * <p>
 * The client requests a query as a NTURI pvStructure.
 * <p>
 * Result:
 * <p>
 * The service returns the result as an NTTable pvStructure.
 *
 * @author Kunal Shroff
@@ -57,108 +49,97 @@ public class ChannelFinderEpicsService {
5749
    // Shared logger for service lifecycle and request handling.
    private static final Logger logger = Logger.getLogger(ChannelFinderEpicsService.class.getName());

    // Name under which the RPC service PV is published on the network.
    public static final String SERVICE_DESC = "cfService:query";

    // Column names always present in the NTTable result, one row per channel.
    public static final String COLUMN_CHANNEL_NAME = "channelName";
    public static final String COLUMN_OWNER = "owner";

    // Repository used to execute channel queries; injected by Spring.
    @Autowired
    ChannelRepository repository;

    // PVA server and the RPC service PV it hosts; created in init(), released in onDestroy().
    PVAServer server;
    ServerPV serverPV;
7062
7163 @ PostConstruct
72- public void init () {
64+ public void init () throws Exception {
7365
7466 logger .log (Level .INFO , "Launching the epics rpc channelfinder service: " + SERVICE_DESC );
75- server = new RPCServer ();
67+
68+ server = new PVAServer ();
7669
7770 logger .log (Level .INFO , SERVICE_DESC + " initializing..." );
78- service = new ChannelFinderServiceImpl (repository );
79- server .registerService (SERVICE_DESC , service );
80- server .printInfo ();
71+ ChannelFinderServiceImpl service = new ChannelFinderServiceImpl (repository );
72+ serverPV = server .createPV (SERVICE_DESC , service );
8173 logger .log (Level .INFO , SERVICE_DESC + " is operational." );
8274
83- pool .submit (() -> {
84- try {
85- server .run (0 );
86- } catch (PVAException e ) {
87- logger .log (Level .SEVERE , "Failed to start the epics rpc channelfinder service" , e );
88- }
89- });
9075
9176 }
9277
9378 @ PreDestroy
94- public void onDestroy () throws Exception {
79+ public void onDestroy () {
9580 logger .log (Level .INFO , "Shutting down service " + SERVICE_DESC );
96- try {
97- service .shutdown ();
98- server .destroy ();
99- logger .log (Level .INFO , SERVICE_DESC + " Shutdown complete." );
100- } catch (PVAException e ) {
101- logger .log (Level .SEVERE , "Failed to close service : " + SERVICE_DESC , e );
102- }
81+ logger .info ("Shutting down service " + SERVICE_DESC );
82+ serverPV .close ();
83+ server .close ();
84+ logger .info (SERVICE_DESC + " Shutdown complete." );
10385 }
10486
105- private static class ChannelFinderServiceImpl implements RPCServiceAsync {
87+ private static class ChannelFinderServiceImpl implements RPCService {
10688
10789
108- private ChannelRepository repository ;
90+ private final ChannelRepository repository ;
10991
        /**
         * Creates the RPC handler backed by the given repository.
         *
         * @param repository channel repository used to execute searches; not
         *                   validated here — assumed non-null (injected upstream)
         */
        public ChannelFinderServiceImpl(ChannelRepository repository) {
            this.repository = repository;
            logger.log(Level.INFO, "start");
        }
11496
115- private final ExecutorService pool = Executors .newScheduledThreadPool (50 );
116-
11797 @ Override
118- public void request ( PVStructure args , RPCResponseCallback call ) {
119- logger .log (Level .FINE , () -> args . toString () );
120- HandlerQuery query = new HandlerQuery (args , call , repository );
121- query .run ();
98+ public PVAStructure call ( PVAStructure args ) throws Exception {
99+ logger .log (Level .FINE , args :: toString );
100+ HandlerQuery query = new HandlerQuery (args , repository );
101+ return query .run ();
122102 }
123103
124- private static class HandlerQuery implements Runnable {
104+ private static class HandlerQuery {
125105
126- private final RPCResponseCallback callback ;
127- private final PVStructure args ;
106+ private final PVAStructure args ;
128107 private final ChannelRepository channelRepository ;
129108
130- public HandlerQuery (PVStructure args , RPCResponseCallback callback , ChannelRepository channelRepository ) {
131- this .callback = callback ;
            /**
             * @param args              the raw NTURI request structure to decode
             * @param channelRepository repository used to execute the search
             */
            public HandlerQuery(PVAStructure args, ChannelRepository channelRepository) {
                this.args = args;
                this.channelRepository = channelRepository;
            }
135113
136- @ Override
137- public void run () {
138-
139- final Set <String > filteredColumns = Collections .emptySet ();
114+ public PVAStructure run () throws MustBeArrayException {
140115
141116 MultiValueMap <String , String > searchParameters = new LinkedMultiValueMap <>();
142- NTURI uri = NTURI .wrap (args );
143- String [] query = uri .getQueryNames ();
144- for (String parameter : query ) {
145- String value = uri .getQueryField (PVString .class , parameter ).get ();
117+ PVAURI uri = PVAURI .fromStructure (args );
118+ Map <String , String > query ;
119+ try {
120+ query = uri .getQuery ();
121+ } catch (NotValueException e ) {
122+ logger .log (Level .WARNING , () -> "Query " + uri + " not valid." + e .getMessage ());
123+ throw new UnsupportedOperationException ("The requested operation is not supported." );
124+ }
125+ for (String parameter : query .keySet ()) {
126+ String value = query .get (parameter );
146127 if (value != null && !value .isEmpty ()) {
147128 switch (parameter ) {
148129 case "_name" :
149- searchParameters .put ("~name" , Arrays . asList (value ));
130+ searchParameters .put ("~name" , List . of (value ));
150131 break ;
151132 case "_tag" :
152- searchParameters .put ("~tag" , Arrays . asList (value ));
133+ searchParameters .put ("~tag" , List . of (value ));
153134 break ;
154135 case "_size" :
155- searchParameters .put ("~size" , Arrays . asList (value ));
136+ searchParameters .put ("~size" , List . of (value ));
156137 break ;
157138 case "_from" :
158- searchParameters .put ("~from" , Arrays . asList (value ));
139+ searchParameters .put ("~from" , List . of (value ));
159140 break ;
160141 default :
161- searchParameters .put (parameter , Arrays . asList (value ));
142+ searchParameters .put (parameter , List . of (value ));
162143 break ;
163144 }
164145 }
@@ -169,89 +150,48 @@ public void run() {
169150 final Map <String , List <String >> channelTable = new HashMap <>();
170151 final Map <String , List <String >> channelPropertyTable = new HashMap <>();
171152 final Map <String , boolean []> channelTagTable = new HashMap <>();
172- channelTable .put ("channelName" , Arrays .asList (new String [result .size ()]));
173- channelTable .put ("owner" , Arrays .asList (new String [result .size ()]));
153+ channelTable .put (COLUMN_CHANNEL_NAME , Arrays .asList (new String [result .size ()]));
154+ channelTable .put (COLUMN_OWNER , Arrays .asList (new String [result .size ()]));
174155
175156 AtomicInteger counter = new AtomicInteger (0 );
176157
177158 result .forEach (ch -> {
178159
179160 int index = counter .getAndIncrement ();
180161
181- channelTable .get ("channelName" ).set (index , ch .getName ());
182- channelTable .get ("owner" ).set (index , ch .getOwner ());
183-
184- if (!filteredColumns .contains ("ALL" )) {
185- ch .getTags ().stream ().filter (tag ->
186- filteredColumns .isEmpty () || filteredColumns .contains (tag .getName ())
187- ).forEach (t -> {
188- if (!channelTagTable .containsKey (t .getName ())) {
189- channelTagTable .put (t .getName (), new boolean [result .size ()]);
190- }
191- channelTagTable .get (t .getName ())[index ] = true ;
192- });
193-
194- ch .getProperties ().stream ().filter (prop ->
195- filteredColumns .isEmpty () || filteredColumns .contains (prop .getName ())
196- ).forEach (prop -> {
197- if (!channelPropertyTable .containsKey (prop .getName ())) {
198- channelPropertyTable .put (prop .getName (), Arrays .asList (new String [result .size ()]));
199- }
200- channelPropertyTable .get (prop .getName ()).set (index , prop .getValue ());
201- });
202- }
162+ channelTable .get (COLUMN_CHANNEL_NAME ).set (index , ch .getName ());
163+ channelTable .get (COLUMN_OWNER ).set (index , ch .getOwner ());
164+
165+ ch .getTags ().forEach (t -> {
166+ if (!channelTagTable .containsKey (t .getName ())) {
167+ channelTagTable .put (t .getName (), new boolean [result .size ()]);
168+ }
169+ channelTagTable .get (t .getName ())[index ] = true ;
170+ });
171+
172+ ch .getProperties ().forEach (prop -> {
173+ if (!channelPropertyTable .containsKey (prop .getName ())) {
174+ channelPropertyTable .put (prop .getName (), Arrays .asList (new String [result .size ()]));
175+ }
176+ channelPropertyTable .get (prop .getName ()).set (index , prop .getValue ());
177+ });
203178 });
204- NTTableBuilder ntTableBuilder = NTTable .createBuilder ();
179+ PVATable .PVATableBuilder ntTableBuilder = PVATable .PVATableBuilder .aPVATable ().name (SERVICE_DESC );
180+
205181 channelTable .keySet ().forEach (name ->
206- ntTableBuilder .addColumn (name , ScalarType . pvString )
182+ ntTableBuilder .addColumn (new PVAStringArray ( name , channelTable . get ( name ). toArray ( String []:: new )) )
207183 );
208184 channelPropertyTable .keySet ().forEach (name ->
209- ntTableBuilder .addColumn (name , ScalarType . pvString )
185+ ntTableBuilder .addColumn (new PVAStringArray ( name , channelPropertyTable . get ( name ). toArray ( String []:: new )) )
210186 );
211187 channelTagTable .keySet ().forEach (name ->
212- ntTableBuilder .addColumn (name , ScalarType .pvBoolean )
213- );
214- NTTable ntTable = ntTableBuilder .create ();
215-
216- channelTable .entrySet ().stream ().forEach (col ->
217- ntTable .getColumn (PVStringArray .class , col .getKey ()).put (0 , col .getValue ().size (),
218- col .getValue ().stream ().toArray (String []::new ), 0 )
219- );
220-
221- channelPropertyTable .entrySet ().stream ().forEach (col ->
222- ntTable .getColumn (PVStringArray .class , col .getKey ()).put (0 , col .getValue ().size (),
223- col .getValue ().stream ().toArray (String []::new ), 0 )
224- );
225-
226- channelTagTable .entrySet ().stream ().forEach (col ->
227- ntTable .getColumn (PVBooleanArray .class , col .getKey ()).put (0 , col .getValue ().length ,
228- col .getValue (), 0 )
188+ ntTableBuilder .addColumn (new PVABoolArray (name , channelTagTable .get (name )))
229189 );
230190
231- logger .log (Level .FINE , () -> ntTable . toString () );
232- this . callback . requestDone ( StatusFactory . getStatusCreate (). getStatusOK (), ntTable . getPVStructure () );
191+ logger .log (Level .FINE , ntTableBuilder :: toString );
192+ return ntTableBuilder . build ( );
233193 }
234194 }
235-
236- public void shutdown () {
237- logger .log (Level .INFO , "shutting down service." );
238- pool .shutdown ();
239- // Disable new tasks from being submitted
240- try {
241- // Wait a while for existing tasks to terminate
242- if (!pool .awaitTermination (60 , TimeUnit .SECONDS )) {
243- pool .shutdownNow (); // Cancel currently executing tasks
244- // Wait a while for tasks to respond to being cancelled
245- if (!pool .awaitTermination (60 , TimeUnit .SECONDS ))
246- System .err .println ("Pool did not terminate" );
247- }
248- } catch (InterruptedException ie ) {
249- // (Re-)Cancel if current thread also interrupted
250- pool .shutdownNow ();
251- // Preserve interrupt status
252- Thread .currentThread ().interrupt ();
253- }
254- logger .log (Level .INFO , "completed shut down." );
255- }
195+
256196 }
257197}
0 commit comments