001/*
002 * $Id: GroebnerBaseDistributedHybridMPJ.java 4952 2014-10-12 19:41:46Z axelclk
003 * $
004 */
005
006package edu.jas.gb;
007
008
009import java.io.IOException;
010import java.util.ArrayList;
011import java.util.Collections;
012import java.util.List;
013import java.util.ListIterator;
014import java.util.concurrent.atomic.AtomicInteger;
015
016import mpi.Comm;
017
018import org.apache.log4j.Logger;
019
020import edu.jas.kern.MPJEngine;
021import edu.jas.poly.ExpVector;
022import edu.jas.poly.GenPolynomial;
023import edu.jas.poly.GenPolynomialRing;
024import edu.jas.poly.PolyUtil;
025import edu.jas.structure.RingElem;
026import edu.jas.util.DistHashTableMPJ;
027import edu.jas.util.MPJChannel;
028import edu.jas.util.Terminator;
029import edu.jas.util.ThreadPool;
030
031
032/**
033 * Groebner Base distributed hybrid algorithm with MPJ. Implements a distributed
034 * memory with multi-core CPUs parallel version of Groebner bases with MPJ.
035 * Using pairlist class, distributed multi-threaded tasks do reduction, one
036 * communication channel per remote node.
037 * @param <C> coefficient type
038 * @author Heinz Kredel
039 */
040
public class GroebnerBaseDistributedHybridMPJ<C extends RingElem<C>> extends GroebnerBaseAbstract<C> {


    private static final Logger logger = Logger.getLogger(GroebnerBaseDistributedHybridMPJ.class);


    /**
     * Debug flag, bound once per instance from the log4j level.
     */
    public final boolean debug = logger.isDebugEnabled();


    /**
     * Number of threads to use. Must equal the number of MPJ processes (one
     * server proxy thread per remote node, see constructor check).
     */
    protected final int threads;


    /**
     * Default number of threads.
     */
    protected static final int DEFAULT_THREADS = 2;


    /**
     * Number of threads per node to use.
     */
    protected final int threadsPerNode;


    /**
     * Default number of threads per compute node.
     */
    protected static final int DEFAULT_THREADS_PER_NODE = 1;


    /**
     * Pool of threads to use on the master; runs one HybridReducerServerMPJ
     * per remote node and the MiMPJReducerServer jobs of minimalGB.
     */
    //protected final ExecutorService pool; // not for single node tests
    protected transient final ThreadPool pool;


    /**
     * Underlying MPJ engine (communicator), obtained from MPJEngine.
     */
    protected transient final Comm engine;


    /**
     * Message tag for pairs.
     */
    public static final int pairTag = GroebnerBaseDistributedHybridEC.pairTag.intValue();


    /**
     * Message tag for results.
     */
    public static final int resultTag = GroebnerBaseDistributedHybridEC.resultTag.intValue();


    /**
     * Message tag for acknowledgments.
     */
    public static final int ackTag = GroebnerBaseDistributedHybridEC.ackTag.intValue();


    /**
     * Constructor with DEFAULT_THREADS threads.
     * @throws IOException from the underlying MPJ engine setup.
     */
    public GroebnerBaseDistributedHybridMPJ() throws IOException {
        this(DEFAULT_THREADS);
    }


    /**
     * Constructor.
     * @param threads number of threads to use.
     * @throws IOException from the underlying MPJ engine setup.
     */
    public GroebnerBaseDistributedHybridMPJ(int threads) throws IOException {
        this(threads, new ThreadPool(threads));
    }


    /**
     * Constructor.
     * @param threads number of threads to use.
     * @param threadsPerNode threads per node to use.
     * @throws IOException from the underlying MPJ engine setup.
     */
    public GroebnerBaseDistributedHybridMPJ(int threads, int threadsPerNode) throws IOException {
        this(threads, threadsPerNode, new ThreadPool(threads));
    }


    /**
     * Constructor.
     * @param threads number of threads to use.
     * @param pool ThreadPool to use.
     * @throws IOException from the underlying MPJ engine setup.
     */
    public GroebnerBaseDistributedHybridMPJ(int threads, ThreadPool pool) throws IOException {
        this(threads, DEFAULT_THREADS_PER_NODE, pool);
    }


    /**
     * Constructor.
     * @param threads number of threads to use.
     * @param threadsPerNode threads per node to use.
     * @param pl pair selection strategy
     * @throws IOException from the underlying MPJ engine setup.
     */
    public GroebnerBaseDistributedHybridMPJ(int threads, int threadsPerNode, PairList<C> pl)
                    throws IOException {
        this(threads, threadsPerNode, new ThreadPool(threads), pl);
    }


    /**
     * Constructor.
     * @param threads number of threads to use.
     * @param threadsPerNode threads per node to use.
     * @param pool ThreadPool to use.
     * @throws IOException from the underlying MPJ engine setup.
     */
    public GroebnerBaseDistributedHybridMPJ(int threads, int threadsPerNode, ThreadPool pool)
                    throws IOException {
        this(threads, threadsPerNode, pool, new OrderedPairlist<C>());
    }


    /**
     * Constructor.
     * @param threads number of threads to use.
     * @param threadsPerNode threads per node to use.
     * @param pool ThreadPool to use.
     * @param pl pair selection strategy
     * @throws IOException from the underlying MPJ engine setup.
     */
    public GroebnerBaseDistributedHybridMPJ(int threads, int threadsPerNode, ThreadPool pool, PairList<C> pl)
                    throws IOException {
        super(new ReductionPar<C>(), pl);
        this.engine = MPJEngine.getCommunicator();
        int size = engine.Size();
        // need a master (rank 0) plus at least one client process
        if (size < 2) {
            throw new IllegalArgumentException("Minimal 2 MPJ processes required, not " + size);
        }
        // GBmaster assumes exactly one pool thread per MPJ process
        if (threads != size || pool.getNumber() != size) {
            throw new IllegalArgumentException("threads != size: " + threads + " != " + size + ", #pool "
                            + pool.getNumber());
        }
        this.threads = threads;
        this.pool = pool;
        this.threadsPerNode = threadsPerNode;
        //logger.info("generated pool: " + pool);
    }


    /**
     * Cleanup and terminate. Cancels pending pool jobs instead of waiting for
     * them.
     */
    @Override
    public void terminate() {
        if (pool == null) {
            return;
        }
        //pool.terminate();
        pool.cancel();
    }


    /**
     * Distributed Groebner base. Dispatches on the MPJ rank: rank 0 runs the
     * master part and returns the result, all other ranks run the client part
     * and return null.
     * @param modv number of module variables.
     * @param F polynomial list.
     * @return GB(F) a Groebner base of F or null, if a IOException occurs or on
     *         MPJ client part.
     */
    public List<GenPolynomial<C>> GB(int modv, List<GenPolynomial<C>> F) {
        try {
            if (engine.Rank() == 0) {
                return GBmaster(modv, F);
            }
        } catch (IOException e) {
            logger.info("GBmaster: " + e);
            e.printStackTrace();
            return null;
        }
        pool.terminate(); // not used on clients
        try {
            clientPart(0); // only 0
        } catch (IOException e) {
            logger.info("clientPart: " + e);
            e.printStackTrace();
        }
        // clients do not hold the final result
        return null;
    }


    /**
     * Distributed hybrid Groebner base, master part (MPJ rank 0). Distributes
     * the input polynomials via a distributed hash table, spawns one reducer
     * server proxy per remote node, waits for global termination and finally
     * computes a minimal GB locally.
     * @param modv number of module variables.
     * @param F polynomial list.
     * @return GB(F) a Groebner base of F or null, if a IOException occurs.
     */
    List<GenPolynomial<C>> GBmaster(int modv, List<GenPolynomial<C>> F) throws IOException {
        long t = System.currentTimeMillis();

        // normalize input: drop zeros, detect units, make monic
        List<GenPolynomial<C>> G = normalizeZerosOnes(F);
        G = PolyUtil.<C> monic(G);
        if (G.size() <= 1) {
            // NOTE(review): early return intentionally disabled — presumably
            // the clients would still need a termination signal; confirm.
            //return G; 
        }
        if (G.isEmpty()) {
            throw new IllegalArgumentException("empty F / zero ideal not allowed");
        }
        GenPolynomialRing<C> ring = G.get(0).ring;
        if (!ring.coFac.isField()) {
            throw new IllegalArgumentException("coefficients not from a field");
        }
        PairList<C> pairlist = strategy.create(modv, ring);
        pairlist.put(G);

        /*
        GenPolynomial<C> p;
        List<GenPolynomial<C>> G = new ArrayList<GenPolynomial<C>>();
        PairList<C> pairlist = null;
        boolean oneInGB = false;
        int l = F.size();
        int unused = 0;
        ListIterator<GenPolynomial<C>> it = F.listIterator();
        while (it.hasNext()) {
            p = it.next();
            if (p.length() > 0) {
                p = p.monic();
                if (p.isONE()) {
                    oneInGB = true;
                    G.clear();
                    G.add(p);
                    //return G; must signal termination to others
                }
                if (!oneInGB) {
                    G.add(p);
                }
                if (pairlist == null) {
                    //pairlist = new OrderedPairlist<C>(modv, p.ring);
                    pairlist = strategy.create(modv, p.ring);
                    if (!p.ring.coFac.isField()) {
                        throw new IllegalArgumentException("coefficients not from a field");
                    }
                }
                // theList not updated here
                if (p.isONE()) {
                    unused = pairlist.putOne();
                } else {
                    unused = pairlist.put(p);
                }
            } else {
                l--;
            }
        }
        //if (l <= 1) {
        //return G; must signal termination to others
        //}
        */
        logger.info("pairlist " + pairlist);

        logger.debug("looking for clients");
        // distributed hash table shared with all clients; maps index -> polynomial
        DistHashTableMPJ<Integer, GenPolynomial<C>> theList = new DistHashTableMPJ<Integer, GenPolynomial<C>>(
                        engine);
        theList.init();

        // publish the initial basis to the distributed list
        List<GenPolynomial<C>> al = pairlist.getList();
        for (int i = 0; i < al.size(); i++) {
            // no wait required
            GenPolynomial<C> nn = theList.put(Integer.valueOf(i), al.get(i));
            if (nn != null) {
                logger.info("double polynomials " + i + ", nn = " + nn + ", al(i) = " + al.get(i));
            }
        }

        // termination detector counts all remote worker threads
        Terminator finner = new Terminator((threads - 1) * threadsPerNode);
        HybridReducerServerMPJ<C> R;
        logger.info("using pool = " + pool);
        // one server proxy thread per remote node (ranks 1..threads-1)
        for (int i = 1; i < threads; i++) {
            MPJChannel chan = new MPJChannel(engine, i); // closed in server
            R = new HybridReducerServerMPJ<C>(i, threadsPerNode, finner, chan, theList, pairlist);
            pool.addJob(R);
            //logger.info("server submitted " + R);
        }
        logger.info("main loop waiting " + finner);
        finner.waitDone();
        int ps = theList.size();
        logger.info("#distributed list = " + ps);
        // make sure all polynomials arrived: not needed in master
        G = pairlist.getList();
        if (ps != G.size()) {
            logger.info("#distributed list = " + theList.size() + " #pairlist list = " + G.size());
        }
        for (GenPolynomial<C> q : theList.getValueList()) {
            if (q != null && !q.isZERO()) {
                logger.debug("final q = " + q.leadingExpVector());
            }
        }
        logger.debug("distributed list end");
        long time = System.currentTimeMillis();
        List<GenPolynomial<C>> Gp;
        Gp = minimalGB(G); // not yet distributed but threaded
        time = System.currentTimeMillis() - time;
        logger.debug("parallel gbmi time = " + time);
        G = Gp;
        logger.info("server theList.terminate() " + theList.size());
        theList.terminate();
        t = System.currentTimeMillis() - t;
        logger.info("server GB end, time = " + t + ", " + pairlist.toString());
        return G;
    }


    /**
     * GB distributed client part (MPJ rank != 0). Joins the distributed hash
     * table, opens one channel to the master and runs threadsPerNode reducer
     * client threads on it until the master sends the end message.
     * @param rank MPJ rank of the node the master/server runs on; only 0 is
     *            supported.
     * @throws IOException
     */
    public void clientPart(int rank) throws IOException {
        if (rank != 0) {
            throw new UnsupportedOperationException("only master at rank 0 implemented: " + rank);
        }
        Comm engine = MPJEngine.getCommunicator();

        DistHashTableMPJ<Integer, GenPolynomial<C>> theList = new DistHashTableMPJ<Integer, GenPolynomial<C>>();
        theList.init();

        // single channel to the master, shared by all local reducer threads
        MPJChannel chan = new MPJChannel(engine, rank);

        ThreadPool pool = new ThreadPool(threadsPerNode);
        logger.info("client using pool = " + pool);
        for (int i = 0; i < threadsPerNode; i++) {
            HybridReducerClientMPJ<C> Rr = new HybridReducerClientMPJ<C>(chan, theList); // i
            pool.addJob(Rr);
        }
        if (debug) {
            logger.info("clients submitted");
        }
        // wait for all reducer client threads to finish
        pool.terminate();
        logger.info("client pool.terminate()");

        chan.close();
        theList.terminate();
        return;
    }


    /**
     * Minimal ordered groebner basis. First removes polynomials whose leading
     * term is a multiple of another leading term, then reduces each remaining
     * polynomial against all others in parallel using the thread pool.
     * @param Fp a Groebner base.
     * @return a reduced Groebner base of Fp.
     */
    @SuppressWarnings("unchecked")
    @Override
    public List<GenPolynomial<C>> minimalGB(List<GenPolynomial<C>> Fp) {
        GenPolynomial<C> a;
        ArrayList<GenPolynomial<C>> G;
        G = new ArrayList<GenPolynomial<C>>(Fp.size());
        ListIterator<GenPolynomial<C>> it = Fp.listIterator();
        while (it.hasNext()) {
            a = it.next();
            if (a.length() != 0) { // always true
                // already monic  a = a.monic();
                G.add(a);
            }
        }
        if (G.size() <= 1) {
            return G;
        }

        ExpVector e;
        ExpVector f;
        GenPolynomial<C> p;
        ArrayList<GenPolynomial<C>> F;
        F = new ArrayList<GenPolynomial<C>>(G.size());
        boolean mt;

        // phase 1: drop a if its leading term is a multiple of another one
        while (G.size() > 0) {
            a = G.remove(0);
            e = a.leadingExpVector();

            it = G.listIterator();
            mt = false;
            while (it.hasNext() && !mt) {
                p = it.next();
                f = p.leadingExpVector();
                mt = e.multipleOf(f);
            }
            it = F.listIterator();
            while (it.hasNext() && !mt) {
                p = it.next();
                f = p.leadingExpVector();
                mt = e.multipleOf(f);
            }
            if (!mt) {
                F.add(a);
            } else {
                // System.out.println("dropped " + a.length());
            }
        }
        G = F;
        if (G.size() <= 1) {
            return G;
        }
        Collections.reverse(G); // important for lex GB

        // phase 2: normal form of each a w.r.t. all the others, in parallel
        MiMPJReducerServer<C>[] mirs = (MiMPJReducerServer<C>[]) new MiMPJReducerServer[G.size()];
        int i = 0;
        F = new ArrayList<GenPolynomial<C>>(G.size());
        while (G.size() > 0) {
            a = G.remove(0);
            // System.out.println("doing " + a.length());
            List<GenPolynomial<C>> R = new ArrayList<GenPolynomial<C>>(G.size() + F.size());
            R.addAll(G);
            R.addAll(F);
            mirs[i] = new MiMPJReducerServer<C>(R, a);
            pool.addJob(mirs[i]);
            i++;
            F.add(a);
        }
        G = F;
        F = new ArrayList<GenPolynomial<C>>(G.size());
        // collect the normal forms; getNF blocks until the job is done
        for (i = 0; i < mirs.length; i++) {
            a = mirs[i].getNF();
            F.add(a);
        }
        return F;
    }

}
469
470
471/**
472 * Distributed server reducing worker proxy threads.
473 * @param <C> coefficient type
474 */
475
class HybridReducerServerMPJ<C extends RingElem<C>> implements Runnable {


    private static final Logger logger = Logger.getLogger(HybridReducerServerMPJ.class);


    public final boolean debug = logger.isDebugEnabled();


    /**
     * Termination detector shared with the master and the receiver thread.
     */
    private final Terminator finner;


    /**
     * Channel to the remote node this proxy serves; closed at end of run().
     */
    private final MPJChannel pairChannel;


    //protected transient final Comm engine;


    /**
     * Distributed hash table with the intermediate basis.
     */
    private final DistHashTableMPJ<Integer, GenPolynomial<C>> theList;


    /**
     * Shared critical pair list.
     */
    private final PairList<C> pairlist;


    /**
     * Number of reducer client threads on the remote node.
     */
    private final int threadsPerNode;


    /**
     * MPJ rank of the remote partner node.
     */
    final int rank;


    /**
     * Message tag for pairs.
     */
    public static final int pairTag = GroebnerBaseDistributedHybridMPJ.pairTag;


    /**
     * Constructor.
     * @param r MPJ rank of partner.
     * @param tpn number of threads per node
     * @param fin terminator
     * @param chan MPJ channel
     * @param dl distributed hash table
     * @param L ordered pair list
     */
    HybridReducerServerMPJ(int r, int tpn, Terminator fin, MPJChannel chan,
                    DistHashTableMPJ<Integer, GenPolynomial<C>> dl, PairList<C> L) {
        rank = r;
        threadsPerNode = tpn;
        finner = fin;
        this.pairChannel = chan;
        theList = dl;
        pairlist = L;
        //logger.info("reducer server created " + this);
    }


    /**
     * Work loop. Protocol per iteration: receive a pair request from a remote
     * reducer client, pick the next critical pair (or wait for one), send it
     * back on the pair tag. Results are handled asynchronously by the
     * companion HybridReducerReceiverMPJ thread started here. On termination
     * an end message is sent to every remote client thread.
     * @see java.lang.Runnable#run()
     */
    @Override
    @SuppressWarnings("unchecked")
    public void run() {
        //logger.info("reducer server running with " + engine);
        // try {
        //     pairChannel = new MPJChannel(engine, rank); //,pairTag
        // } catch (IOException e) {
        //     e.printStackTrace();
        //     return;
        // }
        if (logger.isInfoEnabled()) {
            logger.info("reducer server running: pairChannel = " + pairChannel);
        }
        // record idle remote workers (minus one?)
        //finner.beIdle(threadsPerNode-1);
        finner.initIdle(threadsPerNode);
        // count of pairs sent but not yet answered; shared with the receiver
        AtomicInteger active = new AtomicInteger(0);

        // start receiver thread for results from the remote node
        HybridReducerReceiverMPJ<C> receiver = new HybridReducerReceiverMPJ<C>(rank, finner, active,
                        pairChannel, theList, pairlist);
        receiver.start();

        Pair<C> pair;
        //boolean set = false;
        boolean goon = true;
        //int polIndex = -1;
        int red = 0;
        int sleeps = 0;

        // while more requests
        while (goon) {
            // receive request if thread is reported inactive
            logger.debug("receive request");
            Object req = null;
            try {
                req = pairChannel.receive(pairTag);
                //} catch (InterruptedException e) {
                //goon = false;
                //e.printStackTrace();
            } catch (IOException e) {
                goon = false;
                e.printStackTrace();
            } catch (ClassNotFoundException e) {
                goon = false;
                e.printStackTrace();
            }
            logger.debug("received request, req = " + req);
            if (req == null) {
                goon = false;
                break;
            }
            if (!(req instanceof GBTransportMessReq)) {
                goon = false;
                break;
            }

            // find pair and manage termination status
            logger.debug("find pair");
            while (!pairlist.hasNext()) { // wait for new pairs or termination
                if (!finner.hasJobs() && !pairlist.hasNext()) {
                    goon = false;
                    break;
                }
                try {
                    sleeps++;
                    if (sleeps % 3 == 0) {
                        logger.info("waiting for reducers, remaining = " + finner.getJobs());
                    }
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    goon = false;
                    break;
                }
            }
            if (!pairlist.hasNext() && !finner.hasJobs()) {
                logger.info("termination detection: no pairs and no jobs left");
                goon = false;
                break; //continue; //break?
            }
            // order matters: mark busy before removing the pair, else the
            // terminator could fire while the pair is in flight
            finner.notIdle(); // before pairlist get!!
            pair = pairlist.removeNext();
            // send pair to client, even if null
            if (debug) {
                logger.info("active count = " + active.get());
                logger.info("send pair = " + pair);
            }
            GBTransportMess msg = null;
            if (pair != null) {
                msg = new GBTransportMessPairIndex(pair);
            } else {
                msg = new GBTransportMess(); //not End(); at this time
                // goon ?= false;
            }
            try {
                red++;
                pairChannel.send(pairTag, msg);
                @SuppressWarnings("unused")
                int a = active.getAndIncrement();
            } catch (IOException e) {
                e.printStackTrace();
                goon = false;
                break;
            }
            //logger.debug("#distributed list = " + theList.size());
        }
        logger.info("terminated, send " + red + " reduction pairs");

        /*
         * send end mark to clients
         */
        logger.debug("send end");
        try {
            for (int i = 0; i < threadsPerNode; i++) { // -1
                pairChannel.send(pairTag, new GBTransportMessEnd());
            }
            logger.info("sent end to clients");
            // send also end to receiver, no more
            //pairChannel.send(resultTag, new GBTransportMessEnd(), engine.Rank());
        } catch (IOException e) {
            if (logger.isDebugEnabled()) {
                e.printStackTrace();
            }
        }
        int d = active.get();
        if (d > 0) {
            logger.info("remaining active tasks = " + d);
        }
        receiver.terminate();
        //logger.info("terminated, send " + red + " reduction pairs");
        pairChannel.close();
        logger.info("redServ pairChannel.close()");
        finner.release();
    }
}
672
673
674/**
675 * Distributed server receiving worker thread.
676 * @param <C> coefficient type
677 */
678
class HybridReducerReceiverMPJ<C extends RingElem<C>> extends Thread {


    private static final Logger logger = Logger.getLogger(HybridReducerReceiverMPJ.class);


    public final boolean debug = logger.isDebugEnabled();


    /**
     * Distributed hash table with the intermediate basis.
     */
    private final DistHashTableMPJ<Integer, GenPolynomial<C>> theList;


    /**
     * Shared critical pair list; received results are recorded here.
     */
    private final PairList<C> pairlist;


    /**
     * Channel to the remote node, shared with the server proxy.
     */
    private final MPJChannel pairChannel;


    /**
     * MPJ rank of the remote partner node.
     */
    final int rank;


    /**
     * Termination detector shared with the server proxy.
     */
    private final Terminator finner;


    //private final int threadsPerNode;


    /**
     * Count of in-flight pairs, shared with the server proxy; decremented for
     * each received result.
     */
    private final AtomicInteger active;


    /**
     * Work-loop flag; cleared by terminate() or on end message / error.
     */
    private volatile boolean goon;


    /**
     * Message tag for results.
     */
    public static final int resultTag = GroebnerBaseDistributedHybridMPJ.resultTag;


    /**
     * Message tag for acknowledgments.
     */
    public static final int ackTag = GroebnerBaseDistributedHybridMPJ.ackTag;


    /**
     * Constructor.
     * @param r MPJ rank of partner.
     * @param fin terminator
     * @param a active remote tasks count
     * @param pc MPJ channel
     * @param dl distributed hash table
     * @param L ordered pair list
     */
    HybridReducerReceiverMPJ(int r, Terminator fin, AtomicInteger a, MPJChannel pc,
                    DistHashTableMPJ<Integer, GenPolynomial<C>> dl, PairList<C> L) {
        rank = r;
        active = a;
        //threadsPerNode = tpn;
        finner = fin;
        pairChannel = pc;
        theList = dl;
        pairlist = L;
        goon = true;
        //logger.info("reducer server created " + this);
    }


    /**
     * Work loop. Receives reduction results on the result tag, records
     * non-zero polynomials in the pair list and the distributed hash table,
     * then marks the worker idle and sends an acknowledgment back.
     * @see java.lang.Thread#run()
     */
    @Override
    @SuppressWarnings("unchecked")
    public void run() {
        //Pair<C> pair = null;
        GenPolynomial<C> H = null;
        int red = 0;
        int polIndex = -1;
        //Integer senderId; // obsolete

        // while more requests
        while (goon) {
            // receive result
            logger.debug("receive result");
            //senderId = null;
            Object rh = null;
            try {
                rh = pairChannel.receive(resultTag);
                @SuppressWarnings("unused")
                int i = active.getAndDecrement();
                //} catch (InterruptedException e) {
                //goon = false;
                ////e.printStackTrace();
                ////?? finner.initIdle(1);
                //break;
            } catch (IOException e) {
                e.printStackTrace();
                goon = false;
                finner.initIdle(1);
                break;
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
                goon = false;
                finner.initIdle(1);
                break;
            }
            logger.info("received result");
            if (rh == null) {
                if (this.isInterrupted()) {
                    goon = false;
                    finner.initIdle(1);
                    break;
                }
                //finner.initIdle(1);
            } else if (rh instanceof GBTransportMessEnd) { // should only happen from server
                logger.info("received GBTransportMessEnd");
                goon = false;
                //?? finner.initIdle(1);
                break;
            } else if (rh instanceof GBTransportMessPoly) {
                // update pair list
                red++;
                GBTransportMessPoly<C> mpi = (GBTransportMessPoly<C>) rh;
                H = mpi.pol;
                //senderId = mpi.threadId;
                if (H != null) {
                    if (logger.isInfoEnabled()) { // debug
                        logger.info("H = " + H.leadingExpVector());
                    }
                    if (!H.isZERO()) {
                        if (H.isONE()) {
                            // basis contains 1: record it, but keep running so
                            // the other clients can still be terminated cleanly
                            polIndex = pairlist.putOne();
                            //GenPolynomial<C> nn = 
                            theList.putWait(Integer.valueOf(polIndex), H);
                            //goon = false; must wait for other clients
                            //finner.initIdle(1);
                            //break;
                        } else {
                            polIndex = pairlist.put(H);
                            // use putWait ? but still not all distributed
                            //GenPolynomial<C> nn = 
                            theList.putWait(Integer.valueOf(polIndex), H);
                        }
                    }
                }
            }
            // only after recording in pairlist !
            finner.initIdle(1);
            try {
                pairChannel.send(ackTag, new GBTransportMess());
                logger.debug("send acknowledgement");
            } catch (IOException e) {
                e.printStackTrace();
                goon = false;
                break;
            }
        } // end while
        goon = false;
        logger.info("terminated, received " + red + " reductions");
    }


    /**
     * Terminate. Clears the work-loop flag and joins this thread.
     */
    public void terminate() {
        goon = false;
        try {
            this.join();
            //this.interrupt();
        } catch (InterruptedException e) {
            // unfug Thread.currentThread().interrupt();
        }
        logger.info("terminate end");
    }

}
857
858
859/**
860 * Distributed clients reducing worker threads.
861 */
862
863class HybridReducerClientMPJ<C extends RingElem<C>> implements Runnable {
864
865
866    private static final Logger logger = Logger.getLogger(HybridReducerClientMPJ.class);
867
868
869    public final boolean debug = logger.isDebugEnabled();
870
871
872    private final MPJChannel pairChannel;
873
874
875    private final DistHashTableMPJ<Integer, GenPolynomial<C>> theList;
876
877
878    private final ReductionPar<C> red;
879
880
881    //private final int threadsPerNode;
882
883
884    /*
885     * Identification number for this thread.
886     */
887    //public final Integer threadId; // obsolete
888
889
890    /**
891     * Message tag for pairs.
892     */
893    public static final int pairTag = GroebnerBaseDistributedHybridMPJ.pairTag;
894
895
896    /**
897     * Message tag for results.
898     */
899    public static final int resultTag = GroebnerBaseDistributedHybridMPJ.resultTag;
900
901
902    /**
903     * Message tag for acknowledgments.
904     */
905    public static final int ackTag = GroebnerBaseDistributedHybridMPJ.ackTag;
906
907
908    /**
909     * Constructor.
910     * @param tc tagged socket channel
911     * @param dl distributed hash table
912     */
913    HybridReducerClientMPJ(MPJChannel tc, DistHashTableMPJ<Integer, GenPolynomial<C>> dl) {
914        //this.threadsPerNode = tpn;
915        pairChannel = tc;
916        //threadId = 100 + tid; // keep distinct from other tags
917        theList = dl;
918        red = new ReductionPar<C>();
919    }
920
921
922    /**
923     * Work loop.
924     * @see java.lang.Runnable#run()
925     */
926    @Override
927    @SuppressWarnings("unchecked")
928    public void run() {
929        if (debug) {
930            logger.info("pairChannel   = " + pairChannel + " reducer client running");
931        }
932        Pair<C> pair = null;
933        GenPolynomial<C> pi, pj, ps;
934        GenPolynomial<C> S;
935        GenPolynomial<C> H = null;
936        //boolean set = false;
937        boolean goon = true;
938        boolean doEnd = true;
939        int reduction = 0;
940        //int sleeps = 0;
941        Integer pix, pjx, psx;
942
943        while (goon) {
944            /* protocol:
945             * request pair, process pair, send result, receive acknowledgment
946             */
947            // pair = (Pair) pairlist.removeNext();
948            Object req = new GBTransportMessReq();
949            logger.debug("send request = " + req);
950            try {
951                pairChannel.send(pairTag, req);
952            } catch (IOException e) {
953                goon = false;
954                if (debug) {
955                    e.printStackTrace();
956                }
957                logger.info("receive pair, exception ");
958                break;
959            }
960            logger.debug("receive pair, goon = " + goon);
961            doEnd = true;
962            Object pp = null;
963            try {
964                pp = pairChannel.receive(pairTag);
965                //} catch (InterruptedException e) {
966                //goon = false;
967                //e.printStackTrace();
968            } catch (IOException e) {
969                goon = false;
970                if (debug) {
971                    e.printStackTrace();
972                }
973                break;
974            } catch (ClassNotFoundException e) {
975                goon = false;
976                e.printStackTrace();
977            }
978            if (debug) {
979                logger.info("received pair = " + pp);
980            }
981            H = null;
982            if (pp == null) { // should not happen
983                continue;
984            }
985            if (pp instanceof GBTransportMessEnd) {
986                goon = false;
987                //doEnd = false; // bug
988                continue;
989            }
990            if (pp instanceof GBTransportMessPair || pp instanceof GBTransportMessPairIndex) {
991                pi = pj = ps = null;
992                if (pp instanceof GBTransportMessPair) {
993                    pair = ((GBTransportMessPair<C>) pp).pair;
994                    if (pair != null) {
995                        pi = pair.pi;
996                        pj = pair.pj;
997                        //logger.debug("pair: pix = " + pair.i 
998                        //               + ", pjx = " + pair.j);
999                    }
1000                }
1001                if (pp instanceof GBTransportMessPairIndex) {
1002                    pix = ((GBTransportMessPairIndex) pp).i;
1003                    pjx = ((GBTransportMessPairIndex) pp).j;
1004                    psx = ((GBTransportMessPairIndex) pp).s;
1005                    pi = theList.getWait(pix);
1006                    pj = theList.getWait(pjx);
1007                    ps = theList.getWait(psx);
1008                    //logger.info("pix = " + pix + ", pjx = " +pjx + ", psx = " +psx);
1009                }
1010
1011                if (pi != null && pj != null) {
1012                    S = red.SPolynomial(pi, pj);
1013                    //System.out.println("S   = " + S);
1014                    logger.info("ht(S) = " + S.leadingExpVector());
1015                    if (S.isZERO()) {
1016                        // pair.setZero(); does not work in dist
1017                        H = S;
1018                    } else {
1019                        if (debug) {
1020                            logger.debug("ht(S) = " + S.leadingExpVector());
1021                        }
1022                        H = red.normalform(theList, S);
1023                        reduction++;
1024                        if (H.isZERO()) {
1025                            // pair.setZero(); does not work in dist
1026                        } else {
1027                            H = H.monic();
1028                            if (logger.isInfoEnabled()) {
1029                                logger.info("ht(H) = " + H.leadingExpVector());
1030                            }
1031                        }
1032                    }
1033                } else {
1034                    logger.info("pi = " + pi + ", pj = " + pj + ", ps = " + ps);
1035                }
1036            }
1037            if (pp instanceof GBTransportMess) {
1038                logger.debug("null pair results in null H poly");
1039            }
1040
1041            // send H or must send null, if not at end
1042            if (debug) {
1043                logger.debug("#distributed list = " + theList.size());
1044                logger.debug("send H polynomial = " + H);
1045            }
1046            try {
1047                pairChannel.send(resultTag, new GBTransportMessPoly<C>(H)); //,threadId));
1048                doEnd = false;
1049            } catch (IOException e) {
1050                goon = false;
1051                e.printStackTrace();
1052            }
1053            logger.debug("done send poly message of " + pp);
1054            try {
1055                pp = pairChannel.receive(ackTag);
1056                //} catch (InterruptedException e) {
1057                //goon = false;
1058                //e.printStackTrace();
1059            } catch (IOException e) {
1060                goon = false;
1061                if (debug) {
1062                    e.printStackTrace();
1063                }
1064                break;
1065            } catch (ClassNotFoundException e) {
1066                goon = false;
1067                e.printStackTrace();
1068            }
1069            if (!(pp instanceof GBTransportMess)) {
1070                logger.error("invalid acknowledgement " + pp);
1071            }
1072            logger.debug("received acknowledgment " + pp);
1073        }
1074        logger.info("terminated, done " + reduction + " reductions");
1075        if (doEnd) {
1076            try {
1077                pairChannel.send(resultTag, new GBTransportMessEnd());
1078            } catch (IOException e) {
1079                //e.printStackTrace();
1080            }
1081            logger.info("terminated, send done");
1082        }
1083    }
1084}