/*
 * Copyright (C) 2012 eXo Platform SAS.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */

package org.crsh.cli.impl.parser;

import org.crsh.cli.descriptor.ArgumentDescriptor;
import org.crsh.cli.descriptor.CommandDescriptor;
import org.crsh.cli.impl.Multiplicity;
import org.crsh.cli.descriptor.OptionDescriptor;
import org.crsh.cli.impl.tokenizer.Token;
import org.crsh.cli.impl.tokenizer.Tokenizer;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;

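/**
 * A state of the command line parser. Each concrete status consumes tokens from the
 * {@link Tokenizer} of an incoming {@link Request} and produces a {@link Response} carrying
 * the emitted {@link Event}s, an optional status to transition to and, when a subordinate
 * command is recognized, the descriptor of that command.
 *
 * <p>The lifecycle is: {@link ReadingOption} resolves options and subordinate commands,
 * {@link WantReadArg} switches to argument handling ({@link ComputeArg} when invoking,
 * {@link ReadingArg} when completing) and {@link Done} is the terminal status.</p>
 *
 * <p>A driver loop could look roughly like the sketch below. This is illustrative only and
 * not the actual parser implementation; {@code containsStop} is a hypothetical helper that
 * checks the emitted events for an {@link Event.Stop}.</p>
 *
 * <pre>{@code
 * Status status = new Status.ReadingOption();
 * while (true) {
 *   Status.Response<T> resp = status.process(new Status.Request<T>(mode, tokenizer, command));
 *   if (resp.command != null) {
 *     command = resp.command;  // a subordinate command was resolved
 *   }
 *   if (resp.status != null) {
 *     status = resp.status;    // follow the requested transition
 *   }
 *   // deliver resp.events to the caller; a Stop event ends the parse
 *   if (resp.events != null && containsStop(resp.events)) {
 *     break;
 *   }
 * }
 * }</pre>
 */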
abstract class Status {

  /**
   * The input of a status: the parsing mode, the tokenizer to read tokens from and the
   * command against which tokens are resolved.
   */
  static class Request<T> {

    /** The parsing mode, either invoke or complete. */
    final Mode mode;

    /** The tokenizer providing the tokens to consume. */
    Tokenizer tokenizer;

    /** The descriptor of the command being parsed. */
    final CommandDescriptor<T> command;

    Request(Mode mode, Tokenizer tokenizer, CommandDescriptor<T> command) {
      this.mode = mode;
      this.tokenizer = tokenizer;
      this.command = command;
    }
  }

  /**
   * The output of a status: the emitted events, the optional next status and the optional
   * subordinate command that was resolved.
   */
  static class Response<T> {

    /** The next status to transition to, or null to remain in the current status. */
    Status status;

    /** The events emitted while processing the request, lazily created. */
    LinkedList<Event> events;

    /** The subordinate command that was resolved, if any. */
    CommandDescriptor<T> command;

    Response(Status status) {
      this.status = status;
      this.events = null;
      this.command = null;
    }

    Response() {
      this.status = null;
      this.events = null;
      this.command = null;
    }

    void add(Event event) {
      if (events == null) {
        events = new LinkedList<Event>();
      }
      events.add(event);
    }

    void addAll(Collection<Event> toAdd) {
      if (events == null) {
        events = new LinkedList<Event>();
      }
      events.addAll(toAdd);
    }
  }

  /**
   * Process a request.
   *
   * @param req the request
   * @param <T> the generic type of the command
   * @return the response
   */
  abstract <T> Response<T> process(Request<T> req);

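  /**
   * Reads options and their values and resolves subordinate commands, handing over to
   * {@link WantReadArg} when the current token is neither an option nor a known subordinate.
   * The empty long option ("--") ends option parsing.
   */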
  static class ReadingOption extends Status {

    <T> Response<T> process(Request<T> req) {
      Response<T> response = new Response<T>();
      Token token = req.tokenizer.peek();
      if (token == null) {
        response.add(new Event.Stop.Done(req.tokenizer.getIndex()));
      } else if (token instanceof Token.Whitespace) {
        response.add(new Event.Separator((Token.Whitespace) token));
        req.tokenizer.next();
      } else {
        Token.Literal literal = (Token.Literal)token;
        if (literal instanceof Token.Literal.Option) {
          Token.Literal.Option optionToken = (Token.Literal.Option)literal;
          if (optionToken.getName().length() == 0 && optionToken instanceof Token.Literal.Option.Long) {
            // The empty long option "--" ends option parsing
            req.tokenizer.next();
            if (req.tokenizer.hasNext()) {
              response.status = new Status.WantReadArg();
            } else {
              if (req.mode == Mode.INVOKE) {
                response.status = new Status.Done();
                response.add(new Event.Stop.Done(req.tokenizer.getIndex()));
              } else {
                response.add(new Event.Stop.Unresolved.NoSuchOption(optionToken));
              }
            }
          } else {
            OptionDescriptor desc = req.command.resolveOption(literal.getValue());
            if (desc != null) {
              req.tokenizer.next();
              int arity = desc.getArity();
              LinkedList<Token.Literal.Word> values = new LinkedList<Token.Literal.Word>();
              // Consume as many word values as the option arity allows
              while (arity > 0) {
                if (req.tokenizer.hasNext()) {
                  Token a = req.tokenizer.peek();
                  if (a instanceof Token.Whitespace) {
                    req.tokenizer.next();
                    if (req.tokenizer.hasNext() && req.tokenizer.peek() instanceof Token.Literal.Word) {
                      // ok : the whitespace separates the option from the next word value
                    } else {
                      req.tokenizer.pushBack();
                      break;
                    }
                  } else {
                    Token.Literal b = (Token.Literal)a;
                    if (b instanceof Token.Literal.Word) {
                      values.addLast((Token.Literal.Word)b);
                      req.tokenizer.next();
                      arity--;
                    } else {
                      req.tokenizer.pushBack();
                      break;
                    }
                  }
                } else {
                  break;
                }
              }
              response.add(new Event.Option(req.command, desc, optionToken, values));
            } else {
              response.add(new Event.Stop.Unresolved.NoSuchOption(optionToken));
            }
          }
        } else {
          Token.Literal.Word wordLiteral = (Token.Literal.Word)literal;
          CommandDescriptor<T> m = req.command.getSubordinate(wordLiteral.getValue());
          if (m != null) {
            response.command = m;
            req.tokenizer.next();
            response.add(new Event.Subordinate.Explicit(m, wordLiteral));
          } else {
            response.status = new Status.WantReadArg();
          }
        }
      }
      return response;
    }

  }

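  /**
   * Intermediate status reached once options are done: selects the argument handling strategy
   * according to the mode, {@link ComputeArg} for invocation and {@link ReadingArg} for
   * completion.
   */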
  static class WantReadArg extends Status {
    @Override
    <T> Response<T> process(Request<T> req) {
      switch (req.mode) {
        case INVOKE:
          return new Response<T>(new Status.ComputeArg());
        case COMPLETE:
          return new Response<T>(new Status.ReadingArg());
        default:
          throw new AssertionError();
      }
    }
  }

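  /**
   * Invocation-mode argument handling: counts the remaining words, determines how many
   * argument descriptors can be satisfied and distributes the words among them, giving
   * priority to required single arguments, then optional single arguments, and finally a
   * multi-valued argument which absorbs whatever is left. For instance, with a required
   * single argument followed by a multi-valued argument, the input {@code a b c} yields
   * {@code [a]} for the first argument and {@code [b, c]} for the second.
   */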
  static class ComputeArg extends Status {

    @Override
    <T> Response<T> process(Request<T> req) {
      Token token = req.tokenizer.peek();
      Response<T> response = new Response<T>();
      if (token == null) {
        response.add(new Event.Stop.Done(req.tokenizer.getIndex()));
      } else if (token instanceof Token.Whitespace) {
        response.add(new Event.Separator((Token.Whitespace) token));
        req.tokenizer.next();
      } else {

        // The declared arguments of the command
        List<? extends ArgumentDescriptor> arguments = req.command.getArguments();

        // Count the number of remaining non-whitespace tokens
        int tokenCount = 0;
        int wordCount = 0;
        do {
          Token t = req.tokenizer.next();
          if (t instanceof Token.Literal) {
            wordCount++;
          }
          tokenCount++;
        }
        while (req.tokenizer.hasNext());
        req.tokenizer.pushBack(tokenCount);

        // Count the required (one) and optional (zeroOrOne) single-valued arguments
        // that the available words can satisfy
        int oneCount = 0;
        int zeroOrOneCount = 0;
        int index = 0;
        for (ArgumentDescriptor argument : arguments) {
          Multiplicity multiplicity = argument.getMultiplicity();
          if (multiplicity == Multiplicity.SINGLE) {
            if (argument.isRequired()) {
              if (oneCount + 1 > wordCount) {
                break;
              }
              oneCount++;
            } else {
              zeroOrOneCount++;
            }
          }
          index++;
        }

        // This is the number of arguments we can satisfy
        arguments = arguments.subList(0, index);

        // How many words we can consume for zeroOrOne and zeroOrMore
        int toConsume = wordCount - oneCount;

        // Correct the zeroOrOneCount and adjust toConsume
        zeroOrOneCount = Math.min(zeroOrOneCount, toConsume);
        toConsume -= zeroOrOneCount;

        // Distribute the remaining words among the satisfiable arguments
        LinkedList<Event> events = new LinkedList<Event>();
        for (ArgumentDescriptor argument : arguments) {
          int size;
          switch (argument.getMultiplicity()) {
            case SINGLE:
              if (argument.isRequired()) {
                size = 1;
              } else {
                if (zeroOrOneCount > 0) {
                  zeroOrOneCount--;
                  size = 1;
                } else {
                  size = 0;
                }
              }
              break;
            case MULTI:
              // We consume the remaining
              size = toConsume;
              toConsume = 0;
              break;
            default:
              throw new AssertionError();
          }

          // Now take care of the argument
          if (size > 0) {
            List<Token.Literal> values = new ArrayList<Token.Literal>(size);
            while (size > 0) {
              Token t = req.tokenizer.next();
              if (t instanceof Token.Literal) {
                values.add(((Token.Literal)t));
                size--;
              }
            }
            events.addLast(new Event.Argument(req.command, argument, values));

            // Add the whitespace if needed
            if (req.tokenizer.hasNext() && req.tokenizer.peek() instanceof Token.Whitespace) {
              events.addLast(new Event.Separator((Token.Whitespace) req.tokenizer.next()));
            }
          }
        }

        // Signal the end of the parse
        events.addLast(new Event.Stop.Done(req.tokenizer.getIndex()));

        // Transition to the terminal status with the accumulated events
        response.status = new Status.Done();
        response.addAll(events);
      }
      return response;
    }
  }

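  /**
   * Terminal status: no further token processing is expected, so any subsequent request is an
   * illegal state.
   */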
  static class Done extends Status {
    @Override
    <T> Response<T> process(Request<T> req) {
      throw new IllegalStateException();
    }
  }

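  /**
   * Completion-mode argument handling: assigns the current word to the argument descriptor at
   * the current index, moving to the next index for single-valued arguments and absorbing all
   * remaining words for a multi-valued argument. Extra words beyond the declared arguments
   * produce a too-many-arguments stop event.
   */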
  static class ReadingArg extends Status {

    /** The index of the argument descriptor being read. */
    private final int index;

    ReadingArg() {
      this(0);
    }

    private ReadingArg(int index) {
      this.index = index;
    }

    ReadingArg next() {
      return new ReadingArg(index + 1);
    }

    @Override
    <T> Response<T> process(Request<T> req) {
      Token token = req.tokenizer.peek();
      Response<T> response = new Response<T>();
      if (token == null) {
        response.add(new Event.Stop.Done(req.tokenizer.getIndex()));
      } else if (token instanceof Token.Whitespace) {
        response.add(new Event.Separator((Token.Whitespace) token));
        req.tokenizer.next();
      } else {
        final Token.Literal literal = (Token.Literal)token;
        List<? extends ArgumentDescriptor> arguments = req.command.getArguments();
        if (index < arguments.size()) {
          ArgumentDescriptor argument = arguments.get(index);
          switch (argument.getMultiplicity()) {
            case SINGLE:
              req.tokenizer.next();
              response.add(new Event.Argument(req.command, argument, Arrays.asList(literal)));
              response.status = next();
              break;
            case MULTI:
              req.tokenizer.next();
              List<Token.Literal> values = new ArrayList<Token.Literal>();
              values.add(literal);
              while (req.tokenizer.hasNext()) {
                Token capture = req.tokenizer.next();
                if (capture instanceof Token.Literal) {
                  values.add(((Token.Literal)capture));
                } else {
                  if (req.tokenizer.hasNext()) {
                    // ok : more tokens follow the whitespace, keep scanning
                  } else {
                    req.tokenizer.pushBack();
                    break;
                  }
                }
              }
              response.add(new Event.Argument(req.command, argument, values));
          }
        } else {
          response.add(new Event.Stop.Unresolved.TooManyArguments(literal));
        }
      }
      return response;
    }
  }
}